注意:服务器上的 Flink 版本必须与代码依赖的 Flink 版本一致,否则发布到集群会失败(即使本地运行成功)。
pom文件
4.0.0
com.imooc.flink.java
flink-train-java
1.0
jar
Flink Quickstart Job
http://www.myorganization.org
UTF-8
1.11.2
1.8
2.11
${java.version}
${java.version}
apache.snapshots
Apache Development Snapshot Repository
https://repository.apache.org/content/repositories/snapshots/
false
true
org.apache.flink
flink-java
${flink.version}
provided
org.apache.flink
flink-streaming-java_${scala.binary.version}
${flink.version}
provided
com.taosdata.jdbc
taos-jdbcdriver
2.0.18
com.zaxxer
HikariCP
3.4.1
com.alibaba
druid
1.2.6
com.alibaba
fastjson
1.2.47
org.apache.flink
flink-connector-filesystem_2.11
${flink.version}
org.apache.flink
flink-streaming-java_2.11
${flink.version}
org.apache.flink
flink-clients_2.11
${flink.version}
org.apache.hadoop
hadoop-client
2.8.1
org.slf4j
slf4j-log4j12
1.7.7
runtime
log4j
log4j
1.2.17
runtime
org.apache.flink
flink-connector-kafka_2.11
${flink.version}
org.apache.flink
flink-table-api-java-bridge_2.11
${flink.version}
provided
org.apache.flink
flink-json
${flink.version}
org.apache.flink
flink-table-planner_2.11
${flink.version}
org.apache.flink
flink-scala_2.11
${flink.version}
org.apache.flink
flink-streaming-scala_2.11
${flink.version}
net.alchim31.maven
scala-maven-plugin
3.2.0
org.apache.maven.plugins
maven-assembly-plugin
3.0.0
net.alchim31.maven
scala-maven-plugin
3.2.0
compile
testCompile
org.apache.maven.plugins
maven-assembly-plugin
3.0.0
jar-with-dependencies
make-assembly
package
single
add-dependencies-for-IDEA
idea.version
org.apache.flink
flink-java
${flink.version}
compile
org.apache.flink
flink-streaming-java_${scala.binary.version}
${flink.version}
compile
org.springframework.boot
spring-boot-starter-logging
2.4.5
主函数
package com.zx.iot.consumer;
import com.alibaba.fastjson.JSONObject;
import com.zx.iot.dto.Equipment;
import com.zx.iot.dto.Thing;
import com.zx.iot.producer.IotProducer;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
//@Slf4j
/**
 * Flink streaming job (Java API) that consumes IoT messages from the Kafka
 * topic "iot-data", parses each JSON payload into a Thing, and — judging by
 * the IotProducer import — presumably republishes a processed result to
 * another topic (TODO confirm: the relevant code is truncated below).
 *
 * NOTE(review): the original header said the word-count sample read from a
 * socket; this version reads from Kafka instead.
 */
public class IotDataFlink {
public static void main(String[] args) throws Exception {
// Step 1: obtain the streaming execution environment.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// Checkpointing is disabled; enable it for recovery/exactly-once semantics.
// env.enableCheckpointing(1000);
// Kafka consumer configuration.
Properties properties = new Properties();
properties.setProperty("bootstrap.servers", "10.6.24.56:9092");//kafka10.6.24.56:2181
properties.setProperty("group.id", "test");
// Source: raw JSON strings from the "iot-data" topic.
DataStream stream = env
.addSource(new FlinkKafkaConsumer<>("iot-data", new SimpleStringSchema(), properties));
// Map each JSON message into a Thing. "data" appears to be a
// comma-separated list of equipment entries — TODO confirm format.
DataStream answerDataStream = stream.map((String strMsg) -> {
Thing thing = new Thing();
try {
JSONObject jsonObject = JSONObject.parseObject(strMsg);
thing.setTs(jsonObject.get("ts").toString());
String data = jsonObject.get("data").toString();
//thing.setData(data);
List list = new ArrayList<>();
if(data!=null){
String[] equipmentArray = data.split(",");
// NOTE(review): the source is truncated/garbled from here on — the
// line below is not valid Java and the map lambda is never closed.
// The missing code presumably built Equipment objects from
// equipmentArray and attached them to `thing` (and possibly sent the
// result via IotProducer). Restore this section from version control.
for (int i = 0;ix!=null);
/*stream.flatMap(new FlatMapFunction>() {
@Override
public void flatMap(String value, Collector> collector) throws Exception {
String[] tokens = value.toLowerCase().split(",");
System.err.println(tokens[0]);
for(String token : tokens) {
if(token.length() > 0) {
collector.collect(new Tuple2(token,1));
}
}
}
}).keyBy(0).timeWindow(Time.seconds(5000)).sum(1).print().setParallelism(10);*///
// Submit the job. NOTE(review): the job name "wordcount" is a leftover
// from the sample this was copied from; consider "iot-data-consumer".
env.execute("wordcount");
}
}
数据接收类
Thing类
package com.zx.iot.dto;
import java.util.List;
/**
 * Mutable DTO for one IoT message: a timestamp string, the raw "data"
 * payload, and the list of equipment entries parsed from it.
 * NOTE(review): the raw (non-generic) {@code List} type is kept as-is;
 * elements are presumably Equipment — confirm before genericizing.
 */
public class Thing {
    // Message timestamp, kept as the String received from upstream.
    private String ts;
    // Raw payload string before parsing.
    private String data;
    // Parsed equipment entries.
    private List list;

    /** No-arg constructor, required by JSON (de)serialization frameworks. */
    public Thing() {
    }

    /**
     * Convenience constructor populating all three fields.
     *
     * @param ts   message timestamp
     * @param data raw payload string
     * @param list parsed equipment entries
     */
    public Thing(String ts, String data, List list) {
        this.ts = ts;
        this.data = data;
        this.list = list;
    }

    public String getTs() {
        return ts;
    }

    public void setTs(String ts) {
        this.ts = ts;
    }

    public String getData() {
        return data;
    }

    public void setData(String data) {
        this.data = data;
    }

    public List getList() {
        return list;
    }

    public void setList(List list) {
        this.list = list;
    }

    @Override
    public String toString() {
        // Reproduces the exact original format: Thing{ts='..', data='..', list=..}
        StringBuilder sb = new StringBuilder("Thing{");
        sb.append("ts='").append(ts).append('\'');
        sb.append(", data='").append(data).append('\'');
        sb.append(", list=").append(list);
        return sb.append('}').toString();
    }
}
Equipment类
package com.zx.iot.dto;
/**
 * Mutable DTO describing one piece of equipment: its code and its status.
 * Both values are kept as Strings, exactly as received from upstream.
 */
public class Equipment {
    // Unique equipment identifier string.
    private String equipmentCode;
    // Status value (meaning of codes like "1" is defined upstream).
    private String status;

    /** No-arg constructor, required by JSON (de)serialization frameworks. */
    public Equipment() {
    }

    /**
     * Convenience constructor populating both fields.
     *
     * @param equipmentCode unique equipment identifier
     * @param status        current status value
     */
    public Equipment(String equipmentCode, String status) {
        this.equipmentCode = equipmentCode;
        this.status = status;
    }

    public String getEquipmentCode() {
        return equipmentCode;
    }

    public void setEquipmentCode(String equipmentCode) {
        this.equipmentCode = equipmentCode;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    @Override
    public String toString() {
        // Reproduces the exact original format: Equipment{equipmentCode='..', status='..'}
        StringBuilder sb = new StringBuilder("Equipment{");
        sb.append("equipmentCode='").append(equipmentCode).append('\'');
        sb.append(", status='").append(status).append('\'');
        return sb.append('}').toString();
    }
}
监听后发送数据
package com.zx.iot.producer;
import com.alibaba.fastjson.JSON;
import com.zx.iot.dto.Equipment;
import com.zx.iot.dto.Thing;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
 * Publishes a {@link Thing} to Kafka by running a one-shot Flink job:
 * the Thing is serialized to JSON, wrapped in a single-element bounded
 * stream, and sunk into the given topic via {@code FlinkKafkaProducer}.
 */
public class IotProducer {

    /**
     * Serializes {@code thing} to JSON and sends it to {@code topic}.
     * Blocks until the embedded Flink job has executed.
     *
     * @param topic target Kafka topic name
     * @param thing payload to serialize and publish
     * @throws Exception if the Flink job fails to execute
     */
    public static void sendList(String topic, Thing thing) throws Exception {
        // Build a local execution environment for this one-shot job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism should not exceed the number of CPU cores.
        env.setParallelism(3);

        // Single-element source: the Thing rendered as a JSON string.
        String json = JSON.toJSONString(thing);
        DataStreamSource<String> stream = env.fromElements(json);
        System.err.println("发送。。。。。。。。。。" + topic);

        // Kafka producer configuration. Fix: the original also set
        // "group.id", which is a consumer-only setting that producers
        // ignore (Kafka logs an "unknown config" warning); it is dropped.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "10.6.24.56:9092");//kafka10.6.24.56:9092

        // Create the Kafka sink (typed, instead of the raw type) and
        // attach it to the stream.
        FlinkKafkaProducer<String> kafkaProducer =
                new FlinkKafkaProducer<>(topic, new SimpleStringSchema(), properties);
        stream.addSink(kafkaProducer);
        System.err.println("发送后。。。。。。。。。。");

        // Execute the job; returns once the element has been sent.
        env.execute("sender");
    }
}