Data flow: Kafka -> Kafka -> MySQL
Simulate writing data into the Kafka topic wxt:
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class KafkaProducerExample {
    public static void main(String[] args) throws Exception {
        // Kafka broker address and port
        String kafkaServers = "localhost:9092";

        // Producer properties
        Properties properties = new Properties();
        properties.put("bootstrap.servers", kafkaServers);
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // Create the Kafka producer
        KafkaProducer<String, String> producer = new KafkaProducer<>(properties);

        // Build a JSON message
        String topic = "wxt";
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("id", 9);
        jsonObject.put("name", "王大大");
        jsonObject.put("age", 11);

        // Convert the JSON object to a string and print it
        String jsonString = jsonObject.toString();
        System.out.println("JSON String: " + jsonString);

        // Send the message
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, jsonString);
        producer.send(record);

        // Close the producer (this also flushes pending records)
        producer.close();
    }
}
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class KafkaToMysqlJob {
    public static void main(String[] args) throws Exception {
        // Create a streaming table environment (the Blink planner is the default in Flink 1.14)
        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
        TableEnvironment tEnv = TableEnvironment.create(settings);

        // Kafka connection properties
        String kafkaBootstrapServers = "localhost:9092";
        String kafkaTopic = "wxt";
        String groupId = "wxt1";

        // Register the Kafka source table (topic wxt), read as JSON from the earliest offset
        tEnv.executeSql("CREATE TABLE kafka_table (\n" +
                "  id INT,\n" +
                "  name STRING,\n" +
                "  age INT,\n" +
                "  proctime AS PROCTIME()\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = '" + kafkaTopic + "',\n" +
                "  'properties.bootstrap.servers' = '" + kafkaBootstrapServers + "',\n" +
                "  'properties.group.id' = '" + groupId + "',\n" +
                "  'format' = 'json',\n" +
                "  'scan.startup.mode' = 'earliest-offset'\n" +
                ")");

        // Register the intermediate Kafka table (topic wxt2);
        // scan.startup.mode can be 'earliest-offset' or 'latest-offset' as needed
        tEnv.executeSql("CREATE TABLE kafka_table2 (\n" +
                "  window_start STRING,\n" +
                "  window_end STRING,\n" +
                "  name STRING,\n" +
                "  age INT\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'wxt2',\n" +
                "  'properties.bootstrap.servers' = '" + kafkaBootstrapServers + "',\n" +
                "  'properties.group.id' = 'kafka_table2',\n" +
                "  'format' = 'json',\n" +
                "  'scan.startup.mode' = 'latest-offset'\n" +
                ")");

        // Register the MySQL sink table (JDBC connector)
        tEnv.executeSql("CREATE TABLE mysql_sink_table (\n" +
                "  window_start STRING,\n" +
                "  window_end STRING,\n" +
                "  name STRING,\n" +
                "  age INT\n" +
                ") WITH (\n" +
                "  'connector' = 'jdbc',\n" +
                "  'url' = 'jdbc:mysql://localhost:3306/tests?serverTimezone=Asia/Shanghai&useUnicode=true&characterEncoding=UTF-8',\n" +
                "  'username' = 'root',\n" +
                "  'password' = '12345678',\n" +
                "  'table-name' = 'leiji_age'\n" +
                ")");

        // Cumulative window aggregation: sum age per name within a 1-day window,
        // emitting an updated result every 20 seconds, and write it to topic wxt2
        tEnv.executeSql("INSERT INTO kafka_table2\n" +
                "SELECT CAST(window_start AS STRING) AS window_start, CAST(window_end AS STRING) AS window_end, name, SUM(age) AS age\n" +
                "FROM TABLE(CUMULATE(TABLE kafka_table, DESCRIPTOR(proctime), INTERVAL '20' SECOND, INTERVAL '1' DAY))\n" +
                "GROUP BY window_start, window_end, name");

        // Forward the aggregated results from topic wxt2 into MySQL.
        // Each executeSql(INSERT ...) submits its own streaming job, so no env.execute() call is needed here.
        tEnv.executeSql("INSERT INTO mysql_sink_table SELECT window_start, window_end, name, age FROM kafka_table2");
    }
}
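The Flink JDBC connector only writes into an existing table; it does not create leiji_age in MySQL by itself. A minimal sketch of a matching schema, where the column types and VARCHAR lengths are assumptions chosen to fit the sink definition above rather than taken from the original setup:

-- Sketch of the MySQL target table for the JDBC sink (types/lengths assumed)
CREATE TABLE tests.leiji_age (
    window_start VARCHAR(64),
    window_end   VARCHAR(64),
    name         VARCHAR(255),
    age          INT
);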
Result data in Kafka topic wxt2:
MySQL result data:
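To inspect the accumulated results, the sink table can be queried directly; an illustrative query, assuming the schema sketched above:

SELECT window_start, window_end, name, age
FROM tests.leiji_age
ORDER BY window_end, name;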
pom file:
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>flinksql</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <scala.binary.version>2.12</scala.binary.version>
        <flink.version>1.14.3</flink.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-common</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.22</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.73</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-csv</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-json</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-math3</artifactId>
            <version>3.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-jdbc_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>com.mysql</groupId>
            <artifactId>mysql-connector-j</artifactId>
            <version>8.0.31</version>
        </dependency>
        <dependency>
            <groupId>commons-io</groupId>
            <artifactId>commons-io</artifactId>
            <version>2.11.0</version>
        </dependency>
        <dependency>
            <groupId>org.antlr</groupId>
            <artifactId>antlr-runtime</artifactId>
            <version>3.5.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.thrift</groupId>
            <artifactId>libfb303</artifactId>
            <version>0.9.3</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-simple</artifactId>
            <version>1.7.15</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.7</version>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
            <scope>runtime</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.5.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>3.1.1</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-dependency-plugin</artifactId>
                <version>2.10</version>
                <executions>
                    <execution>
                        <id>copy-dependencies</id>
                        <phase>package</phase>
                        <goals>
                            <goal>copy-dependencies</goal>
                        </goals>
                        <configuration>
                            <outputDirectory>${project.build.directory}/lib</outputDirectory>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.scala-tools</groupId>
                <artifactId>maven-scala-plugin</artifactId>
                <version>2.15.2</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>