另一篇通过 JDBCAppendTableSink 方式实现存入 MySQL 的文章:https://blog.csdn.net/qq_39799876/article/details/91884031
附有Kafka生产json格式数据的代码
package cn.flink;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Json;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;
/**
 * Entry point: reads JSON records from a Kafka 0.10 topic via the Flink Table API,
 * runs a pass-through SQL query, and writes each row to MySQL through {@link MySQLWriter}.
 */
public class MainDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(1);
        // Checkpoint every 5s so the Kafka source can recover offsets on failure.
        streamEnv.enableCheckpointing(5000);

        StreamTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(streamEnv);

        // Kafka 0.10 source descriptor; zookeeper.connect is required by the 0.10 connector.
        Kafka kafkaDescriptor = new Kafka()
                .version("0.10")
                .topic("kafka")
                .property("bootstrap.servers", "localhost:9092")
                .property("zookeeper.connect", "localhost:2181");

        // JSON payloads must contain every declared field (failOnMissingField);
        // the wire schema is derived from the table schema below.
        Json jsonFormat = new Json().failOnMissingField(true).deriveSchema();

        Schema tableSchema = new Schema()
                .field("id", Types.INT)
                .field("name", Types.STRING)
                .field("sex", Types.STRING)
                .field("score", Types.FLOAT);

        tableEnv.connect(kafkaDescriptor)
                .withFormat(jsonFormat)
                .withSchema(tableSchema)
                .inAppendMode()
                .registerTableSource("tmp_table");

        Table resultTable = tableEnv.sqlQuery("select * from tmp_table");

        // Convert each appended row to an Info POJO and sink it into MySQL.
        tableEnv.toAppendStream(resultTable, Info.class).addSink(new MySQLWriter());

        streamEnv.execute();
    }
}
/**
 * Flink POJO mirroring one JSON record from Kafka.
 * Public fields and the no-arg constructor are required so Flink can
 * (de)serialize instances reflectively.
 */
public class Info {
    public int id;
    public String name;
    public String sex;
    public float score;

    /** No-arg constructor required by Flink's POJO type extraction. */
    public Info() {
    }

    /** Convenience constructor populating every field. */
    public Info(int id, String name, String sex, float score) {
        this.id = id;
        this.name = name;
        this.sex = sex;
        this.score = score;
    }

    /** Colon-separated rendering of all fields, e.g. {@code 1:alice:f:9.5}. */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder();
        out.append(id).append(':')
           .append(name).append(':')
           .append(sex).append(':')
           .append(score);
        return out.toString();
    }
}
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
/**
 * Rich sink that upserts each {@link Info} record into MySQL via
 * {@code REPLACE INTO flinkjson(id,name,sex,score)}.
 *
 * <p>Fixes vs. the original: extends {@code RichSinkFunction<Info>} instead of the
 * raw type (with the raw type, {@code invoke(Info, Context)} does not override
 * {@code invoke(Object, Context)} and the {@code @Override} fails to compile),
 * and the duplicated {@code super.open(...)} / {@code super.close()} calls are removed.
 */
public class MySQLWriter
        extends RichSinkFunction<Info>
{
    private Connection connection;
    private PreparedStatement preparedStatement;

    /**
     * Opens the JDBC connection and prepares the upsert statement once per task.
     * NOTE(review): credentials are hard-coded for the demo — externalize in real use.
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        String className = "com.mysql.jdbc.Driver";
        Class.forName(className);
        String url = "jdbc:mysql://localhost:3306/flink";
        String user = "root";
        String password = "123456";
        connection = DriverManager.getConnection(url, user, password);
        // REPLACE INTO makes the sink idempotent on the primary key.
        String sql = "replace into flinkjson(id,name,sex,score) values(?,?,?,?)";
        preparedStatement = connection.prepareStatement(sql);
    }

    /** Releases the statement and connection in reverse order of acquisition. */
    @Override
    public void close() throws Exception {
        super.close();
        if (preparedStatement != null) {
            preparedStatement.close();
        }
        if (connection != null) {
            connection.close();
        }
    }

    /**
     * Writes one record; logs the record on success, an error marker otherwise.
     *
     * @param value   the record to persist
     * @param context sink runtime context (unused)
     */
    @Override
    public void invoke(Info value, Context context) throws Exception {
        preparedStatement.setInt(1, value.id);
        preparedStatement.setString(2, value.name);
        preparedStatement.setString(3, value.sex);
        preparedStatement.setFloat(4, value.score);
        int affected = preparedStatement.executeUpdate();
        if (affected > 0) {
            System.out.println("value=" + value);
        } else {
            System.out.println("error");
        }
    }
}
Kafka生产json格式数据的代码
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.codehaus.jettison.json.JSONObject;
import java.util.Properties;
/**
 * Demo producer that emits one JSON record every 5 seconds to the topic that
 * {@code MainDemo} consumes.
 *
 * <p>Fixes vs. the original: the topic is {@code "kafka"} (the original sent to
 * {@code "nima"} while the consumer read {@code "kafka"}, so no data ever arrived),
 * {@code sex} is written as a String (the consumer schema declares it STRING and
 * the JSON format uses {@code failOnMissingField}/derived schema), and the
 * producer/record types are parameterized instead of raw.
 */
public class KafkaProducerTest {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        Producer<String, String> producer = new KafkaProducer<>(props);
        try {
            for (int i = 0; i < 100; i++) {
                JSONObject event = new JSONObject();
                event.put("id", (int) (Math.random() * 100 + 1))
                        .put("name", "mingzi" + i)
                        // Must be a String: the Flink table schema declares sex as STRING.
                        .put("sex", String.valueOf(i))
                        .put("score", i * 1.0);
                // Topic must match the one registered in MainDemo.
                producer.send(new ProducerRecord<>("kafka", Integer.toString(i), event.toString()));
                System.out.println(i);
                Thread.sleep(5000);
            }
        } finally {
            producer.close();
        }
    }
}
以下为项目的 Maven 依赖列表(groupId / artifactId / version,部分依赖可能并未用到):
org.apache.flink
flink-core
1.7.2
org.apache.flink
flink-clients_2.11
1.7.2
org.apache.flink
flink-java
1.7.2
org.apache.flink
flink-streaming-scala_2.11
1.7.2
org.apache.flink
flink-scala_2.11
1.7.2
org.apache.flink
flink-table_2.11
1.7.2
org.apache.flink
flink-json
1.7.2
org.apache.flink
flink-streaming-java_2.11
1.7.2
org.apache.flink
flink-connector-kafka-0.10_2.11
1.7.2
org.apache.logging.log4j
log4j-core
2.8.2
log4j
log4j
1.2.17
com.fasterxml.jackson.core
jackson-databind
2.9.8
joda-time
joda-time
2.9.9
mysql
mysql-connector-java
5.1.45
org.codehaus.jettison
jettison
1.3.7
log4j
log4j
1.2.17(注:此条与上文的 log4j 1.2.17 重复,原文此处为 1.2.11,版本冲突,已统一为 1.2.17,实际只需保留一份)