Notes on Flink in practice
Parent project dependencies (pom.xml)
<properties>
    <maven.compiler.source>8</maven.compiler.source>
    <maven.compiler.target>8</maven.compiler.target>
    <flink.version>1.13.6</flink.version>
    <java.version>1.8</java.version>
    <scala.binary.version>2.12</scala.binary.version>
    <slf4j.version>1.7.30</slf4j.version>
    <hive.version>3.1.2</hive.version>
</properties>

<dependencyManagement>
    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-java</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-base</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>${slf4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>${slf4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-to-slf4j</artifactId>
            <version>2.14.0</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.62</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.1.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-hive_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-exec</artifactId>
            <version>${hive.version}</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.24</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-json</artifactId>
            <version>${flink.version}</version>
        </dependency>
    </dependencies>
</dependencyManagement>

<build>
    <plugins>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <configuration>
                <source>8</source>
                <target>8</target>
            </configuration>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-assembly-plugin</artifactId>
            <version>3.0.0</version>
            <configuration>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>

<repositories>
    <repository>
        <id>nexus-aliyun</id>
        <name>nexus-aliyun</name>
        <url>http://maven.aliyun.com/nexus/content/groups/public/</url>
        <releases>
            <enabled>true</enabled>
        </releases>
        <snapshots>
            <enabled>false</enabled>
        </snapshots>
    </repository>
</repositories>
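With the assembly plugin bound to the package phase like this, running mvn clean package produces both the regular jar and a *-jar-with-dependencies.jar; the latter is the one to submit with flink run.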
Logging
log4j.properties
log4j.rootLogger=ERROR, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
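With slf4j-api plus the slf4j-log4j12 binding from the pom above, this file controls all SLF4J output. A minimal sketch of application code logging through it; the class here is illustrative, not from the original project:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogDemo { // illustrative class, not from the original post
    private static final Logger LOG = LoggerFactory.getLogger(LogDemo.class);

    public static void main(String[] args) {
        // With rootLogger=ERROR, only this line reaches stdout
        LOG.error("something went wrong");
        // Suppressed by the ERROR threshold configured above
        LOG.info("routine progress message");
    }
}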
Child module dependencies (pom.xml)
<properties>
    <maven.compiler.source>8</maven.compiler.source>
    <maven.compiler.target>8</maven.compiler.target>
</properties>

<dependencies>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-java</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-clients_${scala.binary.version}</artifactId>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-api</artifactId>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-log4j12</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>fastjson</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-connector-hive_${scala.binary.version}</artifactId>
        <version>${flink.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-exec</artifactId>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-json</artifactId>
    </dependency>
</dependencies>
Integrating HiveCatalog
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

// The UPPER_CASE constants below are read from the properties file shown in the next section.
public static void main(String[] args) throws Exception {
    // 1. Set up the execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    // 2. Register the HiveCatalog and make it the current catalog
    HiveCatalog hive = new HiveCatalog(CATALOG_NAME, DATABASE_NAME, HIVE_CONFG_DIR, HIVE_VERSION);
    tableEnv.registerCatalog(CATALOG_NAME, hive);
    tableEnv.useCatalog(CATALOG_NAME);
    String targetHiveSql = HIVE_SQL_STRING;
    // Because the default database was set above, this query can read the Hive
    // tables in that database directly. Note: the syntax is Flink SQL, not HiveQL.
    Table hiveTempTable = tableEnv.sqlQuery(targetHiveSql);
    tableEnv.createTemporaryView(FLINK_HIVE_TEMP_TABLE_NAME, hiveTempTable);
    // 3. Create the Kafka sink table
    tableEnv.executeSql(KAFKA_SINK_DDL);
    // 4. Insert the Hive data into the Kafka sink
    tableEnv.executeSql(KAFKA_SINK_SQL_STRING);
}
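One thing this snippet relies on: for INSERT statements, tableEnv.executeSql(...) submits the Flink job itself and returns a TableResult, so no trailing env.execute() call is needed in a pure Table API pipeline like this one.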
Example configuration (properties file)
#=========env config===============
#kafka server url
bootstrap_servers=kafkaip:9092
#folder containing hive-site.xml
hive_confg_dir=/home/bigdata/etlconf
#name of the HiveCatalog registered in Flink
catalog_name=bigdatatest
#hive database
database_name=hivetestdb
#============hive source config==========
hive_sql_string=select * from bigdatatest where load_date = DATE_FORMAT(TIMESTAMPADD(DAY, -1, CURRENT_TIMESTAMP), 'yyyy-MM-dd')
flink_hive_temp_table_name=hive_temp_table
#===========kafka sink config=========
kafka_sink_ddl=create table if not exists kafka_bigdatatest( value1 string , value2 string) WITH ( 'connector' = 'kafka', 'topic' = 'bigdatatest', 'properties.bootstrap.servers' = 'kafkaip:9092','key.fields' = 'value1', 'key.format' = 'json', 'value.format' = 'json' )
#the table name here must match flink_hive_temp_table_name from the hive source config above
kafka_sink_sql_string=insert into kafka_bigdatatest select * from hive_temp_table
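The UPPER_CASE constants in the HiveCatalog snippet map to these keys. The original post does not show the loader, so here is a minimal sketch using java.util.Properties; the class name and the resource file name are assumptions:

import java.io.InputStream;
import java.util.Properties;

public class EtlConfig { // hypothetical helper, not from the original post
    private static final Properties PROPS = new Properties();

    static {
        try (InputStream in = EtlConfig.class.getClassLoader()
                .getResourceAsStream("etl.properties")) { // assumed file name
            PROPS.load(in);
        } catch (Exception e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    public static final String CATALOG_NAME = PROPS.getProperty("catalog_name");
    public static final String DATABASE_NAME = PROPS.getProperty("database_name");
    public static final String HIVE_CONFG_DIR = PROPS.getProperty("hive_confg_dir");
    // hive_version is not in the sample config; 3.1.2 matches hive.version in the pom (assumption)
    public static final String HIVE_VERSION = "3.1.2";
    public static final String HIVE_SQL_STRING = PROPS.getProperty("hive_sql_string");
    public static final String FLINK_HIVE_TEMP_TABLE_NAME = PROPS.getProperty("flink_hive_temp_table_name");
    public static final String KAFKA_SINK_DDL = PROPS.getProperty("kafka_sink_ddl");
    public static final String KAFKA_SINK_SQL_STRING = PROPS.getProperty("kafka_sink_sql_string");
}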