使用Flink的HiveCatalog将Kafka的数据写入Hive

package com.atguigu.flink.test_hk;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class KafkaToHive3 {

    /**
     * Streams rows from a Kafka topic into a Hive table via a registered
     * {@link HiveCatalog}.
     *
     * <p>Flow: register the Hive catalog &rarr; create the target Hive table
     * (Hive dialect) &rarr; create a Kafka source table in the default Flink
     * catalog (default dialect) &rarr; submit a streaming
     * {@code INSERT INTO hive_table7 SELECT ...}. The final
     * {@code executeSql(INSERT ...)} submits the streaming job itself, so no
     * explicit {@code env.execute()} is needed.
     */
    public static void main(String[] args) {

        // Run HDFS/Hive operations as this user so writes to the warehouse dir are permitted.
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        env.setParallelism(1);

        // Checkpointing drives the streaming Hive sink's file-commit cycle; every 6 s here.
        env.enableCheckpointing(6000);

        // Switch to the Hive SQL dialect before issuing Hive DDL.
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);

        // Register a Hive catalog named "hive" with default database "default";
        // "conf" is the directory containing hive-site.xml.
        HiveCatalog hiveCatalog = new HiveCatalog("hive", "default", "conf");
        tableEnv.registerCatalog("hive", hiveCatalog);

        // Make the Hive catalog current so the DDL below creates a real Hive table.
        tableEnv.useCatalog("hive");

// to use default dialect
//        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);

        // Target Hive table; IF NOT EXISTS makes reruns of this job safe.
        String hivesql = "CREATE TABLE  IF NOT EXISTS hive_table7 (\n" +
                "  id int\n" +
                ") ";

        tableEnv.executeSql(hivesql);

        // Debug aid: print the current contents of the Hive table.
//        tableEnv.sqlQuery("select * from hive_table7  ").execute().print();

        // Switch back to the default dialect and Flink catalog to define the Kafka source table.
        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
        tableEnv.useCatalog("default_catalog");

        // Kafka source: topic "topicA", CSV-encoded values; a fresh group.id per run so
        // 'latest-offset' always reads from the current tail.
        // NOTE(review): "flin_" looks like a typo for "flink_" — confirm the intended group id.
        String readSql = " create table t2 (" +
                " id int  " +
                ") WITH (" +
                " 'connector' = 'kafka', " +
                " 'topic' = 'topicA', " +
                " 'properties.bootstrap.servers' = 'hadoop102:9092', " +
                " 'properties.group.id' = 'flin_"+System.currentTimeMillis()+"', " +
                " 'scan.startup.mode' = 'latest-offset', " +
                " 'value.format' = 'csv' " +
                ")" ;
        tableEnv.executeSql(readSql);

        // Debug aid: print the Kafka stream (blocks; for local testing only).
//        tableEnv.sqlQuery("select * from t2  ").execute().print();

        // Back to the Hive catalog/dialect so the INSERT targets the Hive table.
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        tableEnv.useCatalog("hive");

        // Source is fully qualified because the current catalog is now "hive".
        tableEnv.executeSql("INSERT INTO hive_table7 select id from default_catalog.default_database.t2");


    }

}


    
        BigData230201
        com.atguigu
        1.0-SNAPSHOT
    
    4.0.0

    Flink

    
        8
        8
        UTF-8
        1.17.0
        2.12
    

    


    
        
            org.apache.flink
            flink-streaming-java
            ${flink.version}
            provided
        

        
            org.apache.flink
            flink-clients
            ${flink.version}
            provided
        
        
            org.slf4j
            slf4j-log4j12
            1.7.25
            provided
        

        
        
            org.projectlombok
            lombok
            1.18.26
        

        
        
            org.apache.flink
            flink-runtime-web
            ${flink.version}
            provided
        

        
        
            org.apache.flink
            flink-connector-datagen
            ${flink.version}
            provided
        


        
        
            org.apache.flink
            flink-connector-files
            ${flink.version}
            provided
        

        
        
            org.apache.hadoop
            hadoop-client
            3.3.4
            provided
        

        
        
            org.apache.flink
            flink-connector-kafka
            ${flink.version}
            provided
        

        
        
            com.alibaba
            fastjson
            1.2.83
            provided
        

        
        
            org.apache.flink
            flink-connector-jdbc
            3.1.0-1.17
            provided
        

        
        
            com.mysql
            mysql-connector-j
            8.0.32
            provided
        

        
        
            org.apache.flink
            flink-statebackend-rocksdb
            ${flink.version}
            provided
        

        
        
            org.apache.flink
            flink-statebackend-changelog
            ${flink.version}
            provided
        

        
        
            org.apache.flink
            flink-table-api-java-uber
            ${flink.version}
        

        
            org.apache.flink
            flink-table-runtime
            ${flink.version}
        









        
            org.apache.flink
            flink-table-planner_${scala.version}
            ${flink.version}
        

        
        
            org.apache.flink
            flink-csv
            ${flink.version}
        

        
        
            org.apache.flink
            flink-json
            ${flink.version}
        

        
        
            org.apache.flink
            flink-connector-hive_2.12
            ${flink.version}
            provided
        

        
            org.apache.hive
            hive-exec
            3.1.3
            provided
        






    

    
        
            
                org.apache.maven.plugins
                maven-shade-plugin
                3.2.4
                
                    
                        package
                        
                            shade
                        
                        
                            
                                
                                    com.google.code.findbugs:jsr305
                                    org.slf4j:*
                                    log4j:*
                                
                            
                            
                                
                                    
                                    *:*
                                    
                                        META-INF/*.SF
                                        META-INF/*.DSA
                                        META-INF/*.RSA
                                    
                                
                            
                            
                                
                                
                            
                        
                    
                
            
        
    




hive-site.xml

<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <!-- Metastore database connection (note: "&" must be escaped as "&amp;" in XML) -->
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://hadoop102:3306/metastore?useSSL=false&amp;useUnicode=true&amp;characterEncoding=UTF-8&amp;allowPublicKeyRetrieval=true</value>
    </property>

    <!-- Metastore JDBC driver -->
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.cj.jdbc.Driver</value>
    </property>

    <!-- Metastore database user -->
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
    </property>

    <!-- Metastore database password -->
    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>000000</value>
    </property>

    <property>
        <name>hive.metastore.warehouse.dir</name>
        <value>/user/hive/warehouse</value>
    </property>

    <property>
        <name>hive.metastore.schema.verification</name>
        <value>false</value>
    </property>

    <property>
        <name>hive.server2.thrift.port</name>
        <value>10000</value>
    </property>

    <property>
        <name>hive.server2.thrift.bind.host</name>
        <value>hadoop102</value>
    </property>

    <property>
        <name>hive.metastore.event.db.notification.api.auth</name>
        <value>false</value>
    </property>

    <property>
        <name>hive.cli.print.header</name>
        <value>true</value>
    </property>

    <property>
        <name>hive.cli.print.current.db</name>
        <value>true</value>
    </property>

    <property>
        <name>spark.yarn.jars</name>
        <value>hdfs://hadoop102:8020/spark-jars/*</value>
    </property>

    <property>
        <name>hive.execution.engine</name>
        <value>spark</value>
    </property>

    <property>
        <name>metastore.storage.schema.reader.impl</name>
        <value>org.apache.hadoop.hive.metastore.SerDeStorageSchemaReader</value>
    </property>

    <!-- Metastore thrift endpoint used by the Flink HiveCatalog -->
    <property>
        <name>hive.metastore.uris</name>
        <value>thrift://hadoop102:9083</value>
    </property>
</configuration>

你可能感兴趣的:(Flink,kafka,hive,分布式)