flink 1.18 sql demo

Replace flink-table-planner with flink-table-planner-loader (pom.xml):

    <dependencies>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-uber</artifactId>
            <version>1.18.0</version>
            <scope>provided</scope>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-runtime</artifactId>
            <version>1.18.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge</artifactId>
            <version>1.18.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients</artifactId>
            <version>1.18.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-base</artifactId>
            <version>1.18.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-csv</artifactId>
            <version>1.18.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka</artifactId>
            <version>3.0.2-1.18</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId> </artifactId>
            <version>1.18.0</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.21</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.21</version>
        </dependency>

    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>3.1.1</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <artifactSet>
                                <excludes>
                                    <exclude>com.google.code.findbugs:jsr305</exclude>
                                </excludes>
                            </artifactSet>
                            <filters>
                                <filter>
                                    <!-- Do not copy the signatures in the META-INF folder,
                                         otherwise this can cause SecurityExceptions when using the JAR. -->
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                            <transformers>
                                <transformer
                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                    <!-- Replace this with the main class of your job, e.g. com.cn.App -->
                                    <mainClass>my.programs.main.clazz</mainClass>
                                </transformer>
                                <transformer
                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
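Note: one dependency above has its artifactId left blank in the original post; given the title, it is presumably flink-table-planner-loader, which in Flink 1.18 takes the place of the old flink-table-planner artifact. A minimal sketch of that dependency (an assumption, not part of the original pom): on a standard Flink 1.18 cluster the planner loader already ships in lib/, so provided scope is enough; for local IDE runs, drop the provided scope or enable "include provided dependencies" so the planner is on the classpath.

    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table-planner-loader</artifactId>
        <version>1.18.0</version>
        <scope>provided</scope>
    </dependency>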

demo

package com.cn;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Classname app
 * @Description TODO
 * @Date 2024/1/12 11:26
 * @Created by typezhou
 */
public class App {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(1000L);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        String str = "CREATE TABLE KafkaTable (\n" +
                "  `user_id` STRING,\n" +
                "  `ts` TIMESTAMP(3) METADATA FROM 'timestamp'\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'aaaa',\n" +
                "  'properties.bootstrap.servers' = '172.xx.xx.xx:9092,172.xx.86.xx:9092,172.xx.xx.xx:9092',\n" +
                "  'properties.group.id' = 'testGrou1p',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'csv'\n" +
                ")";
        tableEnv.executeSql(str);
        Table tableResult = tableEnv.sqlQuery("SELECT user_id  FROM KafkaTable group by user_id");
//        DataStream<ResultBean> tuple2DataStream = tableEnv.toDataStream(tableResult, ResultBean.class);
//        SingleOutputStreamOperator<ResultBean> map = tuple2DataStream.map(new MapFunction<ResultBean, ResultBean>() {
//            @Override
//            public ResultBean map(ResultBean s) throws Exception {
//                Thread.sleep(3000L);
//                return s;
//            }
//        });
//        tuple2DataStream.print();
        String sqlPri = "CREATE TABLE print_table (\n" +
                "  `user_id` STRING \n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'bbbb',\n" +
                "  'properties.bootstrap.servers' = '172.xx.xx.xx:9092,172.xx.86.xx:9092,172.xx.xx.xx:9092',\n" +
                "  'format' = 'csv'\n" +
                ")";
        tableEnv.executeSql(sqlPri);
        tableEnv.executeSql("insert into  print_table SELECT user_id FROM KafkaTable");

    }


}
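The commented-out block hints at consuming the grouped result as a DataStream. Because the GROUP BY query produces an updating table, toDataStream (which only accepts insert-only tables) would fail at runtime; toChangelogStream is the bridge for updating results. Below is a minimal sketch of that variant; the class name ChangelogDemo is hypothetical, and the Kafka brokers/topic are the same placeholders used above.

package com.cn;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class ChangelogDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // Same Kafka source as in App, reduced to the user_id column.
        tableEnv.executeSql(
                "CREATE TABLE KafkaTable (`user_id` STRING) WITH (" +
                " 'connector' = 'kafka', 'topic' = 'aaaa'," +
                " 'properties.bootstrap.servers' = '172.xx.xx.xx:9092'," +
                " 'properties.group.id' = 'testGrou1p'," +
                " 'scan.startup.mode' = 'latest-offset', 'format' = 'csv')");
        Table grouped = tableEnv.sqlQuery("SELECT user_id FROM KafkaTable GROUP BY user_id");
        // GROUP BY emits updates (+I/-U/+U), so toDataStream would throw;
        // toChangelogStream exposes the changelog as a DataStream<Row>.
        DataStream<Row> changelog = tableEnv.toChangelogStream(grouped);
        changelog.print();
        env.execute("changelog-print-demo");
    }
}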
