Flink-3.Flink SQL API

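This example demonstrates Flink SQL window table-valued functions (TVFs) — TUMBLE, HOP, and CUMULATE — each combined with a multi-dimensional GROUP BY clause (GROUPING SETS, ROLLUP, CUBE) over a filesystem CSV table that declares an event-time watermark.
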
package com.ctgu.flink.sql;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.*;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.*;


public class Flink_Sql_Window {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .inStreamingMode()
                .build();


        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

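        // Source DDL: filesystem CSV table with a computed event-time column (time_ltz,
        // 5-second watermark) and a processing-time column (pt).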
        String createSql =
                "CREATE TABLE windowTable " +
                "    (" +
                "    `id` STRING," +
                "    `timestamp` BIGINT," +
                "    `address` STRING," +
                "    `value` DOUBLE," +
                "    `time_ltz` AS TO_TIMESTAMP_LTZ(`timestamp`, 3)," +
                "    `pt` AS PROCTIME()," +
                "    WATERMARK FOR time_ltz AS time_ltz - INTERVAL '5' SECOND" +
                "    )" +
                "    WITH (" +
                "       'connector'='filesystem'," +
                "       'format'='csv'," +
                "       'csv.field-delimiter'=','," +
                "       'path'='data/data.txt'" +
                "    )";

        tableEnv.executeSql(createSql);

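        // TUMBLE window TVF: fixed 4-second windows; GROUPING SETS emits both per-id counts
        // and an overall count per window.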
        String tumbleWindowSql =
                "select window_start, window_end, id, count(id) " +
                        "from TABLE(" +
                        "    TUMBLE(TABLE windowTable, DESCRIPTOR(time_ltz), INTERVAL '4' SECONDS)) " +
                        "group by window_start, window_end, GROUPING SETS ((id), ())";

        Table tumbleTable = tableEnv.sqlQuery(tumbleWindowSql);

        tableEnv.toDataStream(tumbleTable, Row.class).print("tumbleTable");

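        // HOP (sliding) window TVF: 20-second windows sliding every 4 seconds; ROLLUP groups
        // by (id) and by the empty set.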
        String hopWindowSql =
                "select window_start, window_end, id, count(id) " +
                        "from TABLE(" +
                        "    HOP(TABLE windowTable, DESCRIPTOR(time_ltz), INTERVAL '4' SECONDS, INTERVAL '20' SECONDS)) " +
                        "group by window_start, window_end, ROLLUP (id)";

        Table hopTable = tableEnv.sqlQuery(hopWindowSql);

        tableEnv.toDataStream(hopTable, Row.class).print("hopTable");

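        // CUMULATE window TVF: windows that grow in 4-second steps up to 20 seconds; CUBE
        // aggregates over every combination of (id, address).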
        String cumulateWindowSql =
                "select window_start, window_end, id, address, count(id) " +
                        "from TABLE(" +
                        "    CUMULATE(TABLE windowTable, DESCRIPTOR(time_ltz), INTERVAL '4' SECONDS, INTERVAL '20' SECONDS)) " +
                        "group by window_start, window_end, CUBE (id, address)";

        Table cumulateTable = tableEnv.sqlQuery(cumulateWindowSql);

        tableEnv.toDataStream(cumulateTable, Row.class).print("cumulateTable");

        env.execute("Table SQL");
    }

}
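
The filesystem source reads data/data.txt in CSV form matching the (id, timestamp, address, value) schema; the actual data file is not included in this post. A few hypothetical rows, for illustration only, could look like:

sensor_1,1672531200000,hangzhou,35.8
sensor_2,1672531202000,beijing,15.4
sensor_1,1672531206000,hangzhou,36.2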
