// Create the stream execution environment (entry point for DataStream jobs).
val env = StreamExecutionEnvironment.getExecutionEnvironment
// Build the settings for a streaming-mode table environment.
val tableEnvironmentSettings = EnvironmentSettings
.newInstance()
.inStreamingMode()
.build()
// Bridge environment: lets us convert between DataStream and Table.
val tableEnv = StreamTableEnvironment.create(env, tableEnvironmentSettings)
老版本的Flink的实现如下:
// Legacy (pre-1.13) expression-based API: column expressions are passed
// directly to fromDataStream. Note: fromDataStream never accepted a table
// name as its first argument (registration was a separate call), and each
// column expression must be comma-separated — the original snippet was
// missing the comma after $("timestamp").
val inputTable = tableEnv.fromDataStream(datastream,
  $("id"),
  $("timestamp"),
  $("temperature"))
但是在新的版本中这一方式已经被移除掉了,使用下面的方式定义表结构
// New-style (1.13+) conversion: declare column names and types explicitly
// with a Schema builder instead of positional expressions.
val inputTable = tableEnv.fromDataStream(
datastream,
Schema
.newBuilder()
.column("id",DataTypes.STRING())
.column("timestamp",DataTypes.BIGINT())
.column("temperature",DataTypes.DOUBLE())
.build()
)
// Same conversion, but with a primary key. A primary-key column must be
// declared NOT NULL, hence .notNull() on "id".
val inputTable = tableEnv.fromDataStream(
datastream,
Schema
.newBuilder()
.column("id",DataTypes.STRING().notNull())
.column("timestamp",DataTypes.BIGINT())
.column("temperature",DataTypes.DOUBLE())
.primaryKey("id")
.build()
)
// Composite primary key over (id, timestamp); both key columns are
// therefore declared NOT NULL.
val inputTable = tableEnv.fromDataStream(
datastream,
Schema
.newBuilder()
.column("id",DataTypes.STRING().notNull())
.column("timestamp",DataTypes.BIGINT().notNull())
.column("temperature",DataTypes.DOUBLE())
.primaryKey("id","timestamp")
.build()
)
// Rename every column positionally. Tables are immutable: `as` returns a
// NEW Table, so the result must be kept — the original snippet discarded it.
val renamedTable = inputTable.as("id", "timestamp", "ctime")
// Rename a single column. renameColumns takes alias expressions of the form
// $"old" as "new" — not two separate arguments as the original snippet had.
val renamedColumn = inputTable.renameColumns($"id" as "sensor_id")
// Add a computed event-time column: `timestamp` (epoch seconds, see the
// FROM_UNIXTIME conversion) is turned into a TIMESTAMP(3) column "rowtime"
// via a SQL expression evaluated during conversion.
val inputTable = tableEnv.fromDataStream(
datastream,
Schema
.newBuilder()
.column("id",DataTypes.STRING().notNull())
.column("timestamp",DataTypes.BIGINT().notNull())
.column("temperature",DataTypes.DOUBLE())
.primaryKey("id","timestamp")
.columnByExpression("rowtime","CAST(TO_TIMESTAMP(FROM_UNIXTIME(`timestamp`)) AS TIMESTAMP(3))")
.build()
)
对应的DDL操作
-- DDL equivalent of the schema defined just above. The snippet above declares
-- a computed event-time column `rowtime`, so its DDL uses the CAST(...)
-- expression. (The original note had the proctime and rowtime DDL blocks
-- swapped, and wrongly marked the nullable `temperature` column NOT NULL.)
CREATE TABLE sensor(
`id` STRING NOT NULL,
`timestamp` BIGINT NOT NULL,
`temperature` DOUBLE,
`rowtime` TIMESTAMP(3) AS CAST(TO_TIMESTAMP(FROM_UNIXTIME(`timestamp`)) AS TIMESTAMP(3)),
CONSTRAINT `PK_id_timestamp` PRIMARY KEY (`id`, `timestamp`) NOT ENFORCED
)
// Add a processing-time column via the built-in proctime() function;
// it reflects the local wall-clock time when each row is processed.
val inputTable = tableEnv.fromDataStream(
datastream,
Schema
.newBuilder()
.column("id",DataTypes.STRING().notNull())
.column("timestamp",DataTypes.BIGINT().notNull())
.column("temperature",DataTypes.DOUBLE())
.primaryKey("id","timestamp")
.columnByExpression("procTime","proctime()")
.build()
)
对应的DDL操作
-- DDL equivalent of the schema defined just above. The snippet above declares
-- a processing-time column via proctime(), so its DDL uses PROCTIME().
-- (The original note had the rowtime and proctime DDL blocks swapped, copied
-- the "*PROCTIME*" marker from printSchema() output into the DDL, and wrongly
-- marked the nullable `temperature` column NOT NULL.)
CREATE TABLE sensor(
`id` STRING NOT NULL,
`timestamp` BIGINT NOT NULL,
`temperature` DOUBLE,
`procTime` TIMESTAMP_LTZ(3) NOT NULL AS PROCTIME(),
CONSTRAINT `PK_id_timestamp` PRIMARY KEY (`id`, `timestamp`) NOT ENFORCED
)
// assignTimestampsAndWatermarks does NOT mutate the stream: it returns a NEW
// stream carrying the watermark strategy. The original snippet discarded that
// result, so downstream operators would never see the assigned timestamps.
val streamWithWatermarks = datastream.assignTimestampsAndWatermarks(
  WatermarkStrategy
    // Tolerate events arriving up to 5 seconds out of order.
    .forBoundedOutOfOrderness[SensorReading](Duration.ofSeconds(5))
    .withTimestampAssigner(
      new SerializableTimestampAssigner[SensorReading] {
        override def extractTimestamp(element: SensorReading,
                                      recordTimestamp: Long): Long = {
          // SensorReading.timestamp is in epoch seconds (see the *1000L);
          // Flink expects epoch milliseconds.
          element.timestamp * 1000L
        }
      })
)
// With watermarks assigned on the stream, the legacy expression API can mark
// a field as the event-time attribute via .rowtime().
val inputTable2 = tableEnv.fromDataStream(datastream,
$("id"),
$("timestamp").rowtime(),
$("temperature"))
// New-style API: declare the watermark directly in the Schema. The watermark
// expression "rowtime - interval '5' SECOND" allows 5 seconds of out-of-order
// lateness on the computed "rowtime" column.
val inputTable = tableEnv.fromDataStream(
datastream,
Schema
.newBuilder()
.column("id",DataTypes.STRING().notNull())
.column("timestamp",DataTypes.BIGINT().notNull())
.column("temperature",DataTypes.DOUBLE())
.primaryKey("id","timestamp")
.columnByExpression("rowtime","CAST(TO_TIMESTAMP(FROM_UNIXTIME(`timestamp`)) AS TIMESTAMP(3))")
.watermark("rowtime","rowtime - interval '5' SECOND ")
.build()
)
语法:WATERMARK FOR order_time AS order_time - INTERVAL '5' SECOND
其中的 5 表示允许的最大乱序(延迟)时间,单位由 SECOND 等时间单位指定
CREATE TABLE sensor(
`id` STRING NOT NULL,
`timestamp` BIGINT NOT NULL,
`temperature` DOUBLE,
`rowtime` TIMESTAMP(3) AS CAST(TO_TIMESTAMP(FROM_UNIXTIME(`timestamp`)) AS TIMESTAMP(3)),
-- DDL syntax is "WATERMARK FOR col AS expr". The ": TIMESTAMP(3)" form in the
-- original only appears in printSchema() output and is not valid CREATE TABLE
-- syntax.
WATERMARK FOR `rowtime` AS `rowtime` - INTERVAL '5' SECOND,
CONSTRAINT `PK_id_timestamp` PRIMARY KEY (`id`, `timestamp`) NOT ENFORCED
)
// Register the stream as a temporary view (here exposing only "id") so it
// can be referenced by name from SQL.
tableEnv.createTemporaryView("sensor_view",datastream,
Schema
.newBuilder()
.column("id",DataTypes.STRING())
.build()
)
// Query the view with SQL, then convert back to a DataStream to print it.
val result = tableEnv.sqlQuery("SELECT * FROM sensor_view")
tableEnv.toDataStream(result).print()
|CREATE TEMPORARY VIEW sensor_view
|AS
|SELECT id,temperature,rowtime FROM sensor
具体设置见TableAPI的Connector操作一文
// createTemporaryTable registers the descriptor in the catalog and returns
// Unit, so binding its result to a val (as the original did) is misleading —
// the table is afterwards referenced by name, not by a handle.
tableEnv.createTemporaryTable("sensor_temp_table",
TableDescriptor
.forConnector("filesystem")
.schema(Schema.newBuilder()
.column("id",DataTypes.STRING())
.column("temperature",DataTypes.DOUBLE())
.build())
.format("csv")
.option("path","D:\\LearnWorkSpace\\FlinkDemo\\src\\main\\resources\\out")
.build()
)
-- Equivalent DDL: a temporary filesystem/CSV table (connector options
-- abbreviated; see the connector documentation for the full list).
CREATE TEMPORARY TABLE sensor_temp_table (
id STRING NOT NULL,
temperature DOUBLE
) WITH (
'connector'='filesystem',
'format'='csv',
'path'='file:///filepath/'
...
)
// Simple projection. Unified to the $"..." expression syntax (the original
// mixed it with the deprecated 'symbol syntax) and to val — result is never
// reassigned.
val result = inputTable.select($"id", $"timestamp", $"temperature", $"rowtime")
tableEnv.toDataStream(result).print()
SELECT id,timestamp,temperature,rowtime FROM inputtable
// Projection with de-duplication. Fixes the original typo ".dictinct()" —
// the method is .distinct(); also unified to $"..." syntax and val.
val result = inputTable.select($"id", $"timestamp", $"temperature", $"rowtime").distinct()
tableEnv.toDataStream(result).print()
SELECT distinct id,timestamp,temperature,rowtime FROM inputtable
// Chained filters (logical AND) plus distinct. Unified to the $"..."
// expression syntax and val, matching the rest of the examples.
val result = inputTable.select($"id", $"timestamp", $"temperature", $"rowtime")
  .filter($"id" === "sensor_1")
  .filter($"temperature" >= 40.8)
  .distinct()
tableEnv.toDataStream(result).print()
// Disjunctive filter. The Scala expression DSL defines the logical-or
// operator as ||, not an infix `or` as the original wrote; also unified to
// the $"..." syntax and val.
val result = inputTable.select($"id", $"timestamp", $"temperature", $"rowtime")
  .filter($"id" === "sensor_1" || $"id" === "sensor_2")
  .distinct()
tableEnv.toDataStream(result).print()
SELECT distinct id,timestamp,temperature,rowtime FROM inputtable
WHERE id = 'sensor_1' or id = 'sensor_2'
然后使用tableEnv.sqlQuery(sqlStatement)执行查询
// Group-by aggregation: count rows per sensor id. In the Scala DSL `count`
// is a parameterless member, so it is called without parentheses (the
// original `count()` would not compile); also val instead of var.
val result = inputTable
  .groupBy($"id")
  .select($"id", $"id".count as "cnt")
tableEnv.toDataStream(result).print()
SELECT id,count(id) as cnt FROM inputtable
group by id