Flink SQL代码示例(scala版本)

一、创建SBT项目,添加以下依赖

 

// https://mvnrepository.com/artifact/org.apache.flink/flink-table
libraryDependencies += "org.apache.flink" %% "flink-table" % "1.2.0"
// https://mvnrepository.com/artifact/org.apache.flink/flink-scala
libraryDependencies += "org.apache.flink" %% "flink-scala" % "1.2.0"

二、示例代码:读取两个 CSV 文件,用 SQL 的 in 子查询过滤,并将结果写回 CSV 文件

 

package Table

import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.table.api.TableEnvironment
import org.apache.flink.table.sinks.CsvTableSink

/**
  * Author: HuangWei
  * Date: 18-6-27 下午6:10
  */
/** A single id read from the daily id file (one id per line). */
case class ID(id: String)
/**
  * One row of the tag file: 34 string columns, '\001'-delimited.
  * col1 is the column later matched against the ID table in the SQL query.
  */
case class Mark(
    col1: String,  col2: String,  col3: String,  col4: String,  col5: String,
    col6: String,  col7: String,  col8: String,  col9: String,  col10: String,
    col11: String, col12: String, col13: String, col14: String, col15: String,
    col16: String, col17: String, col18: String, col19: String, col20: String,
    col21: String, col22: String, col23: String, col24: String, col25: String,
    col26: String, col27: String, col28: String, col29: String, col30: String,
    col31: String, col32: String, col33: String, col34: String)
/**
  * Batch job: reads an id list and a 34-column tag table from CSV files,
  * keeps only the tag rows whose first column appears in the id list,
  * and writes the filtered rows to a CSV sink.
  *
  * Author: HuangWei
  * Date: 18-6-27 下午6:10
  */
object Demo02 {
  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment
    // Parallelism 1 so the CsvTableSink produces a single output file
    // instead of one file per parallel subtask.
    env.setParallelism(1)

    val tableEnv = TableEnvironment.getTableEnvironment(env)

    // Brings the implicit TypeInformation for the case classes into scope;
    // without it readCsvFile fails with "could not find implicit value for
    // evidence parameter of type TypeInformation[...]".
    import org.apache.flink.api.scala._

    // Id list: one id per line.
    val ids = env.readCsvFile[ID]("/home/huangwei/桌面/day_id/2017-07-01.txt")

    // Tag table: 34 string columns, '\001'-delimited, header line skipped.
    val tags = env.readCsvFile[Mark](
      filePath = "/home/huangwei/桌面/tag.txt",
      lineDelimiter = "\n",
      fieldDelimiter = "\001",
      lenient = false, // fail on malformed lines instead of silently skipping them
      ignoreFirstLine = true
    )

    tableEnv.registerDataSet("ID", ids)
    tableEnv.registerDataSet("TAG", tags)

    // Keep only the tag rows whose col1 is present in the id list.
    val result = tableEnv.sql("select * from TAG where col1 in (select id from ID)")
    result.writeToSink(new CsvTableSink("/home/huangwei/桌面/result/A.csv"))

    // writeToSink only declares the sink; execute() actually runs the batch job.
    env.execute()
  }
}

/*
  错误:could not find implicit value for evidence parameter of type TypeInformation[Mark]
  这种异常的发生通常是因为程序需要一个隐式参数(implicit parameter)
  解决方法:在代码中引入一个包
  动态数据:import org.apache.flink.streaming.api.scala._
  静态数据:import org.apache.flink.api.scala._

 */

 

你可能感兴趣的:(Flink)