// Spark Streaming example: stateful word count with updateStateByKey

package com.ws.spark

import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/** Stateful word count over a socket text stream.
  *
  * Reads lines from a TCP source in 5-second micro-batches, splits them into
  * words, and keeps a running per-word total across batches via
  * `updateStateByKey` (state is persisted in the checkpoint directory).
  */
object UpdateStateByKeyTest {
  def main(args: Array[String]): Unit = {
    // Build the SparkSession in local mode and quiet the log output.
    val spark: SparkSession = SparkSession
      .builder()
      .appName("UpdateStateByKeyTest")
      .master("local[*]")
      .getOrCreate()
    spark.sparkContext.setLogLevel("WARN")

    // updateStateByKey requires a checkpoint directory (an HDFS path in
    // production); the job fails at runtime if this is not set.
    spark.sparkContext.setCheckpointDir("/UpdateStateByKeyTest")

    // 5-second batch interval on top of the existing SparkContext.
    val ssc: StreamingContext = new StreamingContext(spark.sparkContext, Seconds(5))

    // Each record is one line of text received from the socket source.
    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.0.21", 5566)

    // Tokenize on single spaces and pair every word with a count of 1.
    val wordPairs: DStream[(String, Int)] = lines.flatMap(_.split(" ")).map((_, 1))

    // Fold this batch's counts into the running total carried in state:
    // `batchCounts` holds the 1s seen for a key in the current batch,
    // `state` is the previous total (None on first sight of the key).
    def updateFunc(batchCounts: Seq[Int], state: Option[Int]): Option[Int] =
      Some(state.getOrElse(0) + batchCounts.sum)

    val runningTotals = wordPairs.updateStateByKey(updateFunc)
    runningTotals.print()

    // Start the streaming job and block until it is terminated.
    ssc.start()
    ssc.awaitTermination()
  }
}

// You may also be interested in: (spark)