Spark Streaming with Kafka 0.10, 05-02: Setting the Consumer's Offsets Manually

This demo starts a direct stream from manually chosen offsets by passing a Map[TopicPartition, Long] to ConsumerStrategies.Subscribe.

package Kafka010

import Kafka010.Utils.MyKafkaUtils
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Created by Shi shuai RollerQing on 2019/12/24 19:47
 *
 * Consumer test for the Kafka 0.10 API
 */
// TODO: Consumer test for the Kafka 0.10 API
object Kafka010Demo02 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName(s"${this.getClass.getCanonicalName}")
    val ssc = new StreamingContext(conf, Seconds(5))

    val topics = List("topicB") // ConsumerStrategies expects the topics as a collection, since there may be more than one
    val kafkaParams = MyKafkaUtils.getKafkaConsumerParams("SparkKafka010")
    // Manually specify the starting offset for each partition of topicB
    val offsets: Map[TopicPartition, Long] = Map(
      new TopicPartition("topicB", 0) -> 300L,
      new TopicPartition("topicB", 1) -> 300L,
      new TopicPartition("topicB", 2) -> 300L
    )

    // Subscribe with the offsets map: each partition starts consuming at the given offset
    val ds: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams, offsets)
    )
    // Print every ConsumerRecord; its toString includes topic, partition, and offset
    ds.foreachRDD(rdd => {
      rdd.foreach(println)
    })

    ssc.start()
    ssc.awaitTermination()

  }
}
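
MyKafkaUtils is a project helper that is not shown in this post. Here is a minimal sketch of what getKafkaConsumerParams might look like; the broker address localhost:9092 and the exact property choices are assumptions, not the author's actual implementation:

package Kafka010.Utils

import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.StringDeserializer

object MyKafkaUtils {
  // Assumed implementation: build the consumer params for the given group id
  def getKafkaConsumerParams(groupId: String): Map[String, Object] = Map(
    ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "localhost:9092", // assumed broker address
    ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
    ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
    ConsumerConfig.GROUP_ID_CONFIG -> groupId,
    // auto-commit is turned off since offsets are managed manually in this demo
    ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> (false: java.lang.Boolean)
  )
}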

Sure enough, consumption starts from offset 300.
(Screenshot: console output showing records consumed starting at offset 300.)
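
If you would rather see the offsets explicitly than rely on ConsumerRecord.toString, a small variation of the foreachRDD above prints topic, partition, and offset for each record:

    ds.foreachRDD { rdd =>
      rdd.foreach { r =>
        // r is a ConsumerRecord; topic(), partition(), and offset() identify where it came from
        println(s"${r.topic()}-${r.partition()} @ ${r.offset()}: ${r.value()}")
      }
    }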
