Contents
一、Connecting to Spark Streaming with nc -lk <port>
二、Spark Streaming: subscribing to a Kafka topic
三、Spark Streaming: kafkaSource to kafkaSink
一、Connecting to Spark Streaming with nc -lk <port>

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreamDemo1 {
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf().setMaster("local[2]").setAppName("sparkstream1")
    // Define the streaming context with a 3-second batch interval
    val streamingContext = new StreamingContext(sparkConf, Seconds(3))
    // Configure the source: a socket on the given host and port
    val socketLineStream: ReceiverInputDStream[String] = streamingContext.socketTextStream("192.168.91.180", 8888)
    // Business logic: split each line on whitespace and count words per batch
    val wordStream: DStream[String] = socketLineStream.flatMap(x => x.split("\\s+"))
    val mapStream: DStream[(String, Int)] = wordStream.map((_, 1))
    val wordcountStream: DStream[(String, Int)] = mapStream.reduceByKey(_ + _)
    // Print the per-batch result
    wordcountStream.print()
    // Start the receiver and block until the job is terminated
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
Start the Netcat listener before launching the job, then type whitespace-separated words into its terminal:

[root@reagan180 ~]# nc -lk 8888
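All three programs share the same build dependencies: Spark Streaming plus the Kafka 0.10 integration. A minimal build.sbt sketch; the versions below are assumptions (Spark 2.4.x built for Scala 2.11) and should be matched to your cluster:

// build.sbt (versions are assumptions, not from the original article)
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-streaming" % "2.4.8",
  "org.apache.spark" %% "spark-streaming-kafka-0-10" % "2.4.8"
)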
二、Spark Streaming: subscribing to a Kafka topic

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreamKafkaSource {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("sparkKafkaStream").setMaster("local[*]")
    val streamingContext = new StreamingContext(conf, Seconds(5))
    // updateStateByKey needs a checkpoint directory to persist state between batches
    streamingContext.checkpoint("checkpoint")
    val kafkaParams = Map(
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "192.168.91.180:9092",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.GROUP_ID_CONFIG -> "sparkstreamgropu1"
    )
    // Create the topic first:
    // kafka-topics.sh --create --zookeeper 192.168.91.180:2181 --topic sparkkafkastu --partitions 1 --replication-factor 1
    val kafkaStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      streamingContext,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Set("sparkkafkastu"), kafkaParams)
    )

    /* // Stateless variant: each batch is counted independently
    val wordCountStream: DStream[(String, Int)] = kafkaStream.flatMap(x => x.value().split("\\s+"))
      .map((_, 1))
      .reduceByKey(_ + _)
    wordCountStream.print() */

    // Stateful variant
    // updateStateByKey updates the running state per key; its function receives:
    //   - seq: the values for this key in the current batch
    //   - buffer: the value for this key already held in state, if any
    val sumStateStream: DStream[(String, Int)] = kafkaStream.flatMap(x => x.value().split("\\s+"))
      .map(x => (x, 1))
      .updateStateByKey[Int] {
        case (seq, buffer) =>
          println("entered the updateStateByKey function")
          println(s"seq values: ${seq.toList}")
          println(s"buffer: ${buffer.getOrElse(0)}")
          val sum: Int = buffer.getOrElse(0) + seq.sum
          Option(sum)
      }
    sumStateStream.print()
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
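To feed the source topic by hand, use the stock console producer: kafka-console-producer.sh --broker-list 192.168.91.180:9092 --topic sparkkafkastu. As an alternative sketch (my assumption, not part of the original article): mapWithState only visits keys that appear in the current batch, while updateStateByKey rescans all accumulated state every interval, so it is usually cheaper for large key spaces. It drops into the same program in place of the updateStateByKey chain:

import org.apache.spark.streaming.{State, StateSpec}

// Sketch: running word count with mapWithState; assumes the same kafkaStream
// and checkpoint directory as above
val spec = StateSpec.function((word: String, one: Option[Int], state: State[Int]) => {
  val sum = one.getOrElse(0) + state.getOption.getOrElse(0)
  state.update(sum) // persist the new running total for this key
  (word, sum)       // emit the updated (word, count) pair
})
val stateStream: DStream[(String, Int)] = kafkaStream
  .flatMap(_.value().split("\\s+"))
  .map((_, 1))
  .mapWithState(spec)
stateStream.print()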
三、Spark Streaming: kafkaSource to kafkaSink

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

import java.util

object SparkStreamUserFriendramToUserFriend {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("sparkufStream3").setMaster("local[*]")
    // Define the streaming context with a 5-second batch interval
    val streamingContext = new StreamingContext(conf, Seconds(5))
    streamingContext.checkpoint("checkpoint")
    val kafkaParams: Map[String, String] = Map(
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "192.168.91.180:9092",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.GROUP_ID_CONFIG -> "sparkuf3",
      // Read the topic from the beginning when no committed offset exists
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "earliest"
    )
    val kafkaStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      streamingContext,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Set("user_friends_raw"), kafkaParams)
    )
    kafkaStream.foreachRDD(
      rdd => {
        // KafkaProducer is not serializable, so build one per partition
        // on the executor side rather than on the driver
        rdd.foreachPartition(
          partition => {
            val props = new util.HashMap[String, Object]()
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.91.180:9092")
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
            val producer = new KafkaProducer[String, String](props)
            partition.foreach(
              record => {
                // Each source line is "userid,friend1 friend2 ..."; fan it out
                // into one "userid,friend" message per friend
                val splits: Array[String] = record.value().split(",")
                if (splits.length == 2) {
                  val userid = splits(0)
                  val friends = splits(1).split("\\s+")
                  for (friend <- friends) {
                    val msg = new ProducerRecord[String, String]("user_friends2", userid + "," + friend)
                    producer.send(msg)
                  }
                }
              }
            )
            // Flush buffered messages and release the connection
            producer.close()
          }
        )
      }
    )
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
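Creating a new KafkaProducer for every partition of every batch works, but it opens and tears down a broker connection each time. A common refinement, sketched below with a hypothetical ProducerHolder helper (an assumption, not part of the original), caches one producer per executor JVM:

import java.util
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig}

// Hypothetical helper: one lazily created producer per executor JVM
object ProducerHolder {
  lazy val producer: KafkaProducer[String, String] = {
    val props = new util.HashMap[String, Object]()
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.91.180:9092")
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    val p = new KafkaProducer[String, String](props)
    sys.addShutdownHook(p.close()) // flush buffered records when the executor JVM exits
    p
  }
}

Inside foreachPartition you would then call ProducerHolder.producer.send(...) and drop the per-partition close(). To verify the sink topic: kafka-console-consumer.sh --bootstrap-server 192.168.91.180:9092 --topic user_friends2 --from-beginning.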