Spark Streaming 写入 Elasticsearch

简单地写了一个 Spark Streaming 写入 Elasticsearch 的 demo,直接看代码吧:

package spark

import kafka.{PropertiesScalaUtils, RedisKeysListUtils}
import kafka.streamingRedisHive.{dbIndex, kafkaStreams}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies}
import org.elasticsearch.spark.rdd.EsSpark
import redis.RedisPool

/**
  * 历史价格曲线的数据写入到es;
  */
object writeToES {
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.INFO)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.INFO)
    Logger.getLogger("org.apache.kafka.clients.consumer").setLevel(Level.INFO)
    val conf = new SparkConf().setAppName("Spark Streaming Jason")
    conf.set("spark.streami

你可能感兴趣的:(Spark,elasticsearch)