// Reads rank data from Redis and writes it to MySQL.

package com.ws.spark

import java.io.{File, PrintWriter}
import java.util.Properties

import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import redis.clients.jedis.Jedis


object RedisToMysql {

  /**
   * ETL pipeline: reads a rank list from a Redis list, dumps it to a local
   * JSON-lines file, filters the records with Spark SQL, and appends the
   * result to a MySQL table.
   *
   * Flow: Redis LRANGE "rank:arena_rank" -> rank.txt (one JSON doc per line)
   * -> Spark DataFrame -> keep rows whose uid is longer than 20 chars
   * -> JDBC append into table `user_arena_2`.
   */
  def main(args: Array[String]): Unit = {
    // Explicit .asScala via JavaConverters instead of the deprecated
    // implicit JavaConversions wildcard import.
    import scala.collection.JavaConverters._

    val jedis: Jedis = new Jedis("192.168.0.21", 6379)
    // Fetch elements 0..2500 of the Redis list; close the connection even
    // if LRANGE fails (the original leaked the Jedis connection).
    val list =
      try jedis.lrange("rank:arena_rank", 0, 2500).asScala
      finally jedis.close()

    // Write one JSON document per line so session.read.json can parse it.
    // close() in finally so a write failure doesn't leak the file handle.
    val printWriter: PrintWriter = new PrintWriter(new File("rank.txt"))
    try {
      for (l <- list) {
        printWriter.println(l)
      }
    } finally {
      printWriter.close()
    }

    val session: SparkSession = SparkSession.builder().appName("SqlWordCount").master("local[*]").getOrCreate()

    val dataFrame: DataFrame = session.read.json("rank.txt")

    // Keep only records with a uid longer than 20 characters. getAs returns
    // null when the field is absent, so guard against NPE on bad records.
    val filterRdd: Dataset[Row] = dataFrame.filter(x => {
      val uid: String = x.getAs[String]("uid")
      uid != null && uid.length > 20
    })

    filterRdd.createTempView("user_arena")

    val frame: DataFrame = session.sql(" select * from user_arena")

    val p = new Properties()
    p.put("user", "root")
    p.put("password", "root")

    // SaveMode "append": add rows without truncating the target table.
    frame.write.mode("append").jdbc("jdbc:mysql://192.168.0.21:3306/cok_db1?characterEncoding=utf-8", "user_arena_2", p)

    session.stop()
  }
}

// Adapted from a blog post (tags: spark, redis, mysql).