黑猴子的家:Spark 写入数据到 Mysql

1、MysqlAnalysis

使用 Spark 通过 JDBC 写入 MySQL,无须自己建表(表不存在时会自动创建)

import java.util.UUID
import net.sf.json.JSONObject
import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}
import scala.collection.mutable

object MysqlAnalysis {

  /**
   * Entry point: writes a single-row dataset to the MySQL table
   * `session_aggr_stat` via Spark's JDBC data source. With
   * `SaveMode.Append` and the JDBC writer, Spark creates the table
   * automatically if it does not exist, so no DDL is required.
   *
   * NOTE(review): `sessionAggrStat` is referenced but not defined in this
   * snippet — presumably a case class instance built earlier in the full
   * article; confirm against the original source.
   */
  def main(args: Array[String]): Unit = {

    // Local-mode SparkConf; "local[*]" uses all available cores.
    val sparkConf = new SparkConf().setAppName("session").setMaster("local[*]")

    // Build the SparkSession.
    // NOTE(review): enableHiveSupport() is not needed for a JDBC write and
    // requires Hive classes on the classpath — kept here to match the
    // original behavior, but consider removing it if Hive is unused.
    val spark = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

    // Write to MySQL. Connection settings come from external config
    // (jdbc.url / jdbc.username / jdbc.password) via ConfigManager.
    import spark.implicits._
    val sessionAggrStatRDD = spark.sparkContext.makeRDD(Array(sessionAggrStat))
    sessionAggrStatRDD.toDF().write
      .format("jdbc")
      .option("url", ConfigManager.config.getString("jdbc.url"))
      .option("dbtable", "session_aggr_stat")
      .option("user", ConfigManager.config.getString("jdbc.username"))
      .option("password", ConfigManager.config.getString("jdbc.password"))
      .mode(SaveMode.Append) // append rows; existing table data is preserved
      .save()

    // Release Spark resources.
    spark.close()
  }

}

2、pom.xml

    <dependencies>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_2.11</artifactId>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.30</version>
        </dependency>

    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

你可能感兴趣的:(黑猴子的家:Spark 写入数据到 Mysql)