spark sql查询mysql的两种方法

spark sql查询mysql的两种方法:

package com.spark.test
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.SaveMode
import java.util.Properties
import java.util.Map
import java.util.HashMap
object MysqlDemo {

  /**
   * Demonstrates two ways of querying a MySQL table with Spark SQL:
   * (1) the generic `format("jdbc")` reader with individual options, and
   * (2) the `read.jdbc(url, table, properties)` convenience overload.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Since Spark 2.0, SparkSession replaces SQLContext and HiveContext.
    val spark = SparkSession.builder().appName("MysqlQueryDemo").master("local").getOrCreate()
    try {
      // Approach 1: DataFrameReader with format("jdbc") and per-key options.
      val jdbcDF = spark.read
        .format("jdbc")
        .option("url", "jdbc:mysql://192.168.81.129:3306/sparktest?useUnicode=true&characterEncoding=utf-8")
        .option("driver", "com.mysql.jdbc.Driver") // JDBC driver class
        .option("user", "hive")
        .option("password", "hive")
        .option("dbtable", "mysqldemo")
        .load()
      jdbcDF.show()

      // Register as a temporary view so the table can be queried with SQL.
      jdbcDF.createOrReplaceTempView("people")
      spark.sql("select * from people").show()

      // Approach 2: connection settings bundled in a java.util.Properties.
      val connectionProperties = new Properties()
      connectionProperties.put("user", "hive")
      connectionProperties.put("password", "hive")
      connectionProperties.put("driver", "com.mysql.jdbc.Driver")
      val jdbcDF2 = spark.read
        .jdbc("jdbc:mysql://192.168.81.129:3306/sparktest", "mysqldemo", connectionProperties)
      jdbcDF2.show()
    } finally {
      // Fix: the original never stopped the session, leaking the underlying
      // SparkContext; always release it, even if a query above throws.
      spark.stop()
    }
  }
}

你可能感兴趣的:(Spark,mysql,sql,spark,大数据)