idea编写spark连接hive

  • idea下编码,测试连接hive,代码如下:
package com.ws.spark

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}

object RangeTest {

  /**
   * Entry point: connects to Hive via a HiveContext and prints the contents
   * of the `ws.t_hive` table to stdout.
   *
   * The metastore and HDFS settings come from the hive-site.xml /
   * hdfs-site.xml files placed on the classpath (resources directory).
   */
  def main(args: Array[String]): Unit = {
    // Local-mode Spark application for testing the Hive connection from the IDE.
    val conf = new SparkConf().setAppName("RangeTest").setMaster("local")
    val sc = new SparkContext(conf)
    val hive = new HiveContext(sc)

    // Run the query and dump the resulting rows.
    hive.sql("select * from ws.t_hive").show()

    // Word-count example kept for reference:
    // val sourceRdd: RDD[String] = sc.textFile("hdfs://192.168.0.21:9000/test")
    // val result: RDD[(String, Int)] = sourceRdd.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
    // result.saveAsTextFile("hdfs://192.168.0.21:9000/result")

    sc.stop()
  }

}

  • resource如下(不添加hive配置以及hadoop配置会报错)
    hdfs-site.xml(可有可无)







  
	dfs.replication
    3
  

  
    dfs.namenode.name.dir
    file:/usr/local/apps/hadoop-2.7.7/dfs/name
  

  
    dfs.datanode.data.dir
    file:/usr/local/apps/hadoop-2.7.7/dfs/data
  
  
    
    dfs.permissions
    false
  

hive-site.xml






  
    hive.metastore.local
    true
    
  

  
  
    javax.jdo.option.ConnectionURL
    jdbc:mysql://192.168.0.21:3306/hive?createDatabaseIfNotExist=true&characterEncoding=UTF-8&useSSL=false
    
  

  
    javax.jdo.option.ConnectionDriverName
    com.mysql.jdbc.Driver
    
  

  
    javax.jdo.option.ConnectionUserName
    root
    
  

  
    javax.jdo.option.ConnectionPassword
    Admin_123456
    
  

  
    hive.metastore.schema.verification.record.version
    false
    
      When true the current MS version is recorded in the VERSION table. If this is disabled and verification is
      enabled the MS will be unusable.
    
  

  
    hive.metastore.warehouse.dir
    /usr/local/apps/hive/warehouse
    location of default database for the warehouse
  

你可能感兴趣的:(spark,hive)