spark 2.1.1访问hive2.3.0

添加 MySQL JDBC 驱动依赖

    <dependency>
        <groupId>mysql</groupId>
        <artifactId>mysql-connector-java</artifactId>
        <version>5.1.43</version>
    </dependency>

添加spark-hive支持依赖

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-hive_2.11</artifactId>
        <version>2.1.1</version>
    </dependency>

拷贝 hive-site.xml、hdfs-site.xml、core-site.xml 到 resources 目录中

运行spark程序

 // Build a local SparkSession with Hive support enabled
 // (requires hive-site.xml / hdfs-site.xml / core-site.xml on the classpath).
 val spark = SparkSession
   .builder()
   .master("local[*]")
   .appName("hive")
   .enableHiveSupport()
   .getOrCreate()

 import spark.sql

 // Create the source table and load a local text file into it.
 sql("CREATE TABLE IF NOT EXISTS src (ci STRING)")
 sql("LOAD DATA LOCAL INPATH 'src/main/resources/1.txt' INTO TABLE src")

 sql("SELECT * FROM src").show()
 sql("SELECT COUNT(*) FROM src").show()

 // BUGFIX: `df` was never defined in the original snippet and the example
 // could not compile. Materialize the query result as a DataFrame first,
 // then persist it as a new Hive table.
 val df = sql("SELECT * FROM src")
 sql("DROP TABLE IF EXISTS LOC")
 df.write.saveAsTable("LOC")

若建表失败,提示权限不足,无法向HDFS中写入,修改HDFS上warehouse的权限即可

hadoop fs -chmod 777 /user/hive/warehouse

你可能感兴趣的:(hive,spark)