A Small Spark SQL DataFrame Example
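The program below builds a DataFrame from a text file in two ways (case-class reflection and an explicit StructType schema), registers it as a temporary table, and queries it with SQL. It assumes person.txt holds space-separated records in the form "id name age"; a hypothetical sample (values are illustrative only):

1 zhangsan 19
2 lisi 25
3 wangwu 30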

package com.looc.spark.hpeu

import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}

object Spark_SQL_DataFrame {

  def main(args: Array[String]): Unit = {
    // Create a SparkConf and set the application name and master
    val conf = new SparkConf().setAppName("SQL").setMaster("local")
    // SparkContext is constructed from the SparkConf
    val sc = new SparkContext(conf)
    // Create the SQLContext
    val sqlContext = new SQLContext(sc)

    // ---------- First way to create a DataFrame ----------
    // Map each line of the input file onto the Person case class
    val personRDDOne = sc.textFile("hdfs://mini1:9000/bigdata/person.txt").map(line => {
      val fields = line.split(" ")
      Person(fields(0).toInt, fields(1), fields(2).toInt)
    })
    // Import implicit conversions so the RDD can be converted to a DataFrame
    import sqlContext.implicits._
    val personDFOne = personRDDOne.toDF()
    // ------------------------------------

    // ---------- Second way to create a DataFrame ----------
    // Create an RDD from the given path
    val personRDDTwo = sc.textFile("hdfs://mini1:9000/bigdata/person.txt").map(_.split(" "))
    // Convert each split line into a Row
    val rowRDD = personRDDTwo.map(line => Row(line(0).toInt, line(1).trim, line(2).toInt))
    // Specify each field's schema explicitly with a StructType
    val schema = StructType(
      List(
        StructField("id", IntegerType, nullable = true),
        StructField("name", StringType, nullable = true),
        StructField("age", IntegerType, nullable = true)
      )
    )
    // Apply the schema to the rowRDD to build the DataFrame
    val personDFTwo = sqlContext.createDataFrame(rowRDD, schema)
    // ------------------------------------

    // Register the DataFrame as a temporary table named "person"
    personDFOne.registerTempTable("person")
    // Run a SQL query against the registered table
    sqlContext.sql("select * from person where age >= 25 order by age").show()
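    // personDFTwo (built from the explicit schema) is never queried above; as a
    // minimal sketch, the same query can be expressed with the DataFrame API:
    personDFTwo.filter($"age" >= 25).orderBy($"age").show()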
    // Stop the SparkContext
    sc.stop()
  }
}

// Case class used for reflection-based schema inference; it must be defined outside main
case class Person(id: Int, name: String, age: Int)
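
Note: the code above uses the Spark 1.x API. On Spark 2.x, SQLContext is superseded by SparkSession and registerTempTable by createOrReplaceTempView; a minimal sketch of the equivalent program (same assumed input path):

import org.apache.spark.sql.SparkSession

object Spark_SQL_DataFrame_2x {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("SQL").master("local").getOrCreate()
    import spark.implicits._
    // Build the DataFrame via case-class reflection, as in the first way above
    val personDF = spark.sparkContext
      .textFile("hdfs://mini1:9000/bigdata/person.txt")
      .map(line => { val f = line.split(" "); Person(f(0).toInt, f(1), f(2).toInt) })
      .toDF()
    personDF.createOrReplaceTempView("person")
    spark.sql("select * from person where age >= 25 order by age").show()
    spark.stop()
  }
}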
