// DataFrame example

package SparkSQLDemo

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by tg on 10/27/16.
  */
/**
  * Demonstrates basic Spark SQL DataFrame operations (Spark 1.x `SQLContext` API)
  * against a local JSON file. Each DataFrame call is annotated with the
  * equivalent SQL statement it mirrors.
  *
  * Created by tg on 10/27/16.
  */
object SqlDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("SqlDemo").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)

      // Load the JSON file; Spark infers the schema from the records.
      // NOTE(review): path is machine-specific — assumes the file exists locally.
      val df = sqlContext.read.json("file:///home/tg/datas/stus.json")

      // select * from stus
      df.show()

      // desc stus  (print the inferred schema, not SHOW CREATE TABLE)
      df.printSchema()

      // select name from stus
      df.select("name").show()

      // select name, age + 1 from stus
      df.select(df("name"), df("age") + 1).show()

      // select * from stus where age > 18
      df.filter(df("age") > 18).show()

      // select age, count(*) from stus group by age
      df.groupBy("age").count().show()
    } finally {
      // Always release the SparkContext (executors, UI, shuffle dirs),
      // even if reading or querying throws.
      sc.stop()
    }
  }
}

// Related topics: Spark, SQL