// Create an empty DataFrame as an initial/default result.
// NOTE(review): `var` kept in case `ret` is reassigned later in the file (not visible here);
// prefer `val` if no reassignment exists.
var ret = spark.emptyDataFrame
// Import the implicit conversions (e.g. .toDF()) for RDD-to-DataFrame conversion
import sqlContext.implicits._
// Build an RDD of case-class instances; with the implicits imported above,
// toDF() derives the DataFrame schema from Person's field names and types.
val rdd: RDD[Person] = sc.parallelize(Array(
  Person("fanghailiang", 29),
  Person("sunyu", 28),
  Person("jiaolu", 26),
  Person("dingzelin", 31)
))
// Convert the case-class RDD to a DataFrame (columns taken from Person's fields).
val df: DataFrame = rdd.toDF()
// A tuple-based RDD: column names are not inferable from tuple elements,
// so they are supplied explicitly to toDF below.
val rdd2: RDD[(String, Int)] = sc.parallelize(
  Array(
    "fanghailiang" -> 29,
    "sunyu"        -> 28,
    "jiaolu"       -> 26,
    "dingzelin"    -> 31
  )
)
// Name the two columns explicitly ("name2", "age3") when converting to a DataFrame.
val df2: DataFrame = rdd2.toDF("name2", "age3")
// 1. Build an RDD[Row] from raw tuples; Row is untyped, so the schema
//    must be supplied separately (see createDataFrame below).
val rowRdd: RDD[Row] = sc.parallelize(Array(
  ("fanghailiang", 29),
  ("sunyu", 28),
  ("jiaolu", 26),
  ("dingzelin", 31)
)).map { case (name, age) => Row(name, age) }
// 2. Define the schema explicitly: both columns are non-nullable.
//    The named `nullable =` argument documents the boolean literal's meaning.
val schema: StructType = StructType(Array(
  StructField("name", StringType, nullable = false),
  StructField("age", IntegerType, nullable = false)
))
// 3. Create the DataFrame by pairing the untyped Row RDD with the explicit schema.
val df3: DataFrame = sqlContext.createDataFrame(rowRdd, schema)