Reading and writing Kudu with Spark

The following Scala example reads a Kudu table into a DataFrame through the Kudu Spark connector and then writes those rows back with KuduContext. It assumes the kudu-spark connector is on the classpath and that a Kudu table named person already exists on the master at node1:7051.

package sparkUtil

import org.apache.kudu.spark.kudu.KuduContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object SparkKudu {
  // Kudu master address and target table name
  val kuduMasters = "node1:7051"
  val tableName = "person"

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName("SparkKudu")
    conf.setMaster("local")
    val kuduOptions = Map("kudu.master" -> kuduMasters, "kudu.table" -> tableName)
    val sparkSession = SparkSession.builder().config(conf).getOrCreate()
    val kc = new KuduContext(kuduMasters, sparkSession.sparkContext)

    // Read the Kudu table into a DataFrame through the Kudu data source
    val personDF = sparkSession.read.format("org.apache.kudu.spark.kudu").options(kuduOptions).load()
    // Write the rows back to Kudu: updateRows updates existing rows matched by primary key
    kc.updateRows(personDF, tableName)

    sparkSession.close()
  }
}
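
updateRows is only one of the write paths KuduContext exposes; the connector also provides insertRows, upsertRows, and deleteRows, plus table-management helpers such as tableExists and createTable. Below is a minimal sketch, not from the original post, showing how a table might be created and populated. The person schema here (an Int primary key id and a String name column), the replica count, and the hash-partition settings are illustrative assumptions only.

import scala.collection.JavaConverters._

import org.apache.kudu.client.CreateTableOptions
import org.apache.kudu.spark.kudu.KuduContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._

object SparkKuduWriteSketch {
  def main(args: Array[String]): Unit = {
    val kuduMasters = "node1:7051"
    val tableName = "person"

    val spark = SparkSession.builder()
      .appName("SparkKuduWriteSketch")
      .master("local")
      .getOrCreate()
    val kc = new KuduContext(kuduMasters, spark.sparkContext)

    // Hypothetical schema: id is the primary key, name is a nullable string
    val schema = StructType(Seq(
      StructField("id", IntegerType, nullable = false),
      StructField("name", StringType, nullable = true)
    ))

    // Create the table only if it does not already exist
    if (!kc.tableExists(tableName)) {
      kc.createTable(
        tableName,
        schema,
        Seq("id"),
        new CreateTableOptions()
          .setNumReplicas(1)                       // single replica for a local test
          .addHashPartitions(List("id").asJava, 3) // 3 hash buckets on the key column
      )
    }

    // Build a small DataFrame and insert it into the Kudu table
    import spark.implicits._
    val df = Seq((1, "alice"), (2, "bob")).toDF("id", "name")
    kc.insertRows(df, tableName)

    spark.close()
  }
}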

 
