SparkSession WordCount

A word count implemented with the Spark Dataset API: read a text file as a Dataset[String], split each line into words, group by the lowercased word, and count each group.

package com.wjl7813.spark.core.rdd.transformations

import org.apache.spark.sql.SparkSession

object WordCount {

  def main(args: Array[String]): Unit = {

    // Build a local SparkSession with four worker threads
    val spark = SparkSession.builder()
      .appName("WordCountExample")
      .master("local[4]")
      .getOrCreate()

    import spark.implicits._

    // Read the input file as a Dataset[String], one element per line
    val data = spark.read.text("data/test.txt").as[String]

    // Split each line on whitespace to get individual words
    val words = data.flatMap(value => value.split("\\s+"))

    // Group by the lowercased word, then count the rows in each group
    val groupedWords = words.groupByKey(_.toLowerCase)
    val counts = groupedWords.count()

    counts.show()

    spark.stop()
  }

}

Running the program against data/test.txt, counts.show() prints:

 

+--------+--------+
|   value|count(1)|
+--------+--------+
| session|       1|
|      by|       1|
|   using|       1|
| program|       1|
|   count|       1|
|   group|       1|
|      is|       1|
| testing|       2|
|   spark|       2|
|    word|       1|
|     and|       1|
|     key|       1|
|complete|       1|
+--------+--------+
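
In the output, the value column holds the lowercased grouping key and count(1) holds the number of occurrences of each word.

For comparison, below is a minimal sketch of the same word count written against the RDD API with reduceByKey instead of the Dataset API's groupByKey/count. The object name RddWordCount is made up for illustration; the input path data/test.txt and the local[4] master are simply carried over from the example above.

package com.wjl7813.spark.core.rdd.transformations

import org.apache.spark.sql.SparkSession

object RddWordCount {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("RddWordCount")
      .master("local[4]")
      .getOrCreate()

    // Drop down to the underlying SparkContext for the RDD API
    val sc = spark.sparkContext

    // textFile -> split into words -> pair each word with 1 -> sum per key
    val counts = sc.textFile("data/test.txt")
      .flatMap(_.split("\\s+"))
      .map(word => (word.toLowerCase, 1))
      .reduceByKey(_ + _)

    counts.collect().foreach(println)

    spark.stop()
  }

}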
