import scala.collection.mutable

val lines = List("hello world", "hello spark")
val wordlist = lines.flatMap(line => line.split(" ")).map(word => (word, 1))

// Method 1: groupBy first, then map each group to its size
wordlist.groupBy(_._1).map { case (word, list) => (word, list.size) }.foreach(println)

// Method 2: map-reduce via aggregate, which is more efficient because it
// avoids materializing the intermediate per-word groups
val seqop = (result: mutable.HashMap[String, Int], wordcount: (String, Int)) => {
  val addOne = (wordcount._1, result.getOrElse(wordcount._1, 0) + wordcount._2)
  result += addOne
}
val combop = (result1: mutable.HashMap[String, Int], result2: mutable.HashMap[String, Int]) => {
  // Merge by summing counts; `result1 ++= result2` would silently overwrite
  // result1's count for any word that appears in both maps
  result2.foreach { case (word, count) =>
    result1 += (word -> (result1.getOrElse(word, 0) + count))
  }
  result1
}
val result = wordlist.aggregate(mutable.HashMap[String, Int]())(seqop, combop)
println(result)
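Note that on a sequential List, aggregate degenerates to a fold and combop is never invoked; it only runs when the collection is actually split into partitions. A minimal sketch of where the merge matters, assuming Scala 2.12 where .par parallel collections are built in:

import scala.collection.mutable

val lines = List("hello world", "hello spark")
val wordlist = lines.flatMap(_.split(" ")).map(word => (word, 1))

// The zero element is by-name, so each partition starts from its own
// fresh HashMap; combop then merges the per-partition maps pairwise.
val parResult = wordlist.par.aggregate(mutable.HashMap[String, Int]())(
  (acc, wc) => { acc += (wc._1 -> (acc.getOrElse(wc._1, 0) + wc._2)); acc },
  (m1, m2) => {
    m2.foreach { case (word, count) => m1 += (word -> (m1.getOrElse(word, 0) + count)) }
    m1
  }
)
println(parResult)  // e.g. Map(hello -> 2, world -> 1, spark -> 1)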
Word count reading from a file:
import scala.collection.mutable
import scala.io.Source

val lines = Source.fromFile("test.txt").getLines()

val seqop = (result: mutable.HashMap[String, Int], line: String) => {
  // Strip punctuation, split into words, and pair each word with a count of 1
  val wordcount = line.replace(",", " ").replace(".", " ")
    .replace("(", " ").replace(")", " ")
    .split(" ").filter(_.trim.nonEmpty).map(word => (word, 1))
  wordcount.foreach { wc =>
    val addOne = (wc._1, result.getOrElse(wc._1, 0) + wc._2)
    result += addOne
  }
  result
}
val combop = (result1: mutable.HashMap[String, Int], result2: mutable.HashMap[String, Int]) => {
  // Same count-summing merge as above
  result2.foreach { case (word, count) =>
    result1 += (word -> (result1.getOrElse(word, 0) + count))
  }
  result1
}
val test = lines.aggregate(mutable.HashMap[String, Int]())(seqop, combop)
println(test)
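Since Scala 2.13, aggregate is deprecated for sequential collections, with foldLeft recommended instead. A minimal equivalent sketch using foldLeft (the "[ ,.()]+" split regex is my shorthand for the replace-then-split cleanup above):

import scala.collection.mutable
import scala.io.Source

// foldLeft with the same seqop logic produces the same counts on an Iterator
val counts = Source.fromFile("test.txt").getLines()
  .foldLeft(mutable.HashMap[String, Int]()) { (result, line) =>
    line.split("[ ,.()]+").filter(_.nonEmpty).foreach { word =>
      result += (word -> (result.getOrElse(word, 0) + 1))
    }
    result
  }
println(counts)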