Writing Spark Test Cases

The tests are written with the ScalaTest framework; for basic usage of ScalaTest, see:
Using ScalaTest
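
For reference, a minimal build.sbt that pulls Spark and ScalaTest into the project might look like the sketch below; the project name and the library versions are assumptions, so adjust them to your own environment.

name := "spark-test-demo"

scalaVersion := "2.11.8"

// versions here are assumptions; match them to your own Spark / Scala setup
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "1.6.3",
  "org.apache.spark" %% "spark-sql"  % "1.6.3",
  "org.scalatest"    %% "scalatest"  % "2.2.6" % "test"
)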

Code

src/test/tool/LocalSparkContext.scala

import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest._

// Mix this trait into a suite to get a local SparkContext that is created
// once before all tests and stopped once after they have all run.
trait LocalSparkContext extends BeforeAndAfterAll {
  self: Suite =>

  @transient var sc: SparkContext = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    val conf = new SparkConf()
      .setMaster("local[2]") // local mode with two worker threads
      .setAppName("test")
    sc = new SparkContext(conf)
  }

  override def afterAll(): Unit = {
    if (sc != null) {
      sc.stop()
      sc = null
    }
    super.afterAll()
  }
}
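
The trait above shares a single SparkContext across all tests in a suite and only stops it in afterAll, which keeps the suite fast. If you need full isolation between individual tests, a per-test variant based on BeforeAndAfterEach is a small change; the sketch below is only an assumed alternative (the name PerTestSparkContext is made up here), not part of the original code.

import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest._

// Hypothetical per-test variant: recreates the SparkContext around every test
// instead of once per suite.
trait PerTestSparkContext extends BeforeAndAfterEach {
  self: Suite =>

  @transient var sc: SparkContext = _

  override def beforeEach(): Unit = {
    super.beforeEach()
    sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("test"))
  }

  override def afterEach(): Unit = {
    try {
      if (sc != null) sc.stop()
      sc = null
    } finally {
      super.afterEach()
    }
  }
}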

src/test/SparkWCSuit.scala

import org.apache.spark.sql.{Row, SQLContext}
import org.scalatest.FunSuite
import tool.LocalSparkContext

class SparkWCSuit extends FunSuite
  with LocalSparkContext {

  // RDD word count
  test("test rdd wc") {
    sc.setLogLevel("ERROR")
    val rdd = sc.makeRDD(Seq("a", "b", "b"))
    val res = rdd.map((_, 1)).reduceByKey(_ + _).collect().sorted
    assert(res === Array(("a", 1), ("b", 2)))
  }

  // DataFrame word count
  test("test df wc") {
    val sqlContext = SQLContext.getOrCreate(sc)
    import sqlContext.implicits._
    val df = sc.makeRDD(Seq("a", "b", "b")).toDF("word")
    // sort by word so the collected order is deterministic; count() yields Long values
    val res = df.groupBy("word").count().sort("word").collect()
    assert(res === Array(Row("a", 1L), Row("b", 2L)))
  }
}
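
The same count can also be checked through Spark SQL. The extra test below is a sketch that is not in the original post; it would go inside SparkWCSuit and uses registerTempTable, the Spark 1.x way of exposing a DataFrame to SQL.

  // Hypothetical extra test: word count via a SQL query over a temp table
  test("test sql wc") {
    val sqlContext = SQLContext.getOrCreate(sc)
    import sqlContext.implicits._
    sc.makeRDD(Seq("a", "b", "b")).toDF("word").registerTempTable("words")
    val res = sqlContext
      .sql("SELECT word, COUNT(*) AS cnt FROM words GROUP BY word ORDER BY word")
      .collect()
    assert(res === Array(Row("a", 1L), Row("b", 2L)))
  }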

Results

Run the suite, for example with sbt test from the project root; both word-count tests should pass.

