1. 程式人生 > 編寫Spark測試用例

編寫Spark測試用例

使用scalaTest工具,用法參考:
scalaTest的使用

程式碼

src/test/tool/LocalSparkContext.scala

import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest._

trait LocalSparkContext extends BeforeAndAfterAll {
  self: Suite =>

  // Shared SparkContext for every test in the suite this trait is mixed into.
  // @transient keeps the suite serializable without dragging the context along.
  @transient var sc: SparkContext = _

  /** Starts a local, 2-thread SparkContext before any test in the suite runs. */
  override def beforeAll(): Unit = {
    super.beforeAll() // honor BeforeAndAfterAll's stackable-trait contract
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("test")
    sc = new SparkContext(conf)
  }

  /** Stops the SparkContext after all tests, even if super.afterAll() throws. */
  override def afterAll(): Unit = {
    try {
      if (sc != null) {
        sc.stop()
        sc = null // drop the reference so a stopped context cannot be reused
      }
    } finally {
      super.afterAll()
    }
  }
}

src/test/SparkWCSuit.scala

import org.apache.spark.sql.{Row, SQLContext}
import org.scalatest.FunSuite
import tool.LocalSparkContext

class SparkWCSuit extends FunSuite with LocalSparkContext {

  // RDD word count: sorted before asserting, so the comparison is deterministic.
  test("test rdd wc") {
    sc.setLogLevel("ERROR")
    val rdd = sc.makeRDD(Seq("a", "b", "b"))
    val res = rdd.map((_, 1)).reduceByKey(_ + _).collect().sorted
    assert(res === Array(("a", 1), ("b", 2)))
  }

  // DataFrame word count.
  test("test df wc") {
    val sqlContext = SQLContext.getOrCreate(sc)
    import sqlContext.implicits._
    val df = sc.makeRDD(Seq("a", "b", "b")).toDF("word")
    // groupBy gives no ordering guarantee, so sort the collected rows before
    // comparing; count() produces Long, hence the 1L/2L expected values.
    val res = df.groupBy("word").count().collect().sortBy(_.getString(0))
    assert(res === Array(Row("a", 1L), Row("b", 2L)))
  }
}

結果

這裡寫圖片描述