spark RDD 小实验 测试

/** Minimal stand-in for Spark's driver-side context (toy experiment).
  *
  * @param clientDriver name of the driver client; exposed through the
  *                     mutable field `scname`
  */
class SparkContext(clientDriver: String) {
  // Mutable on purpose: callers may rebind the context name after construction.
  var scname: String = clientDriver
}
/** Base abstraction of a resilient distributed dataset (toy version).
  *
  * @param sc the SparkContext this RDD belongs to; mutable to mirror
  *           the original design
  */
abstract class RDD(var sc: SparkContext) {
  /** Persist this RDD at the given storage level.
    *
    * Default implementation is a no-op; subclasses override it.
    * Written with an explicit `: Unit =` result — the original used
    * procedure syntax (`def persist(level:String){}`), which is
    * deprecated in Scala 2.13 and removed in Scala 3.
    *
    * @param level storage-level description (free-form string here)
    */
  def persist(level: String): Unit = {}
}

/** Concrete toy RDD whose persist() reports to stdout.
  *
  * @param myRDDsc context whose driver name is echoed by [[persist]]
  */
class myRDD(var myRDDsc: SparkContext) extends RDD(myRDDsc) {
  // Super-constructor call fixed: the original passed a redundant type
  // ascription (`RDD(myRDDsc:SparkContext)`).
  // Snapshot of the driver name taken at construction time; NOTE: it does
  // not track later reassignments of myRDDsc.scname.
  var mysc = myRDDsc.scname

  /** Print a line recording where the data is "saved".
    *
    * Uses string interpolation instead of `+` concatenation; the emitted
    * text is byte-identical to the original output.
    *
    * @param level storage-level description appended to the message
    */
  override def persist(level: String): Unit = {
    println(s"$mysc  the bigdata saving  $level")
  }
}

 

测试一下

 

scala> var noc = new SparkContext("2016'netbigdata")

noc: SparkContext = SparkContext@8a0d7d

 

scala> var dzhRDD=new myRDD(noc)

dzhRDD: myRDD = myRDD@1f7fb55

 

scala> dzhRDD.persist(" hdfs levelstoraging!!")

2016'netbigdata  the bigdata saving   hdfs levelstoraging!!

 

scala>

 

你可能感兴趣的:(spark RDD 小实验 测试)