用 Spark RDD 进行分组内排序:使用 groupByKey + flatMap + zipWithIndex
// Grouped in-key ranking demo: groupByKey gathers each key's values,
// then flatMap + sorted + zipWithIndex assigns a rank within every group.
val conf = new SparkConf().setAppName("name").setMaster("local[2]")
val context = new SparkContext(conf)
// context.makeRDD(List[])

// Sample (key, value) pairs; each key's values will be ranked independently.
val ssh = List(("ma", 3), ("ma", 4), ("ma", 5), ("mb", 2), ("mb", 5))

// NOTE(review): the original source was cut off after `val unit: RDD[(String, Int`;
// the remainder below is reconstructed from the stated intent
// (groupByKey + flatMap + zipWithIndex) — confirm against the original.
val unit: RDD[(String, Int)] = context.makeRDD(ssh)

val ranked: RDD[(String, Int, Int)] = unit
  .groupByKey() // (key, Iterable[Int]) — one record per key
  .flatMap { case (key, values) =>
    // Sort each group's values (ascending here — TODO confirm intended order),
    // then zipWithIndex yields a 0-based position; +1 makes ranks start at 1.
    values.toList.sorted.zipWithIndex.map { case (value, idx) =>
      (key, value, idx + 1)
    }
  }