Secondary sort in Spark (Scala version): records are ordered by click count first, then by order count, and finally by pay count, using a custom key class that extends Ordered so that sortByKey can compare keys directly.

import java.io.Serializable

// Custom sort key: compare by clickCount first, then orderCount, then payCount.
// Extending Ordered lets sortByKey use this class as a key; Serializable is
// required because the key is shuffled across the cluster during the sort.
class SortKey(val clickCount: Int, val orderCount: Int, val payCount: Int)
    extends Ordered[SortKey] with Serializable {

    override def compare(that: SortKey): Int = {
        // Subtraction works here because the counts are small non-negative ints.
        if (clickCount - that.clickCount != 0) {
            clickCount - that.clickCount
        } else if (orderCount - that.orderCount != 0) {
            orderCount - that.orderCount
        } else {
            payCount - that.payCount
        }
    }
}
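
As a side note (not part of the original post), the same three-level comparison can be expressed by delegating to Scala's built-in tuple Ordering, which avoids hand-writing the if/else chain. A minimal sketch, with a hypothetical class name SortKeyViaTuple and the same field names:

class SortKeyViaTuple(val clickCount: Int, val orderCount: Int, val payCount: Int)
    extends Ordered[SortKeyViaTuple] with Serializable {
    // Tuple3 Ordering compares element by element, which matches the
    // click -> order -> pay priority used above.
    override def compare(that: SortKeyViaTuple): Int =
        Ordering.Tuple3[Int, Int, Int].compare(
            (clickCount, orderCount, payCount),
            (that.clickCount, that.orderCount, that.payCount))
}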

import org.apache.spark.{SparkConf, SparkContext}

object SortKeyTest {
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
            .setAppName("sortByKey")
            .setMaster("local")
        val sc = new SparkContext(conf)

        // (SortKey, id) pairs; the id string makes the resulting order easy to read.
        val arr = Array(
            (new SortKey(30, 35, 40), "1"),
            (new SortKey(35, 30, 40), "2"),
            (new SortKey(30, 38, 30), "3"),
            (new SortKey(35, 38, 30), "4")
        )

        val rdd = sc.parallelize(arr, 1)
        // sortByKey(false) sorts in descending order using SortKey.compare.
        val sortRDD = rdd.sortByKey(false)
        for (tuple <- sortRDD.collect()) {
            println(tuple._2)
        }

        sc.stop()
    }
}
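
Because sortByKey(false) sorts in descending order, the keys come out as (35,38,30), (35,30,40), (30,38,30), (30,35,40): clickCount is compared first, so the two keys with 35 clicks precede the two with 30, and ties are broken by orderCount. Running the example locally therefore prints:

4
2
3
1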
