Spark Core基础RDD操作【map,flatMap,mapPartitions,flatMapToPair】20200302

Spark Core基础RDD操作【map,flatMap,mapPartitions,flatMapToPair】


    
        
            org.apache.spark
            spark-core_2.11
            2.3.1
        
    
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import scala.Tuple2;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class SparkCoreDemo {

    private static String appName = "spark.demo";
    private static String master = "local[*]";

    public static void main(String args[]){
        //初始化 JavaSparkContext
        SparkConf conf = new SparkConf().setAppName(appName).setMaster(master);
        JavaSparkContext sc = new JavaSparkContext(conf);

        // 构造数据源
        List data = Arrays.asList(1, 2, 3, 4, 5);

        //并行化创建rdd
        JavaRDD rdd = sc.parallelize(data,3);

        //map
        List list_rdd = rdd.map(new Function() {
            public Integer call(Integer i) throws Exception {
                i = i + 1;
                return i;
            }
        }).collect();

        //map
        for(Integer i : list_rdd){
            System.out.println(i);
        }



        //flatMap
        JavaRDD rdd0 = rdd.flatMap(new FlatMapFunction(){

            public Iterator call(Integer integer) throws Exception {
                List l = new ArrayList();
                l.add(integer);
                return l.iterator();
            }
        });

        System.out.println("flatMap: "+rdd0.collect());


        //mapPartitions
        JavaRDD rdd1  = rdd.mapPartitions(new FlatMapFunction, Integer[]>(){
            public Iterator call(Iterator integerIterator) throws Exception {
                List l = new ArrayList();
                Integer temp = 0;
                while(integerIterator.hasNext()){
                    Integer i = integerIterator.next();
                    temp = temp + i;
                }
                l.add(temp);
                return l.iterator();
            }
        });

        System.out.println("mapPartitions: "+ rdd1.collect());



        //flatMapToPair
        JavaPairRDD rdd2 = rdd.flatMapToPair(new PairFlatMapFunction(){
            public Iterator> call(Integer integer) throws Exception {
                List> resultTuple = new ArrayList>();
                System.out.println(integer);
                resultTuple.add(new Tuple2(integer,integer));

                return resultTuple.iterator();
            }
        });

        System.out.println("mapPartitionsToPair: "+ rdd2.collect());


    }
}

result

2
3
4
5
6

flatMap: [1, 2, 3, 4, 5]

mapPartitions: [1, 5, 9]

flatMapToPair: [(1,1), (2,2), (3,3), (4,4), (5,5)]

 

你可能感兴趣的:(Spark,Java)