package com.chinatelecom.hadoop;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;

import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

public final class SparkWordCount {
    private static final Pattern SPACE = Pattern.compile(" ");

    public static void main(String[] args) throws Exception {
        if (args.length < 1) {
            System.err.println("Usage: SparkWordCount ");
            System.exit(1);
        }

        SparkConf sparkConf = new SparkConf().setAppName("JavaWordCount");
        JavaSparkContext ctx = new JavaSparkContext(sparkConf);
        JavaRDD<String> lines = ctx.textFile(args[0], 1);

        // Split each line on spaces to produce a flat RDD of words.
        JavaRDD<String> words = lines.flatMap(
                new FlatMapFunction<String, String>() {
                    @Override
                    public Iterable<String> call(String s) {
                        return Arrays.asList(SPACE.split(s));
                    }
                });

        // Pair each word with an initial count of 1.
        JavaPairRDD<String, Integer> ones = words.mapToPair(
                new PairFunction<String, String, Integer>() {
                    @Override
                    public Tuple2<String, Integer> call(String s) {
                        return new Tuple2<String, Integer>(s, 1);
                    }
                });

        // Sum the counts for each word across all partitions.
        JavaPairRDD<String, Integer> counts = ones.reduceByKey(
                new Function2<Integer, Integer, Integer>() {
                    @Override
                    public Integer call(Integer i1, Integer i2) {
                        return i1 + i2;
                    }
                });

        // Bring the results back to the driver and print them.
        List<Tuple2<String, Integer>> output = counts.collect();
        for (Tuple2<String, Integer> tuple : output) {
            System.out.println(tuple._1() + ": " + tuple._2());
        }
        ctx.stop();
        System.exit(0);
    }
}
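
The anonymous-class style above targets the Spark 1.x Java API. On Spark 2.x and later, flatMap's FlatMapFunction returns an Iterator rather than an Iterable, and Java 8 lambdas make the same pipeline much shorter. A minimal sketch under those assumptions (the class name SparkWordCountLambda is made up for illustration, not part of the original project):

package com.chinatelecom.hadoop;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;
import java.util.regex.Pattern;

public final class SparkWordCountLambda {
    private static final Pattern SPACE = Pattern.compile(" ");

    public static void main(String[] args) {
        if (args.length < 1) {
            System.err.println("Usage: SparkWordCountLambda <file>");
            System.exit(1);
        }
        SparkConf conf = new SparkConf().setAppName("JavaWordCountLambda");
        // JavaSparkContext implements Closeable, so try-with-resources stops it.
        try (JavaSparkContext ctx = new JavaSparkContext(conf)) {
            JavaRDD<String> lines = ctx.textFile(args[0]);
            JavaPairRDD<String, Integer> counts = lines
                    // Spark 2.x: the flatMap lambda must return an Iterator.
                    .flatMap(s -> Arrays.asList(SPACE.split(s)).iterator())
                    .mapToPair(s -> new Tuple2<>(s, 1))
                    .reduceByKey((i1, i2) -> i1 + i2);
            for (Tuple2<String, Integer> tuple : counts.collect()) {
                System.out.println(tuple._1() + ": " + tuple._2());
            }
        }
    }
}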

Submit the packaged jar to the standalone cluster (adjust the master URL and paths to your environment):

spark-submit --master spark://NameNode34:7077 \
  --name JavaWordCountByLCS \
  --class com.chinatelecom.hadoop.SparkWordCount \
  --executor-memory 1G \
  --total-executor-cores 2 \
  /opt/spark.jar hdfs://NameNode34:9000/input
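
Note that collect() ships every (word, count) pair back to the driver, which only suits small inputs. For larger data, a sketch of writing results to HDFS instead (the output directory below is hypothetical and must not already exist):

        // Replace the collect()-and-print loop with a distributed write.
        // Hypothetical output path; the job fails if the directory exists.
        counts.saveAsTextFile("hdfs://NameNode34:9000/output/wordcount");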


spark-submit --help
Lists all spark-submit options with descriptions.