Adding Scala to a Spark Maven project

The POM file:



<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.unionpay</groupId>
    <artifactId>sparkDemo</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <spark-version>2.4.0</spark-version>
        <hadoop-version>2.6.0-cdh5.14.4</hadoop-version>
        <scala.version>2.11</scala.version>
        <scope>compile</scope>
    </properties>

    <repositories>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/content/repositories/releases/</url>
        </repository>
    </repositories>

    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>io.netty</groupId>
                <artifactId>netty-all</artifactId>
                <version>4.1.18.Final</version>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop-version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop-version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_${scala.version}</artifactId>
            <version>${spark-version}</version>
            <scope>${scope}</scope>
        </dependency>
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.11.12</version>
        </dependency>
    </dependencies>

    <build>
        <resources>
            <resource>
                <directory>src/main/resources</directory>
            </resource>
        </resources>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.0</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>
            <plugin>
                <!-- register src/main/java and src/main/scala as source roots -->
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>build-helper-maven-plugin</artifactId>
                <version>1.4</version>
                <executions>
                    <execution>
                        <id>add-source</id>
                        <phase>generate-sources</phase>
                        <goals>
                            <goal>add-source</goal>
                        </goals>
                        <configuration>
                            <sources>
                                <source>src/main/java</source>
                                <source>src/main/scala</source>
                            </sources>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <!-- compile the Scala sources -->
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <version>3.2.2</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                        <configuration>
                            <args>
                                <arg>-dependencyfile</arg>
                                <arg>${project.build.directory}/.scala_dependencies</arg>
                            </args>
                        </configuration>
                    </execution>
                </executions>
                <configuration>
                    <scalaVersion>2.11.12</scalaVersion>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

The Scala class:

import org.apache.spark.{SparkConf, SparkContext}

object SparkCount {

  def main(args: Array[String]): Unit = {
    // Run locally, using as many worker threads as there are cores
    val conf = new SparkConf()
      .setMaster("local[*]").setAppName("demo")
    val sc = new SparkContext(conf)

    // Word count: split each line on spaces, pair every word with 1, sum the counts per word
    val r = sc.textFile("D:\\mr\\test.txt")
      .flatMap(line => line.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
      .collect()
    r.foreach(println)

    sc.stop()
  }
}
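
Since the POM already pulls in the CDH Hadoop client libraries, the same job can also read from and write to HDFS and be submitted to a cluster. The following is a minimal sketch under that assumption; the object name SparkCountHdfs and the hdfs:// paths are placeholders, not part of the original project.

import org.apache.spark.{SparkConf, SparkContext}

object SparkCountHdfs {

  def main(args: Array[String]): Unit = {
    // When launched via spark-submit, the master URL is normally supplied on the
    // command line rather than hard-coded, so only the app name is set here.
    val conf = new SparkConf().setAppName("demo-hdfs")
    val sc = new SparkContext(conf)

    // Hypothetical HDFS paths -- replace with real input/output locations.
    val input = "hdfs:///tmp/test.txt"
    val output = "hdfs:///tmp/wordcount-output"

    sc.textFile(input)
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
      .saveAsTextFile(output) // write the results back to HDFS instead of collecting them

    sc.stop()
  }
}

After packaging the project with mvn package, the resulting jar can be handed to spark-submit together with the desired master URL.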

 
