Fixing Errors When Debugging the Flink Word-Count Example Locally

1. The problem:
com.typesafe.config.ConfigException$UnresolvedSubstitution: reference.conf @ jar:file:/C:/Users/0262000099/.m2/repository/org/apache/flink/flink-runtime_2.11/1.4.0/flink-runtime_2.11-1.4.0.jar!/reference.conf: 804: Could not resolve substitution to a value: ${akka.stream.materializer}
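The failing substitution comes from the Typesafe Config (HOCON) files that Flink and Akka ship inside their jars: every jar carries its own reference.conf, and akka-stream's file refers to values such as ${akka.stream.materializer} that must still be resolvable after all reference.conf files on the classpath are merged. Mixed jar versions (or a badly built fat jar) make that merge inconsistent, and resolution fails. As a sketch of my own (not part of the original post), you can reproduce the check outside Flink by loading the reference configuration the same way Akka does:

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class ReferenceConfCheck {
    public static void main(String[] args) {
        // defaultReference() merges every reference.conf on the classpath and
        // resolves all substitutions; on an inconsistent classpath it throws
        // the same ConfigException$UnresolvedSubstitution seen above
        Config ref = ConfigFactory.defaultReference();
        System.out.println(ref.hasPath("akka.stream.materializer"));
    }
}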
Solution: this is mainly a dependency-version conflict, so switch all the Flink jars to one consistent older version. The shade-plugin configuration below also merges the reference.conf files from every jar, which is what keeps Akka's substitutions resolvable in a fat jar. The relevant contents of the complete pom file follow:

<build>
    <plugins>
        <plugin>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>3.7.0</version>
            <configuration>
                <source>1.8</source>
                <target>1.8</target>
            </configuration>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>3.1.0</version>
            <executions>
                <execution>
                    <phase>package</phase>
                    <goals>
                        <goal>shade</goal>
                    </goals>
                    <configuration>
                        <transformers>
                            <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                <mainClass>com.ink.FlinkLambdaTest.FlinkToLambda</mainClass>
                            </transformer>
                            <!-- append every reference.conf into a single file so
                                 Akka's substitutions stay resolvable in the fat jar -->
                            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
                                <resource>reference.conf</resource>
                            </transformer>
                        </transformers>
                        <relocations>
                            <relocation>
                                <pattern>org.codehaus.plexus.util</pattern>
                                <shadedPattern>org.shaded.plexus.util</shadedPattern>
                                <excludes>
                                    <exclude>org.codehaus.plexus.util.xml.Xpp3Dom</exclude>
                                    <exclude>org.codehaus.plexus.util.xml.pull.*</exclude>
                                </excludes>
                            </relocation>
                        </relocations>
                    </configuration>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>

<dependencies>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-java</artifactId>
        <version>1.3.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-streaming-java_2.10</artifactId>
        <version>1.3.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-clients_2.10</artifactId>
        <version>1.3.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-scala_2.10</artifactId>
        <version>1.3.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-streaming-scala_2.10</artifactId>
        <version>1.3.2</version>
    </dependency>
</dependencies>
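If the exception persists after changing the pom, an old Flink or Akka build is usually still being pulled in transitively. A quick way to see which versions actually land on the classpath (a standard Maven command, not from the original post):

mvn dependency:tree -Dincludes=org.apache.flink,com.typesafe.akka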

2. Complete word-count code (three variants)
(1)

package wordcount;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.aggregation.Aggregations;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Created by 0262000099 on 2018/9/14.
 */
public class WordCountTextTest {
  public static void main(String[] args) throws Exception {

    // set up the execution environment; createLocalEnvironment() always runs
    // the job inside the current JVM, which is what we want for IDE debugging
//    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();

    // get input data
    DataSet<String> text = env.fromElements(
            "To be, or not to be,--that is the question:--",
            "Whether 'tis nobler in the mind to suffer",
            "The slings and arrows of outrageous fortune",
            "Or to take arms against a sea of troubles,"
    );

    DataSet<Tuple2<String, Integer>> counts =
            // split up the lines in pairs (2-tuples) containing: (word,1)
            text.flatMap(new LineSplitter())
                    // group by the tuple field "0" and sum up tuple field "1"
                    .groupBy(0)
                    .aggregate(Aggregations.SUM, 1);

    // emit result
    counts.print();
  }


  public static class LineSplitter implements FlatMapFunction<String, Tuple2<String, Integer>> {

    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
      // normalize and split the line into words
      String[] tokens = value.toLowerCase().split("\\W+");

      // emit the pairs
      for (String token : tokens) {
        if (token.length() > 0) {
          out.collect(new Tuple2<String, Integer>(token, 1));
        }
      }
    }
  }

}

(2)

package wordcount;

import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;

/**
 * Created by 0262000099 on 2018/9/20.
 */
public class WordCountFileTest {

    public static void main(String[] args) throws Exception {
//        ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<String> data = env.readTextFile("E:\\FlinkTest\\test.txt");

        // keep only the lines that look like URLs and write them out as text
        data
                .filter(new FilterFunction<String>() {
                    public boolean filter(String value) {
                        return value.startsWith("http://");
                    }
                })
                .writeAsText("E:\\FlinkTest\\result");

        // writeAsText is a lazy sink: the job only runs when execute() is called
        JobExecutionResult res = env.execute();
    }
}

(3)

package wordcount;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Created by 0262000099 on 2018/9/17.
 */
public class WordCountTest {

  public static void main(String[] args) throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<String> text = env.readTextFile("E:\\FlinkTest\\test.txt");

    DataSet<Tuple2<String, Integer>> counts =
            // split up the lines in pairs (2-tuples) containing: (word,1)
            text.flatMap(new Tokenizer())
                    // group by the tuple field "0" and sum up tuple field "1"
                    .groupBy(0)
                    .sum(1);

    counts.print(); // print() triggers a job execution by itself in the DataSet API

    counts.writeAsCsv("E:\\FlinkTest\\", "\n", " ");
    env.execute(); // without this call the CSV sink above would never run
  }


  // User-defined functions
  public static class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {
    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
      // normalize and split the line
      String[] tokens = value.toLowerCase().split("\\W+");
      // emit the pairs
      for (String token : tokens) {
        if (token.length() > 0) {
          out.collect(new Tuple2<String, Integer>(token, 1));
        }
      }
    }
  }
}
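One practical note (my addition, not from the original post): re-running the job fails once the output files already exist, unless the sink is allowed to overwrite them. The DataSet API has a writeAsCsv overload that takes a WriteMode; the snippet below is a drop-in replacement for the two sink lines in variant (3), assuming one extra import:

import org.apache.flink.core.fs.FileSystem;

    // overwrite previous output instead of failing on a re-run
    counts.writeAsCsv("E:\\FlinkTest\\", "\n", " ", FileSystem.WriteMode.OVERWRITE);
    env.execute();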
