Real-time WordCount with Flink in Java

Maven dependencies (pom.xml)

	<properties>
		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
		<flink.version>1.12.0</flink.version>
		<!-- Scala suffix of the Flink artifacts below (2.11 or 2.12 for Flink 1.12) -->
		<scala.binary.version>2.11</scala.binary.version>
		<target.java.version>1.8</target.java.version>
		<maven.compiler.source>${target.java.version}</maven.compiler.source>
		<maven.compiler.target>${target.java.version}</maven.compiler.target>
		<log4j.version>2.12.1</log4j.version>
	</properties>

	<dependencies>
		<dependency>
			<groupId>org.apache.flink</groupId>
			<artifactId>flink-java</artifactId>
			<version>${flink.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.flink</groupId>
			<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
			<version>${flink.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.flink</groupId>
			<artifactId>flink-clients_${scala.binary.version}</artifactId>
			<version>${flink.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.logging.log4j</groupId>
			<artifactId>log4j-slf4j-impl</artifactId>
			<version>${log4j.version}</version>
			<scope>runtime</scope>
		</dependency>
		<dependency>
			<groupId>org.apache.logging.log4j</groupId>
			<artifactId>log4j-api</artifactId>
			<version>${log4j.version}</version>
			<scope>runtime</scope>
		</dependency>
		<dependency>
			<groupId>org.apache.logging.log4j</groupId>
			<artifactId>log4j-core</artifactId>
			<version>${log4j.version}</version>
			<scope>runtime</scope>
		</dependency>
	</dependencies>
package cn._51doit.flink;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * @author :xiaotao
 * @date :2021/4/30 10:15
 * @description: Read data from the specified socket and count the words.
 */
public class StreamingWordCount {

    public static void main(String[] args) throws Exception {

        // Create the Flink streaming execution environment; getExecutionEnvironment()
        // automatically decides whether to run locally or on a cluster.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Call the source: read a text stream from the given socket host and port.
        DataStreamSource<String> lines = env.socketTextStream("localhost", 8888);

        // Call the transformations.
        // Alternative: split/flatten and emit (word, 1) tuples in a single flatMap.
        /*SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne = lines.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String line, Collector<Tuple2<String, Integer>> collector) throws Exception {
                String[] words = line.split(" ");
                for (String word : words) {
                    collector.collect(Tuple2.of(word, 1));
                }
            }
        });*/

        // flatMap is written here as an anonymous inner class (new FlatMapFunction<...>() {});
        // a lambda expression would also work, but the official docs recommend the
        // anonymous-inner-class style. (A lambda variant is sketched after this class.)
        SingleOutputStreamOperator<String> word = lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String lines, Collector<String> collector) throws Exception {
                String[] words = lines.split(" ");
                for (String word : words) {
                    collector.collect(word);
                }
            }
        });

        // Map each word to a (word, 1) tuple.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne = word.map(new MapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(String word) throws Exception {
                return Tuple2.of(word, 1);
            }
        });

        // Key the stream by the word (tuple field f0).
        KeyedStream<Tuple2<String, Integer>, String> keyed = wordAndOne.keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
            @Override
            public String getKey(Tuple2<String, Integer> tp) throws Exception {
                return tp.f0;
            }
        });

        SingleOutputStreamOperator<Tuple2<String, Integer>> summed = keyed.sum(1);

        // Call the sink: print the results to stdout.
        summed.print();

        // Start the job.
        env.execute("StreamingWordCount");
    }
}
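
As mentioned in the flatMap comment, the same pipeline can also be written with lambda expressions. Because Java lambdas erase the generic types, Flink then needs an explicit type hint via returns(Types.TUPLE(...)) for the flatMap output. The following is a minimal sketch of that variant, assuming the same localhost:8888 socket; the class name StreamingWordCountLambda is only illustrative and not part of the original program.

package cn._51doit.flink;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Lambda variant of StreamingWordCount (illustrative sketch).
 * Java lambdas lose generic type information to erasure, so returns(...)
 * supplies the flatMap output type explicitly.
 */
public class StreamingWordCountLambda {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        SingleOutputStreamOperator<Tuple2<String, Integer>> summed = env
                .socketTextStream("localhost", 8888)
                // Split each line and emit (word, 1) tuples in one step.
                .flatMap((String line, Collector<Tuple2<String, Integer>> out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(Tuple2.of(word, 1));
                    }
                })
                // Type hint needed because the lambda's generic output type is erased.
                .returns(Types.TUPLE(Types.STRING, Types.INT))
                // Key by the word (tuple field f0) and keep a running count.
                .keyBy(tp -> tp.f0)
                .sum(1);

        summed.print();

        env.execute("StreamingWordCountLambda");
    }
}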

Socket input (for example via netcat: nc -lk 8888)
[Screenshot: lines of words typed into the socket session]

Console output (the N> prefix is the index of the parallel subtask that emitted the record):

11:46:53,210 INFO  org.apache.flink.runtime.state.heap.HeapKeyedStateBackend    [] - Initializing heap keyed state backend with stream factory.
11:46:53,210 INFO  org.apache.flink.runtime.state.heap.HeapKeyedStateBackend    [] - Initializing heap keyed state backend with stream factory.
6> (hbase,1)
2> (java,1)
9> (word,1)
2> (java,2)
10> (zookeeper,1)
1> (spark,1)
3> (age,1)
1> (kafka,1)
1> (scala,1)
5> (name,1)
2> (java,3)
4> (sex,1)
10> (flink,1)
3> (age,2)
9> (word,2)
3> (age,3)
2> (java,4)
