Flink Docker Compose

docker-compose.yml
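
The compose file below brings up a small standalone Flink cluster from a custom image (my-flink by default, overridable via FLINK_DOCKER_IMAGE_NAME): one JobManager with its web UI published on port 8081, and two TaskManagers that register with it.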

# docker-compose -p flink-1 up -d
version: "3.8"
services:
  jobmanager:
    image: ${FLINK_DOCKER_IMAGE_NAME:-my-flink}
    container_name: flink-jobmanager1
    privileged: true
    expose:
      - "6123"
    ports:
      - "8081:8081"
    command: jobmanager
    environment:
      - JOB_MANAGER_RPC_ADDRESS=jobmanager
      # host machine IP
      - _host=192.168.11.11

  taskmanager1:
    image: ${FLINK_DOCKER_IMAGE_NAME:-my-flink}
    container_name: flink-taskmanager1
    privileged: true
    expose:
      - "6121"
      - "6122"
    depends_on:
      - jobmanager
    command: taskmanager
    links:
      - "jobmanager:jobmanager"
    environment:
      - JOB_MANAGER_RPC_ADDRESS=jobmanager
      - _host=192.168.11.11
    
  taskmanager2:
    image: ${FLINK_DOCKER_IMAGE_NAME:-my-flink}
    container_name: flink-taskmanager2
    privileged: true
    expose:
      - "6121"
      - "6122"
    depends_on:
      - jobmanager
    command: taskmanager
    links:
      - "jobmanager:jobmanager"
    environment:
      - JOB_MANAGER_RPC_ADDRESS=jobmanager
      - _host=192.168.11.11
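
To start the cluster, run the command shown in the comment at the top of the file (docker-compose -p flink-1 up -d) and open the JobManager web UI at http://localhost:8081. The _host variable is injected into each container so the Flink job (Main, below) can locate the socket source running on the host machine; replace 192.168.11.11 with your own host's IP.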

ServerSocketDemo.java

package com.wurd.flink;

import java.io.OutputStreamWriter;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Random;

public class ServerSocketDemo {

    public static void main(String[] args) {
        new ServerSocketDemo().createSocket();
    }

    // Listen on port 3333 and write one random integer (0-99) per second to each connected client.
    public void createSocket() {
        new Thread(() -> {
            try {
                ServerSocket server = new ServerSocket();
                server.bind(new InetSocketAddress(3333));
                while (true) {
                    Socket socket = null;
                    try {
                        socket = server.accept();
                        OutputStreamWriter writer = new OutputStreamWriter(socket.getOutputStream());
                        Random random = new Random();
                        while (true) {
                            writer.write(random.nextInt(100) + "\n");
                            writer.flush();
                            try {
                                Thread.sleep(1000);
                            } catch (InterruptedException e) {
                                e.printStackTrace();
                            }
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                        if (socket != null) {
                            socket.close();
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }).start();
    }
}
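
Before wiring up Flink, you can sanity-check the number source with a plain socket client. This is a minimal sketch (the class name SocketClientCheck is made up for illustration); it assumes ServerSocketDemo is already running and listening on localhost:3333.

package com.wurd.flink;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.Socket;

public class SocketClientCheck {

    public static void main(String[] args) throws Exception {
        // Connect to the ServerSocketDemo source and print the first few numbers it emits.
        try (Socket socket = new Socket("127.0.0.1", 3333);
             BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream()))) {
            for (int i = 0; i < 5; i++) {
                System.out.println(reader.readLine()); // one random integer per second
            }
        }
    }
}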

Main.java

package com.wurd.flink;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class Main {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // The host machine IP is passed in through the _host environment variable (see docker-compose.yml).
        String host = System.getenv("_host");
        System.out.println("########: " + host);
        host = (host == null || host.isEmpty()) ? "127.0.0.1" : host;
        DataStreamSource<String> text = env.socketTextStream(host, 3333);
        text.flatMap(new Tokenizer())
                .keyBy(value -> value.f0)
                .sum(1)
                .print();
        env.execute("socket");
    }

    public static final class Tokenizer
            implements FlatMapFunction<String, Tuple2<String, Integer>> {

        @Override
        public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
            // normalize and split the line
            String[] tokens = value.toLowerCase().split("\\W+");

            // emit the pairs
            for (String token : tokens) {
                if (token.length() > 0) {
                    out.collect(new Tuple2<>(token, 1));
                }
            }
        }
    }
}
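
To try the whole pipeline locally (outside Docker), the source and the job can be started from a single JVM. A minimal sketch, assuming _host is unset so Main falls back to 127.0.0.1; the class name LocalRunDemo is made up for illustration.

package com.wurd.flink;

public class LocalRunDemo {

    public static void main(String[] args) throws Exception {
        new ServerSocketDemo().createSocket(); // bind port 3333 and emit one random number per second
        Thread.sleep(1000);                    // give the server socket a moment to bind
        Main.main(args);                       // runs on a local mini-cluster and blocks in env.execute("socket")
    }
}

Each record printed by the job is a running count keyed by the number string, e.g. (42,1) the first time 42 is emitted and (42,2) the next time.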
