flink同步mysql数据到ES

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
/*
https://nightlies.apache.org/flink/flink-docs-release-1.13/docs/connectors/table/elasticsearch/
 */
/**
 * Streams changes from a MySQL table (via the mysql-cdc connector) into an
 * Elasticsearch 6 index using Flink SQL. The pipeline is defined entirely in
 * DDL/DML: a CDC source table, an ES sink table, and a continuous INSERT.
 */
public class MysqlSinkToES {
    public static void main(String[] args) throws Exception {
        // Blink planner in streaming mode — required for the CDC table source.
        EnvironmentSettings fsSettings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, fsSettings);

        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);

        // Source table: MySQL binlog via the mysql-cdc connector.
        String sourceDDL =
                "CREATE TABLE users (\n" +
                        "  id BIGINT PRIMARY KEY NOT ENFORCED ,\n" +
                        "  name STRING,\n" +
                        "  birthday TIMESTAMP(3),\n" +
                        "  ts TIMESTAMP(3)\n" +
                        ") WITH (\n" +
                        "      'connector' = 'mysql-cdc',\n" +
                        "      'hostname' = '192.168.129.102',\n" +
                        "      'port' = '3306',\n" +
                        "      'username' = 'root',\n" +
                        "      'password' = '123456',\n" +
                        "      'server-time-zone' = 'Asia/Shanghai',\n" +
                        "      'database-name' = 'cdc',\n" +
                        "      'table-name' = 'users'\n" +
                        "      )";
        // Sink table: Elasticsearch 6 index (PRIMARY KEY enables upsert writes).
        String sinkDDL =
                "CREATE TABLE users_sink_es\n" +
                        "(\n" +
                        "    id BIGINT PRIMARY KEY NOT ENFORCED,\n" +
                        "    name STRING,\n" +
                        "    birthday TIMESTAMP(3),\n" +
                        "    ts TIMESTAMP(3)\n" +
                        ") \n" +
                        "WITH (\n" +
                        "  'connector' = 'elasticsearch-6',\n" +
                        "  'hosts' = 'http://192.168.129.103:9200',\n" +
                        "  'index' = 'users'\n" +
                        ",  'document-type' = 'doc'\n" +
                        ")";
        // Continuous sync: every change captured on `users` is written to ES.
        String transformSQL = "INSERT INTO users_sink_es SELECT * FROM users";

        tableEnv.executeSql(sourceDDL);
        tableEnv.executeSql(sinkDDL);
        // executeSql(INSERT ...) submits the streaming job immediately and
        // returns a TableResult handle for it.
        TableResult result = tableEnv.executeSql(transformSQL);
        result.print();

        // NOTE: do NOT call env.execute() here — no DataStream operators were
        // registered on `env`, so it would fail with
        // "No operators defined in streaming topology". The INSERT job is
        // already running; block on its TableResult to keep the client alive
        // (the CDC job is unbounded, so this waits indefinitely).
        result.await();
    }
}

pom.xml


<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>org.example</groupId>
    <artifactId>flink_es_test</artifactId>
    <version>1.0-SNAPSHOT</version>
    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <hadoop.version>2.6.0</hadoop.version>
        <flink.version>1.12.2</flink.version>
        <hudi.version>0.9.0</hudi.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-core</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-jdbc_2.12</artifactId>
            <version>1.10.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-jdbc_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-java</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-common</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_2.11</artifactId>
            <version>${flink.version}</version>
            <type>test-jar</type>
        </dependency>

        <dependency>
            <groupId>com.alibaba.ververica</groupId>
            <artifactId>flink-connector-mysql-cdc</artifactId>
            <version>1.2.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hudi</groupId>
            <artifactId>hudi-flink-bundle_2.11</artifactId>
            <version>${hudi.version}</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.49</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-elasticsearch6_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-csv</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-json</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.8.6</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.25</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.25</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>

        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.5.2</version>
        </dependency>

        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpcore</artifactId>
            <version>4.4.5</version>
        </dependency>
    </dependencies>
</project>

es操作命令

#创建index
curl -X PUT "192.168.129.103:9200/users" -H 'Content-Type: application/json' -d'
{
    "settings" : {
        "number_of_shards" : 3,
        "number_of_replicas" : 2
    }
}'

#查询index下全部数据 
curl "http://192.168.129.103:9200/users/doc/_search"
#删除index
curl -X DELETE "192.168.129.103:9200/users"

你可能感兴趣的:(Flink,代码示例,elasticsearch,flink,mysql)