# Logstash configuration for importing data from MySQL into Elasticsearch.

input {
    # Also accept events from stdin (handy for manual smoke-testing the pipeline).
    stdin {
    }
    jdbc {
      # JDBC connection string for the source database.
      # NOTE(review): 172.0.0.1 is not a private-range address — possibly a
      # typo for 127.0.0.1; confirm before deploying.
      jdbc_connection_string => "jdbc:mysql://172.0.0.1:3306/inssa?characterEncoding=UTF-8&useSSL=false"
      # Database credentials.
      jdbc_user => "root"
      jdbc_password => "root"
      # Interpret DB timestamps as UTC when converting to Logstash timestamps.
      jdbc_default_timezone => "UTC"
      # Path to the MySQL JDBC driver jar.
      # NOTE(review): directory says "logtash-5.1.1" (missing 's') and
      # "mysql-connector-java-5.5.19" is not a released connector version —
      # verify the actual jar path on disk.
      jdbc_driver_library => "/usr/local/logtash-5.1.1/mysql-connector-java-5.5.19.jar"
      # Driver class for MySQL (Connector/J 5.x naming).
      jdbc_driver_class => "com.mysql.jdbc.Driver"
      # Fetch results in pages so large tables are not loaded in one query.
      # Booleans and numbers are given as native values rather than strings.
      jdbc_paging_enabled => true
      lowercase_column_names => false
      jdbc_page_size => 50000
      # statement_filepath => "config-mysql/jdbc2logstash_history.sql"
      # Full-table import; with the every-minute schedule below the whole table
      # is re-read each run (the output's document_id turns this into upserts).
      statement => "select * from operationlogs"
      # Cron-style schedule: run every minute.
      schedule => "* * * * *"
      # Event type, usable for conditional routing downstream.
      type => "operationlogs"
    }


}

filter {
    # Coerce publish_time to a string before it is handed to the date filter.
    mutate {
        convert => { "publish_time" => "string" }
    }
    # Expand the JSON payload carried in "message", then drop the raw field.
    json {
        source       => "message"
        remove_field => [ "message" ]
    }
    # Derive @timestamp from publish_time, accepting either ISO8601 or
    # "yyyy-MM-dd HH:mm:ss"; naive values are read as Europe/Berlin local time.
    # NOTE(review): jdbc input above declares UTC — confirm this mismatch is intended.
    date {
        match    => [ "publish_time", "ISO8601", "yyyy-MM-dd HH:mm:ss" ]
        timezone => "Europe/Berlin"
    }
}

output {
    #if [type]=="operationlogs" {
        elasticsearch {
            # Elasticsearch endpoint; hosts takes a list of host:port strings.
            hosts => ["172.16.2.50:9201"]
            # Target index name.
            index => "inssa"
            # Use the table's id column as the document id so re-imports
            # update existing documents instead of creating duplicates.
            document_id => "%{operationLogId}"
        }
        # Echo each event to stdout as JSON lines (debugging aid).
        stdout {
            codec => json_lines
        }
    #}
}

 

# 你可能感兴趣的:(数据库部分,LogStash)