# 做此实验前请先搭建好 Logstash、Elasticsearch 和 Kafka
# 本实验的 topic 名称为 fcsh01、fcsh02、fcsh03
# 目的是根据 Kafka 的 topic 来判断,将日志写入不同的索引中

input {
  kafka {
    # Kafka broker list. The original value had leading/trailing spaces
    # inside the quotes (" 192.168.99.11:9092 "), which can break broker
    # host resolution — whitespace removed.
    bootstrap_servers => "192.168.99.11:9092"
    # Subscribe to every topic matching this regex (fcsh01/fcsh02/fcsh03/...).
    # topics_pattern takes a single string, not an array.
    topics_pattern => "fcsh.*"
    session_timeout_ms => "30000"
    # Events arrive as JSON from the producers
    codec => "json"
    consumer_threads => 12
    auto_offset_reset => "latest"
    enable_auto_commit => "true"
    auto_commit_interval_ms => "60000"
    # Required so [@metadata][kafka][topic] is populated for the
    # filter/output stages below
    decorate_events => true
  }
}
filter {
  mutate {
    # Copy the source Kafka topic name into a first-class "fcsh" field so
    # the output stage can route on it.
    # NOTE: the original split the topic on "," and then took element [0];
    # Kafka topic names may only contain [a-zA-Z0-9._-] and can never hold
    # a comma, so that split was a no-op and has been removed.
    add_field => {
      "fcsh" => "%{[@metadata][kafka][topic]}"
    }
  }
}
output {
  # Route each topic to its own daily index (same hosts/action/template for all).
  if [fcsh] == "fcsh01" {
    elasticsearch {
      hosts => ["192.168.99.11:9200"]
      index => "logstash-kafka-%{+YYYY.MM.dd}-debug"
      action => "index"
      template_name => "elk"
    }
  } else if [fcsh] == "fcsh02" {
    elasticsearch {
      hosts => ["192.168.99.11:9200"]
      index => "logstash-kafka-%{+YYYY.MM.dd}-info"
      action => "index"
      template_name => "elk"
    }
  } else if [fcsh] == "fcsh03" {
    elasticsearch {
      hosts => ["192.168.99.11:9200"]
      # The original used "-info" here as well, which collided with the
      # fcsh02 index and defeated the per-topic routing this pipeline is
      # meant to demonstrate. Given a distinct suffix — adjust the name if
      # a different one was intended.
      index => "logstash-kafka-%{+YYYY.MM.dd}-error"
      action => "index"
      template_name => "elk"
    }
  }
  # Echo every event to stdout for debugging
  stdout {
    codec => rubydebug
  }
}