Logstash: indexer-side configuration for separating multiple files into different indices

Two WebLogic access logs are shipped into separate Redis lists; a single indexer reads both keys and writes each event to an index named after its type field.

[elk@rsyslog logstash]$ cat logstash_weblogic_access_log_index.conf 
input {
        redis {
                host => "10.4.145.93"
                data_type => "list"
                key => "weblogic_10.4.32.111_access:redis"
                password => "1234567"
                port => "6379"
        }

        redis {
                host => "10.4.145.93"
                data_type => "list"
                key => "weblogic_10.4.32.48_access:redis"
                password => "1234567"
                port => "6379"
        }
}

output {
        elasticsearch {
                hosts => "10.4.145.93:9200"
                index => "logstash-%{type}-%{+YYYY.MM.dd}"
        }
        stdout {
                codec => rubydebug
        }
}
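
The %{type} used in the index name is not set anywhere in this indexer config; it has to arrive on the events that the shipper side pushes into Redis. The shipper config is not shown in this post; a minimal sketch of what it could look like for one of the two log files (paths and options are assumptions, not taken from the original) is:

# Hypothetical shipper-side config (not part of the original post):
# one file input per access log, with type set so the indexer can
# route each file to its own index.
input {
        file {
                path => "/data01/hdxt/weblogic-10.4.32.111-access.*"
                type => "weblogic_10.4.32.111_access"
        }
}
output {
        redis {
                host => "10.4.145.93"
                port => "6379"
                password => "1234567"
                data_type => "list"
                key => "weblogic_10.4.32.111_access:redis"
        }
}

With the type carried through, index => "logstash-%{type}-%{+YYYY.MM.dd}" expands to something like logstash-weblogic_10.4.32.111_access-2017.07.05, giving each log file its own daily index.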


[elk@rsyslog hdxt]$ echo "10.4.32.111--tlcb88888" >> weblogic-10.4.32.111-access.2017-03-18
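
The line appended here is just a test string, not a real access-log entry. In the indexer output below it shows up as a messages field prefixed with the type and carries a _grokparsefailure tag, which suggests the shipper also runs a grok/mutate filter. A hypothetical sketch of such a filter (the pattern and field names are assumptions, chosen only to match the output shown below):

filter {
        # grok against a real access-log pattern fails on the plain test line,
        # which would explain the _grokparsefailure tag (assumed)
        grok {
                match => { "message" => "%{COMBINEDAPACHELOG}" }
        }
        # build the messages field seen in the output: "<type>-<original line>"
        mutate {
                add_field => { "messages" => "%{type}-%{message}" }
        }
}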


[elk@rsyslog logstash]$ logstash -f logstash_weblogic_access_log_index.conf 
Settings: Default pipeline workers: 4
Pipeline main started
{
      "@version" => "1",
    "@timestamp" => "2017-07-05T01:21:40.880Z",
          "path" => "/data01/hdxt/weblogic-10.4.32.111-access.2017-03-18",
          "host" => "rsyslog",
          "type" => "weblogic_10.4.32.111_access",
      "messages" => "weblogic_10.4.32.111_access-10.4.32.111--tlcb88888",
          "tags" => [
        [0] "_grokparsefailure"
    ]
}
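
To confirm that each file ends up in its own index, the Elasticsearch cat API can be queried (this check is not in the original output; the command below is a standard Elasticsearch API call):

curl 'http://10.4.145.93:9200/_cat/indices?v'

One index per type should be listed, e.g. logstash-weblogic_10.4.32.111_access-2017.07.05 and logstash-weblogic_10.4.32.48_access-2017.07.05.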
