运维开发网

在Logstash的配置文件中对日志事件进行区分

运维开发网 https://www.qedev.com 2020-04-26 14:27 出处:网络 作者:运维开发网整理
摘要:本文介绍在 Logstash 配置文件中区分日志事件的三种常见方式:为多个日志文件输入定义 type 或新增字段,以及以 redis、kafka 作为输入源时的配置示例。

1、多个日志文件作为输入源

input {
    # Distinguish log events from this source by assigning a "type" tag
    file {
        path => ["/var/log/nginx/access.log"]
        type => "nginx_access"
        start_position => "beginning"
    }
    
    # Distinguish log events from this source by assigning a "type" tag
    file {
        path => ["/var/log/nginx/error.log"]
        type => "nginx_error"
        start_position => "beginning"
    }

    # Distinguish log events from this source by adding a custom field instead of "type"
    file {
        path => ["/var/log/nginx/api.log"]
        add_field => {"myid" => "api"}
        start_position => "beginning"
    }
}

filter {
    # Branch on the event's type/field so each log format gets its own parsing.
    # NOTE(review): the grok patterns below are empty placeholders from the
    # original article — fill in a real pattern for each log format before use.
    if [type] == "nginx_access" {
        grok {
            match => { "message" => "" }
        }
    }

    if [type] == "nginx_error" {
        grok {
            match => { "message" => "" }
        }
    }

    if [myid] == "api" {
        grok {
            match => { "message" => "" }
        }
    }
}

output {
    # Route events to different Elasticsearch indices based on their type/field.
    # Fixed: the original used typographic quotes (‘nginx_access‘), which are
    # invalid in Logstash config syntax and cause a configuration parse error.
    if [type] == "nginx_access" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "logstash_access-%{+YYYY.MM.dd}"
        }
    }

    if [type] == "nginx_error" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "logstash_error-%{+YYYY.MM.dd}"
        }
    }

    if [myid] == "api" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "logstash_api-%{+YYYY.MM.dd}"
        }
    }
}

 

2、以redis作为输入源

input {
    # Read events from a Redis list; tag them with a type for downstream routing.
    # Fixed: the original used typographic quotes (‘…‘), which are invalid in
    # Logstash config syntax, and quoted the port — `port` is a number setting.
    redis {
        host => "10.105.199.10"
        type => "web_error"
        port => 8000
        data_type => "list"
        key => "web_error"
        password => "E1e7ed7eF437416165597b956fac004e"
        db => 0
    }

}

output {
    # Only index events that the redis input tagged as "web_error",
    # using a daily-rolling index name.
    if [type] == "web_error" {
        elasticsearch {
            hosts => ["127.0.0.1:9200"]
            index => "logstash_web_error-%{+YYYY.MM.dd}"
        }
    }

}

 

3、以kafka作为输入源

input {
    # Consume events from a Kafka topic; messages are expected to be JSON
    # and are decoded into event fields by the json codec.
    kafka {
        bootstrap_servers => "10.105.199.10:9092"
        topics => ["www.example.com"]
        codec => "json"
    }
}

filter {
    # NOTE(review): the match value below is a Chinese placeholder meaning
    # "regular expression that matches the nginx log" — replace it with a
    # real grok pattern (e.g. %{COMBINEDAPACHELOG}) before deploying.
    grok {
        match => {
            "message" => "正则表达式匹配nginx日志"
        }
    }
}

output {
    # Ship all events to Elasticsearch under a per-site, daily-rolling index.
    elasticsearch {
        hosts => ["127.0.0.1:9200"]
        index => "logstash-www.example.com_%{+YYYY.MM.dd}"
    }
}
0

精彩评论

暂无评论...
验证码 换一张
取 消