ELKStack实时分析Haproxy访问日志配置

1.Haproxy配置日志规则

在/etc/haproxy/haproxy.conf的frontend下增加

# Enable HTTP-level logging (one detailed line per request).
option httplog
# Log as soon as the request is processed, without waiting for the full
# response; byte/time counters are then prefixed with "+" in the log line
# (which is why the grok pattern below matches \+%{NOTSPACE:...}).
option logasap
# Send logs via syslog to the remote log server, facility local5.
# Replace LogServerIP with the actual address of the syslog/Logstash host.
log LogServerIP local5
 
# Capture selected request headers into the log line (truncated to "len").
capture request header Host len 40
capture request header X-Forwarded-For len 50
#capture request header Accept-Language len 50
capture request header Referer len 200
capture request header User-Agent len 200

2.syslog配置开启远程接收

3.Logstash配置

shipper（读取本地日志文件并写入 Redis）

input {
        file {
                # Tail haproxy's syslog output file and read it from the top.
                # "beginning" must be a quoted string — the original used an
                # unquoted bareword, which is not valid Logstash config syntax.
                path => "/var/log/haproxy.log"
                start_position => "beginning"
                # Persist the read offset (sincedb) on every read.
                sincedb_write_interval => 0
                type => "HAPROXY_LOG"
                # Haproxy log lines are not UTF-8; decode as Latin-1 to avoid
                # codec errors on bytes in captured headers.
                codec => plain {
                        charset => "ISO-8859-1"
                }
        }
}
 
 
 
output {
        #stdout { codec => rubydebug}
        # Push raw events onto a Redis list; the indexer config pops from the
        # same key ("logstash:Haproxy_log") on the same host/port.
        redis {
                data_type => "list"
                key => "logstash:Haproxy_log"
                host => "192.168.1.2"
                port => 6379
        }
}

indexer（从 Redis 读取并写入 Elasticsearch）

在logstash配置文件目录下新建patterns目录,并在这个目录下新建一个haproxy的文件

vim /usr/local/logstash2.2.2/conf/patterns/haproxy

# Anchor the time so it is not preceded/followed by another digit.
# The original used (?!<[0-9]) — a negative LOOKAHEAD for a literal "<" plus a
# digit — which is a common typo for the intended negative LOOKBEHIND (?<![0-9]).
HAPROXYTIME (?<![0-9])%{HOUR:haproxy_hour}:%{MINUTE:haproxy_minute}(?::%{SECOND:haproxy_second})(?![0-9])
HAPROXYDATE %{MONTHDAY:haproxy_monthday}/%{MONTH:haproxy_month}/%{YEAR:haproxy_year}:%{HAPROXYTIME:haproxy_time}.%{INT:haproxy_milliseconds}

vim /usr/local/logstash2.2.2/conf/haproxy.conf

# Indexer input: pop events from the Redis list that the shipper pushes to.
# data_type/key/host/port must match the shipper's redis output exactly.
input {
    redis {
        data_type => "list"
        key => "logstash:Haproxy_log"
        host => "192.168.1.2"
        port => 6379
        # Run 5 consumer threads against the list.
        threads => 5
    type => "HAPROXY_LOG"
    }
}
 
filter {
    # Parse the haproxy "option httplog" syslog line into named fields.
    # Fixes vs. the original: a comma was missing between "message" and the
    # pattern, and the %{IP:X_Forward_IP3} token was missing its closing "}".
    grok {
        patterns_dir => "./patterns"
        match => ["message", "%{SYSLOGTIMESTAMP:Syslog_Timestamp} %{IPORHOST:Syslog_Server} %{SYSLOGPROG}: %{IP:Client_IP}:%{INT:Client_Port} \[%{HAPROXYDATE:Accept_Date}\] %{NOTSPACE:Frontend_Name} %{NOTSPACE:Backend_Name}/%{NOTSPACE:Server_Name} %{INT:Time_Request}/%{INT:Time_Queue}/%{INT:Time_Backend_Connect}/%{INT:Time_Backend_Response}/\+%{NOTSPACE:Time_Duration} %{INT:Http_Status_Code} \+%{NOTSPACE:Bytes_Read} %{DATA:Captured_Request_Cookie} %{DATA:Captured_Response_Cookie} %{NOTSPACE:Termination_State} %{INT:Actconn}/%{INT:Feconn}/%{INT:Beconn}/%{INT:Srvconn}/%{NOTSPACE:Retries} %{INT:Srv_Queue}/%{INT:Backend_Queue} (\{%{IPORHOST:Site}\|?((%{IP:X_Forward_IP}|)(, |)(%{IP:X_Forward_IP2}|)(, |)(%{IP:X_Forward_IP3}|)(%{DATA:X_Forward_Other}|))?\|?(%{URI:Http_Referer})?\|%{GREEDYDATA:User_Agent}\})?( )( )?\"(<BADREQ>|(%{WORD:Http_Method} (%{URIPROTO:Http_Proto}://)?(?:%{USER:Http_User}(?::[^@]*)?@)?(?:%{URIHOST:Http_Host})?(?:%{URIPATHPARAM:Http_Request})?( HTTP/%{NUMBER:Http_Version})?))?\""]
    }
    # Expand the captured User-Agent header into structured fields under [ua].
    useragent {
        source => "User_Agent"
        target => "ua"
    }
    # Prefer the X-Forwarded-For address when the field is non-empty (any
    # character matches "."); otherwise geolocate the TCP peer address.
    # The original was missing the "}" closing the if-block before "else".
    # geoip "source" is documented as a string, not an array.
    if [X_Forward_IP] =~ "." {
        geoip {
            source => "X_Forward_IP"
            database => "/usr/local/logstash2.2.2/bin/GeoLiteCity.dat"
        }
    } else {
        geoip {
            source => "Client_IP"
            database => "/usr/local/logstash2.2.2/bin/GeoLiteCity.dat"
        }
    }
    # Use haproxy's accept date as @timestamp (comma was missing originally).
    date {
        match => ["Accept_Date", "dd/MMM/yyyy:HH:mm:ss.SSS"]
    }
    mutate {
        # Field names are case-sensitive. The grok patterns capture lower-case
        # haproxy_* fields and the file input adds lower-case host/path, so the
        # original capitalized names (Haproxy_Monthday, Host, Path, ...) never
        # matched anything and the removals silently did nothing.
        remove_field => ["Syslog_Timestamp"]
        remove_field => ["Accept_Date"]
        remove_field => [ "host" ]
        remove_field => [ "Syslog_Server" ]
        remove_field => [ "path" ]
        remove_field => [ "pid" ]
        remove_field => [ "Client_Port" ]
        remove_field => [ "program" ]
        remove_field => [ "haproxy_monthday" ]
        remove_field => [ "haproxy_month" ]
        remove_field => [ "haproxy_year" ]
        remove_field => [ "haproxy_hour" ]
        remove_field => [ "haproxy_minute" ]
        remove_field => [ "haproxy_second" ]
        remove_field => [ "haproxy_milliseconds" ]
        remove_field => [ "Frontend_Name" ]
        remove_field => [ "Captured_Response_Cookie" ]
        remove_field => [ "Captured_Request_Cookie" ]
        # Cast numeric counters so Kibana/ES can aggregate them.
        convert => [ "Http_Status_Code","integer" ]
        convert => [ "Bytes_Read","integer" ]
        convert => [ "Time_Duration","integer" ]
        convert => [ "Time_Backend_Response","integer" ]
        convert => [ "Actconn","integer" ]
        convert => [ "Feconn","integer" ]
        convert => [ "Beconn","integer" ]
        convert => [ "Srvconn","integer" ]
        convert => [ "Retries","integer" ]
        convert => [ "Srv_Queue","integer" ]
        convert => [ "Backend_Queue","integer" ]
        convert => [ "Time_Request","integer" ]
        convert => [ "Time_Queue","integer" ]
        convert => [ "Time_Backend_Connect","integer" ]
    }
}
 
 
output {
     #stdout { codec => rubydebug }
        # Write parsed events to Elasticsearch, one index per day.
        elasticsearch {
                hosts => "192.168.2.240:9200"
                index => "logstash-haproxy-%{+YYYY.MM.dd}"
        }
}


上一篇:InfluxData【环境搭建 02】时序数据库客户端 InfluxCLI 最新版本安装启动验证


下一篇:SpringBoot实战(二)之计划任务