Deploying ELFK with docker-compose

Deploy ELFK (Elasticsearch, Logstash, Filebeat, Kibana) with docker-compose. Latest ELFK version: 7.5.1

mkdir -p /home/elfk/{elasticsearch,logstash,kibana,filebeat}/config
mkdir /home/elfk/elasticsearch/data /home/elfk/logstash/pipeline
cd /home/elfk
echo 'ELK_VERSION=7.5.1' > .env

tree .
.
├── docker-compose.yml
├── elasticsearch
│   ├── config
│   │   └── elasticsearch.yml
│   ├── data
│   └── Dockerfile
├── filebeat
│   ├── config
│   │   └── filebeat.yml
│   └── Dockerfile
├── kibana
│   ├── config
│   │   └── kibana.yml
│   └── Dockerfile
└── logstash
    ├── config
    │   └── logstash.yml
    ├── Dockerfile
    └── pipeline
        └── logstash.conf

10 directories, 10 files


elasticsearch

Dockerfile

vim /home/elfk/elasticsearch/Dockerfile

ARG ELK_VERSION=7.5.1

# https://github.com/elastic/elasticsearch-docker
# FROM docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION}
FROM elasticsearch:${ELK_VERSION}
# Add your elasticsearch plugins setup here
# Example: RUN elasticsearch-plugin install analysis-icu

elasticsearch.yml

vim /home/elfk/elasticsearch/config/elasticsearch.yml

---
## Default Elasticsearch configuration from Elasticsearch base image.
## https://github.com/elastic/elasticsearch/blob/master/distribution/docker/src/docker/config/elasticsearch.yml
#
cluster.name: "docker-cluster"
network.host: 0.0.0.0

## X-Pack settings
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-xpack.html
#
xpack.license.self_generated.type: trial    # "trial" is a 30-day trial license; change to "basic" for the free tier
xpack.security.enabled: true
xpack.monitoring.collection.enabled: true
http.cors.enabled: true
http.cors.allow-origin: "*"
http.cors.allow-headers: Authorization,X-Requested-With,Content-Length,Content-Type
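Once the stack is running (docker-compose.yml below), the active license type can be confirmed through the license API, e.g. with the elastic/changeme credentials used throughout this guide:

curl -u elastic:changeme http://localhost:9200/_license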


logstash

Dockerfile

vim /home/elfk/logstash/Dockerfile

ARG ELK_VERSION=7.5.1

# https://github.com/elastic/logstash-docker
# FROM docker.elastic.co/logstash/logstash:${ELK_VERSION}
FROM logstash:${ELK_VERSION}

# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json

RUN logstash-plugin install logstash-filter-multiline \
    && logstash-plugin install logstash-output-zabbix

logstash.yml

vim /home/elfk/logstash/config/logstash.yml

---
## Default Logstash configuration from Logstash base image.
## https://github.com/elastic/logstash/blob/master/docker/data/logstash/config/logstash-full.yml
#
http.host: "0.0.0.0"
xpack.monitoring.elasticsearch.hosts: [ "http://elasticsearch:9200" ]

## X-Pack security credentials
#
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.username: elastic
xpack.monitoring.elasticsearch.password: changeme
xpack.monitoring.collection.interval: 10s

pipeline

vim /home/elfk/logstash/pipeline/logstash.conf

input {
    tcp {
    	port => 5000
    }
}

#input {
#    beats {
#        port => 5044
#    }
#}

output {
    elasticsearch {
    	hosts => "elasticsearch:9200"
    	user => "elastic"
    	password => "changeme"
    }
}
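Once the stack is up, the tcp input can be smoke-tested by writing a line to the published port 5000; this assumes nc (netcat) is available on the host:

echo 'hello from the tcp input' | nc localhost 5000

# the event should land in the default logstash-* indices; the exact index name depends on ILM settings
curl -u elastic:changeme 'http://localhost:9200/logstash-*/_search?q=hello&pretty'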


kibana

Dockerfile

vim /home/elfk/kibana/Dockerfile

ARG ELK_VERSION=7.5.1

# https://github.com/elastic/kibana-docker
# FROM docker.elastic.co/kibana/kibana:${ELK_VERSION}
FROM kibana:${ELK_VERSION}

# Add your kibana plugins setup here
# Example: RUN kibana-plugin install

kibana.yml

vim /home/elfk/kibana/config/kibana.yml

---
## Default Kibana configuration from Kibana base image.
## https://github.com/elastic/kibana/blob/master/src/dev/build/tasks/os_packages/docker_generator/templates/kibana_yml.template.js
#
server.name: kibana
server.host: "0"
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
xpack.monitoring.ui.container.elasticsearch.enabled: true

## X-Pack security credentials
#
elasticsearch.username: elastic
elasticsearch.password: changeme
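Once Kibana is up, its status API gives a quick readiness check; with security enabled, the same elastic/changeme credentials should work:

curl -u elastic:changeme http://localhost:5601/api/status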


docker-compose.yml

vim /home/elfk/docker-compose.yml

version: '3.7'

services:
  elasticsearch:
    build:
      context: elasticsearch/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./elasticsearch/config/elasticsearch.yml
        target: /usr/share/elasticsearch/config/elasticsearch.yml
        read_only: true
      - type: volume
        source: elasticsearch
        target: /usr/share/elasticsearch/data
    ports:
      - "9200:9200"
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      ELASTIC_PASSWORD: changeme
      discovery.type: single-node
    networks:
      - elk

  logstash:
    depends_on:
      - elasticsearch
    build:
      context: logstash/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./logstash/config/logstash.yml
        target: /usr/share/logstash/config/logstash.yml
        read_only: true
      - type: bind
        source: ./logstash/pipeline
        target: /usr/share/logstash/pipeline
        read_only: true
    ports:
      - "5000:5000"
      - "9600:9600"
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk

  kibana:
    depends_on:
      - elasticsearch
    build:
      context: kibana/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./kibana/config/kibana.yml
        target: /usr/share/kibana/config/kibana.yml
        read_only: true
    ports:
      - "5601:5601"
    networks:
      - elk

volumes:
  elasticsearch:
    driver: local
    driver_opts:
      type: none
      o: bind
      device: /home/elfk/elasticsearch/data
networks:
  elk:
    driver: bridge
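Before bringing the stack up, docker-compose config can be used to validate the file and the .env variable substitution; it prints the fully resolved configuration or a parse error:

cd /home/elfk
docker-compose config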

Elasticsearch requires vm.max_map_count to be at least 262144, so raise it persistently on the host:

if [ $(grep 'vm.max_map_count' /etc/sysctl.conf | wc -l) -eq 0 ]; then
    echo 'vm.max_map_count=655360' >> /etc/sysctl.conf
fi
sysctl -p
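To confirm the kernel setting took effect:

sysctl vm.max_map_count
# expected output: vm.max_map_count = 655360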

docker-compose up --build -d

docker ps

CONTAINER ID        IMAGE                COMMAND                  CREATED             STATUS              PORTS                                                      NAMES
448e2f464297        elfk_logstash        "/usr/local/bin/dock…"   12 seconds ago      Up 9 seconds        0.0.0.0:5000->5000/tcp, 0.0.0.0:9600->9600/tcp, 5044/tcp   elfk_logstash_1
ca391653eb39        elfk_kibana          "/usr/local/bin/dumb…"   12 seconds ago      Up 9 seconds        0.0.0.0:5601->5601/tcp                                     elfk_kibana_1
2cd23fd30f61        elfk_elasticsearch   "/usr/local/bin/dock…"   14 seconds ago      Up 12 seconds       0.0.0.0:9200->9200/tcp, 0.0.0.0:9300->9300/tcp             elfk_elasticsearch_1

netstat -lntp

Active Internet connections (only servers)
Proto Recv-Q Send-Q Local Address           Foreign Address         State       PID/Program name
tcp        0      0 0.0.0.0:22              0.0.0.0:*               LISTEN      52770/sshd          
tcp        0      0 127.0.0.1:25            0.0.0.0:*               LISTEN      23263/master        
tcp6       0      0 :::9200                 :::*                    LISTEN      127138/docker-proxy 
tcp6       0      0 :::9300                 :::*                    LISTEN      127126/docker-proxy 
tcp6       0      0 :::22                   :::*                    LISTEN      52770/sshd          
tcp6       0      0 ::1:25                  :::*                    LISTEN      23263/master        
tcp6       0      0 :::9600                 :::*                    LISTEN      127275/docker-proxy 
tcp6       0      0 :::5601                 :::*                    LISTEN      127237/docker-proxy 
tcp6       0      0 :::5000                 :::*                    LISTEN      127300/docker-proxy
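With all ports listening, verify that Elasticsearch itself responds and the cluster is healthy, using the elastic/changeme credentials configured above:

curl -u elastic:changeme 'http://localhost:9200/_cluster/health?pretty'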


filebeat

Dockerfile

vim /home/elfk/filebeat/Dockerfile

ARG ELK_VERSION=7.5.1

# https://github.com/elastic/beats-docker
# FROM docker.elastic.co/beats/filebeat:${ELK_VERSION}
FROM elastic/filebeat:${ELK_VERSION}
COPY config/filebeat.yml /usr/share/filebeat/filebeat.yml

filebeat.yml

vim /home/elfk/filebeat/config/filebeat.yml

---
filebeat.inputs:
- type: log
  enabled: false
  paths:
    - /var/log/*.log

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

processors:
  - add_host_metadata: ~
  - add_cloud_metadata: ~

output.elasticsearch:
  hosts: ['elasticsearch:9200']
  username: elastic
  password: changeme

#output.logstash:
  # The Logstash hosts
  #hosts: ["localhost:5044"]

cd filebeat/

docker build -t elfk_filebeat:latest .
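The baked-in configuration can be sanity-checked before running a real collection; filebeat test config is a built-in Filebeat subcommand, passed here as the container command the same way the later docker run does:

docker run --rm elfk_filebeat:latest filebeat test config
# expected output: Config OK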


Log collection

  • Log in to Kibana:

Address: ip:5601

Account: elastic, password: changeme


  • Modify the Filebeat configuration:
docker run -d \
  --name=filebeat \
  --user=root \
  -v /var/lib/docker/containers:/var/lib/docker/containers:ro \
  -v /var/run/docker.sock:/var/run/docker.sock:ro \
  -v /home/logs/nginx.log:/home/logs/nginx.log:rw \
  elfk_filebeat:latest filebeat -e -strict.perms=false \
  -E output.elasticsearch.hosts=["localhost:9200"]

docker exec -it filebeat bash
vi /usr/share/filebeat/filebeat.yml

---
filebeat.inputs:
- type: log
  enabled: false
  paths:
    - /var/log/*.log
- type: log
  enabled: true
  paths:
    - /home/logs/nginx.log

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

processors:
  - add_host_metadata: ~
  - add_cloud_metadata: ~

output.elasticsearch:
  hosts: ['elasticsearch:9200']
  username: elastic
  password: changeme

#output.logstash:
  #hosts: ["localhost:5044"]
  #enabled: true
  #worker: 1
  #compression_level: 3

docker restart filebeat

mkdir /home/logs
echo 'This message reached the es container via the filebeat container, 111!!!' > /home/logs/nginx.log
echo 'This message reached the es container via the filebeat container, 222!!!' >> /home/logs/nginx.log
echo 'This message reached the es container via the filebeat container, 333!!!' >> /home/logs/nginx.log


Create an index pattern in Kibana; a filebeat index shows up automatically, because no custom index was defined in the Filebeat configuration file.


This achieves the goal of log collection.

Put all the logs you want to collect into one directory on the host, map that directory into the filebeat container (remember to grant rw permission), then configure the collection and a custom index in the Filebeat configuration file; that completes the log collection setup.
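A minimal sketch of such a custom index, using a hypothetical nginx index name; in Filebeat 7.x, overriding the index also requires a template name/pattern and disabling ILM, otherwise the index setting is ignored:

output.elasticsearch:
  hosts: ['elasticsearch:9200']
  username: elastic
  password: changeme
  index: "nginx-%{+yyyy.MM.dd}"    # hypothetical custom index name

setup.template.name: "nginx"       # required when index is overridden
setup.template.pattern: "nginx-*"
setup.ilm.enabled: false           # ILM would otherwise override the custom index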

This only demonstrates Filebeat shipping logs straight to Elasticsearch, which is not really suitable in practice, because the logs receive no processing at all.

Filebeat should instead ship its logs to Logstash, which processes them and forwards them to Elasticsearch, with Kibana displaying the result; this flow can also be combined with Zabbix to alert on error logs.
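A minimal pipeline sketch of that flow, using the beats input already stubbed out above and the logstash-output-zabbix plugin installed earlier; the Zabbix server address and item key below are hypothetical placeholders:

input {
    beats {
        port => 5044
    }
}

filter {
    # tag error lines and attach the fields the zabbix output reads
    if "ERROR" in [message] {
        mutate {
            add_tag   => [ "zabbix-alert" ]
            add_field => {
                "zabbix_host" => "%{[host][name]}"   # monitored host name as registered in zabbix
                "zabbix_key"  => "log.error"         # hypothetical item key defined on the zabbix server
            }
        }
    }
}

output {
    elasticsearch {
        hosts    => "elasticsearch:9200"
        user     => "elastic"
        password => "changeme"
    }
    if "zabbix-alert" in [tags] {
        zabbix {
            zabbix_server_host => "192.168.1.10"     # hypothetical zabbix server address
            zabbix_host        => "zabbix_host"      # event field holding the host name
            zabbix_key         => "zabbix_key"       # event field holding the item key
            zabbix_value       => "message"          # event field sent as the item value
        }
    }
}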

Everything above is stored in my personal GitHub: docker-compose

