version: '3'
services:
  elasticsearch:
    image: elasticsearch:latest
    container_name: elasticsearch
    restart: always
    networks:
      - elk
    ports:
      - '9200:9200'
      - '9300:9300'
    volumes:
      - /mnt/elk/elasticsearch/data:/usr/share/elasticsearch/data
  kibana:
    image: kibana:latest
    container_name: kibana
    restart: always
    networks:
      - elk
    ports:
      - "5601:5601"
    depends_on:
      - elasticsearch
networks:
  elk:
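With the compose file saved (assuming the standard name docker-compose.yml), the two containers can be started and checked from the shell; Elasticsearch should answer on port 9200 and Kibana on 5601:

# start Elasticsearch and Kibana in the background
docker-compose up -d
# Elasticsearch should return its version banner as JSON
curl http://localhost:9200
# Kibana may take a minute before it responds on its web port
curl -I http://localhost:5601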
Run the Spring Boot test container, mounting the host's /mnt/logs directory into it so the log files it writes are visible to Logstash:

docker run --name test-springboot -p 1001:80 -v /mnt/logs:/mnt/logs -t test-springboot
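For Logstash to have anything to read, the application inside test-springboot must write its log file under /mnt/logs. A minimal sketch, assuming a stock Spring Boot application with default Logback and a hypothetical file name spring.log, is a single line in application.properties (logging.file on older Spring Boot versions, logging.file.name on 2.2 and later):

# write the application log into the mounted directory (the file name is an assumption)
logging.file.name=/mnt/logs/spring.log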
Logstash is added as another compose service; it mounts its pipeline configuration directory as well as the shared /mnt/logs directory:

  logstash:
    image: logstash:latest
    container_name: logstash
    restart: always
    ports:
      - "9601:9601"
    volumes:
      - /mnt/elk/logstash/config-dir:/config-dir
      - /mnt/logs:/mnt/logs
    command: logstash -f /config-dir

Inside the mounted config-dir (/mnt/elk/logstash/config-dir on the host), create the pipeline configuration:
input {
  file {
    path => "/mnt/logs/*.log"
    type => "web_log"
    start_position => "beginning"
  }
}
output {
  elasticsearch {
    hosts => ["192.168.1.18:9200"]
    index => "log-%{+YYYY.MM.dd}"
  }
}
input: the input plugin; here file is used to read from log files.
output: the output destination; here events are sent to the Elasticsearch instance at 192.168.1.18.
path: the location of the log files to read; the * means every file in the directory with the .log suffix is picked up.
type: a custom type assigned to these events, stored as the type field in Elasticsearch.
start_position: where Logstash begins reading a log file; beginning reads from the start, but only the first time a file is seen. On later runs Logstash resumes from its recorded position and reads only new lines.
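Once the containers are running and the application has written some log lines, you can check whether the pipeline works end to end by asking Elasticsearch which indices exist and sampling a document (192.168.1.18 is the same Elasticsearch address used in the output section above):

# a log-YYYY.MM.dd index should appear once Logstash has shipped data
curl 'http://192.168.1.18:9200/_cat/indices?v'
# fetch one indexed log document to confirm the type and message fields
curl 'http://192.168.1.18:9200/log-*/_search?pretty&size=1'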