**通俗来说,ELK由Elasticsearch、Logstash和Kibana三部分组件组成;
Elasticsearch是个开源分布式搜索引擎,它的特色有:分布式,零配置,自动发现,索引自动分片,索引副本机制,restful风格接口,多数据源,自动搜索负载等。
Logstash是一个完全开源的工具,它可以对你的日志进行收集、分析,并将其存储供之后使用。
Kibana 是一个开源和免费的工具,它可以为 Logstash 和 ElasticSearch 提供日志分析友好的 Web 界面,可以帮助您汇总、分析和搜索重要数据日志。**
Logstash: logstash server端用来搜集日志;
Elasticsearch: 存储各种日志;
Kibana: web化接口用作查询和可视化日志;
Logstash Forwarder: logstash client端用来通过 lumberjack 网络协议发送日志到logstash server;
在需要收集日志的所有服务上部署logstash,作为logstash
agent用于监控并过滤收集日志,将过滤后的内容发送到Redis,然后logstash
indexer将日志收集在一起交给全文搜索服务ElasticSearch,可以用ElasticSearch进行自定义搜索,通过Kibana 来结合自定义搜索进行页面展示。
[root@elk ~]# yum -y install java [root@elk ~]# java -version java version "1.7.0_141" OpenJDK Runtime Environment (rhel-2.6.10.1.el6_9-x86_64 u141-b02) OpenJDK 64-Bit Server VM (build 24.141-b02, mixed mode)
[root@elk ~]# rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch
[root@elk ~]# vim /etc/yum.repos.d/elasticsearch.repo [elasticsearch-2.x] name=Elasticsearch repository for 2.x packages baseurl=http://packages.elastic.co/elasticsearch/2.x/centos gpgcheck=1 gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch enabled=1
[root@elk ~]# yum -y install elasticsearch
[root@elk ~]# yum -y install java [root@elk ~]# java -version java version "1.7.0_141" OpenJDK Runtime Environment (rhel-2.6.10.1.el6_9-x86_64 u141-b02) OpenJDK 64-Bit Server VM (build 24.141-b02, mixed mode)
[root@elk ~]# rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch
[root@elk ~]# vim /etc/yum.repos.d/logstash.repo [logstash-2.3] name=Logstash repository for 2.3.x packages baseurl=https://packages.elastic.co/logstash/2.3/centos gpgcheck=1 gpgkey=https://packages.elastic.co/GPG-KEY-elasticsearch enabled=1
[root@elk ~]# yum -y install logstash
[root@elk ~]# rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch
[root@elk ~]# vim /etc/yum.repos.d/kibana.repo [kibana-4.5] name=kibana repository for 4.5.x packages baseurl=http://packages.elastic.co/kibana/4.5/centos gpgcheck=1 gpgkey=https://packages.elastic.co/GPG-KEY-elasticsearch enabled=1
[root@elk ~]# yum -y install kibana
[root@elk ~]# grep '^[a-zA-Z]' /etc/elasticsearch/elasticsearch.yml node.name: elk path.data: /data/es-data path.logs: /var/log/elasticsearch bootstrap.memory_lock: true network.host: 10.0.0.201 http.port: 9200
[root@elk ~]# mkdir -p /data/es-data
[root@elk ~]# chown -R elasticsearch:elasticsearch /data/es-data
[root@elk ~]# /etc/init.d/elasticsearch start [root@elk ~]# netstat -lntup | grep 9200 tcp 0 0 ::ffff:10.0.0.201:9200 :::* LISTEN 46675/java
[root@elk ~]# curl 10.0.0.201:9200 { "name" : "elk", "cluster_name" : "elasticsearch", "cluster_uuid" : "EK9OKreaRguU91XGDny6DA", "version" : { "number" : "2.4.5", "build_hash" : "c849dd13904f53e63e88efc33b2ceeda0b6a1276", "build_timestamp" : "2017-04-24T16:18:17Z", "build_snapshot" : false, "lucene_version" : "5.5.4" }, "tagline" : "You Know, for Search" }
[root@elk ~]# /usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head
[root@elk ~]# /opt/logstash/bin/logstash -e 'input { stdin{} } output { stdout{ codec =>rubydebug }}' hello world Settings: Default pipeline workers: 1 Pipeline main started { "message" => "hello world", "@version" => "1", "@timestamp" => "2017-07-03T08:21:16.035Z", "host" => "elk" }
[root@elk ~]# /opt/logstash/bin/logstash -e 'input { stdin{} } output { file { path =>"/tmp/log-%{+YYYY.MM.dd}messages.log"}}' Settings: Default pipeline workers: 1 Pipeline main started hello world
[root@elk ~]# tailf /tmp/log-2017.07.03messages.log {"message":"hello world","@version":"1","@timestamp":"2017-07-03T08:28:44.672Z","host":"elk"}
[root@elk ~]# /opt/logstash/bin/logstash -e 'input { stdin{} } output { elasticsearch { hosts => ["10.0.0.201"] index => "mytest-%{+YYYY.MM.dd}" }}'
[root@elk ~]# cd /data/es-data/ [root@elk es-data]# ls elasticsearch [root@elk es-data]# tree . └── elasticsearch └── nodes └── 0 ├── indices │ └── mytest-2017.07.03
[root@elk ~]# egrep "^[a-zA-Z]" /opt/kibana/config/kibana.yml server.port: 5601 server.host: "0.0.0.0" elasticsearch.url: "http://10.0.0.201:9200"
[root@elk ~]# /etc/init.d/kibana start kibana started [root@elk ~]# netstat -lntup| grep 5601 tcp 0 0 0.0.0.0:5601 0.0.0.0:* LISTEN 47353/node
[root@elk ~]# cat /etc/logstash/conf.d/system-log.conf input{ file { path => ["/var/log/messages","/var/log/secure"] type => "system-log" start_position => "beginning" } } filter{ } output{ elasticsearch { hosts => ["10.0.0.201:9200"] index => "system-log-%{+YYYY.MM}" } }
[root@elk ~]# /opt/logstash/bin/logstash -f /etc/logstash/conf.d/system-log.conf -t Configuration OK
[root@elk ~]# /opt/logstash/bin/logstash -f /etc/logstash/conf.d/system-log.conf
◆ 使用编译安装Nginx
[root@elk logs]# cat /application/nginx/conf/nginx.conf worker_processes 1; events { worker_connections 1024; } http { include mime.types; default_type application/octet-stream; sendfile on; keepalive_timeout 65; log_format main '$remote_addr - $remote_user [$time_local] "$request" ' '$status $body_bytes_sent "$http_referer" ' '"$http_user_agent" "$http_x_forwarded_for"'; log_format access_log_json '{"@timestamp":"$time_iso8601",' '"host":"$server_addr",' '"clientip":"$remote_addr",' '"size":"$body_bytes_sent",' '"reponsetime":"$request_time",' '"url":"$uri",' '"domain":"$host",' '"http_host":"$host",' '"xff":"$http_x_forwarded_for",' '"referer":"$http_referer",' '"status":"$status"}'; access_log /application/nginx/logs/access_log_json.log access_log_json; server { listen 80; server_name localhost; location / { root html; index index.html index.htm; } } }
[root@elk logs]# cat /application/nginx/html/index.html test
[root@elk logs]# /application/nginx/sbin/nginx [root@elk logs]# curl 10.0.0.201 test
[root@elk logs]# tailf /application/nginx/logs/access_log_json.log {"@timestamp":"2017-07-04T12:25:25+08:00","host":"10.0.0.201","clientip":"10.0.0.201","size":"5","reponsetime":"0.000","url":"/index.html","domain":"10.0.0.201","http_host":"10.0.0.201","xff":"-","referer":"-","status":"200"}
[root@elk logs]# vim /etc/logstash/conf.d/nginx.conf input{ file { path => ["/application/nginx/logs/access_log_json.log"] type => "nginx-access-log" start_position => "beginning" codec => "json" } } filter{ } output{ elasticsearch { hosts => ["10.0.0.201:9200"] index => "nginx-access-log-%{+YYYY.MM}" } }
[root@elk conf.d]# /opt/logstash/bin/logstash -f /etc/logstash/conf.d/nginx.conf -t Configuration OK [root@elk conf.d]# /opt/logstash/bin/logstash -f /etc/logstash/conf.d/nginx.conf Settings: Default pipeline workers: 1 Pipeline main started ......
◆ 已验证