主机环境为:
centos 6.8 X64
各软件及版本:
kibana-4.5.3-linux-x64.tar.gz
elasticsearch-2.3.4.rpm
logstash-2.3.2.tar.gz
filebeat-1.2.3-x86_64.rpm
jdk-8u121-linux-x64.rpm
下载地址:
http://pan.baidu.com/s/1pLGzoYR
需要的主机及安装到的软件:
192.168.40.83 iptables2
kibana-4.5.3-linux-x64.tar.gz
elasticsearch-2.3.4.rpm
logstash-2.3.2.tar.gz
jdk-8u121-linux-x64.rpm
192.168.40.103 test2
filebeat-1.2.3-x86_64.rpm
192.168.40.101 test1
filebeat-1.2.3-x86_64.rpm
ELK日志收集架构
这个是官网的
https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-overview.html
filebeat收集到的日志可以发送到Elasticsearch/logstash/Kafka/redis中,本实验中收集的日志发送到logstash,logstash通过filter段处理后发送到Elasticsearch,用户通过访问kibana进行数据展示,这里不进行filebeat、logstash、Elasticsearch、kibana原理讲解
iptables2
1.准备好jdk
yum localinstall -y jdk-8u121-linux-x64.rpm
[root@iptables2 ~]# cat /etc/profile.d/java.sh
export JAVA_HOME=/usr/java/latest
export PATH=/usr/java/latest/bin:${PATH}
2.安装Elasticsearch
[root@iptables2 ~]# yum localinstall -y elasticsearch-2.3.4.rpm
安装插件
使用ES自带的命令plugin
# head
/usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head
# kopf
/usr/share/elasticsearch/bin/plugin install lmenezes/elasticsearch-kopf
# bigdesk
/usr/share/elasticsearch/bin/plugin install hlstudio/bigdesk
那如何访问安装好的插件呢?
http://ES_server_ip:port/_plugin/plugin_name
Example:
http://192.168.40.83:9200/_plugin/head/
http://192.168.40.83:9200/_plugin/kopf/
编辑配置文件
[root@iptables2 ~]# vim /etc/elasticsearch/elasticsearch.yml
# ---------------------------------- Network -----------------------------------
#
# Set the bind address to a specific IP (IPv4 or IPv6):
#
network.host: 0.0.0.0
#
# Set a custom port for HTTP:
#
http.port: 9200
启动Elasticsearch
[root@iptables2 ~]# service elasticsearch start
安装logstash
tar xf logstash-2.3.2.tar.gz
wget http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
mkdir logstash-2.3.2/conf
unzip GeoLiteCity.dat.gz
cat logstash-2.3.2/conf/ver11.conf
input {
beats {
port => 5044
type => "syslog"
}
}
filter {
if [type] == "filebeat" {
grok {
match => [ "message", "%{SYSLOGLINE}" ]
overwrite => [ "message" ]
}
}
date {
match => [ "timestamp", "MMM dd HH:mm:ss", "MMM d HH:mm:ss" ]
}
if [type] == "nginxacclog" {
grok {
match => {
"message" => "%{IP:client} - (?:%{USERNAME:remote_user}|-) \[%{HTTPDATE:timestamp}\] \"%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}\" \"%{NUMBER:request_time:float}\" %{INT:status} %{NUMBER:bytes} \"(?:%{URI:referer}|-)\" \"(?:%{GREEDYDATA:user_agent}|-)\" (?:%{IP:x_forword_for}|-)"
}
}
date {
match => [ "timestamp","dd/MMM/YYYY:HH:mm:ss Z" ]
}
urldecode {
all_fields => true
}
}
if [type] == "test1log" {
grok {
patterns_dir => "/root/logstash-2.3.2/patterns/"
match => {
"message" => "%{IP:client} - - \[%{ELKTIMES:log_timestamp} \] \"%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}\" %{INT:status} %{NUMBER:bytes} \"(?:%{URI:referer}|-)\" \"(?:%{GREEDYDATA:user_agent}|-)\""
}
}
date {
match => [ "log_timestamp","dd/MMM/YYYY:HH:mm:ss" ]
}
geoip {
source => "client"
target => "geoip"
database => "/root/GeoLiteCity.dat"
add_field => ["[geoip][coordinates]","%{[geoip][longitude]}"]
add_field => ["[geoip][coordinates]","%{[geoip][latitude]}"]
}
mutate {
convert => ["[geoip][coordinates]","float", "bytes","integer", "bytes.raw","integer"]
}
urldecode {
all_fields => true
}
}
if [type] == "loginmsg" {
grok {
match => {"message" => "%{SYSLOGPAMSESSION}"}
match => {"message" => "%{SECURELOG}"}
match => {"message" => "%{SYSLOGBASE2}"}
}
geoip {
source => "IP"
fields => ["city_name"]
database => "/root/GeoLiteCity.dat"
}
if ([status] == "Accepted") {
mutate {
add_tag => ["Success"]
}
}
else if ([status] == "Failed") {
mutate {
add_tag => ["Failed"]
}
}
}
}
output {
stdout {
codec => rubydebug
}
elasticsearch {
hosts => "192.168.40.83:9200"
}
}
mkdir logstash-2.3.2/patterns
cat logstash-2.3.2/patterns/linux-syslog
SECURELOG %{WORD:program}\[%{DATA:pid}\]: %{WORD:status} password for ?(invaliduser)? %{WORD:USER} from %{DATA:IP} port
ELKTIMES %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME}
启动logstash
./logstash-2.3.2/bin/logstash -f logstash-2.3.2/conf/ver11.conf
安装kibana
tar xf /usr/local/src/kibana-4.5.3-linux-x64.tar.gz -C /usr/local
cd /usr/local/
ln -s kibana-4.5.3-linux-x64 kibana
cp kibana/config/kibana.yml kibaba/config/kibana.yml.bak_$(date +%F_%H:%M)
配置kibana.yml
server.port: 5601
server.host: "0.0.0.0"
其它内容不动
启动kibana
./kibana/bin/kibana
访问
http://192.168.40.83:5601/即可
test1
安装filebeat
yum localinstall -y filebeat-1.2.3-x86_64.rpm
cp /etc/filebeat/filebeat.yml{,$(date +%F_%H:%M)}
# cat /etc/filebeat/filebeat.yml
##################################################### filebeat #######################################################
filebeat:
prospectors:
-
paths:
- /var/log/messages
input_type: log
document_type: messages
-
paths:
- /var/log/secure
input_type: syslog
document_type: loginmsg
-
paths:
- /var/log/nginx_access.log
input_type: log
document_type: nginxacclog
-
paths:
- /usr/local/tomcat/logs/catalina.out
input_type: catalina
document_type: catalinalog
multiline:
pattern: '^[[:space:]]'
negate: true
match: after
registry_file: /var/lib/filebeat/registry
##################################################### output #######################################################
output:
logstash:
hosts: ["192.168.40.83:5044"]
##################################################### Logging #######################################################
logging:
files:
rotateeverybytes: 10485760 # = 10MB
启动filebeat
service filebeat start
test2
安装filebeat
yum localinstall -y filebeat-1.2.3-x86_64.rpm
cp /etc/filebeat/filebeat.yml{,$(date +%F_%H:%M)}
# cat /etc/filebeat/filebeat.yml
##################################################### filebeat #######################################################
filebeat:
prospectors:
-
paths:
- /var/log/messages
input_type: log
document_type: messages
-
paths:
- /var/log/secure
input_type: syslog
document_type: loginmsg
-
paths:
- /var/log/nginx_access.log
input_type: log
document_type: nginxacclog
-
paths:
- /usr/local/tomcat/logs/catalina.out
input_type: catalina
document_type: catalinalog
multiline:
pattern: '^[[:space:]]'
negate: true
match: after
registry_file: /var/lib/filebeat/registry
##################################################### output #######################################################
output:
logstash:
hosts: ["192.168.40.83:5044"]
##################################################### Logging #######################################################
logging:
files:
rotateeverybytes: 10485760 # = 10MB
启动filebeat
service filebeat start
查看Elasticsearch是否接收到了数据:
http://192.168.40.83:9200/_search?pretty
查看Elasticsearch中全部索引列表
http://192.168.40.83:9200/_aliases
访问kibana
参考链接:
logstash快速入门
http://www.2cto.com/os/201411/352015.html
使用 curl 命令发送请求来查看 ES 是否接收到了数据:
# curl 'http://localhost:9200/_search?pretty'
开源实时日志分析ELK平台部署
http://www.tuicool.com/articles/QFvARfr
ELK系列一:ELK安装配置及nginx日志分析
http://www.myhack58.com/Article/sort099/sort0102/2016/81990_3.htm
ELK系列二:kibana操作及nginx日志分析图表创建
http://www.myhack58.com/Article/sort099/sort0102/2016/81991.htm
ELK+Filebeat+Kafka+ZooKeeper 构建海量日志分析平台
http://tchuairen.blog.51cto.com/3848118/1861167