日志收集 - ELK

安装配置如下:

1.安装java
    wget http://download.oracle.com/otn-pub/java/jdk/8u45-b14/jdk-8u45-linux-x64.tar.gz
    mkdir /usr/local/java
    tar -zxf jdk-8u45-linux-x64.tar.gz -C /usr/local/java/
    vim /etc/profile
    -----------
    export JAVA_HOME=/usr/local/java/jdk1.8.0_45
    export PATH=$PATH:$JAVA_HOME/bin
    export CLASSPATH=.:$JAVA_HOME/lib/tools.jar:$JAVA_HOME/lib/dt.jar:$CLASSPATH
    -----------
    source /etc/profile    # 重新加载环境变量

    执行:java -version
    提示:
    java version "1.8.0_45"

   Java(TM) SE Runtime Environment (build 1.8.0_45-b14)

   Java HotSpot(TM) 64-Bit Server VM (build 25.45-b02, mixed mode)

2.logstash-agent配置(应用服务器节点)
    这里用到了kafka插件,可以直接下载包含所有插件的包进行安装。
    wget https://download.elastic.co/logstash/logstash/logstash-all-plugins-2.3.1.tar.gz
    tar -zxvf logstash-all-plugins-2.3.1.tar.gz -C /usr/local/
    cd /usr/local/
    ln -s logstash-2.3.1/ logstash
    cd /usr/local/logstash

    logstash.conf参见以下配置
    上传修改好的logstash配置文件

    启动logstash-agent:
    ./bin/logstash agent -f conf/logstash.conf -l logs/logstash.log &

3.安装kafka
    公用kafka服务

4.安装logstash-index、elasticsearch、kibana
    wget https://download.elastic.co/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/2.3.2/elasticsearch-2.3.2.tar.gz
    wget https://download.elastic.co/logstash/logstash/logstash-all-plugins-2.3.1.tar.gz
    wget https://download.elastic.co/kibana/kibana/kibana-4.5.0-linux-x64.tar.gz

    tar -zxvf elasticsearch-2.3.2.tar.gz -C /usr/local/
    tar -zxvf logstash-all-plugins-2.3.1.tar.gz -C /usr/local/
    tar -zxvf kibana-4.5.0-linux-x64.tar.gz -C /usr/local/
    cd /usr/local/
    ln -s logstash-2.3.1/ logstash
    ln -s elasticsearch-2.3.2 elasticsearch
    ln -s kibana-4.5.0-linux-x64 kibana

5.配置elasticsearch
    cd elasticsearch 
    vim config/elasticsearch.yml
    集群名称:cluster.name: elastic-logstash
    定义节点名称:node.name: "node-192.168.1.202"

    修改ES内存

    vim bin/elasticsearch.in.sh

    ----------

    if [ "x$ES_MIN_MEM" = "x" ]; then
    ES_MIN_MEM=3g
    fi
    if [ "x$ES_MAX_MEM" = "x" ]; then
    ES_MAX_MEM=3g
    fi

    ----------

    安装 elasticsearch-kopf、elasticsearch-head、marvel插件,方便监控
    cd /usr/local/elasticsearch/bin/
    ./plugin -install lmenezes/elasticsearch-kopf 
    ./plugin -install mobz/elasticsearch-head

    ./plugin install license

    ./plugin install marvel-agent

    cd /usr/local/kibana/

    ./bin/kibana plugin --install elasticsearch/marvel/latest

    启动elasticsearch:
    cd /usr/local/elasticsearch
    ./bin/elasticsearch -d 

    访问以下地址进行验证:
    http://xxx.xxx.xxx.xxx:9200/_plugin/kopf/
    http://xxx.xxx.xxx.xxx:9200/_plugin/head/

6.配置logstash
    cd /usr/local/logstash
    上传已修改好的logstash配置文件
    启动logstash-index:
    ./bin/logstash agent -f conf/logstash.conf -l logs/logstash.log &

7.配置kibana
    vim /usr/local/kibana/config/kibana.yml 
   修改es接口地址:
   elasticsearch_url: http://xxx.xxx.xxx.xxx:9200
   cd /usr/local/kibana/bin/
   启动kibana:
   nohup ./kibana &

   查看kibana数据
   http://xxx.xxx.xxx.xxx:5601

   查看elasticsearch集群的运行情况

   http://xxx.xxx.xxx.xxx:5601/app/marvel

8.安装nginx
    1.用nginx代理
        http://xxx.xxx.xxx.xxx:5601
        http://xxx.xxx.xxx.xxx:9200/_plugin
    2.为kibana、_plugin提供用户认证服务
    3.屏蔽掉5601 9200端口

9.logstash-agent的logstash.conf (应用服务器节点)
    # 不同业务配置多个input{},一个logstash.conf可配置多个input{}
    input {
        file {
            codec => multiline {
                pattern => "^%{TIME}|%{SYSLOGTIMESTAMP_EXT}|Hibernate:|%{YEAR}[./-]%{MONTHNUM}[./-]%{MONTHDAY}|%{MONTHDAY}[./-]%{MONTH}[./-]%{YEAR}"

                patterns_dir => ["/usr/local/logstash/local_patterns"]
                negate => true
                what => "previous"
            }
        discover_interval => 10
        # 配置业务类型,建议加前缀方便查询区分,比如:tomcat_xxx
        type => "tomcat_app"
        # 配置日志文件的地址,需要修改为实际目录
        path => ["/home/eqs/server/tomcat/app/logs/catalina.out","/home/eqs/server/tomcat/app/logs/*.log"]
        # 配置读取位置存放的文件目录,不同业务建议分别存储
        sincedb_path => "/home/eqs/logstash-1.5.4/sincedb-tomcat_app"
        sincedb_write_interval => 1
        start_position => "beginning"
        stat_interval => 1
    }
}
output {
    kafka {
        # 配置kafka集群地址和端口,地址端口需要相应地修改
        bootstrap_servers => "192.168.249.64:9092,192.168.249.65:9092,192.168.249.66:9092"
        # logstash-1.5.4版本需要用broker_list参数
        #broker_list => "192.168.249.64:9092,192.168.249.65:9092,192.168.249.66:9092"
        # 指定日志存放的topic_id
        topic_id => "tomcat-log"
    }
}

10.logstash-index的logstash.conf(中心节点)
    input {
        kafka {
        # 配置kafka集群的zookeeper集群的IP和端口,需修改为实际的IP和端口
        zk_connect => "192.168.1.4:2181,192.168.1.5:2181,192.168.1.6:2181"
        group_id => "logstash"
        # 配置同logstash-agent相同的topic_id
        topic_id => "tomcat-log"
        codec => json
        reset_beginning => false
        consumer_threads => 5
        decorate_events => true
        }

    }

    filter {
        grok {

            # 复制下面第11节local_patterns的内容,上传到服务器/usr/local/logstash/目录
            patterns_dir => ["/usr/local/logstash/local_patterns"]
            match => [
                #2016-04-26 18:44:13.885 [localhost-startStop-1] INFO c.k.modules.base.log.LogFileWriter - loginError LogFileWriter init......
                "message", "%{DATESTAMP_EXT:time} \[%{NOTSPACE:thread}\] %{LOGLEVEL:level} %{PROG:class} - %{LOGMSG:log_msg}",
                #2016-04-28 09:28:35.744 [catalina-exec-55] ERROR o.h.e.jdbc.spi.SqlExceptionHelper - java.util.concurrent.CancellationException: Cancelled
                "message", "%{DATESTAMP_EXT:time} \[%{NOTSPACE:thread}\] %{LOGLEVEL:level} %{PROG:class} - %{LOGMSG:log_msg}",
                #2016-04-26 18:44:12.695 INFO net.spy.memcached.auth.AuthThread: Authenticated to /202.173.10.95:11210
                "message", "%{DATESTAMP_EXT:time} %{LOGLEVEL:level} %{PROG:class}: %{LOGMSG:log_msg}",
                #Apr 27, 2016 1:35:43 PM org.apache.catalina.core.StandardWrapperValve invoke
                "message", "%{SYSLOGTIMESTAMP_EXT:time} %{LOGMSG:msg}",
                #18:44:09,389 |-INFO in ch.qos.logback.classic.joran.action.ConfigurationAction - debug attribute not set
                "message", "%{TIME:time} \|-%{LOGLEVEL:level} in %{PROG_EXT:class} - %{LOGMSG:log_msg}"
            ]
        }
     # if [level] =~ "ERROR" {
         grok {
            patterns_dir => ["/usr/local/logstash/local_patterns"]
            match => ["log_msg", "%{EXCEPTIONTYPE:exception_type}"]
        }
        mutate {
             remove_field => [ "log_msg"]
        }

    # }
    }

    output {
        elasticsearch {
            # 需要修改为实际的elasticsearch的IP和端口
            hosts => ["192.168.1.8:9200"]
            codec => "json"
       }
}

11.local_patterns文件(应用服务器节点和中心节点都有配置local_patterns文件)

    DATESTAMP_EXT %{YEAR}[./-]%{MONTHNUM}[./-]%{MONTHDAY} %{TIME}
    LOGMSG [\s\S]*
    EXCEPTIONTYPE [a-zA-Z.]*Exception
    AMPM [AP]M
    SYSLOGTIMESTAMP_EXT %{MONTH} %{MONTHDAY}, %{YEAR} %{TIME} %{AMPM}
    PROG_EXT (?:[\w._/%-\[\]]+)

相关文章
相关标签/搜索