微服務日誌監控系統ELK之docker-compose搭建ELK

docker-compose搭建日誌監控系統ELK

1、基礎環境

環境要求

服務器:centos 7.0

環境: docker、docker-compose

內存: 4G

2、環境配置

elasticsearch需要設置系統內核參數 vm.max_map_count,否則會因可用的虛擬內存映射區域過少而無法啓動。

# Raise the mmap count limit for the current session (takes effect immediately)
sysctl -w vm.max_map_count=262144
# Reload settings from /etc/sysctl.conf — add "vm.max_map_count=262144" there
# if the setting should persist across reboots
sysctl -p

3、配置啓動docker-compose.yml

創建logstash的配置文件 logstash-springboot.conf

# Listen for JSON-lines log events pushed over TCP (e.g. by logstash-logback-encoder)
input {
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 4560
    codec => json_lines
  }
}
# Forward every event to Elasticsearch, one index per day
output {
  elasticsearch {
    hosts => "es:9200"
    index => "ms-%{+YYYY.MM.dd}" 
  }
}

啓動文件 docker-compose.yml

version: '3'
services:
  elasticsearch:
    image: elasticsearch:6.4.0
    container_name: elasticsearch
    environment:
      - "cluster.name=elasticsearch" # cluster name
      - "discovery.type=single-node" # run as a single node (no cluster discovery)
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m" # JVM heap size
      - TZ=Asia/Shanghai
    volumes:
      - /data/elk/elasticsearch/plugins:/usr/share/elasticsearch/plugins # plugin directory mount
      - /data/elk/elasticsearch/data:/usr/share/elasticsearch/data # data directory mount
    ports:
      # quote host:container port mappings so YAML never parses them as numbers
      - "9200:9200"
  kibana:
    image: kibana:6.4.0
    container_name: kibana
    links:
      - elasticsearch:es # elasticsearch is reachable under the hostname "es"
    depends_on:
      - elasticsearch # start kibana only after elasticsearch
    environment:
      # Kibana 6.x docker images read ELASTICSEARCH_URL;
      # "elasticsearch.hosts" is the 7.x setting and is ignored by 6.4
      - ELASTICSEARCH_URL=http://es:9200
      - TZ=Asia/Shanghai
    ports:
      - "5601:5601"
  logstash:
    image: logstash:6.4.0
    container_name: logstash
    volumes:
      - /data/elk/logstash/logstash-springboot.conf:/usr/share/logstash/pipeline/logstash.conf # pipeline config mount
    environment:
      - TZ=Asia/Shanghai # container timezone
    depends_on:
      - elasticsearch # start logstash only after elasticsearch
    links:
      - elasticsearch:es # elasticsearch is reachable under the hostname "es"
    ports:
      - "4560:4560"

給掛載目錄授權

# Make the bind-mounted directories writable by the container user.
# NOTE(review): 777 is overly permissive — granting ownership to the
# container's elasticsearch user would be safer; confirm its uid/gid.
chmod 777 /data/elk/elasticsearch/plugins
chmod 777 /data/elk/elasticsearch/data

在docker-compose.yml文件夾下執行 命令啓動服務

docker-compose up -d

執行啓動成功

在這裏插入圖片描述

在logstash中安裝json_lines插件

# Enter the logstash container
docker exec -it logstash /bin/bash
# Change to the bin directory
cd /bin/
# Install the json_lines codec plugin used by the TCP input
logstash-plugin install logstash-codec-json_lines
# Leave the container
exit
# Restart logstash so the new plugin is loaded
docker restart logstash

關閉防火牆,或開啓相關接口即可訪問kibana.

systemctl stop firewalld

本地地址:

http://192.168.2.113:5601

kibana頁面:
在這裏插入圖片描述

通常ELK可應用於微服務的日誌收集、線上查看、日誌統計等場景。

4、spring boot輸出日誌到ELK

spring boot日誌中配置logback.xml向logstash中發送日誌。

配置文件如下:

<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="true">
    <!-- Microservice resource/configuration file -->
    <property resource="bootstrap.yml"/>
    <property name="APP_NAME" value="${appId}"/>
    <!-- Log output directory -->
    <property name="LOG_DIR" value="/data/log/${APP_NAME}"/>
    <contextName>default</contextName>
    <jmxConfigurator/>
    <!-- Console appender. The name must match the <appender-ref ref="STDOUT"/>
         entries below; the original name "STDOUT標準" did not, which makes
         logback fail to resolve the reference. -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>[%d{HH:mm:ss}] [%thread] |%level| %marker %logger{50} -> %X{traceId} %msg %n%ex</pattern>
        </encoder>
    </appender>
    <!-- Rolling file appender for debug.log (size- and time-based rollover) -->
    <appender name="DEBUG" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_DIR}/debug.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <fileNamePattern>${LOG_DIR}/history/debug-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
            <maxHistory>60</maxHistory>
            <maxFileSize>50MB</maxFileSize>
        </rollingPolicy>
        <encoder charset="UTF-8">
            <Pattern>[%d{HH:mm:ss}] [%thread] |%level| %marker %logger{50} -> %X{traceId} %msg %n%ex</Pattern>
        </encoder>
    </appender>

    <!-- Asynchronous wrapper around the DEBUG file appender -->
    <appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
        <discardingThreshold>0</discardingThreshold>
        <queueSize>1000</queueSize>
        <appender-ref ref="DEBUG"/>
    </appender>

    <appender name="INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_DIR}/info.log</file>
        <!-- Accept INFO events only. The original filter level was "error",
             which meant info.log never received INFO-level events. -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <fileNamePattern>${LOG_DIR}/history/info-%d{yyyy-MM}.%i.log</fileNamePattern>
            <maxHistory>12</maxHistory>
            <maxFileSize>50MB</maxFileSize>
        </rollingPolicy>
        <encoder charset="UTF-8">
            <Pattern>[%d] [%thread] |%level| %marker %logger{10} -> %X{traceId} %msg %n%ex</Pattern>
        </encoder>
    </appender>

    <appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_DIR}/error.log</file>
        <!-- Accept ERROR events only -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <fileNamePattern>${LOG_DIR}/history/error-%d{yyyy-MM}.%i.log</fileNamePattern>
            <maxHistory>12</maxHistory>
            <maxFileSize>50MB</maxFileSize>
        </rollingPolicy>
        <encoder charset="UTF-8">
            <Pattern>[%d] [%thread] |%level| %marker %logger{50} -> %X{traceId} %msg %n%ex</Pattern>
        </encoder>
    </appender>

    <!-- NOTE(review): this appender is not referenced by any logger below,
         and its filter accepts only ERROR events — confirm whether access
         logging is wired up elsewhere or should be removed. -->
    <appender name="ACCESS" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_DIR}/access.log</file>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <fileNamePattern>${LOG_DIR}/history/access-%d{yyyy-MM}.%i.log</fileNamePattern>
            <maxHistory>12</maxHistory>
            <maxFileSize>50MB</maxFileSize>
        </rollingPolicy>
        <encoder charset="UTF-8">
            <Pattern>[%d] [%thread] |%level| %marker %logger{10} -> %X{traceId} %msg %n%ex</Pattern>
        </encoder>
    </appender>
    <!-- Logstash shipper (uncomment to enable); adjust the destination and
         appname to the actual deployment. The customFields value must be
         valid JSON — the original had a stray comma inside the server value. -->
    <!--<appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>192.168.112.224:4560</destination>
        <encoder class="net.logstash.logback.encoder.LogstashEncoder">
        <includeContext>false</includeContext>
        <customFields>{"appname": "${APP_NAME}", "server": "${HOSTNAME}"}</customFields>
        </encoder>
    </appender>-->
    <root level="info">
        <appender-ref ref="STDOUT"/>
        <!-- DEBUG is reached through ASYNC; referencing it directly as well
             (as the original did) duplicated every line in debug.log -->
        <appender-ref ref="ASYNC"/>
        <appender-ref ref="INFO"/>
        <appender-ref ref="ERROR"/>
        <!--<appender-ref ref="LOGSTASH"/>-->
    </root>

    <logger name="org.springframework" level="info" additivity="false">
        <appender-ref ref="DEBUG"/>
        <appender-ref ref="INFO"/>
        <appender-ref ref="STDOUT"/>
        <!--<appender-ref ref="LOGSTASH"/>-->
    </logger>

    <!-- NOTE(review): additivity="true" means events also propagate to root,
         so these appenders receive each DAO event twice — confirm intent. -->
    <logger name="com.upanda.coyhzx.dao" level="DEBUG" additivity="true">
        <appender-ref ref="DEBUG"/>
        <appender-ref ref="INFO"/>
        <appender-ref ref="ERROR"/>
        <!--<appender-ref ref="LOGSTASH"/>-->
    </logger>
</configuration>
發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章