Setting up ELK with Docker

1. Pull the images

docker pull elasticsearch:7.16.3
docker pull logstash:7.16.3
docker pull kibana:7.16.3

2. Elasticsearch configuration

elasticsearch.yml:

http.host: 0.0.0.0
xpack.security.enabled: false
xpack.license.self_generated.type: basic
xpack.security.transport.ssl.enabled: false
network.host: 0.0.0.0

3. Logstash configuration

logstash.yml:

http.host: 0.0.0.0
xpack.monitoring.elasticsearch.hosts: [ "http://es7163:9200" ]

logstash.conf:

input {
  tcp {
    mode => "server"
    host => "0.0.0.0"
    port => 5044
    codec => json_lines
  }
}
filter {
}
output {
  elasticsearch {
    hosts => ["http://es7163:9200"]
    index => "tingshu-%{+YYYY.MM.dd}"
  }
}
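
Once the stack from step 6 is running, the tcp input can be sanity-checked with a small standalone Java snippet; this is only a sketch, and the host IP (taken from the Logback example in step 7) should be replaced with your own Docker host:

import java.io.OutputStream;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

// One-off test client: sends a single newline-terminated JSON event to the Logstash tcp input.
public class LogstashTcpProbe {
    public static void main(String[] args) throws Exception {
        String host = "192.168.159.100"; // assumption: IP of the Docker host running Logstash
        int port = 5044;                 // matches the tcp input above
        String event = "{\"message\":\"hello from LogstashTcpProbe\",\"level\":\"INFO\"}\n"; // json_lines expects one JSON object per line
        try (Socket socket = new Socket(host, port);
             OutputStream out = socket.getOutputStream()) {
            out.write(event.getBytes(StandardCharsets.UTF_8));
            out.flush();
        }
    }
}

If the pipeline is healthy, the event is indexed into tingshu-<current date> and becomes searchable in Kibana once an index pattern is created (see the final steps below).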

4. Kibana configuration

kibana.yml:

server.host: "0.0.0.0"
elasticsearch.hosts: ["http://es7163:9200"]
i18n.locale: "zh-CN"

5. Create a custom network

docker network create elk

All three containers in the docker-compose.yml below are attached to this network, so they can reach each other by container name (for example http://es7163:9200).

6. The docker-compose.yml file

version: '3'

services:
  elasticsearch:
    image: elasticsearch:7.16.3
    container_name: es7163
    networks:
      - elk
    environment:
      - ES_JAVA_OPTS=-Xms1024m -Xmx1024m
      - discovery.type=single-node
    ports:
      - "9200:9200"
      - "9300:9300"
    volumes:
      - /usr/local/es/data:/usr/share/elasticsearch/data
      - /usr/local/es/plugins:/usr/share/elasticsearch/plugins
      - /usr/local/es/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
    restart: always

  logstash:
    image: logstash:7.16.3
    container_name: logstash7163
    networks:
      - elk
    deploy:
      resources:
        limits:
          cpus: '2'
          memory: 4g
    ports:
      - "5044:5044"  # Logstash 默认端口
      - "9600:9600"
    volumes:
      - /usr/local/logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml  # Logstash settings
      - /usr/local/logstash/config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf  # pipeline definition
    restart: always
    depends_on:
      - elasticsearch

  kibana:
    image: kibana:7.16.3
    container_name: kibana7163
    networks:
      - elk
    ports:
      - "5601:5601"
    volumes:
      - /usr/local/kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml
    restart: always
    depends_on:
      - elasticsearch

networks:
  elk:
    external: true  # reuse the elk network created in step 5

Bring the stack up with docker-compose up -d from the directory containing this file; docker ps should then show the three containers es7163, logstash7163 and kibana7163.

7. Integrating Spring Boot with Logstash

1. Create a Spring Boot project and add the main dependency:

<dependency>
     <groupId>net.logstash.logback</groupId>
     <artifactId>logstash-logback-encoder</artifactId>
     <version>6.6</version>
</dependency>

2. application.yml configuration:

server:
  port: 8080
logging:
  config: classpath:logback-spring.xml
spring:
  application:
    name: linging-test

3. Add logback-spring.xml to the resources directory:

<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <include resource="org/springframework/boot/logging/logback/base.xml" />
    <springProperty scope="context" name="springAppName" source="spring.application.name"/>
    <springProperty scope="context" name="serverPort" source="server.port"/>
    <appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <!--logstash的服务地址和端口,可以实际情况设置-->
        <destination>192.168.159.100:5044</destination>
        <!-- 日志输出编码 -->
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
            <providers>
                <timestamp>
                    <timeZone>UTC</timeZone>
                </timestamp>
                <pattern>
                    <pattern>
                        {
                        <!-- application name -->
                        "app": "${springAppName}_${serverPort}",
                        <!-- log timestamp -->
                        "timestamp": "%d{yyyy-MM-dd HH:mm:ss.SSS}",
                        <!-- thread name -->
                        "thread": "%thread",
                        <!-- log level -->
                        "level": "%level",
                        <!-- logger name -->
                        "logger_name": "%logger",
                        <!-- log message -->
                        "message": "%msg",
                        <!-- stack trace -->
                        "stack_trace": "%exception"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
    </appender>
    <!--定义日志文件的存储地址,使用绝对路径-->
    <property name="LOG_HOME" value="/home/logs"/>
    <!-- 按照每天生成日志文件 -->
    <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!--日志文件输出的文件名-->
            <fileNamePattern>${LOG_HOME}/${springAppName}-${serverPort}-%d{yyyy-MM-dd}.log</fileNamePattern>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
        </encoder>
    </appender>
    <root level="INFO">
        <appender-ref ref="LOGSTASH" />
        <appender-ref ref="FILE" />
        <appender-ref ref="CONSOLE" />
    </root>
</configuration>

4. Start the project and generate some log output
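
A minimal sketch for generating some events is a throw-away REST controller; the class name and the /log path are made up for illustration and assume spring-boot-starter-web is on the classpath:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical controller used only to emit sample log events for the ELK pipeline.
@RestController
public class LogDemoController {

    private static final Logger log = LoggerFactory.getLogger(LogDemoController.class);

    @GetMapping("/log")
    public String emitLogs() {
        log.info("info message from the demo controller");
        log.warn("warn message from the demo controller");
        try {
            throw new IllegalStateException("demo exception for the stack_trace field");
        } catch (IllegalStateException e) {
            // %exception in the encoder pattern picks up this stack trace
            log.error("error message with stack trace", e);
        }
        return "logged";
    }
}

Calling http://localhost:8080/log a few times should produce documents in the tingshu-* indices written by the Logstash output, alongside whatever Spring Boot logs at startup.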

https://i-blog.csdnimg.cn/direct/8a27c7d2dac641aba7fb116f1d320605.png

5. Open the Kibana console

https://i-blog.csdnimg.cn/direct/612d75eee1414a4994866fab30ee2488.png

https://i-blog.csdnimg.cn/direct/c72bd8250df84cfab19e0ba16d3fdf73.png

6. Add an index pattern

https://i-blog.csdnimg.cn/direct/fc75d9193c954bcd9e549a436a48493a.png

7. Search the logs in Discover

https://i-blog.csdnimg.cn/direct/3b21899fbc114afca942fff36a293cb9.png