docker 部署 filebeat 采集日志导入到elasticsearch 设置pipeline

filebeat.yml

yaml 复制代码
filebeat.inputs:
  - type: log  # "log" (default) reads lines from files; "stdin" reads standard input
    paths:
      - /root/logs/*/*.log
    # Multiline join: a line NOT starting with "yyyy-MM-dd" is appended to
    # the previous matching line, so stack traces stay in one event.
    multiline.pattern: '^\d{4}-\d{2}-\d{2}'
    multiline.negate: true
    multiline.match: after
    fields:
      index: 'server_log'

setup.ilm.enabled: false
# The index template is created manually in Elasticsearch
# (java_logback_service_index_template, see the PUT below). Disable
# filebeat's own template loading: the original settings (template name
# java_logback_service_index_template + overwrite: true) would make
# filebeat replace the hand-written mapping with its default one, and the
# pattern "java_logback_service_index_template-*" never matched the
# indices actually written ("server_log-*") anyway.
setup.template.enabled: false

output.elasticsearch:
  hosts: ["110.238.107.151:9001"]
  indices:
    - index: "server_log-%{+yyyy.MM.dd}"
      when.contains:
        fields:
          index: "server_log"
  pipeline: "test_java_log_pipeline"
  # NOTE: "document_type" was removed in filebeat 6.0 and is not a valid
  # elasticsearch-output option in 6.4.2; filebeat 6.x always indexes
  # documents with type "doc".
  max_retries: 3  # events are dropped after 3 failed retries (default 3)

processors:
  - drop_fields:
      fields: ["log", "host", "input", "agent", "ecs", "start_time"]

docker run

yaml 复制代码
# Run filebeat 6.4.2 detached (-itd), as root, on the custom bridge network
# so it can reach Elasticsearch by container name. filebeat.yml and the
# data/ (registry) directory are bind-mounted so configuration and read
# offsets survive container recreation; the two log directories must match
# the paths configured under filebeat.inputs.
# NOTE(review): mounting filebeat.yml :rw plus --privileged is broader than
# needed — :ro for the config and dropping --privileged usually suffice;
# confirm before hardening.
docker run -itd \
  --privileged=true \
  --user=root \
  --name=filebeat \
  --restart always \
  --network=my_network \
  -v /root/filebeat/filebeat.yml:/usr/share/filebeat/filebeat.yml:rw \
  -v /root/filebeat/data/:/usr/share/filebeat/data/:rw  \
  -v /root/logs/:/root/logs/:rw  \
  -v /root/xiaoye_worker/logs/:/root/xiaoye_worker/logs/:rw  \
  docker.elastic.co/beats/filebeat:6.4.2

Elasticsearch 模版和pipeline 设置

shell 复制代码
GET _template/java_logback_service_index_template
DELETE _template/java_logback_service_index_template
# Create the index template.
# index_patterns must cover the indices filebeat actually writes
# (server_log-YYYY.MM.dd); the original "java_log-*" never matched, so the
# mapping was never applied. In ES 6.x the mapping type must also be "doc"
# — the single type filebeat 6.x uses for every document it indexes.
PUT _template/java_logback_service_index_template
{
  "order": 1,
  "index_patterns": [
    "server_log-*"
  ],
  "settings": {
    "number_of_shards": 1,
    "number_of_replicas": 1
  },
  "mappings": {
    "doc": {
      "properties": {
        "app_name": {
          "type": "keyword"
        },
        "trance_id": {
          "type": "keyword"
        },
        "log_level": {
          "type": "keyword"
        },
        "thread": {
          "type": "keyword"
        },
        "class_line": {
          "type": "keyword"
        },
        "message": {
          "type": "text",
          "analyzer": "ik_max_word",
          "search_analyzer": "ik_smart",
          "norms": false
        },
        "timestamp": {
          "type": "date"
        }
      }
    }
  },
  "aliases": {}
}




GET _ingest/pipeline/test_java_log_pipeline

DELETE /_ingest/pipeline/test_java_log_pipeline
# Define the ingest pipeline.
# Each processor object must contain exactly ONE processor type, so grok /
# date / remove are separate list entries (the original bundled grok+remove
# and date+remove in single objects, and removed "@timestamp" twice — the
# second remove fails the pipeline because the field is already gone).
# The grok pattern is kept on one line: a trailing newline inside the
# triple-quoted string becomes part of the pattern and breaks matching.
PUT /_ingest/pipeline/test_java_log_pipeline
{
  "description": "test_java_log_pipeline",
  "processors": [
    {
      "grok": {
        "field": "message",
        "patterns": [
          """%{TIMESTAMP_ISO8601:timestamp} %{DATA:app_name} %{LOGLEVEL:log_level} %{DATA:thread} \[%{DATA:trance_id}\] %{DATA:class_line} %{GREEDYDATA:message}"""
        ]
      }
    },
    {
      "date": {
        "field": "timestamp",
        "formats": [
          "yyyy-MM-dd HH:mm:ss.SSS"
        ],
        "timezone": "Asia/Shanghai",
        "target_field": "timestamp"
      }
    },
    {
      "remove": {
        "field": "@timestamp"
      }
    }
  ]
}

测试 grok

shell 复制代码
# 测试 grok
# Simulate the pipeline against a sample log line before installing it.
# As in the real pipeline, each processor object holds exactly one
# processor type — the original combined "date" and "remove" in one object.
POST _ingest/pipeline/_simulate
{
  "pipeline": {
    "description": "timestamp pipeline",
    "processors": [
      {
        "grok": {
          "field": "message",
          "patterns": [
            """%{TIMESTAMP_ISO8601:timestamp} %{DATA:app_name} %{LOGLEVEL:log_level} %{DATA:thread} \[%{DATA:trance_id}\] %{DATA:class_line} %{GREEDYDATA:message}"""
          ]
        }
      },
      {
        "date": {
          "field": "timestamp",
          "formats": [
            "yyyy-MM-dd HH:mm:ss.SSS"
          ],
          "timezone": "Asia/Shanghai",
          "target_field": "create_time"
        }
      },
      {
        "remove": {
          "field": "timestamp"
        }
      }
    ]
  },
  "docs": [
    {
      "_index": "syne_sys_log",
      "_id": "id",
      "_source": {
        "message": "2024-10-02 21:11:20.083 xiaoye-scheduler INFO  scheduling-1 [] com.xiaoye.orion.scheduler.service.SchedulerService:83 now: 2024-10-02T21:11:20.083, size: 0, id:1727874680011, startTime:1727874680011"
      }
    }
  ]
}

Entity

java 复制代码
/**
 * Elasticsearch document for one application log line stored in the daily
 * "server_log-*" indices. Field names mirror the groups the ingest
 * pipeline's grok pattern extracts from each logback line
 * (app_name, trance_id, log_level, thread, class_line, message).
 */
@Data
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@Document(indexName = "server_log-*")
public class ServerLogEs {

    // Severity from %{LOGLEVEL:log_level} (INFO/WARN/ERROR ...).
    @Field(name = "log_level", type = FieldType.Keyword)
    @JsonProperty("log_level")
    private String logLevel;
    
    // Thread name produced by logback's %thread conversion word.
    @Field(name = "thread", type = FieldType.Keyword)
    private String thread;
    
    // Service name (spring.application.name), e.g. "xiaoye-scheduler".
    @Field(name = "app_name", type = FieldType.Keyword)
    @JsonProperty("app_name")
    private String appName;
    
    // MDC traceId from the [%X{traceId}] segment.
    // NOTE(review): "trance_id" looks like a typo for "trace_id", but it is
    // used consistently in the grok pattern and index template — keep all
    // three in sync if renaming.
    @Field(name = "trance_id", type = FieldType.Keyword)
    @JsonProperty("trance_id")
    private String tranceId;
    
    // "fully.qualified.Class:line" from %class:%line.
    @Field(name = "class_line", type = FieldType.Keyword)
    @JsonProperty("class_line")
    private String classLine;
    // Free-text remainder of the log line; ik analyzers for Chinese search.
    @Field(type = FieldType.Text, analyzer = "ik_smart", searchAnalyzer = "ik_smart")
    private String message;
    
    // NOTE(review): the installed pipeline writes the parsed date to
    // "timestamp" (the simulate example uses "create_time"); neither is
    // "@create_time" — confirm which field actually exists in the index.
    @Field(name = "@create_time")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date createTime;
}

logback.xml

xml 复制代码
<?xml version="1.0" encoding="UTF-8" ?>
<!--
  Every appender pattern below emits
  "yyyy-MM-dd HH:mm:ss.SSS APP_NAME LEVEL thread [traceId] class:line msg",
  which is exactly the layout the filebeat multiline pattern and the
  test_java_log_pipeline grok pattern expect — keep the three in sync.
-->
<configuration>
    <springProperty scope="context" name="logPath" source="log.path" defaultValue="/root/logs/${APP_NAME}/"/>
     <!-- Resolve the service name (used in log lines and the log path) -->
    <springProperty scope="context" name="APP_NAME" source="spring.application.name" defaultValue="xiaoye-admin"/>
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
    <jmxConfigurator/>

    <!-- Console output -->
    <appender name="consoleLog" class="ch.qos.logback.core.ConsoleAppender">
        <layout class="ch.qos.logback.classic.PatternLayout">
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n
            </pattern>
        </layout>
    </appender>

    <!-- File output for non-ERROR events (enable as needed) -->
    <appender name="fileInfoLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- DENY exactly ERROR here; ERROR events go to fileErrorLog below -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>DENY</onMatch>
            <onMismatch>ACCEPT</onMismatch>
        </filter>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n
            </pattern>
            <!-- Buffered writes: better throughput, lines may lag on disk -->
            <immediateFlush>false</immediateFlush>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${logPath}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
            <maxHistory>7</maxHistory>
        </rollingPolicy>
    </appender>

    <!-- File output for ERROR and above only -->
    <appender name="fileErrorLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>ERROR</level>
        </filter>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n
            </pattern>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${logPath}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
            <maxHistory>7</maxHistory>
        </rollingPolicy>
    </appender>

    <!-- Silence noisy framework loggers below ERROR -->
    <logger name="org.springframework" level="ERROR"/>
    <logger name="org.xnio" level="ERROR"/>
    <logger name="io.undertow" level="ERROR"/>
    <logger name="com.netflix.discovery" level="ERROR"/>

    <!--    <springProfile name="dev,test">-->
    <!--        <root level="info">-->
    <!--            <appender-ref ref="consoleLog"/>-->
    <!--            <appender-ref ref="fileInfoLog"/>-->
    <!--            <appender-ref ref="fileErrorLog"/>-->
    <!--        </root>-->
    <!--    </springProfile>-->

    <!-- Async logging: clearly better throughput under heavy logging, at the cost of latency -->
    <!--    <appender name="asyncLog" class="ch.qos.logback.classic.AsyncAppender">-->
    <!--        <discardingThreshold>0</discardingThreshold>-->
    <!--        <queueSize>100</queueSize>-->
    <!--        <appender-ref ref="fileInfoLog"/>-->
    <!--    </appender>-->

    <!--    <springProfile name="prod">-->
    <!--        <root level="info">-->
    <!--            <appender-ref ref="asyncLog"/>-->
    <!--            <appender-ref ref="fileErrorLog"/>-->
    <!--        </root>-->
    <!--    </springProfile>-->

    <root level="info">
        <appender-ref ref="consoleLog"/>
        <appender-ref ref="fileInfoLog"/>
        <appender-ref ref="fileErrorLog"/>
    </root>

</configuration>
相关推荐
丶213621 分钟前
【大数据】MySQL与Elasticsearch的对比分析:如何选择适合的查询解决方案
大数据·mysql·elasticsearch
TsengOnce1 小时前
Docker安装稳定版本nginx-1.26.2
linux·nginx·docker
你熬夜了吗?2 小时前
java实现代码沙盒(docker-java)
java·spring boot·docker
刘瑾言2 小时前
ES操作命令
java·elasticsearch
灰灰的辉3 小时前
统信操作系统离线安装JDK、Nginx、elasticsearch、kibana、ik、pinyin
java·开发语言·elasticsearch
南猿北者4 小时前
docker compose
运维·docker·容器
hongkid4 小时前
docker 部署freeswitch(非编译方式)
docker·容器·freeswitch
尘佑不尘8 小时前
kali上安装docker,并且生成centos7容器和创建apache容器后台运行
笔记·web安全·docker·容器·apache
想学习java初学者9 小时前
Docker compose部署Activemq
docker·容器·activemq
椰汁菠萝9 小时前
docker运行ActiveMQ-Artemis
docker·容器·activemq