前言
最近业务上链路追踪 技术选型了 cat
zipkin
skywalking
Jaeger
openTracing
Elastic Stack
Elastic Stack 和 skywalking 都支持 CNCF 开源项目 OpenTelemetry 标准。目前 OpenTelemetry 的日志方案还不太成熟,所以没有采用该方案;最终在 skywalking 和 Elastic Stack 之间选型了 skywalking。它还支持日志采集,这样日志采集就可以不再使用 ELK 那套方案,更简单、更轻量级。
spring boot log4j2 集成skywalking
1、添加依赖
- skywalking v9.3
- spring boot 2.7.14
xml
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
<exclusions>
<exclusion>
<artifactId>spring-boot-starter-logging</artifactId>
<groupId>org.springframework.boot</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>5.8.21</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<exclusions>
<exclusion>
<artifactId>spring-boot-starter-logging</artifactId>
<groupId>org.springframework.boot</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-log4j2</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-spring-boot</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba.fastjson2</groupId>
<artifactId>fastjson2</artifactId>
<version>${fastjson.version}</version>
</dependency>
<dependency>
<groupId>com.lmax</groupId>
<artifactId>disruptor</artifactId>
<version>3.4.4</version>
</dependency>
<!-- 该引用用于代码获取traceId -->
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>apm-toolkit-trace</artifactId>
<version>8.12.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.skywalking/apm-toolkit-log4j-2.x -->
<!-- 该引用用于log4j2获取traceId,也就是tid -->
<dependency>
<groupId>org.apache.skywalking</groupId>
<artifactId>apm-toolkit-log4j-2.x</artifactId>
<version>8.12.0</version>
</dependency>
log4j2.xml
xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- packages: required so the custom @Plugin lookup shipped inside the application jar is discovered.
     NOTE(review): must match the actual package of the SpringEnvironmentLookup class — confirm. -->
<Configuration status="INFO" packages="com.tuanzhang.skywalking.log4j2.lookup" schema="Log4J-V2.0.xsd" monitorInterval="5" >
<Properties>
<Property name="LOG_HOME">/home/logs</Property>
<Property name="FILE_NAME">log</Property>
<!--
Pattern reference (sample output in brackets):
[%d{yyyy-MM-dd'T'HH:mm:ss.SSSZZ}]  date                    [2023-03-27T10:39:56.514+08:00]
[%traceId]                         SkyWalking trace id (tid)
[%level{length=5}]                 log level               [ERROR]
[%thread-%tid]                     thread name - thread id [http-nio-8081-exec-1-37]
[%logger]                          logger name             [com.tuanzhang.log.controller.LogController]
[%X{hostName}]                     host name (MDC)         [jg-a-0079]
[%X{ip}]                           host ip (MDC)           [172.20.5.112]
[%X{applicationName}]              application name (MDC)  [demo-spring-boot-log]
[%F,%L,%C,%M]                      location                [LogController.java,36,com.tuanzhang.log.controller.LogController$1,run]
[%m] %ex                           message and exception   [error日志] java.lang.ArithmeticException: / by zero ...
-->
<Property name="patternLayout">[%d{yyyy-MM-dd'T'HH:mm:ss.SSSZZ}][%highlight{%-5p}][%traceId][%thread-%tid][%cyan{%l}][${bomb:hostIp}][${bomb:spring.application.name}] [%m] %ex %n</Property>
</Properties>
<Appenders>
<!-- SkyWalking gRPC log collection (supported since agent 8.4.0) -->
<GRPCLogClientAppender name="grpc-log">
<JsonLayout>
<!-- "app" is resolved through the "bomb" lookup so a single log4j2.xml works for every application -->
<KeyValuePair key="app" value="${bomb:spring.application.name}" />
</JsonLayout>
</GRPCLogClientAppender>
<Console name="CONSOLE" target="SYSTEM_OUT">
<!-- Without an explicit layout the default would be %m%n.
     disableAnsi="false" noConsoleNoAnsi="false" enable %highlight colored output. -->
<PatternLayout pattern="${patternLayout}" disableAnsi="false" noConsoleNoAnsi="false"/>
</Console>
<RollingRandomAccessFile name="appAppender" fileName="${LOG_HOME}/app-${FILE_NAME}.log"
filePattern="${LOG_HOME}/app-${FILE_NAME}-%d{yyyy-MM-dd}-%i.log.gz" >
<PatternLayout pattern="${patternLayout}" />
<Policies>
<!-- roll daily, and additionally whenever the current file reaches 500MB -->
<TimeBasedTriggeringPolicy interval="1"/>
<SizeBasedTriggeringPolicy size="500MB"/>
</Policies>
<!-- at most 50 rolled files per day, gzip-compressed via the .gz filePattern -->
<DefaultRolloverStrategy max="50">
<Delete basePath="${LOG_HOME}" maxDepth="2">
<IfFileName glob="app-*.log.gz" />
<!-- delete archives older than 7 days once they exceed 1GB in total or 10 files -->
<IfLastModified age="7d">
<IfAny>
<IfAccumulatedFileSize exceeds="1GB" />
<IfAccumulatedFileCount exceeds="10" />
</IfAny>
</IfLastModified>
</Delete>
</DefaultRolloverStrategy>
</RollingRandomAccessFile>
<RollingRandomAccessFile name="errorAppender" fileName="${LOG_HOME}/error-${FILE_NAME}.log" filePattern="${LOG_HOME}/error-${FILE_NAME}-%d{yyyy-MM-dd}-%i.log" >
<PatternLayout pattern="${patternLayout}" />
<Filters>
<!-- only WARN and above reach the error file -->
<ThresholdFilter level="warn" onMatch="ACCEPT" onMismatch="DENY"/>
</Filters>
<Policies>
<TimeBasedTriggeringPolicy interval="1"/>
<SizeBasedTriggeringPolicy size="500MB"/>
</Policies>
<DefaultRolloverStrategy max="20"/>
</RollingRandomAccessFile>
</Appenders>
<Loggers>
<!-- Business packages, async. Fixes vs. the original:
     1. logger name typo "com.tuanzahng.*" corrected; Log4j2 logger names are package
        prefixes, not globs, so the ".*" suffix is dropped;
     2. the two duplicate AsyncLogger definitions (Log4j2 rejects duplicate names) are
        merged into one with both appenders;
     3. additivity="false" plus the full appender list prevents each event from being
        written twice through Root. -->
<AsyncLogger name="com.tuanzhang" level="info" includeLocation="true" additivity="false">
<AppenderRef ref="grpc-log"/>
<AppenderRef ref="CONSOLE"/>
<AppenderRef ref="appAppender"/>
<AppenderRef ref="errorAppender"/>
</AsyncLogger>
<Root level="info">
<AppenderRef ref="grpc-log" />
<AppenderRef ref="CONSOLE"/>
<AppenderRef ref="appAppender"/>
<AppenderRef ref="errorAppender"/>
</Root>
</Loggers>
</Configuration>
因为要在日志中打印动态参数(比如应用名称),使用了 log4j2 的 lookup 插件。如果不使用动态参数,每个应用都要自定义一个 log4j2.xml
文件,把 KeyValuePair 写死成固定值,例如 <KeyValuePair key="app" value="spring-boot-demo" />
java
/**
 * Log4j2 {@link StrLookup} plugin (prefix {@code bomb:}) that resolves values from the Spring
 * {@link Environment}, plus the special key {@code hostIp} (the machine's inner IP).
 *
 * <p>Plugins packaged inside a jar are not auto-discovered; either set
 * {@code packages="..."} on the log4j2.xml {@code <Configuration>} element, or scan explicitly:
 * <pre>
 * final PluginManager manager = new PluginManager(CATEGORY);
 * manager.collectPlugins(Lists.newArrayList("com.tuanzhang.bomb.framework.log.lookup"));
 * Map&lt;String, PluginType&lt;?&gt;&gt; plugins = manager.getPlugins();
 * </pre>
 *
 * <p>NOTE(review): as an {@link ApplicationContextInitializer} this class presumably also needs
 * to be registered in {@code META-INF/spring.factories} — confirm.
 *
 * <p>Requires log4j-spring-boot on the classpath:
 * {@code <dependency> <groupId>org.apache.logging.log4j</groupId>
 * <artifactId>log4j-spring-boot</artifactId> <version>2.17.2</version> </dependency>}
 *
 * @author andy
 * @since 2023/3/28
 **/
@Plugin(name = "bomb", category = StrLookup.CATEGORY)
public class SpringEnvironmentLookup
implements ApplicationContextInitializer<ConfigurableApplicationContext>, StrLookup {
// Resolved once at class load; the host IP does not change for the process lifetime.
private static final String HOST_IP = Nets.getInnerIP();
// Written by the Spring startup thread in initialize(), read by logging threads in lookup():
// volatile guarantees cross-thread visibility of the assignment.
private static volatile Environment environment;
/**
 * Resolves {@code key}: {@code hostIp} maps to the cached inner IP; anything else is read
 * from the Spring environment once it is available.
 *
 * @param key lookup key from the log4j2 pattern, e.g. {@code ${bomb:spring.application.name}}
 * @return the resolved value, or {@code null} before Spring initialization / for unknown keys
 */
@Override
public String lookup(String key) {
if ("hostIp".equals(key)) {
return HOST_IP;
}
// Logging may start before the Spring context is initialized; returning null lets
// Log4j2 fall back to the lookup's default value (if any).
if (environment == null) {
return null;
}
return environment.getProperty(key);
}
/** The log event carries no extra context for this lookup; delegate to the key-only variant. */
@Override
public String lookup(LogEvent event, String key) {
return lookup(key);
}
/** Captures the Spring environment as soon as the application context is created. */
@Override
public void initialize(ConfigurableApplicationContext applicationContext) {
environment = applicationContext.getEnvironment();
}
}
spring boot启动参数
首先要启动好 skywalking 和 es
ini
-javaagent:E:\application\skywalking\skywalking-agent\skywalking-agent.jar
-Dskywalking.agent.service_name=howa1
-Dskywalking.collector.backend_service=127.0.0.1:11800
-Dskywalking.logging.resolver=JSON
Skywalking 界面
日志
es存储结构
- 默认es索引名称 sw_log-yyyymmdd
这样后续解析 分析的时候会方便点比如:
- 自定义ui skywalking的日志ui 确实用着不太喜欢
- 监控告警 内容解析json会方便很多
- 等等
skywalking日志格式分析
其实我们输出的日志虽然是 json 格式,但是被包装在 content
字段里面了。如果想打平,仅靠配置是无法做到的,我们来看下源码 org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.Builder#map2Data
ini
// Hard-coded first-level columns of a SkyWalking log record (excerpt quoted from
// org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord).
// NOTE(review): TIME_BUCKET and the enclosing AbstractLogRecord class are defined
// outside this excerpt.
public static final String ADDITIONAL_TAG_TABLE = "log_tag";
public static final String SERVICE_ID = "service_id";
public static final String SERVICE_INSTANCE_ID = "service_instance_id";
public static final String ENDPOINT_ID = "endpoint_id";
public static final String TRACE_ID = "trace_id";
public static final String TRACE_SEGMENT_ID = "trace_segment_id";
public static final String SPAN_ID = "span_id";
public static final String CONTENT_TYPE = "content_type";
public static final String CONTENT = "content";
public static final String TAGS_RAW_DATA = "tags_raw_data";
public static final String TIMESTAMP = "timestamp";
public static final String TAGS = "tags";
public static abstract class Builder<T extends AbstractLogRecord> implements StorageBuilder<T> {
// Storage map -> record: column names are the fixed constants above, which is why the
// application's JSON log body always stays nested inside the single CONTENT column
// instead of being flattened into top-level ES fields.
protected void map2Data(T record, final Convert2Entity converter) {
record.setServiceId((String) converter.get(SERVICE_ID));
record.setServiceInstanceId((String) converter.get(SERVICE_INSTANCE_ID));
record.setEndpointId((String) converter.get(ENDPOINT_ID));
record.setTraceId((String) converter.get(TRACE_ID));
record.setTraceSegmentId((String) converter.get(TRACE_SEGMENT_ID));
record.setSpanId(((Number) converter.get(SPAN_ID)).intValue());
record.setContentType(((Number) converter.get(CONTENT_TYPE)).intValue());
record.setContent(new LongText((String) converter.get(CONTENT)));
record.setTimestamp(((Number) converter.get(TIMESTAMP)).longValue());
record.setTagsRawData(converter.getBytes(TAGS_RAW_DATA));
record.setTimeBucket(((Number) converter.get(TIME_BUCKET)).longValue());
}
// Record -> storage map: the inverse of map2Data, again restricted to the fixed columns.
protected void data2Map(final T record, final Convert2Storage converter) {
converter.accept(SERVICE_ID, record.getServiceId());
converter.accept(SERVICE_INSTANCE_ID, record.getServiceInstanceId());
converter.accept(ENDPOINT_ID, record.getEndpointId());
converter.accept(TRACE_ID, record.getTraceId());
converter.accept(TRACE_SEGMENT_ID, record.getTraceSegmentId());
converter.accept(SPAN_ID, record.getSpanId());
converter.accept(TIME_BUCKET, record.getTimeBucket());
converter.accept(CONTENT_TYPE, record.getContentType());
converter.accept(CONTENT, record.getContent());
converter.accept(TIMESTAMP, record.getTimestamp());
converter.accept(TAGS_RAW_DATA, record.getTagsRawData());
converter.accept(TAGS, record.getTagsInString());
}
}
这里都是写死的一级字段。如果想要打平,需要自己二次开发、重写 map2Data
方法,这里就暂时不展开了。