logback.xml



<?xml version="1.0" encoding="UTF-8"?>
<!--
    Everything in logback.xml sits inside the <configuration> element. It must contain
    at least one <appender>, which defines where logs are written and in what format.
    <root> is the default logger: its threshold is set with the level attribute, and
    <appender-ref> wires it to the appenders declared above it.
-->
<!-- Rescan this configuration file every 10 seconds -->
<configuration scan="true" scanPeriod="10 seconds" debug="true">

    <!-- Log output directory -->
    <property name="LOG_HOME" value="${catalina.home}/logs/loan"/>

    <!-- Console output format -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <!-- <pattern>[%d{yyyy-MM-dd HH:mm:ss,SSS\} %-5p] %-20c - %m%n</pattern> -->
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- One log file per logged-in user, keyed by the userId MDC value -->
    <appender name="SIFT" class="ch.qos.logback.classic.sift.SiftingAppender">
        <discriminator>
            <Key>userId</Key>
            <DefaultValue>unknown</DefaultValue>
        </discriminator>
        <sift>
            <appender name="FILE-${userId}" class="ch.qos.logback.core.rolling.RollingFileAppender">
                <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
                    <fileNamePattern>${LOG_HOME}/${userId}/${HOSTNAME}_%d{yyyyMMdd}.%i.log</fileNamePattern>
                    <!-- Roll over daily; keep 7 days of history, older files are removed -->
                    <maxHistory>7</maxHistory>
                    <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                        <maxFileSize>20MB</maxFileSize>
                    </timeBasedFileNamingAndTriggeringPolicy>
                </rollingPolicy>
                <Append>false</Append>
                <encoder>
                    <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
                    <charset>UTF-8</charset>
                </encoder>
            </appender>
        </sift>
    </appender>

    <!-- Collect ERROR logs into a single shared file -->
    <appender name="ERRORAPPENDER" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${LOG_HOME}/error/${HOSTNAME}_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
            <!-- Roll over daily; keep 7 days of history, older files are removed -->
            <maxHistory>7</maxHistory>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>20MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Accept ERROR only -->
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- Output location and format for access/system logs -->
    <appender name="INTERFACE" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <FileNamePattern>${LOG_HOME}/interface/${HOSTNAME}_%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <!-- Roll over daily; keep 7 days of history, older files are removed -->
            <maxHistory>7</maxHistory>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>20MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <encoder>
            <pattern>[%date] [%thread] [%level] %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Accept INFO only -->
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- URL access log -->
    <appender name="URLLOG" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <FileNamePattern>${LOG_HOME}/URL/${HOSTNAME}_%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <maxHistory>7</maxHistory>
            <!-- Maximum file size per log file -->
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>20MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <encoder>
            <pattern>[%thread] [%date] [%level] %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Accept INFO only -->
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- access url: MQ access log, split per source system via the SYS_FLAG MDC value -->
    <appender name="mq_biz_url" class="ch.qos.logback.classic.sift.SiftingAppender">
        <discriminator>
            <Key>SYS_FLAG</Key>
            <DefaultValue>unknown</DefaultValue>
        </discriminator>
        <sift>
            <appender name="FILE-${SYS_FLAG}" class="ch.qos.logback.core.rolling.RollingFileAppender">
                <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
                    <fileNamePattern>${LOG_HOME}/access/${SYS_FLAG}_%d{yyyyMMdd}.%i.log</fileNamePattern>
                    <maxHistory>7</maxHistory>
                    <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                        <maxFileSize>20MB</maxFileSize>
                    </timeBasedFileNamingAndTriggeringPolicy>
                </rollingPolicy>
                <Append>false</Append>
                <encoder>
                    <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %msg%n</pattern>
                    <charset>UTF-8</charset>
                </encoder>
            </appender>
        </sift>
    </appender>

    <!-- Audit URL log -->
    <appender name="ADUIT_URL_APPENDER" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${LOG_HOME}/aduit_logs/${HOSTNAME}_%d{yyyyMMdd}.%i.log</fileNamePattern>
            <maxHistory>7</maxHistory>
            <!-- Maximum file size per log file -->
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>20MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Accept INFO only -->
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- Centralized log server (currently disabled) -->
    <!-- <appender name="jinke_logs_udp" class="com.papertrailapp.logback.Syslog4jAppender"> -->
    <!--     <layout class="com.jk.platform.core.common.JsonLoggerPatternLayout"> -->
    <!--         <pattern>{{xdcore}} [%d{yyyy-MM-dd HH:mm:ss.SSS}][%p][%X{sessionId}][%X{traceId}][%X{cip}:%X{cPort}][%X{sip}:%X{sPort}][][%X{userId}][%t|%logger{36}|%M|%X{ctime}] - %message%n</pattern> -->
    <!--     </layout> -->
    <!--     <syslogConfig class="org.productivity.java.syslog4j.impl.net.udp.UDPNetSyslogConfig"> -->
    <!--         <host>192.168.1.161</host> -->
    <!--         <port>514</port> -->
    <!--         <ident>xdcore-108</ident> -->
    <!--         make the logger synchronous for tests -->
    <!--         <threaded>false</threaded> -->
    <!--     </syslogConfig> -->
    <!--     <filter class="ch.qos.logback.classic.filter.LevelFilter"> -->
    <!--         <level>WARN</level> -->
    <!--         <onMatch>ACCEPT</onMatch> -->
    <!--         <onMismatch>DENY</onMismatch> -->
    <!--     </filter> -->
    <!-- </appender> -->

    <!-- Route application exceptions to the dedicated error file -->
    <logger name="com.jk" additivity="true">
        <level value="DEBUG"/>
        <appender-ref ref="ERRORAPPENDER"/>
    </logger>

    <logger name="com.jk.modules.platform.sysauth.session.JedisShiroSessionRepository" additivity="false">
        <level value="ERROR"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
        <appender-ref ref="ERRORAPPENDER"/>
    </logger>

    <!-- Class that produces the access log -->
    <logger name="com.jk.modules.platform.sysauth.interceptor.ResourceInterceptor" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="INTERFACE"/>
    </logger>

    <!-- URL access log -->
    <logger name="com.jk.modules.common.URLInterceptor" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="URLLOG"/>
    </logger>

    <!-- MQ consumer client log -->
    <logger name="com.jk.modules.plaform.mq.consumer.DealInfoMsgListener" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="mq_biz_url"/>
    </logger>

    <logger name="com.jk.modules.platform.common.interceptor.AuditURLInterceptor" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="ADUIT_URL_APPENDER"/>
    </logger>

    <logger name="com.jk.modules.platform.sysauth.session" additivity="false">
        <level value="DEBUG"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>

    <!-- +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -->
    <!-- ++++++++++++++++++++++ Third-party library logging ++++++++++++++++++++++ -->
    <!-- +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -->

    <!-- SQL logging -->
    <logger name="java.sql.Connection" additivity="false">
        <level value="DEBUG"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="java.sql.Statement" additivity="false">
        <level value="DEBUG"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="java.sql.PreparedStatement" additivity="false">
        <level value="DEBUG"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="com.alibaba.druid" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>

    <!-- Spring logging -->
    <logger name="org.springframework" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.mybatis.spring.mapper" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>

    <!-- MyBatis logging -->
    <logger name="org.apache.ibatis" additivity="false">
        <level value="DEBUG"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.apache.ibatis.io.ResolverUtil" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>

    <!-- ZooKeeper logging -->
    <logger name="org.apache.zookeeper" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>

    <!-- Dubbo logging -->
    <logger name="com.alibaba.dubbo" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>

    <logger name="org.apache.shiro.web.filter.mgt.DefaultFilterChainManager" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.apache.commons.beanutils.converters" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="pushlet" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>

    <!-- Show parameters for Hibernate SQL -->
    <logger name="org.hibernate.cfg.HbmBinder" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.hibernate.hql" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.hibernate.loader" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.hibernate.cfg" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.hibernate.persister.collection.AbstractCollectionPersister" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.hibernate.persister.entity.AbstractEntityPersister" additivity="false">
        <level value="INFO"/>
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <!--
    <logger name="org.hibernate.type.descriptor.sql.BasicBinder" level="INFO" additivity="false">
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.hibernate.type.descriptor.sql.BasicExtractor" level="INFO" additivity="false">
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.hibernate.SQL" level="INFO" additivity="false">
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.hibernate.engine.QueryParameters" level="INFO" additivity="false">
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    <logger name="org.hibernate.engine.query.HQLQueryPlan" level="INFO" additivity="false">
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
    </logger>
    -->

    <root level="debug">
        <appender-ref ref="STDOUT"/>
        <appender-ref ref="SIFT"/>
        <!-- <appender-ref ref="jinke_logs_udp"/> -->
    </root>
</configuration>
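
Both SiftingAppender blocks above (SIFT and mq_biz_url) rely on logback's default MDC-based discriminator, so the application must put the matching key (userId or SYS_FLAG) into the MDC of the current thread before logging; otherwise everything falls into the "unknown" bucket configured by DefaultValue. Below is a minimal sketch of that wiring. The class name and the sample values are illustrative only and not part of the original configuration; in this code base the userId would typically be set by a login interceptor, and SYS_FLAG inside the MQ listener (com.jk.modules.plaform.mq.consumer.DealInfoMsgListener), whose logger is the one actually routed to mq_biz_url.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MdcRoutingSketch {

    private static final Logger log = LoggerFactory.getLogger(MdcRoutingSketch.class);

    public static void main(String[] args) {
        // SIFT appender: discriminator key "userId".
        // With the value below, output goes to ${LOG_HOME}/1001/${HOSTNAME}_yyyyMMdd.0.log;
        // without it, the DefaultValue "unknown" directory is used.
        MDC.put("userId", "1001");           // illustrative value
        try {
            log.info("handled request for current user");
        } finally {
            MDC.remove("userId");            // MDC is thread-local; always clean up
        }

        // mq_biz_url appender: discriminator key "SYS_FLAG".
        // Only loggers wired to mq_biz_url (the DealInfoMsgListener logger above)
        // actually write to ${LOG_HOME}/access/; this only shows the MDC pattern.
        MDC.put("SYS_FLAG", "orderSystem");  // illustrative value
        try {
            log.info("consumed MQ message");
        } finally {
            MDC.remove("SYS_FLAG");
        }
    }
}

In a servlet environment the same put/remove pair usually lives in a Filter or HandlerInterceptor (preHandle/afterCompletion), so every log statement on that request thread is tagged consistently.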
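Because the configuration sets scan="true", scanPeriod="10 seconds" and debug="true", logback re-reads this file every 10 seconds and reports its internal status (parse errors, missing appender-refs, file-creation problems) on the console. The same status can also be dumped on demand, which helps confirm that a rescan actually picked up an edit. A minimal sketch, assuming logback-classic is the SLF4J binding on the classpath (the class name is illustrative):

import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.util.StatusPrinter;
import org.slf4j.LoggerFactory;

public class LogbackStatusDump {
    public static void main(String[] args) {
        // With logback-classic bound to SLF4J, the ILoggerFactory is logback's LoggerContext.
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        // Print the status events collected while parsing logback.xml.
        StatusPrinter.print(context);
    }
}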