sequence
-
Java developers familiar with the Spring ecosystem know that Spring's application life-cycle management and its supporting interfaces are elegant and extensible.
-
But how can a Java application that does not use Spring be started and stopped gracefully, with its life cycle managed so that it performs the appropriate actions at each phase of its run?
Overview: Start-stop hook framework for normal Java applications
Pre-knowledge:
annotation
Start-stop hook framework for normal Java applications
ApplicationStartupHook: Abstract startup hook interface
package ;
@FunctionalInterface
public interface ApplicationStartupHook {
/**
* execute the task
* @throws Exception
*/
void execute() throws Exception;
}
ApplicationStartupHookManager: Unified Management Startup Hook
package ;
import ;
import ;
ApplicationShutdownHook: Close the hook
package ;
/**
 * A task executed during application shutdown.
 *
 * <p>Functional interface so hooks can be registered as lambdas or method
 * references with the shutdown hook manager.
 */
@FunctionalInterface
public interface ApplicationShutdownHook {
/**
 * Execute the shutdown task.
 *
 * @throws Exception if the task fails; the exception propagates to the
 *                   caller that runs the registered hooks
 */
void execute() throws Exception;
}
ApplicationShutdownHookManager: Unified Management Close Hook
package ;
import ;
import ;
import ;
import ;
public class ApplicationShutdownHookManager {
private static final List<ApplicationShutdownHook> hooks = new ArrayList<>();
@Getter
private static boolean executed = false;
// Register to start the task
public static void registerHook(ApplicationShutdownHook hook) {
if (executed) {
throw new IllegalStateException("Application shutdown hooks already executed");
}
(hook);
}
// Perform all startup tasks
public static void run() throws Exception {
if (!executed) {
for (ApplicationShutdownHook hook : hooks) {
();
}
executed = true;
}
}
}
Demo application: Slf4j + Log4j2 + Log4j2 KafkaAppender + Kafka
Maven dependencies
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <!-- TODO(review): parent groupId was lost in extraction; restore the real value -->
        <groupId>com.example</groupId>
        <artifactId>demos-application-parent</artifactId>
        <version>1.0.0-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>log4j2-kafka-appender-demo-application</artifactId>
    <packaging>jar</packaging>
    <name>bdp-diagnosticbox-model</name>
    <url></url>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <lombok.version>1.18.22</lombok.version>
        <slf4j.version>1.7.30</slf4j.version>
        <log4j2.version>2.20.0</log4j2.version>
        <kafka-clients.version>2.7.2</kafka-clients.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>${lombok.version}</version>
        </dependency>
        <!-- log -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>${slf4j.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-api</artifactId>
            <version>${log4j2.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>${log4j2.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <version>${log4j2.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>slf4j-api</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-jul</artifactId>
            <!--<version>2.13.3</version>-->
            <version>${log4j2.version}</version>
            <scope>compile</scope>
        </dependency>
        <!-- log [end] -->
        <!-- kafka client -->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>${kafka-clients.version}</version>
        </dependency>
    </dependencies>
</project>
Configuration File
src/main/resources/log4j2.xml
<?xml version="1.0" encoding="UTF-8"?>
<!--<Configuration status="debug" name="demo-application" packages="">-->
<Configuration status="off">
<!-- Custom properties -->
<Properties>
<!-- Application Name -->
<Property name="">bdp-xxx-app</Property>
<!-- The host address of the application instance -->
<Property name="">${env:HOST_IP:-127.0.0.1}</Property>
<!-- Name of the application instance,default value: localInstance -->
<Property name="">${env:INSTANCE_NAME:-localInstance}</Property>
<!-- Character Set -->
<Property name="">UTF-8</Property>
<!-- Log Level,default INFO -->
<Property name="" value="${env:LOG_ACCESS:-INFO}" />
<!--<Property name="">${env:LOG_ACCESS:-INFO}</Property>-->
<!--<Property name="">${env:LOG_OPERATE:-INFO}</Property>-->
<Property name="">${}</Property>
<!-- . -->
<!--<Property name="" value="CustomPatternLayout"></Property>-->
<Property name="" value="PatternLayout"></Property>
<!-- =%d{yyyy/MM/dd HH:mm:} %-5p | %T | %t | (%C{1}.java:%L %M) | %m%n -->
<!-- [%d{yyyy/MM/dd HH:mm:}] [%traceId] [%-5p] [%t] [%C{1}.java:%L %M] %m%n -->
<!-- [%d{yyyy/MM/dd HH:mm:}] [%X{traceId}] [%-5p] [%t] [%C{1}.java:%L %M] %m%n -->
<!-- [%traceId] [${}] [system] [%d{yyyy/MM/dd HH:mm:}] [%-5p] [%t] [%C{1}] %M:%L__|%X{traceId}|__%m%n -->
<!-- [${}] [${}] [${env:HOST_IP}] [${env:CONTAINER_IP}] [%d{yyyy/MM/dd HH:mm:}] [%p] [%t] [%l] %m%n -->
<!-- ↓ sample: 2023-02-02 14:35:38,664 WARN main (:141 lambda$null$0) name: , configLevel(Level):DEBUG, effectiveLevel: DEBUG -->
<!-- %d %-5p %t (%C{1}.java:%L %M) %m%n -->
<!-- [%d %r] [%-5p] [%t] [%l] [%m]%n -->
<!-- %d{yyyy-MM-dd HH\:mm\:ss} %-5p[%t] : %m%n -->
<!-- ↓ sample: 2025-02-21 15:24:27 INFO | [2aa06a7b-a81f-469b-a0a0-679005bc35a3] | Log4jKafkaAppenderDemoEntry:36 - This is an information log -->
<!--%d{HH:mm:} %-5p [%-7t] %F:%L - %m%n -->
<!--[%-4level] | %d{YYYY-MM-dd HH:mm:ss} | [%X{REQ_ID}] | %m| ${sys:}%n-->
<!-- %d{yyyy-MM-dd HH:mm:ss} %-5p | [%X{REQ_ID}] | %c{1}:%L - %m%n -->
<!-- ${} | ${bundle:application:} | ${main:\\-logLevel} | ${main:\\-\-log\.appender\.kafka\.producer\.bootstrap\.servers} | %c{1}:%L - %m%n -->
<Property name="">
[%d{yyyy/MM/dd HH:mm:}] [%X{traceId}] [%-5p] [%t] [%C{1}.java:%L %M] %m%n
</Property>
<Property name="" value="${}" />
<!-- KafkaAppender attribute value -->
<!-- Way1: Get from environment variable -->
<!--<Property name="" value="${env:KAFKA_PRODUCER_BOOTSTRAP_SERVERS:-127.0.0.1:9092}"/>-->
<!-- Way2: from Log Framework MDC Get it in -->
<!--<Property name="" value="%X{}"/>-->
<!-- Way3: from Application main 方法启动入参Get it in -->
<Property name="" value="${main:\\-\-log\.appender\.kafka\.producer\.bootstrap\.servers}"/>
<!-- Target Appenders | Note: Attribute value(like: ConsoleAppender),Corresponding to <Appender> Tags `name` Attribute value -->
<!-- 1. Standard output/Console's Appender -->
<Property name="" value="MyConsoleAppender"/>
<!-- 2. File output System log Appender -->
<Property name="" value="MySystemFileAppender"/>
<!-- 3. File output Access class logs Appender -->
<!--<Property name="">MyAccessFileAppender</Property>-->
<!-- 4. File output Operation log Appender -->
<!--<Property name="">MyOperationFileAppender</Property>-->
<!-- 5. File output Protocol log Appender -->
<!--<Property name="">MyProtocolFileAppender</Property>-->
<!-- 6. remote Link tracking system Appender -->
<!--<Property name="">MySkyWalkingClientAppender</Property>-->
<!-- 7. remote KAFKA/ELK of Appender -->
<Property name="" value="MyKafkaAppender"/>
</Properties>
<!-- Output -->
<Appenders>
<Console name="MyConsoleAppender" target="SYSTEM_OUT">
<PatternLayout pattern="${}" />
<!-- -->
<!--<CustomPatternLayout pattern="${}" />-->
</Console>
<!--
@warn
1. This KafkaAppender is NOT recommended to be enabled in the production log4j2 xml/properties file,
   because the kafka broker servers cannot be injected dynamically from outside.
2. For point 1, use the custom Log4j2KafkaAppenderInitializer to register the KafkaAppender
   dynamically at application startup instead.
@Appender : KafkaAppender | org.apache.logging.log4j.core.appender.mom.kafka | log4j-core:2.20.0
@note
1. This appender is planned for removal in the next major Log4j version! If you rely on it,
   please contact the Log4j maintainers through the official support channels.
   (see the Log4j manual, Appenders chapter, KafkaAppender section)
2. Using the Kafka appender requires an extra runtime dependency: org.apache.kafka:kafka-clients:{version}
3. The Kafka appender's ignoreExceptions must be set to false, otherwise the FailOver appender
   cannot be triggered.
4. Make sure the `org.apache.kafka` logger's level is never DEBUG, because that would make the
   KafkaAppender log recursively.
@property
// Configuration properties
* name : the log framework's appender name
* topic : the Kafka topic name
* key:String : the key of the Kafka message (`ProducerRecord`). Supports runtime property
  substitution, evaluated in the global context.
  Recommended value: key="$${web:contextName}" | contextName is a log4j2 built-in variable
* ignoreExceptions:boolean [default: true] : if false, logging exceptions are forwarded to the
  caller of the logging statement; otherwise they are ignored.
* syncSend:boolean [default: true] : if true, the appender blocks until the Kafka server has
  acknowledged the record; otherwise it returns immediately, for lower latency and higher throughput.
// Nested elements
* Filter
* Layout
* Property[0..n] : forwarded directly to the Kafka producer; see the Kafka producer configs.
  - bootstrap.servers : this property is required
  - key.serializer / value.serializer : these properties should NOT be set (managed by the appender)
-->
<Kafka name="MyKafkaAppender" topic="flink_monitor_log" key="$${web:contextName}" syncSend="true" ignoreExceptions="false">
<!--<JsonTemplateLayout/>-->
<PatternLayout pattern="${}"/>
<Property name="" value="${}"/>
<Property name="">2000</Property>
</Kafka>
<RollingFile name="MyFailoverKafkaLogAppender" fileName="../log/failover/"
filePattern="../log/failover/request.%d{yyyy-MM-dd}.log">
<ThresholdFilter level="INFO" onMatch="ACCEPT" onMismatch="DENY"/>
<PatternLayout>
<Pattern>${}</Pattern>
</PatternLayout>
<Policies>
<TimeBasedTriggeringPolicy />
</Policies>
</RollingFile>
<!--
<Failover name="Failover" primary="kafkaLog" retryIntervalSeconds="600">
<Failovers>
<AppenderRef ref="MyFailoverKafkaLogAppender"/>
</Failovers>
</Failover>
-->
<!--
Asynchronous output | .
1. AsyncAppenderAccept othersAppenderof引用,And makeLogEvents在单独ofThreadWrite them on。
2. default情况下,AsyncAppenderuse ,It does not require any external libraries。
请Note意,多线程应用程序在usethisappenderBe careful when:阻塞队列容易受到锁争用of影响,并且我们of test show,Performance may become worse when more threads are logged simultaneously。
考虑use无锁异步记录器以获得最佳性能。
-->
<!-- <AsyncLogger name="kafkaAyncLogger" level="INFO" additivity="false">
<appender-ref ref="Failover"/>
</AsyncLogger>-->
</Appenders>
<!-- Logger-->
<Loggers>
<!-- definition RootLogger wait Global configuration(Do not modify at will) -->
<!-- rootLogger, Root logger,所有记录器of父辈 | 指定根日志of级别 | All < Trace < Debug < Info < Warn < Error < Fatal < OFF -->
<Root level="${}"> <!-- ${} -->
<!-- 2.17.2 版本以下通过这种Way将 root and AppenderRelated / 2.17.2 版本以上有更简便of写法 -->
<!-- =${} -->
<AppenderRef ref="${}" level="INFO" />
<!-- =${} -->
<!-- <AppenderRef ref="${}"/> --><!-- MyKafkaAppender -->
</Root>
<!-- Specify individual Class of Logger (Can be modified at will,It is recommended to be nacos Modify) -->
<!-- KafkaAppender | .
1. Make sure not to let Logger of日志级别为 DEBUG,because为这将导致Recursive logging
2. Remember to configure additivity The property is set tofalse
-->
<Logger name="" level="WARN" additivity="false">
<AppenderRef ref="${}"/>
</Logger>
</Loggers>
</Configuration>
Log4j2KafkaAppenderInitializer implements ApplicationStartupHook: Responsible for implementing specific startup hooks
package ;
import ;
import .slf4j.Slf4j;
import ;
import .;
import .;
import .;
import .;
import .;
import .;
import .;
import .;
import .;
import ;
import ;
import org.;
import ;
import ;
import ;
import ;
import ;
/**
* @description Based on the log4j2 log framework, when the program starts, KafkaAppender is dynamically added according to the program's startup parameters (kafka brokers address)
* @refrence-doc
* [1] Log4j2 configuration logging is sent to kafka - CSDN | /u010454030/article/details/132589450 [Recommended]
* [2] Dynamic log configuration of Log4j2 using code - CSDN | /scruffybear/article/details/130230414 [Recommended]
* [3] Apache - Kafka Appender [Recommended]
* /log4j//manual/#KafkaAppender
* /log4j//manual/appenders/#KafkaAppender
* {@link . }
* -----
* [4] Log4j2 - Dynamic Generation Appender - Blog Park | /yulinlewis/p/
* [5] springboot dynamically adds the Appender for log4j2 - CSDN | /qq_25379811/article/details/127620062
* [6] Dynamic Read Configuration - CSDN | /xiaokanfuchen86/article/details/126695010 [Recommended]
* /log4j//manual/#global-context [Recommended]
* @gpt-promt
*/
@Slf4j
public class Log4j2KafkaAppenderInitializer implements ApplicationStartupHook {
@Getter
private Properties applicationProperties;
public Log4j2KafkaAppenderInitializer(Properties applicationProperties) {
= applicationProperties;
}
@Override
public void execute() throws Exception {
("Initializing {} ...", ().getCanonicalName());
LoggerContext ctx = (LoggerContext) (false);
Configuration config = ();
Appender kafkaAppender = createKafkaAppender(ctx, config, applicationProperties);
();//Prevent errors: Attempted to append to non-started appender testName
Level level = getLevel(applicationProperties);
().addAppender(kafkaAppender, level, null);// Add Appender to the configuration
();
("Initialized {} ...", ().getCanonicalName());
}
/**
* @note
* 1. required properties:
* 1. {@link Constants.Log4j2KafkaAppender#LEVEL_PARAM}
* @param applicationProperties
* @return
*/
private static Level getLevel(Properties applicationProperties) {
Level level = null;
String levelStr = applicationProperties == null ? Constants.Log4j2KafkaAppender.LEVEL_DEFAULT : ( Constants.Log4j2KafkaAppender.LEVEL_PARAM );
levelStr = (levelStr == null || ("") ) ? Constants.Log4j2KafkaAppender.LEVEL_DEFAULT : ();
level = (levelStr);
("user config's `{}`'s log level: {}", (), levelStr);
return level;
}
/**
* create a kafka appender base on log4j2 framework
* @reference-doc
* 1. /log4j//manual/appenders/#KafkaAppender
* @note
* 1. required properties:
* 1. {@link Constants.Log4j2KafkaAppender#KAFKA_PRODUCER_TOPIC_PARAM}
* 2. {@link ProducerConfig#BOOTSTRAP_SERVERS_CONFIG }
* 2. optional properties:
* {@link ProducerConfig } 's Config Properties
* @return
*/
private static Appender createKafkaAppender(LoggerContext loggerContext,Configuration configuration, Properties applicationProperties) {
KafkaAppender kafkaAppender = null;
if(loggerContext == null){
loggerContext = (LoggerContext) (false);
}
if(configuration == null){
configuration = ();
}
final PatternLayout layout = ()
.withCharset(("UTF-8"))
.withConfiguration(configuration)
.withPattern("%d %p %c{1.} [%t] %m%n").build();
Filter filter = null;
String topic = (Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_TOPIC_PARAM);
String appenderName = (Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_TOPIC_PARAM) + "Log4J2KafkaAppender";
Property [] propertyArray = propertiesToPropertyArray(applicationProperties);
String messageKey = ( Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_KEY_PARAM );
Boolean isIgnoreExceptions = ((Constants.Log4j2KafkaAppender.KAFKA_APPENDER_IGNORE_EXCEPTIONS_PARAM, Constants.Log4j2KafkaAppender.KAFKA_APPENDER_IGNORE_EXCEPTIONS_DEFAULT));
Boolean syncSend = ((Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_SYNC_SEND_PARAM, Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_SYNC_SEND_DEFAULT));
Boolean sendEventTimestamp = ((Constants.Log4j2KafkaAppender.KAFKA_APPENDER_SEND_EVENT_TIMESTAMP_PARAM, Constants.Log4j2KafkaAppender.KAFKA_APPENDER_SEND_EVENT_TIMESTAMP_DEFAULT));
//kafkaAppender = (layout, filter, appenderName, isIgnoreExceptions, topic, propertyArray, configuration, key);//This method does not support passing in syncSend parameters
//kafkaAppender = new KafkaAppender(name, layout, filter, isIgnoreExceptions, kafkaManager, getPropertyArray(), getRetryCount());//This method is not supported because the constructor is private
kafkaAppender = ()//This method √
.setName(appenderName)
.setConfiguration(configuration)
.setPropertyArray(propertyArray)
.setFilter(filter)
.setLayout(layout)
.setIgnoreExceptions(isIgnoreExceptions)
.setTopic(topic)
.setKey(messageKey)
.setSendEventTimestamp(sendEventTimestamp)
.setSyncSend(syncSend)
.setRetryCount(3)
.build();
return kafkaAppender; // needs to be replaced with the actual Appender to create code
}
/**
* Java Properties to Log4j2 Property []
* @return
*/
public static Property [] propertiesToPropertyArray(Properties properties){
if(properties == null){
return new Property[] {};
}
Property [] propertyArray = new Property[ () + 1];
int i = 0;
for(<Object, Object> entry : () ) {
Property property = ((String) (), (String) ());
propertyArray[i] = property;
i++;
}
/**
* Inject . Required Required Parameters {@link ProducerConfig.BOOTSTRAP_SERVERS_CONFIG}
*/
String kafkaBrokerServers = (Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM);
if(kafkaBrokerServers != null && (!().equals("")) ){
propertyArray[i] = ( ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBrokerServers);
} else {
throw new RuntimeException(
("The Property `%s` must be not empty for `%s`!"
, Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM
, ()
)
);
}
return propertyArray;
}
}
Slf4Initializer implements ApplicationStartupHook: The startup hook responsible for the specific implementation
package ;
import ;
import .slf4j.Slf4j;
import ;
import ;
import org.;
import ;
import ;
@Slf4j
public class Slf4Initializer implements ApplicationStartupHook {
@Getter
private Properties applicationProperties;
public Slf4Initializer(Properties applicationProperties) {
= applicationProperties;
}
@Override
public void execute() throws Exception {
("Initializing {} ...", ().getCanonicalName());
//Set the kafka host address
String kafkaProducerBootstrapServers = (Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM);
kafkaProducerBootstrapServers = (kafkaProducerBootstrapServers).<RuntimeException> orElseThrow(() -> {
throw new RuntimeException(("`{}` must be not empty!", Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM));
});
(Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM, kafkaProducerBootstrapServers);
("MDC | {} : {}", Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM, (Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM) );
("Initialized {} ...", ().getCanonicalName());
}
}
Slf4Finalizer implements ApplicationShutdownHook: The hook responsible for the specific implementation
package ;
import ;
import .slf4j.Slf4j;
import ;
import ;
import org.;
import ;
@Slf4j
public class Slf4Finalizer implements ApplicationShutdownHook {
@Getter
private Properties applicationProperties;
public Slf4Finalizer(Properties applicationProperties) {
= applicationProperties;
}
@Override
public void execute() throws Exception {
("Finalizing {} ...", ());
// Clean up MDC
("clear MDC before | {} : {}", Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM, (Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM) );
//();
( Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM );// or clean up only the required properties
("clear MDC after | {} : {}", Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM, (Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM) );
("Finalized {} ...", ());
}
}
Log4jKafkaAppenderDemoEntry
package ;
import .;
import ;
import ;
import .Slf4Finalizer;
import ;
import .Log4j2KafkaAppenderInitializer;
import .Slf4Initializer;
import org.;
import org.;
import ;
public class Log4jKafkaAppenderDemoEntry {
private static final Logger logger = ();
private static final String APPLICATION_NAME = "Log4jKafkaAppenderDemoApplication";
public static void main(String[] args) throws Exception {
// Dynamically obtain configuration from nacos, etc. (This can be regarded as being simulated here)
Properties applicationProperties = new Properties();
(Constants.Log4j2KafkaAppender.LEVEL_PARAM, "WARN");
(Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_BOOTSTRAP_SERVERS_PARAM, "127.0.0.1:9092");
(Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_TOPIC_PARAM, "flink_monitor_log");
(Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_KEY_PARAM, APPLICATION_NAME);
(Constants.Log4j2KafkaAppender.KAFKA_PRODUCER_SYNC_SEND_PARAM, "true");
(Constants.Log4j2KafkaAppender.KAFKA_APPENDER_IGNORE_EXCEPTIONS_PARAM, "false");
enableLog4j2MainLookup(args);//Optional steps (not required)
runStartupHooks(applicationProperties);//Run Startup Hook
// Test logs at different levels
("This is an information log");
("This is a warning log");
try {
throw new RuntimeException("test exception");
} catch (Exception e) {
("Error occurred", e);
}
//Stop the hook
runShutdownHooks(applicationProperties);
}
public static void enableLog4j2MainLookup(String [] args){
/**
* If the pattern of Appender in log4j2.[xml/properties/yaml] wants to use `${main:\\-logLevel}`, the following code needs to be enabled
*/
try {
(".")
.getDeclaredMethod("setMainArguments", String[].class)
.invoke(null, (Object) args);
} catch (final ReflectiveOperationException e) {
// Log4j Core is not used.
}
}
public static void runStartupHooks(Properties applicationProperties) throws Exception {
( new Slf4Initializer(applicationProperties) );
( new Log4j2KafkaAppenderInitializer(applicationProperties) );
();
}
public static void runShutdownHooks(Properties applicationProperties) throws Exception {
( new Slf4Finalizer(applicationProperties) );
();
}
}
X References