use of org.apache.logging.log4j.core.config.plugins.util.PluginType in project logging-log4j2 by apache.
the class TypeConverterRegistry method loadKnownTypeConverters.
private void loadKnownTypeConverters(final Collection<PluginType<?>> knownTypes) {
    for (final PluginType<?> knownType : knownTypes) {
        final Class<?> clazz = knownType.getPluginClass();
        if (TypeConverter.class.isAssignableFrom(clazz)) {
            @SuppressWarnings("rawtypes")
            final Class<? extends TypeConverter> pluginClass = clazz.asSubclass(TypeConverter.class);
            final Type conversionType = getTypeConverterSupportedType(pluginClass);
            final TypeConverter<?> converter = ReflectionUtil.instantiate(pluginClass);
            if (registry.putIfAbsent(conversionType, converter) != null) {
                LOGGER.warn("Found a TypeConverter [{}] for type [{}] that already exists.", converter, conversionType);
            }
        }
    }
}
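For context, the loader above only registers classes that are published as Log4j plugins in the TypeConverter category. A minimal sketch of such a converter follows; the class name and target type are hypothetical, but @Plugin, TypeConverters.CATEGORY and the TypeConverter interface are the real log4j-core APIs.

import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.convert.TypeConverter;
import org.apache.logging.log4j.core.config.plugins.convert.TypeConverters;

// Hypothetical converter: splits a comma-separated string into a String array.
// loadKnownTypeConverters discovers it because its category is TypeConverters.CATEGORY.
@Plugin(name = "StringArray", category = TypeConverters.CATEGORY)
public class StringArrayConverter implements TypeConverter<String[]> {
    @Override
    public String[] convert(final String s) {
        return s.split("\\s*,\\s*");
    }
}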
use of org.apache.logging.log4j.core.config.plugins.util.PluginType in project x-pipe by ctripcorp.
the class LogTest method testPlugin.
@Test
public void testPlugin() {
    PluginManager pm = new PluginManager(PatternConverter.CATEGORY);
    pm.collectPlugins();
    for (Entry<String, PluginType<?>> entry : pm.getPlugins().entrySet()) {
        logger.info("{} : {}", entry.getKey(), entry.getValue());
    }
    logger.error("[testPlugin]", new IOException("io exception message..."));
}
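As a follow-up, the collected map is keyed by the lower-cased plugin name, so an individual PluginType can be looked up directly. A small sketch of that lookup (the "Core" category and "console" key are standard log4j-core values, but this snippet is illustrative and not part of the test above):

PluginManager coreManager = new PluginManager("Core"); // category used by appenders, layouts, filters, ...
coreManager.collectPlugins();
PluginType<?> consoleType = coreManager.getPlugins().get("console"); // keys are lower-cased plugin names
if (consoleType != null) {
    logger.info("console appender class: {}", consoleType.getPluginClass().getName());
}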
use of org.apache.logging.log4j.core.config.plugins.util.PluginType in project hive by apache.
the class LogDivertAppender method registerRoutingAppender.
/**
* Programmatically register a routing appender to the Log4J configuration, which
* automatically writes the log of each query to an individual file.
* The equivalent property configuration is as follows:
* # queryId based routing file appender
* appender.query-routing.type = Routing
* appender.query-routing.name = query-routing
* appender.query-routing.routes.type = Routes
* appender.query-routing.routes.pattern = $${ctx:queryId}
* # default route
* appender.query-routing.routes.route-default.type = Route
* appender.query-routing.routes.route-default.key = $${ctx:queryId}
* appender.query-routing.routes.route-default.app.type = null
* appender.query-routing.routes.route-default.app.name = Null
* # queryId based route
* appender.query-routing.routes.route-mdc.type = Route
* appender.query-routing.routes.route-mdc.name = IrrelevantName-query-routing
* appender.query-routing.routes.route-mdc.app.type = RandomAccessFile
* appender.query-routing.routes.route-mdc.app.name = query-file-appender
* appender.query-routing.routes.route-mdc.app.fileName = ${sys:hive.log.dir}/${ctx:sessionId}/${ctx:queryId}
* appender.query-routing.routes.route-mdc.app.layout.type = PatternLayout
* appender.query-routing.routes.route-mdc.app.layout.pattern = %d{ISO8601} %5p %c{2}: %m%n
* @param conf the configuration for HiveServer2 instance
*/
public static void registerRoutingAppender(org.apache.hadoop.conf.Configuration conf) {
    if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false)) {
        // Spare some resources: do not register the logger if operation logging is not enabled.
        return;
    }
    String loggingLevel = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL);
    OperationLog.LoggingLevel loggingMode = OperationLog.getLoggingLevel(loggingLevel);
    String layout = loggingMode == OperationLog.LoggingLevel.VERBOSE ? verboseLayout : nonVerboseLayout;
    // Create NullAppender
    PluginEntry nullEntry = new PluginEntry();
    nullEntry.setClassName(NullAppender.class.getName());
    nullEntry.setKey("null");
    nullEntry.setName("appender");
    PluginType<NullAppender> nullChildType = new PluginType<NullAppender>(nullEntry, NullAppender.class, "appender");
    Node nullChildNode = new Node(null, "Null", nullChildType);
    // Create default route
    PluginEntry defaultEntry = new PluginEntry();
    defaultEntry.setClassName(Route.class.getName());
    defaultEntry.setKey("route");
    defaultEntry.setName("Route");
    PluginType<Route> defaultType = new PluginType<Route>(defaultEntry, Route.class, "Route");
    Node nullNode = new Node(null, "Route", defaultType);
    nullNode.getChildren().add(nullChildNode);
    Route defaultRoute = Route.createRoute(null, "${ctx:queryId}", nullNode);
    // Create queryId based route
    PluginEntry entry = new PluginEntry();
    entry.setClassName(Route.class.getName());
    entry.setKey("route");
    entry.setName("Route");
    PluginType<Route> type = new PluginType<Route>(entry, Route.class, "Route");
    Node node = new Node(null, "Route", type);
    PluginEntry childEntry = new PluginEntry();
    childEntry.setClassName(HushableRandomAccessFileAppender.class.getName());
    childEntry.setKey("HushableMutableRandomAccess");
    childEntry.setName("appender");
    PluginType<HushableRandomAccessFileAppender> childType = new PluginType<>(childEntry, HushableRandomAccessFileAppender.class, "appender");
    Node childNode = new Node(node, "HushableMutableRandomAccess", childType);
    childNode.getAttributes().put("name", "query-file-appender");
    childNode.getAttributes().put("fileName", "${ctx:operationLogLocation}/${ctx:sessionId}/${ctx:queryId}");
    node.getChildren().add(childNode);
    PluginEntry filterEntry = new PluginEntry();
    filterEntry.setClassName(NameFilter.class.getName());
    filterEntry.setKey("namefilter");
    filterEntry.setName("namefilter");
    PluginType<NameFilter> filterType = new PluginType<>(filterEntry, NameFilter.class, "filter");
    Node filterNode = new Node(childNode, "NameFilter", filterType);
    filterNode.getAttributes().put("loggingLevel", loggingMode.name());
    childNode.getChildren().add(filterNode);
    PluginEntry layoutEntry = new PluginEntry();
    layoutEntry.setClassName(PatternLayout.class.getName());
    layoutEntry.setKey("patternlayout");
    layoutEntry.setName("layout");
    PluginType<PatternLayout> layoutType = new PluginType<>(layoutEntry, PatternLayout.class, "layout");
    Node layoutNode = new Node(childNode, "PatternLayout", layoutType);
    layoutNode.getAttributes().put("pattern", layout);
    childNode.getChildren().add(layoutNode);
    Route mdcRoute = Route.createRoute(null, null, node);
    Routes routes = Routes.createRoutes("${ctx:queryId}", defaultRoute, mdcRoute);
    LoggerContext context = (LoggerContext) LogManager.getContext(false);
    Configuration configuration = context.getConfiguration();
    String timeToLive = String.valueOf(HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_SERVER2_OPERATION_LOG_PURGEPOLICY_TIMETOLIVE, TimeUnit.SECONDS));
    PurgePolicy purgePolicy = IdlePurgePolicy.createPurgePolicy(timeToLive, null, "SECONDS", configuration);
    // Hack: due to the (non-standard) way that log4j configuration is extended to introduce the routing appender
    // the life-cycle methods are not called as expected leading to initialization problems (such as the scheduler)
    configuration.getScheduler().incrementScheduledItems();
    RoutingAppender routingAppender = RoutingAppender.createAppender(QUERY_ROUTING_APPENDER, "true", routes, configuration, null, purgePolicy, null);
    LoggerConfig loggerConfig = configuration.getRootLogger();
    loggerConfig.addAppender(routingAppender, null, null);
    context.updateLoggers();
    routingAppender.start();
}
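The routing pattern ${ctx:queryId} is resolved against the Log4j ThreadContext (MDC), so log events are only diverted to a per-query file once the executing thread has populated those keys. A hedged sketch of the caller side follows: the key names come from the fileName pattern above, while LOG and the sessionId/queryId/operationLogDir variables are placeholders for Hive's session plumbing, which is not shown here.

import org.apache.logging.log4j.ThreadContext;

// Before running a query, publish the routing keys on the current thread.
ThreadContext.put("sessionId", sessionId);
ThreadContext.put("queryId", queryId);
ThreadContext.put("operationLogLocation", operationLogDir);

// Anything logged on this thread now goes through the queryId based route.
LOG.info("query started");

// When the query finishes, clear the keys so later events fall back to the default (Null) route.
ThreadContext.remove("operationLogLocation");
ThreadContext.remove("queryId");
ThreadContext.remove("sessionId");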
use of org.apache.logging.log4j.core.config.plugins.util.PluginType in project hive by apache.
the class LogDivertAppenderForTest method registerRoutingAppenderIfInTest.
/**
* If HIVE_IN_TEST is set, then programmatically register a routing appender to the Log4J
* configuration, which automatically writes the test log of each query to an individual file.
* The equivalent property configuration is as follows:
* # queryId based routing file appender
* appender.test-query-routing.type = Routing
* appender.test-query-routing.name = test-query-routing
* appender.test-query-routing.routes.type = Routes
* appender.test-query-routing.routes.pattern = $${ctx:queryId}
* # default route
* appender.test-query-routing.routes.test-route-default.type = Route
* appender.test-query-routing.routes.test-route-default.key = $${ctx:queryId}
* appender.test-query-routing.routes.test-route-default.app.type = NullAppender
* appender.test-query-routing.routes.test-route-default.app.name = test-null-appender
* # queryId based route
* appender.test-query-routing.routes.test-route-mdc.type = Route
* appender.test-query-routing.routes.test-route-mdc.name = test-query-routing
* appender.test-query-routing.routes.test-route-mdc.app.type = RandomAccessFile
* appender.test-query-routing.routes.test-route-mdc.app.name = test-query-file-appender
* appender.test-query-routing.routes.test-route-mdc.app.fileName = ${sys:hive.log.dir}/${ctx:sessionId}/${ctx:queryId}.test
* appender.test-query-routing.routes.test-route-mdc.app.layout.type = PatternLayout
* appender.test-query-routing.routes.test-route-mdc.app.layout.pattern = %d{ISO8601} %5p %c{2}: %m%n
* appender.test-query-routing.routes.test-route-mdc.app.filter.type = TestFilter
* @param conf the configuration for HiveServer2 instance
*/
public static void registerRoutingAppenderIfInTest(org.apache.hadoop.conf.Configuration conf) {
    if (!conf.getBoolean(HiveConf.ConfVars.HIVE_IN_TEST.varname, HiveConf.ConfVars.HIVE_IN_TEST.defaultBoolVal)) {
        // If not in test mode, do not create the appender.
        return;
    }
    String logLocation = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION);
    // Create test-null-appender to drop events without queryId
    PluginEntry nullAppenderEntry = new PluginEntry();
    nullAppenderEntry.setClassName(NullAppender.class.getName());
    PluginType<NullAppender> nullAppenderType = new PluginType<>(nullAppenderEntry, NullAppender.class, "appender");
    Node nullAppenderChildNode = new Node(null, "test-null-appender", nullAppenderType);
    // Create default route where events go without queryId
    PluginEntry defaultRouteEntry = new PluginEntry();
    defaultRouteEntry.setClassName(Route.class.getName());
    PluginType<Route> defaultRouteType = new PluginType<>(defaultRouteEntry, Route.class, "");
    Node defaultRouteNode = new Node(null, "test-route-default", defaultRouteType);
    // Add the test-null-appender to the default route
    defaultRouteNode.getChildren().add(nullAppenderChildNode);
    // Create queryId based route
    PluginEntry queryIdRouteEntry = new PluginEntry();
    queryIdRouteEntry.setClassName(Route.class.getName());
    PluginType<Route> queryIdRouteType = new PluginType<>(queryIdRouteEntry, Route.class, "");
    Node queryIdRouteNode = new Node(null, "test-route-mdc", queryIdRouteType);
    // Create the queryId appender for the queryId route
    PluginEntry queryIdAppenderEntry = new PluginEntry();
    queryIdAppenderEntry.setClassName(HushableRandomAccessFileAppender.class.getName());
    PluginType<HushableRandomAccessFileAppender> queryIdAppenderType = new PluginType<>(queryIdAppenderEntry, HushableRandomAccessFileAppender.class, "appender");
    Node queryIdAppenderNode = new Node(queryIdRouteNode, "test-query-file-appender", queryIdAppenderType);
    queryIdAppenderNode.getAttributes().put("fileName", logLocation + "/${ctx:sessionId}/${ctx:queryId}.test");
    queryIdAppenderNode.getAttributes().put("name", "test-query-file-appender");
    // Add the queryId appender to the queryId based route
    queryIdRouteNode.getChildren().add(queryIdAppenderNode);
    // Create the filter for the queryId appender
    PluginEntry filterEntry = new PluginEntry();
    filterEntry.setClassName(TestFilter.class.getName());
    PluginType<TestFilter> filterType = new PluginType<>(filterEntry, TestFilter.class, "");
    Node filterNode = new Node(queryIdAppenderNode, "test-filter", filterType);
    // Add the filter to the queryId appender
    queryIdAppenderNode.getChildren().add(filterNode);
    // Create the layout for the queryId appender
    PluginEntry layoutEntry = new PluginEntry();
    layoutEntry.setClassName(PatternLayout.class.getName());
    PluginType<PatternLayout> layoutType = new PluginType<>(layoutEntry, PatternLayout.class, "");
    Node layoutNode = new Node(queryIdAppenderNode, "PatternLayout", layoutType);
    layoutNode.getAttributes().put("pattern", LogDivertAppender.nonVerboseLayout);
    // Add the layout to the queryId appender
    queryIdAppenderNode.getChildren().add(layoutNode);
    // Create the route objects based on the Nodes
    Route defaultRoute = Route.createRoute(null, "${ctx:queryId}", defaultRouteNode);
    Route mdcRoute = Route.createRoute(null, null, queryIdRouteNode);
    // Create the routes group
    Routes routes = Routes.createRoutes("${ctx:queryId}", defaultRoute, mdcRoute);
    LoggerContext context = (LoggerContext) LogManager.getContext(false);
    Configuration configuration = context.getConfiguration();
    String timeToLive = String.valueOf(HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_SERVER2_OPERATION_LOG_PURGEPOLICY_TIMETOLIVE, TimeUnit.SECONDS));
    PurgePolicy purgePolicy = IdlePurgePolicy.createPurgePolicy(timeToLive, null, "SECONDS", configuration);
    // Hack: due to the (non-standard) way that log4j configuration is extended to introduce the routing appender
    // the life-cycle methods are not called as expected leading to initialization problems (such as the scheduler)
    configuration.getScheduler().incrementScheduledItems();
    // Create the appender
    RoutingAppender routingAppender = RoutingAppender.createAppender(TEST_QUERY_ROUTING_APPENDER, "true", routes, configuration, null, purgePolicy, null);
    LoggerConfig loggerConfig = configuration.getRootLogger();
    loggerConfig.addAppender(routingAppender, null, null);
    context.updateLoggers();
    routingAppender.start();
}
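Both helpers attach the routing appender straight to the root LoggerConfig. A matching teardown is not part of the Hive sources shown here, but a sketch under that assumption would look the appender up by the name it was registered under, remove it, and stop it (Appender here is org.apache.logging.log4j.core.Appender):

LoggerContext context = (LoggerContext) LogManager.getContext(false);
Configuration configuration = context.getConfiguration();
LoggerConfig rootConfig = configuration.getRootLogger();

// TEST_QUERY_ROUTING_APPENDER is the name passed to RoutingAppender.createAppender(...) above.
Appender appender = rootConfig.getAppenders().get(TEST_QUERY_ROUTING_APPENDER);
if (appender != null) {
    rootConfig.removeAppender(TEST_QUERY_ROUTING_APPENDER);
    appender.stop();
    context.updateLoggers();
}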