Usage of org.apache.logging.log4j.core.config.Node in the Apache logging-log4j2 project:
class PluginElementVisitor, method visit.
// Resolves the value for a @PluginElement-annotated factory parameter by consuming matching
// child nodes of 'node'. Consumed children are removed from the node; a human-readable
// description of the resolution is appended to 'log'.
@Override
public Object visit(final Configuration configuration, final Node node, final LogEvent event, final StringBuilder log) {
// The element name this injection point is looking for.
final String name = this.annotation.value();
if (this.conversionType.isArray()) {
// Array-typed element: gather every matching child into one array of the component type.
setConversionType(this.conversionType.getComponentType());
final List<Object> values = new ArrayList<>();
final Collection<Node> used = new ArrayList<>();
log.append("={");
boolean first = true;
for (final Node child : node.getChildren()) {
final PluginType<?> childType = child.getType();
// A child matches either by element name or by assignability of its plugin class.
if (name.equalsIgnoreCase(childType.getElementName()) || this.conversionType.isAssignableFrom(childType.getPluginClass())) {
if (!first) {
log.append(", ");
}
first = false;
used.add(child);
final Object childObject = child.getObject();
if (childObject == null) {
// Child failed to produce an object; log and keep scanning the remaining children.
LOGGER.error("Null object returned for {} in {}.", child.getName(), node.getName());
continue;
}
if (childObject.getClass().isArray()) {
// The child itself produced an array: return it directly.
// NOTE(review): on this early return the nodes collected in 'used' are NOT removed
// from 'node' (the removeAll below is skipped) — confirm this asymmetry is intended.
log.append(Arrays.toString((Object[]) childObject)).append('}');
return childObject;
}
log.append(child.toString());
values.add(childObject);
}
}
log.append('}');
// note that we need to return an empty array instead of null if the types are correct
// NOTE(review): only the first element's type is checked; later elements are assumed compatible.
if (!values.isEmpty() && !this.conversionType.isAssignableFrom(values.get(0).getClass())) {
LOGGER.error("Attempted to assign attribute {} to list of type {} which is incompatible with {}.", name, values.get(0).getClass(), this.conversionType);
return null;
}
// Mark the consumed children so later visitors do not process them again.
node.getChildren().removeAll(used);
// we need to use reflection here because values.toArray() will cause type errors at runtime
final Object[] array = (Object[]) Array.newInstance(this.conversionType, values.size());
for (int i = 0; i < array.length; i++) {
array[i] = values.get(i);
}
return array;
}
// Non-array element: resolve a single child by name, consume it, and return its object.
final Node namedNode = findNamedNode(name, node.getChildren());
if (namedNode == null) {
log.append(name).append("=null");
return null;
}
log.append(namedNode.getName()).append('(').append(namedNode.toString()).append(')');
node.getChildren().remove(namedNode);
return namedNode.getObject();
}
Usage of org.apache.logging.log4j.core.config.Node in the Apache logging-log4j2 project:
class DefaultMergeStrategy, method mergConfigurations.
/**
 * Merge the source Configuration node tree into the target Configuration node tree.
 * Matching sections are merged in place; unmatched source sections are adopted by the target.
 *
 * @param target The target node to merge into.
 * @param source The source node.
 * @param pluginManager The PluginManager.
 */
@Override
// NOTE: "mergConfigurations" (sic) is the name declared by the MergeStrategy interface and
// must be kept for compatibility.
public void mergConfigurations(final Node target, final Node source, final PluginManager pluginManager) {
for (final Node sourceChildNode : source.getChildren()) {
final boolean isFilter = isFilterNode(sourceChildNode);
boolean isMerged = false;
for (final Node targetChildNode : target.getChildren()) {
if (isFilter) {
// Filters merge pairwise: the first filter node found in the target absorbs the source one.
if (isFilterNode(targetChildNode)) {
updateFilterNode(target, targetChildNode, sourceChildNode, pluginManager);
isMerged = true;
break;
}
continue;
}
if (!targetChildNode.getName().equalsIgnoreCase(sourceChildNode.getName())) {
continue;
}
switch(targetChildNode.getName().toLowerCase()) {
case PROPERTIES:
case SCRIPTS:
case APPENDERS:
{
// In these sections a source entry replaces any target entry with the same NAME attribute.
for (final Node node : sourceChildNode.getChildren()) {
for (final Node targetNode : targetChildNode.getChildren()) {
// NOTE(review): this NPEs if a target entry has no NAME attribute — confirm
// entries in these sections always carry one.
if (targetNode.getAttributes().get(NAME).equals(node.getAttributes().get(NAME))) {
// Remove-then-break: safe because iteration stops immediately after the removal.
targetChildNode.getChildren().remove(targetNode);
break;
}
}
targetChildNode.getChildren().add(node);
}
isMerged = true;
break;
}
case LOGGERS:
{
final Map<String, Node> targetLoggers = new HashMap<>();
for (final Node node : targetChildNode.getChildren()) {
targetLoggers.put(node.getName(), node);
}
// NOTE(review): 'targetLoggers' is populated but never read in this method — possibly dead code.
for (final Node node : sourceChildNode.getChildren()) {
final Node targetNode = getLoggerNode(targetChildNode, node.getAttributes().get(NAME));
final Node loggerNode = new Node(targetChildNode, node.getName(), node.getType());
if (targetNode != null) {
// The logger already exists in the target: overlay attributes, then merge children.
targetNode.getAttributes().putAll(node.getAttributes());
for (final Node sourceLoggerChild : node.getChildren()) {
if (isFilterNode(sourceLoggerChild)) {
boolean foundFilter = false;
for (final Node targetChild : targetNode.getChildren()) {
if (isFilterNode(targetChild)) {
updateFilterNode(loggerNode, targetChild, sourceLoggerChild, pluginManager);
foundFilter = true;
break;
}
}
if (!foundFilter) {
final Node childNode = new Node(loggerNode, sourceLoggerChild.getName(), sourceLoggerChild.getType());
// NOTE(review): the new filter node is added without copying the source child's
// attributes or children — confirm this is intended.
targetNode.getChildren().add(childNode);
}
} else {
// Non-filter child: deep-copy it and replace any matching child in the target logger.
final Node childNode = new Node(loggerNode, sourceLoggerChild.getName(), sourceLoggerChild.getType());
childNode.getAttributes().putAll(sourceLoggerChild.getAttributes());
childNode.getChildren().addAll(sourceLoggerChild.getChildren());
if (childNode.getName().equalsIgnoreCase("AppenderRef")) {
// AppenderRef children are matched via isSameReference rather than by name.
for (final Node targetChild : targetNode.getChildren()) {
if (isSameReference(targetChild, childNode)) {
targetNode.getChildren().remove(targetChild);
break;
}
}
} else {
for (final Node targetChild : targetNode.getChildren()) {
if (isSameName(targetChild, childNode)) {
targetNode.getChildren().remove(targetChild);
break;
}
}
}
targetNode.getChildren().add(childNode);
}
}
} else {
// The logger is new to the target: copy it over wholesale.
loggerNode.getAttributes().putAll(node.getAttributes());
loggerNode.getChildren().addAll(node.getChildren());
targetChildNode.getChildren().add(loggerNode);
}
}
isMerged = true;
break;
}
default:
{
// Any other matching section: append all source children to the target section.
targetChildNode.getChildren().addAll(sourceChildNode.getChildren());
isMerged = true;
break;
}
}
}
if (!isMerged) {
// No matching target section: adopt the source node. Properties are prepended so they
// precede the sections that may reference them.
if (sourceChildNode.getName().equalsIgnoreCase("Properties")) {
target.getChildren().add(0, sourceChildNode);
} else {
target.getChildren().add(sourceChildNode);
}
}
}
}
Usage of org.apache.logging.log4j.core.config.Node in the Apache Hive project:
class LlapWrappedAppender, method setupAppenderIfRequired.
/**
 * Lazily resolves, starts, and installs the single child appender wrapped by this appender.
 * Safe to call on every event: returns immediately once {@code appenderControl} is set.
 * Only {@link RandomAccessFileAppender} children are supported; anything else is a hard error.
 *
 * @param event the event that triggered initialization; passed to the configuration so that
 *              lookups performed while building the child appender can see it
 * @throws IllegalStateException if a child appender was created but is not a RandomAccessFileAppender
 * @throws RuntimeException if no usable child appender could be found (mis-configuration)
 */
private void setupAppenderIfRequired(LogEvent event) {
    if (appenderControl.get() != null) {
        return; // Already initialized by an earlier event.
    }
    if (!node.getType().getElementName().equalsIgnoreCase("appender")) {
        // Mis-configured: this node does not describe an appender at all.
        throw new RuntimeException(setupFailureMessage());
    }
    for (final Node cnode : node.getChildren()) {
        final Node appNode = new Node(cnode);
        config.createConfiguration(appNode, event);
        if (appNode.getObject() instanceof Appender) {
            final Appender app = appNode.getObject();
            app.start();
            if (!(app instanceof RandomAccessFileAppender)) {
                String message = "Cannot handle appenders other than " + RandomAccessFileAppender.class.getName() + ". Found: " + app.getClass().getName();
                LOGGER.error(message);
                throw new IllegalStateException(message);
            }
            realAppender.set(app);
            appenderControl.set(new AppenderControl(app, null, null));
            if (LOGGER.isDebugEnabled()) {
                RandomAccessFileAppender raf = (RandomAccessFileAppender) app;
                LOGGER.debug("Setup new appender to write to file: " + raf.getFileName() + ", appenderName=" + raf.getName() + ", appenderManagerName=" + raf.getManager().getName());
            }
            break;
        }
    }
    if (appenderControl.get() == null) {
        // Mis-configured: the children did not yield a usable appender.
        throw new RuntimeException(setupFailureMessage());
    }
}

/**
 * Builds the mis-configuration failure message. Extracted because the identical message was
 * previously duplicated in two branches, and the concatenation was missing separating spaces
 * ("...Appendername=..." and "appender.Could not...").
 */
private String setupFailureMessage() {
    return LlapWrappedAppender.class.getSimpleName() + " name=" + getName()
        + " unable to setup actual appender. Could not find child appender";
}
Usage of org.apache.logging.log4j.core.config.Node in the Apache Hive project:
class LogDivertAppender, method registerRoutingAppender.
/**
 * Programmatically registers a routing appender with the Log4j configuration, which
 * automatically writes the log of each query to an individual file.
 * The equivalent property configuration is as follows:
 * # queryId based routing file appender
 * appender.query-routing.type = Routing
 * appender.query-routing.name = query-routing
 * appender.query-routing.routes.type = Routes
 * appender.query-routing.routes.pattern = $${ctx:queryId}
 * # default route
 * appender.query-routing.routes.route-default.type = Route
 * appender.query-routing.routes.route-default.key = $${ctx:queryId}
 * appender.query-routing.routes.route-default.app.type = null
 * appender.query-routing.routes.route-default.app.name = Null
 * # queryId based route
 * appender.query-routing.routes.route-mdc.type = Route
 * appender.query-routing.routes.route-mdc.name = IrrelevantName-query-routing
 * appender.query-routing.routes.route-mdc.app.type = RandomAccessFile
 * appender.query-routing.routes.route-mdc.app.name = query-file-appender
 * appender.query-routing.routes.route-mdc.app.fileName = ${sys:hive.log.dir}/${ctx:sessionId}/${ctx:queryId}
 * appender.query-routing.routes.route-mdc.app.layout.type = PatternLayout
 * appender.query-routing.routes.route-mdc.app.layout.pattern = %d{ISO8601} %5p %c{2}: %m%n
 * @param conf the configuration for the HiveServer2 instance
 */
public static void registerRoutingAppender(org.apache.hadoop.conf.Configuration conf) {
    if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, false)) {
        // Spare some resources: do not register the appender if operation logging is disabled.
        return;
    }
    String loggingLevel = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL);
    OperationLog.LoggingLevel loggingMode = OperationLog.getLoggingLevel(loggingLevel);
    String layout = loggingMode == OperationLog.LoggingLevel.VERBOSE ? verboseLayout : nonVerboseLayout;
    // Build the two routes, then register a RoutingAppender keyed on ${ctx:queryId}.
    Route defaultRoute = createDefaultRoute();
    Route mdcRoute = createQueryIdRoute(loggingMode, layout);
    Routes routes = Routes.createRoutes("${ctx:queryId}", defaultRoute, mdcRoute);
    LoggerContext context = (LoggerContext) LogManager.getContext(false);
    Configuration configuration = context.getConfiguration();
    String timeToLive = String.valueOf(HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_SERVER2_OPERATION_LOG_PURGEPOLICY_TIMETOLIVE, TimeUnit.SECONDS));
    PurgePolicy purgePolicy = IdlePurgePolicy.createPurgePolicy(timeToLive, null, "SECONDS", configuration);
    // Hack: due to the (non-standard) way that log4j configuration is extended to introduce the routing appender
    // the life-cycle methods are not called as expected leading to initialization problems (such as the scheduler)
    configuration.getScheduler().incrementScheduledItems();
    RoutingAppender routingAppender = RoutingAppender.createAppender(QUERY_ROUTING_APPENDER, "true", routes, configuration, null, purgePolicy, null);
    LoggerConfig loggerConfig = configuration.getRootLogger();
    loggerConfig.addAppender(routingAppender, null, null);
    context.updateLoggers();
    routingAppender.start();
}

/**
 * Builds the default route: events that arrive without a queryId in the context are sent to a
 * NullAppender (i.e. dropped).
 */
private static Route createDefaultRoute() {
    // Create NullAppender node.
    PluginEntry nullEntry = new PluginEntry();
    nullEntry.setClassName(NullAppender.class.getName());
    nullEntry.setKey("null");
    nullEntry.setName("appender");
    PluginType<NullAppender> nullChildType = new PluginType<NullAppender>(nullEntry, NullAppender.class, "appender");
    Node nullChildNode = new Node(null, "Null", nullChildType);
    // Wrap it in the default Route node.
    PluginEntry defaultEntry = new PluginEntry();
    defaultEntry.setClassName(Route.class.getName());
    defaultEntry.setKey("route");
    defaultEntry.setName("Route");
    PluginType<Route> defaultType = new PluginType<Route>(defaultEntry, Route.class, "Route");
    Node nullNode = new Node(null, "Route", defaultType);
    nullNode.getChildren().add(nullChildNode);
    return Route.createRoute(null, "${ctx:queryId}", nullNode);
}

/**
 * Builds the queryId-based route: a HushableRandomAccessFileAppender writing to a per-query
 * file, filtered by a NameFilter at the given logging mode and formatted with the given
 * PatternLayout pattern.
 */
private static Route createQueryIdRoute(OperationLog.LoggingLevel loggingMode, String layout) {
    PluginEntry entry = new PluginEntry();
    entry.setClassName(Route.class.getName());
    entry.setKey("route");
    entry.setName("Route");
    PluginType<Route> type = new PluginType<Route>(entry, Route.class, "Route");
    Node node = new Node(null, "Route", type);
    // Per-query file appender.
    PluginEntry childEntry = new PluginEntry();
    childEntry.setClassName(HushableRandomAccessFileAppender.class.getName());
    childEntry.setKey("HushableMutableRandomAccess");
    childEntry.setName("appender");
    PluginType<HushableRandomAccessFileAppender> childType = new PluginType<>(childEntry, HushableRandomAccessFileAppender.class, "appender");
    Node childNode = new Node(node, "HushableMutableRandomAccess", childType);
    childNode.getAttributes().put("name", "query-file-appender");
    childNode.getAttributes().put("fileName", "${ctx:operationLogLocation}/${ctx:sessionId}/${ctx:queryId}");
    node.getChildren().add(childNode);
    // Filter matching the requested operation-logging level.
    PluginEntry filterEntry = new PluginEntry();
    filterEntry.setClassName(NameFilter.class.getName());
    filterEntry.setKey("namefilter");
    filterEntry.setName("namefilter");
    PluginType<NameFilter> filterType = new PluginType<>(filterEntry, NameFilter.class, "filter");
    Node filterNode = new Node(childNode, "NameFilter", filterType);
    filterNode.getAttributes().put("loggingLevel", loggingMode.name());
    childNode.getChildren().add(filterNode);
    // Layout for the per-query file.
    PluginEntry layoutEntry = new PluginEntry();
    layoutEntry.setClassName(PatternLayout.class.getName());
    layoutEntry.setKey("patternlayout");
    layoutEntry.setName("layout");
    PluginType<PatternLayout> layoutType = new PluginType<>(layoutEntry, PatternLayout.class, "layout");
    Node layoutNode = new Node(childNode, "PatternLayout", layoutType);
    layoutNode.getAttributes().put("pattern", layout);
    childNode.getChildren().add(layoutNode);
    return Route.createRoute(null, null, node);
}
Usage of org.apache.logging.log4j.core.config.Node in the Apache Hive project:
class LogDivertAppenderForTest, method registerRoutingAppenderIfInTest.
/**
 * When HIVE_IN_TEST is set, programmatically installs a routing appender into the Log4j
 * configuration so that the test log of each query is written to its own file.
 * The equivalent property configuration is as follows:
 * # queryId based routing file appender
 * appender.test-query-routing.type = Routing
 * appender.test-query-routing.name = test-query-routing
 * appender.test-query-routing.routes.type = Routes
 * appender.test-query-routing.routes.pattern = $${ctx:queryId}
 * # default route
 * appender.test-query-routing.routes.test-route-default.type = Route
 * appender.test-query-routing.routes.test-route-default.key = $${ctx:queryId}
 * appender.test-query-routing.routes.test-route-default.app.type = NullAppender
 * appender.test-query-routing.routes.test-route-default.app.name = test-null-appender
 * # queryId based route
 * appender.test-query-routing.routes.test-route-mdc.type = Route
 * appender.test-query-routing.routes.test-route-mdc.name = test-query-routing
 * appender.test-query-routing.routes.test-route-mdc.app.type = RandomAccessFile
 * appender.test-query-routing.routes.test-route-mdc.app.name = test-query-file-appender
 * appender.test-query-routing.routes.test-route-mdc.app.fileName = ${sys:hive.log.dir}/${ctx:sessionId}/${ctx:queryId}.test
 * appender.test-query-routing.routes.test-route-mdc.app.layout.type = PatternLayout
 * appender.test-query-routing.routes.test-route-mdc.app.layout.pattern = %d{ISO8601} %5p %c{2}: %m%n
 * appender.test-query-routing.routes.test-route-mdc.app.filter.type = TestFilter
 * @param conf the configuration for the HiveServer2 instance
 */
public static void registerRoutingAppenderIfInTest(org.apache.hadoop.conf.Configuration conf) {
    if (!conf.getBoolean(HiveConf.ConfVars.HIVE_IN_TEST.varname, HiveConf.ConfVars.HIVE_IN_TEST.defaultBoolVal)) {
        // Outside of test mode there is nothing to register.
        return;
    }
    String logDir = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION);

    // A null appender swallows any event that arrives without a queryId in the context.
    PluginEntry dropAppenderEntry = new PluginEntry();
    dropAppenderEntry.setClassName(NullAppender.class.getName());
    PluginType<NullAppender> dropAppenderType = new PluginType<>(dropAppenderEntry, NullAppender.class, "appender");
    Node dropAppenderNode = new Node(null, "test-null-appender", dropAppenderType);

    // Fallback route: events without a queryId are directed to the null appender.
    PluginEntry fallbackRouteEntry = new PluginEntry();
    fallbackRouteEntry.setClassName(Route.class.getName());
    PluginType<Route> fallbackRouteType = new PluginType<>(fallbackRouteEntry, Route.class, "");
    Node fallbackRouteNode = new Node(null, "test-route-default", fallbackRouteType);
    fallbackRouteNode.getChildren().add(dropAppenderNode);

    // Per-query route: events carrying a queryId land in that query's own file.
    PluginEntry perQueryRouteEntry = new PluginEntry();
    perQueryRouteEntry.setClassName(Route.class.getName());
    PluginType<Route> perQueryRouteType = new PluginType<>(perQueryRouteEntry, Route.class, "");
    Node perQueryRouteNode = new Node(null, "test-route-mdc", perQueryRouteType);

    // File appender serving the per-query route.
    PluginEntry fileAppenderEntry = new PluginEntry();
    fileAppenderEntry.setClassName(HushableRandomAccessFileAppender.class.getName());
    PluginType<HushableRandomAccessFileAppender> fileAppenderType = new PluginType<>(fileAppenderEntry, HushableRandomAccessFileAppender.class, "appender");
    Node fileAppenderNode = new Node(perQueryRouteNode, "test-query-file-appender", fileAppenderType);
    fileAppenderNode.getAttributes().put("fileName", logDir + "/${ctx:sessionId}/${ctx:queryId}.test");
    fileAppenderNode.getAttributes().put("name", "test-query-file-appender");
    perQueryRouteNode.getChildren().add(fileAppenderNode);

    // Filter attached to the file appender.
    PluginEntry testFilterEntry = new PluginEntry();
    testFilterEntry.setClassName(TestFilter.class.getName());
    PluginType<TestFilter> testFilterType = new PluginType<>(testFilterEntry, TestFilter.class, "");
    Node testFilterNode = new Node(fileAppenderNode, "test-filter", testFilterType);
    fileAppenderNode.getChildren().add(testFilterNode);

    // Layout attached to the file appender.
    PluginEntry patternEntry = new PluginEntry();
    patternEntry.setClassName(PatternLayout.class.getName());
    PluginType<PatternLayout> patternType = new PluginType<>(patternEntry, PatternLayout.class, "");
    Node patternNode = new Node(fileAppenderNode, "PatternLayout", patternType);
    patternNode.getAttributes().put("pattern", LogDivertAppender.nonVerboseLayout);
    fileAppenderNode.getChildren().add(patternNode);

    // Materialize the Route objects from the node trees and group them.
    Route fallbackRoute = Route.createRoute(null, "${ctx:queryId}", fallbackRouteNode);
    Route perQueryRoute = Route.createRoute(null, null, perQueryRouteNode);
    Routes allRoutes = Routes.createRoutes("${ctx:queryId}", fallbackRoute, perQueryRoute);

    LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
    Configuration cfg = loggerContext.getConfiguration();
    String ttlSeconds = String.valueOf(HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_SERVER2_OPERATION_LOG_PURGEPOLICY_TIMETOLIVE, TimeUnit.SECONDS));
    PurgePolicy purge = IdlePurgePolicy.createPurgePolicy(ttlSeconds, null, "SECONDS", cfg);
    // Hack: due to the (non-standard) way that log4j configuration is extended to introduce the
    // routing appender, the life-cycle methods are not called as expected, leading to
    // initialization problems (such as the scheduler).
    cfg.getScheduler().incrementScheduledItems();
    // Create, register, and start the routing appender on the root logger.
    RoutingAppender routing = RoutingAppender.createAppender(TEST_QUERY_ROUTING_APPENDER, "true", allRoutes, cfg, null, purge, null);
    LoggerConfig rootLoggerConfig = cfg.getRootLogger();
    rootLoggerConfig.addAppender(routing, null, null);
    loggerContext.updateLoggers();
    routing.start();
}
Aggregations