Use of io.cdap.cdap.logging.filter.AndFilter in project cdap by caskdata.
The class FileLogReader, method getLogPrev:
@Override
public void getLogPrev(final LoggingContext loggingContext, final ReadRange readRange,
                       final int maxEvents, final Filter filter, final Callback callback) {
  callback.init();
  try {
    Filter logFilter = new AndFilter(ImmutableList.of(LoggingContextHelper.createFilter(loggingContext),
                                                      filter));
    List<LogLocation> sortedFilesInRange =
      fileMetadataReader.listFiles(LoggingContextHelper.getLogPathIdentifier(loggingContext),
                                   readRange.getFromMillis(), readRange.getToMillis());
    if (sortedFilesInRange.isEmpty()) {
      return;
    }
    long fromTimeMs = readRange.getToMillis() - 1;
    LOG.trace("Using fromTimeMs={}, readRange={}", fromTimeMs, readRange);
    List<Collection<LogEvent>> logSegments = Lists.newLinkedList();
    int count = 0;
    for (LogLocation file : Lists.reverse(sortedFilesInRange)) {
      try {
        LOG.trace("Reading file {}", file);
        Collection<LogEvent> events = file.readLogPrev(logFilter, fromTimeMs, maxEvents - count);
        logSegments.add(events);
        count += events.size();
        if (count >= maxEvents) {
          break;
        }
      } catch (IOException e) {
        LOG.warn("Got exception reading log file {}", file, e);
      }
    }
    for (LogEvent event : Iterables.concat(Lists.reverse(logSegments))) {
      callback.handle(event);
    }
  } catch (Throwable e) {
    LOG.error("Got exception: ", e);
    throw Throwables.propagate(e);
  }
}
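The AndFilter here conjoins the context-derived filter with the caller-supplied one before the log files are scanned newest-first. A minimal sketch of driving this method with a collecting callback follows; it assumes Callback declares only the init() and handle(LogEvent) methods used above, and the run-id value is illustrative.
// Sketch: collect up to 50 events for one run via getLogPrev.
// Assumes Callback has only the init()/handle() methods used above;
// any additional lifecycle methods would need stubs as well.
final List<LogEvent> collected = new ArrayList<>();
Callback collector = new Callback() {
  @Override
  public void init() {
    collected.clear();
  }

  @Override
  public void handle(LogEvent event) {
    collected.add(event);
  }
};
// The run-id value "run-1234" is purely illustrative.
Filter runFilter = new MdcExpression(ApplicationLoggingContext.TAG_RUN_ID, "run-1234");
fileLogReader.getLogPrev(loggingContext, readRange, 50, runFilter, collector);
// collected now holds at most 50 events, in chronological order.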
Use of io.cdap.cdap.logging.filter.AndFilter in project cdap by caskdata.
The class KafkaLogReader, method getLogPrev:
@Override
public void getLogPrev(LoggingContext loggingContext, ReadRange readRange, int maxEvents,
                       Filter filter, Callback callback) {
  if (readRange.getKafkaOffset() == LogOffset.INVALID_KAFKA_OFFSET) {
    readRange = new ReadRange(readRange.getFromMillis(), readRange.getToMillis(),
                              ReadRange.LATEST.getKafkaOffset());
  }
  int partition = partitioner.partition(loggingContext.getLogPartition(), -1);
  LOG.trace("Reading from kafka partition {}", partition);
  callback.init();
  KafkaConsumer kafkaConsumer = new KafkaConsumer(brokerService, topic, partition, KAFKA_FETCH_TIMEOUT_MS);
  try {
    Filter logFilter = new AndFilter(ImmutableList.of(LoggingContextHelper.createFilter(loggingContext),
                                                      filter));
    long latestOffset = kafkaConsumer.fetchLatestOffset();
    long earliestOffset = kafkaConsumer.fetchEarliestOffset();
    long stopOffset;
    long startOffset;
    if (readRange.getKafkaOffset() < 0) {
      stopOffset = latestOffset;
    } else {
      stopOffset = readRange.getKafkaOffset();
    }
    startOffset = stopOffset - maxEvents;
    if (startOffset < earliestOffset) {
      startOffset = earliestOffset;
    }
    LOG.trace("Using startOffset={}, latestOffset={}, readRange={}", startOffset, latestOffset, readRange);
    if (startOffset >= stopOffset || startOffset >= latestOffset) {
      // At the end of the Kafka events, nothing to return.
      return;
    }
    KafkaCallback kafkaCallback = new KafkaCallback(logFilter, serializer.get(), stopOffset, maxEvents,
                                                    callback, readRange.getFromMillis());
    // Events between startOffset and stopOffset may not contain the logs we are looking for,
    // so we need to return at least one log offset for the next getLogPrev call to work.
    int fetchCount = 0;
    while (fetchCount == 0 && kafkaCallback.getEventsRead() <= MAX_READ_EVENTS_KAFKA) {
      fetchCount = fetchLogEvents(kafkaConsumer, kafkaCallback, startOffset, stopOffset, maxEvents, readRange);
      stopOffset = startOffset;
      if (stopOffset <= earliestOffset) {
        // Truly no log messages found.
        break;
      }
      startOffset = stopOffset - maxEvents;
      if (startOffset < earliestOffset) {
        startOffset = earliestOffset;
      }
    }
  } catch (Throwable e) {
    LOG.error("Got exception: ", e);
    throw Throwables.propagate(e);
  } finally {
    try {
      kafkaConsumer.close();
    } catch (IOException e) {
      LOG.error(String.format("Caught exception when closing KafkaConsumer for topic %s, partition %d",
                              topic, partition), e);
    }
  }
}
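The while loop pages backward through Kafka in windows of maxEvents offsets until a fetch returns something or the earliest retained offset is reached. The window arithmetic in isolation, as a plain-Java sketch with illustrative offset values (the actual fetch and the MAX_READ_EVENTS_KAFKA guard are elided):
// Sketch of the backward paging arithmetic (offset values illustrative):
// each pass covers [startOffset, stopOffset), then the window slides toward
// earliestOffset until events are found or the log is exhausted.
long earliestOffset = 100L;
long latestOffset = 1_000L;
int maxEvents = 200;
long stopOffset = latestOffset;
long startOffset = Math.max(stopOffset - maxEvents, earliestOffset);
while (startOffset < stopOffset) {
  // fetch and filter the events in [startOffset, stopOffset) here
  stopOffset = startOffset;
  if (stopOffset <= earliestOffset) {
    break;  // reached the earliest retained offset; nothing more to scan
  }
  startOffset = Math.max(stopOffset - maxEvents, earliestOffset);
}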
Use of io.cdap.cdap.logging.filter.AndFilter in project cdap by caskdata.
The class KafkaLogReader, method getLogNext:
@Override
public void getLogNext(LoggingContext loggingContext, ReadRange readRange, int maxEvents,
                       Filter filter, Callback callback) {
  if (readRange.getKafkaOffset() == ReadRange.LATEST.getKafkaOffset()) {
    getLogPrev(loggingContext, readRange, maxEvents, filter, callback);
    return;
  }
  int partition = partitioner.partition(loggingContext.getLogPartition(), -1);
  LOG.trace("Reading from kafka {}:{}", topic, partition);
  callback.init();
  KafkaConsumer kafkaConsumer = new KafkaConsumer(brokerService, topic, partition, KAFKA_FETCH_TIMEOUT_MS);
  try {
    // Try to get the offset corresponding to readRange.getFromMillis().
    if (readRange.getKafkaOffset() == LogOffset.INVALID_KAFKA_OFFSET) {
      readRange = new ReadRange(readRange.getFromMillis(), readRange.getToMillis(),
                                kafkaConsumer.fetchOffsetBefore(readRange.getFromMillis()));
    }
    Filter logFilter = new AndFilter(ImmutableList.of(LoggingContextHelper.createFilter(loggingContext),
                                                      filter));
    long latestOffset = kafkaConsumer.fetchLatestOffset();
    long startOffset = readRange.getKafkaOffset() + 1;
    LOG.trace("Using startOffset={}, latestOffset={}, readRange={}", startOffset, latestOffset, readRange);
    if (startOffset >= latestOffset) {
      // At the end of the events, nothing to return.
      return;
    }
    KafkaCallback kafkaCallback = new KafkaCallback(logFilter, serializer.get(), latestOffset, maxEvents,
                                                    callback, readRange.getFromMillis());
    fetchLogEvents(kafkaConsumer, kafkaCallback, startOffset, latestOffset, maxEvents, readRange);
  } catch (Throwable e) {
    LOG.error("Got exception: ", e);
    throw Throwables.propagate(e);
  } finally {
    try {
      kafkaConsumer.close();
    } catch (IOException e) {
      LOG.error(String.format("Caught exception when closing KafkaConsumer for topic %s, partition %d",
                              topic, partition), e);
    }
  }
}
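Because a ReadRange carrying the LATEST Kafka offset is redirected to getLogPrev, a caller can request the newest events directly. A one-line sketch (the maxEvents value of 100 is illustrative):
// Sketch: requesting the most recent log events. ReadRange.LATEST carries
// the LATEST Kafka offset, so getLogNext delegates to getLogPrev and the
// newest (up to) 100 matching events are delivered to the callback.
kafkaLogReader.getLogNext(loggingContext, ReadRange.LATEST, 100, filter, callback);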
Use of io.cdap.cdap.logging.filter.AndFilter in project cdap by caskdata.
The class LoggingContextHelper, method createFilter:
public static Filter createFilter(LoggingContext loggingContext) {
  if (loggingContext instanceof ServiceLoggingContext) {
    LoggingContext.SystemTag systemTag = getByNamespaceOrSystemID(loggingContext.getSystemTagsMap());
    if (systemTag == null) {
      throw new IllegalArgumentException("No namespace or system id present");
    }
    String systemId = systemTag.getValue();
    String componentId = loggingContext.getSystemTagsMap().get(ServiceLoggingContext.TAG_COMPONENT_ID).getValue();
    String tagName = ServiceLoggingContext.TAG_SERVICE_ID;
    String entityId = loggingContext.getSystemTagsMap().get(ServiceLoggingContext.TAG_SERVICE_ID).getValue();
    ImmutableList.Builder<Filter> filterBuilder = ImmutableList.builder();
    // In CDAP 3.5 we removed SystemLoggingContext, which had the tag .systemId; we now use .namespaceId.
    // To stay backward compatible we use an OR filter so that old logs can still be read. See CDAP-7482.
    OrFilter namespaceFilter = new OrFilter(ImmutableList.of(
      new MdcExpression(NamespaceLoggingContext.TAG_NAMESPACE_ID, systemId),
      new MdcExpression(ServiceLoggingContext.TAG_SYSTEM_ID, systemId)));
    filterBuilder.add(namespaceFilter);
    filterBuilder.add(new MdcExpression(ServiceLoggingContext.TAG_COMPONENT_ID, componentId));
    filterBuilder.add(new MdcExpression(tagName, entityId));
    return new AndFilter(filterBuilder.build());
  } else {
    String namespaceId = loggingContext.getSystemTagsMap().get(ApplicationLoggingContext.TAG_NAMESPACE_ID).getValue();
    String applId = loggingContext.getSystemTagsMap().get(ApplicationLoggingContext.TAG_APPLICATION_ID).getValue();
    LoggingContext.SystemTag entityTag = getEntityId(loggingContext);
    ImmutableList.Builder<Filter> filterBuilder = ImmutableList.builder();
    // For backward compatibility: logs written before namespaces existed carry .accountId with the
    // developer account as its value, so we must not filter them out if they belong to this
    // application and entity.
    OrFilter namespaceFilter = new OrFilter(ImmutableList.of(
      new MdcExpression(NamespaceLoggingContext.TAG_NAMESPACE_ID, namespaceId),
      new MdcExpression(ACCOUNT_ID, Constants.DEVELOPER_ACCOUNT)));
    filterBuilder.add(namespaceFilter);
    filterBuilder.add(new MdcExpression(ApplicationLoggingContext.TAG_APPLICATION_ID, applId));
    filterBuilder.add(new MdcExpression(entityTag.getName(), entityTag.getValue()));
    if (loggingContext instanceof WorkflowProgramLoggingContext) {
      // The program was started by a workflow; add the program information to the filter.
      Map<String, LoggingContext.SystemTag> systemTagsMap = loggingContext.getSystemTagsMap();
      LoggingContext.SystemTag programTag =
        systemTagsMap.get(WorkflowProgramLoggingContext.TAG_WORKFLOW_MAP_REDUCE_ID);
      if (programTag != null) {
        filterBuilder.add(new MdcExpression(WorkflowProgramLoggingContext.TAG_WORKFLOW_MAP_REDUCE_ID,
                                            programTag.getValue()));
      }
      programTag = systemTagsMap.get(WorkflowProgramLoggingContext.TAG_WORKFLOW_SPARK_ID);
      if (programTag != null) {
        filterBuilder.add(new MdcExpression(WorkflowProgramLoggingContext.TAG_WORKFLOW_SPARK_ID,
                                            programTag.getValue()));
      }
    }
    // Add a run-id filter if one is present.
    LoggingContext.SystemTag runId = loggingContext.getSystemTagsMap().get(ApplicationLoggingContext.TAG_RUN_ID);
    if (runId != null && runId.getValue() != null) {
      filterBuilder.add(new MdcExpression(ApplicationLoggingContext.TAG_RUN_ID, runId.getValue()));
    }
    return new AndFilter(filterBuilder.build());
  }
}
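For a ServiceLoggingContext, the filter this method produces is effectively AND(OR(namespaceId = X, systemId = X), componentId = Y, serviceId = Z). A hand-built equivalent using the same builder pattern, with purely illustrative tag values:
// Hand-built equivalent of the service-context filter above; the values
// "system", "metrics", and "metrics.processor" are purely illustrative.
ImmutableList.Builder<Filter> builder = ImmutableList.builder();
builder.add(new OrFilter(ImmutableList.of(
  new MdcExpression(NamespaceLoggingContext.TAG_NAMESPACE_ID, "system"),
  new MdcExpression(ServiceLoggingContext.TAG_SYSTEM_ID, "system"))));
builder.add(new MdcExpression(ServiceLoggingContext.TAG_COMPONENT_ID, "metrics"));
builder.add(new MdcExpression(ServiceLoggingContext.TAG_SERVICE_ID, "metrics.processor"));
// An event matches only if every sub-filter matches; the nested OrFilter
// accepts either the new namespace tag or the legacy system-id tag.
Filter serviceFilter = new AndFilter(builder.build());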
Use of io.cdap.cdap.logging.filter.AndFilter in project cdap by caskdata.
The class FileLogReader, method getLog:
@Override
public CloseableIterator<LogEvent> getLog(LoggingContext loggingContext, final long fromTimeMs,
                                          final long toTimeMs, Filter filter) {
  try {
    final Filter logFilter = new AndFilter(ImmutableList.of(LoggingContextHelper.createFilter(loggingContext),
                                                            filter));
    LOG.trace("Using fromTimeMs={}, toTimeMs={}", fromTimeMs, toTimeMs);
    List<LogLocation> sortedFilesInRange =
      fileMetadataReader.listFiles(LoggingContextHelper.getLogPathIdentifier(loggingContext),
                                   fromTimeMs, toTimeMs);
    if (sortedFilesInRange.isEmpty()) {
      // Return an empty iterator.
      return new AbstractCloseableIterator<LogEvent>() {
        @Override
        protected LogEvent computeNext() {
          return endOfData();
        }

        @Override
        public void close() {
          // no-op
        }
      };
    }

    final Iterator<LogLocation> filesIter = sortedFilesInRange.iterator();
    CloseableIterator<CloseableIterator<LogEvent>> closeableIterator =
      new CloseableIterator<CloseableIterator<LogEvent>>() {
        private CloseableIterator<LogEvent> curr = null;

        @Override
        public void close() {
          if (curr != null) {
            curr.close();
          }
        }

        @Override
        public boolean hasNext() {
          return filesIter.hasNext();
        }

        @Override
        public CloseableIterator<LogEvent> next() {
          if (curr != null) {
            curr.close();
          }
          LogLocation file = filesIter.next();
          LOG.trace("Reading file {}", file);
          curr = file.readLog(logFilter, fromTimeMs, toTimeMs, Integer.MAX_VALUE);
          return curr;
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException("Remove not supported");
        }
      };
    return concat(closeableIterator);
  } catch (Throwable e) {
    LOG.error("Got exception: ", e);
    throw Throwables.propagate(e);
  }
}
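The returned iterator owns the per-file readers, so callers should close it when done. A minimal consumption sketch, assuming CloseableIterator extends AutoCloseable (as its close() method suggests):
// Sketch of consuming getLog's result; the try-with-resources assumes
// CloseableIterator extends AutoCloseable.
try (CloseableIterator<LogEvent> events =
       fileLogReader.getLog(loggingContext, fromTimeMs, toTimeMs, logFilter)) {
  while (events.hasNext()) {
    LogEvent event = events.next();
    // process the event here
  }
}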