Use of co.cask.cdap.logging.read.LogEvent in project cdap by caskdata.
From the class TestDistributedLogReader, method generateCheckpointTime:
private static void generateCheckpointTime(LoggingContext loggingContext, int numExpectedEvents,
                                           String kafkaTopic) throws Exception {
  FileLogReader logReader = injector.getInstance(FileLogReader.class);
  List<LogEvent> events =
    Lists.newArrayList(logReader.getLog(loggingContext, 0, Long.MAX_VALUE, Filter.EMPTY_FILTER));
  Assert.assertEquals(numExpectedEvents, events.size());

  // Save checkpoint (time of last event)
  CheckpointManagerFactory checkpointManagerFactory = injector.getInstance(CheckpointManagerFactory.class);
  CheckpointManager checkpointManager =
    checkpointManagerFactory.create(kafkaTopic, Constants.Logging.SYSTEM_PIPELINE_CHECKPOINT_PREFIX);
  long checkpointTime = events.get(numExpectedEvents - 1).getLoggingEvent().getTimeStamp();
  checkpointManager.saveCheckpoints(
    ImmutableMap.of(stringPartitioner.partition(loggingContext.getLogPartition(), -1),
                    new Checkpoint(numExpectedEvents, checkpointTime, checkpointTime)));
}
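This helper is invoked from the tests after a known number of events has been written. A minimal sketch of such a call, where the logging context, expected event count, and topic name are hypothetical placeholders rather than values taken from the project:

// Hypothetical invocation from another test method; 'loggingContext' and 'logsTopic'
// are assumed to have been set up earlier in the test.
generateCheckpointTime(loggingContext, 20, logsTopic);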
Use of co.cask.cdap.logging.read.LogEvent in project cdap by caskdata.
From the class MockLogReader, method getLog:
@Override
public CloseableIterator<LogEvent> getLog(LoggingContext loggingContext, long fromTimeMs, long toTimeMs,
                                          Filter filter) {
  CollectingCallback collectingCallback = new CollectingCallback();
  // Since this is only used in tests, there is no need to read logs lazily
  // (which is the point of returning an Iterator).
  long fromOffset = getOffset(fromTimeMs / 1000);
  long toOffset = getOffset(toTimeMs / 1000);
  getLogNext(loggingContext, new ReadRange(fromTimeMs, toTimeMs, fromOffset),
             (int) (toOffset - fromOffset), filter, collectingCallback);
  final Iterator<LogEvent> iterator = collectingCallback.getLogEvents().iterator();
  return new CloseableIterator<LogEvent>() {
    @Override
    public boolean hasNext() {
      return iterator.hasNext();
    }

    @Override
    public LogEvent next() {
      return iterator.next();
    }

    @Override
    public void remove() {
      iterator.remove();
    }

    @Override
    public void close() {
      // no-op
    }
  };
}
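Callers of getLog receive a CloseableIterator, so it should still be closed even though this mock's close() is a no-op. A minimal consumption sketch, assuming a MockLogReader instance and a logging context that are not part of the excerpt:

// 'mockLogReader' and 'loggingContext' are assumptions for illustration.
CloseableIterator<LogEvent> events =
  mockLogReader.getLog(loggingContext, 0, Long.MAX_VALUE, Filter.EMPTY_FILTER);
try {
  while (events.hasNext()) {
    LogEvent event = events.next();
    // e.g. inspect event.getLoggingEvent().getFormattedMessage()
  }
} finally {
  events.close();
}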
Use of co.cask.cdap.logging.read.LogEvent in project cdap by caskdata.
From the class MockLogReader, method getLogNext:
@Override
public void getLogNext(LoggingContext loggingContext, ReadRange readRange, int maxEvents, Filter filter,
                       Callback callback) {
  if (readRange.getKafkaOffset() < 0) {
    getLogPrev(loggingContext, readRange, maxEvents, filter, callback);
    return;
  }

  Filter contextFilter = LoggingContextHelper.createFilter(loggingContext);

  callback.init();
  try {
    int count = 0;
    for (LogEvent logLine : logEvents) {
      if (logLine.getOffset().getKafkaOffset() >= readRange.getKafkaOffset()) {
        long logTime = logLine.getLoggingEvent().getTimeStamp();
        // Skip events that do not belong to the logging context or fall outside the requested time range
        if (!contextFilter.match(logLine.getLoggingEvent()) || logTime < readRange.getFromMillis()
            || logTime >= readRange.getToMillis()) {
          continue;
        }

        if (++count > maxEvents) {
          break;
        }

        if (!filter.match(logLine.getLoggingEvent())) {
          continue;
        }

        callback.handle(logLine);
      }
    }
  } catch (Throwable e) {
    LOG.error("Got exception", e);
  } finally {
    callback.close();
  }
}
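The callback-based method can also be driven directly, much as getLog does above. A minimal sketch using the same CollectingCallback, where the reader instance, logging context, and read-range values are illustrative assumptions:

// 'mockLogReader' and 'loggingContext' are assumptions; the range covers all offsets and times.
CollectingCallback callback = new CollectingCallback();
mockLogReader.getLogNext(loggingContext, new ReadRange(0, Long.MAX_VALUE, 0), 10,
                         Filter.EMPTY_FILTER, callback);
List<LogEvent> collected = callback.getLogEvents();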