Search in sources:

Example 1 with Filter

Use of co.cask.cdap.logging.filter.Filter in project cdap by caskdata.

From the class MockLogReader, method getLogPrev:

@Override
public void getLogPrev(LoggingContext loggingContext, ReadRange readRange, int maxEvents, Filter filter, Callback callback) {
    // A negative Kafka offset means "read from the latest"; normalize it to the mock's MAX offset.
    if (readRange.getKafkaOffset() < 0) {
        readRange = new ReadRange(readRange.getFromMillis(), readRange.getToMillis(), MAX);
    }
    Filter contextFilter = LoggingContextHelper.createFilter(loggingContext);
    callback.init();
    try {
        int count = 0;
        // Walk back at most maxEvents from the requested offset.
        long startOffset = readRange.getKafkaOffset() - maxEvents;
        for (LogEvent logLine : logEvents) {
            long logTime = logLine.getLoggingEvent().getTimeStamp();
            if (!contextFilter.match(logLine.getLoggingEvent()) || logTime < readRange.getFromMillis() || logTime >= readRange.getToMillis()) {
                continue;
            }
            if (logLine.getOffset().getKafkaOffset() >= startOffset && logLine.getOffset().getKafkaOffset() < readRange.getKafkaOffset()) {
                if (++count > maxEvents) {
                    break;
                }
                // The mock simulates filtering: any non-empty filter drops events at odd Kafka offsets.
                if (filter != Filter.EMPTY_FILTER && logLine.getOffset().getKafkaOffset() % 2 != 0) {
                    continue;
                }
                callback.handle(logLine);
            }
        }
    } catch (Throwable e) {
        LOG.error("Got exception", e);
    } finally {
        callback.close();
    }
}
Also used: ReadRange(co.cask.cdap.logging.read.ReadRange) Filter(co.cask.cdap.logging.filter.Filter) LogEvent(co.cask.cdap.logging.read.LogEvent)
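
The mock drives its Callback through a strict init/handle/close lifecycle, as the calls above show. Below is a minimal sketch of a collecting Callback for tests. It assumes Callback declares exactly the init(), handle(LogEvent), and close() methods invoked above (the real interface may declare more), and the class name CollectingCallback is hypothetical.

import java.util.ArrayList;
import java.util.List;

import co.cask.cdap.logging.read.Callback;
import co.cask.cdap.logging.read.LogEvent;

// Hypothetical test helper: buffers every delivered LogEvent so a test can assert on it.
public class CollectingCallback implements Callback {

    private final List<LogEvent> events = new ArrayList<>();

    @Override
    public void init() {
        events.clear();
    }

    @Override
    public void handle(LogEvent event) {
        events.add(event);
    }

    @Override
    public void close() {
        // Nothing to release here; a real callback might flush a response instead.
    }

    public List<LogEvent> getEvents() {
        return events;
    }
}

A test would pass an instance to getLogPrev and assert on getEvents() after the call returns.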

Example 2 with Filter

Use of co.cask.cdap.logging.filter.Filter in project cdap by caskdata.

From the class KafkaLogReader, method getLogNext:

@Override
public void getLogNext(LoggingContext loggingContext, ReadRange readRange, int maxEvents, Filter filter, Callback callback) {
    // A LATEST offset means the caller wants the newest events; serve those via the backward read.
    if (readRange.getKafkaOffset() == ReadRange.LATEST.getKafkaOffset()) {
        getLogPrev(loggingContext, readRange, maxEvents, filter, callback);
        return;
    }
    int partition = partitioner.partition(loggingContext.getLogPartition(), -1);
    LOG.trace("Reading from kafka {}:{}", topic, partition);
    callback.init();
    KafkaConsumer kafkaConsumer = new KafkaConsumer(brokerService, topic, partition, KAFKA_FETCH_TIMEOUT_MS);
    try {
        // If the Kafka offset is unknown, look up the offset closest to readRange.getFromMillis().
        if (readRange.getKafkaOffset() == LogOffset.INVALID_KAFKA_OFFSET) {
            readRange = new ReadRange(readRange.getFromMillis(), readRange.getToMillis(), kafkaConsumer.fetchOffsetBefore(readRange.getFromMillis()));
        }
        Filter logFilter = new AndFilter(ImmutableList.of(LoggingContextHelper.createFilter(loggingContext), filter));
        long latestOffset = kafkaConsumer.fetchLatestOffset();
        long startOffset = readRange.getKafkaOffset() + 1;
        LOG.trace("Using startOffset={}, latestOffset={}, readRange={}", startOffset, latestOffset, readRange);
        if (startOffset >= latestOffset) {
            // At end of events, nothing to return
            return;
        }
        KafkaCallback kafkaCallback = new KafkaCallback(logFilter, serializer, latestOffset, maxEvents, callback, readRange.getFromMillis());
        fetchLogEvents(kafkaConsumer, kafkaCallback, startOffset, latestOffset, maxEvents, readRange);
    } catch (Throwable e) {
        LOG.error("Got exception: ", e);
        throw Throwables.propagate(e);
    } finally {
        try {
            kafkaConsumer.close();
        } catch (IOException e) {
            LOG.error(String.format("Caught exception when closing KafkaConsumer for topic %s, partition %d", topic, partition), e);
        }
    }
}
Also used: AndFilter(co.cask.cdap.logging.filter.AndFilter) Filter(co.cask.cdap.logging.filter.Filter) KafkaConsumer(co.cask.cdap.logging.kafka.KafkaConsumer) IOException(java.io.IOException)
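
KafkaLogReader narrows results by AND-ing the context filter with the caller's filter. The sketch below shows a custom Filter that keeps only events at or above a given Logback level. It assumes the Filter interface exposes the single match(ILoggingEvent) method seen in Example 1, which is inferred from these examples rather than a documented contract; MinLevelFilter is a hypothetical name.

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;

import co.cask.cdap.logging.filter.Filter;

// Hypothetical filter: passes events whose level is at least the configured threshold.
public class MinLevelFilter implements Filter {

    private final Level threshold;

    public MinLevelFilter(Level threshold) {
        this.threshold = threshold;
    }

    @Override
    public boolean match(ILoggingEvent event) {
        return event.getLevel().isGreaterOrEqual(threshold);
    }
}

Such a filter would compose exactly as in the method above, e.g. new AndFilter(ImmutableList.of(contextFilter, new MinLevelFilter(Level.WARN))).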

Example 3 with Filter

Use of co.cask.cdap.logging.filter.Filter in project cdap by caskdata.

From the class KafkaLogReader, method getLogPrev:

@Override
public void getLogPrev(LoggingContext loggingContext, ReadRange readRange, int maxEvents, Filter filter, Callback callback) {
    if (readRange.getKafkaOffset() == LogOffset.INVALID_KAFKA_OFFSET) {
        readRange = new ReadRange(readRange.getFromMillis(), readRange.getToMillis(), ReadRange.LATEST.getKafkaOffset());
    }
    int partition = partitioner.partition(loggingContext.getLogPartition(), -1);
    LOG.trace("Reading from kafka partition {}", partition);
    callback.init();
    KafkaConsumer kafkaConsumer = new KafkaConsumer(brokerService, topic, partition, KAFKA_FETCH_TIMEOUT_MS);
    try {
        Filter logFilter = new AndFilter(ImmutableList.of(LoggingContextHelper.createFilter(loggingContext), filter));
        long latestOffset = kafkaConsumer.fetchLatestOffset();
        long earliestOffset = kafkaConsumer.fetchEarliestOffset();
        long stopOffset;
        long startOffset;
        // A negative offset means "start from the newest event".
        if (readRange.getKafkaOffset() < 0) {
            stopOffset = latestOffset;
        } else {
            stopOffset = readRange.getKafkaOffset();
        }
        startOffset = stopOffset - maxEvents;
        if (startOffset < earliestOffset) {
            startOffset = earliestOffset;
        }
        LOG.trace("Using startOffset={}, latestOffset={}, readRange={}", startOffset, latestOffset, readRange);
        if (startOffset >= stopOffset || startOffset >= latestOffset) {
            // At end of kafka events, nothing to return
            return;
        }
        KafkaCallback kafkaCallback = new KafkaCallback(logFilter, serializer, stopOffset, maxEvents, callback, readRange.getFromMillis());
        // Events between startOffset and stopOffset may not contain the logs we are looking for;
        // we need to return at least one log offset for the next getLogPrev call to work.
        int fetchCount = 0;
        while (fetchCount == 0 && kafkaCallback.getEventsRead() <= MAX_READ_EVENTS_KAFKA) {
            fetchCount = fetchLogEvents(kafkaConsumer, kafkaCallback, startOffset, stopOffset, maxEvents, readRange);
            stopOffset = startOffset;
            if (stopOffset <= earliestOffset) {
                // Truly no log messages found.
                break;
            }
            startOffset = stopOffset - maxEvents;
            if (startOffset < earliestOffset) {
                startOffset = earliestOffset;
            }
        }
    } catch (Throwable e) {
        LOG.error("Got exception: ", e);
        throw Throwables.propagate(e);
    } finally {
        try {
            kafkaConsumer.close();
        } catch (IOException e) {
            LOG.error(String.format("Caught exception when closing KafkaConsumer for topic %s, partition %d", topic, partition), e);
        }
    }
}
Also used: AndFilter(co.cask.cdap.logging.filter.AndFilter) Filter(co.cask.cdap.logging.filter.Filter) KafkaConsumer(co.cask.cdap.logging.kafka.KafkaConsumer) IOException(java.io.IOException)
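
The loop above slides a window of maxEvents backward toward earliestOffset until a fetch returns something. The standalone sketch below isolates just that window arithmetic, with no Kafka involved, so the stepping is easy to verify; all names are local to the example.

// Demonstrates the backward offset windowing used by getLogPrev, in isolation.
public class BackwardWindowDemo {

    public static void main(String[] args) {
        long earliestOffset = 3;
        long stopOffset = 20;   // e.g. the requested Kafka offset
        int maxEvents = 5;

        while (stopOffset > earliestOffset) {
            long startOffset = Math.max(stopOffset - maxEvents, earliestOffset);
            System.out.println("fetch [" + startOffset + ", " + stopOffset + ")");
            // The next iteration scans the window immediately before this one.
            stopOffset = startOffset;
        }
        // Prints: fetch [15, 20)  fetch [10, 15)  fetch [5, 10)  fetch [3, 5)
    }
}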

Example 4 with Filter

Use of co.cask.cdap.logging.filter.Filter in project cdap by caskdata.

From the class AbstractLogHandler, method doPrev:

protected void doPrev(HttpResponder responder, LoggingContext loggingContext, int maxEvents, String fromOffsetStr, boolean escape, String filterStr, @Nullable RunRecordMeta runRecord, String format, List<String> fieldsToSuppress) {
    try {
        Filter filter = FilterParser.parse(filterStr);
        Callback logCallback = getNextOrPrevLogsCallback(format, responder, fieldsToSuppress, escape);
        LogOffset logOffset = FormattedTextLogEvent.parseLogOffset(fromOffsetStr);
        ReadRange readRange = ReadRange.createToRange(logOffset);
        // Adjust the requested range using the run record, when one is available.
        readRange = adjustReadRange(readRange, runRecord, true);
        try {
            logReader.getLogPrev(loggingContext, readRange, maxEvents, filter, logCallback);
        } catch (Exception ex) {
            LOG.debug("Exception while reading logs for logging context {}", loggingContext, ex);
        } finally {
            logCallback.close();
        }
    } catch (SecurityException e) {
        responder.sendStatus(HttpResponseStatus.UNAUTHORIZED);
    } catch (IllegalArgumentException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
    }
}
Also used: Callback(co.cask.cdap.logging.read.Callback) ReadRange(co.cask.cdap.logging.read.ReadRange) Filter(co.cask.cdap.logging.filter.Filter) LogOffset(co.cask.cdap.logging.read.LogOffset)
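
doPrev guarantees that the callback is closed exactly once, even when the reader throws; only parse errors escape to the outer catch blocks. Below is a compact sketch of that flow minus the HTTP plumbing. PrevLogsFetcher is hypothetical, FilterParser's package is assumed to sit beside Filter, and the pass-through behavior of an empty filter string is an assumption.

import co.cask.cdap.common.logging.LoggingContext;
import co.cask.cdap.logging.filter.Filter;
import co.cask.cdap.logging.filter.FilterParser; // package assumed to sit beside Filter
import co.cask.cdap.logging.read.Callback;
import co.cask.cdap.logging.read.LogOffset;
import co.cask.cdap.logging.read.LogReader;
import co.cask.cdap.logging.read.ReadRange;

// Hypothetical helper mirroring doPrev's core flow, without the HTTP plumbing.
public final class PrevLogsFetcher {

    public static void fetchPrev(LogReader logReader, LoggingContext context, LogOffset fromOffset,
                                 int maxEvents, String filterStr, Callback callback) throws Exception {
        // Turn the request's filter string into a Filter; an empty string is assumed
        // to yield a pass-through filter.
        Filter filter = FilterParser.parse(filterStr);
        ReadRange readRange = ReadRange.createToRange(fromOffset);
        try {
            logReader.getLogPrev(context, readRange, maxEvents, filter, callback);
        } finally {
            // Mirror doPrev's guarantee: the callback is closed exactly once, even on failure.
            callback.close();
        }
    }

    private PrevLogsFetcher() {
    }
}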

Example 5 with Filter

Use of co.cask.cdap.logging.filter.Filter in project cdap by caskdata.

From the class AbstractLogHandler, method doGetLogs:

protected void doGetLogs(HttpResponder responder, LoggingContext loggingContext, long fromTimeSecsParam, long toTimeSecsParam, boolean escape, String filterStr, @Nullable RunRecordMeta runRecord, String format, List<String> fieldsToSuppress) {
    try {
        TimeRange timeRange = parseTime(fromTimeSecsParam, toTimeSecsParam, responder);
        if (timeRange == null) {
            return;
        }
        Filter filter = FilterParser.parse(filterStr);
        ReadRange readRange = new ReadRange(timeRange.getFromMillis(), timeRange.getToMillis(), LogOffset.INVALID_KAFKA_OFFSET);
        // Adjust the range using the run record; the flag marks whether an explicit from-time was supplied.
        readRange = adjustReadRange(readRange, runRecord, fromTimeSecsParam != -1);
        AbstractChunkedLogProducer logsProducer = null;
        try {
            // the iterator is closed by the BodyProducer passed to the HttpResponder
            CloseableIterator<LogEvent> logIter = logReader.getLog(loggingContext, readRange.getFromMillis(), readRange.getToMillis(), filter);
            logsProducer = getFullLogsProducer(format, logIter, fieldsToSuppress, escape);
        } catch (Exception ex) {
            LOG.debug("Exception while reading logs for logging context {}", loggingContext, ex);
            if (logsProducer != null) {
                logsProducer.close();
            }
            responder.sendStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
            return;
        }
        responder.sendContent(HttpResponseStatus.OK, logsProducer, logsProducer.getResponseHeaders());
    } catch (SecurityException e) {
        responder.sendStatus(HttpResponseStatus.UNAUTHORIZED);
    } catch (IllegalArgumentException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
    }
}
Also used: ReadRange(co.cask.cdap.logging.read.ReadRange) Filter(co.cask.cdap.logging.filter.Filter) LogEvent(co.cask.cdap.logging.read.LogEvent)
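
getLog hands back a CloseableIterator<LogEvent> that someone must close; in doGetLogs the chunked producer owns it, but a direct caller has to release it itself. A minimal sketch, assuming CloseableIterator is AutoCloseable (consistent with the co.cask.cdap.api.dataset.lib.CloseableIterator entry under Aggregations); LogDumper is hypothetical.

import co.cask.cdap.api.dataset.lib.CloseableIterator;
import co.cask.cdap.common.logging.LoggingContext;
import co.cask.cdap.logging.filter.Filter;
import co.cask.cdap.logging.read.LogEvent;
import co.cask.cdap.logging.read.LogReader;

// Hypothetical direct consumer of LogReader.getLog; try-with-resources releases the iterator.
public final class LogDumper {

    // Declared broadly since close() may throw, depending on the CloseableIterator variant.
    public static void dump(LogReader logReader, LoggingContext context,
                            long fromMillis, long toMillis, Filter filter) throws Exception {
        try (CloseableIterator<LogEvent> logIter =
                 logReader.getLog(context, fromMillis, toMillis, filter)) {
            while (logIter.hasNext()) {
                LogEvent event = logIter.next();
                System.out.println(event.getLoggingEvent().getFormattedMessage());
            }
        }
    }

    private LogDumper() {
    }
}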

Aggregations

Filter (co.cask.cdap.logging.filter.Filter) 11
AndFilter (co.cask.cdap.logging.filter.AndFilter) 6
ReadRange (co.cask.cdap.logging.read.ReadRange) 4
LogEvent (co.cask.cdap.logging.read.LogEvent) 3
LogLocation (co.cask.cdap.logging.write.LogLocation) 3
IOException (java.io.IOException) 3
KafkaConsumer (co.cask.cdap.logging.kafka.KafkaConsumer) 2
Callback (co.cask.cdap.logging.read.Callback) 2
LogOffset (co.cask.cdap.logging.read.LogOffset) 2
AbstractCloseableIterator (co.cask.cdap.api.dataset.lib.AbstractCloseableIterator) 1
CloseableIterator (co.cask.cdap.api.dataset.lib.CloseableIterator) 1
ApplicationLoggingContext (co.cask.cdap.common.logging.ApplicationLoggingContext) 1
ComponentLoggingContext (co.cask.cdap.common.logging.ComponentLoggingContext) 1
LoggingContext (co.cask.cdap.common.logging.LoggingContext) 1
NamespaceLoggingContext (co.cask.cdap.common.logging.NamespaceLoggingContext) 1
ServiceLoggingContext (co.cask.cdap.common.logging.ServiceLoggingContext) 1
MdcExpression (co.cask.cdap.logging.filter.MdcExpression) 1
OrFilter (co.cask.cdap.logging.filter.OrFilter) 1
ImmutableList (com.google.common.collect.ImmutableList) 1
Collection (java.util.Collection) 1
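
The index above also lists OrFilter and MdcExpression, which compose the same way AndFilter does in Examples 2 and 3. Below is a sketch of one such composition. It assumes OrFilter mirrors AndFilter's list constructor and that MdcExpression takes an MDC key and an expected value; both shapes are inferred from the names indexed above rather than verified signatures, and the ".runId" key is purely illustrative.

import com.google.common.collect.ImmutableList;

import co.cask.cdap.logging.filter.AndFilter;
import co.cask.cdap.logging.filter.Filter;
import co.cask.cdap.logging.filter.MdcExpression;
import co.cask.cdap.logging.filter.OrFilter;

// Hypothetical composition: match events from either of two runs, within a given context filter.
public final class FilterComposition {

    public static Filter eitherRun(Filter contextFilter, String runId1, String runId2) {
        // OrFilter's list constructor is assumed to mirror AndFilter's, seen in Examples 2 and 3.
        Filter either = new OrFilter(ImmutableList.of(
            new MdcExpression(".runId", runId1),   // ".runId" is a hypothetical MDC key
            new MdcExpression(".runId", runId2)));
        return new AndFilter(ImmutableList.of(contextFilter, either));
    }

    private FilterComposition() {
    }
}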