use of co.cask.cdap.filetailer.event.FileTailerEvent in project cdap-ingest by caskdata.
the class FileTailerQueue method take.
/**
 * Takes an event from the queue, blocking until one is available.
 *
 * @return the taken event
 * @throws InterruptedException if interrupted while waiting
 */
public FileTailerEvent take() throws InterruptedException {
  LOG.trace("Attempt to take event from queue");
  FileTailerEvent event = queue.take();
  LOG.trace("Attempt to take event {} from queue was successful", event);
  return event;
}
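The take method simply delegates to the underlying blocking queue. For orientation only, the sketch below shows how a consumer might drain events on a dedicated thread; the consumer thread and the process method are hypothetical and not part of cdap-ingest.
// Illustrative only: a hypothetical consumer thread draining a FileTailerQueue.
FileTailerQueue queue = new FileTailerQueue(DEFAULT_QUEUE_SIZE);
Thread consumer = new Thread(() -> {
  try {
    while (!Thread.currentThread().isInterrupted()) {
      FileTailerEvent event = queue.take();  // blocks until an event is available
      process(event);                        // hypothetical downstream handler
    }
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();      // restore interrupt status and stop
  }
}, "file-tailer-consumer");
consumer.start();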
use of co.cask.cdap.filetailer.event.FileTailerEvent in project cdap-ingest by caskdata.
the class FileTailerSinkTest method basicTestWithCustomPackSize.
@Test
public void basicTestWithCustomPackSize() throws Exception {
  FileTailerStateProcessor stateProcessor = Mockito.mock(FileTailerStateProcessor.class);
  FileTailerMetricsProcessor metricsProcessor = Mockito.mock(FileTailerMetricsProcessor.class);
  FileTailerQueue queue = new FileTailerQueue(DEFAULT_QUEUE_SIZE);
  StreamWriter writerMock = getDummyStreamWriter();
  FileTailerSink sink = new FileTailerSink(queue, writerMock, SinkStrategy.LOADBALANCE,
                                           stateProcessor, metricsProcessor, null, CUSTOM_PACK_SIZE);
  try {
    sink.startAsync();
    for (int i = 0; i < TEST_EVENTS_SIZE; i++) {
      queue.put(new FileTailerEvent(new FileTailerState("file", 0L, 42, 0L), "test", Charset.defaultCharset()));
    }
    Mockito.verify(writerMock, Mockito.timeout(10000).times(TEST_EVENTS_SIZE)).write("test", Charset.defaultCharset());
  } finally {
    sink.stopAsync();
  }
}
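Mockito.timeout(10000) makes the verification poll for up to ten seconds, giving the asynchronous sink time to drain the queue before the write calls are counted. The getDummyStreamWriter helper is defined elsewhere in the test class; a plausible minimal version, shown here only as an assumption about what it might look like, simply returns a plain mock:
// Assumed sketch of the helper used above; the actual implementation lives in FileTailerSinkTest.
private StreamWriter getDummyStreamWriter() {
  return Mockito.mock(StreamWriter.class);
}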
use of co.cask.cdap.filetailer.event.FileTailerEvent in project cdap-ingest by caskdata.
the class LogTailer method tryReadFromFile.
/**
 * Tries to read log entries from the file.
 *
 * @param channel FileChannel stream for the current log file
 * @param separator log entry separator
 * @param currentLogFile current log file
 * @param modifyTime last known modified time of the current log file
 * @return last modified time of the current log file
 * @throws IOException if an entry could not be read after failureRetryLimit attempts
 * @throws InterruptedException if the thread was interrupted
 */
private long tryReadFromFile(FileChannel channel, char separator, File currentLogFile, long modifyTime)
    throws IOException, InterruptedException {
  int retryNumber = 0;
  long position = channel.position();
  StringBuilder sb = new StringBuilder();
  while (isRunning()) {
    if (retryNumber > failureRetryLimit && failureRetryLimit > 0) {
      LOG.error("Failed to read line after {} attempts", retryNumber);
      throw new IOException();
    }
    try {
      readBuffer.clear();
      decoded.clear();
      int len = channel.read(readBuffer);
      if (len >= 0) {
        readBuffer.flip();
        decoder.decode(readBuffer, decoded, false);
        decoded.flip();
        for (int i = 0; i < decoded.length(); i++) {
          char ch = decoded.charAt(i);
          if (ch != separator) {
            sb.append(ch);
          } else {
            String line = sb.toString();
            int lineHash = line.hashCode();
            LOG.debug("From log file {} read entry: {}", currentLogFile, line);
            modifyTime = currentLogFile.lastModified();
            queue.put(new FileTailerEvent(new FileTailerState(currentLogFile.toString(), position, lineHash, modifyTime),
                                          line, charset));
            metricsProcessor.onReadEventMetric(line.getBytes(charset).length);
            position += line.getBytes(charset).length + separatorByteLength;
            sb.setLength(0);
          }
        }
      } else {
        break;
      }
    } catch (IOException e) {
      retryNumber++;
      Thread.sleep(failureSleepInterval);
    }
  }
  return modifyTime;
}
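A caller is expected to open a FileChannel on the current log file and invoke tryReadFromFile repeatedly, carrying the returned modification time forward (for example, to detect rotation). The sketch below illustrates that contract under stated assumptions: the runLoop method name and the sleepInterval field are hypothetical, while isRunning and tryReadFromFile are the members shown above.
// Hypothetical driver loop; only isRunning() and tryReadFromFile(...) come from the class above.
private void runLoop(File currentLogFile, char separator) throws IOException, InterruptedException {
  try (FileChannel channel = FileChannel.open(currentLogFile.toPath(), StandardOpenOption.READ)) {
    long modifyTime = currentLogFile.lastModified();
    while (isRunning()) {
      // Read everything currently appended to the file, publishing each entry to the queue.
      modifyTime = tryReadFromFile(channel, separator, currentLogFile, modifyTime);
      // Pause before polling the file for newly appended data (sleepInterval is an assumed field).
      Thread.sleep(sleepInterval);
    }
  }
}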