Use of org.apache.druid.data.input.Firehose in project druid by druid-io: class PrefetchableTextFilesFirehoseFactoryTest, method testReconnectWithCache.
@Test
public void testReconnectWithCache() throws IOException {
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 0);
  final File firehoseTmpDir = createFirehoseTmpDir("testReconnectWithCache");
  for (int i = 0; i < 5; i++) {
    final List<Row> rows = new ArrayList<>();
    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
      if (i > 0) {
        Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
      }
      while (firehose.hasMore()) {
        rows.add(firehose.nextRow());
      }
    }
    assertResult(rows);
    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
  }
}
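The connect/hasMore/nextRow loop above is how each of these tests drains a firehose; a minimal helper sketch of that pattern, using only the calls already shown in the snippets (the method name readAllRows and the generic FirehoseFactory parameter are illustrative, not part of the test class):

// Illustrative helper, not from the Druid source: drains a firehose into a list.
// connect() opens the firehose against a temporary directory; try-with-resources
// guarantees it is closed once hasMore() returns false.
private static List<Row> readAllRows(
    FirehoseFactory<InputRowParser> factory,
    InputRowParser parser,
    File firehoseTmpDir
) throws IOException {
  final List<Row> rows = new ArrayList<>();
  try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
  }
  return rows;
}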
Use of org.apache.druid.data.input.Firehose in project druid by druid-io: class PrefetchableTextFilesFirehoseFactoryTest, method testWithSmallCacheAndLargeFetch.
@Test
public void testWithSmallCacheAndLargeFetch() throws IOException {
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 1024, 2048);
  final List<Row> rows = new ArrayList<>();
  final File firehoseTmpDir = createFirehoseTmpDir("testWithSmallCacheAndLargeFetch");
  try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
  }
  assertResult(rows);
  assertNumRemainingCacheFiles(firehoseTmpDir, 1);
}
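assertNumRemainingCacheFiles is a helper defined elsewhere in the test class; a plausible sketch of what it checks, assuming the prefetching firehose keeps its cache files directly under firehoseTmpDir (that layout is an assumption, not confirmed by the snippet):

// Hypothetical sketch of the helper used above. Assumes cached files remain
// in the firehose temporary directory after the firehose is closed.
private static void assertNumRemainingCacheFiles(File firehoseTmpDir, int expectedNumFiles) {
  final String[] files = firehoseTmpDir.list();
  Assert.assertNotNull(files);
  Assert.assertEquals(expectedNumFiles, files.length);
}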
Use of org.apache.druid.data.input.Firehose in project druid by druid-io: class PrefetchableTextFilesFirehoseFactoryTest, method testTimeout.
@Test
public void testTimeout() throws IOException {
  expectedException.expect(RuntimeException.class);
  expectedException.expectCause(CoreMatchers.instanceOf(TimeoutException.class));
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.withSleepMillis(TEST_DIR, 1000);
  try (Firehose firehose = factory.connect(PARSER, createFirehoseTmpDir("testTimeout"))) {
    while (firehose.hasMore()) {
      firehose.nextRow();
    }
  }
}
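expectedException is a JUnit 4 rule declared on the test class rather than inside this method; a minimal sketch of that declaration, assuming the standard JUnit 4 ExpectedException rule:

// JUnit 4 rule backing the expectedException.expect(...) calls above.
@Rule
public ExpectedException expectedException = ExpectedException.none();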
Use of org.apache.druid.data.input.Firehose in project druid by druid-io: class NoopTask, method run.
@Override
public TaskStatus run(TaskToolbox toolbox) throws Exception {
  if (firehoseFactory != null) {
    log.info("Connecting firehose");
  }
  try (Firehose firehose = firehoseFactory != null ? firehoseFactory.connect(null, null) : null) {
    log.info("Running noop task[%s]", getId());
    log.info("Sleeping for %,d millis.", runTime);
    Thread.sleep(runTime);
    log.info("Woke up!");
    return TaskStatus.success(getId());
  }
}
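When no firehoseFactory is configured, the try-with-resources resource above is null; Java permits this and simply skips close() for a null resource, so the sleep still runs. A standalone sketch of that language behavior (unrelated to Druid's classes):

// Illustrative only: a null resource in try-with-resources is legal;
// the body executes and close() is skipped because the resource is null.
static void nullResourceExample() throws Exception {
  final AutoCloseable firehose = null;
  try (AutoCloseable resource = firehose) {
    System.out.println("body runs; close() will not be called on null");
  }
}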
Use of org.apache.druid.data.input.Firehose in project druid by druid-io: class IngestSegmentFirehoseFactoryTimelineTest, method testSplit.
private void testSplit() throws Exception {
  Assert.assertTrue(factory.isSplittable());
  final int numSplits = factory.getNumSplits(null);
  // We set maxInputSegmentBytesPerSplit to 2 so each segment should become a split.
  Assert.assertEquals(segmentCount, numSplits);
  final List<InputSplit<List<WindowedSegmentId>>> splits =
      factory.getSplits(null).collect(Collectors.toList());
  Assert.assertEquals(numSplits, splits.size());
  int count = 0;
  long sum = 0;
  for (InputSplit<List<WindowedSegmentId>> split : splits) {
    final FiniteFirehoseFactory<InputRowParser, List<WindowedSegmentId>> splitFactory =
        factory.withSplit(split);
    try (final Firehose firehose = splitFactory.connect(ROW_PARSER, tmpDir)) {
      while (firehose.hasMore()) {
        final InputRow row = firehose.nextRow();
        count++;
        sum += row.getMetric(METRICS[0]).longValue();
      }
    }
  }
  Assert.assertEquals("count", expectedCount, count);
  Assert.assertEquals("sum", expectedSum, sum);
}
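The splittable-factory contract exercised above (isSplittable, getSplits, withSplit, connect) is what allows each split to be ingested independently; a brief sketch of that fan-out, using only the calls from the test (the hand-off to separate tasks is illustrative, not Druid code):

// Illustrative only: each split produces its own factory via withSplit(),
// so every split can be consumed by a separate ingestion task.
final List<InputSplit<List<WindowedSegmentId>>> splits =
    factory.getSplits(null).collect(Collectors.toList());
for (InputSplit<List<WindowedSegmentId>> split : splits) {
  final FiniteFirehoseFactory<InputRowParser, List<WindowedSegmentId>> splitFactory =
      factory.withSplit(split);
  // Hand splitFactory to its own task; each task then calls
  // splitFactory.connect(parser, tmpDir) and drains its firehose independently.
}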