Search in sources:

Example 26 with Firehose

use of org.apache.druid.data.input.Firehose in project druid by druid-io.

From class BatchDeltaIngestionTest, method testIngestion.

/**
 * Runs a full batch ingestion through {@link IndexGeneratorJob}, then loads the produced
 * segment back and verifies its rows, dimensions, and metrics against expectations.
 *
 * @param config                 indexer config describing the ingestion to run
 * @param expectedRowsGenerated  rows expected to be present in the resulting segment
 * @param windowedDataSegment    segment whose interval bounds the read-back window
 * @param expectedDimensions     dimension names expected in the segment
 * @param expectedMetrics        metric names expected in the segment
 * @throws Exception on job failure or I/O error
 */
private void testIngestion(HadoopDruidIndexerConfig config, List<ImmutableMap<String, Object>> expectedRowsGenerated, WindowedDataSegment windowedDataSegment, List<String> expectedDimensions, List<String> expectedMetrics) throws Exception {
    // Run the index-generator job and publish its segments.
    IndexGeneratorJob job = new IndexGeneratorJob(config);
    Assert.assertTrue(JobHelper.runJobs(ImmutableList.of(job)));
    List<DataSegmentAndIndexZipFilePath> dataSegmentAndIndexZipFilePaths = IndexGeneratorJob.getPublishedSegmentAndIndexZipFilePaths(config);
    JobHelper.renameIndexFilesForSegments(config.getSchema(), dataSegmentAndIndexZipFilePaths);
    // Intermediate working files must be cleaned up after a successful run.
    JobHelper.maybeDeleteIntermediatePath(true, config.getSchema());
    File workingPath = new File(config.makeIntermediatePath().toUri().getPath());
    Assert.assertFalse(workingPath.exists());
    // The segment is expected at <outputPath>/<dataSource>/<start>_<end>/<version>/0/index.zip.
    File segmentFolder = new File(StringUtils.format("%s/%s/%s_%s/%s/0", config.getSchema().getIOConfig().getSegmentOutputPath(), config.getSchema().getDataSchema().getDataSource(), INTERVAL_FULL.getStart().toString(), INTERVAL_FULL.getEnd().toString(), config.getSchema().getTuningConfig().getVersion()));
    Assert.assertTrue(segmentFolder.exists());
    File indexZip = new File(segmentFolder, "index.zip");
    Assert.assertTrue(indexZip.exists());
    // Unzip and load the segment so its contents can be read back.
    File tmpUnzippedSegmentDir = temporaryFolder.newFolder();
    new LocalDataSegmentPuller().getSegmentFiles(indexZip, tmpUnzippedSegmentDir);
    QueryableIndex index = INDEX_IO.loadIndex(tmpUnzippedSegmentDir);
    StorageAdapter adapter = new QueryableIndexStorageAdapter(index);
    List<InputRow> rows = new ArrayList<>();
    // Firehose is Closeable; use try-with-resources so it is always released
    // (the original leaked it — the sibling SqlFirehoseFactoryTest examples close theirs).
    try (Firehose firehose = new IngestSegmentFirehose(ImmutableList.of(new WindowedStorageAdapter(adapter, windowedDataSegment.getInterval())), TransformSpec.NONE, expectedDimensions, expectedMetrics, null)) {
        while (firehose.hasMore()) {
            rows.add(firehose.nextRow());
        }
    }
    verifyRows(expectedRowsGenerated, rows, expectedDimensions, expectedMetrics);
}
Also used : IngestSegmentFirehose(org.apache.druid.segment.realtime.firehose.IngestSegmentFirehose) IngestSegmentFirehose(org.apache.druid.segment.realtime.firehose.IngestSegmentFirehose) Firehose(org.apache.druid.data.input.Firehose) ArrayList(java.util.ArrayList) StorageAdapter(org.apache.druid.segment.StorageAdapter) QueryableIndexStorageAdapter(org.apache.druid.segment.QueryableIndexStorageAdapter) WindowedStorageAdapter(org.apache.druid.segment.realtime.firehose.WindowedStorageAdapter) QueryableIndexStorageAdapter(org.apache.druid.segment.QueryableIndexStorageAdapter) LocalDataSegmentPuller(org.apache.druid.segment.loading.LocalDataSegmentPuller) QueryableIndex(org.apache.druid.segment.QueryableIndex) InputRow(org.apache.druid.data.input.InputRow) File(java.io.File) WindowedStorageAdapter(org.apache.druid.segment.realtime.firehose.WindowedStorageAdapter)

Example 27 with Firehose

use of org.apache.druid.data.input.Firehose in project druid by druid-io.

From class CombiningFirehoseFactoryTest, method testCombiningfirehose.

@Test
public void testCombiningfirehose() throws IOException {
    // Firehose is Closeable; the original never closed it (resource leak).
    // try-with-resources guarantees close() even if an assertion fails.
    try (Firehose firehose = combiningFirehoseFactory.connect(null, null)) {
        // The combined firehose is expected to yield rows with epoch timestamps 1..5
        // whose "test" metric equals the timestamp.
        for (int i = 1; i < 6; i++) {
            Assert.assertTrue(firehose.hasMore());
            final InputRow inputRow = firehose.nextRow();
            Assert.assertEquals(i, inputRow.getTimestampFromEpoch());
            Assert.assertEquals(i, inputRow.getMetric("test").floatValue(), 0);
        }
        // After the five rows, the firehose must be exhausted.
        Assert.assertFalse(firehose.hasMore());
    }
}
Also used : Firehose(org.apache.druid.data.input.Firehose) InputRow(org.apache.druid.data.input.InputRow) Test(org.junit.Test)

Example 28 with Firehose

use of org.apache.druid.data.input.Firehose in project druid by druid-io.

From class InlineFirehoseFactoryTest, method testConnect.

@Test
public void testConnect() throws IOException {
    // Firehose is Closeable; the original leaked it — close via try-with-resources.
    try (Firehose firehose = target.connect(PARSER, NO_TEMP_DIR)) {
        // The inline data is expected to produce a row whose DIMENSION_1 holds exactly VALUE.
        InputRow row = firehose.nextRow();
        Assert.assertNotNull(row);
        List<String> values = row.getDimension(DIMENSION_1);
        Assert.assertNotNull(values);
        Assert.assertEquals(1, values.size());
        Assert.assertEquals(VALUE, values.get(0));
    }
}
Also used : Firehose(org.apache.druid.data.input.Firehose) InputRow(org.apache.druid.data.input.InputRow) Test(org.junit.Test)

Example 29 with Firehose

use of org.apache.druid.data.input.Firehose in project druid by druid-io.

From class SqlFirehoseFactoryTest, method testWithoutCache.

@Test
public void testWithoutCache() throws IOException {
    // Populate one Derby table, then read every row back through the SQL firehose.
    derbyConnector = derbyConnectorRule.getConnector();
    final SqlTestUtils sqlTestUtils = new SqlTestUtils(derbyConnector);
    sqlTestUtils.createAndUpdateTable(TABLE_NAME_1, 10);
    // NOTE(review): the 0L argument presumably sets the cache capacity to zero,
    // i.e. disables caching — confirm against SqlFirehoseFactory's constructor.
    final SqlFirehoseFactory sqlFirehoseFactory = new SqlFirehoseFactory(SQLLIST1, 0L, null, null, null, true, sqlTestUtils.getDerbyFirehoseConnector(), mapper);
    final File tmpDir = createFirehoseTmpDir("testWithoutCache");
    final List<Row> fetchedRows = new ArrayList<>();
    // try-with-resources guarantees the firehose is closed even on assertion failure.
    try (Firehose firehose = sqlFirehoseFactory.connect(parser, tmpDir)) {
        while (firehose.hasMore()) {
            fetchedRows.add(firehose.nextRow());
        }
    }
    assertResult(fetchedRows, SQLLIST1);
    // With caching off, no cache files should remain in the temp dir.
    assertNumRemainingCacheFiles(tmpDir, 0);
    sqlTestUtils.dropTable(TABLE_NAME_1);
}
Also used : SqlTestUtils(org.apache.druid.metadata.input.SqlTestUtils) Firehose(org.apache.druid.data.input.Firehose) ArrayList(java.util.ArrayList) Row(org.apache.druid.data.input.Row) File(java.io.File) Test(org.junit.Test)

Example 30 with Firehose

use of org.apache.druid.data.input.Firehose in project druid by druid-io.

From class SqlFirehoseFactoryTest, method testWithCacheAndFetch.

@Test
public void testWithCacheAndFetch() throws IOException {
    // Populate two Derby tables, then read all rows back through the SQL firehose.
    derbyConnector = derbyConnectorRule.getConnector();
    final SqlTestUtils sqlTestUtils = new SqlTestUtils(derbyConnector);
    sqlTestUtils.createAndUpdateTable(TABLE_NAME_1, 10);
    sqlTestUtils.createAndUpdateTable(TABLE_NAME_2, 10);
    // NOTE(review): here the nulls/0L occupy different positions than in testWithoutCache,
    // presumably enabling the cache/fetch path — confirm against SqlFirehoseFactory's constructor.
    final SqlFirehoseFactory sqlFirehoseFactory = new SqlFirehoseFactory(SQLLIST2, null, null, 0L, null, true, sqlTestUtils.getDerbyFirehoseConnector(), mapper);
    final File tmpDir = createFirehoseTmpDir("testWithCacheAndFetch");
    final List<Row> fetchedRows = new ArrayList<>();
    // try-with-resources guarantees the firehose is closed even on assertion failure.
    try (Firehose firehose = sqlFirehoseFactory.connect(parser, tmpDir)) {
        while (firehose.hasMore()) {
            fetchedRows.add(firehose.nextRow());
        }
    }
    assertResult(fetchedRows, SQLLIST2);
    // With caching enabled, one cache file per queried table is expected to remain.
    assertNumRemainingCacheFiles(tmpDir, 2);
    sqlTestUtils.dropTable(TABLE_NAME_1);
    sqlTestUtils.dropTable(TABLE_NAME_2);
}
Also used : SqlTestUtils(org.apache.druid.metadata.input.SqlTestUtils) Firehose(org.apache.druid.data.input.Firehose) ArrayList(java.util.ArrayList) Row(org.apache.druid.data.input.Row) File(java.io.File) Test(org.junit.Test)

Aggregations

Firehose (org.apache.druid.data.input.Firehose)30 Test (org.junit.Test)25 File (java.io.File)19 ArrayList (java.util.ArrayList)16 Row (org.apache.druid.data.input.Row)14 InputRow (org.apache.druid.data.input.InputRow)13 List (java.util.List)7 ImmutableList (com.google.common.collect.ImmutableList)6 ImmutableMap (com.google.common.collect.ImmutableMap)6 IOException (java.io.IOException)6 Map (java.util.Map)6 InputRowParser (org.apache.druid.data.input.impl.InputRowParser)6 MapInputRowParser (org.apache.druid.data.input.impl.MapInputRowParser)6 DataSchema (org.apache.druid.segment.indexing.DataSchema)6 TypeReference (com.fasterxml.jackson.core.type.TypeReference)5 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)5 ListenableFuture (com.google.common.util.concurrent.ListenableFuture)5 Iterables (com.google.common.collect.Iterables)4 ListeningExecutorService (com.google.common.util.concurrent.ListeningExecutorService)4 MoreExecutors (com.google.common.util.concurrent.MoreExecutors)4