Example 26 with InputEntityReader

Use of org.apache.druid.data.input.InputEntityReader in project druid by druid-io.

From class OrcReaderTest, method testDate1900.

// This test is migrated from OrcHadoopInputRowParserTest
@Test
public void testDate1900() throws IOException {
    final InputEntityReader reader = createReader(
            new TimestampSpec("time", "millis", null),
            DimensionsSpec.builder().setDimensionExclusions(Collections.singletonList("time")).build(),
            new OrcInputFormat(new JSONPathSpec(true, null), null, new Configuration()),
            "example/TestOrcFile.testDate1900.orc"
    );
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        int actualRowCount = 0;
        Assert.assertTrue(iterator.hasNext());
        final InputRow row = iterator.next();
        actualRowCount++;
        Assert.assertEquals(1, row.getDimensions().size());
        Assert.assertEquals(DateTimes.of("1900-05-05T12:34:56.1Z"), row.getTimestamp());
        Assert.assertEquals("1900-12-25T00:00:00.000Z", Iterables.getOnlyElement(row.getDimension("date")));
        while (iterator.hasNext()) {
            actualRowCount++;
            iterator.next();
        }
        Assert.assertEquals(70000, actualRowCount);
    }
}
Also used: Configuration(org.apache.hadoop.conf.Configuration) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) InputRow(org.apache.druid.data.input.InputRow) JSONPathSpec(org.apache.druid.java.util.common.parsers.JSONPathSpec) InputEntityReader(org.apache.druid.data.input.InputEntityReader) Test(org.junit.Test)
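
The createReader helper these tests call is not shown on this page. Below is a minimal sketch of what it plausibly looks like, assuming a Druid version where InputRowSchema takes a ColumnsFilter (org.apache.druid.data.input.ColumnsFilter) and using FileEntity (org.apache.druid.data.input.impl.FileEntity) as the byte source; the helper body and the temporary-directory choice are reconstructions, not the project's actual code:

// Hypothetical reconstruction of the createReader helper used throughout these tests.
private InputEntityReader createReader(
        TimestampSpec timestampSpec,
        DimensionsSpec dimensionsSpec,
        InputFormat inputFormat,
        String dataFile
) throws IOException {
    // Bundle the specs into the schema every InputEntityReader needs.
    final InputRowSchema schema = new InputRowSchema(timestampSpec, dimensionsSpec, ColumnsFilter.all());
    // The InputEntity abstracts the byte source; here, an ORC file on local disk.
    final FileEntity entity = new FileEntity(new File(dataFile));
    // createReader also takes a scratch directory for intermediate files
    // (using the JVM tmpdir here is an assumption of this sketch).
    final File temporaryDir = new File(System.getProperty("java.io.tmpdir"));
    return inputFormat.createReader(schema, entity, temporaryDir);
}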

Example 27 with InputEntityReader

Use of org.apache.druid.data.input.InputEntityReader in project druid by druid-io.

From class OrcReaderTest, method testDate2038.

// This test is migrated from OrcHadoopInputRowParserTest
@Test
public void testDate2038() throws IOException {
    final InputEntityReader reader = createReader(
            new TimestampSpec("time", "millis", null),
            DimensionsSpec.builder().setDimensionExclusions(Collections.singletonList("time")).build(),
            new OrcInputFormat(new JSONPathSpec(true, null), null, new Configuration()),
            "example/TestOrcFile.testDate2038.orc"
    );
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        int actualRowCount = 0;
        Assert.assertTrue(iterator.hasNext());
        final InputRow row = iterator.next();
        actualRowCount++;
        Assert.assertEquals(1, row.getDimensions().size());
        Assert.assertEquals(DateTimes.of("2038-05-05T12:34:56.1Z"), row.getTimestamp());
        Assert.assertEquals("2038-12-25T00:00:00.000Z", Iterables.getOnlyElement(row.getDimension("date")));
        while (iterator.hasNext()) {
            actualRowCount++;
            iterator.next();
        }
        Assert.assertEquals(212000, actualRowCount);
    }
}
Also used: Configuration(org.apache.hadoop.conf.Configuration) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) InputRow(org.apache.druid.data.input.InputRow) JSONPathSpec(org.apache.druid.java.util.common.parsers.JSONPathSpec) InputEntityReader(org.apache.druid.data.input.InputEntityReader) Test(org.junit.Test)

Example 28 with InputEntityReader

Use of org.apache.druid.data.input.InputEntityReader in project druid by druid-io.

From class OrcReaderTest, method testTest2.

// This test is migrated from OrcHadoopInputRowParserTest
@Test
public void testTest2() throws IOException {
    final InputFormat inputFormat = new OrcInputFormat(
            new JSONPathSpec(
                    true,
                    Collections.singletonList(new JSONPathFieldSpec(JSONPathFieldType.PATH, "col7-subcol7", "$.col7.subcol7"))
            ),
            null,
            new Configuration()
    );
    final InputEntityReader reader = createReader(
            new TimestampSpec("timestamp", "auto", null),
            new DimensionsSpec(null),
            inputFormat,
            "example/test_2.orc"
    );
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        Assert.assertTrue(iterator.hasNext());
        final InputRow row = iterator.next();
        Assert.assertEquals(DateTimes.of("2016-01-01T00:00:00.000Z"), row.getTimestamp());
        Assert.assertEquals("bar", Iterables.getOnlyElement(row.getDimension("col1")));
        Assert.assertEquals(ImmutableList.of("dat1", "dat2", "dat3"), row.getDimension("col2"));
        Assert.assertEquals("1.1", Iterables.getOnlyElement(row.getDimension("col3")));
        Assert.assertEquals("2", Iterables.getOnlyElement(row.getDimension("col4")));
        Assert.assertEquals("3.5", Iterables.getOnlyElement(row.getDimension("col5")));
        Assert.assertTrue(row.getDimension("col6").isEmpty());
        Assert.assertFalse(iterator.hasNext());
    }
}
Also used: Configuration(org.apache.hadoop.conf.Configuration) InputFormat(org.apache.druid.data.input.InputFormat) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) InputRow(org.apache.druid.data.input.InputRow) JSONPathSpec(org.apache.druid.java.util.common.parsers.JSONPathSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) JSONPathFieldSpec(org.apache.druid.java.util.common.parsers.JSONPathFieldSpec) InputEntityReader(org.apache.druid.data.input.InputEntityReader) Test(org.junit.Test)
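
The flattenSpec built inline in this example is what turns the nested struct field col7.subcol7 into the flat column col7-subcol7. The same construction restated with comments (nothing new is introduced here; only the annotations are added):

    // useFieldDiscovery = true: top-level ORC fields are discovered automatically.
    final JSONPathSpec flattenSpec = new JSONPathSpec(
            true,
            Collections.singletonList(
                    // A PATH field extracts $.col7.subcol7 from the nested struct
                    // and exposes it under the flat column name "col7-subcol7".
                    new JSONPathFieldSpec(JSONPathFieldType.PATH, "col7-subcol7", "$.col7.subcol7")
            )
    );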

Example 29 with InputEntityReader

Use of org.apache.druid.data.input.InputEntityReader in project druid by druid-io.

From class OrcReaderTest, method testTest1.

// This test is migrated from OrcHadoopInputRowParserTest
@Test
public void testTest1() throws IOException {
    final InputEntityReader reader = createReader(
            new TimestampSpec("timestamp", "auto", null),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("col1", "col2"))),
            new OrcInputFormat(null, null, new Configuration()),
            "example/test_1.orc"
    );
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        Assert.assertTrue(iterator.hasNext());
        final InputRow row = iterator.next();
        Assert.assertEquals(DateTimes.of("2016-01-01T00:00:00.000Z"), row.getTimestamp());
        Assert.assertEquals("bar", Iterables.getOnlyElement(row.getDimension("col1")));
        Assert.assertEquals(ImmutableList.of("dat1", "dat2", "dat3"), row.getDimension("col2"));
        Assert.assertEquals(1.1, row.getMetric("val1").doubleValue(), 0.001);
        Assert.assertFalse(iterator.hasNext());
    }
}
Also used: Configuration(org.apache.hadoop.conf.Configuration) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) InputRow(org.apache.druid.data.input.InputRow) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) InputEntityReader(org.apache.druid.data.input.InputEntityReader) Test(org.junit.Test)
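
Note the split in the assertions above: col1 and col2 are declared dimensions, while val1 is not, so it is read back through getMetric rather than getDimension. In sketch form:

    // Declared dimensions come back as string values via getDimension(...).
    // Columns outside the DimensionsSpec, like val1, are still carried on the
    // row and are reachable as numbers via getMetric(...).
    Number val1 = row.getMetric("val1");   // 1.1 for the first row of test_1.orc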

Example 30 with InputEntityReader

Use of org.apache.druid.data.input.InputEntityReader in project druid by druid-io.

From class OrcReaderTest, method testOrcSplitElim.

// This test is migrated from OrcHadoopInputRowParserTest
@Test
public void testOrcSplitElim() throws IOException {
    final InputEntityReader reader = createReader(
            new TimestampSpec("ts", "millis", null),
            new DimensionsSpec(null),
            new OrcInputFormat(new JSONPathSpec(true, null), null, new Configuration()),
            "example/orc_split_elim.orc"
    );
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        int actualRowCount = 0;
        Assert.assertTrue(iterator.hasNext());
        final InputRow row = iterator.next();
        actualRowCount++;
        Assert.assertEquals(DateTimes.of("1969-12-31T16:00:00.0Z"), row.getTimestamp());
        Assert.assertEquals("2", Iterables.getOnlyElement(row.getDimension("userid")));
        Assert.assertEquals("foo", Iterables.getOnlyElement(row.getDimension("string1")));
        Assert.assertEquals("0.8", Iterables.getOnlyElement(row.getDimension("subtype")));
        Assert.assertEquals("1.2", Iterables.getOnlyElement(row.getDimension("decimal1")));
        while (iterator.hasNext()) {
            actualRowCount++;
            iterator.next();
        }
        Assert.assertEquals(25000, actualRowCount);
    }
}
Also used: Configuration(org.apache.hadoop.conf.Configuration) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) InputRow(org.apache.druid.data.input.InputRow) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) JSONPathSpec(org.apache.druid.java.util.common.parsers.JSONPathSpec) InputEntityReader(org.apache.druid.data.input.InputEntityReader) Test(org.junit.Test)

Aggregations

InputEntityReader (org.apache.druid.data.input.InputEntityReader): 58 usages
Test (org.junit.Test): 56 usages
InputRow (org.apache.druid.data.input.InputRow): 54 usages
InputRowSchema (org.apache.druid.data.input.InputRowSchema): 39 usages
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 33 usages
JSONPathSpec (org.apache.druid.java.util.common.parsers.JSONPathSpec): 33 usages
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 31 usages
JSONPathFieldSpec (org.apache.druid.java.util.common.parsers.JSONPathFieldSpec): 28 usages
InputRowListPlusRawValues (org.apache.druid.data.input.InputRowListPlusRawValues): 26 usages
MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow): 8 usages
Configuration (org.apache.hadoop.conf.Configuration): 8 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 5 usages
AvroHadoopInputRowParserTest (org.apache.druid.data.input.AvroHadoopInputRowParserTest): 5 usages
AvroStreamInputRowParserTest (org.apache.druid.data.input.AvroStreamInputRowParserTest): 5 usages
DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper): 5 usages
KafkaRecordEntity (org.apache.druid.data.input.kafka.KafkaRecordEntity): 4 usages
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 4 usages
Headers (org.apache.kafka.common.header.Headers): 4 usages
RecordHeaders (org.apache.kafka.common.header.internals.RecordHeaders): 4 usages
BigDecimal (java.math.BigDecimal): 3 usages
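
Across all of the examples above the consumption pattern is identical, so it is worth stating once. A minimal sketch, assuming only the InputEntityReader and CloseableIterator (org.apache.druid.java.util.common.parsers.CloseableIterator) API shown on this page; collectRows is a hypothetical helper name:

// Hypothetical helper: drain an InputEntityReader into a list.
// The try-with-resources block matters: the CloseableIterator returned by
// read() holds the underlying input open until it is closed.
static List<InputRow> collectRows(InputEntityReader reader) throws IOException {
    final List<InputRow> rows = new ArrayList<>();
    try (CloseableIterator<InputRow> iterator = reader.read()) {
        while (iterator.hasNext()) {
            rows.add(iterator.next());
        }
    }
    return rows;
}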