
Example 26 with DataSchema

use of org.apache.druid.segment.indexing.DataSchema in project druid by druid-io.

the class CsvInputSourceSamplerTest method testCSVColumnAllNull.

@Test
public void testCSVColumnAllNull() {
    final TimestampSpec timestampSpec = new TimestampSpec(null, null, DateTimes.of("1970"));
    final DimensionsSpec dimensionsSpec = new DimensionsSpec(null);
    final DataSchema dataSchema = new DataSchema("sampler", timestampSpec, dimensionsSpec, null, null, null);
    final List<String> strCsvRows = ImmutableList.of("FirstName,LastName,Number,Gender", "J,G,,Male", "Kobe,Bryant,,Male", "Lisa, Krystal,,Female", "Michael,Jackson,,Male");
    final InputSource inputSource = new InlineInputSource(String.join("\n", strCsvRows));
    final InputFormat inputFormat = new CsvInputFormat(null, null, null, true, 0);
    final InputSourceSampler inputSourceSampler = new InputSourceSampler();
    final SamplerResponse response = inputSourceSampler.sample(inputSource, inputFormat, dataSchema, null);
    Assert.assertEquals(4, response.getNumRowsRead());
    Assert.assertEquals(4, response.getNumRowsIndexed());
    Assert.assertEquals(4, response.getData().size());
    List<SamplerResponseRow> data = response.getData();
    Assert.assertEquals(new SamplerResponseRow(new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("Number", null).put("FirstName", "J").put("LastName", "G").put("Gender", "Male").build(), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 0L).put("Number", null).put("FirstName", "J").put("LastName", "G").put("Gender", "Male").build(), null, null), data.get(0));
    Assert.assertEquals(new SamplerResponseRow(new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("Number", null).put("FirstName", "Kobe").put("LastName", "Bryant").put("Gender", "Male").build(), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("Number", null).put("__time", 0L).put("FirstName", "Kobe").put("LastName", "Bryant").put("Gender", "Male").build(), null, null), data.get(1));
    Assert.assertEquals(new SamplerResponseRow(new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("Number", null).put("FirstName", "Lisa").put("LastName", " Krystal").put("Gender", "Female").build(), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("Number", null).put("__time", 0L).put("FirstName", "Lisa").put("LastName", " Krystal").put("Gender", "Female").build(), null, null), data.get(2));
    Assert.assertEquals(new SamplerResponseRow(new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("Number", null).put("FirstName", "Michael").put("LastName", "Jackson").put("Gender", "Male").build(), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 0L).put("Number", null).put("FirstName", "Michael").put("LastName", "Jackson").put("Gender", "Male").build(), null, null), data.get(3));
}
Also used : InlineInputSource(org.apache.druid.data.input.impl.InlineInputSource) InputSource(org.apache.druid.data.input.InputSource) SamplerResponse(org.apache.druid.client.indexing.SamplerResponse) DataSchema(org.apache.druid.segment.indexing.DataSchema) InputFormat(org.apache.druid.data.input.InputFormat) CsvInputFormat(org.apache.druid.data.input.impl.CsvInputFormat) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) SamplerResponseRow(org.apache.druid.client.indexing.SamplerResponse.SamplerResponseRow) Test(org.junit.Test)
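A note on these assertions: every row indexes at __time = 0L because the TimestampSpec above names no timestamp column. With a null column, TimestampSpec falls back to its default column name ("timestamp"); since the CSV rows carry no such field, each row takes the 1970 missing-value default. A minimal sketch of that fallback, assuming the same Druid test dependencies as the snippet above:

TimestampSpec spec = new TimestampSpec(null, null, DateTimes.of("1970"));
// no "timestamp" field in the row map, so the missing-value default applies
DateTime ts = spec.extractTimestamp(ImmutableMap.of("FirstName", "J"));
// ts is 1970-01-01T00:00:00.000Z, which indexes as __time = 0L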

Example 27 with DataSchema

use of org.apache.druid.segment.indexing.DataSchema in project druid by druid-io.

the class InputSourceSamplerTest method testWithMoreRollup.

@Test
public void testWithMoreRollup() throws IOException {
    final TimestampSpec timestampSpec = new TimestampSpec("t", null, null);
    final DimensionsSpec dimensionsSpec = new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("dim1")));
    final AggregatorFactory[] aggregatorFactories = { new LongSumAggregatorFactory("met1", "met1") };
    final GranularitySpec granularitySpec = new UniformGranularitySpec(Granularities.DAY, Granularities.HOUR, true, null);
    final DataSchema dataSchema = createDataSchema(timestampSpec, dimensionsSpec, aggregatorFactories, granularitySpec, null);
    final InputSource inputSource = createInputSource(getTestRows(), dataSchema);
    final InputFormat inputFormat = createInputFormat();
    SamplerResponse response = inputSourceSampler.sample(inputSource, inputFormat, dataSchema, null);
    Assert.assertEquals(6, response.getNumRowsRead());
    Assert.assertEquals(5, response.getNumRowsIndexed());
    Assert.assertEquals(3, response.getData().size());
    List<SamplerResponseRow> data = response.getData();
    assertEqualsSamplerResponseRow(new SamplerResponseRow(getRawColumns().get(0), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 1555934400000L).put("dim1", "foo").put("met1", 11L).build(), null, null), data.get(0));
    assertEqualsSamplerResponseRow(new SamplerResponseRow(getRawColumns().get(3), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 1555934400000L).put("dim1", "foo2").put("met1", 4L).build(), null, null), data.get(1));
    assertEqualsSamplerResponseRow(new SamplerResponseRow(getRawColumns().get(5), null, true, getUnparseableTimestampString()), data.get(2));
}
Also used : RecordSupplierInputSource(org.apache.druid.indexing.seekablestream.RecordSupplierInputSource) InlineInputSource(org.apache.druid.data.input.impl.InlineInputSource) InputSource(org.apache.druid.data.input.InputSource) SamplerResponse(org.apache.druid.client.indexing.SamplerResponse) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) DataSchema(org.apache.druid.segment.indexing.DataSchema) UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) GranularitySpec(org.apache.druid.segment.indexing.granularity.GranularitySpec) JsonInputFormat(org.apache.druid.data.input.impl.JsonInputFormat) InputFormat(org.apache.druid.data.input.InputFormat) CsvInputFormat(org.apache.druid.data.input.impl.CsvInputFormat) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) SamplerResponseRow(org.apache.druid.client.indexing.SamplerResponse.SamplerResponseRow) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
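The rollup arithmetic behind the assertions: the HOUR query granularity truncates every timestamp to the top of its hour, so the 12:00 and 12:01 rows share one bucket. With only dim1 in the DimensionsSpec, the four dim1=foo rows collapse into one row with met1 = 1 + 2 + 3 + 5 = 11, the dim1=foo2 row keeps met1 = 4, and the bad-timestamp row surfaces as the unparseable third entry. A short sketch of the bucketing step, assuming the same Granularities utility used above:

// both inputs truncate to the same hourly bucket, 2019-04-22T12:00:00.000Z,
// whose epoch millis match the asserted __time of 1555934400000L
Granularities.HOUR.bucketStart(DateTimes.of("2019-04-22T12:00"));
Granularities.HOUR.bucketStart(DateTimes.of("2019-04-22T12:01"));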

Example 28 with DataSchema

use of org.apache.druid.segment.indexing.DataSchema in project druid by druid-io.

the class InputSourceSamplerTest method testMultipleJsonStringInOneBlock.

/**
 * This case tests sampling of multiple JSON lines in one text block.
 * Currently only RecordSupplierInputSource supports this kind of input; see https://github.com/apache/druid/pull/10383 for more information.
 *
 * This test combines an illegal JSON block and a legal JSON block to verify that:
 * 1. no line in the illegal JSON block is parsed
 * 2. the illegal JSON block does not affect the processing of the 2nd record
 * 3. every line in the legal JSON block is parsed successfully
 */
@Test
public void testMultipleJsonStringInOneBlock() throws IOException {
    if (!ParserType.STR_JSON.equals(parserType) || !useInputFormatApi) {
        return;
    }
    final TimestampSpec timestampSpec = new TimestampSpec("t", null, null);
    final DimensionsSpec dimensionsSpec = new DimensionsSpec(ImmutableList.of(StringDimensionSchema.create("dim1PlusBar")));
    final TransformSpec transformSpec = new TransformSpec(null, ImmutableList.of(new ExpressionTransform("dim1PlusBar", "concat(dim1 + 'bar')", TestExprMacroTable.INSTANCE)));
    final AggregatorFactory[] aggregatorFactories = { new LongSumAggregatorFactory("met1", "met1") };
    final GranularitySpec granularitySpec = new UniformGranularitySpec(Granularities.DAY, Granularities.HOUR, true, null);
    final DataSchema dataSchema = createDataSchema(timestampSpec, dimensionsSpec, aggregatorFactories, granularitySpec, transformSpec);
    List<String> jsonBlockList = ImmutableList.of(
        // include the line which can't be parsed into a JSON object, forming an illegal JSON block
        String.join("", STR_JSON_ROWS),
        // exclude the last line, forming a legal JSON block
        STR_JSON_ROWS.stream().limit(STR_JSON_ROWS.size() - 1).collect(Collectors.joining()));
    SamplerResponse response = inputSourceSampler.sample(new RecordSupplierInputSource("topicName", new TestRecordSupplier(jsonBlockList), true), createInputFormat(), dataSchema, new SamplerConfig(200, 3000));
    //
    // the 1st json block contains STR_JSON_ROWS.size() lines and the 2nd contains STR_JSON_ROWS.size() - 1 lines;
    // together there should be STR_JSON_ROWS.size() * 2 - 1 lines in total
    //
    int illegalRows = STR_JSON_ROWS.size();
    int legalRows = STR_JSON_ROWS.size() - 1;
    Assert.assertEquals(illegalRows + legalRows, response.getNumRowsRead());
    Assert.assertEquals(legalRows, response.getNumRowsIndexed());
    Assert.assertEquals(illegalRows + 2, response.getData().size());
    List<SamplerResponseRow> data = response.getData();
    List<Map<String, Object>> rawColumnList = this.getRawColumns();
    int index = 0;
    //
    // the first n rows correspond to the first JSON block, which fails to parse
    //
    // the method returns early unless useInputFormatApi is true, so only one message shape is possible here
    String parseExceptionMessage = "Timestamp[bad_timestamp] is unparseable! Event: {t=bad_timestamp, dim1=foo, met1=6}";
    for (; index < illegalRows; index++) {
        assertEqualsSamplerResponseRow(new SamplerResponseRow(rawColumnList.get(index), null, true, parseExceptionMessage), data.get(index));
    }
    //
    // the following are the parsed rows from the legal JSON block
    //
    assertEqualsSamplerResponseRow(new SamplerResponseRow(rawColumnList.get(0), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 1555934400000L).put("dim1PlusBar", "foobar").put("met1", 11L).build(), null, null), data.get(index++));
    assertEqualsSamplerResponseRow(new SamplerResponseRow(rawColumnList.get(3), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 1555934400000L).put("dim1PlusBar", "foo2bar").put("met1", 4L).build(), null, null), data.get(index));
}
Also used : SamplerResponse(org.apache.druid.client.indexing.SamplerResponse) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) TransformSpec(org.apache.druid.segment.transform.TransformSpec) DataSchema(org.apache.druid.segment.indexing.DataSchema) UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) GranularitySpec(org.apache.druid.segment.indexing.granularity.GranularitySpec) RecordSupplierInputSource(org.apache.druid.indexing.seekablestream.RecordSupplierInputSource) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) SamplerResponseRow(org.apache.druid.client.indexing.SamplerResponse.SamplerResponseRow) ExpressionTransform(org.apache.druid.segment.transform.ExpressionTransform) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
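A side note on the transform expression: in Druid's expression language the + operator concatenates strings, so concat(dim1 + 'bar') is equivalent to the more conventional concat(dim1, 'bar'); either yields "foobar" for dim1 = "foo", which is why the assertions expect dim1PlusBar values like "foobar" and "foo2bar". A hedged sketch of parsing both forms, assuming org.apache.druid.math.expr.Parser from druid-processing:

// both parse to expressions that evaluate to "foobar" when dim1 binds to "foo"
Expr verbose = Parser.parse("concat(dim1 + 'bar')", TestExprMacroTable.INSTANCE);
Expr simple = Parser.parse("concat(dim1, 'bar')", TestExprMacroTable.INSTANCE);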

Example 29 with DataSchema

use of org.apache.druid.segment.indexing.DataSchema in project druid by druid-io.

the class InputSourceSamplerTest method testMissingValueTimestampSpec.

@Test
public void testMissingValueTimestampSpec() throws IOException {
    final TimestampSpec timestampSpec = new TimestampSpec(null, null, DateTimes.of("1970"));
    final DimensionsSpec dimensionsSpec = new DimensionsSpec(null);
    final DataSchema dataSchema = createDataSchema(timestampSpec, dimensionsSpec, null, null, null);
    final InputSource inputSource = createInputSource(getTestRows(), dataSchema);
    final InputFormat inputFormat = createInputFormat();
    SamplerResponse response = inputSourceSampler.sample(inputSource, inputFormat, dataSchema, null);
    Assert.assertEquals(6, response.getNumRowsRead());
    Assert.assertEquals(6, response.getNumRowsIndexed());
    Assert.assertEquals(6, response.getData().size());
    List<SamplerResponseRow> data = response.getData();
    assertEqualsSamplerResponseRow(new SamplerResponseRow(getRawColumns().get(0), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 0L).put("t", "2019-04-22T12:00").put("dim2", null).put("dim1", "foo").put("met1", "1").build(), null, null), data.get(0));
    assertEqualsSamplerResponseRow(new SamplerResponseRow(getRawColumns().get(1), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 0L).put("t", "2019-04-22T12:00").put("dim2", null).put("dim1", "foo").put("met1", "2").build(), null, null), data.get(1));
    assertEqualsSamplerResponseRow(new SamplerResponseRow(getRawColumns().get(2), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 0L).put("t", "2019-04-22T12:01").put("dim2", null).put("dim1", "foo").put("met1", "3").build(), null, null), data.get(2));
    assertEqualsSamplerResponseRow(new SamplerResponseRow(getRawColumns().get(3), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 0L).put("t", "2019-04-22T12:00").put("dim2", null).put("dim1", "foo2").put("met1", "4").build(), null, null), data.get(3));
    assertEqualsSamplerResponseRow(new SamplerResponseRow(getRawColumns().get(4), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 0L).put("t", "2019-04-22T12:00").put("dim2", "bar").put("dim1", "foo").put("met1", "5").build(), null, null), data.get(4));
    assertEqualsSamplerResponseRow(new SamplerResponseRow(getRawColumns().get(5), new SamplerTestUtils.MapAllowingNullValuesBuilder<String, Object>().put("__time", 0L).put("t", "bad_timestamp").put("dim2", null).put("dim1", "foo").put("met1", "6").build(), null, null), data.get(5));
}
Also used : DataSchema(org.apache.druid.segment.indexing.DataSchema) RecordSupplierInputSource(org.apache.druid.indexing.seekablestream.RecordSupplierInputSource) InlineInputSource(org.apache.druid.data.input.impl.InlineInputSource) InputSource(org.apache.druid.data.input.InputSource) JsonInputFormat(org.apache.druid.data.input.impl.JsonInputFormat) InputFormat(org.apache.druid.data.input.InputFormat) CsvInputFormat(org.apache.druid.data.input.impl.CsvInputFormat) SamplerResponse(org.apache.druid.client.indexing.SamplerResponse) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) SamplerResponseRow(org.apache.druid.client.indexing.SamplerResponse.SamplerResponseRow) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
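Contrast this with testWithMoreRollup above: here all six rows index, including the bad_timestamp row, because the TimestampSpec reads its default "timestamp" column rather than "t"; the bad value is never parsed, and every row falls back to the 1970 default. A minimal sketch under the same assumptions as the earlier TimestampSpec example:

TimestampSpec spec = new TimestampSpec(null, null, DateTimes.of("1970"));
// "t" is never consulted (the default column is "timestamp"), so bad_timestamp cannot fail
spec.extractTimestamp(ImmutableMap.of("t", "bad_timestamp"));  // 1970-01-01T00:00:00.000Z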

Example 30 with DataSchema

use of org.apache.druid.segment.indexing.DataSchema in project druid by druid-io.

the class SeekableStreamSupervisorSpecTest method setUp.

@Before
public void setUp() {
    ingestionSchema = EasyMock.mock(SeekableStreamSupervisorIngestionSpec.class);
    taskStorage = EasyMock.mock(TaskStorage.class);
    taskMaster = EasyMock.mock(TaskMaster.class);
    indexerMetadataStorageCoordinator = EasyMock.mock(IndexerMetadataStorageCoordinator.class);
    emitter = EasyMock.mock(ServiceEmitter.class);
    rowIngestionMetersFactory = EasyMock.mock(RowIngestionMetersFactory.class);
    dataSchema = EasyMock.mock(DataSchema.class);
    seekableStreamSupervisorTuningConfig = EasyMock.mock(SeekableStreamSupervisorTuningConfig.class);
    seekableStreamSupervisorIOConfig = EasyMock.mock(SeekableStreamSupervisorIOConfig.class);
    taskClientFactory = EasyMock.mock(SeekableStreamIndexTaskClientFactory.class);
    spec = EasyMock.mock(SeekableStreamSupervisorSpec.class);
    supervisorConfig = new SupervisorStateManagerConfig();
    indexTaskClientFactory = EasyMock.mock(SeekableStreamIndexTaskClientFactory.class);
    mapper = new DefaultObjectMapper();
    monitorSchedulerConfig = EasyMock.mock(DruidMonitorSchedulerConfig.class);
    supervisorStateManagerConfig = EasyMock.mock(SupervisorStateManagerConfig.class);
    supervisor4 = EasyMock.mock(SeekableStreamSupervisor.class);
}
Also used : ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter) SeekableStreamSupervisorTuningConfig(org.apache.druid.indexing.seekablestream.supervisor.SeekableStreamSupervisorTuningConfig) SeekableStreamSupervisorIOConfig(org.apache.druid.indexing.seekablestream.supervisor.SeekableStreamSupervisorIOConfig) IndexerMetadataStorageCoordinator(org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator) SeekableStreamSupervisorSpec(org.apache.druid.indexing.seekablestream.supervisor.SeekableStreamSupervisorSpec) SeekableStreamSupervisorIngestionSpec(org.apache.druid.indexing.seekablestream.supervisor.SeekableStreamSupervisorIngestionSpec) DataSchema(org.apache.druid.segment.indexing.DataSchema) DruidMonitorSchedulerConfig(org.apache.druid.server.metrics.DruidMonitorSchedulerConfig) TaskStorage(org.apache.druid.indexing.overlord.TaskStorage) SupervisorStateManagerConfig(org.apache.druid.indexing.overlord.supervisor.SupervisorStateManagerConfig) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) TaskMaster(org.apache.druid.indexing.overlord.TaskMaster) RowIngestionMetersFactory(org.apache.druid.segment.incremental.RowIngestionMetersFactory) SeekableStreamSupervisor(org.apache.druid.indexing.seekablestream.supervisor.SeekableStreamSupervisor) Before(org.junit.Before)
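These are bare mocks: setUp records no expectations, so each test is expected to stub what it needs and replay before exercising the supervisor. A hedged sketch of the usual EasyMock lifecycle (the getDataSchema expectation is illustrative, not taken from the test class):

// illustrative expectation; IngestionSpec exposes getDataSchema()
EasyMock.expect(ingestionSchema.getDataSchema()).andReturn(dataSchema).anyTimes();
EasyMock.replay(ingestionSchema);
// ... construct and exercise the supervisor under test ...
EasyMock.verify(ingestionSchema);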

Aggregations

DataSchema (org.apache.druid.segment.indexing.DataSchema): 80
UniformGranularitySpec (org.apache.druid.segment.indexing.granularity.UniformGranularitySpec): 49
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 45
Test (org.junit.Test): 44
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 32
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 25
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 22
GranularitySpec (org.apache.druid.segment.indexing.granularity.GranularitySpec): 19
InputSource (org.apache.druid.data.input.InputSource): 17
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 17
File (java.io.File): 16
Map (java.util.Map): 15
InputFormat (org.apache.druid.data.input.InputFormat): 15
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 15
SamplerResponse (org.apache.druid.client.indexing.SamplerResponse): 14
SamplerResponseRow (org.apache.druid.client.indexing.SamplerResponse.SamplerResponseRow): 13
CsvInputFormat (org.apache.druid.data.input.impl.CsvInputFormat): 13
Interval (org.joda.time.Interval): 13
ArrayList (java.util.ArrayList): 12
JsonInputFormat (org.apache.druid.data.input.impl.JsonInputFormat): 12