
Example 1 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class DataSchemaTest, method testEmptyDatasource.

@Test
public void testEmptyDatasource() {
    Map<String, Object> parser = jsonMapper.convertValue(
        new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("time", "auto", null),
                DimensionsSpec.builder()
                    .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2")))
                    .setDimensionExclusions(ImmutableList.of("dimC"))
                    .build(),
                null,
                null,
                null
            ),
            null
        ),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );
    expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
    expectedException.expectMessage("dataSource cannot be null or empty. Please provide a dataSource.");
    DataSchema schema = new DataSchema(
        "",
        parser,
        new AggregatorFactory[] {
            new DoubleSumAggregatorFactory("metric1", "col1"),
            new DoubleSumAggregatorFactory("metric2", "col2")
        },
        new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
        null,
        jsonMapper
    );
}
Also used : DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) StringInputRowParser(org.apache.druid.data.input.impl.StringInputRowParser) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) JSONParseSpec(org.apache.druid.data.input.impl.JSONParseSpec) ArbitraryGranularitySpec(org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test) IdUtilsTest(org.apache.druid.common.utils.IdUtilsTest)
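
For reference, here is a minimal standalone sketch (not taken from DataSchemaTest) of the granularity spec these examples pass to DataSchema. It assumes the two-argument ArbitraryGranularitySpec constructor shown above plus the isRollup() and getQueryGranularity() getters from the GranularitySpec interface; check the getter names against your Druid version.

import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;

public class ArbitraryGranularitySpecSketch {
    public static void main(String[] args) {
        // Explicit input intervals replace a uniform segment granularity.
        ArbitraryGranularitySpec spec = new ArbitraryGranularitySpec(
            Granularities.DAY,                          // query granularity
            ImmutableList.of(Intervals.of("2014/2015")) // input intervals
        );
        // Rollup defaults to true when not passed explicitly (assumption; the
        // three-argument constructor takes an explicit Boolean rollup flag).
        System.out.println(spec.isRollup());
        System.out.println(spec.getQueryGranularity());
    }
}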

Example 2 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class DataSchemaTest, method testExplicitInclude.

@Test
public void testExplicitInclude() {
    Map<String, Object> parser = jsonMapper.convertValue(
        new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("time", "auto", null),
                DimensionsSpec.builder()
                    .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2")))
                    .setDimensionExclusions(ImmutableList.of("dimC"))
                    .build(),
                null,
                null,
                null
            ),
            null
        ),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );
    DataSchema schema = new DataSchema(
        IdUtilsTest.VALID_ID_CHARS,
        parser,
        new AggregatorFactory[] {
            new DoubleSumAggregatorFactory("metric1", "col1"),
            new DoubleSumAggregatorFactory("metric2", "col2")
        },
        new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
        null,
        jsonMapper
    );
    Assert.assertEquals(ImmutableSet.of("__time", "dimC", "col1", "metric1", "metric2"), schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions());
}
Also used : DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) StringInputRowParser(org.apache.druid.data.input.impl.StringInputRowParser) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) JSONParseSpec(org.apache.druid.data.input.impl.JSONParseSpec) ArbitraryGranularitySpec(org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test) IdUtilsTest(org.apache.druid.common.utils.IdUtilsTest)

Example 3 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class DataSchemaTest, method testTransformSpec.

@Test
public void testTransformSpec() {
    Map<String, Object> parserMap = jsonMapper.convertValue(
        new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("time", "auto", null),
                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2"))),
                null,
                null,
                null
            ),
            null
        ),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );
    DataSchema schema = new DataSchema(
        IdUtilsTest.VALID_ID_CHARS,
        parserMap,
        new AggregatorFactory[] {
            new DoubleSumAggregatorFactory("metric1", "col1"),
            new DoubleSumAggregatorFactory("metric2", "col2")
        },
        new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
        new TransformSpec(
            new SelectorDimFilter("dimA", "foo", null),
            ImmutableList.of(new ExpressionTransform("expr", "concat(dimA,dimA)", TestExprMacroTable.INSTANCE))
        ),
        jsonMapper
    );
    // Test hack that produces a StringInputRowParser.
    final StringInputRowParser parser = (StringInputRowParser) schema.getParser();
    final InputRow row1bb = parser.parseBatch(ByteBuffer.wrap("{\"time\":\"2000-01-01\",\"dimA\":\"foo\"}".getBytes(StandardCharsets.UTF_8))).get(0);
    Assert.assertEquals(DateTimes.of("2000-01-01"), row1bb.getTimestamp());
    Assert.assertEquals("foo", row1bb.getRaw("dimA"));
    Assert.assertEquals("foofoo", row1bb.getRaw("expr"));
    final InputRow row1string = parser.parse("{\"time\":\"2000-01-01\",\"dimA\":\"foo\"}");
    Assert.assertEquals(DateTimes.of("2000-01-01"), row1string.getTimestamp());
    Assert.assertEquals("foo", row1string.getRaw("dimA"));
    Assert.assertEquals("foofoo", row1string.getRaw("expr"));
    final InputRow row2 = parser.parseBatch(ByteBuffer.wrap("{\"time\":\"2000-01-01\",\"dimA\":\"x\"}".getBytes(StandardCharsets.UTF_8))).get(0);
    Assert.assertNull(row2);
}
Also used : DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) ArbitraryGranularitySpec(org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec) TransformSpec(org.apache.druid.segment.transform.TransformSpec) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) StringInputRowParser(org.apache.druid.data.input.impl.StringInputRowParser) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) InputRow(org.apache.druid.data.input.InputRow) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) ExpressionTransform(org.apache.druid.segment.transform.ExpressionTransform) JSONParseSpec(org.apache.druid.data.input.impl.JSONParseSpec) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test) IdUtilsTest(org.apache.druid.common.utils.IdUtilsTest)

Example 4 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class DataSchemaTest, method testOverlapTimeAndDimLegacy.

@Test
public void testOverlapTimeAndDimLegacy() {
    Map<String, Object> parser = jsonMapper.convertValue(
        new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("time", "auto", null),
                DimensionsSpec.builder()
                    .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("__time", "dimA", "dimB", "metric1")))
                    .setDimensionExclusions(ImmutableList.of("dimC"))
                    .build(),
                null,
                null,
                null
            ),
            null
        ),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );
    DataSchema schema = new DataSchema(
        IdUtilsTest.VALID_ID_CHARS,
        parser,
        null,
        new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
        null,
        jsonMapper
    );
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("Cannot specify a column more than once: [__time] seen in dimensions list, primary timestamp " + "(__time cannot appear as a dimension or metric)");
    schema.getParser();
}
Also used : StringInputRowParser(org.apache.druid.data.input.impl.StringInputRowParser) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) JSONParseSpec(org.apache.druid.data.input.impl.JSONParseSpec) ArbitraryGranularitySpec(org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test) IdUtilsTest(org.apache.druid.common.utils.IdUtilsTest)

Example 5 with ArbitraryGranularitySpec

Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.

From the class DataSchemaTest, method testDuplicateAggregators.

@Test
public void testDuplicateAggregators() {
    Map<String, Object> parser = jsonMapper.convertValue(
        new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("time", "auto", null),
                DimensionsSpec.builder()
                    .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time")))
                    .setDimensionExclusions(ImmutableList.of("dimC"))
                    .build(),
                null,
                null,
                null
            ),
            null
        ),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("Cannot specify a column more than once: [metric1] seen in metricsSpec list (2 occurrences); " + "[metric3] seen in metricsSpec list (2 occurrences)");
    DataSchema schema = new DataSchema(
        IdUtilsTest.VALID_ID_CHARS,
        parser,
        new AggregatorFactory[] {
            new DoubleSumAggregatorFactory("metric1", "col1"),
            new DoubleSumAggregatorFactory("metric2", "col2"),
            new DoubleSumAggregatorFactory("metric1", "col3"),
            new DoubleSumAggregatorFactory("metric3", "col4"),
            new DoubleSumAggregatorFactory("metric3", "col5")
        },
        new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
        null,
        jsonMapper
    );
}
Also used : DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) StringInputRowParser(org.apache.druid.data.input.impl.StringInputRowParser) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) JSONParseSpec(org.apache.druid.data.input.impl.JSONParseSpec) ArbitraryGranularitySpec(org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test) IdUtilsTest(org.apache.druid.common.utils.IdUtilsTest)
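
Beyond construction, the spec is typically round-tripped through JSON as the "arbitrary" granularitySpec type. The sketch below is not from these tests; it assumes org.apache.druid.jackson.DefaultObjectMapper can handle GranularitySpec's polymorphic serialization the way Druid's own granularity-spec tests do, and that ArbitraryGranularitySpec implements value equality.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;
import org.apache.druid.segment.indexing.granularity.GranularitySpec;

public class GranularitySpecRoundTripSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new DefaultObjectMapper();
        GranularitySpec spec = new ArbitraryGranularitySpec(
            Granularities.DAY,
            ImmutableList.of(Intervals.of("2014/2015"))
        );
        // Serialize; the JSON carries "type":"arbitrary" so it can be read back
        // through the GranularitySpec base type (assumption based on Druid's
        // @JsonSubTypes registration).
        String json = mapper.writeValueAsString(spec);
        GranularitySpec roundTripped = mapper.readValue(json, GranularitySpec.class);
        System.out.println(json);
        System.out.println(spec.equals(roundTripped));
    }
}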

Aggregations

ArbitraryGranularitySpec (org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec): 19
Test (org.junit.Test): 17
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 15
IdUtilsTest (org.apache.druid.common.utils.IdUtilsTest): 11
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 11
JSONParseSpec (org.apache.druid.data.input.impl.JSONParseSpec): 10
StringInputRowParser (org.apache.druid.data.input.impl.StringInputRowParser): 9
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 9
DataSchema (org.apache.druid.segment.indexing.DataSchema): 7
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 6
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 5
IndexIOConfig (org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig): 4
IndexIngestionSpec (org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec): 4
NoopInputSource (org.apache.druid.data.input.impl.NoopInputSource): 3
NoopFirehoseFactory (org.apache.druid.data.input.impl.NoopFirehoseFactory): 2
NoopInputFormat (org.apache.druid.data.input.impl.NoopInputFormat): 2
TypeReference (com.fasterxml.jackson.core.type.TypeReference): 1
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
BufferedWriter (java.io.BufferedWriter): 1
File (java.io.File): 1