Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.
From class DataSchemaTest, method testOverlapMetricNameAndDim.
@Test
public void testOverlapMetricNameAndDim() {
    Map<String, Object> parser = jsonMapper.convertValue(
        new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("time", "auto", null),
                DimensionsSpec.builder()
                              .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "metric1")))
                              .setDimensionExclusions(ImmutableList.of("dimC"))
                              .build(),
                null,
                null,
                null
            ),
            null
        ),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );
    DataSchema schema = new DataSchema(
        IdUtilsTest.VALID_ID_CHARS,
        parser,
        new AggregatorFactory[]{
            new DoubleSumAggregatorFactory("metric1", "col1"),
            new DoubleSumAggregatorFactory("metric2", "col2")
        },
        new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
        null,
        jsonMapper
    );
    // "metric1" is declared both as a dimension and as a metric name, so
    // building the parser must fail with a duplicate-column error.
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("Cannot specify a column more than once: [metric1] seen in dimensions list, metricsSpec list");
    schema.getParser();
}
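For reference, the granularity spec exercised in this test can be built on its own. A minimal sketch using only the two-argument constructor seen above; the wrapper class and method name are hypothetical, the Druid types and import paths are the ones these tests use.

import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;

class GranularitySpecSketch {
    // Day-level query granularity over one explicitly enumerated input interval.
    static ArbitraryGranularitySpec daySpec() {
        return new ArbitraryGranularitySpec(
            Granularities.DAY,
            ImmutableList.of(Intervals.of("2014/2015"))
        );
    }
}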
Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.
From class DataSchemaTest, method testSerde.
@Test
public void testSerde() throws Exception {
    String jsonStr = "{"
                     + "\"dataSource\":\"" + StringEscapeUtils.escapeJson(IdUtilsTest.VALID_ID_CHARS) + "\","
                     + "\"parser\":{"
                     + "\"type\":\"string\","
                     + "\"parseSpec\":{"
                     + "\"format\":\"json\","
                     + "\"timestampSpec\":{\"column\":\"xXx\", \"format\": \"auto\", \"missingValue\": null},"
                     + "\"dimensionsSpec\":{\"dimensions\":[], \"dimensionExclusions\":[]},"
                     + "\"flattenSpec\":{\"useFieldDiscovery\":true, \"fields\":[]},"
                     + "\"featureSpec\":{}},"
                     + "\"encoding\":\"UTF-8\""
                     + "},"
                     + "\"metricsSpec\":[{\"type\":\"doubleSum\",\"name\":\"metric1\",\"fieldName\":\"col1\"}],"
                     + "\"granularitySpec\":{"
                     + "\"type\":\"arbitrary\","
                     + "\"queryGranularity\":{\"type\":\"duration\",\"duration\":86400000,\"origin\":\"1970-01-01T00:00:00.000Z\"},"
                     + "\"intervals\":[\"2014-01-01T00:00:00.000Z/2015-01-01T00:00:00.000Z\"]}}";

    // Round-trip: parse the JSON, serialize the result back out, and parse it again.
    DataSchema actual = jsonMapper.readValue(
        jsonMapper.writeValueAsString(jsonMapper.readValue(jsonStr, DataSchema.class)),
        DataSchema.class
    );

    Assert.assertEquals(IdUtilsTest.VALID_ID_CHARS, actual.getDataSource());
    Assert.assertEquals(
        new JSONParseSpec(
            new TimestampSpec("xXx", null, null),
            DimensionsSpec.builder()
                          .setDimensionExclusions(Arrays.asList("__time", "metric1", "xXx", "col1"))
                          .build(),
            null,
            null,
            null
        ),
        actual.getParser().getParseSpec()
    );
    Assert.assertArrayEquals(
        new AggregatorFactory[]{new DoubleSumAggregatorFactory("metric1", "col1")},
        actual.getAggregators()
    );
    Assert.assertEquals(
        new ArbitraryGranularitySpec(new DurationGranularity(86400000, null), ImmutableList.of(Intervals.of("2014/2015"))),
        actual.getGranularitySpec()
    );
}
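The round trip above also pins down the JSON shape that maps to ArbitraryGranularitySpec: the "type":"arbitrary" tag selects it via Jackson's polymorphic handling of the GranularitySpec interface. A minimal sketch that deserializes just that fragment, assuming an ObjectMapper configured with Druid's modules like the test's jsonMapper; the wrapper class and method name are hypothetical.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import org.apache.druid.segment.indexing.granularity.GranularitySpec;

class GranularityJsonSketch {
    // Deserialize only the granularitySpec fragment of the schema JSON.
    static GranularitySpec parse(ObjectMapper jsonMapper) throws IOException {
        String granularityJson = "{"
            + "\"type\":\"arbitrary\","
            + "\"queryGranularity\":{\"type\":\"duration\",\"duration\":86400000},"
            + "\"intervals\":[\"2014-01-01T00:00:00.000Z/2015-01-01T00:00:00.000Z\"]}";
        // The "arbitrary" type tag yields an ArbitraryGranularitySpec instance.
        return jsonMapper.readValue(granularityJson, GranularitySpec.class);
    }
}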
Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.
From class DataSchemaTest, method testSerdeWithUpdatedDataSchemaRemovedField.
@Test
public void testSerdeWithUpdatedDataSchemaRemovedField() throws IOException {
    Map<String, Object> parser = jsonMapper.convertValue(
        new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("time", "auto", null),
                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dimB", "dimA"))),
                null,
                null,
                null
            ),
            null
        ),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );
    TestModifiedDataSchema originalSchema = new TestModifiedDataSchema(
        IdUtilsTest.VALID_ID_CHARS,
        null,
        null,
        new AggregatorFactory[]{
            new DoubleSumAggregatorFactory("metric1", "col1"),
            new DoubleSumAggregatorFactory("metric2", "col2")
        },
        new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
        null,
        parser,
        jsonMapper,
        "some arbitrary string"
    );

    // Serialize the extended schema, then deserialize into plain DataSchema,
    // which has no counterpart for the extra field.
    String serialized = jsonMapper.writeValueAsString(originalSchema);
    DataSchema deserialized = jsonMapper.readValue(serialized, DataSchema.class);

    Assert.assertEquals(originalSchema.getDataSource(), deserialized.getDataSource());
    Assert.assertEquals(originalSchema.getGranularitySpec(), deserialized.getGranularitySpec());
    Assert.assertEquals(originalSchema.getParser().getParseSpec(), deserialized.getParser().getParseSpec());
    Assert.assertArrayEquals(originalSchema.getAggregators(), deserialized.getAggregators());
    Assert.assertEquals(originalSchema.getTransformSpec(), deserialized.getTransformSpec());
    Assert.assertEquals(originalSchema.getParserMap(), deserialized.getParserMap());
}
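This test only passes because the mapper tolerates properties it does not recognize; treating the test's jsonMapper as a Druid DefaultObjectMapper with FAIL_ON_UNKNOWN_PROPERTIES disabled is an assumption here. A standalone sketch of that mechanism with hypothetical POJO names, unrelated to Druid's classes:

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

class RemovedFieldSketch {
    public static class Pojo {
        public int a;
    }

    public static void main(String[] args) throws Exception {
        // Assumption: the test mapper is configured like this.
        ObjectMapper mapper = new ObjectMapper()
            .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        // "extra" has no counterpart in Pojo, so it is silently dropped.
        Pojo p = mapper.readValue("{\"a\":1,\"extra\":\"ignored\"}", Pojo.class);
        System.out.println(p.a); // prints 1
    }
}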
Use of org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec in project druid by druid-io.
From class DataSchemaTest, method testSerdeWithUpdatedDataSchemaAddedField.
@Test
public void testSerdeWithUpdatedDataSchemaAddedField() throws IOException {
    Map<String, Object> parser = jsonMapper.convertValue(
        new StringInputRowParser(
            new JSONParseSpec(
                new TimestampSpec("time", "auto", null),
                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dimB", "dimA"))),
                null,
                null,
                null
            ),
            null
        ),
        JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
    );
    DataSchema originalSchema = new DataSchema(
        IdUtilsTest.VALID_ID_CHARS,
        parser,
        new AggregatorFactory[]{
            new DoubleSumAggregatorFactory("metric1", "col1"),
            new DoubleSumAggregatorFactory("metric2", "col2")
        },
        new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
        null,
        jsonMapper
    );

    // Serialize the plain schema, then deserialize into the extended class;
    // the field that was never written comes back as null.
    String serialized = jsonMapper.writeValueAsString(originalSchema);
    TestModifiedDataSchema deserialized = jsonMapper.readValue(serialized, TestModifiedDataSchema.class);

    Assert.assertNull(deserialized.getExtra());
    Assert.assertEquals(originalSchema.getDataSource(), deserialized.getDataSource());
    Assert.assertEquals(originalSchema.getGranularitySpec(), deserialized.getGranularitySpec());
    Assert.assertEquals(originalSchema.getParser().getParseSpec(), deserialized.getParser().getParseSpec());
    Assert.assertArrayEquals(originalSchema.getAggregators(), deserialized.getAggregators());
    Assert.assertEquals(originalSchema.getTransformSpec(), deserialized.getTransformSpec());
    Assert.assertEquals(originalSchema.getParserMap(), deserialized.getParserMap());
}
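The opposite direction needs no special mapper configuration: with plain Jackson defaults, a property absent from the input simply leaves the field at its default value. A standalone sketch with hypothetical names, mirroring the assertNull on getExtra() above:

import com.fasterxml.jackson.databind.ObjectMapper;

class AddedFieldSketch {
    public static class PojoV2 {
        public int a;
        public String extra; // added in a later version; older JSON lacks it
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // The serialized form predates "extra", so the field stays null.
        PojoV2 p = mapper.readValue("{\"a\":1}", PojoV2.class);
        System.out.println(p.extra); // prints null
    }
}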