Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io: the class DataSchemaTest, method testOverlapMetricNameAndDim.
@Test(expected = IAE.class)
public void testOverlapMetricNameAndDim() throws Exception {
  Map<String, Object> parser = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              new DimensionsSpec(
                  DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "metric1")),
                  ImmutableList.of("dimC"),
                  null
              ),
              null,
              null
          ),
          null
      ),
      new TypeReference<Map<String, Object>>() {}
  );
  DataSchema schema = new DataSchema(
      "test",
      parser,
      new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2")
      },
      new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
      jsonMapper
  );
  schema.getParser();
}
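Both DataSchemaTest cases build their TimestampSpec as new TimestampSpec("time", "auto", null): the arguments are the timestamp column name, the timestamp format, and a default instant to use when the column is missing. The standalone sketch below is not part of the druid test suite; the class name and sample rows are hypothetical, and extractTimestamp is assumed to behave as in the io.druid.data.input.impl API when the format is "auto".

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.impl.TimestampSpec;
import org.joda.time.DateTime;

import java.util.Map;

public class TimestampSpecAutoFormatSketch {
  public static void main(String[] args) {
    // Column "time", format "auto" (accepts both ISO-8601 strings and epoch millis),
    // and no fallback for rows that lack the column.
    TimestampSpec spec = new TimestampSpec("time", "auto", null);

    Map<String, Object> isoRow = ImmutableMap.<String, Object>of("time", "2014-03-01T00:00:00Z", "dimA", "x");
    Map<String, Object> millisRow = ImmutableMap.<String, Object>of("time", 1393632000000L, "dimA", "x");

    // Both rows should resolve to the same instant, 2014-03-01T00:00:00.000Z.
    DateTime fromIso = spec.extractTimestamp(isoRow);
    DateTime fromMillis = spec.extractTimestamp(millisRow);
    System.out.println(fromIso + " / " + fromMillis);
  }
}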
Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io: the class DataSchemaTest, method testDuplicateAggregators.
@Test(expected = IAE.class)
public void testDuplicateAggregators() throws Exception {
  Map<String, Object> parser = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              new DimensionsSpec(
                  DimensionsSpec.getDefaultSchemas(ImmutableList.of("time")),
                  ImmutableList.of("dimC"),
                  null
              ),
              null,
              null
          ),
          null
      ),
      new TypeReference<Map<String, Object>>() {}
  );
  DataSchema schema = new DataSchema(
      "test",
      parser,
      new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2"),
          new DoubleSumAggregatorFactory("metric1", "col3")
      },
      new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
      jsonMapper
  );
  schema.getParser();
}
Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io: the class KafkaSupervisorTest, method getDataSchema.
private DataSchema getDataSchema(String dataSource) {
  List<DimensionSchema> dimensions = new ArrayList<>();
  dimensions.add(StringDimensionSchema.create("dim1"));
  dimensions.add(StringDimensionSchema.create("dim2"));
  return new DataSchema(
      dataSource,
      objectMapper.convertValue(
          new StringInputRowParser(
              new JSONParseSpec(
                  new TimestampSpec("timestamp", "iso", null),
                  new DimensionsSpec(dimensions, null, null),
                  new JSONPathSpec(true, ImmutableList.<JSONPathFieldSpec>of()),
                  ImmutableMap.<String, Boolean>of()
              ),
              Charsets.UTF_8.name()
          ),
          Map.class
      ),
      new AggregatorFactory[]{new CountAggregatorFactory("rows")},
      new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, ImmutableList.<Interval>of()),
      objectMapper
  );
}
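Unlike the DataSchemaTest cases, this parser uses the "iso" format, so the "timestamp" field is expected to carry ISO-8601 strings rather than being auto-detected. A minimal sketch of that usage (hypothetical class name and sample row; extractTimestamp assumed to behave as in the io.druid.data.input.impl API):

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.impl.TimestampSpec;
import org.joda.time.DateTime;

import java.util.Map;

public class IsoTimestampSpecSketch {
  public static void main(String[] args) {
    // Matches the spec used in getDataSchema(): column "timestamp", ISO-8601 format.
    TimestampSpec spec = new TimestampSpec("timestamp", "iso", null);

    Map<String, Object> row = ImmutableMap.<String, Object>of("timestamp", "2017-04-01T12:30:00.000Z", "dim1", "a");
    DateTime ts = spec.extractTimestamp(row);
    System.out.println(ts); // 2017-04-01T12:30:00.000Z
  }
}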
Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io: the class TestIndex, method makeRealtimeIndex.
public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolean rollup) {
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis())
      .withTimestampSpec(new TimestampSpec("ds", "auto", null))
      .withQueryGranularity(Granularities.NONE)
      .withDimensionsSpec(DIMENSIONS_SPEC)
      .withVirtualColumns(VIRTUAL_COLUMNS)
      .withMetrics(METRIC_AGGS)
      .withRollup(rollup)
      .build();
  final IncrementalIndex retVal = new OnheapIncrementalIndex(schema, true, 10000);
  try {
    return loadIncrementalIndex(retVal, source);
  } catch (Exception e) {
    if (rollup) {
      realtimeIndex = null;
    } else {
      noRollupRealtimeIndex = null;
    }
    throw Throwables.propagate(e);
  }
}
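Every snippet on this page passes null as the third TimestampSpec argument. That argument is the fallback instant used when a row has no value in the timestamp column; the sketch below illustrates what a non-null value would do. The class name is hypothetical, and the fallback behavior of extractTimestamp is an assumption rather than something exercised by the test above.

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.impl.TimestampSpec;
import org.joda.time.DateTime;

import java.util.Map;

public class MissingTimestampSketch {
  public static void main(String[] args) {
    // Fall back to 2011-01-12T00:00:00.000Z when the "ds" column is absent from a row.
    DateTime fallback = new DateTime("2011-01-12T00:00:00.000Z");
    TimestampSpec spec = new TimestampSpec("ds", "auto", fallback);

    Map<String, Object> rowWithoutTimestamp = ImmutableMap.<String, Object>of("quality", "premium");
    System.out.println(spec.extractTimestamp(rowWithoutTimestamp)); // expected: the fallback instant
  }
}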
Use of io.druid.data.input.impl.TimestampSpec in project druid by druid-io: the class MetadataTest, method testMerge.
@Test
public void testMerge() {
  Assert.assertNull(Metadata.merge(null, null));
  Assert.assertNull(Metadata.merge(ImmutableList.<Metadata>of(), null));

  List<Metadata> metadataToBeMerged = new ArrayList<>();
  metadataToBeMerged.add(null);
  Assert.assertNull(Metadata.merge(metadataToBeMerged, null));

  // sanity merge check
  AggregatorFactory[] aggs = new AggregatorFactory[]{new LongMaxAggregatorFactory("n", "f")};

  Metadata m1 = new Metadata();
  m1.put("k", "v");
  m1.setAggregators(aggs);
  m1.setTimestampSpec(new TimestampSpec("ds", "auto", null));
  m1.setQueryGranularity(Granularities.ALL);
  m1.setRollup(Boolean.FALSE);

  Metadata m2 = new Metadata();
  m2.put("k", "v");
  m2.setAggregators(aggs);
  m2.setTimestampSpec(new TimestampSpec("ds", "auto", null));
  m2.setQueryGranularity(Granularities.ALL);
  m2.setRollup(Boolean.FALSE);

  Metadata merged = new Metadata();
  merged.put("k", "v");
  merged.setAggregators(new AggregatorFactory[]{new LongMaxAggregatorFactory("n", "n")});
  merged.setTimestampSpec(new TimestampSpec("ds", "auto", null));
  merged.setRollup(Boolean.FALSE);
  merged.setQueryGranularity(Granularities.ALL);
  Assert.assertEquals(merged, Metadata.merge(ImmutableList.of(m1, m2), null));

  // merge check with one metadata being null
  metadataToBeMerged.clear();
  metadataToBeMerged.add(m1);
  metadataToBeMerged.add(m2);
  metadataToBeMerged.add(null);
  merged.setAggregators(null);
  merged.setTimestampSpec(null);
  merged.setQueryGranularity(null);
  merged.setRollup(null);
  Assert.assertEquals(merged, Metadata.merge(metadataToBeMerged, null));

  // merge check with client explicitly providing merged aggregators
  AggregatorFactory[] explicitAggs = new AggregatorFactory[]{new DoubleMaxAggregatorFactory("x", "y")};
  merged.setAggregators(explicitAggs);
  Assert.assertEquals(merged, Metadata.merge(metadataToBeMerged, explicitAggs));

  merged.setTimestampSpec(new TimestampSpec("ds", "auto", null));
  merged.setQueryGranularity(Granularities.ALL);
  m1.setRollup(Boolean.TRUE);
  Assert.assertEquals(merged, Metadata.merge(ImmutableList.of(m1, m2), explicitAggs));
}
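The assertEquals checks above can only pass if TimestampSpec compares by value, so that the spec carried through Metadata.merge equals a freshly constructed one with the same arguments. A tiny standalone sketch of that property (hypothetical class name):

import io.druid.data.input.impl.TimestampSpec;

public class TimestampSpecEqualitySketch {
  public static void main(String[] args) {
    // Two independently constructed specs with the same column, format, and missing-value default.
    TimestampSpec a = new TimestampSpec("ds", "auto", null);
    TimestampSpec b = new TimestampSpec("ds", "auto", null);

    // Value equality is what lets the merged Metadata in the test match the expected instance.
    System.out.println(a.equals(b)); // true
  }
}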