Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io: class DataSchemaTest, method testDuplicateAggregators.
@Test(expected = IAE.class)
public void testDuplicateAggregators() throws Exception {
  Map<String, Object> parser = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time")), ImmutableList.of("dimC"), null),
              null,
              null
          ),
          null
      ),
      new TypeReference<Map<String, Object>>() {}
  );
  DataSchema schema = new DataSchema(
      "test",
      parser,
      new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2"),
          new DoubleSumAggregatorFactory("metric1", "col3") // duplicate name "metric1"
      },
      new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
      jsonMapper
  );
  schema.getParser();
}
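This test expects an IAE because two of the three aggregator factories share the name "metric1". A minimal sketch of the kind of uniqueness check that raises the error, using a hypothetical validateUniqueAggregatorNames helper (Druid's actual validation lives inside DataSchema, and the IAE import path varies by Druid version):

import java.util.HashSet;
import java.util.Set;
import io.druid.java.util.common.IAE;
import io.druid.query.aggregation.AggregatorFactory;

// Hypothetical helper, not Druid's code: reject aggregators with duplicate names.
static void validateUniqueAggregatorNames(AggregatorFactory[] aggregators) {
  Set<String> seen = new HashSet<>();
  for (AggregatorFactory factory : aggregators) {
    if (!seen.add(factory.getName())) {
      throw new IAE("duplicate aggregator name [%s]", factory.getName());
    }
  }
}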
Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io: class KafkaSupervisorTest, method getDataSchema.
private DataSchema getDataSchema(String dataSource) {
  List<DimensionSchema> dimensions = new ArrayList<>();
  dimensions.add(StringDimensionSchema.create("dim1"));
  dimensions.add(StringDimensionSchema.create("dim2"));
  return new DataSchema(
      dataSource,
      objectMapper.convertValue(
          new StringInputRowParser(
              new JSONParseSpec(
                  new TimestampSpec("timestamp", "iso", null),
                  new DimensionsSpec(dimensions, null, null),
                  new JSONPathSpec(true, ImmutableList.<JSONPathFieldSpec>of()),
                  ImmutableMap.<String, Boolean>of()
              ),
              Charsets.UTF_8.name()
          ),
          Map.class
      ),
      new AggregatorFactory[]{new CountAggregatorFactory("rows")},
      new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, ImmutableList.<Interval>of()),
      objectMapper
  );
}
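For reference, the three arguments of the DimensionsSpec constructor used throughout these examples are the dimension schemas, the dimension exclusions, and the spatial dimensions; the role comments below are inferred from the calls in this listing:

DimensionsSpec spec = new DimensionsSpec(
    dimensions, // explicit dimension schemas: dim1 and dim2 here
    null,       // dimensionExclusions: column names never to treat as dimensions
    null        // spatialDimensions: legacy spatial dimension definitions
);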
Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io: class MapVirtualColumnTest, method constructorFeeder.
@Parameterized.Parameters
public static Iterable<Object[]> constructorFeeder() throws IOException {
  final Supplier<SelectQueryConfig> selectConfigSupplier = Suppliers.ofInstance(new SelectQueryConfig(true));
  SelectQueryRunnerFactory factory = new SelectQueryRunnerFactory(
      new SelectQueryQueryToolChest(
          new DefaultObjectMapper(),
          QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator(),
          selectConfigSupplier
      ),
      new SelectQueryEngine(selectConfigSupplier),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER
  );
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis())
      .withQueryGranularity(Granularities.NONE)
      .build();
  final IncrementalIndex index = new OnheapIncrementalIndex(schema, true, 10000);
  final StringInputRowParser parser = new StringInputRowParser(
      new DelimitedParseSpec(
          new TimestampSpec("ts", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim", "keys", "values")), null, null),
          "\t", // column delimiter
          ",",  // list delimiter for multi-value columns
          Arrays.asList("ts", "dim", "keys", "values")
      ),
      "utf8"
  );
  CharSource input = CharSource.wrap(
      "2011-01-12T00:00:00.000Z\ta\tkey1,key2,key3\tvalue1,value2,value3\n"
      + "2011-01-12T00:00:00.000Z\tb\tkey4,key5,key6\tvalue4\n"
      + "2011-01-12T00:00:00.000Z\tc\tkey1,key5\tvalue1,value5,value9\n"
  );
  IncrementalIndex index1 = TestIndex.loadIncrementalIndex(index, input, parser);
  QueryableIndex index2 = TestIndex.persistRealtimeAndLoadMMapped(index1);
  return transformToConstructionFeeder(
      Arrays.asList(
          makeQueryRunner(factory, "index1", new IncrementalIndexSegment(index1, "index1"), "incremental"),
          makeQueryRunner(factory, "index2", new QueryableIndexSegment("index2", index2), "queryable")
      )
  );
}
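A usage sketch for the parser configured above, assuming StringInputRowParser.parse(String): DelimitedParseSpec splits columns on the tab delimiter and splits values inside a column on the "," list delimiter, producing multi-value dimensions:

InputRow row = parser.parse("2011-01-12T00:00:00.000Z\ta\tkey1,key2\tvalue1,value2");
// row.getDimension("dim")    -> ["a"]
// row.getDimension("keys")   -> ["key1", "key2"]     (split on the list delimiter)
// row.getDimension("values") -> ["value1", "value2"]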
Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io: class OrcHadoopInputRowParserTest, method testTypeFromParseSpec.
@Test
public void testTypeFromParseSpec() {
  ParseSpec parseSpec = new TimeAndDimsParseSpec(
      new TimestampSpec("timestamp", "auto", null),
      new DimensionsSpec(
          ImmutableList.<DimensionSchema>of(new StringDimensionSchema("col1"), new StringDimensionSchema("col2")),
          null,
          null
      )
  );
  String typeString = OrcHadoopInputRowParser.typeStringFromParseSpec(parseSpec);
  String expected = "struct<timestamp:string,col1:string,col2:string>";
  Assert.assertEquals(expected, typeString);
}
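One way such a type string can be derived, shown as an illustration rather than OrcHadoopInputRowParser's actual implementation: concatenate the timestamp column and each dimension name, mapping every field to ORC's string type:

StringBuilder typeString = new StringBuilder("struct<");
typeString.append(parseSpec.getTimestampSpec().getTimestampColumn()).append(":string");
for (DimensionSchema dimension : parseSpec.getDimensionsSpec().getDimensions()) {
  typeString.append(',').append(dimension.getName()).append(":string");
}
typeString.append('>');
// yields "struct<timestamp:string,col1:string,col2:string>" for the spec above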
Use of io.druid.data.input.impl.DimensionsSpec in project druid by druid-io: class IncrementalIndexMultiValueSpecTest, method test.
@Test
public void test() throws IndexSizeExceededException {
  DimensionsSpec dimensionsSpec = new DimensionsSpec(
      Arrays.<DimensionSchema>asList(
          new StringDimensionSchema("string1", DimensionSchema.MultiValueHandling.ARRAY),
          new StringDimensionSchema("string2", DimensionSchema.MultiValueHandling.SORTED_ARRAY),
          new StringDimensionSchema("string3", DimensionSchema.MultiValueHandling.SORTED_SET)
      ),
      null,
      null
  );
  IncrementalIndexSchema schema = new IncrementalIndexSchema(
      0,
      new TimestampSpec("ds", "auto", null),
      Granularities.ALL,
      VirtualColumns.EMPTY,
      dimensionsSpec,
      new AggregatorFactory[0],
      false
  );
  // Synthetic row source: every "string*" key returns the same multi-value list.
  Map<String, Object> map = new HashMap<String, Object>() {
    @Override
    public Object get(Object key) {
      if (((String) key).startsWith("string")) {
        return Arrays.asList("xsd", "aba", "fds", "aba");
      }
      if (((String) key).startsWith("float")) {
        return Arrays.<Float>asList(3.92f, -2.76f, 42.153f, Float.NaN, -2.76f, -2.76f);
      }
      if (((String) key).startsWith("long")) {
        return Arrays.<Long>asList(-231238789L, 328L, 923L, 328L, -2L, 0L);
      }
      return null;
    }
  };
  IncrementalIndex<?> index = new OnheapIncrementalIndex(schema, true, 10000);
  index.add(new MapBasedInputRow(0, Arrays.asList("string1", "string2", "string3", "float1", "float2", "float3", "long1", "long2", "long3"), map));
  Row row = index.iterator().next();
  Assert.assertEquals(Lists.newArrayList("xsd", "aba", "fds", "aba"), row.getRaw("string1")); // ARRAY: kept as given
  Assert.assertEquals(Lists.newArrayList("aba", "aba", "fds", "xsd"), row.getRaw("string2")); // SORTED_ARRAY: sorted, duplicates kept
  Assert.assertEquals(Lists.newArrayList("aba", "fds", "xsd"), row.getRaw("string3"));        // SORTED_SET: sorted, deduplicated
}
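The three assertions capture the MultiValueHandling modes, with semantics inferred from the expected values: ARRAY keeps the input as given, SORTED_ARRAY sorts but keeps duplicates, and SORTED_SET sorts and deduplicates. The same transformations in plain Java:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.TreeSet;

List<String> input = Arrays.asList("xsd", "aba", "fds", "aba");
List<String> array = new ArrayList<>(input);                    // ARRAY: [xsd, aba, fds, aba]
List<String> sortedArray = new ArrayList<>(input);
Collections.sort(sortedArray);                                  // SORTED_ARRAY: [aba, aba, fds, xsd]
List<String> sortedSet = new ArrayList<>(new TreeSet<>(input)); // SORTED_SET: [aba, fds, xsd]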