Use of io.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From the class FireDepartmentTest, method testSerde:
@Test
public void testSerde() throws Exception {
ObjectMapper jsonMapper = new DefaultObjectMapper();
jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper));
FireDepartment schema = new FireDepartment(
    new DataSchema(
        "foo",
        jsonMapper.convertValue(
            new StringInputRowParser(
                new JSONParseSpec(
                    new TimestampSpec("timestamp", "auto", null),
                    new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim1", "dim2")), null, null),
                    null,
                    null
                ),
                null
            ),
            Map.class
        ),
        new AggregatorFactory[] { new CountAggregatorFactory("count") },
        new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
        jsonMapper
    ),
    new RealtimeIOConfig(
        null,
        new RealtimePlumberSchool(
            null, null, null, null, null, null, null,
            TestHelper.getTestIndexMerger(),
            TestHelper.getTestIndexMergerV9(),
            TestHelper.getTestIndexIO(),
            MapCache.create(0),
            NO_CACHE_CONFIG,
            TestHelper.getObjectMapper()
        ),
        null
    ),
    RealtimeTuningConfig.makeDefaultTuningConfig(new File("/tmp/nonexistent"))
);
String json = jsonMapper.writeValueAsString(schema);
FireDepartment newSchema = jsonMapper.readValue(json, FireDepartment.class);
Assert.assertEquals(schema.getDataSchema().getDataSource(), newSchema.getDataSchema().getDataSource());
Assert.assertEquals("/tmp/nonexistent", schema.getTuningConfig().getBasePersistDirectory().toString());
}
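The assertions above only compare the data source and tuning config, but the point of the round trip is that the nested JSONParseSpec survives Jackson (de)serialization. A minimal sketch of the same idea applied to the parse spec on its own, using the test's jsonMapper; the column names here are illustrative, not taken from the test:
// Hedged sketch: serialize and deserialize a JSONParseSpec by itself.
JSONParseSpec parseSpec = new JSONParseSpec(
    new TimestampSpec("timestamp", "auto", null),
    new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim1", "dim2")), null, null),
    null,
    null
);
String specJson = jsonMapper.writeValueAsString(parseSpec);
// ParseSpec carries a polymorphic type marker ("format": "json"), so reading the JSON
// back through the ParseSpec base type should yield a JSONParseSpec again.
ParseSpec roundTripped = jsonMapper.readValue(specJson, ParseSpec.class);
Assert.assertEquals(
    parseSpec.getTimestampSpec().getTimestampColumn(),
    roundTripped.getTimestampSpec().getTimestampColumn()
);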
Use of io.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From the class DataSchemaTest, method testDefaultExclusions:
@Test
public void testDefaultExclusions() throws Exception {
Map<String, Object> parser = jsonMapper.convertValue(
    new StringInputRowParser(
        new JSONParseSpec(
            new TimestampSpec("time", "auto", null),
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dimB", "dimA")), null, null),
            null,
            null
        ),
        null
    ),
    new TypeReference<Map<String, Object>>() {}
);
DataSchema schema = new DataSchema(
    "test",
    parser,
    new AggregatorFactory[] {
        new DoubleSumAggregatorFactory("metric1", "col1"),
        new DoubleSumAggregatorFactory("metric2", "col2")
    },
    new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
    jsonMapper
);
Assert.assertEquals(ImmutableSet.of("time", "col1", "col2", "metric1", "metric2"), schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions());
}
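The excluded columns asserted above are exactly the timestamp column plus every aggregator input and output name. A small sketch that recomputes that set by hand, assuming DataSchema.getAggregators() and AggregatorFactory.requiredFields() as the accessors for the aggregators and their input columns:
// Hedged sketch: rebuild the expected exclusion set from the schema itself.
Set<String> expectedExclusions = new HashSet<>();
expectedExclusions.add("time"); // the timestamp column
for (AggregatorFactory agg : schema.getAggregators()) {
    expectedExclusions.add(agg.getName());           // output column, e.g. "metric1"
    expectedExclusions.addAll(agg.requiredFields()); // input columns, e.g. "col1"
}
Assert.assertEquals(
    expectedExclusions,
    schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions()
);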
Use of io.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From the class FlattenJSONBenchmarkUtil, method getNestedParser:
public Parser getNestedParser() {
List<JSONPathFieldSpec> fields = new ArrayList<>();
fields.add(JSONPathFieldSpec.createRootField("ts"));
fields.add(JSONPathFieldSpec.createRootField("d1"));
//fields.add(JSONPathFieldSpec.createRootField("d2"));
fields.add(JSONPathFieldSpec.createNestedField("e1.d1", "$.e1.d1"));
fields.add(JSONPathFieldSpec.createNestedField("e1.d2", "$.e1.d2"));
fields.add(JSONPathFieldSpec.createNestedField("e2.d3", "$.e2.d3"));
fields.add(JSONPathFieldSpec.createNestedField("e2.d4", "$.e2.d4"));
fields.add(JSONPathFieldSpec.createNestedField("e2.d5", "$.e2.d5"));
fields.add(JSONPathFieldSpec.createNestedField("e2.d6", "$.e2.d6"));
fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[0]", "$.e2.ad1[0]"));
fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[1]", "$.e2.ad1[1]"));
fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[2]", "$.e2.ad1[2]"));
fields.add(JSONPathFieldSpec.createNestedField("ae1[0].d1", "$.ae1[0].d1"));
fields.add(JSONPathFieldSpec.createNestedField("ae1[1].d1", "$.ae1[1].d1"));
fields.add(JSONPathFieldSpec.createNestedField("ae1[2].e1.d2", "$.ae1[2].e1.d2"));
fields.add(JSONPathFieldSpec.createRootField("m3"));
//fields.add(JSONPathFieldSpec.createRootField("m4"));
fields.add(JSONPathFieldSpec.createNestedField("e3.m1", "$.e3.m1"));
fields.add(JSONPathFieldSpec.createNestedField("e3.m2", "$.e3.m2"));
fields.add(JSONPathFieldSpec.createNestedField("e3.m3", "$.e3.m3"));
fields.add(JSONPathFieldSpec.createNestedField("e3.m4", "$.e3.m4"));
fields.add(JSONPathFieldSpec.createNestedField("e3.am1[0]", "$.e3.am1[0]"));
fields.add(JSONPathFieldSpec.createNestedField("e3.am1[1]", "$.e3.am1[1]"));
fields.add(JSONPathFieldSpec.createNestedField("e3.am1[2]", "$.e3.am1[2]"));
fields.add(JSONPathFieldSpec.createNestedField("e3.am1[3]", "$.e3.am1[3]"));
fields.add(JSONPathFieldSpec.createNestedField("e4.e4.m4", "$.e4.e4.m4"));
JSONPathSpec flattenSpec = new JSONPathSpec(true, fields);
JSONParseSpec spec = new JSONParseSpec(
    new TimestampSpec("ts", "iso", null),
    new DimensionsSpec(null, null, null),
    flattenSpec,
    null
);
return spec.makeParser();
}
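The parser returned here turns one nested JSON event into a flat Map keyed by the field names defined above. A rough usage sketch, assuming a FlattenJSONBenchmarkUtil instance named util and an illustrative event (not one generated by the benchmark):
// Hedged sketch: flatten a nested event with the parser built above.
Parser<String, Object> parser = util.getNestedParser();
Map<String, Object> flat = parser.parse(
    "{\"ts\":\"2015-01-01T00:00:00.000Z\",\"d1\":\"foo\","
    + "\"e1\":{\"d1\":\"bar\",\"d2\":\"baz\"},"
    + "\"e2\":{\"ad1\":[\"a\",\"b\",\"c\"]}}"
);
// Nested values come back under the flattened names, e.g. flat.get("e1.d1") -> "bar"
// and flat.get("e2.ad1[0]") -> "a".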
Use of io.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From the class FlattenJSONBenchmarkUtil, method getFieldDiscoveryParser:
public Parser getFieldDiscoveryParser() {
List<JSONPathFieldSpec> fields = new ArrayList<>();
JSONPathSpec flattenSpec = new JSONPathSpec(true, fields);
JSONParseSpec spec = new JSONParseSpec(
    new TimestampSpec("ts", "iso", null),
    new DimensionsSpec(null, null, null),
    flattenSpec,
    null
);
return spec.makeParser();
}
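Here the field list is empty and field discovery is enabled, so the parser relies entirely on automatic discovery. A rough sketch, assuming the usual flattenSpec behavior that discovery only picks up root-level primitive fields, with util as above:
// Hedged sketch: with useFieldDiscovery = true and no explicit fields, root-level
// primitive fields are discovered automatically; nested objects are not flattened.
Parser<String, Object> discoveryParser = util.getFieldDiscoveryParser();
Map<String, Object> row = discoveryParser.parse(
    "{\"ts\":\"2015-01-01T00:00:00.000Z\",\"d1\":\"foo\",\"e1\":{\"d1\":\"bar\"}}"
);
// Expect "ts" and "d1" as keys; nothing like "e1.d1" appears because no JSONPath
// field was declared for it.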
Use of io.druid.data.input.impl.JSONParseSpec in project druid by druid-io.
From the class FlattenJSONBenchmarkUtil, method getForcedPathParser:
public Parser getForcedPathParser() {
List<JSONPathFieldSpec> fields = new ArrayList<>();
fields.add(JSONPathFieldSpec.createNestedField("ts", "$['ts']"));
fields.add(JSONPathFieldSpec.createNestedField("d1", "$['d1']"));
fields.add(JSONPathFieldSpec.createNestedField("d2", "$['d2']"));
fields.add(JSONPathFieldSpec.createNestedField("e1.d1", "$['e1.d1']"));
fields.add(JSONPathFieldSpec.createNestedField("e1.d2", "$['e1.d2']"));
fields.add(JSONPathFieldSpec.createNestedField("e2.d3", "$['e2.d3']"));
fields.add(JSONPathFieldSpec.createNestedField("e2.d4", "$['e2.d4']"));
fields.add(JSONPathFieldSpec.createNestedField("e2.d5", "$['e2.d5']"));
fields.add(JSONPathFieldSpec.createNestedField("e2.d6", "$['e2.d6']"));
fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[0]", "$['e2.ad1[0]']"));
fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[1]", "$['e2.ad1[1]']"));
fields.add(JSONPathFieldSpec.createNestedField("e2.ad1[2]", "$['e2.ad1[2]']"));
fields.add(JSONPathFieldSpec.createNestedField("ae1[0].d1", "$['ae1[0].d1']"));
fields.add(JSONPathFieldSpec.createNestedField("ae1[1].d1", "$['ae1[1].d1']"));
fields.add(JSONPathFieldSpec.createNestedField("ae1[2].e1.d2", "$['ae1[2].e1.d2']"));
fields.add(JSONPathFieldSpec.createNestedField("m3", "$['m3']"));
fields.add(JSONPathFieldSpec.createNestedField("m4", "$['m4']"));
fields.add(JSONPathFieldSpec.createNestedField("e3.m1", "$['e3.m1']"));
fields.add(JSONPathFieldSpec.createNestedField("e3.m2", "$['e3.m2']"));
fields.add(JSONPathFieldSpec.createNestedField("e3.m3", "$['e3.m3']"));
fields.add(JSONPathFieldSpec.createNestedField("e3.m4", "$['e3.m4']"));
fields.add(JSONPathFieldSpec.createNestedField("e3.am1[0]", "$['e3.am1[0]']"));
fields.add(JSONPathFieldSpec.createNestedField("e3.am1[1]", "$['e3.am1[1]']"));
fields.add(JSONPathFieldSpec.createNestedField("e3.am1[2]", "$['e3.am1[2]']"));
fields.add(JSONPathFieldSpec.createNestedField("e3.am1[3]", "$['e3.am1[3]']"));
fields.add(JSONPathFieldSpec.createNestedField("e4.e4.m4", "$['e4.e4.m4']"));
JSONPathSpec flattenSpec = new JSONPathSpec(false, fields);
JSONParseSpec spec = new JSONParseSpec(
    new TimestampSpec("ts", "iso", null),
    new DimensionsSpec(null, null, null),
    flattenSpec,
    null
);
return spec.makeParser();
}
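Unlike the nested parser, every path here uses bracket notation, so $['e1.d1'] addresses a single top-level key literally named "e1.d1" rather than descending into e1; with discovery turned off, only the listed fields can appear. A rough sketch against an already-flat event, with util as above and an illustrative payload:
// Hedged sketch: the forced-path parser expects events whose keys already contain dots.
Parser<String, Object> forcedParser = util.getForcedPathParser();
Map<String, Object> flatRow = forcedParser.parse(
    "{\"ts\":\"2015-01-01T00:00:00.000Z\",\"d1\":\"foo\",\"e1.d1\":\"bar\"}"
);
// flatRow.get("e1.d1") -> "bar"; with useFieldDiscovery = false, keys not listed in
// the spec are ignored.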