Use of org.apache.druid.java.util.common.parsers.JSONPathFieldSpec in project druid by druid-io.
From the class ProtobufInputFormatTest, method setUp:
@Before
public void setUp() {
  timestampSpec = new TimestampSpec("timestamp", "iso", null);
  dimensionsSpec = new DimensionsSpec(
      Lists.newArrayList(
          new StringDimensionSchema("event"),
          new StringDimensionSchema("id"),
          new StringDimensionSchema("someOtherId"),
          new StringDimensionSchema("isValid")
      )
  );
  flattenSpec = new JSONPathSpec(
      true,
      Lists.newArrayList(
          new JSONPathFieldSpec(JSONPathFieldType.ROOT, "eventType", "eventType"),
          new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"),
          new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar0", "$.bar[0].bar")
      )
  );
  decoder = new FileBasedProtobufBytesDecoder("prototest.desc", "ProtoTestEvent");
  for (Module jacksonModule : new ProtobufExtensionsModule().getJacksonModules()) {
    jsonMapper.registerModule(jacksonModule);
  }
  jsonMapper.setInjectableValues(
      new InjectableValues.Std().addValue(ObjectMapper.class, new DefaultObjectMapper())
  );
}
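For context, a minimal sketch (not part of the snippet above) of the JSON form this flattenSpec serializes to, assuming the jsonMapper configured in setUp; the exact output shape is an assumption based on JSONPathSpec's Jackson bindings:

// Sketch: render the flattenSpec built in setUp as an ingestion-spec fragment.
String flattenJson = jsonMapper.writeValueAsString(flattenSpec);
// Plausible output (field order may vary):
// {"useFieldDiscovery":true,"fields":[
//   {"type":"root","name":"eventType","expr":"eventType"},
//   {"type":"path","name":"foobar","expr":"$.foo.bar"},
//   {"type":"path","name":"bar0","expr":"$.bar[0].bar"}]}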
Use of org.apache.druid.java.util.common.parsers.JSONPathFieldSpec in project druid by druid-io.
From the class ProtobufInputRowParserTest, method setUp:
@Before
public void setUp() {
  parseSpec = new JSONParseSpec(
      new TimestampSpec("timestamp", "iso", null),
      new DimensionsSpec(
          Lists.newArrayList(
              new StringDimensionSchema("event"),
              new StringDimensionSchema("id"),
              new StringDimensionSchema("someOtherId"),
              new StringDimensionSchema("isValid")
          )
      ),
      new JSONPathSpec(
          true,
          Lists.newArrayList(
              new JSONPathFieldSpec(JSONPathFieldType.ROOT, "eventType", "eventType"),
              new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"),
              new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar0", "$.bar[0].bar")
          )
      ),
      null,
      null
  );
  flatParseSpec = new JSONParseSpec(
      new TimestampSpec("timestamp", "iso", null),
      new DimensionsSpec(
          Lists.newArrayList(
              new StringDimensionSchema("event"),
              new StringDimensionSchema("id"),
              new StringDimensionSchema("someOtherId"),
              new StringDimensionSchema("isValid")
          )
      ),
      null,
      null,
      null
  );
  flatParseSpecWithComplexTimestamp = new JSONParseSpec(
      new TimestampSpec("otherTimestamp", "iso", null),
      new DimensionsSpec(
          Lists.newArrayList(
              new StringDimensionSchema("event"),
              new StringDimensionSchema("id"),
              new StringDimensionSchema("someOtherId"),
              new StringDimensionSchema("isValid")
          )
      ),
      null,
      null,
      null
  );
  decoder = new FileBasedProtobufBytesDecoder("prototest.desc", "ProtoTestEvent");
}
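A hedged sketch of how these fixtures are typically driven; the constructor shape ProtobufInputRowParser(parseSpec, decoder, descriptorFilePath, protoMessageType) and the eventBytes variable are assumptions, not shown in this excerpt:

// Sketch: build a parser from the nested parseSpec plus the file-based decoder;
// the trailing nulls assume the decoder already carries descriptor and message type.
ProtobufInputRowParser parser = new ProtobufInputRowParser(parseSpec, decoder, null, null);
// eventBytes: a hypothetical serialized ProtoTestEvent message.
InputRow row = parser.parseBatch(ByteBuffer.wrap(eventBytes)).get(0);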
Use of org.apache.druid.java.util.common.parsers.JSONPathFieldSpec in project druid by druid-io.
From the class ProtobufReaderTest, method setUp:
@Before
public void setUp() {
  TimestampSpec timestampSpec = new TimestampSpec("timestamp", "iso", null);
  DimensionsSpec dimensionsSpec = new DimensionsSpec(
      Lists.newArrayList(
          new StringDimensionSchema("event"),
          new StringDimensionSchema("id"),
          new StringDimensionSchema("someOtherId"),
          new StringDimensionSchema("isValid")
      )
  );
  flattenSpec = new JSONPathSpec(
      true,
      Lists.newArrayList(
          new JSONPathFieldSpec(JSONPathFieldType.ROOT, "eventType", "eventType"),
          new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"),
          new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar0", "$.bar[0].bar")
      )
  );
  inputRowSchema = new InputRowSchema(timestampSpec, dimensionsSpec, null);
  inputRowSchemaWithComplexTimestamp = new InputRowSchema(
      new TimestampSpec("otherTimestamp", "iso", null),
      dimensionsSpec,
      null
  );
  decoder = new FileBasedProtobufBytesDecoder("prototest.desc", "ProtoTestEvent");
}
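A sketch of wiring these fixtures into a reader; the ProtobufReader constructor order (schema, entity, decoder, flattenSpec) and the ByteEntity input are assumptions for illustration:

// Sketch: read flattened InputRows from hypothetical serialized protobuf bytes.
ProtobufReader reader = new ProtobufReader(inputRowSchema, new ByteEntity(eventBytes), decoder, flattenSpec);
try (CloseableIterator<InputRow> iterator = reader.read()) {
  InputRow row = iterator.next(); // timestamp and dimensions resolved via the flattenSpec
}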
Use of org.apache.druid.java.util.common.parsers.JSONPathFieldSpec in project druid by druid-io.
From the class CompatParquetReaderTest, method testOldRepeatedInt:
@Test
public void testOldRepeatedInt() throws IOException {
  final String file = "example/compat/old-repeated-int.parquet";
  InputRowSchema schema = new InputRowSchema(
      new TimestampSpec("timestamp", "auto", DateTimes.of("2018-09-01T00:00:00.000Z")),
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("repeatedInt"))),
      ColumnsFilter.all()
  );
  List<JSONPathFieldSpec> flattenExpr = ImmutableList.of(
      new JSONPathFieldSpec(JSONPathFieldType.ROOT, "repeatedInt", "repeatedInt")
  );
  JSONPathSpec flattenSpec = new JSONPathSpec(true, flattenExpr);
  InputEntityReader reader = createReader(file, schema, flattenSpec);
  List<InputRow> rows = readAllRows(reader);
  Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
  Assert.assertEquals("1", rows.get(0).getDimension("repeatedInt").get(0));
  Assert.assertEquals("2", rows.get(0).getDimension("repeatedInt").get(1));
  Assert.assertEquals("3", rows.get(0).getDimension("repeatedInt").get(2));

  reader = createReader(file, schema, flattenSpec);
  List<InputRowListPlusRawValues> sampled = sampleAllRows(reader);
  final String expectedJson = "{\n"
      + "  \"repeatedInt\" : [ 1, 2, 3 ]\n"
      + "}";
  Assert.assertEquals(expectedJson, DEFAULT_JSON_WRITER.writeValueAsString(sampled.get(0).getRawValues()));
}
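The createReader and readAllRows helpers live outside this excerpt; a plausible sketch of createReader, assuming the parquet extension's ParquetInputFormat(flattenSpec, binaryAsString, conf) constructor, a FileEntity source, and a JUnit TemporaryFolder rule:

// Hypothetical helper: build an InputEntityReader over a local parquet file.
private InputEntityReader createReader(String file, InputRowSchema schema, JSONPathSpec flattenSpec) throws IOException {
  FileEntity entity = new FileEntity(new File(file));
  ParquetInputFormat format = new ParquetInputFormat(flattenSpec, false, new Configuration());
  return format.createReader(schema, entity, temporaryFolder.newFolder());
}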
Use of org.apache.druid.java.util.common.parsers.JSONPathFieldSpec in project druid by druid-io.
From the class CompatParquetReaderTest, method testReadNestedArrayStruct:
@Test
public void testReadNestedArrayStruct() throws IOException {
  final String file = "example/compat/nested-array-struct.parquet";
  InputRowSchema schema = new InputRowSchema(
      new TimestampSpec("timestamp", "auto", DateTimes.of("2018-09-01T00:00:00.000Z")),
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("i32_dec", "extracted1", "extracted2"))),
      ColumnsFilter.all()
  );
  List<JSONPathFieldSpec> flattenExpr = ImmutableList.of(
      new JSONPathFieldSpec(JSONPathFieldType.PATH, "extracted1", "$.myComplex[0].id"),
      new JSONPathFieldSpec(JSONPathFieldType.PATH, "extracted2", "$.myComplex[0].repeatedMessage[*].someId")
  );
  JSONPathSpec flattenSpec = new JSONPathSpec(true, flattenExpr);
  InputEntityReader reader = createReader(file, schema, flattenSpec);
  List<InputRow> rows = readAllRows(reader);
  Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(1).getTimestamp().toString());
  Assert.assertEquals("5", rows.get(1).getDimension("primitive").get(0));
  Assert.assertEquals("4", rows.get(1).getDimension("extracted1").get(0));
  Assert.assertEquals("6", rows.get(1).getDimension("extracted2").get(0));

  reader = createReader(file, schema, flattenSpec);
  List<InputRowListPlusRawValues> sampled = sampleAllRows(reader);
  final String expectedJson = "{\n"
      + "  \"primitive\" : 2,\n"
      + "  \"myComplex\" : [ {\n"
      + "    \"id\" : 1,\n"
      + "    \"repeatedMessage\" : [ 3 ]\n"
      + "  } ]\n"
      + "}";
  Assert.assertEquals(expectedJson, DEFAULT_JSON_WRITER.writeValueAsString(sampled.get(0).getRawValues()));
}
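Note the [*] wildcard in the second PATH expression: it gathers every match into a single multi-value dimension, whereas $.myComplex[0].id yields one value. A hypothetical illustration (the raw record below is invented, not from the test file):

// If a row's raw data were {"myComplex": [{"id": 4, "repeatedMessage": [{"someId": 6}, {"someId": 7}]}]}
// the two PATH fields above would flatten it as:
Assert.assertEquals(ImmutableList.of("4"), row.getDimension("extracted1"));      // $.myComplex[0].id
Assert.assertEquals(ImmutableList.of("6", "7"), row.getDimension("extracted2")); // [*] collects all matches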