Use of org.apache.druid.data.input.impl.ByteEntity in project druid by druid-io: class ProtobufInputFormatTest, method testParseNestedData.
@Test
public void testParseNestedData() throws Exception {
  // configure the parser with the descriptor file
  ProtobufInputFormat protobufInputFormat = new ProtobufInputFormat(flattenSpec, decoder);

  // create the binary form of the proto test event
  DateTime dateTime = new DateTime(2012, 7, 12, 9, 30, ISOChronology.getInstanceUTC());
  ProtoTestEventWrapper.ProtoTestEvent event = ProtobufInputRowParserTest.buildNestedData(dateTime);
  final ByteEntity entity = new ByteEntity(ProtobufInputRowParserTest.toByteBuffer(event));

  InputRow row = protobufInputFormat
      .createReader(new InputRowSchema(timestampSpec, dimensionsSpec, null), entity, null)
      .read()
      .next();

  ProtobufInputRowParserTest.verifyNestedData(row, dateTime);
}
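The fields flattenSpec, decoder, timestampSpec, and dimensionsSpec are fixtures defined elsewhere in ProtobufInputFormatTest and are not part of this snippet. A minimal sketch of what that setup could look like follows; the descriptor file name, message type, and field lists are illustrative assumptions, not values taken from the project:

private TimestampSpec timestampSpec;
private DimensionsSpec dimensionsSpec;
private JSONPathSpec flattenSpec;
private FileBasedProtobufBytesDecoder decoder;

@Before
public void setUp() {
  // assumed timestamp column and ISO format
  timestampSpec = new TimestampSpec("timestamp", "iso", null);
  // hypothetical dimension names, for illustration only
  dimensionsSpec = new DimensionsSpec(
      DimensionsSpec.getDefaultSchemas(ImmutableList.of("event", "id", "someOtherId", "isValid"))
  );
  // enable automatic field discovery plus one explicit path into the nested message
  flattenSpec = new JSONPathSpec(
      true,
      ImmutableList.of(new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"))
  );
  // assumed descriptor file and message type on the test classpath
  decoder = new FileBasedProtobufBytesDecoder("prototest.desc", "ProtoTestEvent");
}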
Use of org.apache.druid.data.input.impl.ByteEntity in project druid by druid-io: class ProtobufInputFormatTest, method testParseFlatData.
@Test
public void testParseFlatData() throws Exception {
  // configure the parser with the descriptor file; no flattenSpec is needed for flat data
  ProtobufInputFormat protobufInputFormat = new ProtobufInputFormat(null, decoder);

  // create the binary form of the proto test event
  DateTime dateTime = new DateTime(2012, 7, 12, 9, 30, ISOChronology.getInstanceUTC());
  ProtoTestEventWrapper.ProtoTestEvent event = ProtobufInputRowParserTest.buildFlatData(dateTime);
  final ByteEntity entity = new ByteEntity(ProtobufInputRowParserTest.toByteBuffer(event));

  InputRow row = protobufInputFormat
      .createReader(new InputRowSchema(timestampSpec, dimensionsSpec, null), entity, null)
      .read()
      .next();

  ProtobufInputRowParserTest.verifyFlatData(row, dateTime);
}
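Unlike the nested-data test, this one passes null for the flattenSpec. ProtobufInputFormat then falls back to its default flattening behavior, which discovers top-level fields automatically, so a flat event needs no explicit path expressions.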
Use of org.apache.druid.data.input.impl.ByteEntity in project druid by druid-io: class StreamChunkParserTest, method testWithParserAndNullInputformatParseProperly.
@Test
public void testWithParserAndNullInputformatParseProperly() throws IOException {
  final InputRowParser<ByteBuffer> parser = new StringInputRowParser(
      new JSONParseSpec(TIMESTAMP_SPEC, DimensionsSpec.EMPTY, JSONPathSpec.DEFAULT, Collections.emptyMap(), false),
      StringUtils.UTF8_STRING
  );
  // Set nulls for all parameters below since inputFormat will never be used.
  final StreamChunkParser<ByteEntity> chunkParser = new StreamChunkParser<>(
      parser, null, null, null, null, row -> true, rowIngestionMeters, parseExceptionHandler
  );
  parseAndAssertResult(chunkParser);
}
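This test exercises the legacy construction path: because a non-null InputRowParser is supplied, StreamChunkParser routes every chunk through that parser, and the inputFormat-related arguments can stay null, as the inline comment notes.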
Use of org.apache.druid.data.input.impl.ByteEntity in project druid by druid-io: class StreamChunkParserTest, method testWithNullParserAndInputformatParseProperly.
@Test
public void testWithNullParserAndInputformatParseProperly() throws IOException {
  final JsonInputFormat inputFormat = new JsonInputFormat(JSONPathSpec.DEFAULT, Collections.emptyMap(), null);
  // The parser argument is null; rows are read through the InputFormat instead.
  final StreamChunkParser<ByteEntity> chunkParser = new StreamChunkParser<>(
      null, inputFormat, new InputRowSchema(TIMESTAMP_SPEC, DimensionsSpec.EMPTY, ColumnsFilter.all()),
      TransformSpec.NONE, temporaryFolder.newFolder(), row -> true, rowIngestionMeters, parseExceptionHandler
  );
  parseAndAssertResult(chunkParser);
}
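Together, the two tests cover both construction paths of StreamChunkParser: either a legacy InputRowParser, or a null parser plus an InputFormat, InputRowSchema, and TransformSpec. The temporaryFolder.newFolder() argument supplies a scratch directory for the format-based reader; presumably exactly one of parser and inputFormat is expected to be non-null.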
Use of org.apache.druid.data.input.impl.ByteEntity in project druid by druid-io: class StreamChunkParserTest, method parseAndAssertResult.
private void parseAndAssertResult(StreamChunkParser<ByteEntity> chunkParser) throws IOException {
  final String json = "{\"timestamp\": \"2020-01-01\", \"dim\": \"val\", \"met\": \"val2\"}";
  List<InputRow> parsedRows = chunkParser.parse(
      Collections.singletonList(new ByteEntity(json.getBytes(StringUtils.UTF8_STRING))),
      false
  );
  Assert.assertEquals(1, parsedRows.size());
  InputRow row = parsedRows.get(0);
  Assert.assertEquals(DateTimes.of("2020-01-01"), row.getTimestamp());
  Assert.assertEquals("val", Iterables.getOnlyElement(row.getDimension("dim")));
  Assert.assertEquals("val2", Iterables.getOnlyElement(row.getDimension("met")));
}
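parseAndAssertResult also leans on class-level fixtures that the snippets omit: TIMESTAMP_SPEC, temporaryFolder, rowIngestionMeters, and parseExceptionHandler. A plausible minimal setup, assuming JUnit 4's TemporaryFolder rule and Druid's SimpleRowIngestionMeters; the concrete values are illustrative guesses, not the project's actual configuration:

private static final TimestampSpec TIMESTAMP_SPEC = new TimestampSpec("timestamp", "iso", null);

@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder();

// in-memory meters; parse exceptions neither logged nor tolerated (illustrative settings)
private final RowIngestionMeters rowIngestionMeters = new SimpleRowIngestionMeters();
private final ParseExceptionHandler parseExceptionHandler =
    new ParseExceptionHandler(rowIngestionMeters, false, 0, 0);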