Example usage of org.apache.druid.data.input.InputRow in the druid-io/druid project, taken from the method parseEmptyNotEndOfShard of class StreamChunkParserTest.
@Test
public void parseEmptyNotEndOfShard() throws IOException {
// An empty chunk list while the shard is still open must produce no rows,
// and the empty input should be counted as thrown away on the meters.
final RowIngestionMeters meters = Mockito.mock(RowIngestionMeters.class);
final TrackingJsonInputFormat jsonFormat = new TrackingJsonInputFormat(JSONPathSpec.DEFAULT, Collections.emptyMap());
final InputRowSchema rowSchema = new InputRowSchema(TIMESTAMP_SPEC, DimensionsSpec.EMPTY, ColumnsFilter.all());
final StreamChunkParser<ByteEntity> parser = new StreamChunkParser<>(
    null,                        // no legacy InputRowParser; use the InputFormat path
    jsonFormat,
    rowSchema,
    TransformSpec.NONE,
    temporaryFolder.newFolder(),
    row -> true,                 // accept every row
    meters,
    parseExceptionHandler
);
final List<InputRow> rows = parser.parse(ImmutableList.of(), false);
Assert.assertEquals(0, rows.size());
Mockito.verify(meters).incrementThrownAway();
}
Example usage of org.apache.druid.data.input.InputRow in the druid-io/druid project, taken from the method testStringInputRowParserSerdeMultiCharset of class InputRowParserSerdeTest.
@Test
public void testStringInputRowParserSerdeMultiCharset() throws Exception {
// The parser must round-trip through JSON serde and still parse correctly
// for every supported charset, not just the platform default.
final Charset[] charsetsUnderTest = {
    StandardCharsets.US_ASCII,
    StandardCharsets.ISO_8859_1,
    StandardCharsets.UTF_8,
    StandardCharsets.UTF_16BE,
    StandardCharsets.UTF_16LE,
    StandardCharsets.UTF_16
};
for (Charset charset : charsetsUnderTest) {
  final InputRow row = testCharsetParseHelper(charset);
  Assert.assertEquals(ImmutableList.of("foo", "bar"), row.getDimensions());
  Assert.assertEquals(ImmutableList.of("x"), row.getDimension("foo"));
  Assert.assertEquals(ImmutableList.of("y"), row.getDimension("bar"));
  Assert.assertEquals(DateTimes.of("3000").getMillis(), row.getTimestampFromEpoch());
}
}
Example usage of org.apache.druid.data.input.InputRow in the druid-io/druid project, taken from the method testMapInputRowParserNumbersSerde of class InputRowParserSerdeTest.
@Test
public void testMapInputRowParserNumbersSerde() throws Exception {
// Verify numeric handling after a serde round trip: excluded dimensions stay
// available as metrics/raw values, and numbers in multi-value dimensions are
// stringified.
final JSONParseSpec spec = new JSONParseSpec(
    new TimestampSpec("timemillis", "millis", null),
    DimensionsSpec.builder()
                  .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "values")))
                  .setDimensionExclusions(ImmutableList.of("toobig", "value"))
                  .build(),
    null,
    null,
    null
);
final MapInputRowParser original = new MapInputRowParser(spec);
final MapInputRowParser roundTripped =
    (MapInputRowParser) jsonMapper.readValue(jsonMapper.writeValueAsBytes(original), InputRowParser.class);
final InputRow row = roundTripped.parseBatch(
    ImmutableMap.of(
        "timemillis", 1412705931123L,
        "toobig", 123E64,
        "value", 123.456,
        "long", 123456789000L,
        "values", Lists.newArrayList(1412705931123L, 123.456, 123E45, "hello")
    )
).get(0);
Assert.assertEquals(ImmutableList.of("foo", "values"), row.getDimensions());
// "foo" was listed as a dimension but absent from the input map.
Assert.assertEquals(ImmutableList.of(), row.getDimension("foo"));
Assert.assertEquals(ImmutableList.of("1412705931123", "123.456", "1.23E47", "hello"), row.getDimension("values"));
// 123E64 overflows float range, so the metric view saturates to +Infinity
// while the raw value is preserved.
Assert.assertEquals(Float.POSITIVE_INFINITY, row.getMetric("toobig").floatValue(), 0.0);
Assert.assertEquals(123E64, row.getRaw("toobig"));
Assert.assertEquals(123.456f, row.getMetric("value").floatValue(), 0.0f);
Assert.assertEquals(123456789000L, row.getRaw("long"));
Assert.assertEquals(1.23456791E11f, row.getMetric("long").floatValue(), 0.0f);
Assert.assertEquals(1412705931123L, row.getTimestampFromEpoch());
}
Example usage of org.apache.druid.data.input.InputRow in the druid-io/druid project, taken from the private helper method testCharsetParseHelper of class InputRowParserSerdeTest.
private InputRow testCharsetParseHelper(Charset charset) throws Exception {
// Round-trip a StringInputRowParser through JSON serde, then parse a fixed
// JSON record encoded with the supplied charset and return the single row.
final StringInputRowParser original = new StringInputRowParser(
    new JSONParseSpec(
        new TimestampSpec("timestamp", "iso", null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar"))),
        null,
        null,
        null
    ),
    charset.name()
);
final ByteBufferInputRowParser roundTripped =
    jsonMapper.readValue(jsonMapper.writeValueAsBytes(original), ByteBufferInputRowParser.class);
final byte[] encoded = "{\"foo\":\"x\",\"bar\":\"y\",\"qux\":\"z\",\"timestamp\":\"3000\"}".getBytes(charset);
return roundTripped.parseBatch(ByteBuffer.wrap(encoded)).get(0);
}
Example usage of org.apache.druid.data.input.InputRow in the druid-io/druid project, taken from the method testMapInputRowParserSerde of class InputRowParserSerdeTest.
@Test
public void testMapInputRowParserSerde() throws Exception {
// A MapInputRowParser must survive a JSON serde round trip: declared
// dimensions are kept, undeclared ones ("qux") are dropped, and the posix
// timestamp is converted to epoch millis.
final JSONParseSpec spec = new JSONParseSpec(
    new TimestampSpec("timeposix", "posix", null),
    DimensionsSpec.builder()
                  .setDimensions(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar")))
                  .setDimensionExclusions(ImmutableList.of("baz"))
                  .build(),
    null,
    null,
    null
);
final MapInputRowParser original = new MapInputRowParser(spec);
final MapInputRowParser roundTripped =
    (MapInputRowParser) jsonMapper.readValue(jsonMapper.writeValueAsBytes(original), InputRowParser.class);
final InputRow row =
    roundTripped.parseBatch(ImmutableMap.of("foo", "x", "bar", "y", "qux", "z", "timeposix", "1")).get(0);
Assert.assertEquals(ImmutableList.of("foo", "bar"), row.getDimensions());
Assert.assertEquals(ImmutableList.of("x"), row.getDimension("foo"));
Assert.assertEquals(ImmutableList.of("y"), row.getDimension("bar"));
// posix "1" == 1 second == 1000 ms since epoch.
Assert.assertEquals(1000, row.getTimestampFromEpoch());
}
Aggregations