Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
Class CompatParquetInputTest, method testProtoStructWithArray:
@Test
public void testProtoStructWithArray() throws IOException, InterruptedException {
  // "REPEATED not supported outside LIST or MAP. Type: repeated int32 repeatedPrimitive"
  if (parserType.equals(ParquetExtensionsModule.PARQUET_AVRO_INPUT_PARSER_TYPE)) {
    return;
  }
  HadoopDruidIndexerConfig config = transformHadoopDruidIndexerConfig("example/compat/proto_struct_with_array.json", parserType, true);
  config.intoConfiguration(job);
  List<InputRow> rows = getAllRows(parserType, config);
  Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
  Assert.assertEquals("10", rows.get(0).getDimension("optionalPrimitive").get(0));
  Assert.assertEquals("9", rows.get(0).getDimension("requiredPrimitive").get(0));
  Assert.assertTrue(rows.get(0).getDimension("repeatedPrimitive").isEmpty());
  Assert.assertTrue(rows.get(0).getDimension("extractedOptional").isEmpty());
  Assert.assertEquals("9", rows.get(0).getDimension("extractedRequired").get(0));
  Assert.assertEquals("9", rows.get(0).getDimension("extractedRepeated").get(0));
  Assert.assertEquals("10", rows.get(0).getDimension("extractedRepeated").get(1));
}
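The assertions above cover most of the InputRow accessor surface (getTimestamp, getDimensions, getDimension, getMetric). As a minimal sketch, not part of the original test class, the same accessors can be dumped for any parsed row; the helper name dumpRow and the metricName parameter are illustrative assumptions:

// Sketch only: prints the fields the tests above assert on, using just the
// InputRow accessors already visible in these snippets.
// Requires org.apache.druid.data.input.InputRow on the classpath.
static void dumpRow(InputRow row, String metricName) {
  System.out.println("timestamp = " + row.getTimestamp());
  for (String dim : row.getDimensions()) {
    System.out.println(dim + " = " + row.getDimension(dim));
  }
  System.out.println(metricName + " = " + row.getMetric(metricName));
}

Calling dumpRow(rows.get(0), "metric1") after getAllRows would print the timestamp and every discovered dimension; the metric name depends on the ingestion spec in proto_struct_with_array.json, so treat "metric1" as a placeholder.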
Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
Class CompatParquetInputTest, method testParquet1217:
@Test
public void testParquet1217() throws IOException, InterruptedException {
  HadoopDruidIndexerConfig config = transformHadoopDruidIndexerConfig("example/compat/parquet_1217.json", parserType, true);
  config.intoConfiguration(job);
  Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
  List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
  List<InputRow> rows2 = getAllRows(parserType, config);
  Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
  Assert.assertEquals("-1", rows.get(0).getDimension("col").get(0));
  Assert.assertEquals(-1, rows.get(0).getMetric("metric1"));
  Assert.assertTrue(rows2.get(2).getDimension("col").isEmpty());
}
Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
Class DecimalParquetInputTest, method testReadParquetDecimali64:
@Test
public void testReadParquetDecimali64() throws IOException, InterruptedException {
  // parquet-avro does not correctly convert decimal types
  if (parserType.equals(ParquetExtensionsModule.PARQUET_AVRO_INPUT_PARSER_TYPE)) {
    return;
  }
  HadoopDruidIndexerConfig config = transformHadoopDruidIndexerConfig("example/decimals/dec_in_i64.json", parserType, true);
  List<InputRow> rows = getAllRows(parserType, config);
  Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
  Assert.assertEquals("100", rows.get(0).getDimension("i64_dec").get(0));
  Assert.assertEquals(new BigDecimal(100), rows.get(0).getMetric("metric1"));
}
Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
Class DecimalParquetInputTest, method testReadParquetDecimalFixedLen:
@Test
public void testReadParquetDecimalFixedLen() throws IOException, InterruptedException {
  // parquet-avro does not correctly convert decimal types
  if (parserType.equals(ParquetExtensionsModule.PARQUET_AVRO_INPUT_PARSER_TYPE)) {
    return;
  }
  HadoopDruidIndexerConfig config = transformHadoopDruidIndexerConfig("example/decimals/dec_in_fix_len.json", parserType, true);
  List<InputRow> rows = getAllRows(parserType, config);
  Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
  Assert.assertEquals("1.0", rows.get(0).getDimension("fixed_len_dec").get(0));
  Assert.assertEquals(new BigDecimal("1.0"), rows.get(0).getMetric("metric1"));
}
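One detail behind the fixed-length decimal assertion: JUnit's assertEquals relies on BigDecimal.equals, which is scale-sensitive, so comparing against new BigDecimal("1.0") only passes when the parser preserves the declared scale of the column. A short standalone illustration (plain Java, not taken from the test):

import java.math.BigDecimal;

public class ScaleDemo {
  public static void main(String[] args) {
    BigDecimal a = new BigDecimal("1.0");
    BigDecimal b = new BigDecimal("1.00");
    System.out.println(a.equals(b));    // false: equals compares value and scale
    System.out.println(a.compareTo(b)); // 0: numerically equal
  }
}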
Use of org.apache.druid.data.input.InputRow in project druid by druid-io.
Class FlattenSpecParquetInputTest, method testNested1NoFlattenSpec:
@Test
public void testNested1NoFlattenSpec() throws IOException, InterruptedException {
  HadoopDruidIndexerConfig config = transformHadoopDruidIndexerConfig("example/flattening/nested_1.json", parserType, false);
  config.intoConfiguration(job);
  Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
  List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
  Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString());
  Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
  List<String> dims = rows.get(0).getDimensions();
  Assert.assertFalse(dims.contains("dim2"));
  Assert.assertFalse(dims.contains("dim3"));
  Assert.assertFalse(dims.contains("listDim"));
  Assert.assertFalse(dims.contains("nestedData"));
  Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
}
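If the same absence checks are needed in other no-flattenSpec tests, a hypothetical helper (assumed here, not present in the Druid test classes) keeps them in one place:

// Hypothetical helper: asserts that none of the given column names were
// discovered as dimensions when no flattenSpec is configured.
static void assertDimensionsAbsent(InputRow row, String... excluded) {
  List<String> dims = row.getDimensions();
  for (String name : excluded) {
    Assert.assertFalse("unexpected dimension: " + name, dims.contains(name));
  }
}

With it, the four assertFalse calls above collapse to assertDimensionsAbsent(rows.get(0), "dim2", "dim3", "listDim", "nestedData").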