Use of io.druid.data.input.Row in project druid by druid-io: class IncrementalIndexTest, method testCaseSensitivity.
@Test
public void testCaseSensitivity() throws Exception {
  long timestamp = System.currentTimeMillis();
  IncrementalIndex index = closer.closeLater(indexCreator.createIndex(defaultAggregatorFactories));
  populateIndex(timestamp, index);
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), index.getDimensionNames());
  Assert.assertEquals(2, index.size());

  final Iterator<Row> rows = index.iterator();
  Row row = rows.next();
  Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
  Assert.assertEquals(Arrays.asList("1"), row.getDimension("dim1"));
  Assert.assertEquals(Arrays.asList("2"), row.getDimension("dim2"));

  row = rows.next();
  Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
  Assert.assertEquals(Arrays.asList("3"), row.getDimension("dim1"));
  Assert.assertEquals(Arrays.asList("4"), row.getDimension("dim2"));
}
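The populateIndex helper called above is not part of this excerpt. A minimal hypothetical sketch of how rows like these could be fed into the index via io.druid.data.input.MapBasedInputRow is shown below; the actual helper presumably also varies dimension-name casing (per the test name), which is not reproduced here.

// Hypothetical sketch only, consistent with the assertions above; not the actual helper.
private static void populateIndex(long timestamp, IncrementalIndex index) throws IndexSizeExceededException {
  index.add(new MapBasedInputRow(
      timestamp,
      Arrays.asList("dim1", "dim2"),
      ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2")
  ));
  index.add(new MapBasedInputRow(
      timestamp,
      Arrays.asList("dim1", "dim2"),
      ImmutableMap.<String, Object>of("dim1", "3", "dim2", "4")
  ));
}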
Use of io.druid.data.input.Row in project hive by apache: class TestDruidSerDe, method deserializeQueryResults.
private static void deserializeQueryResults(DruidSerDe serDe, String queryType, String jsonQuery,
    String resultString, Object[][] records)
    throws SerDeException, JsonParseException, JsonMappingException, IOException, NoSuchFieldException,
    SecurityException, IllegalArgumentException, IllegalAccessException, InterruptedException,
    NoSuchMethodException, InvocationTargetException {
  // Initialize
  Query<?> query = null;
  DruidQueryRecordReader<?, ?> reader = null;
  List<?> resultsList = null;
  ObjectMapper mapper = new DefaultObjectMapper();
  switch (queryType) {
    case Query.TIMESERIES:
      query = mapper.readValue(jsonQuery, TimeseriesQuery.class);
      reader = new DruidTimeseriesQueryRecordReader();
      resultsList = mapper.readValue(resultString,
          new TypeReference<List<Result<TimeseriesResultValue>>>() {
          });
      break;
    case Query.TOPN:
      query = mapper.readValue(jsonQuery, TopNQuery.class);
      reader = new DruidTopNQueryRecordReader();
      resultsList = mapper.readValue(resultString,
          new TypeReference<List<Result<TopNResultValue>>>() {
          });
      break;
    case Query.GROUP_BY:
      query = mapper.readValue(jsonQuery, GroupByQuery.class);
      reader = new DruidGroupByQueryRecordReader();
      resultsList = mapper.readValue(resultString,
          new TypeReference<List<Row>>() {
          });
      break;
    case Query.SELECT:
      query = mapper.readValue(jsonQuery, SelectQuery.class);
      reader = new DruidSelectQueryRecordReader();
      resultsList = mapper.readValue(resultString,
          new TypeReference<List<Result<SelectResultValue>>>() {
          });
      break;
  }
  // Set query and fields access
  Field field1 = DruidQueryRecordReader.class.getDeclaredField("query");
  field1.setAccessible(true);
  field1.set(reader, query);
  if (reader instanceof DruidGroupByQueryRecordReader) {
    Method method1 = DruidGroupByQueryRecordReader.class.getDeclaredMethod("initExtractors");
    method1.setAccessible(true);
    method1.invoke(reader);
  }
  Field field2 = DruidQueryRecordReader.class.getDeclaredField("results");
  field2.setAccessible(true);
  // Get the row structure
  StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
  List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
  // Check mapred
  Iterator<?> results = resultsList.iterator();
  field2.set(reader, results);
  DruidWritable writable = new DruidWritable();
  int pos = 0;
  while (reader.next(NullWritable.get(), writable)) {
    Object row = serDe.deserialize(writable);
    Object[] expectedFieldsData = records[pos];
    assertEquals(expectedFieldsData.length, fieldRefs.size());
    for (int i = 0; i < fieldRefs.size(); i++) {
      Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
      assertEquals("Field " + i, expectedFieldsData[i], fieldData);
    }
    pos++;
  }
  assertEquals(pos, records.length);
  // Check mapreduce
  results = resultsList.iterator();
  field2.set(reader, results);
  pos = 0;
  while (reader.nextKeyValue()) {
    Object row = serDe.deserialize(reader.getCurrentValue());
    Object[] expectedFieldsData = records[pos];
    assertEquals(expectedFieldsData.length, fieldRefs.size());
    for (int i = 0; i < fieldRefs.size(); i++) {
      Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
      assertEquals("Field " + i, expectedFieldsData[i], fieldData);
    }
    pos++;
  }
  assertEquals(pos, records.length);
}
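In the GROUP_BY branch above, the result string is Jackson-deserialized directly into List<Row>. A minimal standalone sketch of that step follows; the JSON literal is illustrative (it assumes Druid's usual version/timestamp/event groupBy result envelope) and is not a fixture from TestDruidSerDe.

// Sketch: deserializing a groupBy result into io.druid.data.input.Row with the same mapper/TypeReference pattern.
ObjectMapper mapper = new DefaultObjectMapper();
String groupByJson =
    "[{\"version\":\"v1\",\"timestamp\":\"2014-10-19T00:00:00.000Z\","
        + "\"event\":{\"product\":\"product_1\",\"rows\":2}}]";
List<Row> rows = mapper.readValue(groupByJson, new TypeReference<List<Row>>() {
});
// Each element is a MapBasedRow; event values are read back by column name.
Row first = rows.get(0);
System.out.println(first.getTimestampFromEpoch() + " -> " + first.getRaw("product"));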
Use of io.druid.data.input.Row in project druid by druid-io: class SketchAggregationTest, method testRetentionDataIngestAndGpByQuery.
@Test
public void testRetentionDataIngestAndGpByQuery() throws Exception {
  Sequence<Row> seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("retention_test_data.tsv").getFile()),
      readFileFromClasspathAsString("simple_test_data_record_parser.json"),
      readFileFromClasspathAsString("simple_test_data_aggregators.json"),
      0,
      Granularities.NONE,
      5,
      readFileFromClasspathAsString("retention_test_data_group_by_query.json")
  );
  List<Row> results = Sequences.toList(seq, Lists.<Row>newArrayList());
  Assert.assertEquals(1, results.size());
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedRow(
              DateTime.parse("2014-10-19T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("product", "product_1")
                  .put("p1_unique_country_day_1", 20.0)
                  .put("p1_unique_country_day_2", 20.0)
                  .put("p1_unique_country_day_3", 10.0)
                  .put("sketchEstimatePostAgg", 20.0)
                  .put("sketchIntersectionPostAggEstimate1", 10.0)
                  .put("sketchIntersectionPostAggEstimate2", 5.0)
                  .put("non_existing_col_validation", 0.0)
                  .build()
          )
      ),
      results
  );
}
Use of io.druid.data.input.Row in project druid by druid-io: class SketchAggregationTest, method testSketchDataIngestAndGpByQuery.
@Test
public void testSketchDataIngestAndGpByQuery() throws Exception {
  Sequence<Row> seq = helper.createIndexAndRunQueryOnSegment(
      new File(SketchAggregationTest.class.getClassLoader().getResource("sketch_test_data.tsv").getFile()),
      readFileFromClasspathAsString("sketch_test_data_record_parser.json"),
      readFileFromClasspathAsString("sketch_test_data_aggregators.json"),
      0,
      Granularities.NONE,
      5,
      readFileFromClasspathAsString("sketch_test_data_group_by_query.json")
  );
  List<Row> results = Sequences.toList(seq, Lists.<Row>newArrayList());
  Assert.assertEquals(1, results.size());
  Assert.assertEquals(
      new MapBasedRow(
          DateTime.parse("2014-10-19T00:00:00.000Z"),
          ImmutableMap.<String, Object>builder()
              .put("sids_sketch_count", 50.0)
              .put("sids_sketch_count_with_err", new SketchEstimateWithErrorBounds(50.0, 50.0, 50.0, 2))
              .put("sketchEstimatePostAgg", 50.0)
              .put("sketchEstimatePostAggWithErrorBounds", new SketchEstimateWithErrorBounds(50.0, 50.0, 50.0, 2))
              .put("sketchUnionPostAggEstimate", 50.0)
              .put("sketchIntersectionPostAggEstimate", 50.0)
              .put("sketchAnotBPostAggEstimate", 0.0)
              .put("non_existing_col_validation", 0.0)
              .build()
      ),
      results.get(0)
  );
}
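Rather than comparing the whole expected MapBasedRow at once, individual aggregator outputs can also be pulled from the result Row. A small hedged sketch, reusing the metric names asserted above (the delta value is illustrative):

// Sketch: reading single values from the io.druid.data.input.Row result.
Row row = results.get(0);
Assert.assertEquals(50.0, row.getFloatMetric("sids_sketch_count"), 0.001);
Assert.assertEquals(
    new SketchEstimateWithErrorBounds(50.0, 50.0, 50.0, 2),
    row.getRaw("sids_sketch_count_with_err")
);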
Use of io.druid.data.input.Row in project druid by druid-io: class CachingClusteredClientTest, method makeGroupByResults.
private Iterable<Row> makeGroupByResults(Object... objects) {
  List<Row> retVal = Lists.newArrayList();
  int index = 0;
  while (index < objects.length) {
    DateTime timestamp = (DateTime) objects[index++];
    retVal.add(new MapBasedRow(timestamp, (Map<String, Object>) objects[index++]));
  }
  return retVal;
}
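The helper consumes its varargs in (timestamp, event-map) pairs, so a call such as the following hypothetical one yields two group-by Rows; the timestamps and values are illustrative, not taken from the test.

// Hypothetical usage: arguments alternate DateTime timestamps and event maps.
Iterable<Row> rows = makeGroupByResults(
    new DateTime("2011-01-01T00:00:00.000Z"), ImmutableMap.<String, Object>of("a", "c", "rows", 1L),
    new DateTime("2011-01-02T00:00:00.000Z"), ImmutableMap.<String, Object>of("a", "d", "rows", 2L)
);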