Example usage of org.apache.druid.data.input.MapBasedRow in the druid project (druid-io):
class OldApiSketchAggregationTest, method testSketchDataIngestAndQuery.
@Test
public void testSketchDataIngestAndQuery() throws Exception {
  // Load the group-by query JSON from the classpath and deserialize it so the
  // expected ResultRow can be built against the same query definition that is run.
  final String groupByQueryString =
      readFileFromClasspathAsString("oldapi/old_sketch_test_data_group_by_query.json");
  final GroupByQuery groupByQuery =
      (GroupByQuery) helper.getObjectMapper().readValue(groupByQueryString, Query.class);

  // Ingest the TSV fixture into a segment and run the group-by query over it.
  // Parameterized Sequence/List (the original used raw types, losing compile-time checks).
  final Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
      new File(
          OldApiSketchAggregationTest.class.getClassLoader()
              .getResource("sketch_test_data.tsv")
              .getFile()
      ),
      readFileFromClasspathAsString("sketch_test_data_record_parser.json"),
      readFileFromClasspathAsString("oldapi/old_sketch_test_data_aggregators.json"),
      0,
      Granularities.NONE,
      1000,
      groupByQueryString
  );

  final List<ResultRow> results = seq.toList();
  Assert.assertEquals(1, results.size());

  // A single result row is expected: all sketch estimates are 50.0, while the
  // A-not-B estimate and the aggregation over a non-existent column are 0.0.
  Assert.assertEquals(
      ResultRow.fromLegacyRow(
          new MapBasedRow(
              DateTimes.of("2014-10-19T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("sids_sketch_count", 50.0)
                  .put("sketchEstimatePostAgg", 50.0)
                  .put("sketchUnionPostAggEstimate", 50.0)
                  .put("sketchIntersectionPostAggEstimate", 50.0)
                  .put("sketchAnotBPostAggEstimate", 0.0)
                  .put("non_existing_col_validation", 0.0)
                  .build()
          ),
          groupByQuery
      ),
      results.get(0)
  );
}
Example usage of org.apache.druid.data.input.MapBasedRow in the druid project (druid-io):
class BloomFilterGroupByQueryTest, method testQuery.
@Test
public void testQuery() throws Exception {
  // Group-by over the full interval, filtered to market=upfront, building a bloom
  // filter aggregation over the "quality" dimension.
  final String queryJson = "{"
      + "\"queryType\": \"groupBy\","
      + "\"dataSource\": \"test_datasource\","
      + "\"granularity\": \"ALL\","
      + "\"dimensions\": [],"
      + "\"filter\":{ \"type\":\"selector\", \"dimension\":\"market\", \"value\":\"upfront\"},"
      + "\"aggregations\": ["
      + " { \"type\": \"bloom\", \"name\": \"blooming_quality\", \"field\": \"quality\" }"
      + "],"
      + "\"intervals\": [ \"1970/2050\" ]"
      + "}";

  final MapBasedRow resultRow = ingestAndQuery(queryJson);
  final BloomKFilter bloom =
      BloomKFilter.deserialize((ByteBuffer) resultRow.getRaw("blooming_quality"));

  // "mezzanine" and "premium" qualities occur for the upfront market; "entertainment" does not.
  Assert.assertTrue(bloom.testString("mezzanine"));
  Assert.assertTrue(bloom.testString("premium"));
  Assert.assertFalse(bloom.testString("entertainment"));
}
Example usage of org.apache.druid.data.input.MapBasedRow in the druid project (druid-io):
class BloomFilterGroupByQueryTest, method testNestedQueryComplex.
@Test
public void testNestedQueryComplex() throws Exception {
  // Guard: this case only applies to the v2 group-by engine.
  if (!isV2) {
    return;
  }

  // Outer group-by re-aggregates the bloom filter produced by the inner group-by
  // (a complex-typed column coming out of a nested query datasource).
  final String nestedQueryJson = "{"
      + "\"queryType\": \"groupBy\","
      + "\"dataSource\": {"
      + "\"type\": \"query\","
      + "\"query\": {"
      + "\"queryType\":\"groupBy\","
      + "\"dataSource\": \"test_datasource\","
      + "\"intervals\": [ \"1970/2050\" ],"
      + "\"granularity\":\"ALL\","
      + "\"dimensions\":[],"
      + "\"filter\":{ \"type\":\"selector\", \"dimension\":\"market\", \"value\":\"upfront\"},"
      + "\"aggregations\": [{ \"type\":\"bloom\", \"name\":\"innerBloom\", \"field\":\"quality\"}]"
      + "}"
      + "},"
      + "\"granularity\": \"ALL\","
      + "\"dimensions\": [],"
      + "\"aggregations\": ["
      + " { \"type\": \"bloom\", \"name\": \"innerBloom\", \"field\": \"innerBloom\" }"
      + "],"
      + "\"intervals\": [ \"1970/2050\" ]"
      + "}";

  final MapBasedRow resultRow = ingestAndQuery(nestedQueryJson);
  final BloomKFilter bloom =
      BloomKFilter.deserialize((ByteBuffer) resultRow.getRaw("innerBloom"));

  // Same membership expectations as the non-nested case.
  Assert.assertTrue(bloom.testString("mezzanine"));
  Assert.assertTrue(bloom.testString("premium"));
  Assert.assertFalse(bloom.testString("entertainment"));
}
Example usage of org.apache.druid.data.input.MapBasedRow in the druid project (druid-io):
class FixedBucketsHistogramAggregationTest, method testIngestWithNullsIgnoredAndQuery.
@Test
public void testIngestWithNullsIgnoredAndQuery() throws Exception {
  MapBasedRow row = ingestAndQuery();
  if (!NullHandling.replaceWithDefault()) {
    // SQL-compatible null handling: null values are ignored by the histogram,
    // so the min reflects actual data.
    Assert.assertEquals(92.782760, row.getMetric("index_min").floatValue(), 0.0001);
    Assert.assertEquals(135.109191, row.getMetric("index_max").floatValue(), 0.0001);
    Assert.assertEquals(135.9499969482422, row.getMetric("index_quantile").floatValue(), 0.0001);
  } else {
    // Default-value mode: nulls become 0.0, dragging the min down to 0.
    // Fixed: the original used assertEquals(Object, Object) with no delta here,
    // which is inconsistent with the sibling assertions and would fail if the
    // metric were boxed as a Float rather than a Double.
    Assert.assertEquals(0.0, row.getMetric("index_min").floatValue(), 0.0001);
    Assert.assertEquals(135.109191, row.getMetric("index_max").floatValue(), 0.0001);
    Assert.assertEquals(135.8699951171875, row.getMetric("index_quantile").floatValue(), 0.0001);
  }
}
Example usage of org.apache.druid.data.input.MapBasedRow in the hive project (apache):
class DruidGroupByQueryRecordReader, method nextKeyValue.
@Override
public boolean nextKeyValue() {
  // No more rows: signal end of input.
  if (!getQueryResultsIterator().hasNext()) {
    return false;
  }
  final Row nextRow = getQueryResultsIterator().next();
  // Druid's Jackson SerDe currently only produces MapBasedRow, so this
  // unchecked cast is considered safe.
  currentRow = (MapBasedRow) nextRow;
  currentEvent = currentRow.getEvent();
  return true;
}
Aggregations