Use of io.druid.data.input.MapBasedRow in project druid by druid-io.
From the class ApproximateHistogramAggregationTest, method testIngestWithNullsToZeroAndQuery:
@Test
public void testIngestWithNullsToZeroAndQuery() throws Exception {
    MapBasedRow row = ingestAndQuery(false);
    Assert.assertEquals(0.0, row.getFloatMetric("index_min"), 0.0001);
    Assert.assertEquals(135.109191, row.getFloatMetric("index_max"), 0.0001);
    Assert.assertEquals(131.428176, row.getFloatMetric("index_quantile"), 0.0001);
}
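These getters are the whole surface the test needs: a MapBasedRow pairs a timestamp with an event map, and getFloatMetric coerces the mapped value to a float. A minimal standalone sketch of that access pattern, assuming druid's data-input module and its Guava and Joda-Time dependencies on the classpath (the class name and values are illustrative, not taken from the test fixture):

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.MapBasedRow;
import org.joda.time.DateTime;

public class MapBasedRowSketch {
    public static void main(String[] args) {
        // A MapBasedRow is just a timestamp plus an event map; getFloatMetric
        // looks up the metric name in that map and coerces the value to float.
        MapBasedRow row = new MapBasedRow(
            new DateTime("2014-10-19T00:00:00.000Z"),
            ImmutableMap.<String, Object>of("index_min", 0.0, "index_max", 135.109191));
        System.out.println(row.getFloatMetric("index_min"));  // 0.0
        System.out.println(row.getFloatMetric("index_max"));  // ~135.10919
    }
}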
Use of io.druid.data.input.MapBasedRow in project druid by druid-io.
From the class OldApiSketchAggregationTest, method testSimpleDataIngestAndQuery:
@Test
public void testSimpleDataIngestAndQuery() throws Exception {
    Sequence seq = helper.createIndexAndRunQueryOnSegment(
        new File(this.getClass().getClassLoader().getResource("simple_test_data.tsv").getFile()),
        readFileFromClasspathAsString("simple_test_data_record_parser.json"),
        readFileFromClasspathAsString("oldapi/old_simple_test_data_aggregators.json"),
        0, Granularities.NONE, 5,
        readFileFromClasspathAsString("oldapi/old_simple_test_data_group_by_query.json"));
    List results = Sequences.toList(seq, Lists.newArrayList());
    Assert.assertEquals(1, results.size());
    Assert.assertEquals(
        new MapBasedRow(DateTime.parse("2014-10-19T00:00:00.000Z"), ImmutableMap.<String, Object>builder()
            .put("sketch_count", 50.0).put("sketchEstimatePostAgg", 50.0)
            .put("sketchUnionPostAggEstimate", 50.0).put("sketchIntersectionPostAggEstimate", 50.0)
            .put("sketchAnotBPostAggEstimate", 0.0).put("non_existing_col_validation", 0.0)
            .build()),
        results.get(0));
}
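Note that the final assertion compares a freshly constructed MapBasedRow against the query result, which only works because MapBasedRow defines value equality over its timestamp and event map. A minimal sketch of that property (the metric name and value are illustrative):

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.MapBasedRow;
import org.joda.time.DateTime;

public class RowEqualitySketch {
    public static void main(String[] args) {
        DateTime t = DateTime.parse("2014-10-19T00:00:00.000Z");
        // Two independently built rows with the same timestamp and event map compare equal.
        MapBasedRow a = new MapBasedRow(t, ImmutableMap.<String, Object>of("sketch_count", 50.0));
        MapBasedRow b = new MapBasedRow(t, ImmutableMap.<String, Object>of("sketch_count", 50.0));
        System.out.println(a.equals(b));  // true
    }
}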
Use of io.druid.data.input.MapBasedRow in project druid by druid-io.
From the class CachingClusteredClientTest, method makeGroupByResults:
private Iterable<Row> makeGroupByResults(Object... objects) {
    List<Row> retVal = Lists.newArrayList();
    int index = 0;
    // The varargs alternate: a DateTime timestamp, then the event map for that row.
    while (index < objects.length) {
        DateTime timestamp = (DateTime) objects[index++];
        retVal.add(new MapBasedRow(timestamp, (Map<String, Object>) objects[index++]));
    }
    return retVal;
}
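A hypothetical call site, to make the alternating varargs contract concrete; each timestamp is immediately followed by the event map for that row (the dates and map contents are illustrative, not taken from the test):

Iterable<Row> rows = makeGroupByResults(
    new DateTime("2011-01-01T00:00:00.000Z"), ImmutableMap.<String, Object>of("output", "a", "rows", 1L),
    new DateTime("2011-01-02T00:00:00.000Z"), ImmutableMap.<String, Object>of("output", "b", "rows", 2L));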
Use of io.druid.data.input.MapBasedRow in project druid by druid-io.
From the class FinalizingFieldAccessPostAggregatorTest, method testIngestAndQueryWithArithmeticPostAggregator:
@Test
public void testIngestAndQueryWithArithmeticPostAggregator() throws Exception {
    AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
        Lists.newArrayList(new AggregatorsModule()), GroupByQueryRunnerTest.testConfigs().get(0), tempFoler);
    String metricSpec = "[{\"type\": \"hyperUnique\", \"name\": \"hll_market\", \"fieldName\": \"market\"},"
        + "{\"type\": \"hyperUnique\", \"name\": \"hll_quality\", \"fieldName\": \"quality\"}]";
    String parseSpec = "{\"type\" : \"string\", \"parseSpec\" : {\"format\" : \"tsv\","
        + "\"timestampSpec\" : {\"column\" : \"timestamp\", \"format\" : \"auto\"},"
        + "\"dimensionsSpec\" : {\"dimensions\": [], \"dimensionExclusions\" : [], \"spatialDimensions\" : []},"
        + "\"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]}}";
    String query = "{\"queryType\": \"groupBy\", \"dataSource\": \"test_datasource\", \"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"aggregations\": [{\"type\": \"hyperUnique\", \"name\": \"hll_market\", \"fieldName\": \"hll_market\"},"
        + "{\"type\": \"hyperUnique\", \"name\": \"hll_quality\", \"fieldName\": \"hll_quality\"}],"
        + "\"postAggregations\": [{\"type\": \"arithmetic\", \"name\": \"uniq_add\", \"fn\": \"+\","
        + "\"fields\": [{\"type\": \"finalizingFieldAccess\", \"name\": \"uniq_market\", \"fieldName\": \"hll_market\"},"
        + "{\"type\": \"finalizingFieldAccess\", \"name\": \"uniq_quality\", \"fieldName\": \"hll_quality\"}]}],"
        + "\"intervals\": [\"1970/2050\"]}";
    Sequence seq = helper.createIndexAndRunQueryOnSegment(
        new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),
        parseSpec, metricSpec, 0, Granularities.NONE, 50000, query);
    MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
    Assert.assertEquals(3.0, row.getFloatMetric("hll_market"), 0.1);
    Assert.assertEquals(9.0, row.getFloatMetric("hll_quality"), 0.1);
    Assert.assertEquals(12.0, row.getFloatMetric("uniq_add"), 0.1);
}
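For readability, here is the groupBy query assembled by the string concatenation above, written out as plain JSON:

{
  "queryType": "groupBy",
  "dataSource": "test_datasource",
  "granularity": "ALL",
  "dimensions": [],
  "aggregations": [
    { "type": "hyperUnique", "name": "hll_market", "fieldName": "hll_market" },
    { "type": "hyperUnique", "name": "hll_quality", "fieldName": "hll_quality" }
  ],
  "postAggregations": [
    {
      "type": "arithmetic",
      "name": "uniq_add",
      "fn": "+",
      "fields": [
        { "type": "finalizingFieldAccess", "name": "uniq_market", "fieldName": "hll_market" },
        { "type": "finalizingFieldAccess", "name": "uniq_quality", "fieldName": "hll_quality" }
      ]
    }
  ],
  "intervals": ["1970/2050"]
}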
Use of io.druid.data.input.MapBasedRow in project druid by druid-io.
From the class GroupByQueryRunnerTestHelper, method createExpectedRows:
public static List<Row> createExpectedRows(String[] columnNames, Object[]... values) {
    // Locate the time column; it becomes the row timestamp rather than an event entry.
    int timeIndex = Arrays.asList(columnNames).indexOf(Column.TIME_COLUMN_NAME);
    Preconditions.checkArgument(timeIndex >= 0);
    List<Row> expected = Lists.newArrayList();
    for (Object[] value : values) {
        Preconditions.checkArgument(value.length == columnNames.length);
        Map<String, Object> theVals = Maps.newHashMapWithExpectedSize(value.length);
        for (int i = 0; i < columnNames.length; i++) {
            if (i != timeIndex) {
                theVals.put(columnNames[i], value[i]);
            }
        }
        expected.add(new MapBasedRow(new DateTime(value[timeIndex]), theVals));
    }
    return expected;
}
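A hypothetical invocation, showing the contract: columnNames must include Druid's time column ("__time", i.e. Column.TIME_COLUMN_NAME), and each values array must line up positionally with columnNames (the other column names and values here are illustrative):

List<Row> expected = GroupByQueryRunnerTestHelper.createExpectedRows(
    new String[]{"__time", "alias", "rows"},
    new Object[]{"2011-04-01", "automotive", 1L},
    new Object[]{"2011-04-01", "business", 1L});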