Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
From the class IncrementalIndexStorageAdapterTest, method testSanity:
@Test
public void testSanity() throws Exception
{
  IncrementalIndex index = indexCreator.createIndex();
  index.add(new MapBasedInputRow(
      new DateTime().minus(1).getMillis(),
      Lists.newArrayList("billy"),
      ImmutableMap.<String, Object>of("billy", "hi")
  ));
  index.add(new MapBasedInputRow(
      new DateTime().minus(1).getMillis(),
      Lists.newArrayList("sally"),
      ImmutableMap.<String, Object>of("sally", "bo")
  ));

  GroupByQueryEngine engine = makeGroupByQueryEngine();

  final Sequence<Row> rows = engine.process(
      GroupByQuery.builder()
                  .setDataSource("test")
                  .setGranularity(Granularities.ALL)
                  .setInterval(new Interval(0, new DateTime().getMillis()))
                  .addDimension("billy")
                  .addDimension("sally")
                  .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                  .build(),
      new IncrementalIndexStorageAdapter(index)
  );

  final ArrayList<Row> results = Sequences.toList(rows, Lists.<Row>newArrayList());
  Assert.assertEquals(2, results.size());

  MapBasedRow row = (MapBasedRow) results.get(0);
  Assert.assertEquals(ImmutableMap.of("sally", "bo", "cnt", 1L), row.getEvent());

  row = (MapBasedRow) results.get(1);
  Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());
}
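For context, each MapBasedInputRow above bundles three things: an epoch timestamp, the list of dimension names, and the event map holding the raw values. A minimal, hypothetical sketch of that shape (standalone, not part of the test; it assumes the same io.druid and Guava imports used throughout):

// Build a row and read a dimension back; getDimension() coerces the
// underlying event value to a List<String>.
InputRow row = new MapBasedInputRow(
    System.currentTimeMillis(),                      // event timestamp (millis)
    Lists.newArrayList("billy"),                     // declared dimension names
    ImmutableMap.<String, Object>of("billy", "hi")   // raw event values
);
Assert.assertEquals(Arrays.asList("hi"), row.getDimension("billy"));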
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
From the class IncrementalIndexStorageAdapterTest, method testResetSanity:
@Test
public void testResetSanity() throws IOException
{
  IncrementalIndex index = indexCreator.createIndex();
  DateTime t = DateTime.now();
  Interval interval = new Interval(t.minusMinutes(1), t.plusMinutes(1));

  index.add(new MapBasedInputRow(
      t.minus(1).getMillis(),
      Lists.newArrayList("billy"),
      ImmutableMap.<String, Object>of("billy", "hi")
  ));
  index.add(new MapBasedInputRow(
      t.minus(1).getMillis(),
      Lists.newArrayList("sally"),
      ImmutableMap.<String, Object>of("sally", "bo")
  ));

  IncrementalIndexStorageAdapter adapter = new IncrementalIndexStorageAdapter(index);

  for (boolean descending : Arrays.asList(false, true)) {
    Sequence<Cursor> cursorSequence = adapter.makeCursors(
        new SelectorFilter("sally", "bo"),
        interval,
        VirtualColumns.EMPTY,
        Granularities.NONE,
        descending
    );
    Cursor cursor = Sequences.toList(Sequences.limit(cursorSequence, 1), Lists.<Cursor>newArrayList()).get(0);

    DimensionSelector dimSelector;
    dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
    Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0)));

    index.add(new MapBasedInputRow(
        t.minus(1).getMillis(),
        Lists.newArrayList("sally"),
        ImmutableMap.<String, Object>of("sally", "ah")
    ));

    // Cursor reset should not be affected by out of order values
    cursor.reset();

    dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
    Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0)));
  }
}
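The point of the test is that a row added after a cursor was created (here the out-of-order "ah" value) must not leak into that cursor once it is reset. A hypothetical consumer loop, assuming a cursor and dimSelector obtained exactly as in the loop above:

// Walk every row the cursor matches, then rewind. reset() returns the
// cursor to the first matching row of the snapshot it was created on.
while (!cursor.isDone()) {
  System.out.println(dimSelector.lookupName(dimSelector.getRow().get(0)));
  cursor.advance();
}
cursor.reset();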
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
From the class IncrementalIndexTest, method testFilteredAggregators:
@Test
public void testFilteredAggregators() throws Exception
{
  long timestamp = System.currentTimeMillis();
  IncrementalIndex index = closer.closeLater(
      indexCreator.createIndex(
          new AggregatorFactory[]{
              new CountAggregatorFactory("count"),
              new FilteredAggregatorFactory(
                  new CountAggregatorFactory("count_selector_filtered"),
                  new SelectorDimFilter("dim2", "2", null)
              ),
              new FilteredAggregatorFactory(
                  new CountAggregatorFactory("count_bound_filtered"),
                  new BoundDimFilter("dim2", "2", "3", false, true, null, null, StringComparators.NUMERIC)
              ),
              new FilteredAggregatorFactory(
                  new CountAggregatorFactory("count_multivaldim_filtered"),
                  new SelectorDimFilter("dim3", "b", null)
              ),
              new FilteredAggregatorFactory(
                  new CountAggregatorFactory("count_numeric_filtered"),
                  new SelectorDimFilter("met1", "11", null)
              )
          }
      )
  );

  index.add(new MapBasedInputRow(
      timestamp,
      Arrays.asList("dim1", "dim2", "dim3"),
      ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2", "dim3", Lists.newArrayList("b", "a"), "met1", 10)
  ));
  index.add(new MapBasedInputRow(
      timestamp,
      Arrays.asList("dim1", "dim2", "dim3"),
      ImmutableMap.<String, Object>of("dim1", "3", "dim2", "4", "dim3", Lists.newArrayList("c", "d"), "met1", 11)
  ));

  Assert.assertEquals(Arrays.asList("dim1", "dim2", "dim3"), index.getDimensionNames());
  Assert.assertEquals(
      Arrays.asList("count", "count_selector_filtered", "count_bound_filtered", "count_multivaldim_filtered", "count_numeric_filtered"),
      index.getMetricNames()
  );
  Assert.assertEquals(2, index.size());

  final Iterator<Row> rows = index.iterator();
  Row row = rows.next();
  Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
  Assert.assertEquals(Arrays.asList("1"), row.getDimension("dim1"));
  Assert.assertEquals(Arrays.asList("2"), row.getDimension("dim2"));
  Assert.assertEquals(Arrays.asList("a", "b"), row.getDimension("dim3"));
  Assert.assertEquals(1L, row.getLongMetric("count"));
  Assert.assertEquals(1L, row.getLongMetric("count_selector_filtered"));
  Assert.assertEquals(1L, row.getLongMetric("count_bound_filtered"));
  Assert.assertEquals(1L, row.getLongMetric("count_multivaldim_filtered"));
  Assert.assertEquals(0L, row.getLongMetric("count_numeric_filtered"));

  row = rows.next();
  Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
  Assert.assertEquals(Arrays.asList("3"), row.getDimension("dim1"));
  Assert.assertEquals(Arrays.asList("4"), row.getDimension("dim2"));
  Assert.assertEquals(Arrays.asList("c", "d"), row.getDimension("dim3"));
  Assert.assertEquals(1L, row.getLongMetric("count"));
  Assert.assertEquals(0L, row.getLongMetric("count_selector_filtered"));
  Assert.assertEquals(0L, row.getLongMetric("count_bound_filtered"));
  Assert.assertEquals(0L, row.getLongMetric("count_multivaldim_filtered"));
  Assert.assertEquals(1L, row.getLongMetric("count_numeric_filtered"));
}
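A FilteredAggregatorFactory wraps a delegate aggregator and only feeds it rows matching its DimFilter, which is why count_numeric_filtered is 0 for the first row (met1 is 10, not 11) and 1 for the second. A hypothetical extra aggregator in the same style (the name count_dim1_filtered is made up for illustration):

// Counts only rows where dim1 equals "1"; the delegate can be any
// AggregatorFactory and the filter any ordinary DimFilter.
AggregatorFactory filteredCount = new FilteredAggregatorFactory(
    new CountAggregatorFactory("count_dim1_filtered"),
    new SelectorDimFilter("dim1", "1", null)
);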
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
From the class DistinctCountGroupByQueryTest, method testGroupByWithDistinctCountAgg:
@Test
public void testGroupByWithDistinctCountAgg() throws Exception
{
  final GroupByQueryConfig config = new GroupByQueryConfig();
  config.setMaxIntermediateRows(10000);
  final GroupByQueryRunnerFactory factory = GroupByQueryRunnerTest.makeQueryRunnerFactory(config);

  IncrementalIndex index = new OnheapIncrementalIndex(
      0, Granularities.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
  );
  String visitor_id = "visitor_id";
  String client_type = "client_type";
  long timestamp = System.currentTimeMillis();

  index.add(new MapBasedInputRow(
      timestamp,
      Lists.newArrayList(visitor_id, client_type),
      ImmutableMap.<String, Object>of(visitor_id, "0", client_type, "iphone")
  ));
  index.add(new MapBasedInputRow(
      timestamp + 1,
      Lists.newArrayList(visitor_id, client_type),
      ImmutableMap.<String, Object>of(visitor_id, "1", client_type, "iphone")
  ));
  index.add(new MapBasedInputRow(
      timestamp + 2,
      Lists.newArrayList(visitor_id, client_type),
      ImmutableMap.<String, Object>of(visitor_id, "2", client_type, "android")
  ));

  GroupByQuery query = new GroupByQuery.Builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setGranularity(QueryRunnerTestHelper.allGran)
      .setDimensions(Arrays.<DimensionSpec>asList(new DefaultDimensionSpec(client_type, client_type)))
      .setInterval(QueryRunnerTestHelper.fullOnInterval)
      .setLimitSpec(
          new DefaultLimitSpec(
              Lists.newArrayList(new OrderByColumnSpec(client_type, OrderByColumnSpec.Direction.DESCENDING)),
              10
          )
      )
      .setAggregatorSpecs(
          Lists.newArrayList(
              QueryRunnerTestHelper.rowsCount,
              new DistinctCountAggregatorFactory("UV", visitor_id, null)
          )
      )
      .build();

  final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null);
  Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, factory.createRunner(incrementalIndexSegment), query);

  List<Row> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", client_type, "iphone", "UV", 2L, "rows", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", client_type, "android", "UV", 1L, "rows", 1L)
  );
  TestHelper.assertExpectedObjects(expectedResults, results, "distinct-count");
}
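The UV metric comes from DistinctCountAggregatorFactory(name, fieldName, bitMapFactory); the two "iphone" rows carry distinct visitor_ids, hence UV of 2 for that group. Passing null for the bitmap factory, as above, appears to fall back to the extension's default implementation, though that is an assumption worth verifying against the version in use:

// Hypothetical standalone construction of the same aggregator.
AggregatorFactory uv = new DistinctCountAggregatorFactory(
    "UV",          // output metric name
    "visitor_id",  // dimension whose distinct values are counted
    null           // bitmap factory; null selects the default
);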
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
From the class DistinctCountTimeseriesQueryTest, method testTopNWithDistinctCountAgg:
@Test
public void testTopNWithDistinctCountAgg() throws Exception
{
  TimeseriesQueryEngine engine = new TimeseriesQueryEngine();

  IncrementalIndex index = new OnheapIncrementalIndex(
      0, Granularities.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
  );
  String visitor_id = "visitor_id";
  String client_type = "client_type";
  DateTime time = new DateTime("2016-03-04T00:00:00.000Z");
  long timestamp = time.getMillis();

  index.add(new MapBasedInputRow(
      timestamp,
      Lists.newArrayList(visitor_id, client_type),
      ImmutableMap.<String, Object>of(visitor_id, "0", client_type, "iphone")
  ));
  index.add(new MapBasedInputRow(
      timestamp,
      Lists.newArrayList(visitor_id, client_type),
      ImmutableMap.<String, Object>of(visitor_id, "1", client_type, "iphone")
  ));
  index.add(new MapBasedInputRow(
      timestamp,
      Lists.newArrayList(visitor_id, client_type),
      ImmutableMap.<String, Object>of(visitor_id, "2", client_type, "android")
  ));

  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource(QueryRunnerTestHelper.dataSource)
                                .granularity(QueryRunnerTestHelper.allGran)
                                .intervals(QueryRunnerTestHelper.fullOnInterval)
                                .aggregators(
                                    Lists.newArrayList(
                                        QueryRunnerTestHelper.rowsCount,
                                        new DistinctCountAggregatorFactory("UV", visitor_id, null)
                                    )
                                )
                                .build();

  final Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
      engine.process(query, new IncrementalIndexStorageAdapter(index)),
      Lists.<Result<TimeseriesResultValue>>newLinkedList()
  );

  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<>(time, new TimeseriesResultValue(ImmutableMap.<String, Object>of("UV", 3, "rows", 3L)))
  );
  TestHelper.assertExpectedResults(expectedResults, results);
}
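Despite its name, this test runs a plain timeseries query straight through TimeseriesQueryEngine against the in-memory index; all three rows share one timestamp, so a single result bucket holds UV of 3 (three distinct visitor_ids) and rows of 3. A hypothetical follow-up that unpacks that result, assuming TimeseriesResultValue's metric getters behave as in this Druid version:

// Pull the aggregated values back out of the single result.
Result<TimeseriesResultValue> result = results.iterator().next();
TimeseriesResultValue value = result.getValue();
Assert.assertEquals(3L, value.getLongMetric("UV").longValue());
Assert.assertEquals(3L, value.getLongMetric("rows").longValue());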