Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From the class PostAveragerAggregatorCalculatorTest, method setup:
@Before
public void setup() {
    System.setProperty("druid.generic.useDefaultValueForNull", "true");
    NullHandling.initializeForTests();
    MovingAverageQuery query = new MovingAverageQuery(
        new TableDataSource("d"),
        new MultipleIntervalSegmentSpec(Collections.singletonList(
            new Interval("2017-01-01/2017-01-01", ISOChronology.getInstanceUTC()))),
        null,
        Granularities.DAY,
        null,
        Collections.singletonList(new CountAggregatorFactory("count")),
        Collections.emptyList(),
        null,
        Collections.singletonList(new DoubleMeanAveragerFactory("avgCount", 7, 1, "count")),
        Collections.singletonList(new ArithmeticPostAggregator(
            "avgCountRatio",
            "/",
            Arrays.asList(
                new FieldAccessPostAggregator("count", "count"),
                new FieldAccessPostAggregator("avgCount", "avgCount")))),
        null,
        null);
    pac = new PostAveragerAggregatorCalculator(query);
    event = new HashMap<>();
    row = new MapBasedRow(new DateTime(ISOChronology.getInstanceUTC()), event);
}
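The setup above points a MovingAverageQuery at the table "d" and computes a seven-day mean of "count" plus a count/avgCount ratio post-aggregator. The datasource itself is only a named reference to a table; a minimal standalone sketch (reusing the table name "d" from the test; the JSON shape is the standard native-query serialization):

    // TableDataSource is a thin wrapper around a table name.
    // In native query JSON it serializes as {"type": "table", "name": "d"}.
    TableDataSource ds = new TableDataSource("d");
    String table = ds.getName(); // "d"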
Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From the class MapVirtualColumnGroupByTest, method testWithMapColumn:
@Test
public void testWithMapColumn() {
    final GroupByQuery query = new GroupByQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
        null,
        Granularities.ALL,
        ImmutableList.of(new DefaultDimensionSpec("params", "params")),
        ImmutableList.of(new CountAggregatorFactory("count")),
        null,
        null,
        null,
        null,
        null);
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("Map column doesn't support getRow()");
    runner.run(QueryPlus.wrap(query)).toList();
}
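The test expects this query to fail: grouping on the raw map-typed "params" column would require materializing whole rows via getRow(), which MapVirtualColumn does not support, hence the UnsupportedOperationException asserted above.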
Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From the class MapVirtualColumnGroupByTest, method testWithSubColumn:
@Test
public void testWithSubColumn() {
    final GroupByQuery query = new GroupByQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
        null,
        Granularities.ALL,
        ImmutableList.of(new DefaultDimensionSpec("params.key3", "params.key3")),
        ImmutableList.of(new CountAggregatorFactory("count")),
        null,
        null,
        null,
        null,
        null);
    final List<ResultRow> result = runner.run(QueryPlus.wrap(query)).toList();
    final List<ResultRow> expected = ImmutableList
        .of(
            new MapBasedRow(
                DateTimes.of("2011-01-12T00:00:00.000Z"),
                MapVirtualColumnTestBase.mapOf("count", 1L, "params.key3", "value3")),
            new MapBasedRow(
                DateTimes.of("2011-01-12T00:00:00.000Z"),
                MapVirtualColumnTestBase.mapOf("count", 2L)))
        .stream()
        .map(row -> ResultRow.fromLegacyRow(row, query))
        .collect(Collectors.toList());
    Assert.assertEquals(expected, result);
}
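Accessing a single key through the "params.key3" sub-column works, by contrast: rows that contain key3 group under "value3" (count 1), while the remaining rows fall into a null group (count 2).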
Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From the class MapVirtualColumnTopNTest, method testWithMapColumn:
@Test
public void testWithMapColumn() {
    final TopNQuery query = new TopNQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
        new DefaultDimensionSpec("params", "params"), // params is the map type
        new NumericTopNMetricSpec("count"),
        1,
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
        null,
        Granularities.ALL,
        ImmutableList.of(new CountAggregatorFactory("count")),
        null,
        null);
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("Map column doesn't support getRow()");
    runner.run(QueryPlus.wrap(query)).toList();
}
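The TopN variant fails for the same reason as the group-by on the raw map column: ranking by the map-typed dimension would need whole-row access via getRow(), which MapVirtualColumn rejects.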
Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From the class ScanBenchmark, method querySingleIncrementalIndex:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleIncrementalIndex(Blackhole blackhole, IncrementalIndexState state) {
    QueryRunner<ScanResultValue> runner = QueryBenchmarkUtil.makeQueryRunner(
        factory,
        SegmentId.dummy("incIndex"),
        new IncrementalIndexSegment(state.incIndex, SegmentId.dummy("incIndex")));
    Query effectiveQuery = query
        .withDataSource(new TableDataSource("incIndex"))
        .withQuerySegmentSpec(new MultipleSpecificSegmentSpec(
            ImmutableList.of(new SegmentDescriptor(Intervals.ETERNITY, "dummy_version", 0))))
        .withOverriddenContext(ImmutableMap.of(ScanQuery.CTX_KEY_OUTERMOST, false));
    List<ScanResultValue> results = ScanBenchmark.runQuery(factory, runner, effectiveQuery);
    blackhole.consume(results);
}
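Before running, the benchmark rewrites the query to match the ad-hoc segment: the datasource becomes "incIndex" so it agrees with SegmentId.dummy("incIndex"), the segment spec pins execution to a single SegmentDescriptor spanning Intervals.ETERNITY, and ScanQuery.CTX_KEY_OUTERMOST is set to false so the scan is treated as an inner, per-segment run rather than a broker-level outermost query.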