Search in sources :

Example 66 with TableDataSource

Usage of org.apache.druid.query.TableDataSource in the druid-io/druid project.

Source: the querySingleQueryableIndex method of the ScanBenchmark class.

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) {
    // Wrap the first queryable index in a segment and build a runner over it.
    final SegmentId segmentId = SegmentId.dummy("qIndex");
    final QueryableIndexSegment segment = new QueryableIndexSegment(state.qIndexes.get(0), segmentId);
    final QueryRunner<Result<ScanResultValue>> runner =
        QueryBenchmarkUtil.makeQueryRunner(factory, segmentId, segment);

    // Target that single segment explicitly, and clear the "outermost" context
    // flag so the scan is executed as an inner (per-segment) query.
    final SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.ETERNITY, "dummy_version", 0);
    final Query effectiveQuery = query
        .withDataSource(new TableDataSource("qIndex"))
        .withQuerySegmentSpec(new MultipleSpecificSegmentSpec(ImmutableList.of(descriptor)))
        .withOverriddenContext(ImmutableMap.of(ScanQuery.CTX_KEY_OUTERMOST, false));

    final List<ScanResultValue> results = ScanBenchmark.runQuery(factory, runner, effectiveQuery);
    blackhole.consume(results);
}
Also used : QueryableIndexSegment(org.apache.druid.segment.QueryableIndexSegment) MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) ScanQuery(org.apache.druid.query.scan.ScanQuery) Query(org.apache.druid.query.Query) TableDataSource(org.apache.druid.query.TableDataSource) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ScanResultValue(org.apache.druid.query.scan.ScanResultValue) Result(org.apache.druid.query.Result) BenchmarkMode(org.openjdk.jmh.annotations.BenchmarkMode) Benchmark(org.openjdk.jmh.annotations.Benchmark) OutputTimeUnit(org.openjdk.jmh.annotations.OutputTimeUnit)

Example 67 with TableDataSource

Usage of org.apache.druid.query.TableDataSource in the druid-io/druid project.

Source: the testWithSubColumn method of the MapVirtualColumnTopNTest class.

@Test
public void testWithSubColumn() {
    // Virtual column exposing entries of the "keys"/"values" map columns
    // under the "params" prefix.
    final VirtualColumns virtualColumns =
        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params")));

    // Top-2 rows by "count" over the string sub-column params.key3.
    final TopNQuery query = new TopNQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        virtualColumns,
        new DefaultDimensionSpec("params.key3", "params.key3"),
        new NumericTopNMetricSpec("count"),
        2,
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
        null,
        Granularities.ALL,
        ImmutableList.of(new CountAggregatorFactory("count")),
        null,
        null
    );

    final List<Result<TopNResultValue>> actual = runner.run(QueryPlus.wrap(query)).toList();

    // Rows lacking key3 group under null (count 2); one row has "value3".
    final List<Result<TopNResultValue>> expected = Collections.singletonList(
        new Result<>(
            DateTimes.of("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(
                ImmutableList.of(
                    new DimensionAndMetricValueExtractor(
                        MapVirtualColumnTestBase.mapOf("count", 2L, "params.key3", null)),
                    new DimensionAndMetricValueExtractor(
                        MapVirtualColumnTestBase.mapOf("count", 1L, "params.key3", "value3"))
                )
            )
        )
    );

    Assert.assertEquals(expected, actual);
}
Also used : TopNResultValue(org.apache.druid.query.topn.TopNResultValue) TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) TopNQuery(org.apache.druid.query.topn.TopNQuery) NumericTopNMetricSpec(org.apache.druid.query.topn.NumericTopNMetricSpec) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) DimensionAndMetricValueExtractor(org.apache.druid.query.topn.DimensionAndMetricValueExtractor) Result(org.apache.druid.query.Result) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Example 68 with TableDataSource

Usage of org.apache.druid.query.TableDataSource in the druid-io/druid project.

Source: the testQuerySerialization method of the MaterializedViewQueryTest class.

@Test
public void testQuerySerialization() throws IOException {
    // Build a representative topN query: the common double aggregators plus
    // max/min over the "index" column, and the standard post-aggregator.
    final TopNQuery topNQuery = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.ALL_GRAN)
        .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
        .metric(QueryRunnerTestHelper.INDEX_METRIC)
        .threshold(4)
        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .aggregators(
            Lists.newArrayList(
                Iterables.concat(
                    QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
                    Lists.newArrayList(
                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
                        new DoubleMinAggregatorFactory("minIndex", "index")
                    )
                )
            )
        )
        .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
        .build();

    final MaterializedViewQuery query = new MaterializedViewQuery(topNQuery, optimizer);

    // Round-trip through JSON and verify equality is preserved.
    final String json = JSON_MAPPER.writeValueAsString(query);
    final Query serdeQuery = JSON_MAPPER.readValue(json, Query.class);
    Assert.assertEquals(query, serdeQuery);

    // The materialized-view wrapper must delegate datasource, granularity and
    // intervals to the wrapped topN query.
    Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), query.getDataSource());
    Assert.assertEquals(QueryRunnerTestHelper.ALL_GRAN, query.getGranularity());
    Assert.assertEquals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(), query.getIntervals());
}
Also used : TopNQueryBuilder(org.apache.druid.query.topn.TopNQueryBuilder) DoubleMaxAggregatorFactory(org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory) TopNQuery(org.apache.druid.query.topn.TopNQuery) Query(org.apache.druid.query.Query) TableDataSource(org.apache.druid.query.TableDataSource) TopNQuery(org.apache.druid.query.topn.TopNQuery) DoubleMinAggregatorFactory(org.apache.druid.query.aggregation.DoubleMinAggregatorFactory) Test(org.junit.Test)

Example 69 with TableDataSource

Usage of org.apache.druid.query.TableDataSource in the druid-io/druid project.

Source: the queryMultiQueryableIndex method of the ScanBenchmark class.

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryMultiQueryableIndex(Blackhole blackhole, QueryableIndexState state) {
    // Presize: one descriptor and one runner per segment.
    List<SegmentDescriptor> segmentDescriptors = new ArrayList<>(state.numSegments);
    // Fix: the element type was QueryRunner<Row> — a groupBy result type,
    // apparently copy-pasted; scan runners produce Result<ScanResultValue>.
    List<QueryRunner<Result<ScanResultValue>>> runners = new ArrayList<>(state.numSegments);
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < state.numSegments; i++) {
        String segmentName = "qIndex";
        // Build a per-segment runner over the i-th queryable index.
        final QueryRunner<Result<ScanResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
            factory,
            SegmentId.dummy(segmentName),
            new QueryableIndexSegment(state.qIndexes.get(i), SegmentId.dummy(segmentName, i))
        );
        segmentDescriptors.add(new SegmentDescriptor(Intervals.ETERNITY, "dummy_version", i));
        runners.add(toolChest.preMergeQueryDecoration(runner));
    }
    // Merge the per-segment runners the same way the broker would, finalizing
    // results and applying post-merge decoration.
    QueryRunner theRunner = toolChest.postMergeQueryDecoration(
        new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(factory.mergeRunners(state.executorService, runners)),
            toolChest
        )
    );
    // Target all built segments explicitly; clear the "outermost" context flag
    // so the scan executes as an inner (per-segment) query.
    Query effectiveQuery = query
        .withDataSource(new TableDataSource("qIndex"))
        .withQuerySegmentSpec(new MultipleSpecificSegmentSpec(segmentDescriptors))
        .withOverriddenContext(ImmutableMap.of(ScanQuery.CTX_KEY_OUTERMOST, false));
    Sequence<Result<ScanResultValue>> queryResult =
        theRunner.run(QueryPlus.wrap(effectiveQuery), ResponseContext.createEmpty());
    List<Result<ScanResultValue>> results = queryResult.toList();
    blackhole.consume(results);
}
Also used : QueryableIndexSegment(org.apache.druid.segment.QueryableIndexSegment) MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) ScanQuery(org.apache.druid.query.scan.ScanQuery) Query(org.apache.druid.query.Query) ArrayList(java.util.ArrayList) ScanQueryQueryToolChest(org.apache.druid.query.scan.ScanQueryQueryToolChest) QueryToolChest(org.apache.druid.query.QueryToolChest) QueryRunner(org.apache.druid.query.QueryRunner) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) Result(org.apache.druid.query.Result) TableDataSource(org.apache.druid.query.TableDataSource) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) BenchmarkMode(org.openjdk.jmh.annotations.BenchmarkMode) Benchmark(org.openjdk.jmh.annotations.Benchmark) OutputTimeUnit(org.openjdk.jmh.annotations.OutputTimeUnit)

Example 70 with TableDataSource

Usage of org.apache.druid.query.TableDataSource in the druid-io/druid project.

Source: the isHandOffComplete method of the DataSourcesResource class.

/**
 * Used by the realtime tasks to learn whether a segment is handed off or not.
 * It returns true when the segment will never be handed off or is already handed off. Otherwise, it returns false.
 */
@GET
@Path("/{dataSourceName}/handoffComplete")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response isHandOffComplete(@PathParam("dataSourceName") String dataSourceName, @QueryParam("interval") final String interval, @QueryParam("partitionNumber") final int partitionNumber, @QueryParam("version") final String version) {
    try {
        final List<Rule> rules = metadataRuleManager.getRulesWithDefault(dataSourceName);
        final Interval segmentInterval = Intervals.of(interval);
        final SegmentDescriptor segmentDescriptor =
            new SegmentDescriptor(segmentInterval, version, partitionNumber);
        final DateTime referenceTime = DateTimes.nowUtc();

        // Hand-off is trivially complete if the segment will never be loaded:
        // only the first rule that applies to the interval matters, and the
        // segment is loadable only when that rule is a load rule.
        boolean willNeverLoad = true;
        for (Rule rule : rules) {
            if (rule.appliesTo(segmentInterval, referenceTime)) {
                willNeverLoad = !(rule instanceof LoadRule);
                break;
            }
        }
        if (willNeverLoad) {
            return Response.ok(true).build();
        }

        final TimelineLookup<String, SegmentLoadInfo> timeline =
            serverInventoryView.getTimeline(new TableDataSource(dataSourceName));
        if (timeline == null) {
            log.debug("No timeline found for datasource[%s]", dataSourceName);
            return Response.ok(false).build();
        }

        // Otherwise the segment is handed off exactly when a historical is
        // already serving it.
        final Iterable<ImmutableSegmentLoadInfo> servedSegments =
            prepareServedSegmentsInInterval(timeline, segmentInterval);
        final boolean loaded = isSegmentLoaded(servedSegments, segmentDescriptor);
        return Response.ok(loaded).build();
    } catch (Exception e) {
        log.error(e, "Error while handling hand off check request");
        return Response.serverError().entity(ImmutableMap.of("error", e.toString())).build();
    }
}
Also used : SegmentLoadInfo(org.apache.druid.client.SegmentLoadInfo) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) DateTime(org.joda.time.DateTime) UnknownSegmentIdsException(org.apache.druid.metadata.UnknownSegmentIdsException) TableDataSource(org.apache.druid.query.TableDataSource) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ImmutableSegmentLoadInfo(org.apache.druid.client.ImmutableSegmentLoadInfo) LoadRule(org.apache.druid.server.coordinator.rules.LoadRule) Rule(org.apache.druid.server.coordinator.rules.Rule) LoadRule(org.apache.druid.server.coordinator.rules.LoadRule) Interval(org.joda.time.Interval) Path(javax.ws.rs.Path) ResourceFilters(com.sun.jersey.spi.container.ResourceFilters) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)

Aggregations

TableDataSource (org.apache.druid.query.TableDataSource)118 Test (org.junit.Test)94 GlobalTableDataSource (org.apache.druid.query.GlobalTableDataSource)46 CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory)43 QueryDataSource (org.apache.druid.query.QueryDataSource)41 DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec)40 Parameters (junitparams.Parameters)30 MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec)19 LookupDataSource (org.apache.druid.query.LookupDataSource)18 DataSegment (org.apache.druid.timeline.DataSegment)15 Result (org.apache.druid.query.Result)14 CountDownLatch (java.util.concurrent.CountDownLatch)11 Query (org.apache.druid.query.Query)11 TimelineObjectHolder (org.apache.druid.timeline.TimelineObjectHolder)11 Interval (org.joda.time.Interval)11 SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter)10 ArrayList (java.util.ArrayList)9 GroupByQuery (org.apache.druid.query.groupby.GroupByQuery)9 ISE (org.apache.druid.java.util.common.ISE)8 SegmentDescriptor (org.apache.druid.query.SegmentDescriptor)8