Example 1 with PagingSpec

Use of io.druid.query.select.PagingSpec in project druid by druid-io.

From class CachingClusteredClientTest, method testSelectCaching.

@Test
public void testSelectCaching() throws Exception {
    final Set<String> dimensions = Sets.<String>newHashSet("a");
    final Set<String> metrics = Sets.<String>newHashSet("rows");
    Druids.SelectQueryBuilder builder = Druids.newSelectQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(SEG_SPEC)
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .dimensions(Arrays.asList("a"))
        .metrics(Arrays.asList("rows"))
        .pagingSpec(new PagingSpec(null, 3))
        .context(CONTEXT);
    testQueryCaching(
        client,
        builder.build(),
        new Interval("2011-01-01/2011-01-02"),
        makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)),
        new Interval("2011-01-02/2011-01-03"),
        makeSelectResults(dimensions, metrics, new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)),
        new Interval("2011-01-05/2011-01-10"),
        makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9)
        ),
        new Interval("2011-01-05/2011-01-10"),
        makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)
        )
    );
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        new SelectQueryQueryToolChest(
            jsonMapper,
            QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator(),
            selectConfigSupplier
        )
    );
    HashMap<String, Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(
        makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1),
            new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5),
            new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9),
            new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)
        ),
        runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context)
    );
}
Also used: HashMap (java.util.HashMap), DateTime (org.joda.time.DateTime), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner), QueryRunner (io.druid.query.QueryRunner), SelectQueryQueryToolChest (io.druid.query.select.SelectQueryQueryToolChest), PagingSpec (io.druid.query.select.PagingSpec), Druids (io.druid.query.Druids), Interval (org.joda.time.Interval), Test (org.junit.Test), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)
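
A note on the PagingSpec above: its first argument is a map from segment identifier to row offset, and the second is the paging threshold (how many rows a page may return). Passing null, as the builder does, requests the first page. A minimal sketch, with a made-up segment identifier:

    // First page: no paging identifiers yet, return up to 3 rows.
    PagingSpec firstPage = new PagingSpec(null, 3);
    // A later page: resume from offsets based on the previous result's
    // getPagingIdentifiers(), advanced by one (see Example 3 below).
    // The segment identifier here is hypothetical, for illustration only.
    PagingSpec nextPage = new PagingSpec(
        ImmutableMap.<String, Integer>of("testSegment_2011-01-01_v1", 3),
        3
    );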

Example 2 with PagingSpec

Use of io.druid.query.select.PagingSpec in project druid by druid-io.

From class CachingClusteredClientTest, method testSelectCachingRenamedOutputName.

@Test
public void testSelectCachingRenamedOutputName() throws Exception {
    final Set<String> dimensions = Sets.<String>newHashSet("a");
    final Set<String> metrics = Sets.<String>newHashSet("rows");
    Druids.SelectQueryBuilder builder = Druids.newSelectQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals(SEG_SPEC)
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .dimensions(Arrays.asList("a"))
        .metrics(Arrays.asList("rows"))
        .pagingSpec(new PagingSpec(null, 3))
        .context(CONTEXT);
    testQueryCaching(
        client,
        builder.build(),
        new Interval("2011-01-01/2011-01-02"),
        makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)),
        new Interval("2011-01-02/2011-01-03"),
        makeSelectResults(dimensions, metrics, new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)),
        new Interval("2011-01-05/2011-01-10"),
        makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9)
        ),
        new Interval("2011-01-05/2011-01-10"),
        makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)
        )
    );
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        new SelectQueryQueryToolChest(
            jsonMapper,
            QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator(),
            selectConfigSupplier
        )
    );
    HashMap<String, Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(
        makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1),
            new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5),
            new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
            new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
            new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
            new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
            new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9),
            new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)
        ),
        runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context)
    );
    TestHelper.assertExpectedResults(
        makeSelectResults(
            dimensions, metrics,
            new DateTime("2011-01-01"), ImmutableMap.of("a2", "b", "rows", 1),
            new DateTime("2011-01-02"), ImmutableMap.of("a2", "c", "rows", 5),
            new DateTime("2011-01-05"), ImmutableMap.of("a2", "d", "rows", 5),
            new DateTime("2011-01-05T01"), ImmutableMap.of("a2", "d", "rows", 5),
            new DateTime("2011-01-06"), ImmutableMap.of("a2", "e", "rows", 6),
            new DateTime("2011-01-06T01"), ImmutableMap.of("a2", "e", "rows", 6),
            new DateTime("2011-01-07"), ImmutableMap.of("a2", "f", "rows", 7),
            new DateTime("2011-01-07T01"), ImmutableMap.of("a2", "f", "rows", 7),
            new DateTime("2011-01-08"), ImmutableMap.of("a2", "g", "rows", 8),
            new DateTime("2011-01-08T01"), ImmutableMap.of("a2", "g", "rows", 8),
            new DateTime("2011-01-09"), ImmutableMap.of("a2", "h", "rows", 9),
            new DateTime("2011-01-09T01"), ImmutableMap.of("a2", "h", "rows", 9)
        ),
        runner.run(
            builder.intervals("2011-01-01/2011-01-10")
                   .dimensionSpecs(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("a", "a2")))
                   .build(),
            context
        )
    );
}
Also used: DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), DimensionSpec (io.druid.query.dimension.DimensionSpec), HashMap (java.util.HashMap), DateTime (org.joda.time.DateTime), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner), QueryRunner (io.druid.query.QueryRunner), SelectQueryQueryToolChest (io.druid.query.select.SelectQueryQueryToolChest), PagingSpec (io.druid.query.select.PagingSpec), Druids (io.druid.query.Druids), Interval (org.joda.time.Interval), Test (org.junit.Test), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)
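
What distinguishes this test from Example 1 is the second assertion: the query is re-issued with the dimension renamed on output, and because only the output name changes, the previously cached per-segment results can still be reused, which is what the test checks. The rename is expressed with a DefaultDimensionSpec:

    // Read the stored dimension "a" but emit it under the output name "a2".
    DimensionSpec renamed = new DefaultDimensionSpec("a", "a2");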

Example 3 with PagingSpec

Use of io.druid.query.select.PagingSpec in project druid by druid-io.

From class SelectBenchmark, method incrementQueryPagination.

// Don't run this benchmark with a query that doesn't use QueryGranularities.ALL;
// this pagination function probably doesn't work correctly in that case.
private SelectQuery incrementQueryPagination(SelectQuery query, SelectResultValue prevResult) {
    Map<String, Integer> pagingIdentifiers = prevResult.getPagingIdentifiers();
    Map<String, Integer> newPagingIdentifiers = new HashMap<>();
    for (Map.Entry<String, Integer> entry : pagingIdentifiers.entrySet()) {
        // Advance each segment's offset one past the last row read on the previous page.
        newPagingIdentifiers.put(entry.getKey(), entry.getValue() + 1);
    }
    return query.withPagingSpec(new PagingSpec(newPagingIdentifiers, pagingThreshold));
}
Also used: HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap), PagingSpec (io.druid.query.select.PagingSpec)
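
A minimal sketch of how a paging loop might drive this helper; runOneQuery and consume are hypothetical stand-ins for SelectBenchmark's actual harness:

    SelectQuery query = baseQuery.withPagingSpec(new PagingSpec(null, pagingThreshold));
    while (true) {
        // Run one page of the Select query (runOneQuery is a hypothetical helper).
        Result<SelectResultValue> result = runOneQuery(query);
        if (result.getValue().getEvents().isEmpty()) {
            break; // no rows left in any segment; pagination is done
        }
        consume(result); // hypothetical consumer of this page's events
        query = incrementQueryPagination(query, result.getValue());
    }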

Example 4 with PagingSpec

Use of io.druid.query.select.PagingSpec in project druid by druid-io.

From class QueryMaker, method executeSelect.

private Sequence<Object[]> executeSelect(final DruidQueryBuilder queryBuilder, final SelectQuery baseQuery) {
    Preconditions.checkState(queryBuilder.getGrouping() == null, "grouping must be null");
    final List<RelDataTypeField> fieldList = queryBuilder.getRowType().getFieldList();
    final Integer limit = queryBuilder.getLimitSpec() != null ? queryBuilder.getLimitSpec().getLimit() : null;
    // Select is paginated, so we need to make multiple queries.
    final Sequence<Sequence<Object[]>> sequenceOfSequences = Sequences.simple(new Iterable<Sequence<Object[]>>() {

        @Override
        public Iterator<Sequence<Object[]>> iterator() {
            final AtomicBoolean morePages = new AtomicBoolean(true);
            final AtomicReference<Map<String, Integer>> pagingIdentifiers = new AtomicReference<>();
            final AtomicLong rowsRead = new AtomicLong();
            // Each Sequence<Object[]> is one page.
            return new Iterator<Sequence<Object[]>>() {

                @Override
                public boolean hasNext() {
                    return morePages.get();
                }

                @Override
                public Sequence<Object[]> next() {
                    final SelectQuery queryWithPagination = baseQuery.withPagingSpec(new PagingSpec(pagingIdentifiers.get(), plannerContext.getPlannerConfig().getSelectThreshold(), true));
                    Hook.QUERY_PLAN.run(queryWithPagination);
                    morePages.set(false);
                    final AtomicBoolean gotResult = new AtomicBoolean();
                    return Sequences.concat(Sequences.map(queryWithPagination.run(walker, Maps.<String, Object>newHashMap()), new Function<Result<SelectResultValue>, Sequence<Object[]>>() {

                        @Override
                        public Sequence<Object[]> apply(final Result<SelectResultValue> result) {
                            if (!gotResult.compareAndSet(false, true)) {
                                throw new ISE("WTF?! Expected single result from Select query but got multiple!");
                            }
                            pagingIdentifiers.set(result.getValue().getPagingIdentifiers());
                            final List<Object[]> retVals = new ArrayList<>();
                            for (EventHolder holder : result.getValue().getEvents()) {
                                morePages.set(true);
                                final Map<String, Object> map = holder.getEvent();
                                final Object[] retVal = new Object[fieldList.size()];
                                for (RelDataTypeField field : fieldList) {
                                    final String outputName = queryBuilder.getRowOrder().get(field.getIndex());
                                    if (outputName.equals(Column.TIME_COLUMN_NAME)) {
                                        retVal[field.getIndex()] = coerce(holder.getTimestamp().getMillis(), field.getType().getSqlTypeName());
                                    } else {
                                        retVal[field.getIndex()] = coerce(map.get(outputName), field.getType().getSqlTypeName());
                                    }
                                }
                                if (limit == null || rowsRead.incrementAndGet() <= limit) {
                                    retVals.add(retVal);
                                } else {
                                    morePages.set(false);
                                    return Sequences.simple(retVals);
                                }
                            }
                            return Sequences.simple(retVals);
                        }
                    }));
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }
            };
        }
    });
    return Sequences.concat(sequenceOfSequences);
}
Also used: NlsString (org.apache.calcite.util.NlsString), Result (io.druid.query.Result), Iterator (java.util.Iterator), ISE (io.druid.java.util.common.ISE), ArrayList (java.util.ArrayList), List (java.util.List), SelectResultValue (io.druid.query.select.SelectResultValue), AtomicReference (java.util.concurrent.atomic.AtomicReference), Sequence (io.druid.java.util.common.guava.Sequence), SelectQuery (io.druid.query.select.SelectQuery), AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean), RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField), AtomicLong (java.util.concurrent.atomic.AtomicLong), PagingSpec (io.druid.query.select.PagingSpec), EventHolder (io.druid.query.select.EventHolder), Map (java.util.Map)
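
Note the three-argument PagingSpec in next(): the trailing true is the fromNext flag, which asks Druid to start each segment just past the returned offsets. That is why this code can feed getPagingIdentifiers() back in unchanged, where Example 3's two-argument form has to add 1 per segment itself. A minimal sketch contrasting the two forms; identifiers and threshold are assumed to come from a previous result and the caller's configuration:

    // Two-argument form: the caller advances the offsets itself.
    Map<String, Integer> advanced = new HashMap<>();
    for (Map.Entry<String, Integer> e : identifiers.entrySet()) {
        advanced.put(e.getKey(), e.getValue() + 1);
    }
    PagingSpec manual = new PagingSpec(advanced, threshold);

    // Three-argument form, fromNext = true: offsets pass through unchanged.
    PagingSpec automatic = new PagingSpec(identifiers, threshold, true);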

Example 5 with PagingSpec

Use of io.druid.query.select.PagingSpec in project druid by druid-io.

From class DruidQueryBuilder, method toSelectQuery.

/**
   * Return this query as a Select query, or null if this query is not compatible with Select.
   *
   * @param dataSource         data source to query
   * @param sourceRowSignature row signature of the dataSource
   * @param context            query context
   *
   * @return query or null
   */
public SelectQuery toSelectQuery(final DataSource dataSource, final RowSignature sourceRowSignature, final Map<String, Object> context) {
    if (grouping != null) {
        return null;
    }
    final Filtration filtration = Filtration.create(filter).optimize(sourceRowSignature);
    final boolean descending;
    if (limitSpec != null) {
        // Safe to assume limitSpec has zero or one entry; DruidSelectSortRule wouldn't push in anything else.
        if (limitSpec.getColumns().size() > 0) {
            final OrderByColumnSpec orderBy = Iterables.getOnlyElement(limitSpec.getColumns());
            if (!orderBy.getDimension().equals(Column.TIME_COLUMN_NAME)) {
                throw new ISE("WTF?! Got select with non-time orderBy[%s]", orderBy);
            }
            descending = orderBy.getDirection() == OrderByColumnSpec.Direction.DESCENDING;
        } else {
            descending = false;
        }
    } else {
        descending = false;
    }
    return new SelectQuery(
        dataSource,
        filtration.getQuerySegmentSpec(),
        descending,
        filtration.getDimFilter(),
        Granularities.ALL,
        selectProjection != null ? selectProjection.getDimensions() : ImmutableList.<DimensionSpec>of(),
        selectProjection != null ? selectProjection.getMetrics() : ImmutableList.<String>of(),
        null,
        new PagingSpec(null, 0), /* dummy -- will be replaced */
        context
    );
}
Also used: OrderByColumnSpec (io.druid.query.groupby.orderby.OrderByColumnSpec), SelectQuery (io.druid.query.select.SelectQuery), DimensionSpec (io.druid.query.dimension.DimensionSpec), Filtration (io.druid.sql.calcite.filtration.Filtration), PagingSpec (io.druid.query.select.PagingSpec), ISE (io.druid.java.util.common.ISE)
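
The PagingSpec(null, 0) above is only a placeholder, as the inline comment says; the SQL layer installs a real spec before the query runs, exactly as Example 4's executeSelect does with withPagingSpec. A minimal sketch, where selectThreshold stands in for plannerContext.getPlannerConfig().getSelectThreshold():

    // Replace the dummy paging spec with a real first-page spec before execution.
    SelectQuery runnable = selectQuery.withPagingSpec(
        new PagingSpec(null, selectThreshold, true)
    );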

Aggregations

PagingSpec (io.druid.query.select.PagingSpec): 5 examples
HashMap (java.util.HashMap): 3 examples
ISE (io.druid.java.util.common.ISE): 2 examples
Druids (io.druid.query.Druids): 2 examples
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner): 2 examples
QueryRunner (io.druid.query.QueryRunner): 2 examples
DimensionSpec (io.druid.query.dimension.DimensionSpec): 2 examples
GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest): 2 examples
SelectQuery (io.druid.query.select.SelectQuery): 2 examples
SelectQueryQueryToolChest (io.druid.query.select.SelectQueryQueryToolChest): 2 examples
DateTime (org.joda.time.DateTime): 2 examples
Interval (org.joda.time.Interval): 2 examples
Test (org.junit.Test): 2 examples
Sequence (io.druid.java.util.common.guava.Sequence): 1 example
Result (io.druid.query.Result): 1 example
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 1 example
OrderByColumnSpec (io.druid.query.groupby.orderby.OrderByColumnSpec): 1 example
EventHolder (io.druid.query.select.EventHolder): 1 example
SelectResultValue (io.druid.query.select.SelectResultValue): 1 example
Filtration (io.druid.sql.calcite.filtration.Filtration): 1 example