
Example 6 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class DefaultLimitSpecTest, method testBuildWithExplicitOrder.

@Test
public void testBuildWithExplicitOrder() {
    DefaultLimitSpec limitSpec = new DefaultLimitSpec(
        ImmutableList.of(new OrderByColumnSpec("k1", OrderByColumnSpec.Direction.ASCENDING)),
        2
    );
    Function<Sequence<ResultRow>, Sequence<ResultRow>> limitFn = limitSpec.build(
        GroupByQuery.builder()
                    .setDataSource("dummy")
                    .setInterval("1000/3000")
                    .setDimensions(new DefaultDimensionSpec("k1", "k1"))
                    .setAggregatorSpecs(new LongSumAggregatorFactory("k2", "k2"))
                    .setPostAggregatorSpecs(ImmutableList.of(new ConstantPostAggregator("k3", 1L)))
                    .setGranularity(Granularities.NONE)
                    .build()
    );
    Assert.assertEquals(
        ImmutableList.of(testRowsList.get(0), testRowsList.get(1)),
        limitFn.apply(Sequences.simple(testRowsList)).toList()
    );
}
Also used : LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) ConstantPostAggregator(org.apache.druid.query.aggregation.post.ConstantPostAggregator) Sequence(org.apache.druid.java.util.common.guava.Sequence) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Test(org.junit.Test)
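
The shape of this test is: build a limiting function, apply it to a Sequence, and materialize the result with toList(). The same collect-the-first-N idea can be tried on a plain Sequence using only the core guava utilities; a minimal sketch with made-up string values, not the DefaultLimitSpec internals:

import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;

import java.util.List;

public class SequenceLimitSketch {
    public static void main(String[] args) {
        // Wrap an in-memory list in a Sequence, keep the first two elements, and materialize the result.
        Sequence<String> rows = Sequences.simple(ImmutableList.of("a", "b", "c", "d"));
        List<String> firstTwo = rows.limit(2).toList();
        System.out.println(firstTwo); // [a, b]
    }
}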

Example 7 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class DefaultLimitSpecTest, method testWithSortByDimsFirst.

@Test
public void testWithSortByDimsFirst() {
    DefaultLimitSpec limitSpec = new DefaultLimitSpec(
        ImmutableList.of(new OrderByColumnSpec("k1", OrderByColumnSpec.Direction.ASCENDING, StringComparators.NUMERIC)),
        2
    );
    Function<Sequence<ResultRow>, Sequence<ResultRow>> limitFn = limitSpec.build(
        GroupByQuery.builder()
                    .setDataSource("dummy")
                    .setInterval("1000/3000")
                    .setDimensions(new DefaultDimensionSpec("k1", "k1", ColumnType.DOUBLE))
                    .setGranularity(Granularities.NONE)
                    .overrideContext(ImmutableMap.of(GroupByQuery.CTX_KEY_SORT_BY_DIMS_FIRST, true))
                    .build()
    );
    Assert.assertEquals(
        ImmutableList.of(testRowsWithTimestampList.get(2), testRowsWithTimestampList.get(0)),
        limitFn.apply(Sequences.simple(testRowsWithTimestampList)).toList()
    );
}
Also used : Sequence(org.apache.druid.java.util.common.guava.Sequence) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Test(org.junit.Test)
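
Here the rows are ordered numerically on k1 via StringComparators.NUMERIC before the limit of 2 is applied. A minimal sketch of that sort-then-limit behavior on plain strings, assuming only StringComparators and the Sequence utilities shown above; the values are invented:

import com.google.common.collect.Lists;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.query.ordering.StringComparators;

import java.util.List;

public class NumericOrderSketch {
    public static void main(String[] args) {
        // "10" sorts after "9" under NUMERIC ordering, unlike plain lexicographic ordering.
        List<String> keys = Lists.newArrayList("10", "2", "9", "1");
        keys.sort(StringComparators.NUMERIC);
        // keys is now [1, 2, 9, 10]; wrap it in a Sequence and keep the two smallest, as the limit spec does.
        List<String> topTwo = Sequences.simple(keys).limit(2).toList();
        System.out.println(topTwo); // [1, 2]
    }
}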

Example 8 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class QueryLifecycle, method execute.

/**
 * Execute the query. Can only be called if the query has been authorized. Note that query logs and metrics will
 * not be emitted automatically when the Sequence is fully iterated. It is the caller's responsibility to call
 * {@link #emitLogsAndMetrics(Throwable, String, long)} to emit logs and metrics.
 *
 * @return result sequence and response context
 */
public QueryResponse execute() {
    transition(State.AUTHORIZED, State.EXECUTING);
    final ResponseContext responseContext = DirectDruidClient.makeResponseContextForQuery();
    final Sequence res = QueryPlus.wrap(baseQuery).withIdentity(authenticationResult.getIdentity()).run(texasRanger, responseContext);
    return new QueryResponse(res == null ? Sequences.empty() : res, responseContext);
}
Also used : ResponseContext(org.apache.druid.query.context.ResponseContext) Sequence(org.apache.druid.java.util.common.guava.Sequence)
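
Per the Javadoc, iterating the returned Sequence does not emit query logs or metrics; the caller has to call emitLogsAndMetrics afterwards. A minimal sketch of that caller-side contract, assuming QueryResponse exposes its Sequence through a getResults() accessor (name assumed here) and that the remote address is supplied by the serving layer; this is an illustration, not the actual Druid servlet code:

import java.util.List;

import org.apache.druid.server.QueryLifecycle;

public class QueryLifecycleCallerSketch {
    // `lifecycle` must already have been initialized and authorized before execute() is legal.
    static List<?> runAndReport(QueryLifecycle lifecycle, String remoteAddress) {
        Throwable error = null;
        try {
            // Fully iterate the result Sequence; on its own this emits no query logs or metrics.
            return lifecycle.execute().getResults().toList();
        } catch (RuntimeException e) {
            error = e;
            throw e;
        } finally {
            // Caller-side reporting; bytesWritten is unknown in this sketch, so 0 is passed.
            lifecycle.emitLogsAndMetrics(error, remoteAddress, 0);
        }
    }
}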

Example 9 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class DumpSegment, method runDump.

private void runDump(final Injector injector, final QueryableIndex index) throws IOException {
    final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class));
    final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(index);
    final List<String> columnNames = getColumnsToInclude(index);
    final DimFilter filter = filterJson != null ? objectMapper.readValue(filterJson, DimFilter.class) : null;
    final Sequence<Cursor> cursors = adapter.makeCursors(
        Filters.toFilter(filter),
        index.getDataInterval().withChronology(ISOChronology.getInstanceUTC()),
        VirtualColumns.EMPTY,
        Granularities.ALL,
        false,
        null
    );
    withOutputStream(new Function<OutputStream, Object>() {

        @Override
        public Object apply(final OutputStream out) {
            final Sequence<Object> sequence = Sequences.map(cursors, new Function<Cursor, Object>() {

                @Override
                public Object apply(Cursor cursor) {
                    ColumnSelectorFactory columnSelectorFactory = cursor.getColumnSelectorFactory();
                    final List<BaseObjectColumnValueSelector> selectors = columnNames.stream().map(columnSelectorFactory::makeColumnValueSelector).collect(Collectors.toList());
                    while (!cursor.isDone()) {
                        final Map<String, Object> row = Maps.newLinkedHashMap();
                        for (int i = 0; i < columnNames.size(); i++) {
                            final String columnName = columnNames.get(i);
                            final Object value = selectors.get(i).getObject();
                            if (timeISO8601 && columnNames.get(i).equals(ColumnHolder.TIME_COLUMN_NAME)) {
                                row.put(columnName, new DateTime(value, DateTimeZone.UTC).toString());
                            } else {
                                row.put(columnName, value);
                            }
                        }
                        try {
                            out.write(objectMapper.writeValueAsBytes(row));
                            out.write('\n');
                        } catch (IOException e) {
                            throw new RuntimeException(e);
                        }
                        cursor.advance();
                    }
                    return null;
                }
            });
            evaluateSequenceForSideEffects(sequence);
            return null;
        }
    });
}
Also used : ColumnSelectorFactory(org.apache.druid.segment.ColumnSelectorFactory) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) BaseObjectColumnValueSelector(org.apache.druid.segment.BaseObjectColumnValueSelector) QueryableIndexStorageAdapter(org.apache.druid.segment.QueryableIndexStorageAdapter) Json(org.apache.druid.guice.annotations.Json) Sequence(org.apache.druid.java.util.common.guava.Sequence) IOException(java.io.IOException) Cursor(org.apache.druid.segment.Cursor) DateTime(org.joda.time.DateTime) Function(com.google.common.base.Function) DimFilter(org.apache.druid.query.filter.DimFilter) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)
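
runDump never collects the mapped Sequence; the mapping function exists only for its side effect of writing each row, and evaluateSequenceForSideEffects simply drains the Sequence. Because Sequences are lazy, nothing runs until something iterates them. A minimal sketch of the same map-then-drain pattern using accumulate directly, with made-up values:

import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;

public class SequenceSideEffectSketch {
    public static void main(String[] args) {
        // Map each element to a side effect (printing here, writing JSON rows in DumpSegment).
        Sequence<Object> mapped = Sequences.map(
            Sequences.simple(ImmutableList.of("row-1", "row-2", "row-3")),
            row -> {
                System.out.println(row);
                return null;
            }
        );
        // Nothing has run yet. Draining with a no-op accumulator forces iteration,
        // which is essentially what evaluateSequenceForSideEffects does above.
        mapped.accumulate(null, (accumulated, value) -> null);
    }
}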

Example 10 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class NestedQueryPushDownTest, method runNestedQueryWithForcePushDown.

private Sequence<ResultRow> runNestedQueryWithForcePushDown(GroupByQuery nestedQuery) {
    ResponseContext context = ResponseContext.createEmpty();
    QueryToolChest<ResultRow, GroupByQuery> toolChest = groupByFactory.getToolchest();
    GroupByQuery pushDownQuery = nestedQuery;
    QueryRunner<ResultRow> segment1Runner = new FinalizeResultsQueryRunner<ResultRow>(
        toolChest.mergeResults(groupByFactory.mergeRunners(executorService, getQueryRunnerForSegment1())),
        (QueryToolChest) toolChest
    );
    QueryRunner<ResultRow> segment2Runner = new FinalizeResultsQueryRunner<ResultRow>(
        toolChest.mergeResults(groupByFactory2.mergeRunners(executorService, getQueryRunnerForSegment2())),
        (QueryToolChest) toolChest
    );
    QueryRunner<ResultRow> queryRunnerForSegments = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(
            (queryPlus, responseContext) -> Sequences
                .simple(
                    ImmutableList.of(
                        Sequences.map(
                            segment1Runner.run(queryPlus, responseContext),
                            toolChest.makePreComputeManipulatorFn((GroupByQuery) queryPlus.getQuery(), MetricManipulatorFns.deserializing())
                        ),
                        Sequences.map(
                            segment2Runner.run(queryPlus, responseContext),
                            toolChest.makePreComputeManipulatorFn((GroupByQuery) queryPlus.getQuery(), MetricManipulatorFns.deserializing())
                        )
                    )
                )
                .flatMerge(Function.identity(), queryPlus.getQuery().getResultOrdering())
        ),
        (QueryToolChest) toolChest
    );
    GroupByStrategy strategy = ((GroupByQueryRunnerFactory) groupByFactory).getStrategySelector().strategize(nestedQuery);
    // Historicals execute the query with force push down flag as false
    GroupByQuery queryWithPushDownDisabled = pushDownQuery.withOverriddenContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_PUSH_DOWN_NESTED_QUERY, false));
    Sequence<ResultRow> pushDownQueryResults = strategy.mergeResults(queryRunnerForSegments, queryWithPushDownDisabled, context);
    return toolChest.mergeResults((queryPlus, responseContext) -> pushDownQueryResults).run(QueryPlus.wrap(nestedQuery), context);
}
Also used : QueryPlus(org.apache.druid.query.QueryPlus) Arrays(java.util.Arrays) LongDimensionSchema(org.apache.druid.data.input.impl.LongDimensionSchema) StupidPool(org.apache.druid.collections.StupidPool) IndexSpec(org.apache.druid.segment.IndexSpec) DefaultBlockingPool(org.apache.druid.collections.DefaultBlockingPool) ByteBuffer(java.nio.ByteBuffer) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) QueryWatcher(org.apache.druid.query.QueryWatcher) After(org.junit.After) Map(java.util.Map) QueryRunner(org.apache.druid.query.QueryRunner) GroupByStrategySelector(org.apache.druid.query.groupby.strategy.GroupByStrategySelector) ExtractionDimensionSpec(org.apache.druid.query.dimension.ExtractionDimensionSpec) FileUtils(org.apache.druid.java.util.common.FileUtils) OffHeapMemorySegmentWriteOutMediumFactory(org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory) GroupByStrategy(org.apache.druid.query.groupby.strategy.GroupByStrategy) Sequence(org.apache.druid.java.util.common.guava.Sequence) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) Execs(org.apache.druid.java.util.common.concurrent.Execs) ImmutableMap(com.google.common.collect.ImmutableMap) QueryableIndex(org.apache.druid.segment.QueryableIndex) DruidProcessingConfig(org.apache.druid.query.DruidProcessingConfig) RegexDimExtractionFn(org.apache.druid.query.extraction.RegexDimExtractionFn) StringDimensionSchema(org.apache.druid.data.input.impl.StringDimensionSchema) BySegmentQueryRunner(org.apache.druid.query.BySegmentQueryRunner) QuerySegmentSpec(org.apache.druid.query.spec.QuerySegmentSpec) IncrementalIndexSchema(org.apache.druid.segment.incremental.IncrementalIndexSchema) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) InputRow(org.apache.druid.data.input.InputRow) BlockingPool(org.apache.druid.collections.BlockingPool) List(java.util.List) SegmentId(org.apache.druid.timeline.SegmentId) QueryableIndexSegment(org.apache.druid.segment.QueryableIndexSegment) Logger(org.apache.druid.java.util.common.logger.Logger) InjectableValues(com.fasterxml.jackson.databind.InjectableValues) NonBlockingPool(org.apache.druid.collections.NonBlockingPool) Intervals(org.apache.druid.java.util.common.Intervals) Supplier(com.google.common.base.Supplier) MetricManipulatorFns(org.apache.druid.query.aggregation.MetricManipulatorFns) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) HashMap(java.util.HashMap) Function(java.util.function.Function) JavaScriptConfig(org.apache.druid.js.JavaScriptConfig) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) Query(org.apache.druid.query.Query) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) Suppliers(com.google.common.base.Suppliers) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) IndexMergerV9(org.apache.druid.segment.IndexMergerV9) OnheapIncrementalIndex(org.apache.druid.segment.incremental.OnheapIncrementalIndex) Sequences(org.apache.druid.java.util.common.guava.Sequences) ExecutorService(java.util.concurrent.ExecutorService) Before(org.junit.Before) Segment(org.apache.druid.segment.Segment) GroupByStrategyV2(org.apache.druid.query.groupby.strategy.GroupByStrategyV2) ResponseContext(org.apache.druid.query.context.ResponseContext) GroupByStrategyV1(org.apache.druid.query.groupby.strategy.GroupByStrategyV1) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) QueryToolChest(org.apache.druid.query.QueryToolChest) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) Test(org.junit.Test) GreaterThanHavingSpec(org.apache.druid.query.groupby.having.GreaterThanHavingSpec) SmileFactory(com.fasterxml.jackson.dataformat.smile.SmileFactory) File(java.io.File) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) Granularities(org.apache.druid.java.util.common.granularity.Granularities) LongMaxAggregatorFactory(org.apache.druid.query.aggregation.LongMaxAggregatorFactory) AtomicLong(java.util.concurrent.atomic.AtomicLong) QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory) Assert(org.junit.Assert) IndexIO(org.apache.druid.segment.IndexIO) Collections(java.util.Collections) JavaScriptDimFilter(org.apache.druid.query.filter.JavaScriptDimFilter)
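
The heart of queryRunnerForSegments is flatMerge: the two per-segment Sequences are wrapped in an outer Sequence and merged according to the query's result ordering. A minimal sketch of the same merge over plain integers, with Ordering.natural() standing in for getResultOrdering(); the values are invented:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Ordering;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;

import java.util.function.Function;

public class FlatMergeSketch {
    public static void main(String[] args) {
        // Two already-sorted "per-segment" results, standing in for segment1Runner and segment2Runner.
        Sequence<Integer> segment1 = Sequences.simple(ImmutableList.of(1, 3, 5));
        Sequence<Integer> segment2 = Sequences.simple(ImmutableList.of(2, 4, 6));
        // Wrap both in an outer Sequence and merge them by natural ordering, mirroring
        // Sequences.simple(ImmutableList.of(...)).flatMerge(Function.identity(), resultOrdering) above.
        Sequence<Integer> merged = Sequences
            .simple(ImmutableList.of(segment1, segment2))
            .flatMerge(Function.identity(), Ordering.natural());
        System.out.println(merged.toList()); // [1, 2, 3, 4, 5, 6]
    }
}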

Aggregations

Sequence (org.apache.druid.java.util.common.guava.Sequence) 102
Test (org.junit.Test) 53
List (java.util.List) 44
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec) 37
ResponseContext (org.apache.druid.query.context.ResponseContext) 32
ImmutableList (com.google.common.collect.ImmutableList) 29
Intervals (org.apache.druid.java.util.common.Intervals) 28
Granularities (org.apache.druid.java.util.common.granularity.Granularities) 28
QueryRunner (org.apache.druid.query.QueryRunner) 28
ArrayList (java.util.ArrayList) 27
VirtualColumns (org.apache.druid.segment.VirtualColumns) 26
Cursor (org.apache.druid.segment.Cursor) 25
QueryPlus (org.apache.druid.query.QueryPlus) 24
Result (org.apache.druid.query.Result) 24
NullHandling (org.apache.druid.common.config.NullHandling) 22
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest) 22
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec) 21
QueryableIndexStorageAdapter (org.apache.druid.segment.QueryableIndexStorageAdapter) 20
DataSegment (org.apache.druid.timeline.DataSegment) 20
ImmutableMap (com.google.common.collect.ImmutableMap) 18