
Example 41 with Query

Use of io.druid.query.Query in project druid by druid-io.

From class AsyncQueryForwardingServlet, method service().

@Override
protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    final boolean isSmile = SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(request.getContentType()) || APPLICATION_SMILE.equals(request.getContentType());
    final ObjectMapper objectMapper = isSmile ? smileMapper : jsonMapper;
    request.setAttribute(OBJECTMAPPER_ATTRIBUTE, objectMapper);
    final String defaultHost = hostFinder.getDefaultHost();
    request.setAttribute(HOST_ATTRIBUTE, defaultHost);
    final boolean isQueryEndpoint = request.getRequestURI().startsWith("/druid/v2");
    if (isQueryEndpoint && HttpMethod.DELETE.is(request.getMethod())) {
        // query cancellation request
        for (final String host : hostFinder.getAllHosts()) {
            // to keep the code simple, the proxy servlet will also send a request to one of the default brokers
            if (!host.equals(defaultHost)) {
                // issue async requests
                broadcastClient.newRequest(rewriteURI(request, host))
                        .method(HttpMethod.DELETE)
                        .timeout(CANCELLATION_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS)
                        .send(new Response.CompleteListener() {

                    @Override
                    public void onComplete(Result result) {
                        if (result.isFailed()) {
                            log.warn(result.getFailure(), "Failed to forward cancellation request to [%s]", host);
                        }
                    }
                });
            }
            interruptedQueryCount.incrementAndGet();
        }
    } else if (isQueryEndpoint && HttpMethod.POST.is(request.getMethod())) {
        // query request
        try {
            Query inputQuery = objectMapper.readValue(request.getInputStream(), Query.class);
            if (inputQuery != null) {
                request.setAttribute(HOST_ATTRIBUTE, hostFinder.getHost(inputQuery));
                if (inputQuery.getId() == null) {
                    inputQuery = inputQuery.withId(UUID.randomUUID().toString());
                }
            }
            request.setAttribute(QUERY_ATTRIBUTE, inputQuery);
        } catch (IOException e) {
            log.warn(e, "Exception parsing query");
            final String errorMessage = e.getMessage() == null ? "no error message" : e.getMessage();
            requestLogger.log(
                    new RequestLogLine(
                            new DateTime(),
                            request.getRemoteAddr(),
                            null,
                            new QueryStats(ImmutableMap.<String, Object>of("success", false, "exception", errorMessage))
                    )
            );
            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
            response.setContentType(MediaType.APPLICATION_JSON);
            objectMapper.writeValue(response.getOutputStream(), ImmutableMap.of("error", errorMessage));
            return;
        } catch (Exception e) {
            handleException(response, objectMapper, e);
            return;
        }
    }
    super.service(request, response);
}
Also used : Query(io.druid.query.Query) IOException(java.io.IOException) DateTime(org.joda.time.DateTime) ServletException(javax.servlet.ServletException) URISyntaxException(java.net.URISyntaxException) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IOException(java.io.IOException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) Result(org.eclipse.jetty.client.api.Result) Response(org.eclipse.jetty.client.api.Response) HttpServletResponse(javax.servlet.http.HttpServletResponse) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)
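
The key idiom above — reading the request body into the polymorphic Query interface and assigning a random ID when none is present — also works in isolation. A minimal sketch, assuming a mapper with Druid's Jackson modules registered (DefaultObjectMapper) and a hypothetical queryJson string:

ObjectMapper mapper = new DefaultObjectMapper();
Query inputQuery = mapper.readValue(queryJson, Query.class);
if (inputQuery.getId() == null) {
    // Query implementations are immutable; withId returns a copy carrying the new ID
    inputQuery = inputQuery.withId(UUID.randomUUID().toString());
}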

Example 42 with Query

Use of io.druid.query.Query in project druid by druid-io.

From class RealtimeIndexTaskTest, method makeToolbox().

private TaskToolbox makeToolbox(final Task task, final TaskStorage taskStorage, final IndexerMetadataStorageCoordinator mdc, final File directory) {
    final TaskConfig taskConfig = new TaskConfig(directory.getPath(), null, null, 50000, null, false, null, null);
    final TaskLockbox taskLockbox = new TaskLockbox(taskStorage);
    try {
        taskStorage.insert(task, TaskStatus.running(task.getId()));
    } catch (EntryExistsException e) {
    // suppress
    }
    taskLockbox.syncFromStorage();
    final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(taskLockbox, mdc, emitter, EasyMock.createMock(SupervisorManager.class));
    final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory(taskStorage, taskActionToolbox);
    final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate(
            ImmutableMap.<Class<? extends Query>, QueryRunnerFactory>of(
                    TimeseriesQuery.class,
                    new TimeseriesQueryRunnerFactory(
                            new TimeseriesQueryQueryToolChest(
                                    new IntervalChunkingQueryRunnerDecorator(null, null, null) {
                                        @Override
                                        public <T> QueryRunner<T> decorate(QueryRunner<T> delegate, QueryToolChest<T, ? extends Query<T>> toolChest) {
                                            return delegate;
                                        }
                                    }
                            ),
                            new TimeseriesQueryEngine(),
                            new QueryWatcher() {
                                @Override
                                public void registerQuery(Query query, ListenableFuture future) {
                                    // do nothing
                                }
                            }
                    )
            )
    );
    handOffCallbacks = Maps.newConcurrentMap();
    final SegmentHandoffNotifierFactory handoffNotifierFactory = new SegmentHandoffNotifierFactory() {

        @Override
        public SegmentHandoffNotifier createSegmentHandoffNotifier(String dataSource) {
            return new SegmentHandoffNotifier() {

                @Override
                public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
                    handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable));
                    return true;
                }

                @Override
                public void start() {
                //Noop
                }

                @Override
                public void close() {
                //Noop
                }

                Map<SegmentDescriptor, Pair<Executor, Runnable>> getHandOffCallbacks() {
                    return handOffCallbacks;
                }
            };
        }
    };
    final TestUtils testUtils = new TestUtils();
    final TaskToolboxFactory toolboxFactory = new TaskToolboxFactory(
            taskConfig,
            taskActionClientFactory,
            emitter,
            new TestDataSegmentPusher(),
            new TestDataSegmentKiller(),
            null, // DataSegmentMover
            null, // DataSegmentArchiver
            new TestDataSegmentAnnouncer(),
            handoffNotifierFactory,
            conglomerate,
            MoreExecutors.sameThreadExecutor(), // queryExecutorService
            EasyMock.createMock(MonitorScheduler.class),
            new SegmentLoaderFactory(
                    new SegmentLoaderLocalCacheManager(
                            null,
                            new SegmentLoaderConfig() {
                                @Override
                                public List<StorageLocationConfig> getLocations() {
                                    return Lists.newArrayList();
                                }
                            },
                            testUtils.getTestObjectMapper()
                    )
            ),
            testUtils.getTestObjectMapper(),
            testUtils.getTestIndexMerger(),
            testUtils.getTestIndexIO(),
            MapCache.create(1024),
            new CacheConfig(),
            testUtils.getTestIndexMergerV9()
    );
    return toolboxFactory.build(task);
}
Also used: TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery), Query (io.druid.query.Query), QueryWatcher (io.druid.query.QueryWatcher),
LocalTaskActionClientFactory (io.druid.indexing.common.actions.LocalTaskActionClientFactory), TaskActionClientFactory (io.druid.indexing.common.actions.TaskActionClientFactory),
TestDataSegmentAnnouncer (io.druid.indexing.test.TestDataSegmentAnnouncer), TaskConfig (io.druid.indexing.common.config.TaskConfig),
TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest), QueryToolChest (io.druid.query.QueryToolChest),
TestUtils (io.druid.indexing.common.TestUtils), DefaultQueryRunnerFactoryConglomerate (io.druid.query.DefaultQueryRunnerFactoryConglomerate),
QueryRunnerFactoryConglomerate (io.druid.query.QueryRunnerFactoryConglomerate), TimeseriesQueryEngine (io.druid.query.timeseries.TimeseriesQueryEngine),
Executor (java.util.concurrent.Executor), TaskToolboxFactory (io.druid.indexing.common.TaskToolboxFactory), SegmentDescriptor (io.druid.query.SegmentDescriptor),
TaskActionToolbox (io.druid.indexing.common.actions.TaskActionToolbox), ArrayList (java.util.ArrayList), List (java.util.List),
ImmutableList (com.google.common.collect.ImmutableList), IntervalChunkingQueryRunnerDecorator (io.druid.query.IntervalChunkingQueryRunnerDecorator),
SegmentLoaderConfig (io.druid.segment.loading.SegmentLoaderConfig), SegmentLoaderFactory (io.druid.indexing.common.SegmentLoaderFactory),
SegmentLoaderLocalCacheManager (io.druid.segment.loading.SegmentLoaderLocalCacheManager), CacheConfig (io.druid.client.cache.CacheConfig),
TestDataSegmentPusher (io.druid.indexing.test.TestDataSegmentPusher), Pair (io.druid.java.util.common.Pair), MonitorScheduler (com.metamx.metrics.MonitorScheduler),
SegmentHandoffNotifier (io.druid.segment.realtime.plumber.SegmentHandoffNotifier), EntryExistsException (io.druid.metadata.EntryExistsException),
QueryRunner (io.druid.query.QueryRunner), TestDataSegmentKiller (io.druid.indexing.test.TestDataSegmentKiller),
SegmentHandoffNotifierFactory (io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory), SupervisorManager (io.druid.indexing.overlord.supervisor.SupervisorManager),
TimeseriesQueryRunnerFactory (io.druid.query.timeseries.TimeseriesQueryRunnerFactory), TaskLockbox (io.druid.indexing.overlord.TaskLockbox),
ListenableFuture (com.google.common.util.concurrent.ListenableFuture)
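
The conglomerate assembled above is what resolves an incoming query to its runner factory at execution time. A minimal sketch of that lookup, assuming query is a hypothetical TimeseriesQuery and conglomerate is the instance built in makeToolbox (Result and TimeseriesResultValue come from io.druid.query and io.druid.query.timeseries):

QueryRunnerFactory<Result<TimeseriesResultValue>, TimeseriesQuery> factory = conglomerate.findFactory(query);
QueryToolChest toolChest = factory.getToolchest();
// with the factory in hand, callers build per-segment runners via factory.createRunner(segment)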

Example 43 with Query

Use of io.druid.query.Query in project druid by druid-io.

From class GroupByQueryRunnerTest, method testMergedHavingSpec().

@Test
public void testMergedHavingSpec() {
    List<Row> expectedResults = Arrays.asList(
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L)
    );
    GroupByQuery.Builder builder = GroupByQuery.builder()
            .setDataSource(QueryRunnerTestHelper.dataSource)
            .setInterval("2011-04-02/2011-04-04")
            .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
            .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
            .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
            .setHavingSpec(new OrHavingSpec(ImmutableList.<HavingSpec>of(
                    new GreaterThanHavingSpec("rows", 2L),
                    new EqualToHavingSpec("idx", 217L)
            )));
    GroupByQuery fullQuery = builder.build();
    QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<Row>() {

        @Override
        public Sequence<Row> run(Query<Row> query, Map<String, Object> responseContext) {
            // simulate two daily segments
            final Query query1 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))));
            final Query query2 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))));
            return new MergeSequence(query.getResultOrdering(), Sequences.simple(Arrays.asList(runner.run(query1, responseContext), runner.run(query2, responseContext))));
        }
    });
    Map<String, Object> context = Maps.newHashMap();
    TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, context), "merged");
}
Also used: DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), RegexFilteredDimensionSpec (io.druid.query.dimension.RegexFilteredDimensionSpec),
ExtractionDimensionSpec (io.druid.query.dimension.ExtractionDimensionSpec), ListFilteredDimensionSpec (io.druid.query.dimension.ListFilteredDimensionSpec),
DimensionSpec (io.druid.query.dimension.DimensionSpec), EqualToHavingSpec (io.druid.query.groupby.having.EqualToHavingSpec), Query (io.druid.query.Query),
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory), PeriodGranularity (io.druid.java.util.common.granularity.PeriodGranularity),
MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec), MergeSequence (io.druid.java.util.common.guava.MergeSequence),
GreaterThanHavingSpec (io.druid.query.groupby.having.GreaterThanHavingSpec), OrHavingSpec (io.druid.query.groupby.having.OrHavingSpec), Period (org.joda.time.Period),
Sequence (io.druid.java.util.common.guava.Sequence), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner), QueryRunner (io.druid.query.QueryRunner),
HavingSpec (io.druid.query.groupby.having.HavingSpec), DimFilterHavingSpec (io.druid.query.groupby.having.DimFilterHavingSpec),
BaseHavingSpec (io.druid.query.groupby.having.BaseHavingSpec), DimensionSelectorHavingSpec (io.druid.query.groupby.having.DimensionSelectorHavingSpec),
Row (io.druid.data.input.Row), Interval (org.joda.time.Interval), Test (org.junit.Test)
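
The merge step inside the inner runner is worth reading on its own: MergeSequence interleaves already-ordered sequences using the query's result ordering. A minimal sketch, where seq1 and seq2 are hypothetical ordered Sequence<Row> instances:

Sequence<Row> merged = new MergeSequence<>(
        query.getResultOrdering(),
        Sequences.simple(Arrays.asList(seq1, seq2))
);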

Example 44 with Query

Use of io.druid.query.Query in project druid by druid-io.

From class GroupByQueryRunnerTest, method doTestMergeResultsWithOrderBy().

private void doTestMergeResultsWithOrderBy(LimitSpec orderBySpec, List<Row> expectedResults) {
    GroupByQuery.Builder builder = GroupByQuery.builder()
            .setDataSource(QueryRunnerTestHelper.dataSource)
            .setInterval("2011-04-02/2011-04-04")
            .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
            .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
            .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
            .setLimitSpec(orderBySpec);
    final GroupByQuery fullQuery = builder.build();
    QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<Row>() {

        @Override
        public Sequence<Row> run(Query<Row> query, Map<String, Object> responseContext) {
            // simulate two daily segments
            final Query query1 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))));
            final Query query2 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))));
            return new MergeSequence(query.getResultOrdering(), Sequences.simple(Arrays.asList(runner.run(query1, responseContext), runner.run(query2, responseContext))));
        }
    });
    Map<String, Object> context = Maps.newHashMap();
    TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, context), "merged");
}
Also used: DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), RegexFilteredDimensionSpec (io.druid.query.dimension.RegexFilteredDimensionSpec),
ExtractionDimensionSpec (io.druid.query.dimension.ExtractionDimensionSpec), ListFilteredDimensionSpec (io.druid.query.dimension.ListFilteredDimensionSpec),
DimensionSpec (io.druid.query.dimension.DimensionSpec), Query (io.druid.query.Query), LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory),
PeriodGranularity (io.druid.java.util.common.granularity.PeriodGranularity), Period (org.joda.time.Period),
MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec), Sequence (io.druid.java.util.common.guava.Sequence),
MergeSequence (io.druid.java.util.common.guava.MergeSequence), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner),
QueryRunner (io.druid.query.QueryRunner), Row (io.druid.data.input.Row), Interval (org.joda.time.Interval)
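
A sketch of how a caller might invoke this helper with a concrete order-by spec. DefaultLimitSpec and OrderByColumnSpec live in io.druid.query.groupby.orderby; the limit value and expected rows here are hypothetical:

doTestMergeResultsWithOrderBy(
        new DefaultLimitSpec(ImmutableList.of(new OrderByColumnSpec("idx", OrderByColumnSpec.Direction.DESCENDING)), 3),
        expectedResults // hypothetical List<Row> matching the descending-idx ordering
);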

Example 45 with Query

Use of io.druid.query.Query in project druid by druid-io.

From class DataSourceMetadataQueryTest, method testQuerySerialization().

@Test
public void testQuerySerialization() throws IOException {
    Query query = Druids.newDataSourceMetadataQueryBuilder().dataSource("testing").build();
    String json = jsonMapper.writeValueAsString(query);
    Query serdeQuery = jsonMapper.readValue(json, Query.class);
    Assert.assertEquals(query, serdeQuery);
}
Also used: Query (io.druid.query.Query), Test (org.junit.Test)
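
The round trip works because Query carries Jackson polymorphic type information (a queryType discriminator), so reading back through the interface restores the concrete class. A minimal sketch of verifying that, assuming the same jsonMapper and an import of io.druid.query.datasourcemetadata.DataSourceMetadataQuery:

Query serdeQuery = jsonMapper.readValue(json, Query.class);
// the queryType discriminator restores the concrete subtype
Assert.assertTrue(serdeQuery instanceof DataSourceMetadataQuery);
Assert.assertEquals("dataSourceMetadata", serdeQuery.getType());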

Aggregations

Query (io.druid.query.Query): 48 uses
QueryRunner (io.druid.query.QueryRunner): 23 uses
Test (org.junit.Test): 22 uses
Interval (org.joda.time.Interval): 18 uses
Sequence (io.druid.java.util.common.guava.Sequence): 14 uses
Map (java.util.Map): 14 uses
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner): 11 uses
SegmentDescriptor (io.druid.query.SegmentDescriptor): 11 uses
IOException (java.io.IOException): 10 uses
Row (io.druid.data.input.Row): 9 uses
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 9 uses
Result (io.druid.query.Result): 9 uses
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 8 uses
Function (com.google.common.base.Function): 8 uses
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 8 uses
TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery): 8 uses
MergeSequence (io.druid.java.util.common.guava.MergeSequence): 7 uses
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 7 uses
MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec): 7 uses
ImmutableMap (com.google.common.collect.ImmutableMap): 6 uses