Search in sources:

Example 1 with MultipleSpecificSegmentSpec

Use of org.apache.druid.query.spec.MultipleSpecificSegmentSpec in project druid by druid-io.

The class CachingClusteredClientTest, method testSingleDimensionPruning.

@Test
public void testSingleDimensionPruning() {
    DimFilter filter = new AndDimFilter(
        new OrDimFilter(
            new SelectorDimFilter("dim1", "a", null),
            new BoundDimFilter("dim1", "from", "to", false, false, false, null, StringComparators.LEXICOGRAPHIC)),
        new AndDimFilter(
            new InDimFilter("dim2", Arrays.asList("a", "c", "e", "g"), null),
            new BoundDimFilter("dim2", "aaa", "hi", false, false, false, null, StringComparators.LEXICOGRAPHIC),
            new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC)));
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .filters(filter)
        .granularity(GRANULARITY)
        .intervals(SEG_SPEC)
        .context(CONTEXT)
        // this second intervals() call replaces SEG_SPEC with the narrower interval
        .intervals("2011-01-05/2011-01-10")
        .aggregators(RENAMED_AGGS)
        .postAggregators(RENAMED_POST_AGGS);
    TimeseriesQuery query = builder.randomQueryId().build();
    final Interval interval1 = Intervals.of("2011-01-06/2011-01-07");
    final Interval interval2 = Intervals.of("2011-01-07/2011-01-08");
    final Interval interval3 = Intervals.of("2011-01-08/2011-01-09");
    QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
    final DruidServer lastServer = servers[random.nextInt(servers.length)];
    ServerSelector selector1 = makeMockSingleDimensionSelector(lastServer, "dim1", null, "b", 0);
    ServerSelector selector2 = makeMockSingleDimensionSelector(lastServer, "dim1", "e", "f", 1);
    ServerSelector selector3 = makeMockSingleDimensionSelector(lastServer, "dim1", "hi", "zzz", 2);
    ServerSelector selector4 = makeMockSingleDimensionSelector(lastServer, "dim2", "a", "e", 0);
    ServerSelector selector5 = makeMockSingleDimensionSelector(lastServer, "dim2", null, null, 1);
    ServerSelector selector6 = makeMockSingleDimensionSelector(lastServer, "other", "b", null, 0);
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(0, 3, selector1));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(1, 3, selector2));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(2, 3, selector3));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(0, 2, selector4));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(1, 2, selector5));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(0, 1, selector6));
    final Capture<QueryPlus> capture = Capture.newInstance();
    final Capture<ResponseContext> contextCap = Capture.newInstance();
    QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
    EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap))).andReturn(Sequences.empty()).anyTimes();
    EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
    EasyMock.replay(serverView);
    EasyMock.replay(mockRunner);
    List<SegmentDescriptor> descriptors = new ArrayList<>();
    descriptors.add(new SegmentDescriptor(interval1, "v", 0));
    descriptors.add(new SegmentDescriptor(interval1, "v", 2));
    descriptors.add(new SegmentDescriptor(interval2, "v", 1));
    descriptors.add(new SegmentDescriptor(interval3, "v", 0));
    MultipleSpecificSegmentSpec expected = new MultipleSpecificSegmentSpec(descriptors);
    runner.run(QueryPlus.wrap(query)).toList();
    Assert.assertEquals(expected, ((TimeseriesQuery) capture.getValue().getQuery()).getQuerySegmentSpec());
}
Also used: MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) BoundDimFilter(org.apache.druid.query.filter.BoundDimFilter) AndDimFilter(org.apache.druid.query.filter.AndDimFilter) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) ArrayList(java.util.ArrayList) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner) ServerSelector(org.apache.druid.client.selector.ServerSelector) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) Druids(org.apache.druid.query.Druids) ResponseContext(org.apache.druid.query.context.ResponseContext) OrDimFilter(org.apache.druid.query.filter.OrDimFilter) InDimFilter(org.apache.druid.query.filter.InDimFilter) DimFilter(org.apache.druid.query.filter.DimFilter) Interval(org.joda.time.Interval) QueryPlus(org.apache.druid.query.QueryPlus) Test(org.junit.Test)
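
For reference, the spec the test asserts on is just an ordered list of SegmentDescriptor entries. Below is a minimal sketch of that construction, assuming only the constructor used above; the interval string, version "v", and partition numbers are illustrative, not taken from a real cluster.

import java.util.Arrays;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.query.spec.MultipleSpecificSegmentSpec;

// Pin a query to two specific partitions of one versioned interval.
MultipleSpecificSegmentSpec spec = new MultipleSpecificSegmentSpec(
    Arrays.asList(
        new SegmentDescriptor(Intervals.of("2011-01-06/2011-01-07"), "v", 0),
        new SegmentDescriptor(Intervals.of("2011-01-06/2011-01-07"), "v", 2)));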

Example 2 with MultipleSpecificSegmentSpec

Use of org.apache.druid.query.spec.MultipleSpecificSegmentSpec in project druid by druid-io.

The class CachingClusteredClientTest, method toFilteredQueryableTimeseriesResults.

private Sequence<Result<TimeseriesResultValue>> toFilteredQueryableTimeseriesResults(
        TimeseriesQuery query,
        List<SegmentId> segmentIds,
        List<Interval> queryIntervals,
        List<Iterable<Result<TimeseriesResultValue>>> results) {
    MultipleSpecificSegmentSpec spec = (MultipleSpecificSegmentSpec) query.getQuerySegmentSpec();
    List<Result<TimeseriesResultValue>> ret = new ArrayList<>();
    for (SegmentDescriptor descriptor : spec.getDescriptors()) {
        SegmentId id = SegmentId.dummy(StringUtils.format(
            "%s_%s",
            queryIntervals.indexOf(descriptor.getInterval()),
            descriptor.getPartitionNumber()));
        int index = segmentIds.indexOf(id);
        if (index != -1) {
            Result result = new Result(
                results.get(index).iterator().next().getTimestamp(),
                new BySegmentResultValueClass(
                    Lists.newArrayList(results.get(index)),
                    id.toString(),
                    descriptor.getInterval()));
            ret.add(result);
        } else {
            throw new ISE("Descriptor %s not found in server", id);
        }
    }
    return Sequences.simple(ret);
}
Also used: MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) SegmentId(org.apache.druid.timeline.SegmentId) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ArrayList(java.util.ArrayList) BySegmentResultValueClass(org.apache.druid.query.BySegmentResultValueClass) ISE(org.apache.druid.java.util.common.ISE) Result(org.apache.druid.query.Result)
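
The helper above assumes getDescriptors() hands back exactly the descriptors the spec was constructed with, in order. A minimal round-trip sketch under that assumption:

import java.util.Collections;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.query.spec.MultipleSpecificSegmentSpec;

// Round trip: the descriptor list passed to the constructor is what
// getDescriptors() returns.
SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2011-01-06/2011-01-07"), "v", 0);
MultipleSpecificSegmentSpec spec = new MultipleSpecificSegmentSpec(Collections.singletonList(descriptor));
assert spec.getDescriptors().equals(Collections.singletonList(descriptor));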

Example 3 with MultipleSpecificSegmentSpec

Use of org.apache.druid.query.spec.MultipleSpecificSegmentSpec in project druid by druid-io.

The class StreamAppenderatorTest, method testQueryBySegments.

@Test
public void testQueryBySegments() throws Exception {
    try (final StreamAppenderatorTester tester = new StreamAppenderatorTester(2, true)) {
        final Appenderator appenderator = tester.getAppenderator();
        appenderator.startJob();
        appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), Suppliers.ofInstance(Committers.nil()));
        appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 2), Suppliers.ofInstance(Committers.nil()));
        appenderator.add(IDENTIFIERS.get(1), ir("2000", "foo", 4), Suppliers.ofInstance(Committers.nil()));
        appenderator.add(IDENTIFIERS.get(2), ir("2001", "foo", 8), Suppliers.ofInstance(Committers.nil()));
        appenderator.add(IDENTIFIERS.get(2), ir("2001T01", "foo", 16), Suppliers.ofInstance(Committers.nil()));
        appenderator.add(IDENTIFIERS.get(2), ir("2001T02", "foo", 32), Suppliers.ofInstance(Committers.nil()));
        appenderator.add(IDENTIFIERS.get(2), ir("2001T03", "foo", 64), Suppliers.ofInstance(Committers.nil()));
        // Query1: segment #2
        final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
            .dataSource(StreamAppenderatorTester.DATASOURCE)
            .aggregators(Arrays.asList(
                new LongSumAggregatorFactory("count", "count"),
                new LongSumAggregatorFactory("met", "met")))
            .granularity(Granularities.DAY)
            .intervals(new MultipleSpecificSegmentSpec(ImmutableList.of(
                new SegmentDescriptor(
                    IDENTIFIERS.get(2).getInterval(),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()))))
            .build();
        final List<Result<TimeseriesResultValue>> results1 = QueryPlus.wrap(query1).run(appenderator, ResponseContext.createEmpty()).toList();
        Assert.assertEquals("query1", ImmutableList.of(new Result<>(DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 120L)))), results1);
        // Query2: segment #2, partial
        final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
            .dataSource(StreamAppenderatorTester.DATASOURCE)
            .aggregators(Arrays.asList(
                new LongSumAggregatorFactory("count", "count"),
                new LongSumAggregatorFactory("met", "met")))
            .granularity(Granularities.DAY)
            .intervals(new MultipleSpecificSegmentSpec(ImmutableList.of(
                new SegmentDescriptor(
                    Intervals.of("2001/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()))))
            .build();
        final List<Result<TimeseriesResultValue>> results2 = QueryPlus.wrap(query2).run(appenderator, ResponseContext.createEmpty()).toList();
        Assert.assertEquals("query2", ImmutableList.of(new Result<>(DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)))), results2);
        // Query3: segment #2, two disjoint intervals
        final TimeseriesQuery query3 = Druids.newTimeseriesQueryBuilder()
            .dataSource(StreamAppenderatorTester.DATASOURCE)
            .aggregators(Arrays.asList(
                new LongSumAggregatorFactory("count", "count"),
                new LongSumAggregatorFactory("met", "met")))
            .granularity(Granularities.DAY)
            .intervals(new MultipleSpecificSegmentSpec(ImmutableList.of(
                new SegmentDescriptor(
                    Intervals.of("2001/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()),
                new SegmentDescriptor(
                    Intervals.of("2001T03/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()))))
            .build();
        final List<Result<TimeseriesResultValue>> results3 = QueryPlus.wrap(query3).run(appenderator, ResponseContext.createEmpty()).toList();
        Assert.assertEquals("query3", ImmutableList.of(new Result<>(DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)))), results3);
        // Query4: same two disjoint intervals as query3, issued as a scan query
        final ScanQuery query4 = Druids.newScanQueryBuilder()
            .dataSource(StreamAppenderatorTester.DATASOURCE)
            .intervals(new MultipleSpecificSegmentSpec(ImmutableList.of(
                new SegmentDescriptor(
                    Intervals.of("2001/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()),
                new SegmentDescriptor(
                    Intervals.of("2001T03/PT1H"),
                    IDENTIFIERS.get(2).getVersion(),
                    IDENTIFIERS.get(2).getShardSpec().getPartitionNum()))))
            .order(ScanQuery.Order.ASCENDING)
            .batchSize(10)
            .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
            .build();
        final List<ScanResultValue> results4 = QueryPlus.wrap(query4).run(appenderator, ResponseContext.createEmpty()).toList();
        // Two descriptors over the same segment: two results, one row each
        Assert.assertEquals(2, results4.size());
        Assert.assertArrayEquals(new String[] { "__time", "dim", "count", "met" }, results4.get(0).getColumns().toArray());
        Assert.assertArrayEquals(new Object[] { DateTimes.of("2001").getMillis(), "foo", 1L, 8L }, ((List<Object>) ((List<Object>) results4.get(0).getEvents()).get(0)).toArray());
        Assert.assertArrayEquals(new String[] { "__time", "dim", "count", "met" }, results4.get(1).getColumns().toArray());
        Assert.assertArrayEquals(new Object[] { DateTimes.of("2001T03").getMillis(), "foo", 1L, 64L }, ((List<Object>) ((List<Object>) results4.get(1).getEvents()).get(0)).toArray());
    }
}
Also used: MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) ScanQuery(org.apache.druid.query.scan.ScanQuery) Result(org.apache.druid.query.Result) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ScanResultValue(org.apache.druid.query.scan.ScanResultValue) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
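
A note on the interval strings these tests lean on: Joda-Time accepts "start/period" and "period/end" ISO-8601 forms, so "2001/PT1H" above is the single hour starting at 2001-01-01T00:00, and "P1d/2011-04-01" in Example 5 below is the day ending at 2011-04-01. A small sketch, assuming only Druid's Intervals helper:

import org.apache.druid.java.util.common.Intervals;
import org.joda.time.Interval;

// "start/period": the hour beginning 2001-01-01T00:00.
Interval hour = Intervals.of("2001/PT1H");
// "period/end": the day ending at 2011-04-01T00:00.
Interval day = Intervals.of("P1d/2011-04-01");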

Example 4 with MultipleSpecificSegmentSpec

Use of org.apache.druid.query.spec.MultipleSpecificSegmentSpec in project druid by druid-io.

The class DruidSchemaTest, method testRunSegmentMetadataQueryWithContext.

/**
 * Ensure that the BrokerInternalQueryConfig context is honored for this internally generated SegmentMetadata Query
 */
@Test
public void testRunSegmentMetadataQueryWithContext() throws Exception {
    Map<String, Object> queryContext = ImmutableMap.of("priority", 5);
    String brokerInternalQueryConfigJson = "{\"context\": { \"priority\": 5} }";
    TestHelper.makeJsonMapper();
    BrokerInternalQueryConfig brokerInternalQueryConfig = MAPPER.readValue(
        MAPPER.writeValueAsString(MAPPER.readValue(brokerInternalQueryConfigJson, BrokerInternalQueryConfig.class)),
        BrokerInternalQueryConfig.class);
    DataSegment segment = newSegment("test", 0);
    List<SegmentId> segmentIterable = ImmutableList.of(segment.getId());
    // This is the query that we expect this method to create. We will be testing that it matches the query generated by the method under test.
    SegmentMetadataQuery expectedMetadataQuery = new SegmentMetadataQuery(
        new TableDataSource(segment.getDataSource()),
        new MultipleSpecificSegmentSpec(
            segmentIterable.stream().map(SegmentId::toDescriptor).collect(Collectors.toList())),
        new AllColumnIncluderator(),
        false,
        queryContext,
        EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class),
        false,
        false);
    QueryLifecycleFactory factoryMock = EasyMock.createMock(QueryLifecycleFactory.class);
    QueryLifecycle lifecycleMock = EasyMock.createMock(QueryLifecycle.class);
    // We need to create a schema here because the schemas already available in this test class don't mock the QueryLifecycleFactory, which this test requires.
    DruidSchema mySchema = new DruidSchema(
        factoryMock,
        serverView,
        segmentManager,
        new MapJoinableFactory(
            ImmutableSet.of(globalTableJoinable),
            ImmutableMap.of(globalTableJoinable.getClass(), GlobalTableDataSource.class)),
        PLANNER_CONFIG_DEFAULT,
        new NoopEscalator(),
        brokerInternalQueryConfig,
        null);
    EasyMock.expect(factoryMock.factorize()).andReturn(lifecycleMock).once();
    // This is the meat of the test: making sure that the query created by the method under test matches the expected query, specifically the operator-configured context.
    EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery, AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK)).andReturn(null);
    EasyMock.replay(factoryMock, lifecycleMock);
    mySchema.runSegmentMetadataQuery(segmentIterable);
    EasyMock.verify(factoryMock, lifecycleMock);
}
Also used: MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) QueryLifecycle(org.apache.druid.server.QueryLifecycle) SegmentId(org.apache.druid.timeline.SegmentId) DataSegment(org.apache.druid.timeline.DataSegment) AllColumnIncluderator(org.apache.druid.query.metadata.metadata.AllColumnIncluderator) NoopEscalator(org.apache.druid.server.security.NoopEscalator) GlobalTableDataSource(org.apache.druid.query.GlobalTableDataSource) TableDataSource(org.apache.druid.query.TableDataSource) QueryLifecycleFactory(org.apache.druid.server.QueryLifecycleFactory) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) BrokerInternalQueryConfig(org.apache.druid.client.BrokerInternalQueryConfig) MapJoinableFactory(org.apache.druid.segment.join.MapJoinableFactory) Test(org.junit.Test)
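
The conversion from segment ids to descriptors in this test is worth calling out on its own. A minimal sketch, where segmentIds is a hypothetical List<SegmentId> standing in for whatever ids the caller holds:

import java.util.List;
import java.util.stream.Collectors;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.query.spec.MultipleSpecificSegmentSpec;
import org.apache.druid.timeline.SegmentId;

// Build a spec straight from SegmentIds via SegmentId::toDescriptor.
// segmentIds is a placeholder, not part of the test above.
List<SegmentDescriptor> descriptors =
    segmentIds.stream().map(SegmentId::toDescriptor).collect(Collectors.toList());
MultipleSpecificSegmentSpec spec = new MultipleSpecificSegmentSpec(descriptors);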

Example 5 with MultipleSpecificSegmentSpec

Use of org.apache.druid.query.spec.MultipleSpecificSegmentSpec in project druid by druid-io.

The class ServerManagerTest, method testGetQueryRunnerForSegmentsForUnknownQueryThrowingException.

@Test
public void testGetQueryRunnerForSegmentsForUnknownQueryThrowingException() {
    final Interval interval = Intervals.of("P1d/2011-04-01");
    final List<SegmentDescriptor> descriptors = Collections.singletonList(new SegmentDescriptor(interval, "1", 0));
    expectedException.expect(QueryUnsupportedException.class);
    expectedException.expectMessage("Unknown query type");
    serverManager.getQueryRunnerForSegments(new BaseQuery<Object>(
        new TableDataSource("test"),
        new MultipleSpecificSegmentSpec(descriptors),
        false,
        new HashMap<>()) {

        @Override
        public boolean hasFilters() {
            return false;
        }

        @Override
        public DimFilter getFilter() {
            return null;
        }

        @Override
        public String getType() {
            return null;
        }

        @Override
        public Query<Object> withOverriddenContext(Map<String, Object> contextOverride) {
            return null;
        }

        @Override
        public Query<Object> withQuerySegmentSpec(QuerySegmentSpec spec) {
            return null;
        }

        @Override
        public Query<Object> withDataSource(DataSource dataSource) {
            return null;
        }
    }, descriptors);
}
Also used: MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) BaseQuery(org.apache.druid.query.BaseQuery) Query(org.apache.druid.query.Query) SearchQuery(org.apache.druid.query.search.SearchQuery) HashMap(java.util.HashMap) DataSource(org.apache.druid.query.DataSource) TableDataSource(org.apache.druid.query.TableDataSource) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) QuerySegmentSpec(org.apache.druid.query.spec.QuerySegmentSpec) DimFilter(org.apache.druid.query.filter.DimFilter) Interval(org.joda.time.Interval) Test(org.junit.Test)

Aggregations

MultipleSpecificSegmentSpec (org.apache.druid.query.spec.MultipleSpecificSegmentSpec): 13 usages
SegmentDescriptor (org.apache.druid.query.SegmentDescriptor): 9 usages
TableDataSource (org.apache.druid.query.TableDataSource): 6 usages
Test (org.junit.Test): 6 usages
ArrayList (java.util.ArrayList): 5 usages
Query (org.apache.druid.query.Query): 5 usages
QueryRunner (org.apache.druid.query.QueryRunner): 4 usages
Result (org.apache.druid.query.Result): 4 usages
ScanQuery (org.apache.druid.query.scan.ScanQuery): 4 usages
SegmentId (org.apache.druid.timeline.SegmentId): 4 usages
Druids (org.apache.druid.query.Druids): 3 usages
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 3 usages
QueryPlus (org.apache.druid.query.QueryPlus): 3 usages
ResponseContext (org.apache.druid.query.context.ResponseContext): 3 usages
ScanResultValue (org.apache.druid.query.scan.ScanResultValue): 3 usages
ImmutableList (com.google.common.collect.ImmutableList): 2 usages
List (java.util.List): 2 usages
QueryableDruidServer (org.apache.druid.client.selector.QueryableDruidServer): 2 usages
ServerSelector (org.apache.druid.client.selector.ServerSelector): 2 usages
ISE (org.apache.druid.java.util.common.ISE): 2 usages