Search in sources :

Example 51 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From the class ClientQuerySegmentWalkerTest, the method testJoinOnGroupByOnUnionOfTables:

@Test
public void testJoinOnGroupByOnUnionOfTables() {
    // Union of two concrete tables (FOO and BAR); used both as the left-hand side of the
    // join and as the data source of the grouping subquery below.
    final UnionDataSource unionDataSource = new UnionDataSource(ImmutableList.of(new TableDataSource(FOO), new TableDataSource(BAR)));
    // Subquery: group the union on dimension "s", keeping only rows where s == "y".
    final GroupByQuery subquery = GroupByQuery.builder().setDataSource(unionDataSource).setGranularity(Granularities.ALL).setInterval(Collections.singletonList(INTERVAL)).setDimensions(DefaultDimensionSpec.of("s")).setDimFilter(new SelectorDimFilter("s", "y", null)).build();
    // Outer query: INNER join of the union against the subquery's result (right-hand columns
    // prefixed "j."), grouping on both "s" and "j.s" with a count aggregator.
    final GroupByQuery query = (GroupByQuery) GroupByQuery.builder().setDataSource(JoinDataSource.create(unionDataSource, new QueryDataSource(subquery), "j.", "\"j.s\" == \"s\"", JoinType.INNER, null, ExprMacroTable.nil())).setGranularity(Granularities.ALL).setInterval(Intervals.ONLY_ETERNITY).setDimensions(DefaultDimensionSpec.of("s"), DefaultDimensionSpec.of("j.s")).setAggregatorSpecs(new CountAggregatorFactory("cnt")).build().withId(DUMMY_QUERY_ID);
    // Expected decomposition: the subquery runs once per union member on the cluster
    // (sub-query ids "2.1.foo.1" / "2.1.bar.2"), then the joined outer query runs once per
    // union member with the subquery result inlined ("foo.1" / "bar.2"); the final result is
    // the single row {"y", "y", 1}.
    testQuery(query, ImmutableList.of(ExpectedQuery.cluster(subquery.withDataSource(subquery.getDataSource().getChildren().get(0)).withId(DUMMY_QUERY_ID).withSubQueryId("2.1.foo.1")), ExpectedQuery.cluster(subquery.withDataSource(subquery.getDataSource().getChildren().get(1)).withId(DUMMY_QUERY_ID).withSubQueryId("2.1.bar.2")), ExpectedQuery.cluster(query.withDataSource(query.getDataSource().withChildren(ImmutableList.of(unionDataSource.getChildren().get(0), InlineDataSource.fromIterable(ImmutableList.of(new Object[] { "y" }), RowSignature.builder().add("s", ColumnType.STRING).build())))).withSubQueryId("foo.1")), ExpectedQuery.cluster(query.withDataSource(query.getDataSource().withChildren(ImmutableList.of(unionDataSource.getChildren().get(1), InlineDataSource.fromIterable(ImmutableList.of(new Object[] { "y" }), RowSignature.builder().add("s", ColumnType.STRING).build())))).withSubQueryId("bar.2"))), ImmutableList.of(new Object[] { "y", "y", 1L }));
    // note: this should really be 1, but in the interim, queries that are composed of multiple
    // queries count each invocation of either the cluster or local walker in
    // ClientQuerySegmentWalker — hence the four sub-queries above yield a count of 4.
    Assert.assertEquals(4, scheduler.getTotalRun().get());
    Assert.assertEquals(4, scheduler.getTotalPrioritizedAndLaned().get());
    Assert.assertEquals(4, scheduler.getTotalAcquired().get());
    Assert.assertEquals(4, scheduler.getTotalReleased().get());
}
Also used : GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) GlobalTableDataSource(org.apache.druid.query.GlobalTableDataSource) TableDataSource(org.apache.druid.query.TableDataSource) QueryDataSource(org.apache.druid.query.QueryDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) UnionDataSource(org.apache.druid.query.UnionDataSource) Test(org.junit.Test)

Example 52 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From the class ClientQuerySegmentWalkerTest, the method testGroupByOnUnionOfTwoTables:

@Test
public void testGroupByOnUnionOfTwoTables() {
    // Group on dimension "s" with a count aggregator, reading from a union of FOO and BAR.
    final UnionDataSource twoTableUnion =
        new UnionDataSource(ImmutableList.of(new TableDataSource(FOO), new TableDataSource(BAR)));
    final GroupByQuery query = (GroupByQuery) GroupByQuery
        .builder()
        .setDataSource(twoTableUnion)
        .setGranularity(Granularities.ALL)
        .setInterval(Intervals.ONLY_ETERNITY)
        .setDimensions(DefaultDimensionSpec.of("s"))
        .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
        .build()
        .withId(DUMMY_QUERY_ID);

    // Expect the walker to split the union into one cluster query per table, each tagged with
    // its own sub-query id, and merge the per-table results into a single sorted result set.
    testQuery(
        query,
        ImmutableList.of(
            ExpectedQuery.cluster(query.withDataSource(new TableDataSource(FOO)).withSubQueryId("foo.1")),
            ExpectedQuery.cluster(query.withDataSource(new TableDataSource(BAR)).withSubQueryId("bar.2"))
        ),
        ImmutableList.of(
            new Object[] { "a", 2L },
            new Object[] { "b", 1L },
            new Object[] { "c", 1L },
            new Object[] { "x", 2L },
            new Object[] { "y", 1L },
            new Object[] { "z", 1L }
        )
    );

    // note: this should really be 1, but in the interim, queries that are composed of multiple
    // queries count each invocation of either the cluster or local walker in
    // ClientQuerySegmentWalker — two per-table queries, hence 2.
    Assert.assertEquals(2, scheduler.getTotalRun().get());
    Assert.assertEquals(2, scheduler.getTotalPrioritizedAndLaned().get());
    Assert.assertEquals(2, scheduler.getTotalAcquired().get());
    Assert.assertEquals(2, scheduler.getTotalReleased().get());
}
Also used : GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) GlobalTableDataSource(org.apache.druid.query.GlobalTableDataSource) TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) UnionDataSource(org.apache.druid.query.UnionDataSource) Test(org.junit.Test)

Example 53 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From the class ClientQuerySegmentWalkerTest, the method testGroupByOnUnionOfOneTable:

@Test
public void testGroupByOnUnionOfOneTable() {
    // A union wrapping a single table should behave like querying that table directly.
    final UnionDataSource singleTableUnion =
        new UnionDataSource(ImmutableList.of(new TableDataSource(FOO)));
    final GroupByQuery query = (GroupByQuery) GroupByQuery
        .builder()
        .setDataSource(singleTableUnion)
        .setGranularity(Granularities.ALL)
        .setInterval(Intervals.ONLY_ETERNITY)
        .setDimensions(DefaultDimensionSpec.of("s"))
        .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
        .build()
        .withId(DUMMY_QUERY_ID);

    // Expect exactly one cluster query, with the union unwrapped to the plain table.
    testQuery(
        query,
        ImmutableList.of(ExpectedQuery.cluster(query.withDataSource(new TableDataSource(FOO)))),
        ImmutableList.of(
            new Object[] { "x", 2L },
            new Object[] { "y", 1L },
            new Object[] { "z", 1L }
        )
    );

    // A single cluster query means every scheduler counter is exactly 1.
    Assert.assertEquals(1, scheduler.getTotalRun().get());
    Assert.assertEquals(1, scheduler.getTotalPrioritizedAndLaned().get());
    Assert.assertEquals(1, scheduler.getTotalAcquired().get());
    Assert.assertEquals(1, scheduler.getTotalReleased().get());
}
Also used : GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) GlobalTableDataSource(org.apache.druid.query.GlobalTableDataSource) TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) UnionDataSource(org.apache.druid.query.UnionDataSource) Test(org.junit.Test)

Example 54 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From the class DruidSchemaTest, the method testRunSegmentMetadataQueryWithContext:

/**
 * Ensure that the BrokerInternalQueryConfig context is honored for this internally generated
 * SegmentMetadata query.
 */
@Test
public void testRunSegmentMetadataQueryWithContext() throws Exception {
    // The context we expect the schema to attach to its internally generated query.
    Map<String, Object> queryContext = ImmutableMap.of("priority", 5);

    // Round-trip the config through JSON, as it would be when loaded from broker configuration.
    // (The original code also called TestHelper.makeJsonMapper() and discarded the result;
    // that dead statement has been removed.)
    String brokerInternalQueryConfigJson = "{\"context\": { \"priority\": 5} }";
    BrokerInternalQueryConfig brokerInternalQueryConfig = MAPPER.readValue(
        MAPPER.writeValueAsString(
            MAPPER.readValue(brokerInternalQueryConfigJson, BrokerInternalQueryConfig.class)
        ),
        BrokerInternalQueryConfig.class
    );

    DataSegment segment = newSegment("test", 0);
    List<SegmentId> segmentIterable = ImmutableList.of(segment.getId());

    // This is the query that we expect this method to create. We will be testing that it
    // matches the query generated by the method under test.
    SegmentMetadataQuery expectedMetadataQuery = new SegmentMetadataQuery(
        new TableDataSource(segment.getDataSource()),
        new MultipleSpecificSegmentSpec(
            segmentIterable.stream().map(SegmentId::toDescriptor).collect(Collectors.toList())
        ),
        new AllColumnIncluderator(),
        false,
        queryContext,
        EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class),
        false,
        false
    );

    QueryLifecycleFactory factoryMock = EasyMock.createMock(QueryLifecycleFactory.class);
    QueryLifecycle lifecycleMock = EasyMock.createMock(QueryLifecycle.class);

    // Need to create a schema for this test because the available schemas don't mock the
    // QueryLifecycleFactory, which this test needs.
    DruidSchema mySchema = new DruidSchema(
        factoryMock,
        serverView,
        segmentManager,
        new MapJoinableFactory(
            ImmutableSet.of(globalTableJoinable),
            ImmutableMap.of(globalTableJoinable.getClass(), GlobalTableDataSource.class)
        ),
        PLANNER_CONFIG_DEFAULT,
        new NoopEscalator(),
        brokerInternalQueryConfig,
        null
    );

    EasyMock.expect(factoryMock.factorize()).andReturn(lifecycleMock).once();
    // This is the meat of the test: making sure that the query created by the method under
    // test matches the expected query — specifically, the configured context.
    EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery, AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK)).andReturn(null);
    EasyMock.replay(factoryMock, lifecycleMock);

    mySchema.runSegmentMetadataQuery(segmentIterable);
    EasyMock.verify(factoryMock, lifecycleMock);
}
Also used : MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) QueryLifecycle(org.apache.druid.server.QueryLifecycle) SegmentId(org.apache.druid.timeline.SegmentId) DataSegment(org.apache.druid.timeline.DataSegment) AllColumnIncluderator(org.apache.druid.query.metadata.metadata.AllColumnIncluderator) NoopEscalator(org.apache.druid.server.security.NoopEscalator) GlobalTableDataSource(org.apache.druid.query.GlobalTableDataSource) TableDataSource(org.apache.druid.query.TableDataSource) QueryLifecycleFactory(org.apache.druid.server.QueryLifecycleFactory) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) BrokerInternalQueryConfig(org.apache.druid.client.BrokerInternalQueryConfig) MapJoinableFactory(org.apache.druid.segment.join.MapJoinableFactory) Test(org.junit.Test)

Example 55 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

From the class ServerManagerTest, the method testGetQueryRunnerForSegmentsForUnknownQueryThrowingException:

@Test
public void testGetQueryRunnerForSegmentsForUnknownQueryThrowingException() {
    final Interval interval = Intervals.of("P1d/2011-04-01");
    final List<SegmentDescriptor> descriptors = Collections.singletonList(new SegmentDescriptor(interval, "1", 0));
    // The walker should reject a query type it does not recognize with this exact error.
    expectedException.expect(QueryUnsupportedException.class);
    expectedException.expectMessage("Unknown query type");
    // Anonymous BaseQuery subclass representing an unregistered query type: getType() returns
    // null, so serverManager cannot resolve it and is expected to throw. The remaining
    // overrides are minimal stubs — presumably never reached because the query is rejected
    // first (NOTE(review): confirm against ServerManager.getQueryRunnerForSegments).
    serverManager.getQueryRunnerForSegments(new BaseQuery<Object>(new TableDataSource("test"), new MultipleSpecificSegmentSpec(descriptors), false, new HashMap<>()) {

        @Override
        public boolean hasFilters() {
            return false;
        }

        @Override
        public DimFilter getFilter() {
            return null;
        }

        // Returning null marks this as an unknown query type.
        @Override
        public String getType() {
            return null;
        }

        @Override
        public Query<Object> withOverriddenContext(Map<String, Object> contextOverride) {
            return null;
        }

        @Override
        public Query<Object> withQuerySegmentSpec(QuerySegmentSpec spec) {
            return null;
        }

        @Override
        public Query<Object> withDataSource(DataSource dataSource) {
            return null;
        }
    }, descriptors);
}
Also used : MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) BaseQuery(org.apache.druid.query.BaseQuery) Query(org.apache.druid.query.Query) SearchQuery(org.apache.druid.query.search.SearchQuery) HashMap(java.util.HashMap) DataSource(org.apache.druid.query.DataSource) TableDataSource(org.apache.druid.query.TableDataSource) TableDataSource(org.apache.druid.query.TableDataSource) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) QuerySegmentSpec(org.apache.druid.query.spec.QuerySegmentSpec) DimFilter(org.apache.druid.query.filter.DimFilter) Interval(org.joda.time.Interval) Test(org.junit.Test)

Aggregations

TableDataSource (org.apache.druid.query.TableDataSource)118 Test (org.junit.Test)94 GlobalTableDataSource (org.apache.druid.query.GlobalTableDataSource)46 CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory)43 QueryDataSource (org.apache.druid.query.QueryDataSource)41 DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec)40 Parameters (junitparams.Parameters)30 MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec)19 LookupDataSource (org.apache.druid.query.LookupDataSource)18 DataSegment (org.apache.druid.timeline.DataSegment)15 Result (org.apache.druid.query.Result)14 CountDownLatch (java.util.concurrent.CountDownLatch)11 Query (org.apache.druid.query.Query)11 TimelineObjectHolder (org.apache.druid.timeline.TimelineObjectHolder)11 Interval (org.joda.time.Interval)11 SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter)10 ArrayList (java.util.ArrayList)9 GroupByQuery (org.apache.druid.query.groupby.GroupByQuery)9 ISE (org.apache.druid.java.util.common.ISE)8 SegmentDescriptor (org.apache.druid.query.SegmentDescriptor)8