Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From class ClientQuerySegmentWalkerTest, method testJoinOnGroupByOnUnionOfTables.
@Test
public void testJoinOnGroupByOnUnionOfTables() {
final UnionDataSource unionDataSource = new UnionDataSource(
    ImmutableList.of(new TableDataSource(FOO), new TableDataSource(BAR))
);
final GroupByQuery subquery = GroupByQuery.builder()
    .setDataSource(unionDataSource)
    .setGranularity(Granularities.ALL)
    .setInterval(Collections.singletonList(INTERVAL))
    .setDimensions(DefaultDimensionSpec.of("s"))
    .setDimFilter(new SelectorDimFilter("s", "y", null))
    .build();
final GroupByQuery query = (GroupByQuery) GroupByQuery.builder()
    .setDataSource(
        JoinDataSource.create(
            unionDataSource,
            new QueryDataSource(subquery),
            "j.",
            "\"j.s\" == \"s\"",
            JoinType.INNER,
            null,
            ExprMacroTable.nil()
        )
    )
    .setGranularity(Granularities.ALL)
    .setInterval(Intervals.ONLY_ETERNITY)
    .setDimensions(DefaultDimensionSpec.of("s"), DefaultDimensionSpec.of("j.s"))
    .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
    .build()
    .withId(DUMMY_QUERY_ID);
testQuery(
    query,
    ImmutableList.of(
        ExpectedQuery.cluster(
            subquery.withDataSource(subquery.getDataSource().getChildren().get(0))
                .withId(DUMMY_QUERY_ID)
                .withSubQueryId("2.1.foo.1")
        ),
        ExpectedQuery.cluster(
            subquery.withDataSource(subquery.getDataSource().getChildren().get(1))
                .withId(DUMMY_QUERY_ID)
                .withSubQueryId("2.1.bar.2")
        ),
        ExpectedQuery.cluster(
            query.withDataSource(
                query.getDataSource().withChildren(
                    ImmutableList.of(
                        unionDataSource.getChildren().get(0),
                        InlineDataSource.fromIterable(
                            ImmutableList.of(new Object[]{"y"}),
                            RowSignature.builder().add("s", ColumnType.STRING).build()
                        )
                    )
                )
            ).withSubQueryId("foo.1")
        ),
        ExpectedQuery.cluster(
            query.withDataSource(
                query.getDataSource().withChildren(
                    ImmutableList.of(
                        unionDataSource.getChildren().get(1),
                        InlineDataSource.fromIterable(
                            ImmutableList.of(new Object[]{"y"}),
                            RowSignature.builder().add("s", ColumnType.STRING).build()
                        )
                    )
                )
            ).withSubQueryId("bar.2")
        )
    ),
    ImmutableList.of(new Object[]{"y", "y", 1L})
);
// note: this should really be 1, but in the interim, queries that are composed of multiple subqueries
// count each invocation of either the cluster or local walker in ClientQuerySegmentWalker separately
Assert.assertEquals(4, scheduler.getTotalRun().get());
Assert.assertEquals(4, scheduler.getTotalPrioritizedAndLaned().get());
Assert.assertEquals(4, scheduler.getTotalAcquired().get());
Assert.assertEquals(4, scheduler.getTotalReleased().get());
}
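The expected queries above show the broker's union fan-out: each table child of the UnionDataSource gets its own rewritten query, tagged with a per-table subquery ID such as "foo.1" and "bar.2". Below is a minimal sketch of that per-child rewrite, using only the accessors the test itself relies on; the fanOut helper and its table.N ID scheme are illustrative, not the walker's actual code.

import java.util.ArrayList;
import java.util.List;

import org.apache.druid.query.DataSource;
import org.apache.druid.query.Query;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.UnionDataSource;

final class UnionFanOutSketch
{
  private UnionFanOutSketch() {}

  /** Rewrites a query on a union into one query per table child, each tagged with a subquery ID. */
  static <T> List<Query<T>> fanOut(final Query<T> query, final UnionDataSource union)
  {
    final List<Query<T>> perTable = new ArrayList<>();
    int ordinal = 0;
    for (DataSource child : union.getChildren()) {
      ordinal++;
      final String table = ((TableDataSource) child).getName();
      // Mirrors the "foo.1" / "bar.2" IDs asserted above; the real walker also prefixes
      // nesting levels, e.g. "2.1.foo.1" for the inner groupBy subquery.
      perTable.add(query.withDataSource(child).withSubQueryId(table + "." + ordinal));
    }
    return perTable;
  }
}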
Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From class ClientQuerySegmentWalkerTest, method testGroupByOnUnionOfTwoTables.
@Test
public void testGroupByOnUnionOfTwoTables() {
final GroupByQuery query = (GroupByQuery) GroupByQuery.builder()
    .setDataSource(
        new UnionDataSource(ImmutableList.of(new TableDataSource(FOO), new TableDataSource(BAR)))
    )
    .setGranularity(Granularities.ALL)
    .setInterval(Intervals.ONLY_ETERNITY)
    .setDimensions(DefaultDimensionSpec.of("s"))
    .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
    .build()
    .withId(DUMMY_QUERY_ID);
testQuery(
    query,
    ImmutableList.of(
        ExpectedQuery.cluster(query.withDataSource(new TableDataSource(FOO)).withSubQueryId("foo.1")),
        ExpectedQuery.cluster(query.withDataSource(new TableDataSource(BAR)).withSubQueryId("bar.2"))
    ),
    ImmutableList.of(
        new Object[]{"a", 2L},
        new Object[]{"b", 1L},
        new Object[]{"c", 1L},
        new Object[]{"x", 2L},
        new Object[]{"y", 1L},
        new Object[]{"z", 1L}
    )
);
// note: this should really be 1, but in the interim, queries that are composed of multiple subqueries
// count each invocation of either the cluster or local walker in ClientQuerySegmentWalker separately
Assert.assertEquals(2, scheduler.getTotalRun().get());
Assert.assertEquals(2, scheduler.getTotalPrioritizedAndLaned().get());
Assert.assertEquals(2, scheduler.getTotalAcquired().get());
Assert.assertEquals(2, scheduler.getTotalReleased().get());
}
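The assertion comment is worth unpacking: ideally one logical query would count once, but each per-table query produced by the union fan-out passes through the scheduler separately, so a two-table union lands every counter at 2 (and the join test above, with two inner subqueries plus two outer per-table queries, at 4). The following is a toy model of that accounting; SchedulerCounters is a hypothetical class, not part of Druid.

import java.util.concurrent.atomic.AtomicLong;

/** Toy model of the per-invocation accounting behind the assertions above; not Druid code. */
final class SchedulerCounters
{
  final AtomicLong totalRun = new AtomicLong();
  final AtomicLong totalPrioritizedAndLaned = new AtomicLong();
  final AtomicLong totalAcquired = new AtomicLong();
  final AtomicLong totalReleased = new AtomicLong();

  /** Called once per walker invocation, i.e. once per fanned-out per-table query. */
  void onWalkerInvocation()
  {
    totalRun.incrementAndGet();
    totalPrioritizedAndLaned.incrementAndGet();
    totalAcquired.incrementAndGet();  // a lane slot is acquired for the query...
    totalReleased.incrementAndGet();  // ...and released when it finishes
  }
}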
Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From class ClientQuerySegmentWalkerTest, method testGroupByOnUnionOfOneTable.
@Test
public void testGroupByOnUnionOfOneTable() {
final GroupByQuery query = (GroupByQuery) GroupByQuery.builder()
    .setDataSource(new UnionDataSource(ImmutableList.of(new TableDataSource(FOO))))
    .setGranularity(Granularities.ALL)
    .setInterval(Intervals.ONLY_ETERNITY)
    .setDimensions(DefaultDimensionSpec.of("s"))
    .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
    .build()
    .withId(DUMMY_QUERY_ID);
testQuery(
    query,
    ImmutableList.of(ExpectedQuery.cluster(query.withDataSource(new TableDataSource(FOO)))),
    ImmutableList.of(
        new Object[]{"x", 2L},
        new Object[]{"y", 1L},
        new Object[]{"z", 1L}
    )
);
Assert.assertEquals(1, scheduler.getTotalRun().get());
Assert.assertEquals(1, scheduler.getTotalPrioritizedAndLaned().get());
Assert.assertEquals(1, scheduler.getTotalAcquired().get());
Assert.assertEquals(1, scheduler.getTotalReleased().get());
}
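Note the contrast with the two-table test: a single-child union degenerates to a plain table query, so there is no fan-out, the expected cluster query carries no subquery ID, and every scheduler counter lands at 1. A hedged sketch of that collapse follows; flattenTrivialUnion is an illustrative helper, not the walker's actual code.

import org.apache.druid.query.DataSource;
import org.apache.druid.query.UnionDataSource;

final class UnionFlattening
{
  private UnionFlattening() {}

  /** Collapses a one-child union to its lone child; leaves every other datasource untouched. */
  static DataSource flattenTrivialUnion(final DataSource dataSource)
  {
    if (dataSource instanceof UnionDataSource && dataSource.getChildren().size() == 1) {
      return dataSource.getChildren().get(0);
    }
    return dataSource;
  }
}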
Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From class DruidSchemaTest, method testRunSegmentMetadataQueryWithContext.
/**
 * Ensure that the BrokerInternalQueryConfig context is honored for this internally generated SegmentMetadataQuery.
 */
@Test
public void testRunSegmentMetadataQueryWithContext() throws Exception {
Map<String, Object> queryContext = ImmutableMap.of("priority", 5);
String brokerInternalQueryConfigJson = "{\"context\": { \"priority\": 5} }";
// Note: the mapper created here is discarded; the shared MAPPER below performs the round-trip.
TestHelper.makeJsonMapper();
BrokerInternalQueryConfig brokerInternalQueryConfig = MAPPER.readValue(
    MAPPER.writeValueAsString(MAPPER.readValue(brokerInternalQueryConfigJson, BrokerInternalQueryConfig.class)),
    BrokerInternalQueryConfig.class
);
DataSegment segment = newSegment("test", 0);
List<SegmentId> segmentIterable = ImmutableList.of(segment.getId());
// This is the query we expect the method under test to create; the test verifies that
// the generated query matches it.
SegmentMetadataQuery expectedMetadataQuery = new SegmentMetadataQuery(
    new TableDataSource(segment.getDataSource()),
    new MultipleSpecificSegmentSpec(
        segmentIterable.stream().map(SegmentId::toDescriptor).collect(Collectors.toList())
    ),
    new AllColumnIncluderator(),
    false,
    queryContext,
    EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class),
    false,
    false
);
QueryLifecycleFactory factoryMock = EasyMock.createMock(QueryLifecycleFactory.class);
QueryLifecycle lifecycleMock = EasyMock.createMock(QueryLifecycle.class);
// A dedicated schema is needed here because the shared schemas don't mock the
// QueryLifecycleFactory that this test depends on.
DruidSchema mySchema = new DruidSchema(
    factoryMock,
    serverView,
    segmentManager,
    new MapJoinableFactory(
        ImmutableSet.of(globalTableJoinable),
        ImmutableMap.of(globalTableJoinable.getClass(), GlobalTableDataSource.class)
    ),
    PLANNER_CONFIG_DEFAULT,
    new NoopEscalator(),
    brokerInternalQueryConfig,
    null
);
EasyMock.expect(factoryMock.factorize()).andReturn(lifecycleMock).once();
// This is the meat of the test: making sure that the query created by the method under test
// matches the expected query, specifically the operator-configured context.
EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery, AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK)).andReturn(null);
EasyMock.replay(factoryMock, lifecycleMock);
mySchema.runSegmentMetadataQuery(segmentIterable);
EasyMock.verify(factoryMock, lifecycleMock);
}
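The double readValue/writeValueAsString dance above is just a serialization round-trip: it proves the priority-5 context survives JSON parsing the same way it would when read from broker configuration. Here is a standalone version of that round-trip; it is a sketch in which the getContext() accessor is assumed from the config's JSON shape, and a plain ObjectMapper stands in for whatever TestHelper.makeJsonMapper() configures.

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.client.BrokerInternalQueryConfig;

public class BrokerConfigRoundTrip
{
  public static void main(String[] args) throws Exception
  {
    final ObjectMapper mapper = new ObjectMapper();
    final String json = "{\"context\": {\"priority\": 5}}";

    // Parse, re-serialize, and parse again, as the test does, to confirm the
    // context map survives a full serialization cycle.
    final BrokerInternalQueryConfig parsed = mapper.readValue(json, BrokerInternalQueryConfig.class);
    final BrokerInternalQueryConfig roundTripped =
        mapper.readValue(mapper.writeValueAsString(parsed), BrokerInternalQueryConfig.class);

    System.out.println(roundTripped.getContext());  // expected: {priority=5}
  }
}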
Use of org.apache.druid.query.TableDataSource in project druid by druid-io.
From class ServerManagerTest, method testGetQueryRunnerForSegmentsForUnknownQueryThrowingException.
@Test
public void testGetQueryRunnerForSegmentsForUnknownQueryThrowingException() {
final Interval interval = Intervals.of("P1d/2011-04-01");
final List<SegmentDescriptor> descriptors = Collections.singletonList(new SegmentDescriptor(interval, "1", 0));
expectedException.expect(QueryUnsupportedException.class);
expectedException.expectMessage("Unknown query type");
serverManager.getQueryRunnerForSegments(
    new BaseQuery<Object>(
        new TableDataSource("test"),
        new MultipleSpecificSegmentSpec(descriptors),
        false,
        new HashMap<>()
    )
    {
      @Override
      public boolean hasFilters()
      {
        return false;
      }

      @Override
      public DimFilter getFilter()
      {
        return null;
      }

      @Override
      public String getType()
      {
        // Returning null here is the point of the test: ServerManager cannot find a
        // QueryRunnerFactory for this type and must throw QueryUnsupportedException.
        return null;
      }

      @Override
      public Query<Object> withOverriddenContext(Map<String, Object> contextOverride)
      {
        return null;
      }

      @Override
      public Query<Object> withQuerySegmentSpec(QuerySegmentSpec spec)
      {
        return null;
      }

      @Override
      public Query<Object> withDataSource(DataSource dataSource)
      {
        return null;
      }
    },
    descriptors
);
}
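What makes this query "unknown" is the null getType(): the server looks the query up in its QueryRunnerFactoryConglomerate, finds no registered QueryRunnerFactory, and throws. Below is a hedged sketch of that lookup path; it is illustrative, and the exact message formatting in ServerManager may differ.

import org.apache.druid.query.Query;
import org.apache.druid.query.QueryRunnerFactory;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.QueryUnsupportedException;

final class FactoryLookupSketch
{
  private FactoryLookupSketch() {}

  static <T> QueryRunnerFactory<T, Query<T>> factoryFor(
      final QueryRunnerFactoryConglomerate conglomerate,
      final Query<T> query
  )
  {
    final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
    if (factory == null) {
      // This is the path the test exercises: no factory is registered for the query class,
      // so the expected "Unknown query type" exception surfaces to the caller.
      throw new QueryUnsupportedException("Unknown query type, [" + query.getClass() + "]");
    }
    return factory;
  }
}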