Use of org.apache.druid.server.QueryLifecycleFactory in project druid by druid-io.
In the class DruidSchemaTest, the method testRunSegmentMetadataQueryWithContext:
/**
* Ensure that the BrokerInternalQueryConfig context is honored for this internally generated SegmentMetadata Query
*/
@Test
public void testRunSegmentMetadataQueryWithContext() throws Exception {
Map<String, Object> queryContext = ImmutableMap.of("priority", 5);
String brokerInternalQueryConfigJson = "{\"context\": { \"priority\": 5} }";
TestHelper.makeJsonMapper();
BrokerInternalQueryConfig brokerInternalQueryConfig = MAPPER.readValue(
    MAPPER.writeValueAsString(
        MAPPER.readValue(brokerInternalQueryConfigJson, BrokerInternalQueryConfig.class)
    ),
    BrokerInternalQueryConfig.class
);
DataSegment segment = newSegment("test", 0);
List<SegmentId> segmentIterable = ImmutableList.of(segment.getId());
// This is the query that we expect this method to create. We will be testing that it matches the query generated by the method under test.
SegmentMetadataQuery expectedMetadataQuery = new SegmentMetadataQuery(
    new TableDataSource(segment.getDataSource()),
    new MultipleSpecificSegmentSpec(
        segmentIterable.stream().map(SegmentId::toDescriptor).collect(Collectors.toList())
    ),
    new AllColumnIncluderator(),
    false,
    queryContext,
    EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class),
    false,
    false
);
QueryLifecycleFactory factoryMock = EasyMock.createMock(QueryLifecycleFactory.class);
QueryLifecycle lifecycleMock = EasyMock.createMock(QueryLifecycle.class);
// Need to create a schema for this test because the available schemas don't mock the QueryLifecycleFactory, which this test requires.
DruidSchema mySchema = new DruidSchema(
    factoryMock,
    serverView,
    segmentManager,
    new MapJoinableFactory(
        ImmutableSet.of(globalTableJoinable),
        ImmutableMap.of(globalTableJoinable.getClass(), GlobalTableDataSource.class)
    ),
    PLANNER_CONFIG_DEFAULT,
    new NoopEscalator(),
    brokerInternalQueryConfig,
    null
);
EasyMock.expect(factoryMock.factorize()).andReturn(lifecycleMock).once();
// This is the meat of the test: making sure that the query created by the method under test matches the expected query, specifically the operator-configured context.
EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery, AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK)).andReturn(null);
EasyMock.replay(factoryMock, lifecycleMock);
mySchema.runSegmentMetadataQuery(segmentIterable);
EasyMock.verify(factoryMock, lifecycleMock);
}
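The priority asserted on the expected query above comes directly from the deserialized BrokerInternalQueryConfig. A minimal standalone sketch of that relationship, assuming (as in the Druid version these examples target) that the config exposes its context map via getContext():

// Sketch only: the operator-configured context is what the broker attaches to the
// internally generated SegmentMetadataQuery, so it should equal the queryContext above.
ObjectMapper mapper = TestHelper.makeJsonMapper();
BrokerInternalQueryConfig config =
    mapper.readValue("{\"context\": { \"priority\": 5} }", BrokerInternalQueryConfig.class);
Assert.assertEquals(ImmutableMap.of("priority", 5), config.getContext());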
Use of org.apache.druid.server.QueryLifecycleFactory in project druid by druid-io.
In the class CalciteJoinQueryTest, the method testGroupByJoinAsNativeQueryWithUnoptimizedFilter:
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testGroupByJoinAsNativeQueryWithUnoptimizedFilter(Map<String, Object> queryContext) {
// The query below is the same as the inner groupBy on a join datasource from the test
// testNestedGroupByOnInlineDataSourceWithFilter, except that the selector filter
// dim1=def has been rewritten into an unoptimized filter, dim1 IN (def).
//
// The unoptimized filter will be optimized into dim1=def by the query toolchests in their
// pre-merge decoration function, when it calls DimFilter.optimize().
//
// This test's goal is to ensure that the join filter rewrites function correctly when there are
// unoptimized filters in the join query. The rewrite logic must apply to the optimized form of the filters,
// as this is what will be passed to HashJoinSegmentStorageAdapter.makeCursors(), where the result of the join
// filter pre-analysis is used.
//
// A native query is used because the filter types where we support optimization are the AND/OR/NOT and
// IN filters. However, when expressed in a SQL query, our SQL planning layer is smart enough to already apply
// these optimizations in the native query it generates, making it impossible to test the unoptimized filter forms
// using SQL queries.
//
// The test method is placed here for convenience as this class provides the necessary setup.
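// Editor's illustration, not part of the original test: what the optimization described above
// amounts to. The unoptimized filter built below via in("dim1", Collections.singletonList("def"), null)
// corresponds to new InDimFilter("dim1", Collections.singletonList("def"), null); calling
// DimFilter.optimize() on it is expected to produce the equivalent of
// new SelectorDimFilter("dim1", "def", null), i.e. dim1 = 'def'. The join filter rewrite must
// therefore operate on that optimized form.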
Query query = GroupByQuery.builder()
    .setDataSource(join(
        new QueryDataSource(newScanQueryBuilder()
            .dataSource(CalciteTests.DATASOURCE1)
            .intervals(querySegmentSpec(Intervals.of("2001-01-02T00:00:00.000Z/146140482-04-24T15:36:27.903Z")))
            .columns("dim1")
            .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
            .context(queryContext)
            .build()),
        new QueryDataSource(newScanQueryBuilder()
            .dataSource(CalciteTests.DATASOURCE1)
            .intervals(querySegmentSpec(Intervals.of("2001-01-02T00:00:00.000Z/146140482-04-24T15:36:27.903Z")))
            .columns("dim1", "m2")
            .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
            .context(queryContext)
            .build()),
        "j0.",
        equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.dim1")),
        JoinType.INNER))
    .setGranularity(Granularities.ALL)
    .setInterval(querySegmentSpec(Filtration.eternity()))
    // provide an unoptimized IN filter
    .setDimFilter(in("dim1", Collections.singletonList("def"), null))
    .setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0")))
    .setVirtualColumns(expressionVirtualColumn("v0", "'def'", ColumnType.STRING))
    .build();
QueryLifecycleFactory qlf = CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate);
QueryLifecycle ql = qlf.factorize();
Sequence seq = ql.runSimple(query, CalciteTests.SUPER_USER_AUTH_RESULT, Access.OK);
List<Object> results = seq.toList();
Assert.assertEquals(ImmutableList.of(ResultRow.of("def")), results);
}