Use of io.prestosql.spi.plan.TableScanNode in project hetu-core by openlookeng.
In the class TestDynamicFiltersCollector, the method TestCollectingGlobalDynamicFilters:
@Test
public void TestCollectingGlobalDynamicFilters() throws InterruptedException
{
    final QueryId queryId = new QueryId("test_query");
    final String filterId = "1";
    final String columnName = "column";
    final TestingColumnHandle columnHandle = new TestingColumnHandle(columnName);
    final Set<String> valueSet = ImmutableSet.of("1", "2", "3");

    TaskContext taskContext = mock(TaskContext.class);
    Session session = testSessionBuilder()
            .setQueryId(queryId)
            .setSystemProperty(ENABLE_DYNAMIC_FILTERING, "true")
            .setSystemProperty(DYNAMIC_FILTERING_DATA_TYPE, "HASHSET")
            .build();
    when(taskContext.getSession()).thenReturn(session);

    // set up state store and merged dynamic filters map
    Map mockMap = new HashMap<>();
    StateStoreProvider stateStoreProvider = mock(StateStoreProvider.class);
    StateStore stateStore = mock(StateStore.class);
    StateMap stateMap = new MockStateMap<>("test-map", mockMap);
    when(stateStoreProvider.getStateStore()).thenReturn(stateStore);
    when(stateStore.getStateCollection(any())).thenReturn(stateMap);
    when(stateStore.createStateMap(any())).thenReturn(stateMap);
    when(stateStore.getOrCreateStateCollection(any(), any())).thenReturn(stateMap);

    // set up state store listener and dynamic filter cache
    StateStoreListenerManager stateStoreListenerManager = new StateStoreListenerManager(stateStoreProvider);
    DynamicFilterCacheManager dynamicFilterCacheManager = new DynamicFilterCacheManager();
    stateStoreListenerManager.addStateStoreListener(new DynamicFilterListener(dynamicFilterCacheManager), MERGED_DYNAMIC_FILTERS);
    LocalDynamicFiltersCollector collector = new LocalDynamicFiltersCollector(taskContext, Optional.empty(), dynamicFilterCacheManager);

    TableScanNode tableScan = mock(TableScanNode.class);
    when(tableScan.getAssignments()).thenReturn(ImmutableMap.of(new Symbol(columnName), columnHandle));
    List<DynamicFilters.Descriptor> dynamicFilterDescriptors = ImmutableList.of(
            new DynamicFilters.Descriptor(filterId, new VariableReferenceExpression(columnName, BIGINT)));
    collector.initContext(ImmutableList.of(dynamicFilterDescriptors), SymbolUtils.toLayOut(tableScan.getOutputSymbols()));
    assertTrue(collector.getDynamicFilters(tableScan).isEmpty(), "there should be no dynamic filter available");

    // put some values in state store as a new dynamic filter
    // and wait for the listener to process the event
    stateMap.put(createKey(DynamicFilterUtils.FILTERPREFIX, filterId, queryId.getId()), valueSet);
    TimeUnit.MILLISECONDS.sleep(100);

    // get available dynamic filter and verify it
    List<Map<ColumnHandle, DynamicFilter>> dynamicFilters = collector.getDynamicFilters(tableScan);
    assertEquals(dynamicFilters.size(), 1, "there should be a new dynamic filter");
    DynamicFilter dynamicFilter = dynamicFilters.get(0).get(columnHandle);
    assertTrue(dynamicFilter instanceof HashSetDynamicFilter, "new dynamic filter should be hashset");
    assertEquals(dynamicFilter.getSize(), valueSet.size(), "new dynamic filter should have correct size");
    for (String value : valueSet) {
        assertTrue(dynamicFilter.contains(value), "new dynamic filter should contain correct values");
    }

    // clean up when task finishes
    collector.removeDynamicFilter(true);
    DynamicFilter cachedFilter = dynamicFilterCacheManager.getDynamicFilter(DynamicFilterCacheManager.createCacheKey(filterId, queryId.getId()));
    assertNull(cachedFilter, "cached dynamic filter should have been removed");
}
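The 100 ms sleep gives the DynamicFilterListener time to process the state-map event before the test reads the collector, but a fixed sleep can be flaky on a loaded machine. One alternative, sketched below using only JDK classes, is a bounded polling loop; the helper class and its names are illustrative and not part of hetu-core.

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

// Illustrative helper (not hetu-core code): polls a condition until it holds
// or the timeout elapses, instead of sleeping for a fixed interval.
final class TestWait
{
    private TestWait() {}

    static boolean awaitCondition(BooleanSupplier condition, long timeoutMillis)
            throws InterruptedException
    {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            if (condition.getAsBoolean()) {
                return true;
            }
            TimeUnit.MILLISECONDS.sleep(10);  // back off briefly between checks
        }
        return condition.getAsBoolean();      // final check at the deadline
    }
}

With such a helper the test could wait with something like awaitCondition(() -> !collector.getDynamicFilters(tableScan).isEmpty(), 1000) instead of sleeping; whether the collector tolerates being polled repeatedly here is an assumption, not something the original test establishes.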
Use of io.prestosql.spi.plan.TableScanNode in project hetu-core by openlookeng.
In the class ConnectorAwareTableScanMatcher, the method detailMatches:
@Override
public MatchResult detailMatches(PlanNode node, StatsProvider stats, Session session, Metadata metadata, SymbolAliases symbolAliases)
{
    checkState(shapeMatches(node), "Plan testing framework error: shapeMatches returned false in detailMatches in %s", this.getClass().getName());
    TableScanNode tableScanNode = (TableScanNode) node;
    TupleDomain<ColumnHandle> actual = tableScanNode.getEnforcedConstraint();
    TupleDomain<Predicate<ColumnHandle>> expected = expectedEnforcedConstraint;
    boolean tableMatches = expectedTable.test(tableScanNode.getTable().getConnectorHandle());
    return new MatchResult(tableMatches && domainsMatch(expected, actual));
}
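The result is simply the conjunction of a predicate test on the table handle and a comparison of the enforced constraint against the expected one. A stripped-down, framework-free sketch of the same two-part pattern is below; all names and types in it are illustrative and not the hetu-core API.

import java.util.function.BiPredicate;
import java.util.function.Predicate;

// Illustrative only: a matcher that passes when both a handle predicate and a
// constraint comparison succeed, mirroring the structure of detailMatches above.
final class TwoPartMatcher<H, C>
{
    private final Predicate<H> expectedHandle;
    private final BiPredicate<C, C> domainsMatch;
    private final C expectedConstraint;

    TwoPartMatcher(Predicate<H> expectedHandle, BiPredicate<C, C> domainsMatch, C expectedConstraint)
    {
        this.expectedHandle = expectedHandle;
        this.domainsMatch = domainsMatch;
        this.expectedConstraint = expectedConstraint;
    }

    boolean matches(H actualHandle, C actualConstraint)
    {
        // Both checks must pass, just as tableMatches is combined with domainsMatch.
        return expectedHandle.test(actualHandle)
                && domainsMatch.test(expectedConstraint, actualConstraint);
    }
}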
Use of io.prestosql.spi.plan.TableScanNode in project hetu-core by openlookeng.
In the class TestUtil, the method createExchangePlanFragment:
private static PlanFragment createExchangePlanFragment(RowExpression expr)
{
    Symbol testSymbol = new Symbol("a");
    Map<Symbol, ColumnHandle> scanAssignments = ImmutableMap.<Symbol, ColumnHandle>builder()
            .put(testSymbol, new TestingMetadata.TestingColumnHandle("a"))
            .build();
    Map<Symbol, ColumnHandle> assignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(testSymbol)));
    TableScanNode tableScanNode = new TableScanNode(
            new PlanNodeId(UUID.randomUUID().toString()), makeTableHandle(TupleDomain.none()),
            ImmutableList.copyOf(assignments.keySet()), assignments, TupleDomain.none(), Optional.empty(),
            ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_DEFAULT, new UUID(0, 0), 0, false);
    PlanBuilder planBuilder = new PlanBuilder(new PlanNodeIdAllocator(), dummyMetadata());
    FilterNode filterNode = planBuilder.filter(expr, tableScanNode);
    PlanNode planNode = new LimitNode(new PlanNodeId("limit"), filterNode, 1, false);
    ImmutableMap.Builder<Symbol, Type> types = ImmutableMap.builder();
    for (Symbol symbol : planNode.getOutputSymbols()) {
        types.put(symbol, VARCHAR);
    }
    return new PlanFragment(
            new PlanFragmentId("limit_fragment_id"), planNode, types.build(), SOURCE_DISTRIBUTION,
            ImmutableList.of(planNode.getId()),
            new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), planNode.getOutputSymbols()),
            ungroupedExecution(), StatsAndCosts.empty(), Optional.empty(), Optional.empty(), Optional.empty());
}
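The least obvious call here is the Guava combination Maps.filterKeys(..., Predicates.in(...)), which keeps exactly the entries whose keys appear in the given collection (a no-op in this snippet, since the only key is testSymbol). A minimal, self-contained illustration with plain strings instead of Symbol and ColumnHandle:

import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.Map;

public class FilterKeysExample
{
    public static void main(String[] args)
    {
        Map<String, Integer> all = ImmutableMap.of("a", 1, "b", 2, "c", 3);
        // Keep only the entries whose key is in the allowed list.
        Map<String, Integer> kept = Maps.filterKeys(all, Predicates.in(ImmutableList.of("a", "c")));
        System.out.println(kept);  // prints {a=1, c=3}
    }
}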
Use of io.prestosql.spi.plan.TableScanNode in project hetu-core by openlookeng.
In the class TestTypeValidator, the method setUp:
@BeforeMethod
public void setUp()
{
    planSymbolAllocator = new PlanSymbolAllocator();
    columnA = planSymbolAllocator.newSymbol("a", BIGINT);
    columnB = planSymbolAllocator.newSymbol("b", INTEGER);
    columnC = planSymbolAllocator.newSymbol("c", DOUBLE);
    columnD = planSymbolAllocator.newSymbol("d", DATE);
    // varchar(3), to test type-only coercion
    columnE = planSymbolAllocator.newSymbol("e", VarcharType.createVarcharType(3));
    Map<Symbol, ColumnHandle> assignments = ImmutableMap.<Symbol, ColumnHandle>builder()
            .put(columnA, new TestingColumnHandle("a"))
            .put(columnB, new TestingColumnHandle("b"))
            .put(columnC, new TestingColumnHandle("c"))
            .put(columnD, new TestingColumnHandle("d"))
            .put(columnE, new TestingColumnHandle("e"))
            .build();
    baseTableScan = new TableScanNode(
            newId(), TEST_TABLE_HANDLE, ImmutableList.copyOf(assignments.keySet()), assignments, TupleDomain.all(),
            Optional.empty(), ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_DEFAULT, new UUID(0, 0), 0, false);
}
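Both this setup and the TestUtil fragment above pass ImmutableList.copyOf(assignments.keySet()) as the scan's output symbols, so every output symbol is guaranteed to have a column assignment. A small sketch of that invariant check, written with plain JDK collections rather than hetu-core types (the class and method names here are illustrative only):

import java.util.List;
import java.util.Map;
import java.util.Set;

// Illustrative invariant check (not hetu-core code): every output symbol of a
// table scan should map to a column assignment, which is why the snippets above
// derive the output list directly from assignments.keySet().
final class ScanInvariants
{
    private ScanInvariants() {}

    static void checkOutputsAssigned(List<String> outputSymbols, Map<String, String> assignments)
    {
        Set<String> assigned = assignments.keySet();
        for (String symbol : outputSymbols) {
            if (!assigned.contains(symbol)) {
                throw new IllegalStateException("output symbol has no column assignment: " + symbol);
            }
        }
    }
}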
Use of io.prestosql.spi.plan.TableScanNode in project hetu-core by openlookeng.
In the class TestSchedulingOrderVisitor, the method testIndexJoinOrder:
@Test
public void testIndexJoinOrder()
{
    PlanBuilder planBuilder = new PlanBuilder(new PlanNodeIdAllocator(), dummyMetadata());
    TableScanNode a = planBuilder.tableScan(emptyList(), emptyMap());
    TableScanNode b = planBuilder.tableScan(emptyList(), emptyMap());
    List<PlanNodeId> order = scheduleOrder(planBuilder.indexJoin(IndexJoinNode.Type.INNER, a, b));
    assertEquals(order, ImmutableList.of(b.getId(), a.getId()));
}
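The assertion encodes the scheduling rule that an index join's index source (b) should be scheduled before its probe source (a). A stripped-down sketch of such an ordering traversal over a toy plan tree, using Java 17 records as illustrative types rather than the actual hetu-core visitor, could look like this:

import java.util.ArrayList;
import java.util.List;

// Toy plan model, for illustration only.
interface ToyPlanNode { }

record ToyScan(String id) implements ToyPlanNode { }

record ToyIndexJoin(ToyPlanNode probe, ToyPlanNode index) implements ToyPlanNode { }

final class ToyScheduleOrder
{
    private ToyScheduleOrder() {}

    // Collects scan ids in the order their sources should be scheduled:
    // for an index join, the index side is visited before the probe side.
    static List<String> scheduleOrder(ToyPlanNode root)
    {
        List<String> order = new ArrayList<>();
        visit(root, order);
        return order;
    }

    private static void visit(ToyPlanNode node, List<String> order)
    {
        if (node instanceof ToyScan scan) {
            order.add(scan.id());
        }
        else if (node instanceof ToyIndexJoin join) {
            visit(join.index(), order);   // index (build) side first
            visit(join.probe(), order);   // probe side afterwards
        }
    }
}

In this sketch, scheduleOrder(new ToyIndexJoin(new ToyScan("a"), new ToyScan("b"))) returns [b, a], which mirrors the order asserted in the test above.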