Use of io.prestosql.dynamicfilter.DynamicFilterService in project hetu-core by openlookeng.
From class TestNodeScheduler, method testRuseExchangeComputeAssignments.
@Test
public void testRuseExchangeComputeAssignments() {
setUpNodes();
Split split = new Split(CONNECTOR_ID, new TestSplitLocallyAccessible(), Lifespan.taskWide());
Set<Split> splits = ImmutableSet.of(split);
NodeTaskMap newNodeTaskMap = new NodeTaskMap(new FinalizerService());
StageId stageId = new StageId(new QueryId("query"), 0);
UUID uuid = UUID.randomUUID();
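// Producer side: a table scan fragment marked REUSE_STRATEGY_PRODUCER, keyed by the shared reuse-exchange UUID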
PlanFragment testFragmentProducer = createTableScanPlanFragment("build", ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_PRODUCER, uuid, 1);
PlanNodeId tableScanNodeId = new PlanNodeId("plan_id");
StageExecutionPlan producerStageExecutionPlan = new StageExecutionPlan(
        testFragmentProducer,
        ImmutableMap.of(tableScanNodeId, new ConnectorAwareSplitSource(CONNECTOR_ID, createFixedSplitSource(0, TestingSplit::createRemoteSplit))),
        ImmutableList.of(),
        ImmutableMap.of(tableScanNodeId, new TableInfo(new QualifiedObjectName("test", TEST_SCHEMA, "test"), TupleDomain.all())));
SqlStageExecution producerStage = createSqlStageExecution(
        stageId,
        new TestSqlTaskManager.MockLocationFactory().createStageLocation(stageId),
        producerStageExecutionPlan.getFragment(),
        producerStageExecutionPlan.getTables(),
        new MockRemoteTaskFactory(remoteTaskExecutor, remoteTaskScheduledExecutor),
        TEST_SESSION_REUSE,
        true,
        newNodeTaskMap,
        remoteTaskExecutor,
        new NoOpFailureDetector(),
        new SplitSchedulerStats(),
        new DynamicFilterService(new LocalStateStoreProvider(new SeedStoreManager(new FileSystemClientManager()))),
        new QuerySnapshotManager(stageId.getQueryId(), NOOP_SNAPSHOT_UTILS, TEST_SESSION));
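// Schedule the split for the producer stage and capture its node assignment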
Map.Entry<InternalNode, Split> producerAssignment = Iterables.getOnlyElement(nodeSelector.computeAssignments(splits, ImmutableList.copyOf(this.taskMap.values()), Optional.of(producerStage)).getAssignments().entries());
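// Consumer side: the same table scan with the same UUID, marked REUSE_STRATEGY_CONSUMER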
PlanFragment testFragmentConsumer = createTableScanPlanFragment("build", ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_CONSUMER, uuid, 1);
StageExecutionPlan consumerStageExecutionPlan = new StageExecutionPlan(
        testFragmentConsumer,
        ImmutableMap.of(tableScanNodeId, new ConnectorAwareSplitSource(CONNECTOR_ID, createFixedSplitSource(0, TestingSplit::createRemoteSplit))),
        ImmutableList.of(),
        ImmutableMap.of(tableScanNodeId, new TableInfo(new QualifiedObjectName("test", TEST_SCHEMA, "test"), TupleDomain.all())));
SqlStageExecution stage = createSqlStageExecution(
        stageId,
        new TestSqlTaskManager.MockLocationFactory().createStageLocation(stageId),
        consumerStageExecutionPlan.getFragment(),
        consumerStageExecutionPlan.getTables(),
        new MockRemoteTaskFactory(remoteTaskExecutor, remoteTaskScheduledExecutor),
        TEST_SESSION_REUSE,
        true,
        newNodeTaskMap,
        remoteTaskExecutor,
        new NoOpFailureDetector(),
        new SplitSchedulerStats(),
        new DynamicFilterService(new LocalStateStoreProvider(new SeedStoreManager(new FileSystemClientManager()))),
        new QuerySnapshotManager(stageId.getQueryId(), NOOP_SNAPSHOT_UTILS, TEST_SESSION));
Map.Entry<InternalNode, Split> consumerAssignment = Iterables.getOnlyElement(nodeSelector.computeAssignments(splits, ImmutableList.copyOf(this.taskMap.values()), Optional.of(stage)).getAssignments().entries());
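// Because both stages share the reuse UUID, the consumer must be handed the same split as the producer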
Split producerSplit = producerAssignment.getValue();
Split consumerSplit = consumerAssignment.getValue();
SplitKey splitKeyProducer = new SplitKey(producerSplit, producerSplit.getCatalogName().getCatalogName(), TEST_SCHEMA, "test");
SplitKey splitKeyConsumer = new SplitKey(consumerSplit, consumerSplit.getCatalogName().getCatalogName(), TEST_SCHEMA, "test");
assertEquals(splitKeyProducer, splitKeyConsumer);
}
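Every stage in these examples wires up DynamicFilterService the same way. That construction could be pulled into a small helper; the sketch below is illustrative only (the helper name is made up, but the constructors are exactly the ones used inline above):
// Hypothetical helper (not part of hetu-core): centralizes the DynamicFilterService wiring repeated in each createSqlStageExecution(...) call.
private static DynamicFilterService createTestDynamicFilterService()
{
    return new DynamicFilterService(new LocalStateStoreProvider(new SeedStoreManager(new FileSystemClientManager())));
}
The createSqlStageExecution calls could then pass createTestDynamicFilterService() instead of repeating the inline chain.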
Use of io.prestosql.dynamicfilter.DynamicFilterService in project hetu-core by openlookeng.
From class TestNodeScheduler, method testRuseExchangeComputeAssignmentsSplitsNotMatchProdConsumer.
@Test
public void testRuseExchangeComputeAssignmentsSplitsNotMatchProdConsumer() {
setUpNodes();
Split split = new Split(CONNECTOR_ID, new TestSplitLocallyAccessible(), Lifespan.taskWide());
Set<Split> splits = ImmutableSet.of(split);
NodeTaskMap newNodeTaskMap = new NodeTaskMap(new FinalizerService());
StageId stageId = new StageId(new QueryId("query"), 0);
UUID uuid = UUID.randomUUID();
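// Producer: table scan fragment marked REUSE_STRATEGY_PRODUCER for the shared reuse-exchange UUID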
PlanFragment testFragmentProducer = createTableScanPlanFragment("build", ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_PRODUCER, uuid, 1);
PlanNodeId tableScanNodeId = new PlanNodeId("plan_id");
StageExecutionPlan producerStageExecutionPlan = new StageExecutionPlan(
        testFragmentProducer,
        ImmutableMap.of(tableScanNodeId, new ConnectorAwareSplitSource(CONNECTOR_ID, createFixedSplitSource(0, TestingSplit::createRemoteSplit))),
        ImmutableList.of(),
        ImmutableMap.of(tableScanNodeId, new TableInfo(new QualifiedObjectName("test", TEST_SCHEMA, "test"), TupleDomain.all())));
SqlStageExecution producerStage = createSqlStageExecution(
        stageId,
        new TestSqlTaskManager.MockLocationFactory().createStageLocation(stageId),
        producerStageExecutionPlan.getFragment(),
        producerStageExecutionPlan.getTables(),
        new MockRemoteTaskFactory(remoteTaskExecutor, remoteTaskScheduledExecutor),
        TEST_SESSION_REUSE,
        true,
        newNodeTaskMap,
        remoteTaskExecutor,
        new NoOpFailureDetector(),
        new SplitSchedulerStats(),
        new DynamicFilterService(new LocalStateStoreProvider(new SeedStoreManager(new FileSystemClientManager()))),
        new QuerySnapshotManager(stageId.getQueryId(), NOOP_SNAPSHOT_UTILS, TEST_SESSION));
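// Schedule the producer's split first so its assignment is recorded for the reuse UUID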
nodeSelector.computeAssignments(splits, ImmutableList.copyOf(this.taskMap.values()), Optional.of(producerStage)).getAssignments().entries();
// Consumer
Split splitConsumer = new Split(CONNECTOR_ID, new TestSplitLocallyAccessibleDifferentIndex(), Lifespan.taskWide());
Set<Split> splitConsumers = ImmutableSet.of(splitConsumer);
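// The consumer fragment shares the UUID, but its split (different index) does not match what the producer scheduled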
PlanFragment testFragmentConsumer = createTableScanPlanFragment("build", ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_CONSUMER, uuid, 1);
StageExecutionPlan consumerStageExecutionPlan = new StageExecutionPlan(
        testFragmentConsumer,
        ImmutableMap.of(tableScanNodeId, new ConnectorAwareSplitSource(CONNECTOR_ID, createFixedSplitSource(0, TestingSplit::createRemoteSplit))),
        ImmutableList.of(),
        ImmutableMap.of(tableScanNodeId, new TableInfo(new QualifiedObjectName("test", TEST_SCHEMA, "test"), TupleDomain.all())));
SqlStageExecution stage = createSqlStageExecution(
        stageId,
        new TestSqlTaskManager.MockLocationFactory().createStageLocation(stageId),
        consumerStageExecutionPlan.getFragment(),
        consumerStageExecutionPlan.getTables(),
        new MockRemoteTaskFactory(remoteTaskExecutor, remoteTaskScheduledExecutor),
        TEST_SESSION_REUSE,
        true,
        newNodeTaskMap,
        remoteTaskExecutor,
        new NoOpFailureDetector(),
        new SplitSchedulerStats(),
        new DynamicFilterService(new LocalStateStoreProvider(new SeedStoreManager(new FileSystemClientManager()))),
        new QuerySnapshotManager(stageId.getQueryId(), NOOP_SNAPSHOT_UTILS, TEST_SESSION));
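// Scheduling the mismatched consumer split is expected to fail with a PrestoException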
try {
nodeSelector.computeAssignments(splitConsumers, ImmutableList.copyOf(this.taskMap.values()), Optional.of(stage)).getAssignments().entries();
} catch (PrestoException e) {
assertEquals("Producer & consumer splits are not same", e.getMessage());
return;
}
assertEquals(false, true);
}
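The try/catch/fail pattern above could be factored into a reusable assertion. The following is only a sketch (the helper is hypothetical, not hetu-core code); it reuses PrestoException and assertEquals as they already appear in the test:
// Hypothetical helper: runs an action and verifies it fails with a PrestoException carrying the expected message.
private static void assertPrestoExceptionMessage(Runnable action, String expectedMessage)
{
    try {
        action.run();
    }
    catch (PrestoException e) {
        assertEquals(e.getMessage(), expectedMessage);
        return;
    }
    throw new AssertionError("Expected PrestoException: " + expectedMessage);
}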
Use of io.prestosql.dynamicfilter.DynamicFilterService in project hetu-core by openlookeng.
From class TestHiveIntegrationSmokeTest, method testRuseExchangeGroupSplitsMatchingBetweenProducerConsumer.
@Test
public void testRuseExchangeGroupSplitsMatchingBetweenProducerConsumer() {
setUpNodes();
NodeTaskMap nodeTasks = new NodeTaskMap(new FinalizerService());
StageId stageId = new StageId(new QueryId("query"), 0);
UUID uuid = UUID.randomUUID();
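// Producer and consumer fragments are keyed by this shared UUID so the consumer can reuse the producer's split assignments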
PlanFragment testFragmentProducer = createTableScanPlanFragment("build", ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_PRODUCER, uuid, 1);
PlanNodeId tableScanNodeId = new PlanNodeId("plan_id");
StageExecutionPlan producerStageExecutionPlan = new StageExecutionPlan(
        testFragmentProducer,
        ImmutableMap.of(tableScanNodeId, new ConnectorAwareSplitSource(CONNECTOR_ID, createFixedSplitSource(0, TestingSplit::createRemoteSplit))),
        ImmutableList.of(),
        ImmutableMap.of(tableScanNodeId, new TableInfo(new QualifiedObjectName("test", TEST_SCHEMA, "test"), TupleDomain.all())));
SqlStageExecution producerStage = createSqlStageExecution(
        stageId,
        new TestSqlTaskManager.MockLocationFactory().createStageLocation(stageId),
        producerStageExecutionPlan.getFragment(),
        producerStageExecutionPlan.getTables(),
        new MockRemoteTaskFactory(remoteTaskExecutor, remoteTaskScheduledExecutor),
        TEST_SESSION_REUSE,
        true,
        nodeTasks,
        remoteTaskExecutor,
        new NoOpFailureDetector(),
        new SplitSchedulerStats(),
        new DynamicFilterService(new LocalStateStoreProvider(new SeedStoreManager(new FileSystemClientManager()))),
        new QuerySnapshotManager(stageId.getQueryId(), NOOP_SNAPSHOT_UTILS, TEST_SESSION));
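// Create ten splits and let the node selector assign them to the producer stage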
Set<Split> splits = createAndGetSplits(10);
Multimap<InternalNode, Split> producerAssignment = nodeSelector.computeAssignments(splits, ImmutableList.copyOf(taskMap.values()), Optional.of(producerStage)).getAssignments();
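// Consumer: same table scan and UUID; its assignments must mirror the producer's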
PlanFragment testFragmentConsumer = createTableScanPlanFragment("build", ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_CONSUMER, uuid, 1);
StageExecutionPlan consumerStageExecutionPlan = new StageExecutionPlan(
        testFragmentConsumer,
        ImmutableMap.of(tableScanNodeId, new ConnectorAwareSplitSource(CONNECTOR_ID, createFixedSplitSource(0, TestingSplit::createRemoteSplit))),
        ImmutableList.of(),
        ImmutableMap.of(tableScanNodeId, new TableInfo(new QualifiedObjectName("test", TEST_SCHEMA, "test"), TupleDomain.all())));
SqlStageExecution stage = createSqlStageExecution(
        stageId,
        new TestSqlTaskManager.MockLocationFactory().createStageLocation(stageId),
        consumerStageExecutionPlan.getFragment(),
        consumerStageExecutionPlan.getTables(),
        new MockRemoteTaskFactory(remoteTaskExecutor, remoteTaskScheduledExecutor),
        TEST_SESSION_REUSE,
        true,
        nodeTasks,
        remoteTaskExecutor,
        new NoOpFailureDetector(),
        new SplitSchedulerStats(),
        new DynamicFilterService(new LocalStateStoreProvider(new SeedStoreManager(new FileSystemClientManager()))),
        new QuerySnapshotManager(stageId.getQueryId(), NOOP_SNAPSHOT_UTILS, TEST_SESSION));
Multimap<InternalNode, Split> consumerAssignment = nodeSelector.computeAssignments(splits, ImmutableList.copyOf(taskMap.values()), Optional.of(stage)).getAssignments();
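// Every node should end up with the same grouped splits for both the producer and the consumer stage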
assertEquals(consumerAssignment.size(), producerAssignment.size());
for (InternalNode node : consumerAssignment.keySet()) {
List<Split> splitList = new ArrayList<>();
List<Split> splitList2 = new ArrayList<>();
Collection<Split> producerSplits = producerAssignment.get(node);
Collection<Split> consumerSplits = consumerAssignment.get(node);
producerSplits.forEach(s -> splitList.add(s));
consumerSplits.forEach(s -> splitList2.add(s));
// Unwrap the grouped splits so producer and consumer can be compared split by split
List<Split> producerGroupedSplits = splitList.get(0).getSplits();
List<Split> consumerGroupedSplits = splitList2.get(0).getSplits();
int i = 0;
for (Split producerSplit : producerGroupedSplits) {
SplitKey producerKey = new SplitKey(producerSplit, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE);
SplitKey consumerKey = new SplitKey(consumerGroupedSplits.get(i), TEST_CATALOG, TEST_SCHEMA, TEST_TABLE);
assertEquals(producerKey, consumerKey);
i++;
}
}
}
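The nested comparison loop could also be phrased as a single map comparison. The helper below is a sketch, not hetu-core code; it assumes the same Split.getSplits() accessor and SplitKey constructor already used in the test:
// Hypothetical helper: flattens each node's grouped splits into SplitKeys for direct comparison.
private static Map<InternalNode, List<SplitKey>> toSplitKeys(Multimap<InternalNode, Split> assignments)
{
    Map<InternalNode, List<SplitKey>> keysByNode = new HashMap<>();
    for (Map.Entry<InternalNode, Split> entry : assignments.entries()) {
        for (Split unwrapped : entry.getValue().getSplits()) {
            keysByNode.computeIfAbsent(entry.getKey(), node -> new ArrayList<>()).add(new SplitKey(unwrapped, TEST_CATALOG, TEST_SCHEMA, TEST_TABLE));
        }
    }
    return keysByNode;
}
With it, the whole loop collapses to assertEquals(toSplitKeys(consumerAssignment), toSplitKeys(producerAssignment)).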
Use of io.prestosql.dynamicfilter.DynamicFilterService in project hetu-core by openlookeng.
From class TestSqlStageExecution, method testFinalStageInfoInternal.
private void testFinalStageInfoInternal() throws Exception {
NodeTaskMap nodeTaskMap = new NodeTaskMap(new FinalizerService());
StageId stageId = new StageId(new QueryId("query"), 0);
SqlStageExecution stage = createSqlStageExecution(
        stageId,
        new MockLocationFactory().createStageLocation(stageId),
        createExchangePlanFragment(),
        ImmutableMap.of(),
        new MockRemoteTaskFactory(executor, scheduledExecutor),
        TEST_SESSION,
        true,
        nodeTaskMap,
        executor,
        new NoOpFailureDetector(),
        new SplitSchedulerStats(),
        new DynamicFilterService(new LocalStateStoreProvider(new SeedStoreManager(new FileSystemClientManager()))),
        new QuerySnapshotManager(stageId.getQueryId(), NOOP_SNAPSHOT_UTILS, TEST_SESSION));
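// Give the stage its initial (empty) output buffer configuration before scheduling any tasks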
stage.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY));
// add listener that fetches stage info when the final status is available
SettableFuture<StageInfo> finalStageInfo = SettableFuture.create();
stage.addFinalStageInfoListener(finalStageInfo::set);
// in a background thread add a ton of tasks
CountDownLatch latch = new CountDownLatch(1000);
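// The latch is released after the first 1000 tasks; the background loop keeps adding tasks until it is interrupted or cancelled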
Future<?> addTasksTask = executor.submit(() -> {
try {
for (int i = 0; i < 1_000_000; i++) {
if (Thread.interrupted()) {
return;
}
InternalNode node = new InternalNode("source" + i, URI.create("http://10.0.0." + (i / 10_000) + ":" + (i % 10_000)), NodeVersion.UNKNOWN, false);
stage.scheduleTask(node, i, OptionalInt.empty());
latch.countDown();
}
} finally {
while (latch.getCount() > 0) {
latch.countDown();
}
}
});
// wait for some tasks to be created, and then abort the query
latch.await(1, MINUTES);
assertFalse(stage.getStageInfo().getTasks().isEmpty());
stage.abort();
// once the final stage info is available, verify that it is complete
StageInfo stageInfo = finalStageInfo.get(1, MINUTES);
assertFalse(stageInfo.getTasks().isEmpty());
assertTrue(stageInfo.isCompleteInfo());
assertSame(stage.getStageInfo(), stageInfo);
// cancel the background thread adding tasks
addTasksTask.cancel(true);
}
Use of io.prestosql.dynamicfilter.DynamicFilterService in project hetu-core by openlookeng.
From class TestUtil, method getTestStage.
public static SqlStageExecution getTestStage(RowExpression expression) {
StageId stageId = new StageId(new QueryId("query"), 0);
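// Build a single stage around an exchange plan fragment that carries the given row expression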
SqlStageExecution stage = createSqlStageExecution(
        stageId,
        new TestSqlTaskManager.MockLocationFactory().createStageLocation(stageId),
        createExchangePlanFragment(expression),
        new HashMap<>(),
        new MockRemoteTaskFactory(executor, scheduledExecutor),
        TEST_SESSION,
        true,
        nodeTaskMap,
        executor,
        new NoOpFailureDetector(),
        new SplitSchedulerStats(),
        new DynamicFilterService(new LocalStateStoreProvider(new SeedStoreManager(new FileSystemClientManager()))),
        new QuerySnapshotManager(stageId.getQueryId(), TestingSnapshotUtils.NOOP_SNAPSHOT_UTILS, TEST_SESSION));
stage.setOutputBuffers(createInitialEmptyOutputBuffers(ARBITRARY));
return stage;
}
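A possible call site, purely illustrative: the ConstantExpression and BooleanType types are assumed to come from the io.prestosql SPI and are not part of the snippet above.
// Illustrative usage only (assumes io.prestosql.spi.relation.ConstantExpression and io.prestosql.spi.type.BooleanType are on the classpath).
SqlStageExecution stage = TestUtil.getTestStage(new ConstantExpression(true, BooleanType.BOOLEAN));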