Example 6 with SplitAssignment

Use of io.trino.execution.SplitAssignment in the trino project by trinodb.

From the class LocalQueryRunner, the method createDrivers:

private List<Driver> createDrivers(Session session, Plan plan, OutputFactory outputFactory, TaskContext taskContext) {
    if (printPlan) {
        System.out.println(PlanPrinter.textLogicalPlan(plan.getRoot(), plan.getTypes(), plannerContext.getMetadata(), plannerContext.getFunctionManager(), plan.getStatsAndCosts(), session, 0, false));
    }
    SubPlan subplan = createSubPlans(session, plan, true);
    if (!subplan.getChildren().isEmpty()) {
        throw new AssertionError("Expected subplan to have no children");
    }
    TableExecuteContextManager tableExecuteContextManager = new TableExecuteContextManager();
    tableExecuteContextManager.registerTableExecuteContextForQuery(taskContext.getQueryContext().getQueryId());
    LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner(
            plannerContext,
            new TypeAnalyzer(plannerContext, statementAnalyzerFactory),
            Optional.empty(),
            pageSourceManager,
            indexManager,
            nodePartitioningManager,
            pageSinkManager,
            null,
            expressionCompiler,
            pageFunctionCompiler,
            joinFilterFunctionCompiler,
            new IndexJoinLookupStats(),
            this.taskManagerConfig,
            spillerFactory,
            singleStreamSpillerFactory,
            partitioningSpillerFactory,
            new PagesIndex.TestingFactory(false),
            joinCompiler,
            operatorFactories,
            new OrderingCompiler(plannerContext.getTypeOperators()),
            new DynamicFilterConfig(),
            blockTypeOperators,
            tableExecuteContextManager,
            exchangeManagerRegistry);
    // plan query
    StageExecutionDescriptor stageExecutionDescriptor = subplan.getFragment().getStageExecutionDescriptor();
    LocalExecutionPlan localExecutionPlan = executionPlanner.plan(taskContext, stageExecutionDescriptor, subplan.getFragment().getRoot(), subplan.getFragment().getPartitioningScheme().getOutputLayout(), plan.getTypes(), subplan.getFragment().getPartitionedSources(), outputFactory);
    // generate splitAssignments
    List<SplitAssignment> splitAssignments = new ArrayList<>();
    long sequenceId = 0;
    for (TableScanNode tableScan : findTableScanNodes(subplan.getFragment().getRoot())) {
        TableHandle table = tableScan.getTable();
        SplitSource splitSource = splitManager.getSplits(session, table, stageExecutionDescriptor.isScanGroupedExecution(tableScan.getId()) ? GROUPED_SCHEDULING : UNGROUPED_SCHEDULING, EMPTY, alwaysTrue());
        ImmutableSet.Builder<ScheduledSplit> scheduledSplits = ImmutableSet.builder();
        while (!splitSource.isFinished()) {
            for (Split split : getNextBatch(splitSource)) {
                scheduledSplits.add(new ScheduledSplit(sequenceId++, tableScan.getId(), split));
            }
        }
        splitAssignments.add(new SplitAssignment(tableScan.getId(), scheduledSplits.build(), true));
    }
    // create drivers
    List<Driver> drivers = new ArrayList<>();
    Map<PlanNodeId, DriverFactory> driverFactoriesBySource = new HashMap<>();
    for (DriverFactory driverFactory : localExecutionPlan.getDriverFactories()) {
        for (int i = 0; i < driverFactory.getDriverInstances().orElse(1); i++) {
            if (driverFactory.getSourceId().isPresent()) {
                checkState(driverFactoriesBySource.put(driverFactory.getSourceId().get(), driverFactory) == null);
            } else {
                DriverContext driverContext = taskContext.addPipelineContext(driverFactory.getPipelineId(), driverFactory.isInputDriver(), driverFactory.isOutputDriver(), false).addDriverContext();
                Driver driver = driverFactory.createDriver(driverContext);
                drivers.add(driver);
            }
        }
    }
    // add split assignments to the drivers
    ImmutableSet<PlanNodeId> partitionedSources = ImmutableSet.copyOf(subplan.getFragment().getPartitionedSources());
    for (SplitAssignment splitAssignment : splitAssignments) {
        DriverFactory driverFactory = driverFactoriesBySource.get(splitAssignment.getPlanNodeId());
        checkState(driverFactory != null);
        boolean partitioned = partitionedSources.contains(driverFactory.getSourceId().get());
        for (ScheduledSplit split : splitAssignment.getSplits()) {
            DriverContext driverContext = taskContext.addPipelineContext(driverFactory.getPipelineId(), driverFactory.isInputDriver(), driverFactory.isOutputDriver(), partitioned).addDriverContext();
            Driver driver = driverFactory.createDriver(driverContext);
            driver.updateSplitAssignment(new SplitAssignment(split.getPlanNodeId(), ImmutableSet.of(split), true));
            drivers.add(driver);
        }
    }
    for (DriverFactory driverFactory : localExecutionPlan.getDriverFactories()) {
        driverFactory.noMoreDrivers();
    }
    return ImmutableList.copyOf(drivers);
}
Also used: DriverContext (io.trino.operator.DriverContext), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), Driver (io.trino.operator.Driver), SplitAssignment (io.trino.execution.SplitAssignment), PagesIndex (io.trino.operator.PagesIndex), PlanNodeId (io.trino.sql.planner.plan.PlanNodeId), ImmutableSet (com.google.common.collect.ImmutableSet), TableExecuteContextManager (io.trino.execution.TableExecuteContextManager), OrderingCompiler (io.trino.sql.gen.OrderingCompiler), DriverFactory (io.trino.operator.DriverFactory), ScheduledSplit (io.trino.execution.ScheduledSplit), LocalExecutionPlanner (io.trino.sql.planner.LocalExecutionPlanner), IndexJoinLookupStats (io.trino.operator.index.IndexJoinLookupStats), StageExecutionDescriptor (io.trino.operator.StageExecutionDescriptor), TypeAnalyzer (io.trino.sql.planner.TypeAnalyzer), LocalExecutionPlan (io.trino.sql.planner.LocalExecutionPlanner.LocalExecutionPlan), TableScanNode (io.trino.sql.planner.plan.TableScanNode), TableHandle (io.trino.metadata.TableHandle), SplitSource (io.trino.split.SplitSource), Split (io.trino.metadata.Split), SubPlan (io.trino.sql.planner.SubPlan), DynamicFilterConfig (io.trino.execution.DynamicFilterConfig)
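
The split-assignment portion of createDrivers reduces to a compact pattern: drain each table scan's SplitSource into ScheduledSplits with monotonically increasing sequence ids, wrap them in a SplitAssignment marked as final, and then deliver each split to its own driver. Below is a minimal sketch of that pattern, assuming the same Trino internals as above; splitSource, tableScanNodeId, and the getNextBatch helper are taken from the example, while driverFor(...) is a hypothetical stand-in for the per-split driver creation shown in the loop above.

// Condensed sketch of the pattern above; not the full LocalQueryRunner code.
long sequenceId = 0;
ImmutableSet.Builder<ScheduledSplit> scheduledSplits = ImmutableSet.builder();
while (!splitSource.isFinished()) {
    // getNextBatch(...) is the same blocking helper used by createDrivers above
    for (Split split : getNextBatch(splitSource)) {
        scheduledSplits.add(new ScheduledSplit(sequenceId++, tableScanNodeId, split));
    }
}
// The trailing boolean marks the assignment as final: no further splits will follow for this node.
SplitAssignment assignment = new SplitAssignment(tableScanNodeId, scheduledSplits.build(), true);
// createDrivers then hands each ScheduledSplit to its own driver as a single-split assignment.
for (ScheduledSplit scheduledSplit : assignment.getSplits()) {
    driverFor(scheduledSplit).updateSplitAssignment(   // driverFor(...) is hypothetical; see the loop above
            new SplitAssignment(scheduledSplit.getPlanNodeId(), ImmutableSet.of(scheduledSplit), true));
}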

Example 7 with SplitAssignment

Use of io.trino.execution.SplitAssignment in the trino project by trinodb.

From the class IndexLoader, the method streamIndexDataForSingleKey:

public IndexedData streamIndexDataForSingleKey(UpdateRequest updateRequest) {
    Page indexKeyTuple = updateRequest.getPage().getRegion(0, 1);
    PageBuffer pageBuffer = new PageBuffer(100);
    DriverFactory driverFactory = indexBuildDriverFactoryProvider.createStreaming(pageBuffer, indexKeyTuple);
    Driver driver = driverFactory.createDriver(pipelineContext.addDriverContext());
    PageRecordSet pageRecordSet = new PageRecordSet(keyTypes, indexKeyTuple);
    PlanNodeId planNodeId = driverFactory.getSourceId().get();
    ScheduledSplit split = new ScheduledSplit(0, planNodeId, new Split(INDEX_CONNECTOR_ID, new IndexSplit(pageRecordSet), Lifespan.taskWide()));
    driver.updateSplitAssignment(new SplitAssignment(planNodeId, ImmutableSet.of(split), true));
    return new StreamingIndexedData(outputTypes, keyEqualOperators, indexKeyTuple, pageBuffer, driver);
}
Also used: PlanNodeId (io.trino.sql.planner.plan.PlanNodeId), ScheduledSplit (io.trino.execution.ScheduledSplit), DriverFactory (io.trino.operator.DriverFactory), Driver (io.trino.operator.Driver), Page (io.trino.spi.Page), SplitAssignment (io.trino.execution.SplitAssignment), Split (io.trino.metadata.Split)
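
For reference, every example on this page calls the same three-argument constructor. Its shape, as inferred from the call sites above (the parameter names here are assumptions, not copied from the Trino source), is a target plan node, a set of scheduled splits, and a flag saying whether more splits may still arrive.

// Shape inferred from the call sites: SplitAssignment(PlanNodeId, Set<ScheduledSplit>, boolean)
SplitAssignment assignment = new SplitAssignment(
        planNodeId,                       // plan node (usually a table scan) that consumes the splits
        ImmutableSet.of(scheduledSplit),  // splits to deliver, each carrying a sequence id
        true);                            // no more splits will follow for this plan node
driver.updateSplitAssignment(assignment);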

Example 8 with SplitAssignment

Use of io.trino.execution.SplitAssignment in the trino project by trinodb.

From the class TestDriver, the method testBrokenOperatorAddSource:

@Test
public void testBrokenOperatorAddSource() throws Exception {
    PlanNodeId sourceId = new PlanNodeId("source");
    List<Type> types = ImmutableList.of(VARCHAR, BIGINT, BIGINT);
    // create a table scan operator that does not block, which will cause the driver loop to busy wait
    TableScanOperator source = new NotBlockedTableScanOperator(driverContext.addOperatorContext(99, new PlanNodeId("test"), "values"), sourceId, (session, split, table, columns, dynamicFilter) -> new FixedPageSource(rowPagesBuilder(types).addSequencePage(10, 20, 30, 40).build()), TEST_TABLE_HANDLE, ImmutableList.of());
    BrokenOperator brokenOperator = new BrokenOperator(driverContext.addOperatorContext(0, new PlanNodeId("test"), "source"));
    Driver driver = Driver.createDriver(driverContext, source, brokenOperator);
    // block thread in operator processing
    Future<Boolean> driverProcessFor = executor.submit(() -> driver.processFor(new Duration(1, TimeUnit.MILLISECONDS)).isDone());
    brokenOperator.waitForLocked();
    assertSame(driver.getDriverContext(), driverContext);
    assertFalse(driver.isFinished());
    // processFor always returns NOT_BLOCKED, because DriverLockResult was not acquired
    assertTrue(driver.processFor(new Duration(1, TimeUnit.MILLISECONDS)).isDone());
    assertFalse(driver.isFinished());
    driver.updateSplitAssignment(new SplitAssignment(sourceId, ImmutableSet.of(new ScheduledSplit(0, sourceId, newMockSplit())), true));
    assertFalse(driver.isFinished());
    // processFor always returns NOT_BLOCKED, because DriverLockResult was not acquired
    assertTrue(driver.processFor(new Duration(1, TimeUnit.SECONDS)).isDone());
    assertFalse(driver.isFinished());
    driver.close();
    assertTrue(driver.isFinished());
    assertThatThrownBy(() -> driverProcessFor.get(1, TimeUnit.SECONDS)).isInstanceOf(ExecutionException.class).hasCause(new TrinoException(GENERIC_INTERNAL_ERROR, "Driver was interrupted"));
}
Also used: ScheduledSplit (io.trino.execution.ScheduledSplit), Duration (io.airlift.units.Duration), SplitAssignment (io.trino.execution.SplitAssignment), FixedPageSource (io.trino.spi.connector.FixedPageSource), PlanNodeId (io.trino.sql.planner.plan.PlanNodeId), Type (io.trino.spi.type.Type), TrinoException (io.trino.spi.TrinoException), ExecutionException (java.util.concurrent.ExecutionException), Test (org.testng.annotations.Test)

Example 9 with SplitAssignment

Use of io.trino.execution.SplitAssignment in the trino project by trinodb.

From the class TestDriver, the method testAddSourceFinish:

@Test
public void testAddSourceFinish() {
    PlanNodeId sourceId = new PlanNodeId("source");
    List<Type> types = ImmutableList.of(VARCHAR, BIGINT, BIGINT);
    TableScanOperator source = new TableScanOperator(driverContext.addOperatorContext(99, new PlanNodeId("test"), "values"), sourceId, (session, split, table, columns, dynamicFilter) -> new FixedPageSource(rowPagesBuilder(types).addSequencePage(10, 20, 30, 40).build()), TEST_TABLE_HANDLE, ImmutableList.of(), DynamicFilter.EMPTY);
    PageConsumerOperator sink = createSinkOperator(types);
    Driver driver = Driver.createDriver(driverContext, source, sink);
    assertSame(driver.getDriverContext(), driverContext);
    assertFalse(driver.isFinished());
    assertFalse(driver.processFor(new Duration(1, TimeUnit.MILLISECONDS)).isDone());
    assertFalse(driver.isFinished());
    driver.updateSplitAssignment(new SplitAssignment(sourceId, ImmutableSet.of(new ScheduledSplit(0, sourceId, newMockSplit())), true));
    assertFalse(driver.isFinished());
    assertTrue(driver.processFor(new Duration(1, TimeUnit.SECONDS)).isDone());
    assertTrue(driver.isFinished());
    assertTrue(sink.isFinished());
    assertTrue(source.isFinished());
}
Also used: PlanNodeId (io.trino.sql.planner.plan.PlanNodeId), PageConsumerOperator (io.trino.testing.PageConsumerOperator), Type (io.trino.spi.type.Type), ScheduledSplit (io.trino.execution.ScheduledSplit), Duration (io.airlift.units.Duration), SplitAssignment (io.trino.execution.SplitAssignment), FixedPageSource (io.trino.spi.connector.FixedPageSource), Test (org.testng.annotations.Test)
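
Both TestDriver examples build the same single-split assignment inline. In test code that needs this repeatedly, it factors naturally into a small helper; the sketch below is a suggestion, not part of the Trino test suite, and the helper name is an assumption.

// Hypothetical test helper: wrap one Split into a final SplitAssignment for a source node.
private static SplitAssignment singleSplitAssignment(PlanNodeId sourceId, Split split)
{
    return new SplitAssignment(
            sourceId,
            ImmutableSet.of(new ScheduledSplit(0, sourceId, split)),
            true); // final assignment, so the driver can finish once the split is consumed
}

With such a helper, both tests would reduce their assignment line to driver.updateSplitAssignment(singleSplitAssignment(sourceId, newMockSplit())).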

Aggregations

SplitAssignment (io.trino.execution.SplitAssignment): 9
ScheduledSplit (io.trino.execution.ScheduledSplit): 8
PlanNodeId (io.trino.sql.planner.plan.PlanNodeId): 6
Split (io.trino.metadata.Split): 4
Duration (io.airlift.units.Duration): 3
Driver (io.trino.operator.Driver): 3
FixedPageSource (io.trino.spi.connector.FixedPageSource): 3
Type (io.trino.spi.type.Type): 3
Test (org.testng.annotations.Test): 3
Lifespan (io.trino.execution.Lifespan): 2
DriverFactory (io.trino.operator.DriverFactory): 2
PageConsumerOperator (io.trino.testing.PageConsumerOperator): 2
VerifyException (com.google.common.base.VerifyException): 1
ImmutableSet (com.google.common.collect.ImmutableSet): 1
JsonResponse (io.airlift.http.client.FullJsonResponseHandler.JsonResponse): 1
HttpUriBuilder (io.airlift.http.client.HttpUriBuilder): 1
Request (io.airlift.http.client.Request): 1
CatalogName (io.trino.connector.CatalogName): 1
DynamicFilterConfig (io.trino.execution.DynamicFilterConfig): 1
VersionedDynamicFilterDomains (io.trino.execution.DynamicFiltersCollector.VersionedDynamicFilterDomains): 1