Example 6 with DriverFactory

Use of io.trino.operator.DriverFactory in project trino by trinodb.

The class HashBuildAndJoinBenchmark, method createDrivers:

/*
    select orderkey, quantity, totalprice
    from lineitem join orders using (orderkey)
     */
@Override
protected List<Driver> createDrivers(TaskContext taskContext) {
    ImmutableList.Builder<OperatorFactory> driversBuilder = ImmutableList.builder();
    driversBuilder.add(ordersTableScan);
    List<Type> sourceTypes = ordersTableTypes;
    OptionalInt hashChannel = OptionalInt.empty();
    if (hashEnabled) {
        driversBuilder.add(createHashProjectOperator(1, new PlanNodeId("test"), sourceTypes));
        sourceTypes = ImmutableList.<Type>builder().addAll(sourceTypes).add(BIGINT).build();
        hashChannel = OptionalInt.of(sourceTypes.size() - 1);
    }
    // hash build
    BlockTypeOperators blockTypeOperators = new BlockTypeOperators(new TypeOperators());
    JoinBridgeManager<PartitionedLookupSourceFactory> lookupSourceFactoryManager = JoinBridgeManager.lookupAllAtOnce(new PartitionedLookupSourceFactory(
            sourceTypes,
            ImmutableList.of(0, 1).stream().map(sourceTypes::get).collect(toImmutableList()),
            Ints.asList(0).stream().map(sourceTypes::get).collect(toImmutableList()),
            1,
            false,
            blockTypeOperators));
    HashBuilderOperatorFactory hashBuilder = new HashBuilderOperatorFactory(
            2,
            new PlanNodeId("test"),
            lookupSourceFactoryManager,
            ImmutableList.of(0, 1),
            Ints.asList(0),
            hashChannel,
            Optional.empty(),
            Optional.empty(),
            ImmutableList.of(),
            1_500_000,
            new PagesIndex.TestingFactory(false),
            false,
            SingleStreamSpillerFactory.unsupportedSingleStreamSpillerFactory(),
            incrementalLoadFactorHashArraySizeSupplier(session));
    driversBuilder.add(hashBuilder);
    DriverFactory hashBuildDriverFactory = new DriverFactory(0, true, false, driversBuilder.build(), OptionalInt.empty(), UNGROUPED_EXECUTION);
    // join
    ImmutableList.Builder<OperatorFactory> joinDriversBuilder = ImmutableList.builder();
    joinDriversBuilder.add(lineItemTableScan);
    sourceTypes = lineItemTableTypes;
    hashChannel = OptionalInt.empty();
    if (hashEnabled) {
        joinDriversBuilder.add(createHashProjectOperator(1, new PlanNodeId("test"), sourceTypes));
        sourceTypes = ImmutableList.<Type>builder().addAll(sourceTypes).add(BIGINT).build();
        hashChannel = OptionalInt.of(sourceTypes.size() - 1);
    }
    OperatorFactory joinOperator = operatorFactories.innerJoin(
            2,
            new PlanNodeId("test"),
            lookupSourceFactoryManager,
            false,
            false,
            false,
            sourceTypes,
            Ints.asList(0),
            hashChannel,
            Optional.empty(),
            OptionalInt.empty(),
            unsupportedPartitioningSpillerFactory(),
            blockTypeOperators);
    joinDriversBuilder.add(joinOperator);
    joinDriversBuilder.add(new NullOutputOperatorFactory(3, new PlanNodeId("test")));
    DriverFactory joinDriverFactory = new DriverFactory(1, true, true, joinDriversBuilder.build(), OptionalInt.empty(), UNGROUPED_EXECUTION);
    Driver hashBuildDriver = hashBuildDriverFactory.createDriver(taskContext.addPipelineContext(0, true, false, false).addDriverContext());
    hashBuildDriverFactory.noMoreDrivers();
    Driver joinDriver = joinDriverFactory.createDriver(taskContext.addPipelineContext(1, true, true, false).addDriverContext());
    joinDriverFactory.noMoreDrivers();
    return ImmutableList.of(hashBuildDriver, joinDriver);
}
Also used : ImmutableList(com.google.common.collect.ImmutableList) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) HashBuilderOperatorFactory(io.trino.operator.join.HashBuilderOperator.HashBuilderOperatorFactory) Driver(io.trino.operator.Driver) OptionalInt(java.util.OptionalInt) PagesIndex(io.trino.operator.PagesIndex) PlanNodeId(io.trino.sql.planner.plan.PlanNodeId) Type(io.trino.spi.type.Type) BlockTypeOperators(io.trino.type.BlockTypeOperators) NullOutputOperatorFactory(io.trino.testing.NullOutputOperator.NullOutputOperatorFactory) OperatorFactory(io.trino.operator.OperatorFactory) DriverFactory(io.trino.operator.DriverFactory) PartitionedLookupSourceFactory(io.trino.operator.join.PartitionedLookupSourceFactory) TypeOperators(io.trino.spi.type.TypeOperators)
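
The build pipeline has to finish before the join pipeline can produce rows: the HashBuilderOperator publishes its lookup source through the shared lookupSourceFactoryManager, and the probe side waits for it. Below is a minimal, hypothetical harness for running the two drivers returned above in dependency order (the drivers come back build-first); Driver.isFinished() and Driver.process() are assumed here and the exact processing method may differ across Trino versions.

// Hypothetical harness, for illustration only: runs each driver to completion
// in list order, so the build driver finishes before the join driver starts.
private static void runInDependencyOrder(List<Driver> drivers) {
    for (Driver driver : drivers) {
        while (!driver.isFinished()) {
            // advance the driver by one processing quantum (assumed API)
            driver.process();
        }
    }
}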

Example 7 with DriverFactory

Use of io.trino.operator.DriverFactory in project trino by trinodb.

The class LocalQueryRunner, method createDrivers:

private List<Driver> createDrivers(Session session, Plan plan, OutputFactory outputFactory, TaskContext taskContext) {
    if (printPlan) {
        System.out.println(PlanPrinter.textLogicalPlan(plan.getRoot(), plan.getTypes(), plannerContext.getMetadata(), plannerContext.getFunctionManager(), plan.getStatsAndCosts(), session, 0, false));
    }
    SubPlan subplan = createSubPlans(session, plan, true);
    if (!subplan.getChildren().isEmpty()) {
        throw new AssertionError("Expected subplan to have no children");
    }
    TableExecuteContextManager tableExecuteContextManager = new TableExecuteContextManager();
    tableExecuteContextManager.registerTableExecuteContextForQuery(taskContext.getQueryContext().getQueryId());
    LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner(
            plannerContext,
            new TypeAnalyzer(plannerContext, statementAnalyzerFactory),
            Optional.empty(),
            pageSourceManager,
            indexManager,
            nodePartitioningManager,
            pageSinkManager,
            null,
            expressionCompiler,
            pageFunctionCompiler,
            joinFilterFunctionCompiler,
            new IndexJoinLookupStats(),
            this.taskManagerConfig,
            spillerFactory,
            singleStreamSpillerFactory,
            partitioningSpillerFactory,
            new PagesIndex.TestingFactory(false),
            joinCompiler,
            operatorFactories,
            new OrderingCompiler(plannerContext.getTypeOperators()),
            new DynamicFilterConfig(),
            blockTypeOperators,
            tableExecuteContextManager,
            exchangeManagerRegistry);
    // plan query
    StageExecutionDescriptor stageExecutionDescriptor = subplan.getFragment().getStageExecutionDescriptor();
    LocalExecutionPlan localExecutionPlan = executionPlanner.plan(
            taskContext,
            stageExecutionDescriptor,
            subplan.getFragment().getRoot(),
            subplan.getFragment().getPartitioningScheme().getOutputLayout(),
            plan.getTypes(),
            subplan.getFragment().getPartitionedSources(),
            outputFactory);
    // generate splitAssignments
    List<SplitAssignment> splitAssignments = new ArrayList<>();
    long sequenceId = 0;
    for (TableScanNode tableScan : findTableScanNodes(subplan.getFragment().getRoot())) {
        TableHandle table = tableScan.getTable();
        SplitSource splitSource = splitManager.getSplits(session, table, stageExecutionDescriptor.isScanGroupedExecution(tableScan.getId()) ? GROUPED_SCHEDULING : UNGROUPED_SCHEDULING, EMPTY, alwaysTrue());
        ImmutableSet.Builder<ScheduledSplit> scheduledSplits = ImmutableSet.builder();
        while (!splitSource.isFinished()) {
            for (Split split : getNextBatch(splitSource)) {
                scheduledSplits.add(new ScheduledSplit(sequenceId++, tableScan.getId(), split));
            }
        }
        splitAssignments.add(new SplitAssignment(tableScan.getId(), scheduledSplits.build(), true));
    }
    // create drivers
    List<Driver> drivers = new ArrayList<>();
    Map<PlanNodeId, DriverFactory> driverFactoriesBySource = new HashMap<>();
    for (DriverFactory driverFactory : localExecutionPlan.getDriverFactories()) {
        for (int i = 0; i < driverFactory.getDriverInstances().orElse(1); i++) {
            if (driverFactory.getSourceId().isPresent()) {
                checkState(driverFactoriesBySource.put(driverFactory.getSourceId().get(), driverFactory) == null);
            } else {
                DriverContext driverContext = taskContext.addPipelineContext(driverFactory.getPipelineId(), driverFactory.isInputDriver(), driverFactory.isOutputDriver(), false).addDriverContext();
                Driver driver = driverFactory.createDriver(driverContext);
                drivers.add(driver);
            }
        }
    }
    // add split assignments to the drivers
    ImmutableSet<PlanNodeId> partitionedSources = ImmutableSet.copyOf(subplan.getFragment().getPartitionedSources());
    for (SplitAssignment splitAssignment : splitAssignments) {
        DriverFactory driverFactory = driverFactoriesBySource.get(splitAssignment.getPlanNodeId());
        checkState(driverFactory != null);
        boolean partitioned = partitionedSources.contains(driverFactory.getSourceId().get());
        for (ScheduledSplit split : splitAssignment.getSplits()) {
            DriverContext driverContext = taskContext.addPipelineContext(driverFactory.getPipelineId(), driverFactory.isInputDriver(), driverFactory.isOutputDriver(), partitioned).addDriverContext();
            Driver driver = driverFactory.createDriver(driverContext);
            driver.updateSplitAssignment(new SplitAssignment(split.getPlanNodeId(), ImmutableSet.of(split), true));
            drivers.add(driver);
        }
    }
    for (DriverFactory driverFactory : localExecutionPlan.getDriverFactories()) {
        driverFactory.noMoreDrivers();
    }
    return ImmutableList.copyOf(drivers);
}
Also used : DriverContext(io.trino.operator.DriverContext) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Driver(io.trino.operator.Driver) SplitAssignment(io.trino.execution.SplitAssignment) PagesIndex(io.trino.operator.PagesIndex) PlanNodeId(io.trino.sql.planner.plan.PlanNodeId) ImmutableSet(com.google.common.collect.ImmutableSet) TableExecuteContextManager(io.trino.execution.TableExecuteContextManager) OrderingCompiler(io.trino.sql.gen.OrderingCompiler) DriverFactory(io.trino.operator.DriverFactory) ScheduledSplit(io.trino.execution.ScheduledSplit) LocalExecutionPlanner(io.trino.sql.planner.LocalExecutionPlanner) IndexJoinLookupStats(io.trino.operator.index.IndexJoinLookupStats) StageExecutionDescriptor(io.trino.operator.StageExecutionDescriptor) TypeAnalyzer(io.trino.sql.planner.TypeAnalyzer) LocalExecutionPlan(io.trino.sql.planner.LocalExecutionPlanner.LocalExecutionPlan) TableScanNode(io.trino.sql.planner.plan.TableScanNode) TableHandle(io.trino.metadata.TableHandle) SplitSource(io.trino.split.SplitSource) Split(io.trino.metadata.Split) SubPlan(io.trino.sql.planner.SubPlan) DynamicFilterConfig(io.trino.execution.DynamicFilterConfig)
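
The method returns one driver per non-source pipeline instance plus one driver per scheduled split; a caller then processes them until every driver is finished. A rough sketch of such a loop follows (Driver.isFinished() and Driver.process() are again assumed; a real executor would also respect the future a blocked driver returns rather than spinning).

// Hypothetical round-robin execution loop, for illustration only.
private static void runUntilDone(List<Driver> drivers) {
    boolean anyUnfinished;
    do {
        anyUnfinished = false;
        for (Driver driver : drivers) {
            if (!driver.isFinished()) {
                driver.process();   // assumed API: advance this driver a little
                anyUnfinished = true;
            }
        }
    }
    while (anyUnfinished);
}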

Example 8 with DriverFactory

Use of io.trino.operator.DriverFactory in project trino by trinodb.

The class IndexLoader, method streamIndexDataForSingleKey:

public IndexedData streamIndexDataForSingleKey(UpdateRequest updateRequest) {
    Page indexKeyTuple = updateRequest.getPage().getRegion(0, 1);
    PageBuffer pageBuffer = new PageBuffer(100);
    DriverFactory driverFactory = indexBuildDriverFactoryProvider.createStreaming(pageBuffer, indexKeyTuple);
    Driver driver = driverFactory.createDriver(pipelineContext.addDriverContext());
    PageRecordSet pageRecordSet = new PageRecordSet(keyTypes, indexKeyTuple);
    PlanNodeId planNodeId = driverFactory.getSourceId().get();
    ScheduledSplit split = new ScheduledSplit(0, planNodeId, new Split(INDEX_CONNECTOR_ID, new IndexSplit(pageRecordSet), Lifespan.taskWide()));
    driver.updateSplitAssignment(new SplitAssignment(planNodeId, ImmutableSet.of(split), true));
    return new StreamingIndexedData(outputTypes, keyEqualOperators, indexKeyTuple, pageBuffer, driver);
}
Also used : PlanNodeId(io.trino.sql.planner.plan.PlanNodeId) ScheduledSplit(io.trino.execution.ScheduledSplit) DriverFactory(io.trino.operator.DriverFactory) Driver(io.trino.operator.Driver) Page(io.trino.spi.Page) SplitAssignment(io.trino.execution.SplitAssignment) Split(io.trino.metadata.Split)
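
The bind-one-split-to-one-driver pattern used here generalizes to any source DriverFactory. A small helper sketch using only the calls shown above (the DriverContext is assumed to be supplied by the caller, e.g. from a PipelineContext as in the example):

// Hypothetical helper, for illustration only: creates a driver from a
// source DriverFactory and assigns it exactly one split.
private static Driver createDriverForSingleSplit(DriverFactory driverFactory, DriverContext driverContext, Split split) {
    PlanNodeId sourceId = driverFactory.getSourceId()
            .orElseThrow(() -> new IllegalArgumentException("factory has no source node"));
    Driver driver = driverFactory.createDriver(driverContext);
    ScheduledSplit scheduledSplit = new ScheduledSplit(0, sourceId, split);
    driver.updateSplitAssignment(new SplitAssignment(sourceId, ImmutableSet.of(scheduledSplit), true));
    return driver;
}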

Example 9 with DriverFactory

Use of io.trino.operator.DriverFactory in project trino by trinodb.

The class LocalExecutionPlanner, method plan:

public LocalExecutionPlan plan(TaskContext taskContext, StageExecutionDescriptor stageExecutionDescriptor, PlanNode plan, List<Symbol> outputLayout, TypeProvider types, List<PlanNodeId> partitionedSourceOrder, OutputFactory outputOperatorFactory) {
    Session session = taskContext.getSession();
    LocalExecutionPlanContext context = new LocalExecutionPlanContext(taskContext, types);
    PhysicalOperation physicalOperation = plan.accept(new Visitor(session, stageExecutionDescriptor), context);
    Function<Page, Page> pagePreprocessor = enforceLoadedLayoutProcessor(outputLayout, physicalOperation.getLayout());
    List<Type> outputTypes = outputLayout.stream().map(types::get).collect(toImmutableList());
    context.addDriverFactory(
            context.isInputDriver(),
            true,
            new PhysicalOperation(
                    outputOperatorFactory.createOutputOperator(
                            context.getNextOperatorId(),
                            plan.getId(),
                            outputTypes,
                            pagePreprocessor,
                            new PagesSerdeFactory(plannerContext.getBlockEncodingSerde(), isExchangeCompressionEnabled(session))),
                    physicalOperation),
            context.getDriverInstanceCount());
    // notify operator factories that planning has completed
    context.getDriverFactories().stream()
            .map(DriverFactory::getOperatorFactories)
            .flatMap(List::stream)
            .filter(LocalPlannerAware.class::isInstance)
            .map(LocalPlannerAware.class::cast)
            .forEach(LocalPlannerAware::localPlannerComplete);
    return new LocalExecutionPlan(context.getDriverFactories(), partitionedSourceOrder, stageExecutionDescriptor);
}
Also used : PagesSerdeFactory(io.trino.execution.buffer.PagesSerdeFactory) FunctionType(io.trino.type.FunctionType) RowType(io.trino.spi.type.RowType) Type(io.trino.spi.type.Type) PlanVisitor(io.trino.sql.planner.plan.PlanVisitor) DriverFactory(io.trino.operator.DriverFactory) Page(io.trino.spi.Page) LocalPlannerAware(io.trino.operator.LocalPlannerAware) Session(io.trino.Session) ConnectorSession(io.trino.spi.connector.ConnectorSession)
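
The final stream over the driver factories is the hook that lets operator factories react once local planning has completed: every factory implementing LocalPlannerAware gets a localPlannerComplete() callback. A hedged sketch of a wrapper factory participating in that callback follows; it assumes the usual OperatorFactory methods (createOperator, noMoreOperators, duplicate) and is not a class from the Trino codebase.

// Hypothetical wrapper, for illustration only: delegates operator creation
// and is notified when the local planner finishes.
class PlannerAwareOperatorFactory
        implements OperatorFactory, LocalPlannerAware
{
    private final OperatorFactory delegate;

    PlannerAwareOperatorFactory(OperatorFactory delegate)
    {
        this.delegate = delegate;
    }

    @Override
    public Operator createOperator(DriverContext driverContext)
    {
        return delegate.createOperator(driverContext);
    }

    @Override
    public void noMoreOperators()
    {
        delegate.noMoreOperators();
    }

    @Override
    public OperatorFactory duplicate()
    {
        return new PlannerAwareOperatorFactory(delegate.duplicate());
    }

    @Override
    public void localPlannerComplete()
    {
        // planning is finished; safe to finalize any state shared across factories
    }
}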

Example 10 with DriverFactory

Use of io.trino.operator.DriverFactory in project trino by trinodb.

The class AbstractSimpleOperatorBenchmark, method createDriverFactory:

protected DriverFactory createDriverFactory() {
    List<OperatorFactory> operatorFactories = new ArrayList<>(createOperatorFactories());
    operatorFactories.add(new NullOutputOperatorFactory(999, new PlanNodeId("test")));
    return new DriverFactory(0, true, true, operatorFactories, OptionalInt.empty(), UNGROUPED_EXECUTION);
}
Also used : PlanNodeId(io.trino.sql.planner.plan.PlanNodeId) NullOutputOperatorFactory(io.trino.testing.NullOutputOperator.NullOutputOperatorFactory) OperatorFactory(io.trino.operator.OperatorFactory) ArrayList(java.util.ArrayList) DriverFactory(io.trino.operator.DriverFactory)
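
The pattern here is simply: take the benchmark's operator chain, append a NullOutputOperatorFactory so results are discarded, and wrap everything in a single input-and-output DriverFactory. A standalone sketch of the same idea (the operator id 999 and the plan node id are arbitrary, exactly as in the example above):

// Minimal sketch of the same pattern, for illustration only.
static DriverFactory benchmarkDriverFactory(List<OperatorFactory> operatorFactories)
{
    List<OperatorFactory> withSink = new ArrayList<>(operatorFactories);
    // discard all output so only the benchmarked operators are measured
    withSink.add(new NullOutputOperatorFactory(999, new PlanNodeId("benchmark-sink")));
    return new DriverFactory(0, true, true, withSink, OptionalInt.empty(), UNGROUPED_EXECUTION);
}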

Aggregations

DriverFactory (io.trino.operator.DriverFactory): 12
PlanNodeId (io.trino.sql.planner.plan.PlanNodeId): 7
Driver (io.trino.operator.Driver): 6
OperatorFactory (io.trino.operator.OperatorFactory): 5
PagesIndex (io.trino.operator.PagesIndex): 4
Type (io.trino.spi.type.Type): 4
NullOutputOperatorFactory (io.trino.testing.NullOutputOperator.NullOutputOperatorFactory): 4
ImmutableList (com.google.common.collect.ImmutableList): 3
PagesSerdeFactory (io.trino.execution.buffer.PagesSerdeFactory): 3
DriverContext (io.trino.operator.DriverContext): 3
HashBuilderOperatorFactory (io.trino.operator.join.HashBuilderOperator.HashBuilderOperatorFactory): 3
PartitionedLookupSourceFactory (io.trino.operator.join.PartitionedLookupSourceFactory): 3
Page (io.trino.spi.Page): 3
TypeOperators (io.trino.spi.type.TypeOperators): 3
LocalExecutionPlan (io.trino.sql.planner.LocalExecutionPlanner.LocalExecutionPlan): 3
BlockTypeOperators (io.trino.type.BlockTypeOperators): 3
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 2
ScheduledSplit (io.trino.execution.ScheduledSplit): 2
SplitAssignment (io.trino.execution.SplitAssignment): 2
PartitionedOutputBuffer (io.trino.execution.buffer.PartitionedOutputBuffer): 2