
Example 26 with Session

use of io.trino.Session in project trino by trinodb.

The class FaultTolerantStageScheduler, method schedule.

public synchronized void schedule() throws Exception {
    if (failure != null) {
        propagateIfPossible(failure, Exception.class);
        throw new RuntimeException(failure);
    }
    if (closed) {
        return;
    }
    if (isFinished()) {
        return;
    }
    if (!blocked.isDone()) {
        return;
    }
    if (taskSource == null) {
        Map<PlanFragmentId, ListenableFuture<List<ExchangeSourceHandle>>> sourceHandles = sourceExchanges.entrySet().stream().collect(toImmutableMap(Map.Entry::getKey, entry -> toListenableFuture(entry.getValue().getSourceHandles())));
        List<ListenableFuture<List<ExchangeSourceHandle>>> blockedFutures = sourceHandles.values().stream().filter(future -> !future.isDone()).collect(toImmutableList());
        if (!blockedFutures.isEmpty()) {
            blocked = asVoid(allAsList(blockedFutures));
            return;
        }
        Multimap<PlanFragmentId, ExchangeSourceHandle> exchangeSources = sourceHandles.entrySet().stream().collect(flatteningToImmutableListMultimap(Map.Entry::getKey, entry -> getFutureValue(entry.getValue()).stream()));
        taskSource = taskSourceFactory.create(session, stage.getFragment(), sourceExchanges, exchangeSources, stage::recordGetSplitTime, sourceBucketToPartitionMap, sourceBucketNodeMap);
    }
    while (!queuedPartitions.isEmpty() || !taskSource.isFinished()) {
        while (queuedPartitions.isEmpty() && !taskSource.isFinished()) {
            List<TaskDescriptor> tasks = taskSource.getMoreTasks();
            for (TaskDescriptor task : tasks) {
                queuedPartitions.add(task.getPartitionId());
                allPartitions.add(task.getPartitionId());
                taskDescriptorStorage.put(stage.getStageId(), task);
                sinkExchange.ifPresent(exchange -> {
                    ExchangeSinkHandle exchangeSinkHandle = exchange.addSink(task.getPartitionId());
                    partitionToExchangeSinkHandleMap.put(task.getPartitionId(), exchangeSinkHandle);
                });
            }
            if (taskSource.isFinished()) {
                sinkExchange.ifPresent(Exchange::noMoreSinks);
            }
        }
        if (queuedPartitions.isEmpty()) {
            break;
        }
        int partition = queuedPartitions.peek();
        Optional<TaskDescriptor> taskDescriptorOptional = taskDescriptorStorage.get(stage.getStageId(), partition);
        if (taskDescriptorOptional.isEmpty()) {
            // query has been terminated
            return;
        }
        TaskDescriptor taskDescriptor = taskDescriptorOptional.get();
        MemoryRequirements memoryRequirements = partitionMemoryRequirements.computeIfAbsent(partition, ignored -> partitionMemoryEstimator.getInitialMemoryRequirements(session, taskDescriptor.getNodeRequirements().getMemory()));
        if (nodeLease == null) {
            NodeRequirements nodeRequirements = taskDescriptor.getNodeRequirements();
            nodeRequirements = nodeRequirements.withMemory(memoryRequirements.getRequiredMemory());
            nodeLease = nodeAllocator.acquire(nodeRequirements);
        }
        if (!nodeLease.getNode().isDone()) {
            blocked = asVoid(nodeLease.getNode());
            return;
        }
        NodeInfo node = getFutureValue(nodeLease.getNode());
        queuedPartitions.poll();
        Multimap<PlanNodeId, Split> tableScanSplits = taskDescriptor.getSplits();
        Multimap<PlanNodeId, Split> remoteSplits = createRemoteSplits(taskDescriptor.getExchangeSourceHandles());
        Multimap<PlanNodeId, Split> taskSplits = ImmutableListMultimap.<PlanNodeId, Split>builder().putAll(tableScanSplits).putAll(remoteSplits).build();
        int attemptId = getNextAttemptIdForPartition(partition);
        OutputBuffers outputBuffers;
        Optional<ExchangeSinkInstanceHandle> exchangeSinkInstanceHandle;
        if (sinkExchange.isPresent()) {
            ExchangeSinkHandle sinkHandle = partitionToExchangeSinkHandleMap.get(partition);
            exchangeSinkInstanceHandle = Optional.of(sinkExchange.get().instantiateSink(sinkHandle, attemptId));
            outputBuffers = createSpoolingExchangeOutputBuffers(exchangeSinkInstanceHandle.get());
        } else {
            exchangeSinkInstanceHandle = Optional.empty();
            // stage will be consumed by the coordinator using direct exchange
            outputBuffers = createInitialEmptyOutputBuffers(PARTITIONED).withBuffer(new OutputBuffers.OutputBufferId(0), 0).withNoMoreBufferIds();
        }
        Set<PlanNodeId> allSourcePlanNodeIds = ImmutableSet.<PlanNodeId>builder().addAll(stage.getFragment().getPartitionedSources()).addAll(stage.getFragment().getRemoteSourceNodes().stream().map(RemoteSourceNode::getId).iterator()).build();
        RemoteTask task = stage.createTask(node.getNode(), partition, attemptId, sinkBucketToPartitionMap, outputBuffers, taskSplits, allSourcePlanNodeIds.stream().collect(toImmutableListMultimap(Function.identity(), planNodeId -> Lifespan.taskWide())), allSourcePlanNodeIds).orElseThrow(() -> new VerifyException("stage execution is expected to be active"));
        partitionToRemoteTaskMap.put(partition, task);
        runningTasks.put(task.getTaskId(), task);
        runningNodes.put(task.getTaskId(), nodeLease);
        nodeLease = null;
        if (taskFinishedFuture == null) {
            taskFinishedFuture = SettableFuture.create();
        }
        taskLifecycleListener.taskCreated(stage.getFragment().getId(), task);
        task.addStateChangeListener(taskStatus -> updateTaskStatus(taskStatus, exchangeSinkInstanceHandle));
        task.start();
    }
    if (taskFinishedFuture != null && !taskFinishedFuture.isDone()) {
        blocked = taskFinishedFuture;
    }
}
Also used : ArrayListMultimap(com.google.common.collect.ArrayListMultimap) SettableFuture(com.google.common.util.concurrent.SettableFuture) RemoteSourceNode(io.trino.sql.planner.plan.RemoteSourceNode) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) Throwables.propagateIfPossible(com.google.common.base.Throwables.propagateIfPossible) ImmutableListMultimap.toImmutableListMultimap(com.google.common.collect.ImmutableListMultimap.toImmutableListMultimap) MemoryRequirements(io.trino.execution.scheduler.PartitionMemoryEstimator.MemoryRequirements) PlanNodeId(io.trino.sql.planner.plan.PlanNodeId) Map(java.util.Map) SpoolingExchangeInput(io.trino.split.RemoteSplit.SpoolingExchangeInput) REMOTE_HOST_GONE(io.trino.spi.StandardErrorCode.REMOTE_HOST_GONE) Futures.immediateVoidFuture(com.google.common.util.concurrent.Futures.immediateVoidFuture) ImmutableSet(com.google.common.collect.ImmutableSet) ExchangeSinkInstanceHandle(io.trino.spi.exchange.ExchangeSinkInstanceHandle) OutputBuffers.createSpoolingExchangeOutputBuffers(io.trino.execution.buffer.OutputBuffers.createSpoolingExchangeOutputBuffers) ImmutableMap(com.google.common.collect.ImmutableMap) ExecutionFailureInfo(io.trino.execution.ExecutionFailureInfo) Futures.allAsList(com.google.common.util.concurrent.Futures.allAsList) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) MoreFutures.toListenableFuture(io.airlift.concurrent.MoreFutures.toListenableFuture) Set(java.util.Set) TrinoException(io.trino.spi.TrinoException) GONE(io.trino.failuredetector.FailureDetector.State.GONE) GuardedBy(javax.annotation.concurrent.GuardedBy) TaskId(io.trino.execution.TaskId) ExchangeSinkHandle(io.trino.spi.exchange.ExchangeSinkHandle) List(java.util.List) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) Split(io.trino.metadata.Split) ImmutableListMultimap(com.google.common.collect.ImmutableListMultimap) Optional(java.util.Optional) Queue(java.util.Queue) PlanFragmentId(io.trino.sql.planner.plan.PlanFragmentId) OutputBuffers.createInitialEmptyOutputBuffers(io.trino.execution.buffer.OutputBuffers.createInitialEmptyOutputBuffers) ExchangeSourceHandle(io.trino.spi.exchange.ExchangeSourceHandle) Session(io.trino.Session) ImmutableListMultimap.flatteningToImmutableListMultimap(com.google.common.collect.ImmutableListMultimap.flatteningToImmutableListMultimap) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) StageId(io.trino.execution.StageId) Logger(io.airlift.log.Logger) HashMap(java.util.HashMap) Multimap(com.google.common.collect.Multimap) ErrorCode(io.trino.spi.ErrorCode) Function(java.util.function.Function) Failures.toFailure(io.trino.util.Failures.toFailure) RemoteSplit(io.trino.split.RemoteSplit) HashSet(java.util.HashSet) ImmutableList(com.google.common.collect.ImmutableList) Verify.verify(com.google.common.base.Verify.verify) USER_ERROR(io.trino.spi.ErrorType.USER_ERROR) Objects.requireNonNull(java.util.Objects.requireNonNull) TaskState(io.trino.execution.TaskState) Lifespan(io.trino.execution.Lifespan) Exchange(io.trino.spi.exchange.Exchange) VerifyException(com.google.common.base.VerifyException) SqlStage(io.trino.execution.SqlStage) FailureDetector(io.trino.failuredetector.FailureDetector) RemoteTask(io.trino.execution.RemoteTask) TaskStatus(io.trino.execution.TaskStatus) MoreFutures.getFutureValue(io.airlift.concurrent.MoreFutures.getFutureValue) GENERIC_INTERNAL_ERROR(io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) 
MoreFutures.asVoid(io.airlift.concurrent.MoreFutures.asVoid) PARTITIONED(io.trino.execution.buffer.OutputBuffers.BufferType.PARTITIONED) Futures.nonCancellationPropagating(com.google.common.util.concurrent.Futures.nonCancellationPropagating) OutputBuffers(io.trino.execution.buffer.OutputBuffers) ArrayDeque(java.util.ArrayDeque) REMOTE_CONNECTOR_ID(io.trino.operator.ExchangeOperator.REMOTE_CONNECTOR_ID)
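
schedule() never blocks the calling thread: when it cannot make progress it records a future in the blocked field and returns, and the caller is expected to re-invoke it once that future completes. Below is a minimal, hypothetical sketch of such a driver loop; the Scheduler interface and its isBlocked() accessor are illustrative stand-ins, not Trino's actual API.

import com.google.common.util.concurrent.ListenableFuture;

// Hypothetical interface mirroring the pattern above: schedule() may park work
// behind an internal "blocked" future instead of blocking the calling thread.
interface Scheduler {
    void schedule() throws Exception;
    ListenableFuture<Void> isBlocked();
    boolean isFinished();
}

class SchedulerDriver {
    // Re-runs schedule() each time the blocking condition (source exchange handles,
    // node acquisition, task completion) resolves, until the stage is finished.
    static void drive(Scheduler scheduler) throws Exception {
        while (!scheduler.isFinished()) {
            scheduler.schedule();
            ListenableFuture<Void> blocked = scheduler.isBlocked();
            if (!blocked.isDone()) {
                blocked.get(); // a real driver would chain a listener instead of blocking here
            }
        }
    }
}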

Example 27 with Session

use of io.trino.Session in project trino by trinodb.

The class Analyzer, method analyze.

public Analysis analyze(Statement statement, QueryType queryType) {
    Statement rewrittenStatement = statementRewrite.rewrite(analyzerFactory, session, statement, parameters, parameterLookup, warningCollector);
    Analysis analysis = new Analysis(rewrittenStatement, parameterLookup, queryType);
    StatementAnalyzer analyzer = statementAnalyzerFactory.createStatementAnalyzer(analysis, session, warningCollector, CorrelationSupport.ALLOWED);
    analyzer.analyze(rewrittenStatement, Optional.empty());
    // check column access permissions for each table
    analysis.getTableColumnReferences().forEach((accessControlInfo, tableColumnReferences) -> tableColumnReferences.forEach((tableName, columns) -> accessControlInfo.getAccessControl().checkCanSelectFromColumns(accessControlInfo.getSecurityContext(session.getRequiredTransactionId(), session.getQueryId()), tableName, columns)));
    return analysis;
}
Also used : ExpressionTreeUtils.extractExpressions(io.trino.sql.analyzer.ExpressionTreeUtils.extractExpressions) ExpressionTreeUtils.extractWindowExpressions(io.trino.sql.analyzer.ExpressionTreeUtils.extractWindowExpressions) Iterables(com.google.common.collect.Iterables) StatementRewrite(io.trino.sql.rewrite.StatementRewrite) Statement(io.trino.sql.tree.Statement) EXPRESSION_NOT_SCALAR(io.trino.spi.StandardErrorCode.EXPRESSION_NOT_SCALAR) GroupingOperation(io.trino.sql.tree.GroupingOperation) ExpressionTreeUtils.extractAggregateFunctions(io.trino.sql.analyzer.ExpressionTreeUtils.extractAggregateFunctions) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Parameter(io.trino.sql.tree.Parameter) NodeRef(io.trino.sql.tree.NodeRef) Map(java.util.Map) OTHERS(io.trino.sql.analyzer.QueryType.OTHERS) Objects.requireNonNull(java.util.Objects.requireNonNull) WarningCollector(io.trino.execution.warnings.WarningCollector) Metadata(io.trino.metadata.Metadata) Optional(java.util.Optional) Expression(io.trino.sql.tree.Expression) FunctionCall(io.trino.sql.tree.FunctionCall) SemanticExceptions.semanticException(io.trino.sql.analyzer.SemanticExceptions.semanticException) Session(io.trino.Session) Statement(io.trino.sql.tree.Statement)
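
The final step iterates a nested map (access-control context → table → referenced columns) and issues one checkCanSelectFromColumns call per table. A minimal self-contained sketch of that nested-forEach shape follows; the placeholder types are hypothetical simplifications, not Trino's real classes.

import java.util.Map;
import java.util.Set;

// Hypothetical placeholder types for illustration only.
record TableName(String schema, String table) {}

interface AccessControl {
    void checkCanSelectFromColumns(String securityContext, TableName table, Set<String> columns);
}

class ColumnAccessCheck {
    // Mirrors the nested forEach in Analyzer.analyze: one permission check per
    // (access control, table) pair, covering every column the query references.
    static void check(Map<AccessControl, Map<TableName, Set<String>>> tableColumnReferences, String securityContext) {
        tableColumnReferences.forEach((accessControl, tables) ->
                tables.forEach((table, columns) ->
                        accessControl.checkCanSelectFromColumns(securityContext, table, columns)));
    }
}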

Example 28 with Session

use of io.trino.Session in project trino by trinodb.

The class PushPredicateIntoTableScan, method pushFilterIntoTableScan.

public static Optional<PlanNode> pushFilterIntoTableScan(FilterNode filterNode, TableScanNode node, boolean pruneWithPredicateExpression, Session session, SymbolAllocator symbolAllocator, PlannerContext plannerContext, TypeAnalyzer typeAnalyzer, StatsProvider statsProvider, DomainTranslator domainTranslator) {
    if (!isAllowPushdownIntoConnectors(session)) {
        return Optional.empty();
    }
    SplitExpression splitExpression = splitExpression(plannerContext, filterNode.getPredicate());
    DomainTranslator.ExtractionResult decomposedPredicate = DomainTranslator.getExtractionResult(plannerContext, session, splitExpression.getDeterministicPredicate(), symbolAllocator.getTypes());
    TupleDomain<ColumnHandle> newDomain = decomposedPredicate.getTupleDomain().transformKeys(node.getAssignments()::get).intersect(node.getEnforcedConstraint());
    Map<NodeRef<Expression>, Type> remainingExpressionTypes = typeAnalyzer.getTypes(session, symbolAllocator.getTypes(), decomposedPredicate.getRemainingExpression());
    Optional<ConnectorExpression> connectorExpression = new ConnectorExpressionTranslator.SqlToConnectorExpressionTranslator(session, remainingExpressionTypes, plannerContext).process(decomposedPredicate.getRemainingExpression());
    Map<String, ColumnHandle> connectorExpressionAssignments = connectorExpression.map(ignored -> node.getAssignments().entrySet().stream().collect(toImmutableMap(entry -> entry.getKey().getName(), Map.Entry::getValue))).orElse(ImmutableMap.of());
    Map<ColumnHandle, Symbol> assignments = ImmutableBiMap.copyOf(node.getAssignments()).inverse();
    Constraint constraint;
    // use evaluator only when there is some predicate which could not be translated into tuple domain
    if (pruneWithPredicateExpression && !TRUE_LITERAL.equals(decomposedPredicate.getRemainingExpression())) {
        LayoutConstraintEvaluator evaluator = new LayoutConstraintEvaluator(
                plannerContext,
                typeAnalyzer,
                session,
                symbolAllocator.getTypes(),
                node.getAssignments(),
                combineConjuncts(
                        plannerContext.getMetadata(),
                        splitExpression.getDeterministicPredicate(),
                        // simplify the domain to avoid generating a huge expression, which would be expensive to evaluate in the call to isCandidate below
                        domainTranslator.toPredicate(session, newDomain.simplify().transformKeys(assignments::get))));
        constraint = new Constraint(newDomain, connectorExpression.orElse(TRUE), connectorExpressionAssignments, evaluator::isCandidate, evaluator.getArguments());
    } else {
        // Currently, invoking the expression interpreter is very expensive.
        // TODO invoke the interpreter unconditionally when the interpreter becomes cheap enough.
        constraint = new Constraint(newDomain, connectorExpression.orElse(TRUE), connectorExpressionAssignments);
    }
    // check if new domain is wider than domain already provided by table scan
    if (constraint.predicate().isEmpty() &&
            // TODO do we need to track enforced ConnectorExpression in TableScanNode?
            TRUE.equals(connectorExpression.orElse(TRUE)) &&
            newDomain.contains(node.getEnforcedConstraint())) {
        Expression resultingPredicate = createResultingPredicate(plannerContext, session, symbolAllocator, typeAnalyzer, splitExpression.getDynamicFilter(), TRUE_LITERAL, splitExpression.getNonDeterministicPredicate(), decomposedPredicate.getRemainingExpression());
        if (!TRUE_LITERAL.equals(resultingPredicate)) {
            return Optional.of(new FilterNode(filterNode.getId(), node, resultingPredicate));
        }
        return Optional.of(node);
    }
    if (newDomain.isNone()) {
        // to turn the subtree into a Values node
        return Optional.of(new ValuesNode(node.getId(), node.getOutputSymbols(), ImmutableList.of()));
    }
    Optional<ConstraintApplicationResult<TableHandle>> result = plannerContext.getMetadata().applyFilter(session, node.getTable(), constraint);
    if (result.isEmpty()) {
        return Optional.empty();
    }
    TableHandle newTable = result.get().getHandle();
    TableProperties newTableProperties = plannerContext.getMetadata().getTableProperties(session, newTable);
    Optional<TablePartitioning> newTablePartitioning = newTableProperties.getTablePartitioning();
    if (newTableProperties.getPredicate().isNone()) {
        return Optional.of(new ValuesNode(node.getId(), node.getOutputSymbols(), ImmutableList.of()));
    }
    TupleDomain<ColumnHandle> remainingFilter = result.get().getRemainingFilter();
    Optional<ConnectorExpression> remainingConnectorExpression = result.get().getRemainingExpression();
    boolean precalculateStatistics = result.get().isPrecalculateStatistics();
    verifyTablePartitioning(session, plannerContext.getMetadata(), node, newTablePartitioning);
    TableScanNode tableScan = new TableScanNode(
            node.getId(),
            newTable,
            node.getOutputSymbols(),
            node.getAssignments(),
            computeEnforced(newDomain, remainingFilter),
            // TODO (https://github.com/trinodb/trino/issues/8144) distinguish between predicate pushed down and remaining
            deriveTableStatisticsForPushdown(statsProvider, session, precalculateStatistics, filterNode),
            node.isUpdateTarget(),
            node.getUseConnectorNodePartitioning());
    Expression remainingDecomposedPredicate;
    if (remainingConnectorExpression.isEmpty() || remainingConnectorExpression.equals(connectorExpression)) {
        remainingDecomposedPredicate = decomposedPredicate.getRemainingExpression();
    } else {
        Map<String, Symbol> variableMappings = assignments.values().stream().collect(toImmutableMap(Symbol::getName, Function.identity()));
        Expression translatedExpression = ConnectorExpressionTranslator.translate(session, remainingConnectorExpression.get(), plannerContext, variableMappings, new LiteralEncoder(plannerContext));
        if (connectorExpression.isEmpty()) {
            remainingDecomposedPredicate = ExpressionUtils.combineConjuncts(plannerContext.getMetadata(), translatedExpression, decomposedPredicate.getRemainingExpression());
        } else {
            remainingDecomposedPredicate = translatedExpression;
        }
    }
    Expression resultingPredicate = createResultingPredicate(plannerContext, session, symbolAllocator, typeAnalyzer, splitExpression.getDynamicFilter(), domainTranslator.toPredicate(session, remainingFilter.transformKeys(assignments::get)), splitExpression.getNonDeterministicPredicate(), remainingDecomposedPredicate);
    if (!TRUE_LITERAL.equals(resultingPredicate)) {
        return Optional.of(new FilterNode(filterNode.getId(), tableScan, resultingPredicate));
    }
    return Optional.of(tableScan);
}
Also used : LayoutConstraintEvaluator(io.trino.sql.planner.LayoutConstraintEvaluator) SymbolAllocator(io.trino.sql.planner.SymbolAllocator) FilterNode(io.trino.sql.planner.plan.FilterNode) PlanNode(io.trino.sql.planner.plan.PlanNode) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) DeterminismEvaluator.isDeterministic(io.trino.sql.planner.DeterminismEvaluator.isDeterministic) Map(java.util.Map) TableScanNode(io.trino.sql.planner.plan.TableScanNode) Rules.deriveTableStatisticsForPushdown(io.trino.sql.planner.iterative.rule.Rules.deriveTableStatisticsForPushdown) TRUE(io.trino.spi.expression.Constant.TRUE) ImmutableMap(com.google.common.collect.ImmutableMap) Domain(io.trino.spi.predicate.Domain) ConnectorExpressionTranslator(io.trino.sql.planner.ConnectorExpressionTranslator) Patterns.tableScan(io.trino.sql.planner.plan.Patterns.tableScan) StatsProvider(io.trino.cost.StatsProvider) Objects(java.util.Objects) List(java.util.List) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) Pattern(io.trino.matching.Pattern) SystemSessionProperties.isAllowPushdownIntoConnectors(io.trino.SystemSessionProperties.isAllowPushdownIntoConnectors) Optional(java.util.Optional) Expression(io.trino.sql.tree.Expression) ExpressionUtils.extractConjuncts(io.trino.sql.ExpressionUtils.extractConjuncts) ValuesNode(io.trino.sql.planner.plan.ValuesNode) ExpressionUtils.combineConjuncts(io.trino.sql.ExpressionUtils.combineConjuncts) Session(io.trino.Session) PlannerContext(io.trino.sql.PlannerContext) Constraint(io.trino.spi.connector.Constraint) LiteralEncoder(io.trino.sql.planner.LiteralEncoder) Type(io.trino.spi.type.Type) Patterns.filter(io.trino.sql.planner.plan.Patterns.filter) TableProperties(io.trino.metadata.TableProperties) Function(java.util.function.Function) Capture.newCapture(io.trino.matching.Capture.newCapture) ArrayList(java.util.ArrayList) ImmutableBiMap(com.google.common.collect.ImmutableBiMap) ImmutableList(com.google.common.collect.ImmutableList) Verify.verify(com.google.common.base.Verify.verify) NodeRef(io.trino.sql.tree.NodeRef) Objects.requireNonNull(java.util.Objects.requireNonNull) ColumnHandle(io.trino.spi.connector.ColumnHandle) Rule(io.trino.sql.planner.iterative.Rule) DomainTranslator(io.trino.sql.planner.DomainTranslator) TablePartitioning(io.trino.metadata.TableProperties.TablePartitioning) ExpressionUtils(io.trino.sql.ExpressionUtils) Symbol(io.trino.sql.planner.Symbol) ConstraintApplicationResult(io.trino.spi.connector.ConstraintApplicationResult) TRUE_LITERAL(io.trino.sql.tree.BooleanLiteral.TRUE_LITERAL) TupleDomain(io.trino.spi.predicate.TupleDomain) Capture(io.trino.matching.Capture) TableHandle(io.trino.metadata.TableHandle) ConnectorExpression(io.trino.spi.expression.ConnectorExpression) TypeAnalyzer(io.trino.sql.planner.TypeAnalyzer) Patterns.source(io.trino.sql.planner.plan.Patterns.source) DynamicFilters.isDynamicFilter(io.trino.sql.DynamicFilters.isDynamicFilter) Captures(io.trino.matching.Captures) Metadata(io.trino.metadata.Metadata) ValuesNode(io.trino.sql.planner.plan.ValuesNode) Constraint(io.trino.spi.connector.Constraint) ConnectorExpression(io.trino.spi.expression.ConnectorExpression) Symbol(io.trino.sql.planner.Symbol) FilterNode(io.trino.sql.planner.plan.FilterNode) NodeRef(io.trino.sql.tree.NodeRef) TablePartitioning(io.trino.metadata.TableProperties.TablePartitioning) LiteralEncoder(io.trino.sql.planner.LiteralEncoder) DomainTranslator(io.trino.sql.planner.DomainTranslator) 
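
At a high level the rule splits the filter into a part the connector may be able to enforce and a remainder, offers the enforceable part via applyFilter, and keeps a FilterNode only for what remains. The sketch below reproduces that control flow with deliberately simplified, hypothetical types (plain strings stand in for TupleDomain, ConnectorExpression and plan nodes); it is not Trino's API.

import java.util.Optional;

// Hypothetical, simplified stand-ins for illustration only.
record DecomposedPredicate(String pushableDomain, String remainingExpression) {}
record PushdownResult(String newTableHandle, String remainingFilter) {}

interface ConnectorMetadata {
    Optional<PushdownResult> applyFilter(String tableHandle, String pushableDomain);
}

class PushdownSketch {
    // Mirrors the overall flow of pushFilterIntoTableScan: try to push the
    // translatable part of the predicate down, then re-wrap the scan with a
    // filter for whatever the connector could not enforce.
    static String push(ConnectorMetadata metadata, String tableHandle, DecomposedPredicate predicate) {
        Optional<PushdownResult> result = metadata.applyFilter(tableHandle, predicate.pushableDomain());
        if (result.isEmpty()) {
            // connector rejected the pushdown: leave the full predicate above the original scan
            return filter(combine(predicate.pushableDomain(), predicate.remainingExpression()), scan(tableHandle));
        }
        PushdownResult pushed = result.get();
        String remaining = combine(pushed.remainingFilter(), predicate.remainingExpression());
        return filter(remaining, scan(pushed.newTableHandle()));
    }

    private static String scan(String tableHandle) {
        return "Scan(" + tableHandle + ")";
    }

    private static String filter(String predicate, String source) {
        return predicate.isEmpty() ? source : "Filter(" + predicate + ", " + source + ")";
    }

    private static String combine(String a, String b) {
        if (a.isEmpty()) {
            return b;
        }
        return b.isEmpty() ? a : a + " AND " + b;
    }
}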

Example 29 with Session

use of io.trino.Session in project trino by trinodb.

The class TaskResource, method createOrUpdateTask.

@ResourceSecurity(INTERNAL_ONLY)
@POST
@Path("{taskId}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public void createOrUpdateTask(@PathParam("taskId") TaskId taskId, TaskUpdateRequest taskUpdateRequest, @Context UriInfo uriInfo, @Suspended AsyncResponse asyncResponse) {
    requireNonNull(taskUpdateRequest, "taskUpdateRequest is null");
    Session session = taskUpdateRequest.getSession().toSession(sessionPropertyManager, taskUpdateRequest.getExtraCredentials());
    if (injectFailure(session.getTraceToken(), taskId, RequestType.CREATE_OR_UPDATE_TASK, asyncResponse)) {
        return;
    }
    TaskInfo taskInfo = taskManager.updateTask(session, taskId, taskUpdateRequest.getFragment(), taskUpdateRequest.getSplitAssignments(), taskUpdateRequest.getOutputIds(), taskUpdateRequest.getDynamicFilterDomains());
    if (shouldSummarize(uriInfo)) {
        taskInfo = taskInfo.summarize();
    }
    asyncResponse.resume(Response.ok().entity(taskInfo).build());
}
Also used : TaskInfo(io.trino.execution.TaskInfo) Session(io.trino.Session) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST) Consumes(javax.ws.rs.Consumes) Produces(javax.ws.rs.Produces) ResourceSecurity(io.trino.server.security.ResourceSecurity)
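
The endpoint uses JAX-RS asynchronous responses: the method returns void and completes the request later by calling resume on the injected @Suspended AsyncResponse. A small generic sketch of that pattern is shown below; the resource path and payload are made up for the example and are not part of Trino.

import java.util.concurrent.CompletableFuture;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.container.AsyncResponse;
import javax.ws.rs.container.Suspended;
import javax.ws.rs.core.Response;

// Illustrative only: a generic @Suspended AsyncResponse endpoint.
@Path("/example")
public class AsyncEndpoint {
    @POST
    public void handle(String body, @Suspended AsyncResponse asyncResponse) {
        // Perform the work off the request thread, then resume the suspended response.
        CompletableFuture.supplyAsync(() -> "processed: " + body)
                .whenComplete((result, failure) -> {
                    if (failure != null) {
                        asyncResponse.resume(failure);
                    }
                    else {
                        asyncResponse.resume(Response.ok(result).build());
                    }
                });
    }
}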

Example 30 with Session

use of io.trino.Session in project trino by trinodb.

The class DynamicFilterService, method getDynamicFilteringStats.

public DynamicFiltersStats getDynamicFilteringStats(QueryId queryId, Session session) {
    DynamicFilterContext context = dynamicFilterContexts.get(queryId);
    if (context == null) {
        // query has been removed or dynamic filtering is not enabled
        return DynamicFiltersStats.EMPTY;
    }
    int lazyFilters = context.getLazyDynamicFilters().size();
    int replicatedFilters = context.getReplicatedDynamicFilters().size();
    int totalDynamicFilters = context.getTotalDynamicFilters();
    ConnectorSession connectorSession = session.toConnectorSession();
    List<DynamicFilterDomainStats> dynamicFilterDomainStats = context.getDynamicFilterSummaries().entrySet().stream()
            .map(entry -> {
                DynamicFilterId dynamicFilterId = entry.getKey();
                return new DynamicFilterDomainStats(
                        dynamicFilterId,
                        // use small limit for readability
                        entry.getValue().toString(connectorSession, 2),
                        context.getDynamicFilterCollectionDuration(dynamicFilterId));
            })
            .collect(toImmutableList());
    return new DynamicFiltersStats(dynamicFilterDomainStats, lazyFilters, replicatedFilters, totalDynamicFilters, dynamicFilterDomainStats.size());
}
Also used : JsonProperty(com.fasterxml.jackson.annotation.JsonProperty) QueryId(io.trino.spi.QueryId) PlanFragment(io.trino.sql.planner.PlanFragment) EMPTY(io.trino.spi.connector.DynamicFilter.EMPTY) Inject(com.google.inject.Inject) DynamicFilters.extractDynamicFilters(io.trino.sql.DynamicFilters.extractDynamicFilters) Duration.succinctNanos(io.airlift.units.Duration.succinctNanos) Domain.union(io.trino.spi.predicate.Domain.union) TypeOperators(io.trino.spi.type.TypeOperators) SettableFuture(com.google.common.util.concurrent.SettableFuture) Duration(io.airlift.units.Duration) PlanNode(io.trino.sql.planner.plan.PlanNode) PreDestroy(javax.annotation.PreDestroy) Sets.difference(com.google.common.collect.Sets.difference) HashMultimap(com.google.common.collect.HashMultimap) DynamicFilters.extractSourceSymbols(io.trino.sql.DynamicFilters.extractSourceSymbols) DynamicFilters(io.trino.sql.DynamicFilters) PlanNodeSearcher(io.trino.sql.planner.optimizations.PlanNodeSearcher) Map(java.util.Map) Sets.union(com.google.common.collect.Sets.union) ExpressionExtractor.extractExpressions(io.trino.sql.planner.ExpressionExtractor.extractExpressions) JoinNode(io.trino.sql.planner.plan.JoinNode) Functions.identity(com.google.common.base.Functions.identity) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) Domain(io.trino.spi.predicate.Domain) Collection(java.util.Collection) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) SemiJoinNode(io.trino.sql.planner.plan.SemiJoinNode) ThreadSafe(javax.annotation.concurrent.ThreadSafe) GuardedBy(javax.annotation.concurrent.GuardedBy) TaskId(io.trino.execution.TaskId) JoinUtils(io.trino.operator.join.JoinUtils) String.format(java.lang.String.format) Preconditions.checkState(com.google.common.base.Preconditions.checkState) Objects(java.util.Objects) List(java.util.List) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) SubPlan(io.trino.sql.planner.SubPlan) DynamicFilter(io.trino.spi.connector.DynamicFilter) Optional(java.util.Optional) MoreFutures.whenAnyComplete(io.airlift.concurrent.MoreFutures.whenAnyComplete) MoreFutures.unmodifiableFuture(io.airlift.concurrent.MoreFutures.unmodifiableFuture) Session(io.trino.Session) MoreObjects.toStringHelper(com.google.common.base.MoreObjects.toStringHelper) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) StageId(io.trino.execution.StageId) Type(io.trino.spi.type.Type) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) Multimap(com.google.common.collect.Multimap) OptionalInt(java.util.OptionalInt) AtomicReference(java.util.concurrent.atomic.AtomicReference) DynamicFilterId(io.trino.sql.planner.plan.DynamicFilterId) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) Threads.daemonThreadsNamed(io.airlift.concurrent.Threads.daemonThreadsNamed) Objects.requireNonNull(java.util.Objects.requireNonNull) ColumnHandle(io.trino.spi.connector.ColumnHandle) ImmutableSet.toImmutableSet(com.google.common.collect.ImmutableSet.toImmutableSet) DynamicFilterConfig(io.trino.execution.DynamicFilterConfig) ExecutorService(java.util.concurrent.ExecutorService) MoreFutures.toCompletableFuture(io.airlift.concurrent.MoreFutures.toCompletableFuture) Symbol(io.trino.sql.planner.Symbol) SqlQueryExecution(io.trino.execution.SqlQueryExecution) 
Sets.newConcurrentHashSet(com.google.common.collect.Sets.newConcurrentHashSet) ConnectorSession(io.trino.spi.connector.ConnectorSession) TupleDomain(io.trino.spi.predicate.TupleDomain) SetMultimap(com.google.common.collect.SetMultimap) Executors.newFixedThreadPool(java.util.concurrent.Executors.newFixedThreadPool) FunctionManager(io.trino.metadata.FunctionManager) Consumer(java.util.function.Consumer) MorePredicates.isInstanceOfAny(io.trino.util.MorePredicates.isInstanceOfAny) Sets.intersection(com.google.common.collect.Sets.intersection) DomainCoercer.applySaturatedCasts(io.trino.sql.planner.DomainCoercer.applySaturatedCasts) SOURCE_DISTRIBUTION(io.trino.sql.planner.SystemPartitioningHandle.SOURCE_DISTRIBUTION) JsonCreator(com.fasterxml.jackson.annotation.JsonCreator) Metadata(io.trino.metadata.Metadata) TypeProvider(io.trino.sql.planner.TypeProvider) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
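
The stats are produced with the entrySet().stream().map(...).collect(toImmutableList()) shape shown above. Below is a self-contained sketch of the same mapping pattern using hypothetical simplified types in place of DynamicFilterDomainStats and the filter context.

import java.time.Duration;
import java.util.List;
import java.util.Map;

import static com.google.common.collect.ImmutableList.toImmutableList;

// Hypothetical simplified record standing in for DynamicFilterDomainStats.
record FilterStats(String filterId, String domainSummary, Duration collectionTime) {}

class StatsMapper {
    // Turns per-filter domain summaries into an immutable list of stats records,
    // mirroring the stream pipeline in getDynamicFilteringStats.
    static List<FilterStats> toStats(Map<String, String> summaries, Map<String, Duration> collectionTimes) {
        return summaries.entrySet().stream()
                .map(entry -> new FilterStats(
                        entry.getKey(),
                        entry.getValue(),
                        collectionTimes.getOrDefault(entry.getKey(), Duration.ZERO)))
                .collect(toImmutableList());
    }
}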

Aggregations

Session (io.trino.Session): 483
Test (org.testng.annotations.Test): 303
Optional (java.util.Optional): 103
List (java.util.List): 91
ImmutableList (com.google.common.collect.ImmutableList): 82
ImmutableMap (com.google.common.collect.ImmutableMap): 72
Map (java.util.Map): 70
BaseConnectorTest (io.trino.testing.BaseConnectorTest): 67
Objects.requireNonNull (java.util.Objects.requireNonNull): 65
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 61
HiveQueryRunner.createBucketedSession (io.trino.plugin.hive.HiveQueryRunner.createBucketedSession): 60
ImmutableSet (com.google.common.collect.ImmutableSet): 51
Set (java.util.Set): 50
ColumnHandle (io.trino.spi.connector.ColumnHandle): 49
Metadata (io.trino.metadata.Metadata): 47
QualifiedObjectName (io.trino.metadata.QualifiedObjectName): 46
SchemaTableName (io.trino.spi.connector.SchemaTableName): 45
TestingSession (io.trino.testing.TestingSession): 45
TableHandle (io.trino.metadata.TableHandle): 42
Type (io.trino.spi.type.Type): 41