
Example 1 with BIGINT

Use of io.trino.spi.type.BigintType.BIGINT in the trino project by trinodb.

The class DecorrelateInnerUnnestWithGlobalAggregation, method apply:

@Override
public Result apply(CorrelatedJoinNode correlatedJoinNode, Captures captures, Context context) {
    // find global aggregation in subquery
    List<PlanNode> globalAggregations = PlanNodeSearcher.searchFrom(correlatedJoinNode.getSubquery(), context.getLookup())
            .where(DecorrelateInnerUnnestWithGlobalAggregation::isGlobalAggregation)
            .recurseOnlyWhen(node -> node instanceof ProjectNode || isGlobalAggregation(node))
            .findAll();
    if (globalAggregations.isEmpty()) {
        return Result.empty();
    }
    // if there are multiple global aggregations, the one that is closest to the source is the "reducing" aggregation, because it reduces multiple input rows to single output row
    AggregationNode reducingAggregation = (AggregationNode) globalAggregations.get(globalAggregations.size() - 1);
    // find unnest in subquery
    Optional<UnnestNode> subqueryUnnest = PlanNodeSearcher.searchFrom(reducingAggregation.getSource(), context.getLookup())
            .where(node -> isSupportedUnnest(node, correlatedJoinNode.getCorrelation(), context.getLookup()))
            .recurseOnlyWhen(node -> node instanceof ProjectNode || isGroupedAggregation(node))
            .findFirst();
    if (subqueryUnnest.isEmpty()) {
        return Result.empty();
    }
    UnnestNode unnestNode = subqueryUnnest.get();
    // assign unique id to input rows to restore semantics of aggregations after rewrite
    PlanNode input = new AssignUniqueId(
            context.getIdAllocator().getNextId(),
            correlatedJoinNode.getInput(),
            context.getSymbolAllocator().newSymbol("unique", BIGINT));
    // pre-project unnest symbols if they were pre-projected in subquery
    // The correlated UnnestNode either unnests correlation symbols directly, or unnests symbols produced by a projection that uses only correlation symbols.
    // Here, any underlying projection that was a source of the correlated UnnestNode, is appended as a source of the rewritten UnnestNode.
    // If the projection is not necessary for UnnestNode (i.e. it does not produce any unnest symbols), it should be pruned afterwards.
    PlanNode unnestSource = context.getLookup().resolve(unnestNode.getSource());
    if (unnestSource instanceof ProjectNode) {
        ProjectNode sourceProjection = (ProjectNode) unnestSource;
        input = new ProjectNode(
                sourceProjection.getId(),
                input,
                Assignments.builder()
                        .putIdentities(input.getOutputSymbols())
                        .putAll(sourceProjection.getAssignments())
                        .build());
    }
    // rewrite correlated join to UnnestNode
    Symbol ordinalitySymbol = unnestNode.getOrdinalitySymbol().orElseGet(() -> context.getSymbolAllocator().newSymbol("ordinality", BIGINT));
    UnnestNode rewrittenUnnest = new UnnestNode(
            context.getIdAllocator().getNextId(),
            input,
            input.getOutputSymbols(),
            unnestNode.getMappings(),
            Optional.of(ordinalitySymbol),
            LEFT,
            Optional.empty());
    // append mask symbol based on ordinality to distinguish between the unnested rows and synthetic null rows
    Symbol mask = context.getSymbolAllocator().newSymbol("mask", BOOLEAN);
    ProjectNode sourceWithMask = new ProjectNode(
            context.getIdAllocator().getNextId(),
            rewrittenUnnest,
            Assignments.builder()
                    .putIdentities(rewrittenUnnest.getOutputSymbols())
                    .put(mask, new IsNotNullPredicate(ordinalitySymbol.toSymbolReference()))
                    .build());
    // restore all projections, grouped aggregations and global aggregations from the subquery
    PlanNode result = rewriteNodeSequence(
            context.getLookup().resolve(correlatedJoinNode.getSubquery()),
            input.getOutputSymbols(),
            mask,
            sourceWithMask,
            reducingAggregation.getId(),
            unnestNode.getId(),
            context.getSymbolAllocator(),
            context.getIdAllocator(),
            context.getLookup());
    // restrict outputs
    return Result.ofPlanNode(restrictOutputs(context.getIdAllocator(), result, ImmutableSet.copyOf(correlatedJoinNode.getOutputSymbols())).orElse(result));
}
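
BIGINT enters this rule only as the type of the two synthetic symbols it allocates: the "unique" symbol that AssignUniqueId uses to restore per-row aggregation semantics, and the "ordinality" symbol that the rewritten UnnestNode uses to tell real unnested rows from synthetic null rows. A minimal sketch of that pattern, assuming a SymbolAllocator instance named symbolAllocator is in scope (the rule obtains one via context.getSymbolAllocator()):

import io.trino.sql.planner.Symbol;
import io.trino.sql.planner.SymbolAllocator;
import static io.trino.spi.type.BigintType.BIGINT;

// Both bookkeeping columns are modeled as 64-bit BIGINT symbols.
Symbol unique = symbolAllocator.newSymbol("unique", BIGINT);         // row identity for AssignUniqueId
Symbol ordinality = symbolAllocator.newSymbol("ordinality", BIGINT); // ordinality counter emitted by the rewritten UnnestNode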

Example 2 with BIGINT

Use of io.trino.spi.type.BigintType.BIGINT in the trino project by trinodb.

The class OrcPageSourceFactory, method createOrcPageSource:

private ConnectorPageSource createOrcPageSource(
        HdfsEnvironment hdfsEnvironment,
        ConnectorIdentity identity,
        Configuration configuration,
        Path path,
        long start,
        long length,
        long estimatedFileSize,
        List<HiveColumnHandle> columns,
        List<HiveColumnHandle> projections,
        boolean useOrcColumnNames,
        boolean isFullAcid,
        TupleDomain<HiveColumnHandle> effectivePredicate,
        DateTimeZone legacyFileTimeZone,
        OrcReaderOptions options,
        Optional<AcidInfo> acidInfo,
        OptionalInt bucketNumber,
        boolean originalFile,
        AcidTransaction transaction,
        FileFormatDataSourceStats stats) {
    for (HiveColumnHandle column : columns) {
        checkArgument(column.getColumnType() == REGULAR, "column type must be regular: %s", column);
    }
    checkArgument(!effectivePredicate.isNone());
    OrcDataSource orcDataSource;
    boolean originalFilesPresent = acidInfo.isPresent() && !acidInfo.get().getOriginalFiles().isEmpty();
    try {
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(identity, path, configuration);
        FSDataInputStream inputStream = hdfsEnvironment.doAs(identity, () -> fileSystem.open(path));
        orcDataSource = new HdfsOrcDataSource(new OrcDataSourceId(path.toString()), estimatedFileSize, options, inputStream, stats);
    } catch (Exception e) {
        if (nullToEmpty(e.getMessage()).trim().equals("Filesystem closed") || e instanceof FileNotFoundException) {
            throw new TrinoException(HIVE_CANNOT_OPEN_SPLIT, e);
        }
        throw new TrinoException(HIVE_CANNOT_OPEN_SPLIT, splitError(e, path, start, length), e);
    }
    AggregatedMemoryContext memoryUsage = newSimpleAggregatedMemoryContext();
    try {
        Optional<OrcReader> optionalOrcReader = OrcReader.createOrcReader(orcDataSource, options);
        if (optionalOrcReader.isEmpty()) {
            return new EmptyPageSource();
        }
        OrcReader reader = optionalOrcReader.get();
        if (!originalFile && acidInfo.isPresent() && !acidInfo.get().isOrcAcidVersionValidated()) {
            validateOrcAcidVersion(path, reader);
        }
        List<OrcColumn> fileColumns = reader.getRootColumn().getNestedColumns();
        int actualColumnCount = columns.size() + (isFullAcid ? 3 : 0);
        List<OrcColumn> fileReadColumns = new ArrayList<>(actualColumnCount);
        List<Type> fileReadTypes = new ArrayList<>(actualColumnCount);
        List<OrcReader.ProjectedLayout> fileReadLayouts = new ArrayList<>(actualColumnCount);
        if (isFullAcid && !originalFilesPresent) {
            verifyAcidSchema(reader, path);
            Map<String, OrcColumn> acidColumnsByName = uniqueIndex(fileColumns, orcColumn -> orcColumn.getColumnName().toLowerCase(ENGLISH));
            fileColumns = ensureColumnNameConsistency(acidColumnsByName.get(AcidSchema.ACID_COLUMN_ROW_STRUCT.toLowerCase(ENGLISH)).getNestedColumns(), columns);
            fileReadColumns.add(acidColumnsByName.get(AcidSchema.ACID_COLUMN_ORIGINAL_TRANSACTION.toLowerCase(ENGLISH)));
            fileReadTypes.add(BIGINT);
            fileReadLayouts.add(fullyProjectedLayout());
            fileReadColumns.add(acidColumnsByName.get(AcidSchema.ACID_COLUMN_BUCKET.toLowerCase(ENGLISH)));
            fileReadTypes.add(INTEGER);
            fileReadLayouts.add(fullyProjectedLayout());
            fileReadColumns.add(acidColumnsByName.get(AcidSchema.ACID_COLUMN_ROW_ID.toLowerCase(ENGLISH)));
            fileReadTypes.add(BIGINT);
            fileReadLayouts.add(fullyProjectedLayout());
        }
        Map<String, OrcColumn> fileColumnsByName = ImmutableMap.of();
        if (useOrcColumnNames || isFullAcid) {
            verifyFileHasColumnNames(fileColumns, path);
            // Convert column names read from ORC files to lower case to be consistent with those stored in Hive Metastore
            fileColumnsByName = uniqueIndex(fileColumns, orcColumn -> orcColumn.getColumnName().toLowerCase(ENGLISH));
        }
        Map<String, List<List<String>>> projectionsByColumnName = ImmutableMap.of();
        Map<Integer, List<List<String>>> projectionsByColumnIndex = ImmutableMap.of();
        if (useOrcColumnNames || isFullAcid) {
            projectionsByColumnName = projections.stream()
                    .collect(Collectors.groupingBy(
                            HiveColumnHandle::getBaseColumnName,
                            mapping(OrcPageSourceFactory::getDereferencesAsList, toList())));
        } else {
            projectionsByColumnIndex = projections.stream()
                    .collect(Collectors.groupingBy(
                            HiveColumnHandle::getBaseHiveColumnIndex,
                            mapping(OrcPageSourceFactory::getDereferencesAsList, toList())));
        }
        TupleDomainOrcPredicateBuilder predicateBuilder = TupleDomainOrcPredicate.builder()
                .setBloomFiltersEnabled(options.isBloomFiltersEnabled())
                .setDomainCompactionThreshold(domainCompactionThreshold);
        Map<HiveColumnHandle, Domain> effectivePredicateDomains = effectivePredicate.getDomains()
                .orElseThrow(() -> new IllegalArgumentException("Effective predicate is none"));
        List<ColumnAdaptation> columnAdaptations = new ArrayList<>(columns.size());
        for (HiveColumnHandle column : columns) {
            OrcColumn orcColumn = null;
            OrcReader.ProjectedLayout projectedLayout = null;
            Map<Optional<HiveColumnProjectionInfo>, Domain> columnDomains = null;
            if (useOrcColumnNames || isFullAcid) {
                String columnName = column.getName().toLowerCase(ENGLISH);
                orcColumn = fileColumnsByName.get(columnName);
                if (orcColumn != null) {
                    projectedLayout = createProjectedLayout(orcColumn, projectionsByColumnName.get(columnName));
                    columnDomains = effectivePredicateDomains.entrySet().stream()
                            .filter(columnDomain -> columnDomain.getKey().getBaseColumnName().toLowerCase(ENGLISH).equals(columnName))
                            .collect(toImmutableMap(columnDomain -> columnDomain.getKey().getHiveColumnProjectionInfo(), Map.Entry::getValue));
                }
            } else if (column.getBaseHiveColumnIndex() < fileColumns.size()) {
                orcColumn = fileColumns.get(column.getBaseHiveColumnIndex());
                if (orcColumn != null) {
                    projectedLayout = createProjectedLayout(orcColumn, projectionsByColumnIndex.get(column.getBaseHiveColumnIndex()));
                    columnDomains = effectivePredicateDomains.entrySet().stream()
                            .filter(columnDomain -> columnDomain.getKey().getBaseHiveColumnIndex() == column.getBaseHiveColumnIndex())
                            .collect(toImmutableMap(columnDomain -> columnDomain.getKey().getHiveColumnProjectionInfo(), Map.Entry::getValue));
                }
            }
            Type readType = column.getType();
            if (orcColumn != null) {
                int sourceIndex = fileReadColumns.size();
                columnAdaptations.add(ColumnAdaptation.sourceColumn(sourceIndex));
                fileReadColumns.add(orcColumn);
                fileReadTypes.add(readType);
                fileReadLayouts.add(projectedLayout);
                // Add predicates on top-level and nested columns
                for (Map.Entry<Optional<HiveColumnProjectionInfo>, Domain> columnDomain : columnDomains.entrySet()) {
                    OrcColumn nestedColumn = getNestedColumn(orcColumn, columnDomain.getKey());
                    if (nestedColumn != null) {
                        predicateBuilder.addColumn(nestedColumn.getColumnId(), columnDomain.getValue());
                    }
                }
            } else {
                columnAdaptations.add(ColumnAdaptation.nullColumn(readType));
            }
        }
        OrcRecordReader recordReader = reader.createRecordReader(
                fileReadColumns,
                fileReadTypes,
                fileReadLayouts,
                predicateBuilder.build(),
                start,
                length,
                legacyFileTimeZone,
                memoryUsage,
                INITIAL_BATCH_SIZE,
                exception -> handleException(orcDataSource.getId(), exception),
                NameBasedFieldMapper::create);
        Optional<OrcDeletedRows> deletedRows = acidInfo.map(info -> new OrcDeletedRows(
                path.getName(),
                new OrcDeleteDeltaPageSourceFactory(options, identity, configuration, hdfsEnvironment, stats),
                identity,
                configuration,
                hdfsEnvironment,
                info,
                bucketNumber,
                memoryUsage));
        Optional<Long> originalFileRowId = acidInfo
                .filter(OrcPageSourceFactory::hasOriginalFiles)
                .map(info -> OriginalFilesUtils.getPrecedingRowCount(
                        acidInfo.get().getOriginalFiles(),
                        path,
                        hdfsEnvironment,
                        identity,
                        options,
                        configuration,
                        stats));
        if (transaction.isDelete()) {
            if (originalFile) {
                int bucket = bucketNumber.orElse(0);
                long startingRowId = originalFileRowId.orElse(0L);
                columnAdaptations.add(ColumnAdaptation.originalFileRowIdColumn(startingRowId, bucket));
            } else {
                columnAdaptations.add(ColumnAdaptation.rowIdColumn());
            }
        } else if (transaction.isUpdate()) {
            HiveUpdateProcessor updateProcessor = transaction.getUpdateProcessor().orElseThrow(() -> new IllegalArgumentException("updateProcessor not present"));
            List<HiveColumnHandle> dependencyColumns = projections.stream().filter(HiveColumnHandle::isBaseColumn).collect(toImmutableList());
            if (originalFile) {
                int bucket = bucketNumber.orElse(0);
                long startingRowId = originalFileRowId.orElse(0L);
                columnAdaptations.add(updatedRowColumnsWithOriginalFiles(startingRowId, bucket, updateProcessor, dependencyColumns));
            } else {
                columnAdaptations.add(updatedRowColumns(updateProcessor, dependencyColumns));
            }
        }
        return new OrcPageSource(recordReader, columnAdaptations, orcDataSource, deletedRows, originalFileRowId, memoryUsage, stats);
    } catch (Exception e) {
        try {
            orcDataSource.close();
        } catch (IOException ignored) {
        }
        if (e instanceof TrinoException) {
            throw (TrinoException) e;
        }
        String message = splitError(e, path, start, length);
        if (e instanceof BlockMissingException) {
            throw new TrinoException(HIVE_MISSING_DATA, message, e);
        }
        throw new TrinoException(HIVE_CANNOT_OPEN_SPLIT, message, e);
    }
}
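
In this page source factory, BIGINT is registered as the ORC read type for the ACID bookkeeping columns (originalTransaction and rowId), alongside INTEGER for the bucket column. The following is a minimal sketch, not part of the method above, of how BIGINT behaves as a Trino Type at the block level, shown only for context:

import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import static io.trino.spi.type.BigintType.BIGINT;

// BIGINT is a shared singleton Type. Columns declared with it, such as the ACID
// originalTransaction and rowId columns above, carry 64-bit long values.
BlockBuilder builder = BIGINT.createBlockBuilder(null, 1);
BIGINT.writeLong(builder, 42L);
Block block = builder.build();
long value = BIGINT.getLong(block, 0); // 42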

Example 3 with BIGINT

Use of io.trino.spi.type.BigintType.BIGINT in the trino project by trinodb.

The class AbstractTestHive, method testApplyProjection:

@Test
public void testApplyProjection() throws Exception {
    ColumnMetadata bigIntColumn0 = new ColumnMetadata("int0", BIGINT);
    ColumnMetadata bigIntColumn1 = new ColumnMetadata("int1", BIGINT);
    RowType oneLevelRowType = toRowType(ImmutableList.of(bigIntColumn0, bigIntColumn1));
    ColumnMetadata oneLevelRow0 = new ColumnMetadata("onelevelrow0", oneLevelRowType);
    RowType twoLevelRowType = toRowType(ImmutableList.of(oneLevelRow0, bigIntColumn0, bigIntColumn1));
    ColumnMetadata twoLevelRow0 = new ColumnMetadata("twolevelrow0", twoLevelRowType);
    List<ColumnMetadata> columnsForApplyProjectionTest = ImmutableList.of(bigIntColumn0, bigIntColumn1, oneLevelRow0, twoLevelRow0);
    SchemaTableName tableName = temporaryTable("apply_projection_tester");
    doCreateEmptyTable(tableName, ORC, columnsForApplyProjectionTest);
    try (Transaction transaction = newTransaction()) {
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
        List<ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle).values().stream()
                .filter(columnHandle -> !((HiveColumnHandle) columnHandle).isHidden())
                .collect(toList());
        assertEquals(columnHandles.size(), columnsForApplyProjectionTest.size());
        Map<String, ColumnHandle> columnHandleMap = columnHandles.stream()
                .collect(toImmutableMap(handle -> ((HiveColumnHandle) handle).getBaseColumnName(), Function.identity()));
        // Emulate symbols coming from the query plan and map them to column handles
        Map<String, ColumnHandle> columnHandlesWithSymbols = ImmutableMap.of(
                "symbol_0", columnHandleMap.get("int0"),
                "symbol_1", columnHandleMap.get("int1"),
                "symbol_2", columnHandleMap.get("onelevelrow0"),
                "symbol_3", columnHandleMap.get("twolevelrow0"));
        // Create variables for the emulated symbols
        Map<String, Variable> symbolVariableMapping = columnHandlesWithSymbols.entrySet().stream()
                .collect(toImmutableMap(
                        Map.Entry::getKey,
                        e -> new Variable(e.getKey(), ((HiveColumnHandle) e.getValue()).getBaseType())));
        // Create dereference expressions for testing
        FieldDereference symbol2Field0 = new FieldDereference(BIGINT, symbolVariableMapping.get("symbol_2"), 0);
        FieldDereference symbol3Field0 = new FieldDereference(oneLevelRowType, symbolVariableMapping.get("symbol_3"), 0);
        FieldDereference symbol3Field0Field0 = new FieldDereference(BIGINT, symbol3Field0, 0);
        FieldDereference symbol3Field1 = new FieldDereference(BIGINT, symbolVariableMapping.get("symbol_3"), 1);
        Map<String, ColumnHandle> inputAssignments;
        List<ConnectorExpression> inputProjections;
        Optional<ProjectionApplicationResult<ConnectorTableHandle>> projectionResult;
        List<ConnectorExpression> expectedProjections;
        Map<String, Type> expectedAssignments;
        // Test projected columns pushdown to HiveTableHandle in case of all variable references
        inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_0", "symbol_1"));
        inputProjections = ImmutableList.of(symbolVariableMapping.get("symbol_0"), symbolVariableMapping.get("symbol_1"));
        expectedAssignments = ImmutableMap.of("symbol_0", BIGINT, "symbol_1", BIGINT);
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, false, inputProjections, expectedAssignments);
        // Empty result when projected column handles are same as those present in table handle
        projectionResult = metadata.applyProjection(session, projectionResult.get().getHandle(), inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, true, ImmutableList.of(), ImmutableMap.of());
        // Extra columns handles in HiveTableHandle should get pruned
        projectionResult = metadata.applyProjection(session, ((HiveTableHandle) tableHandle).withProjectedColumns(ImmutableSet.copyOf(columnHandles)), inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, false, inputProjections, expectedAssignments);
        // Test projection pushdown for dereferences
        inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2", "symbol_3"));
        inputProjections = ImmutableList.of(symbol2Field0, symbol3Field0Field0, symbol3Field1);
        expectedAssignments = ImmutableMap.of("onelevelrow0#f_int0", BIGINT, "twolevelrow0#f_onelevelrow0#f_int0", BIGINT, "twolevelrow0#f_int0", BIGINT);
        expectedProjections = ImmutableList.of(new Variable("onelevelrow0#f_int0", BIGINT), new Variable("twolevelrow0#f_onelevelrow0#f_int0", BIGINT), new Variable("twolevelrow0#f_int0", BIGINT));
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments);
        // Test reuse of virtual column handles
        // Round-1: input projections [symbol_2, symbol_2.int0]. virtual handle is created for symbol_2.int0.
        inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2"));
        inputProjections = ImmutableList.of(symbol2Field0, symbolVariableMapping.get("symbol_2"));
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        expectedProjections = ImmutableList.of(new Variable("onelevelrow0#f_int0", BIGINT), symbolVariableMapping.get("symbol_2"));
        expectedAssignments = ImmutableMap.of("onelevelrow0#f_int0", BIGINT, "symbol_2", oneLevelRowType);
        assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments);
        // Round-2: input projections [symbol_2.int0 and onelevelrow0#f_int0]. Virtual handle is reused.
        Assignment newlyCreatedColumn = getOnlyElement(projectionResult.get().getAssignments().stream()
                .filter(handle -> handle.getVariable().equals("onelevelrow0#f_int0"))
                .collect(toList()));
        inputAssignments = ImmutableMap.<String, ColumnHandle>builder()
                .putAll(getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2")))
                .put(newlyCreatedColumn.getVariable(), newlyCreatedColumn.getColumn())
                .buildOrThrow();
        inputProjections = ImmutableList.of(symbol2Field0, new Variable("onelevelrow0#f_int0", BIGINT));
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        expectedProjections = ImmutableList.of(new Variable("onelevelrow0#f_int0", BIGINT), new Variable("onelevelrow0#f_int0", BIGINT));
        expectedAssignments = ImmutableMap.of("onelevelrow0#f_int0", BIGINT);
        assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments);
    } finally {
        dropTable(tableName);
    }
}
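
The test above declares its BIGINT columns through ColumnMetadata and combines them into nested rows with the test helper toRowType. A rough sketch of what that helper builds for the first two columns, expressed with the public RowType factory methods and shown for illustration only:

import io.trino.spi.type.RowType;
import static io.trino.spi.type.BigintType.BIGINT;

// Roughly the equivalent of toRowType(ImmutableList.of(bigIntColumn0, bigIntColumn1)):
// a two-field row whose fields are both BIGINT.
RowType oneLevelRowType = RowType.rowType(
        RowType.field("int0", BIGINT),
        RowType.field("int1", BIGINT));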

Example 4 with BIGINT

Use of io.trino.spi.type.BigintType.BIGINT in the trino project by trinodb.

The class TestLogicalPlanner, method testOffset:

@Test
public void testOffset() {
    assertPlan("SELECT name FROM nation OFFSET 2 ROWS", any(strictProject(ImmutableMap.of("name", new ExpressionMatcher("name")), filter("row_num > BIGINT '2'", rowNumber(pattern -> pattern.partitionBy(ImmutableList.of()), any(tableScan("nation", ImmutableMap.of("name", "name")))).withAlias("row_num", new RowNumberSymbolMatcher())))));
    assertPlan("SELECT name FROM nation ORDER BY regionkey OFFSET 2 ROWS", any(strictProject(ImmutableMap.of("name", new ExpressionMatcher("name")), filter("row_num > BIGINT '2'", rowNumber(pattern -> pattern.partitionBy(ImmutableList.of()), anyTree(sort(ImmutableList.of(sort("regionkey", ASCENDING, LAST)), any(tableScan("nation", ImmutableMap.of("name", "name", "regionkey", "regionkey")))))).withAlias("row_num", new RowNumberSymbolMatcher())))));
    assertPlan("SELECT name FROM nation ORDER BY regionkey OFFSET 2 ROWS FETCH NEXT 5 ROWS ONLY", any(strictProject(ImmutableMap.of("name", new ExpressionMatcher("name")), filter("row_num > BIGINT '2'", rowNumber(pattern -> pattern.partitionBy(ImmutableList.of()), any(topN(7, ImmutableList.of(sort("regionkey", ASCENDING, LAST)), TopNNode.Step.FINAL, anyTree(tableScan("nation", ImmutableMap.of("name", "name", "regionkey", "regionkey")))))).withAlias("row_num", new RowNumberSymbolMatcher())))));
    assertPlan("SELECT name FROM nation OFFSET 2 ROWS FETCH NEXT 5 ROWS ONLY", any(strictProject(ImmutableMap.of("name", new ExpressionMatcher("name")), filter("row_num > BIGINT '2'", rowNumber(pattern -> pattern.partitionBy(ImmutableList.of()), limit(7, any(tableScan("nation", ImmutableMap.of("name", "name"))))).withAlias("row_num", new RowNumberSymbolMatcher())))));
}
Also used : ExpressionMatcher(io.trino.sql.planner.assertions.ExpressionMatcher) CREATED(io.trino.sql.planner.LogicalPlanner.Stage.CREATED) PlanMatchPattern.aggregation(io.trino.sql.planner.assertions.PlanMatchPattern.aggregation) ROW_NUMBER(io.trino.sql.planner.plan.TopNRankingNode.RankingType.ROW_NUMBER) PlanMatchPattern(io.trino.sql.planner.assertions.PlanMatchPattern) Test(org.testng.annotations.Test) CorrelatedJoinNode(io.trino.sql.planner.plan.CorrelatedJoinNode) PlanNode(io.trino.sql.planner.plan.PlanNode) PlanMatchPattern.assignUniqueId(io.trino.sql.planner.assertions.PlanMatchPattern.assignUniqueId) LEFT(io.trino.sql.planner.plan.JoinNode.Type.LEFT) PlanMatchPattern.markDistinct(io.trino.sql.planner.assertions.PlanMatchPattern.markDistinct) TpchTableHandle(io.trino.plugin.tpch.TpchTableHandle) AddLocalExchanges(io.trino.sql.planner.optimizations.AddLocalExchanges) LongLiteral(io.trino.sql.tree.LongLiteral) Slices(io.airlift.slice.Slices) Map(java.util.Map) LOCAL(io.trino.sql.planner.plan.ExchangeNode.Scope.LOCAL) TEST_SESSION(io.trino.SessionTestUtils.TEST_SESSION) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) Assert.assertFalse(org.testng.Assert.assertFalse) TableScanNode(io.trino.sql.planner.plan.TableScanNode) ExplainAnalyzeNode(io.trino.sql.planner.plan.ExplainAnalyzeNode) PlanMatchPattern.expression(io.trino.sql.planner.assertions.PlanMatchPattern.expression) Range(io.trino.spi.predicate.Range) PlanMatchPattern.topNRanking(io.trino.sql.planner.assertions.PlanMatchPattern.topNRanking) Domain(io.trino.spi.predicate.Domain) StatisticsWriterNode(io.trino.sql.planner.plan.StatisticsWriterNode) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) PlanMatchPattern.aliasToIndex(io.trino.sql.planner.assertions.PlanMatchPattern.aliasToIndex) SemiJoinNode(io.trino.sql.planner.plan.SemiJoinNode) QueryTemplate.queryTemplate(io.trino.tests.QueryTemplate.queryTemplate) PlanMatchPattern.values(io.trino.sql.planner.assertions.PlanMatchPattern.values) ValueSet(io.trino.spi.predicate.ValueSet) PlanMatchPattern.strictConstrainedTableScan(io.trino.sql.planner.assertions.PlanMatchPattern.strictConstrainedTableScan) PlanMatchPattern.apply(io.trino.sql.planner.assertions.PlanMatchPattern.apply) ValuesNode(io.trino.sql.planner.plan.ValuesNode) TASK_CONCURRENCY(io.trino.SystemSessionProperties.TASK_CONCURRENCY) PlanMatchPattern.output(io.trino.sql.planner.assertions.PlanMatchPattern.output) REAL(io.trino.spi.type.RealType.REAL) Session(io.trino.Session) OPTIMIZE_HASH_GENERATION(io.trino.SystemSessionProperties.OPTIMIZE_HASH_GENERATION) PlanNodeSearcher.searchFrom(io.trino.sql.planner.optimizations.PlanNodeSearcher.searchFrom) PARTITIONED(io.trino.sql.planner.plan.JoinNode.DistributionType.PARTITIONED) LimitNode(io.trino.sql.planner.plan.LimitNode) PlanMatchPattern.filter(io.trino.sql.planner.assertions.PlanMatchPattern.filter) PlanMatchPattern.patternRecognition(io.trino.sql.planner.assertions.PlanMatchPattern.patternRecognition) REPLICATE(io.trino.sql.planner.plan.ExchangeNode.Type.REPLICATE) DISTRIBUTED_SORT(io.trino.SystemSessionProperties.DISTRIBUTED_SORT) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) PlanOptimizer(io.trino.sql.planner.optimizations.PlanOptimizer) ColumnHandle(io.trino.spi.connector.ColumnHandle) AggregationNode(io.trino.sql.planner.plan.AggregationNode) JOIN_REORDERING_STRATEGY(io.trino.SystemSessionProperties.JOIN_REORDERING_STRATEGY) BasePlanTest(io.trino.sql.planner.assertions.BasePlanTest) 
PlanMatchPattern.join(io.trino.sql.planner.assertions.PlanMatchPattern.join) MoreCollectors.toOptional(com.google.common.collect.MoreCollectors.toOptional) StringLiteral(io.trino.sql.tree.StringLiteral) MorePredicates(io.trino.util.MorePredicates) QueryTemplate(io.trino.tests.QueryTemplate) IrLabel(io.trino.sql.planner.rowpattern.ir.IrLabel) RANK(io.trino.sql.planner.plan.TopNRankingNode.RankingType.RANK) LESS_THAN(io.trino.sql.tree.ComparisonExpression.Operator.LESS_THAN) JoinDistributionType(io.trino.sql.planner.OptimizerConfig.JoinDistributionType) SortOrder(io.trino.spi.connector.SortOrder) PlanMatchPattern.functionCall(io.trino.sql.planner.assertions.PlanMatchPattern.functionCall) PlanMatchPattern.windowFrame(io.trino.sql.planner.assertions.PlanMatchPattern.windowFrame) DOUBLE(io.trino.spi.type.DoubleType.DOUBLE) MorePredicates.isInstanceOfAny(io.trino.util.MorePredicates.isInstanceOfAny) PlanMatchPattern.node(io.trino.sql.planner.assertions.PlanMatchPattern.node) ROWS(io.trino.sql.tree.WindowFrame.Type.ROWS) Domain.multipleValues(io.trino.spi.predicate.Domain.multipleValues) VarcharType.createVarcharType(io.trino.spi.type.VarcharType.createVarcharType) PlanMatchPattern.tableScan(io.trino.sql.planner.assertions.PlanMatchPattern.tableScan) SUBQUERY_MULTIPLE_ROWS(io.trino.spi.StandardErrorCode.SUBQUERY_MULTIPLE_ROWS) PlanMatchPattern.strictTableScan(io.trino.sql.planner.assertions.PlanMatchPattern.strictTableScan) PlanMatchPattern.any(io.trino.sql.planner.assertions.PlanMatchPattern.any) CURRENT_ROW(io.trino.sql.tree.FrameBound.Type.CURRENT_ROW) PlanMatchPattern.constrainedTableScan(io.trino.sql.planner.assertions.PlanMatchPattern.constrainedTableScan) REPLICATED(io.trino.sql.planner.plan.JoinNode.DistributionType.REPLICATED) FilterNode(io.trino.sql.planner.plan.FilterNode) PlanMatchPattern.limit(io.trino.sql.planner.assertions.PlanMatchPattern.limit) PlanMatchPattern.exchange(io.trino.sql.planner.assertions.PlanMatchPattern.exchange) PlanMatchPattern.equiJoinClause(io.trino.sql.planner.assertions.PlanMatchPattern.equiJoinClause) REPARTITION(io.trino.sql.planner.plan.ExchangeNode.Type.REPARTITION) WINDOW(io.trino.sql.tree.PatternRecognitionRelation.RowsPerMatch.WINDOW) JoinNode(io.trino.sql.planner.plan.JoinNode) INTEGER(io.trino.spi.type.IntegerType.INTEGER) FunctionCall(io.trino.sql.tree.FunctionCall) ASCENDING(io.trino.sql.tree.SortItem.Ordering.ASCENDING) ExpressionMatcher(io.trino.sql.planner.assertions.ExpressionMatcher) RowNumberSymbolMatcher(io.trino.sql.planner.assertions.RowNumberSymbolMatcher) TpchColumnHandle(io.trino.plugin.tpch.TpchColumnHandle) RowType(io.trino.spi.type.RowType) MarkDistinctNode(io.trino.sql.planner.plan.MarkDistinctNode) ImmutableMap(com.google.common.collect.ImmutableMap) Predicate(java.util.function.Predicate) EnforceSingleRowNode(io.trino.sql.planner.plan.EnforceSingleRowNode) TypeSignatureTranslator.toSqlType(io.trino.sql.analyzer.TypeSignatureTranslator.toSqlType) IndexJoinNode(io.trino.sql.planner.plan.IndexJoinNode) FORCE_SINGLE_NODE_OUTPUT(io.trino.SystemSessionProperties.FORCE_SINGLE_NODE_OUTPUT) String.format(java.lang.String.format) IrQuantifier.oneOrMore(io.trino.sql.planner.rowpattern.ir.IrQuantifier.oneOrMore) GenericLiteral(io.trino.sql.tree.GenericLiteral) EQUAL(io.trino.sql.tree.ComparisonExpression.Operator.EQUAL) List(java.util.List) ASC_NULLS_LAST(io.trino.spi.connector.SortOrder.ASC_NULLS_LAST) PlanMatchPattern.anyTree(io.trino.sql.planner.assertions.PlanMatchPattern.anyTree) 
PlanMatchPattern.strictProject(io.trino.sql.planner.assertions.PlanMatchPattern.strictProject) GATHER(io.trino.sql.planner.plan.ExchangeNode.Type.GATHER) BIGINT(io.trino.spi.type.BigintType.BIGINT) PlanMatchPattern.constrainedTableScanWithTableLayout(io.trino.sql.planner.assertions.PlanMatchPattern.constrainedTableScanWithTableLayout) FILTERING_SEMI_JOIN_TO_INNER(io.trino.SystemSessionProperties.FILTERING_SEMI_JOIN_TO_INNER) Entry(java.util.Map.Entry) ApplyNode(io.trino.sql.planner.plan.ApplyNode) Optional(java.util.Optional) ExchangeNode(io.trino.sql.planner.plan.ExchangeNode) PlanMatchPattern.rowNumber(io.trino.sql.planner.assertions.PlanMatchPattern.rowNumber) UNBOUNDED_FOLLOWING(io.trino.sql.tree.FrameBound.Type.UNBOUNDED_FOLLOWING) PlanMatchPattern.anyNot(io.trino.sql.planner.assertions.PlanMatchPattern.anyNot) DESCENDING(io.trino.sql.tree.SortItem.Ordering.DESCENDING) PlanMatchPattern.semiJoin(io.trino.sql.planner.assertions.PlanMatchPattern.semiJoin) INNER(io.trino.sql.planner.plan.JoinNode.Type.INNER) SINGLE(io.trino.sql.planner.plan.AggregationNode.Step.SINGLE) Assert.assertEquals(org.testng.Assert.assertEquals) IrQuantified(io.trino.sql.planner.rowpattern.ir.IrQuantified) OPTIMIZED(io.trino.sql.planner.LogicalPlanner.Stage.OPTIMIZED) SortNode(io.trino.sql.planner.plan.SortNode) PlanMatchPattern.specification(io.trino.sql.planner.assertions.PlanMatchPattern.specification) Cast(io.trino.sql.tree.Cast) ImmutableList(com.google.common.collect.ImmutableList) JoinReorderingStrategy(io.trino.sql.planner.OptimizerConfig.JoinReorderingStrategy) PlanMatchPattern.sort(io.trino.sql.planner.assertions.PlanMatchPattern.sort) DynamicFilterPattern(io.trino.sql.planner.assertions.PlanMatchPattern.DynamicFilterPattern) ProjectNode(io.trino.sql.planner.plan.ProjectNode) LAST(io.trino.sql.tree.SortItem.NullOrdering.LAST) PARTIAL(io.trino.sql.planner.plan.AggregationNode.Step.PARTIAL) PlanMatchPattern.identityProject(io.trino.sql.planner.assertions.PlanMatchPattern.identityProject) TopNNode(io.trino.sql.planner.plan.TopNNode) PlanMatchPattern.topN(io.trino.sql.planner.assertions.PlanMatchPattern.topN) TRUE_LITERAL(io.trino.sql.tree.BooleanLiteral.TRUE_LITERAL) TupleDomain(io.trino.spi.predicate.TupleDomain) PlanMatchPattern.singleGroupingSet(io.trino.sql.planner.assertions.PlanMatchPattern.singleGroupingSet) QualifiedName(io.trino.sql.tree.QualifiedName) Consumer(java.util.function.Consumer) DistributionType(io.trino.sql.planner.plan.SemiJoinNode.DistributionType) PlanMatchPattern.project(io.trino.sql.planner.assertions.PlanMatchPattern.project) JOIN_DISTRIBUTION_TYPE(io.trino.SystemSessionProperties.JOIN_DISTRIBUTION_TYPE) CheckSubqueryNodesAreRewritten(io.trino.sql.planner.optimizations.CheckSubqueryNodesAreRewritten) REMOTE(io.trino.sql.planner.plan.ExchangeNode.Scope.REMOTE) DistinctLimitNode(io.trino.sql.planner.plan.DistinctLimitNode) Row(io.trino.sql.tree.Row) FINAL(io.trino.sql.planner.plan.AggregationNode.Step.FINAL) RowNumberSymbolMatcher(io.trino.sql.planner.assertions.RowNumberSymbolMatcher) Test(org.testng.annotations.Test) BasePlanTest(io.trino.sql.planner.assertions.BasePlanTest)
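Note on the filter being matched: the row_num alias refers to the BIGINT output of the row-number node, and BIGINT '2' is a typed literal, so each assertion above matches a predicate of roughly the shape sketched below. This is an illustration only, not part of the test; the wrapper class and the symbol name are hypothetical, while the AST classes are the same io.trino.sql.tree types already imported in this example.

import io.trino.sql.tree.ComparisonExpression;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.GenericLiteral;
import io.trino.sql.tree.SymbolReference;

import static io.trino.sql.tree.ComparisonExpression.Operator.GREATER_THAN;

public class OffsetPredicateSketch
{
    public static void main(String[] args)
    {
        // Roughly the predicate asserted by filter("row_num > BIGINT '2'", ...):
        // the BIGINT row-number symbol compared against a BIGINT literal.
        // The actual symbol name is assigned by the planner.
        Expression offsetPredicate = new ComparisonExpression(
                GREATER_THAN,
                new SymbolReference("row_num"),
                new GenericLiteral("BIGINT", "2"));

        System.out.println(offsetPredicate);
    }
}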

Example 5 with BIGINT

use of io.trino.spi.type.BigintType.BIGINT in project trino by trinodb.

the class TestPushPredicateIntoTableScan method doesNotFireOnNonDeterministicPredicate.

@Test
public void doesNotFireOnNonDeterministicPredicate() {
    ColumnHandle columnHandle = new TpchColumnHandle("nationkey", BIGINT);
    tester().assertThat(pushPredicateIntoTableScan)
            .on(p -> p.filter(
                    new ComparisonExpression(
                            EQUAL,
                            functionResolution.functionCallBuilder(QualifiedName.of("rand")).build(),
                            new LongLiteral("42")),
                    p.tableScan(
                            nationTableHandle,
                            ImmutableList.of(p.symbol("nationkey", BIGINT)),
                            ImmutableMap.of(p.symbol("nationkey", BIGINT), columnHandle),
                            TupleDomain.all())))
            .doesNotFire();
}
Also used : AND(io.trino.sql.tree.LogicalExpression.Operator.AND) TestingFunctionResolution(io.trino.metadata.TestingFunctionResolution) Test(org.testng.annotations.Test) CatalogName(io.trino.connector.CatalogName) MockConnectorFactory(io.trino.connector.MockConnectorFactory) TpchTableHandle(io.trino.plugin.tpch.TpchTableHandle) LongLiteral(io.trino.sql.tree.LongLiteral) Slices(io.airlift.slice.Slices) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) Map(java.util.Map) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) TpchColumnHandle(io.trino.plugin.tpch.TpchColumnHandle) ImmutableMap(com.google.common.collect.ImmutableMap) Domain(io.trino.spi.predicate.Domain) BeforeClass(org.testng.annotations.BeforeClass) PlanMatchPattern.values(io.trino.sql.planner.assertions.PlanMatchPattern.values) MODULUS(io.trino.sql.tree.ArithmeticBinaryExpression.Operator.MODULUS) SchemaTableName(io.trino.spi.connector.SchemaTableName) ComparisonExpression(io.trino.sql.tree.ComparisonExpression) GenericLiteral(io.trino.sql.tree.GenericLiteral) EQUAL(io.trino.sql.tree.ComparisonExpression.Operator.EQUAL) PlanMatchPattern.anyTree(io.trino.sql.planner.assertions.PlanMatchPattern.anyTree) BIGINT(io.trino.spi.type.BigintType.BIGINT) ConnectorPartitioningHandle(io.trino.spi.connector.ConnectorPartitioningHandle) SymbolReference(io.trino.sql.tree.SymbolReference) Domain.singleValue(io.trino.spi.predicate.Domain.singleValue) PlanMatchPattern.constrainedTableScanWithTableLayout(io.trino.sql.planner.assertions.PlanMatchPattern.constrainedTableScanWithTableLayout) Optional(java.util.Optional) ConnectorTablePartitioning(io.trino.spi.connector.ConnectorTablePartitioning) PlanBuilder.expression(io.trino.sql.planner.iterative.rule.test.PlanBuilder.expression) Session(io.trino.Session) TypeAnalyzer.createTestingTypeAnalyzer(io.trino.sql.planner.TypeAnalyzer.createTestingTypeAnalyzer) NullableValue(io.trino.spi.predicate.NullableValue) Type(io.trino.spi.type.Type) PlanMatchPattern.filter(io.trino.sql.planner.assertions.PlanMatchPattern.filter) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) ImmutableList(com.google.common.collect.ImmutableList) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) ColumnHandle(io.trino.spi.connector.ColumnHandle) ArithmeticBinaryExpression(io.trino.sql.tree.ArithmeticBinaryExpression) ConstraintApplicationResult(io.trino.spi.connector.ConstraintApplicationResult) StringLiteral(io.trino.sql.tree.StringLiteral) BaseRuleTest(io.trino.sql.planner.iterative.rule.test.BaseRuleTest) MockConnectorColumnHandle(io.trino.connector.MockConnectorColumnHandle) TestingTransactionHandle(io.trino.testing.TestingTransactionHandle) TupleDomain(io.trino.spi.predicate.TupleDomain) TpchTransactionHandle(io.trino.plugin.tpch.TpchTransactionHandle) QualifiedName(io.trino.sql.tree.QualifiedName) MockConnectorTableHandle(io.trino.connector.MockConnectorTableHandle) TableHandle(io.trino.metadata.TableHandle) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) LogicalExpression(io.trino.sql.tree.LogicalExpression) VarcharType.createVarcharType(io.trino.spi.type.VarcharType.createVarcharType) PlanMatchPattern.tableScan(io.trino.sql.planner.assertions.PlanMatchPattern.tableScan) TpchColumnHandle(io.trino.plugin.tpch.TpchColumnHandle) ColumnHandle(io.trino.spi.connector.ColumnHandle) MockConnectorColumnHandle(io.trino.connector.MockConnectorColumnHandle) ComparisonExpression(io.trino.sql.tree.ComparisonExpression) 
TpchColumnHandle(io.trino.plugin.tpch.TpchColumnHandle) LongLiteral(io.trino.sql.tree.LongLiteral) Test(org.testng.annotations.Test) BaseRuleTest(io.trino.sql.planner.iterative.rule.test.BaseRuleTest)
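The rule skips non-deterministic predicates: a comparison against rand() cannot be folded into a constraint on the scan the way a constant comparison can. For contrast, a deterministic predicate such as nationkey = 42 on the same BIGINT column can be summarized as a TupleDomain and offered to the connector. The snippet below is an illustration only (the wrapper class name is hypothetical); it uses the same TupleDomain, Domain and TpchColumnHandle types imported in this example.

import com.google.common.collect.ImmutableMap;
import io.trino.plugin.tpch.TpchColumnHandle;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.TupleDomain;

import static io.trino.spi.type.BigintType.BIGINT;

public class DeterministicPredicateSketch
{
    public static void main(String[] args)
    {
        ColumnHandle nationkey = new TpchColumnHandle("nationkey", BIGINT);

        // nationkey = 42 summarized as a single-value BIGINT domain.
        // rand() = 42 admits no such summary, which is why the rule above must not fire.
        TupleDomain<ColumnHandle> constraint = TupleDomain.withColumnDomains(
                ImmutableMap.of(nationkey, Domain.singleValue(BIGINT, 42L)));

        System.out.println(constraint);
    }
}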

Aggregations

BIGINT (io.trino.spi.type.BigintType.BIGINT): 106
ImmutableList (com.google.common.collect.ImmutableList): 99
Optional (java.util.Optional): 87
Test (org.testng.annotations.Test): 86
ImmutableMap (com.google.common.collect.ImmutableMap): 84
VARCHAR (io.trino.spi.type.VarcharType.VARCHAR): 44
List (java.util.List): 44
Map (java.util.Map): 44
ColumnHandle (io.trino.spi.connector.ColumnHandle): 38
Type (io.trino.spi.type.Type): 38
Symbol (io.trino.sql.planner.Symbol): 38
QualifiedName (io.trino.sql.tree.QualifiedName): 38
ImmutableSet (com.google.common.collect.ImmutableSet): 37
TupleDomain (io.trino.spi.predicate.TupleDomain): 36
PlanMatchPattern.values (io.trino.sql.planner.assertions.PlanMatchPattern.values): 36
TableHandle (io.trino.metadata.TableHandle): 35
BaseRuleTest (io.trino.sql.planner.iterative.rule.test.BaseRuleTest): 35
Session (io.trino.Session): 34
PlanMatchPattern.filter (io.trino.sql.planner.assertions.PlanMatchPattern.filter): 33
PlanBuilder.expression (io.trino.sql.planner.iterative.rule.test.PlanBuilder.expression): 32
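
BIGINT here is the singleton io.trino.spi.type.BigintType instance. Besides declaring column and symbol types, as in the examples above, it is commonly used to write and read values through the block API. A minimal sketch, assuming trino-spi on the classpath (the class name is illustrative):

import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;

import static io.trino.spi.type.BigintType.BIGINT;

public class BigintBlockSketch
{
    public static void main(String[] args)
    {
        // Build a three-value BIGINT block; a null BlockBuilderStatus is fine
        // when no memory accounting is needed.
        BlockBuilder builder = BIGINT.createBlockBuilder(null, 3);
        BIGINT.writeLong(builder, 1L);
        BIGINT.writeLong(builder, 2L);
        BIGINT.writeLong(builder, 3L);
        Block block = builder.build();

        // Read the values back through the same type constant.
        for (int position = 0; position < block.getPositionCount(); position++) {
            System.out.println(BIGINT.getLong(block, position));
        }
    }
}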