Example 31 with VARCHAR

Use of io.trino.spi.type.VarcharType.VARCHAR in project trino by trinodb.

From the class TestLocalExchange, method testPartitionCustomPartitioning:

@Test(dataProvider = "executionStrategy")
public void testPartitionCustomPartitioning(PipelineExecutionStrategy executionStrategy) {
    ConnectorPartitioningHandle connectorPartitioningHandle = new ConnectorPartitioningHandle() {
    };
    ConnectorNodePartitioningProvider connectorNodePartitioningProvider = new ConnectorNodePartitioningProvider() {

        @Override
        public ConnectorBucketNodeMap getBucketNodeMap(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorPartitioningHandle partitioningHandle) {
            return createBucketNodeMap(2);
        }

        @Override
        public ToIntFunction<ConnectorSplit> getSplitBucketFunction(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorPartitioningHandle partitioningHandle) {
            throw new UnsupportedOperationException();
        }

        @Override
        public BucketFunction getBucketFunction(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorPartitioningHandle partitioningHandle, List<Type> partitionChannelTypes, int bucketCount) {
            return (page, position) -> {
                long rowValue = BIGINT.getLong(page.getBlock(0), position);
                if (rowValue == 42) {
                    return 0;
                }
                return 1;
            };
        }
    };
    List<Type> types = ImmutableList.of(VARCHAR, BIGINT);
    nodePartitioningManager.addPartitioningProvider(new CatalogName("foo"), connectorNodePartitioningProvider);
    PartitioningHandle partitioningHandle = new PartitioningHandle(
            Optional.of(new CatalogName("foo")),
            Optional.of(TestingTransactionHandle.create()),
            connectorPartitioningHandle);
    LocalExchangeFactory localExchangeFactory = new LocalExchangeFactory(
            nodePartitioningManager,
            SESSION,
            partitioningHandle,
            2,
            types,
            ImmutableList.of(1),
            Optional.empty(),
            executionStrategy,
            LOCAL_EXCHANGE_MAX_BUFFERED_BYTES,
            TYPE_OPERATOR_FACTORY);
    LocalExchangeSinkFactoryId localExchangeSinkFactoryId = localExchangeFactory.newSinkFactoryId();
    localExchangeFactory.noMoreSinkFactories();
    run(localExchangeFactory, executionStrategy, exchange -> {
        assertEquals(exchange.getBufferCount(), 2);
        assertExchangeTotalBufferedBytes(exchange, 0);
        LocalExchangeSinkFactory sinkFactory = exchange.getSinkFactory(localExchangeSinkFactoryId);
        LocalExchangeSink sink = sinkFactory.createSink();
        assertSinkCanWrite(sink);
        sinkFactory.close();
        sinkFactory.noMoreSinkFactories();
        LocalExchangeSource sourceA = exchange.getSource(1);
        assertSource(sourceA, 0);
        LocalExchangeSource sourceB = exchange.getSource(0);
        assertSource(sourceB, 0);
        Page pageA = SequencePageBuilder.createSequencePage(types, 1, 100, 42);
        sink.addPage(pageA);
        assertSource(sourceA, 1);
        assertSource(sourceB, 0);
        assertRemovePage(types, sourceA, pageA);
        assertSource(sourceA, 0);
        Page pageB = SequencePageBuilder.createSequencePage(types, 100, 100, 43);
        sink.addPage(pageB);
        assertSource(sourceA, 0);
        assertSource(sourceB, 1);
        assertRemovePage(types, sourceB, pageB);
        assertSource(sourceB, 0);
    });
}
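
The interesting piece above is the anonymous BucketFunction: the exchange partitions on channel 1 (the BIGINT column), so pageA carries the value 42 and is routed to bucket 0 while pageB carries 43 and goes to bucket 1, which is why the two pages surface on different sources. A minimal, framework-free sketch of that routing rule (hypothetical class name, no Trino dependencies):

import java.util.function.LongToIntFunction;

public class BucketRoutingSketch {
    public static void main(String[] args) {
        // Same rule the anonymous BucketFunction above encodes: a row whose
        // partitioning value is 42 goes to bucket 0, everything else to bucket 1.
        LongToIntFunction bucketFor = value -> value == 42 ? 0 : 1;

        System.out.println(bucketFor.applyAsInt(42)); // 0, the route taken by pageA
        System.out.println(bucketFor.applyAsInt(43)); // 1, the route taken by pageB
    }
}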

Example 32 with VARCHAR

Use of io.trino.spi.type.VarcharType.VARCHAR in project trino by trinodb.

From the class TestCachingHiveMetastore, method testLoadAfterInvalidate:

@Test(timeOut = 60_000, dataProviderClass = DataProviders.class, dataProvider = "trueFalse")
public void testLoadAfterInvalidate(boolean invalidateAll) throws Exception {
    // State
    CopyOnWriteArrayList<Column> tableColumns = new CopyOnWriteArrayList<>();
    ConcurrentMap<String, Partition> tablePartitionsByName = new ConcurrentHashMap<>();
    Map<String, String> tableParameters = new ConcurrentHashMap<>();
    tableParameters.put("frequent-changing-table-parameter", "parameter initial value");
    // Initialize data
    String databaseName = "my_database";
    String tableName = "my_table_name";
    tableColumns.add(new Column("value", toHiveType(VARCHAR), Optional.empty()));
    tableColumns.add(new Column("pk", toHiveType(VARCHAR), Optional.empty()));
    List<String> partitionNames = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        String partitionName = "pk=" + i;
        tablePartitionsByName.put(partitionName, Partition.builder()
                .setDatabaseName(databaseName)
                .setTableName(tableName)
                .setColumns(ImmutableList.copyOf(tableColumns))
                .setValues(List.of(Integer.toString(i)))
                .withStorage(storage -> storage.setStorageFormat(fromHiveStorageFormat(TEXTFILE)))
                .setParameters(Map.of("frequent-changing-partition-parameter", "parameter initial value"))
                .build());
        partitionNames.add(partitionName);
    }
    // Mock metastore
    CountDownLatch getTableEnteredLatch = new CountDownLatch(1);
    CountDownLatch getTableReturnLatch = new CountDownLatch(1);
    CountDownLatch getTableFinishedLatch = new CountDownLatch(1);
    CountDownLatch getPartitionsByNamesEnteredLatch = new CountDownLatch(1);
    CountDownLatch getPartitionsByNamesReturnLatch = new CountDownLatch(1);
    CountDownLatch getPartitionsByNamesFinishedLatch = new CountDownLatch(1);
    HiveMetastore mockMetastore = new UnimplementedHiveMetastore() {

        @Override
        public Optional<Table> getTable(String databaseName, String tableName) {
            Optional<Table> table = Optional.of(Table.builder()
                    .setDatabaseName(databaseName)
                    .setTableName(tableName)
                    .setTableType(EXTERNAL_TABLE.name())
                    .setDataColumns(tableColumns)
                    .setParameters(ImmutableMap.copyOf(tableParameters))
                    .withStorage(storage -> storage.setStorageFormat(fromHiveStorageFormat(TEXTFILE)))
                    .setOwner(Optional.empty())
                    .build());
            // 1
            getTableEnteredLatch.countDown();
            // 2
            await(getTableReturnLatch, 10, SECONDS);
            return table;
        }

        @Override
        public Map<String, Optional<Partition>> getPartitionsByNames(Table table, List<String> partitionNames) {
            Map<String, Optional<Partition>> result = new HashMap<>();
            for (String partitionName : partitionNames) {
                result.put(partitionName, Optional.ofNullable(tablePartitionsByName.get(partitionName)));
            }
            // 4
            getPartitionsByNamesEnteredLatch.countDown();
            // 5
            await(getPartitionsByNamesReturnLatch, 10, SECONDS);
            return result;
        }
    };
    // Caching metastore
    metastore = cachingHiveMetastore(mockMetastore, executor, new Duration(5, TimeUnit.MINUTES), Optional.of(new Duration(1, TimeUnit.MINUTES)), 1000);
    // The test. Main thread does modifications and verifies subsequent load sees them. Background thread loads the state into the cache.
    ExecutorService executor = Executors.newFixedThreadPool(1);
    try {
        Future<Void> future = executor.submit(() -> {
            try {
                Table table;
                table = metastore.getTable(databaseName, tableName).orElseThrow();
                // 3
                getTableFinishedLatch.countDown();
                metastore.getPartitionsByNames(table, partitionNames);
                // 6
                getPartitionsByNamesFinishedLatch.countDown();
                return (Void) null;
            } catch (Throwable e) {
                log.error(e);
                throw e;
            }
        });
        // 1
        await(getTableEnteredLatch, 10, SECONDS);
        tableParameters.put("frequent-changing-table-parameter", "main-thread-put-xyz");
        if (invalidateAll) {
            metastore.flushCache();
        } else {
            metastore.invalidateTable(databaseName, tableName);
        }
        // 2
        getTableReturnLatch.countDown();
        // 3
        await(getTableFinishedLatch, 10, SECONDS);
        Table table = metastore.getTable(databaseName, tableName).orElseThrow();
        assertThat(table.getParameters()).isEqualTo(Map.of("frequent-changing-table-parameter", "main-thread-put-xyz"));
        // 4
        await(getPartitionsByNamesEnteredLatch, 10, SECONDS);
        String partitionName = partitionNames.get(2);
        Map<String, String> newPartitionParameters = Map.of("frequent-changing-partition-parameter", "main-thread-put-alice");
        tablePartitionsByName.put(partitionName, Partition.builder(tablePartitionsByName.get(partitionName)).setParameters(newPartitionParameters).build());
        if (invalidateAll) {
            metastore.flushCache();
        } else {
            metastore.invalidateTable(databaseName, tableName);
        }
        // 5
        getPartitionsByNamesReturnLatch.countDown();
        // 6
        await(getPartitionsByNamesFinishedLatch, 10, SECONDS);
        Map<String, Optional<Partition>> loadedPartitions = metastore.getPartitionsByNames(table, partitionNames);
        assertThat(loadedPartitions.get(partitionName)).isNotNull().isPresent().hasValueSatisfying(partition -> assertThat(partition.getParameters()).isEqualTo(newPartitionParameters));
        // verify no failure in the background thread
        future.get(10, SECONDS);
    } finally {
        getTableEnteredLatch.countDown();
        getTableReturnLatch.countDown();
        getTableFinishedLatch.countDown();
        getPartitionsByNamesEnteredLatch.countDown();
        getPartitionsByNamesReturnLatch.countDown();
        getPartitionsByNamesFinishedLatch.countDown();
        executor.shutdownNow();
        executor.awaitTermination(10, SECONDS);
    }
}
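
The numbered comments mark a strict handoff between the two threads: the background loader announces it has entered the metastore call, parks until the main thread has mutated the table or partition state and invalidated the cache, and only then returns, so the value it loaded is stale by construction and the subsequent load must observe the new state. A stripped-down sketch of that CountDownLatch choreography, with hypothetical names and none of the Hive types:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class LatchHandoffSketch {
    public static void main(String[] args) throws Exception {
        CountDownLatch loaderEntered = new CountDownLatch(1);
        CountDownLatch loaderMayReturn = new CountDownLatch(1);
        ExecutorService executor = Executors.newSingleThreadExecutor();
        try {
            // Background "loader": announce entry, then block until the main
            // thread has mutated shared state and invalidated its cache.
            Future<String> loaded = executor.submit(() -> {
                loaderEntered.countDown();
                loaderMayReturn.await(10, TimeUnit.SECONDS);
                return "value read before the invalidation";
            });
            loaderEntered.await(10, TimeUnit.SECONDS);
            // ... mutate state and invalidate the cache here ...
            loaderMayReturn.countDown();
            // The loaded value is stale by construction, so a later load
            // must go back to the source rather than reuse it.
            System.out.println(loaded.get(10, TimeUnit.SECONDS));
        } finally {
            executor.shutdownNow();
        }
    }
}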

Example 33 with VARCHAR

Use of io.trino.spi.type.VarcharType.VARCHAR in project trino by trinodb.

From the class QueryPlanner, method planExpand:

public RelationPlan planExpand(Query query) {
    checkArgument(analysis.isExpandableQuery(query), "query is not registered as expandable");
    Union union = (Union) query.getQueryBody();
    ImmutableList.Builder<NodeAndMappings> recursionSteps = ImmutableList.builder();
    // plan anchor relation
    Relation anchorNode = union.getRelations().get(0);
    RelationPlan anchorPlan = new RelationPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, plannerContext, outerContext, session, recursiveSubqueries).process(anchorNode, null);
    // prune anchor plan outputs to contain only the symbols exposed in the scope
    NodeAndMappings prunedAnchorPlan = pruneInvisibleFields(anchorPlan, idAllocator);
    // if the anchor plan has duplicate output symbols, add projection on top to make the symbols unique
    // This is necessary to successfully unroll recursion: the recursion step relation must follow
    // the same layout while it might not have duplicate outputs where the anchor plan did
    NodeAndMappings disambiguatedAnchorPlan = disambiguateOutputs(prunedAnchorPlan, symbolAllocator, idAllocator);
    anchorPlan = new RelationPlan(disambiguatedAnchorPlan.getNode(), analysis.getScope(query), disambiguatedAnchorPlan.getFields(), outerContext);
    recursionSteps.add(copy(anchorPlan.getRoot(), anchorPlan.getFieldMappings()));
    // plan recursion step
    Relation recursionStepRelation = union.getRelations().get(1);
    RelationPlan recursionStepPlan = new RelationPlanner(analysis, symbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, plannerContext, outerContext, session, ImmutableMap.of(NodeRef.of(analysis.getRecursiveReference(query)), anchorPlan)).process(recursionStepRelation, null);
    // coerce recursion step outputs and prune them to contain only the symbols exposed in the scope
    NodeAndMappings coercedRecursionStep;
    List<Type> types = analysis.getRelationCoercion(recursionStepRelation);
    if (types == null) {
        coercedRecursionStep = pruneInvisibleFields(recursionStepPlan, idAllocator);
    } else {
        coercedRecursionStep = coerce(recursionStepPlan, types, symbolAllocator, idAllocator);
    }
    NodeAndMappings replacementSpot = new NodeAndMappings(anchorPlan.getRoot(), anchorPlan.getFieldMappings());
    PlanNode recursionStep = coercedRecursionStep.getNode();
    List<Symbol> mappings = coercedRecursionStep.getFields();
    // unroll recursion
    int maxRecursionDepth = getMaxRecursionDepth(session);
    for (int i = 0; i < maxRecursionDepth; i++) {
        recursionSteps.add(copy(recursionStep, mappings));
        NodeAndMappings replacement = copy(recursionStep, mappings);
        // if the recursion step plan has duplicate output symbols, add projection on top to make the symbols unique
        // This is necessary to successfully unroll recursion: the relation on the next recursion step must follow
        // the same layout while it might not have duplicate outputs where the plan for this step did
        replacement = disambiguateOutputs(replacement, symbolAllocator, idAllocator);
        recursionStep = replace(recursionStep, replacementSpot, replacement);
        replacementSpot = replacement;
    }
    // after the last recursion step, check if the recursion converged. the last step is expected to return empty result
    // 1. append window to count rows
    NodeAndMappings checkConvergenceStep = copy(recursionStep, mappings);
    Symbol countSymbol = symbolAllocator.newSymbol("count", BIGINT);
    ResolvedFunction function = plannerContext.getMetadata().resolveFunction(session, QualifiedName.of("count"), ImmutableList.of());
    WindowNode.Function countFunction = new WindowNode.Function(function, ImmutableList.of(), DEFAULT_FRAME, false);
    WindowNode windowNode = new WindowNode(
            idAllocator.getNextId(),
            checkConvergenceStep.getNode(),
            new WindowNode.Specification(ImmutableList.of(), Optional.empty()),
            ImmutableMap.of(countSymbol, countFunction),
            Optional.empty(),
            ImmutableSet.of(),
            0);
    // 2. append filter to fail on non-empty result
    ResolvedFunction fail = plannerContext.getMetadata().resolveFunction(session, QualifiedName.of("fail"), fromTypes(VARCHAR));
    String recursionLimitExceededMessage = format("Recursion depth limit exceeded (%s). Use 'max_recursion_depth' session property to modify the limit.", maxRecursionDepth);
    Expression predicate = new IfExpression(
            new ComparisonExpression(GREATER_THAN_OR_EQUAL, countSymbol.toSymbolReference(), new GenericLiteral("BIGINT", "0")),
            new Cast(
                    new FunctionCall(fail.toQualifiedName(), ImmutableList.of(new Cast(new StringLiteral(recursionLimitExceededMessage), toSqlType(VARCHAR)))),
                    toSqlType(BOOLEAN)),
            TRUE_LITERAL);
    FilterNode filterNode = new FilterNode(idAllocator.getNextId(), windowNode, predicate);
    recursionSteps.add(new NodeAndMappings(filterNode, checkConvergenceStep.getFields()));
    // union all the recursion steps
    List<NodeAndMappings> recursionStepsToUnion = recursionSteps.build();
    List<Symbol> unionOutputSymbols = anchorPlan.getFieldMappings().stream().map(symbol -> symbolAllocator.newSymbol(symbol, "_expanded")).collect(toImmutableList());
    ImmutableListMultimap.Builder<Symbol, Symbol> unionSymbolMapping = ImmutableListMultimap.builder();
    for (NodeAndMappings plan : recursionStepsToUnion) {
        for (int i = 0; i < unionOutputSymbols.size(); i++) {
            unionSymbolMapping.put(unionOutputSymbols.get(i), plan.getFields().get(i));
        }
    }
    List<PlanNode> nodesToUnion = recursionStepsToUnion.stream().map(NodeAndMappings::getNode).collect(toImmutableList());
    PlanNode result = new UnionNode(idAllocator.getNextId(), nodesToUnion, unionSymbolMapping.build(), unionOutputSymbols);
    if (union.isDistinct()) {
        result = new AggregationNode(
                idAllocator.getNextId(),
                result,
                ImmutableMap.of(),
                singleGroupingSet(result.getOutputSymbols()),
                ImmutableList.of(),
                AggregationNode.Step.SINGLE,
                Optional.empty(),
                Optional.empty());
    }
    return new RelationPlan(result, anchorPlan.getScope(), unionOutputSymbols, outerContext);
}
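
In short, planExpand turns recursion into iteration: it plans the anchor once, stamps out max_recursion_depth copies of the recursion step (each wired to read the previous copy's output), and appends a count()-plus-fail() guard so that any rows surviving the final step raise the recursion-limit error. A plain-Java sketch of that unroll-then-check shape; the step function and all names here are hypothetical stand-ins:

import java.util.ArrayList;
import java.util.List;

public class UnrollRecursionSketch {
    public static void main(String[] args) {
        // Stand-in for the max_recursion_depth session property.
        int maxRecursionDepth = 10;
        List<Integer> anchor = List.of(1);
        List<Integer> result = new ArrayList<>(anchor);
        List<Integer> step = anchor;
        for (int i = 0; i < maxRecursionDepth; i++) {
            // Each unrolled step reads the output of the previous copy.
            step = recursionStep(step);
            result.addAll(step);
        }
        // Convergence check, mirroring the count()-plus-fail() filter above:
        // one step past the unroll limit must produce no rows.
        if (!recursionStep(step).isEmpty()) {
            throw new IllegalStateException("Recursion depth limit exceeded (" + maxRecursionDepth + ")");
        }
        System.out.println(result); // [1, 2, 3, 4, 5]
    }

    // Hypothetical recursion step: increment values, converging once they pass 5.
    private static List<Integer> recursionStep(List<Integer> previous) {
        return previous.stream().map(n -> n + 1).filter(n -> n <= 5).toList();
    }
}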

Example 34 with VARCHAR

Use of io.trino.spi.type.VarcharType.VARCHAR in project trino by trinodb.

From the class TestPushProjectionIntoTableScan, method testPushProjection:

@Test
public void testPushProjection() {
    try (RuleTester ruleTester = defaultRuleTester()) {
        // Building context for input
        String columnName = "col0";
        Type columnType = ROW_TYPE;
        Symbol baseColumn = new Symbol(columnName);
        ColumnHandle columnHandle = new TpchColumnHandle(columnName, columnType);
        // Create catalog with applyProjection enabled
        MockConnectorFactory factory = createMockFactory(ImmutableMap.of(columnName, columnHandle), Optional.of(this::mockApplyProjection));
        ruleTester.getQueryRunner().createCatalog(MOCK_CATALOG, factory, ImmutableMap.of());
        TypeAnalyzer typeAnalyzer = createTestingTypeAnalyzer(ruleTester.getPlannerContext());
        // Prepare project node symbols and types
        Symbol identity = new Symbol("symbol_identity");
        Symbol dereference = new Symbol("symbol_dereference");
        Symbol constant = new Symbol("symbol_constant");
        Symbol call = new Symbol("symbol_call");
        ImmutableMap<Symbol, Type> types = ImmutableMap.of(
                baseColumn, ROW_TYPE,
                identity, ROW_TYPE,
                dereference, BIGINT,
                constant, BIGINT,
                call, VARCHAR);
        // Prepare project node assignments
        ImmutableMap<Symbol, Expression> inputProjections = ImmutableMap.of(
                identity, baseColumn.toSymbolReference(),
                dereference, new SubscriptExpression(baseColumn.toSymbolReference(), new LongLiteral("1")),
                constant, new LongLiteral("5"),
                call, new FunctionCall(QualifiedName.of("STARTS_WITH"), ImmutableList.of(new StringLiteral("abc"), new StringLiteral("ab"))));
        // Compute expected symbols after applyProjection
        TransactionId transactionId = ruleTester.getQueryRunner().getTransactionManager().beginTransaction(false);
        Session session = MOCK_SESSION.beginTransactionId(transactionId, ruleTester.getQueryRunner().getTransactionManager(), ruleTester.getQueryRunner().getAccessControl());
        ImmutableMap<Symbol, String> connectorNames = inputProjections.entrySet().stream()
                .collect(toImmutableMap(
                        Map.Entry::getKey,
                        e -> translate(session, e.getValue(), typeAnalyzer, viewOf(types), ruleTester.getPlannerContext()).get().toString()));
        ImmutableMap<Symbol, String> newNames = ImmutableMap.of(
                identity, "projected_variable_" + connectorNames.get(identity),
                dereference, "projected_dereference_" + connectorNames.get(dereference),
                constant, "projected_constant_" + connectorNames.get(constant),
                call, "projected_call_" + connectorNames.get(call));
        Map<String, ColumnHandle> expectedColumns = newNames.entrySet().stream()
                .collect(toImmutableMap(
                        Map.Entry::getValue,
                        e -> column(e.getValue(), types.get(e.getKey()))));
        ruleTester.assertThat(createRule(ruleTester))
                .on(p -> {
                    // Register symbols
                    types.forEach((symbol, type) -> p.symbol(symbol.getName(), type));
                    return p.project(
                            new Assignments(inputProjections),
                            p.tableScan(tableScan -> tableScan
                                    .setTableHandle(TEST_TABLE_HANDLE)
                                    .setSymbols(ImmutableList.copyOf(types.keySet()))
                                    .setAssignments(types.keySet().stream()
                                            .collect(Collectors.toMap(Function.identity(), v -> columnHandle)))
                                    .setStatistics(Optional.of(PlanNodeStatsEstimate.builder()
                                            .setOutputRowCount(42)
                                            .addSymbolStatistics(baseColumn, SymbolStatsEstimate.builder()
                                                    .setNullsFraction(0)
                                                    .setDistinctValuesCount(33)
                                                    .build())
                                            .build()))));
                })
                .withSession(MOCK_SESSION)
                .matches(project(
                        newNames.entrySet().stream()
                                .collect(toImmutableMap(e -> e.getKey().getName(), e -> expression(symbolReference(e.getValue())))),
                        tableScan(
                                new MockConnectorTableHandle(
                                        new SchemaTableName(TEST_SCHEMA, "projected_" + TEST_TABLE),
                                        TupleDomain.all(),
                                        Optional.of(ImmutableList.copyOf(expectedColumns.values())))::equals,
                                TupleDomain.all(),
                                expectedColumns.entrySet().stream()
                                        .collect(toImmutableMap(Map.Entry::getKey, e -> e.getValue()::equals)),
                                Optional.of(PlanNodeStatsEstimate.builder()
                                        .setOutputRowCount(42)
                                        .addSymbolStatistics(new Symbol(newNames.get(constant)), SymbolStatsEstimate.builder()
                                                .setDistinctValuesCount(1)
                                                .setNullsFraction(0)
                                                .setLowValue(5)
                                                .setHighValue(5)
                                                .build())
                                        .addSymbolStatistics(new Symbol(newNames.get(call).toLowerCase(ENGLISH)), SymbolStatsEstimate.builder()
                                                .setDistinctValuesCount(1)
                                                .setNullsFraction(0)
                                                .build())
                                        .addSymbolStatistics(new Symbol(newNames.get(identity)), SymbolStatsEstimate.builder()
                                                .setDistinctValuesCount(33)
                                                .setNullsFraction(0)
                                                .build())
                                        .addSymbolStatistics(new Symbol(newNames.get(dereference)), SymbolStatsEstimate.unknown())
                                        .build())::equals)));
    }
}
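
The expected column names above expose the convention the test's mock connector applies when it accepts a projection: each pushed-down expression becomes a synthetic column whose name joins the expression kind (variable, dereference, constant, call) with the connector-side expression text. A small sketch of that naming step in isolation; the expression texts on the right are illustrative stand-ins, not the strings the real translator emits:

import java.util.LinkedHashMap;
import java.util.Map;

public class ProjectionNamingSketch {
    public static void main(String[] args) {
        // Each accepted expression becomes a synthetic connector column whose
        // name prefixes the expression kind onto the expression text.
        // The texts on the right are illustrative stand-ins only.
        Map<String, String> projectedColumns = new LinkedHashMap<>();
        projectedColumns.put("symbol_identity", "projected_variable_" + "col0");
        projectedColumns.put("symbol_dereference", "projected_dereference_" + "col0#0");
        projectedColumns.put("symbol_constant", "projected_constant_" + "5");
        projectedColumns.put("symbol_call", "projected_call_" + "starts_with('abc', 'ab')");
        projectedColumns.forEach((symbol, column) -> System.out.println(symbol + " -> " + column));
    }
}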

Example 35 with VARCHAR

Use of io.trino.spi.type.VarcharType.VARCHAR in project trino by trinodb.

From the class TestPushTopNIntoTableScan, method testPushPartialTopNIntoTableScanNotGuaranteed:

@Test
public void testPushPartialTopNIntoTableScanNotGuaranteed() {
    try (RuleTester ruleTester = defaultRuleTester()) {
        MockConnectorTableHandle connectorHandle = new MockConnectorTableHandle(TEST_SCHEMA_TABLE);
        // make the mock connector return a new connectorHandle
        MockConnectorFactory.ApplyTopN applyTopN = (session, handle, topNCount, sortItems, tableAssignments) -> Optional.of(new TopNApplicationResult<>(connectorHandle, false, false));
        MockConnectorFactory mockFactory = createMockFactory(assignments, Optional.of(applyTopN));
        ruleTester.getQueryRunner().createCatalog(MOCK_CATALOG, mockFactory, ImmutableMap.of());
        ruleTester.assertThat(new PushTopNIntoTableScan(ruleTester.getMetadata())).on(p -> {
            Symbol dimension = p.symbol(dimensionName, VARCHAR);
            Symbol metric = p.symbol(metricName, BIGINT);
            return p.topN(1, ImmutableList.of(dimension), TopNNode.Step.PARTIAL, p.tableScan(TEST_TABLE_HANDLE, ImmutableList.of(dimension, metric), ImmutableMap.of(dimension, dimensionColumn, metric, metricColumn)));
        })
                .withSession(MOCK_SESSION)
                .matches(topN(
                        1,
                        ImmutableList.of(sort(dimensionName, ASCENDING, FIRST)),
                        TopNNode.Step.PARTIAL,
                        tableScan(
                                connectorHandle::equals,
                                TupleDomain.all(),
                                ImmutableMap.of(dimensionName, dimensionColumn::equals, metricName, metricColumn::equals))));
    }
}
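
What makes this variant "not guaranteed" is the second constructor argument of TopNApplicationResult being false: the connector may pre-filter rows but does not promise exact top-N semantics, so the rule keeps a PARTIAL TopN node above the new scan, as the expected plan shows. A self-contained sketch of why the engine must re-apply the sort and limit in that case (the record type is a hypothetical stand-in for the real result class):

import java.util.Comparator;
import java.util.List;

public class TopNNotGuaranteedSketch {
    // Hypothetical stand-in for the "guaranteed" flag carried by TopNApplicationResult.
    record PushdownResult(List<String> rows, boolean guaranteed) {}

    public static void main(String[] args) {
        // The connector pre-filtered the data but does not promise exact top-N order.
        PushdownResult result = new PushdownResult(List.of("b", "a", "c"), false);
        List<String> rows = result.rows();
        if (!result.guaranteed()) {
            // This is why the expected plan above still contains a PARTIAL TopN node:
            // the engine re-applies the sort and limit on top of the new scan.
            rows = rows.stream().sorted(Comparator.naturalOrder()).limit(1).toList();
        }
        System.out.println(rows); // [a]
    }
}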

Aggregations

ImmutableList (com.google.common.collect.ImmutableList): 37
VARCHAR (io.trino.spi.type.VarcharType.VARCHAR): 37
Test (org.testng.annotations.Test): 34
Optional (java.util.Optional): 31
BIGINT (io.trino.spi.type.BigintType.BIGINT): 30
ImmutableMap (com.google.common.collect.ImmutableMap): 26
List (java.util.List): 25
ColumnHandle (io.trino.spi.connector.ColumnHandle): 20
Assertions.assertThatThrownBy (org.assertj.core.api.Assertions.assertThatThrownBy): 20
Map (java.util.Map): 19
TupleDomain (io.trino.spi.predicate.TupleDomain): 18
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 17
Session (io.trino.Session): 17
SchemaTableName (io.trino.spi.connector.SchemaTableName): 16
ImmutableSet (com.google.common.collect.ImmutableSet): 15
TableHandle (io.trino.metadata.TableHandle): 15
Type (io.trino.spi.type.Type): 15
TestingSession.testSessionBuilder (io.trino.testing.TestingSession.testSessionBuilder): 15
Assert.assertTrue (org.testng.Assert.assertTrue): 15
CatalogName (io.trino.connector.CatalogName): 14