Example 51 with BIGINT

Use of io.trino.spi.type.BigintType.BIGINT in the trino project by trinodb.

From the class BaseIcebergConnectorTest, method testPartitionedTableStatistics:

@Test
public void testPartitionedTableStatistics() {
    assertUpdate("CREATE TABLE iceberg.tpch.test_partitioned_table_statistics (col1 REAL, col2 BIGINT) WITH (partitioning = ARRAY['col2'])");
    String insertStart = "INSERT INTO test_partitioned_table_statistics";
    assertUpdate(insertStart + " VALUES (-10, -1)", 1);
    assertUpdate(insertStart + " VALUES (100, 10)", 1);
    MaterializedResult result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics");
    // SHOW STATS columns: column_name(0), data_size(1), distinct_values_count(2),
    // nulls_fraction(3), row_count(4), low_value(5), high_value(6)
    assertEquals(result.getRowCount(), 3);
    MaterializedRow row0 = result.getMaterializedRows().get(0);
    assertEquals(row0.getField(0), "col1");
    assertEquals(row0.getField(3), 0.0);
    assertEquals(row0.getField(5), "-10.0");
    assertEquals(row0.getField(6), "100.0");
    MaterializedRow row1 = result.getMaterializedRows().get(1);
    assertEquals(row1.getField(0), "col2");
    assertEquals(row1.getField(3), 0.0);
    assertEquals(row1.getField(5), "-1");
    assertEquals(row1.getField(6), "10");
    MaterializedRow row2 = result.getMaterializedRows().get(2);
    assertEquals(row2.getField(4), 2.0);
    assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(1, 5).mapToObj(i -> format("(%d, 10)", i + 100)).collect(joining(", ")), 5);
    assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(6, 10).mapToObj(i -> "(NULL, 10)").collect(joining(", ")), 5);
    result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics");
    assertEquals(result.getRowCount(), 3);
    row0 = result.getMaterializedRows().get(0);
    assertEquals(row0.getField(0), "col1");
    assertEquals(row0.getField(3), 5.0 / 12.0);
    assertEquals(row0.getField(5), "-10.0");
    assertEquals(row0.getField(6), "105.0");
    row1 = result.getMaterializedRows().get(1);
    assertEquals(row1.getField(0), "col2");
    assertEquals(row1.getField(3), 0.0);
    assertEquals(row1.getField(5), "-1");
    assertEquals(row1.getField(6), "10");
    row2 = result.getMaterializedRows().get(2);
    assertEquals(row2.getField(4), 12.0);
    assertUpdate(insertStart + " VALUES " + IntStream.rangeClosed(6, 10).mapToObj(i -> "(100, NULL)").collect(joining(", ")), 5);
    result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics");
    row0 = result.getMaterializedRows().get(0);
    assertEquals(row0.getField(0), "col1");
    assertEquals(row0.getField(3), 5.0 / 17.0);
    assertEquals(row0.getField(5), "-10.0");
    assertEquals(row0.getField(6), "105.0");
    row1 = result.getMaterializedRows().get(1);
    assertEquals(row1.getField(0), "col2");
    assertEquals(row1.getField(3), 5.0 / 17.0);
    assertEquals(row1.getField(5), "-1");
    assertEquals(row1.getField(6), "10");
    row2 = result.getMaterializedRows().get(2);
    assertEquals(row2.getField(4), 17.0);
    dropTable("iceberg.tpch.test_partitioned_table_statistics");
}
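The asserted nulls fractions follow directly from counting the inserted rows. A minimal standalone sketch of that arithmetic (class name hypothetical, not part of the test):

public class StatsArithmeticSketch {
    public static void main(String[] args) {
        // Two single-row inserts, then five non-NULL rows, then five rows with NULL col1
        long rowsAfterSecondBatch = 2 + 5 + 5;                // 12
        System.out.println(5.0 / rowsAfterSecondBatch);       // col1 nulls fraction: 5.0 / 12.0
        // Five more rows with NULL col2 (col1 = 100) bring the total to 17;
        // each column now holds exactly five NULLs
        long rowsAfterThirdBatch = rowsAfterSecondBatch + 5;  // 17
        System.out.println(5.0 / rowsAfterThirdBatch);        // col1 and col2 nulls fraction: 5.0 / 17.0
    }
}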

Example 52 with BIGINT

Use of io.trino.spi.type.BigintType.BIGINT in the trino project by trinodb.

From the class BaseIcebergConnectorTest, method testPredicatePushdown:

@Test
public void testPredicatePushdown() {
    QualifiedObjectName tableName = new QualifiedObjectName("iceberg", "tpch", "test_predicate");
    assertUpdate(format("CREATE TABLE %s (col1 BIGINT, col2 BIGINT, col3 BIGINT) WITH (partitioning = ARRAY['col2', 'col3'])", tableName));
    assertUpdate(format("INSERT INTO %s VALUES (1, 10, 100)", tableName), 1L);
    assertUpdate(format("INSERT INTO %s VALUES (2, 20, 200)", tableName), 1L);
    assertQuery(format("SELECT * FROM %s WHERE col1 = 1", tableName), "VALUES (1, 10, 100)");
    assertFilterPushdown(tableName, ImmutableMap.of("col1", singleValue(BIGINT, 1L)), ImmutableMap.of(), ImmutableMap.of("col1", singleValue(BIGINT, 1L)));
    assertQuery(format("SELECT * FROM %s WHERE col2 = 10", tableName), "VALUES (1, 10, 100)");
    assertFilterPushdown(tableName, ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of());
    assertQuery(format("SELECT * FROM %s WHERE col1 = 1 AND col2 = 10", tableName), "VALUES (1, 10, 100)");
    assertFilterPushdown(tableName, ImmutableMap.of("col1", singleValue(BIGINT, 1L), "col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col1", singleValue(BIGINT, 1L)));
    // Assert pushdown for an IN predicate with value count above the default compaction threshold
    List<Long> values = LongStream.range(1L, 1010L).boxed().filter(index -> index != 20L).collect(toImmutableList());
    assertTrue(values.size() > ICEBERG_DOMAIN_COMPACTION_THRESHOLD);
    String valuesString = join(",", values.stream().map(Object::toString).collect(toImmutableList()));
    String inPredicate = "%s IN (" + valuesString + ")";
    assertQuery(format("SELECT * FROM %s WHERE %s AND %s", tableName, format(inPredicate, "col1"), format(inPredicate, "col2")), "VALUES (1, 10, 100)");
    assertFilterPushdown(tableName, ImmutableMap.of("col1", multipleValues(BIGINT, values), "col2", multipleValues(BIGINT, values)), ImmutableMap.of("col2", multipleValues(BIGINT, values)), // Unenforced predicate is simplified during split generation, but not reflected here
    ImmutableMap.of("col1", multipleValues(BIGINT, values)));
    dropTable(tableName.getObjectName());
}
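The filter maps passed to assertFilterPushdown are built from the trino-spi Domain factories imported by this test. A minimal standalone sketch of those helpers, assuming trino-spi and Guava on the classpath (class name and column keys are hypothetical):

import com.google.common.collect.ImmutableMap;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.TupleDomain;

import java.util.List;

import static io.trino.spi.predicate.Domain.multipleValues;
import static io.trino.spi.predicate.Domain.singleValue;
import static io.trino.spi.type.BigintType.BIGINT;

public class DomainSketch {
    public static void main(String[] args) {
        Domain eq = singleValue(BIGINT, 1L);                      // col = 1
        Domain in = multipleValues(BIGINT, List.of(1L, 2L, 3L));  // col IN (1, 2, 3)
        // Domains combine into a TupleDomain keyed by column; this is the shape
        // a connector sees (and these tests compare) for pushed-down filters
        TupleDomain<String> predicate = TupleDomain.withColumnDomains(
                ImmutableMap.of("col1", eq, "col2", in));
        System.out.println(predicate.getDomains().orElseThrow());
    }
}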

Example 53 with BIGINT

Use of io.trino.spi.type.BigintType.BIGINT in the trino project by trinodb.

From the class PhoenixClient, method beginCreateTable:

@Override
public JdbcOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata) {
    SchemaTableName schemaTableName = tableMetadata.getTable();
    String schema = schemaTableName.getSchemaName();
    String table = schemaTableName.getTableName();
    if (!getSchemaNames(session).contains(schema)) {
        throw new SchemaNotFoundException(schema);
    }
    try (Connection connection = connectionFactory.openConnection(session)) {
        ConnectorIdentity identity = session.getIdentity();
        schema = getIdentifierMapping().toRemoteSchemaName(identity, connection, schema);
        table = getIdentifierMapping().toRemoteTableName(identity, connection, schema, table);
        schema = toPhoenixSchemaName(schema);
        LinkedList<ColumnMetadata> tableColumns = new LinkedList<>(tableMetadata.getColumns());
        Map<String, Object> tableProperties = tableMetadata.getProperties();
        Optional<Boolean> immutableRows = PhoenixTableProperties.getImmutableRows(tableProperties);
        String immutable = immutableRows.isPresent() && immutableRows.get() ? "IMMUTABLE" : "";
        ImmutableList.Builder<String> columnNames = ImmutableList.builder();
        ImmutableList.Builder<Type> columnTypes = ImmutableList.builder();
        ImmutableList.Builder<String> columnList = ImmutableList.builder();
        Set<ColumnMetadata> rowkeyColumns = tableColumns.stream().filter(col -> isPrimaryKey(col, tableProperties)).collect(toSet());
        ImmutableList.Builder<String> pkNames = ImmutableList.builder();
        Optional<String> rowkeyColumn = Optional.empty();
        if (rowkeyColumns.isEmpty()) {
            // Add a rowkey when not specified in DDL
            columnList.add(ROWKEY + " bigint not null");
            pkNames.add(ROWKEY);
            execute(session, format("CREATE SEQUENCE %s", getEscapedTableName(schema, table + "_sequence")));
            rowkeyColumn = Optional.of(ROWKEY);
        }
        for (ColumnMetadata column : tableColumns) {
            String columnName = getIdentifierMapping().toRemoteColumnName(connection, column.getName());
            columnNames.add(columnName);
            columnTypes.add(column.getType());
            String typeStatement = toWriteMapping(session, column.getType()).getDataType();
            if (rowkeyColumns.contains(column)) {
                typeStatement += " not null";
                pkNames.add(columnName);
            }
            columnList.add(format("%s %s", getEscapedArgument(columnName), typeStatement));
        }
        ImmutableList.Builder<String> tableOptions = ImmutableList.builder();
        PhoenixTableProperties.getSaltBuckets(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.SALT_BUCKETS + "=" + value));
        PhoenixTableProperties.getSplitOn(tableProperties).ifPresent(value -> tableOptions.add("SPLIT ON (" + value.replace('"', '\'') + ")"));
        PhoenixTableProperties.getDisableWal(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.DISABLE_WAL + "=" + value));
        PhoenixTableProperties.getDefaultColumnFamily(tableProperties).ifPresent(value -> tableOptions.add(TableProperty.DEFAULT_COLUMN_FAMILY + "=" + value));
        PhoenixTableProperties.getBloomfilter(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.BLOOMFILTER + "='" + value + "'"));
        PhoenixTableProperties.getVersions(tableProperties).ifPresent(value -> tableOptions.add(HConstants.VERSIONS + "=" + value));
        PhoenixTableProperties.getMinVersions(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.MIN_VERSIONS + "=" + value));
        PhoenixTableProperties.getCompression(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.COMPRESSION + "='" + value + "'"));
        PhoenixTableProperties.getTimeToLive(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.TTL + "=" + value));
        PhoenixTableProperties.getDataBlockEncoding(tableProperties).ifPresent(value -> tableOptions.add(HColumnDescriptor.DATA_BLOCK_ENCODING + "='" + value + "'"));
        String sql = format("CREATE %s TABLE %s (%s , CONSTRAINT PK PRIMARY KEY (%s)) %s", immutable, getEscapedTableName(schema, table), join(", ", columnList.build()), join(", ", pkNames.build()), join(", ", tableOptions.build()));
        execute(session, sql);
        return new PhoenixOutputTableHandle(schema, table, columnNames.build(), columnTypes.build(), Optional.empty(), rowkeyColumn);
    } catch (SQLException e) {
        if (e.getErrorCode() == SQLExceptionCode.TABLE_ALREADY_EXIST.getErrorCode()) {
            throw new TrinoException(ALREADY_EXISTS, "Phoenix table already exists", e);
        }
        throw new TrinoException(PHOENIX_METADATA_ERROR, "Error creating Phoenix table", e);
    }
}
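The CREATE TABLE statement assembled by the format(...) call above is easier to see with concrete values. A standalone sketch with hypothetical schema, table, column, and option values (the format string is copied from the method):

import static java.lang.String.format;
import static java.lang.String.join;

public class PhoenixDdlSketch {
    public static void main(String[] args) {
        // When the Trino DDL declares no primary key, the synthetic ROWKEY bigint
        // column is added and becomes the Phoenix primary key (as in the method above)
        String sql = format("CREATE %s TABLE %s (%s , CONSTRAINT PK PRIMARY KEY (%s)) %s",
                "IMMUTABLE",
                "\"my_schema\".\"my_table\"",
                join(", ", "ROWKEY bigint not null", "\"col_a\" varchar"),
                join(", ", "ROWKEY"),
                join(", ", "SALT_BUCKETS=10"));
        System.out.println(sql);
        // CREATE IMMUTABLE TABLE "my_schema"."my_table" (ROWKEY bigint not null,
        // "col_a" varchar , CONSTRAINT PK PRIMARY KEY (ROWKEY)) SALT_BUCKETS=10
    }
}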

Example 54 with BIGINT

Use of io.trino.spi.type.BigintType.BIGINT in the trino project by trinodb.

From the class BaseRaptorConnectorTest, method testTablesSystemTable:

@Test
public void testTablesSystemTable() {
    assertUpdate("" + "CREATE TABLE system_tables_test0 (c00 timestamp, c01 varchar, c02 double, c03 bigint, c04 bigint)");
    assertUpdate("" + "CREATE TABLE system_tables_test1 (c10 timestamp, c11 varchar, c12 double, c13 bigint, c14 bigint) " + "WITH (temporal_column = 'c10')");
    assertUpdate("" + "CREATE TABLE system_tables_test2 (c20 timestamp, c21 varchar, c22 double, c23 bigint, c24 bigint) " + "WITH (temporal_column = 'c20', ordering = ARRAY['c22', 'c21'])");
    assertUpdate("" + "CREATE TABLE system_tables_test3 (c30 timestamp, c31 varchar, c32 double, c33 bigint, c34 bigint) " + "WITH (temporal_column = 'c30', bucket_count = 40, bucketed_on = ARRAY ['c34', 'c33'])");
    assertUpdate("" + "CREATE TABLE system_tables_test4 (c40 timestamp, c41 varchar, c42 double, c43 bigint, c44 bigint) " + "WITH (temporal_column = 'c40', ordering = ARRAY['c41', 'c42'], distribution_name = 'test_distribution', bucket_count = 50, bucketed_on = ARRAY ['c43', 'c44'])");
    assertUpdate("" + "CREATE TABLE system_tables_test5 (c50 timestamp, c51 varchar, c52 double, c53 bigint, c54 bigint) " + "WITH (ordering = ARRAY['c51', 'c52'], distribution_name = 'test_distribution', bucket_count = 50, bucketed_on = ARRAY ['c53', 'c54'], organized = true)");
    MaterializedResult actualResults = computeActual("SELECT * FROM system.tables");
    assertEquals(actualResults.getTypes(), ImmutableList.builder()
            .add(VARCHAR)                 // table_schema
            .add(VARCHAR)                 // table_name
            .add(VARCHAR)                 // temporal_column
            .add(new ArrayType(VARCHAR))  // ordering_columns
            .add(VARCHAR)                 // distribution_name
            .add(BIGINT)                  // bucket_count
            .add(new ArrayType(VARCHAR))  // bucket_columns
            .add(BOOLEAN)                 // organized
            .build());
    Map<String, MaterializedRow> map = actualResults.getMaterializedRows().stream()
            .filter(row -> ((String) row.getField(1)).startsWith("system_tables_test"))
            .collect(toImmutableMap(row -> (String) row.getField(1), identity()));
    assertEquals(map.size(), 6);
    assertEquals(map.get("system_tables_test0").getFields(), asList("tpch", "system_tables_test0", null, null, null, null, null, Boolean.FALSE));
    assertEquals(map.get("system_tables_test1").getFields(), asList("tpch", "system_tables_test1", "c10", null, null, null, null, Boolean.FALSE));
    assertEquals(map.get("system_tables_test2").getFields(), asList("tpch", "system_tables_test2", "c20", ImmutableList.of("c22", "c21"), null, null, null, Boolean.FALSE));
    assertEquals(map.get("system_tables_test3").getFields(), asList("tpch", "system_tables_test3", "c30", null, null, 40L, ImmutableList.of("c34", "c33"), Boolean.FALSE));
    assertEquals(map.get("system_tables_test4").getFields(), asList("tpch", "system_tables_test4", "c40", ImmutableList.of("c41", "c42"), "test_distribution", 50L, ImmutableList.of("c43", "c44"), Boolean.FALSE));
    assertEquals(map.get("system_tables_test5").getFields(), asList("tpch", "system_tables_test5", null, ImmutableList.of("c51", "c52"), "test_distribution", 50L, ImmutableList.of("c53", "c54"), Boolean.TRUE));
    actualResults = computeActual("SELECT * FROM system.tables WHERE table_schema = 'tpch'");
    long actualRowCount = actualResults.getMaterializedRows().stream().filter(row -> ((String) row.getField(1)).startsWith("system_tables_test")).count();
    assertEquals(actualRowCount, 6);
    actualResults = computeActual("SELECT * FROM system.tables WHERE table_name = 'system_tables_test3'");
    assertEquals(actualResults.getMaterializedRows().size(), 1);
    actualResults = computeActual("SELECT * FROM system.tables WHERE table_schema = 'tpch' and table_name = 'system_tables_test3'");
    assertEquals(actualResults.getMaterializedRows().size(), 1);
    actualResults = computeActual("" + "SELECT distribution_name, bucket_count, bucketing_columns, ordering_columns, temporal_column, organized " + "FROM system.tables " + "WHERE table_schema = 'tpch' and table_name = 'system_tables_test3'");
    assertEquals(actualResults.getTypes(), ImmutableList.of(VARCHAR, BIGINT, new ArrayType(VARCHAR), new ArrayType(VARCHAR), VARCHAR, BOOLEAN));
    assertEquals(actualResults.getMaterializedRows().size(), 1);
    assertUpdate("DROP TABLE system_tables_test0");
    assertUpdate("DROP TABLE system_tables_test1");
    assertUpdate("DROP TABLE system_tables_test2");
    assertUpdate("DROP TABLE system_tables_test3");
    assertUpdate("DROP TABLE system_tables_test4");
    assertUpdate("DROP TABLE system_tables_test5");
    assertEquals(computeActual("SELECT * FROM system.tables WHERE table_schema IN ('foo', 'bar')").getRowCount(), 0);
}
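The map built near the top of the assertions uses a common Guava idiom: filter the rows, then index them by one field with toImmutableMap and identity(). A standalone sketch with a plain record standing in for MaterializedRow (assumes Guava on the classpath and Java 16+ for records):

import java.util.List;
import java.util.Map;

import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static java.util.function.Function.identity;

public class IndexRowsSketch {
    record Row(String schema, String name) {}

    public static void main(String[] args) {
        List<Row> rows = List.of(
                new Row("tpch", "system_tables_test0"),
                new Row("tpch", "unrelated_table"));
        // Keep only the tables under test, then key each row by its table name
        Map<String, Row> byName = rows.stream()
                .filter(row -> row.name().startsWith("system_tables_test"))
                .collect(toImmutableMap(Row::name, identity()));
        System.out.println(byName.keySet());  // [system_tables_test0]
    }
}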

Example 55 with BIGINT

Use of io.trino.spi.type.BigintType.BIGINT in the trino project by trinodb.

From the class TestIcebergProjectionPushdownPlans, method testDereferencePushdown:

@Test
public void testDereferencePushdown() {
    String testTable = "test_simple_projection_pushdown" + randomTableSuffix();
    QualifiedObjectName completeTableName = new QualifiedObjectName(CATALOG, SCHEMA, testTable);
    getQueryRunner().execute(format("CREATE TABLE %s (col0, col1) WITH (partitioning = ARRAY['col1']) AS" + " SELECT CAST(row(5, 6) AS row(x bigint, y bigint)) AS col0, 5 AS col1 WHERE false", testTable));
    Session session = getQueryRunner().getDefaultSession();
    Optional<TableHandle> tableHandle = getTableHandle(session, completeTableName);
    assertTrue(tableHandle.isPresent(), "expected the table handle to be present");
    Map<String, ColumnHandle> columns = getColumnHandles(session, completeTableName);
    IcebergColumnHandle column0Handle = (IcebergColumnHandle) columns.get("col0");
    IcebergColumnHandle column1Handle = (IcebergColumnHandle) columns.get("col1");
    IcebergColumnHandle columnX = new IcebergColumnHandle(column0Handle.getColumnIdentity(), column0Handle.getType(), ImmutableList.of(column0Handle.getColumnIdentity().getChildren().get(0).getId()), BIGINT, Optional.empty());
    IcebergColumnHandle columnY = new IcebergColumnHandle(column0Handle.getColumnIdentity(), column0Handle.getType(), ImmutableList.of(column0Handle.getColumnIdentity().getChildren().get(1).getId()), BIGINT, Optional.empty());
    // Simple Projection pushdown
    assertPlan("SELECT col0.x expr_x, col0.y expr_y FROM " + testTable, any(tableScan(equalTo(((IcebergTableHandle) tableHandle.get().getConnectorHandle()).withProjectedColumns(Set.of(columnX, columnY))), TupleDomain.all(), ImmutableMap.of("col0#x", equalTo(columnX), "col0#y", equalTo(columnY)))));
    // Projection and predicate pushdown
    assertPlan(format("SELECT col0.x FROM %s WHERE col0.x = col1 + 3 and col0.y = 2", testTable), anyTree(filter("y = BIGINT '2' AND (x =  CAST((col1 + 3) AS BIGINT))", tableScan(table -> {
        IcebergTableHandle icebergTableHandle = (IcebergTableHandle) table;
        TupleDomain<IcebergColumnHandle> unenforcedConstraint = icebergTableHandle.getUnenforcedPredicate();
        return icebergTableHandle.getProjectedColumns().equals(ImmutableSet.of(column1Handle, columnX, columnY)) && unenforcedConstraint.equals(TupleDomain.withColumnDomains(ImmutableMap.of(columnY, Domain.singleValue(BIGINT, 2L))));
    }, TupleDomain.all(), ImmutableMap.of("y", equalTo(columnY), "x", equalTo(columnX), "col1", equalTo(column1Handle))))));
    // Projection and predicate pushdown with overlapping columns
    assertPlan(format("SELECT col0, col0.y expr_y FROM %s WHERE col0.x = 5", testTable), anyTree(filter("x = BIGINT '5'", tableScan(table -> {
        IcebergTableHandle icebergTableHandle = (IcebergTableHandle) table;
        TupleDomain<IcebergColumnHandle> unenforcedConstraint = icebergTableHandle.getUnenforcedPredicate();
        return icebergTableHandle.getProjectedColumns().equals(ImmutableSet.of(column0Handle, columnX)) && unenforcedConstraint.equals(TupleDomain.withColumnDomains(ImmutableMap.of(columnX, Domain.singleValue(BIGINT, 5L))));
    }, TupleDomain.all(), ImmutableMap.of("col0", equalTo(column0Handle), "x", equalTo(columnX))))));
    // Projection and predicate pushdown with joins
    assertPlan(format("SELECT T.col0.x, T.col0, T.col0.y FROM %s T join %s S on T.col1 = S.col1 WHERE (T.col0.x = 2)", testTable, testTable), anyTree(project(ImmutableMap.of("expr_0_x", expression("expr_0[1]"), "expr_0", expression("expr_0"), "expr_0_y", expression("expr_0[2]")), join(INNER, ImmutableList.of(equiJoinClause("t_expr_1", "s_expr_1")), anyTree(filter("x = BIGINT '2'", tableScan(table -> {
        IcebergTableHandle icebergTableHandle = (IcebergTableHandle) table;
        TupleDomain<IcebergColumnHandle> unenforcedConstraint = icebergTableHandle.getUnenforcedPredicate();
        Set<IcebergColumnHandle> expectedProjections = ImmutableSet.of(column0Handle, column1Handle, columnX);
        TupleDomain<IcebergColumnHandle> expectedUnenforcedConstraint = TupleDomain.withColumnDomains(ImmutableMap.of(columnX, Domain.singleValue(BIGINT, 2L)));
        return icebergTableHandle.getProjectedColumns().equals(expectedProjections) && unenforcedConstraint.equals(expectedUnenforcedConstraint);
    }, TupleDomain.all(), ImmutableMap.of("x", equalTo(columnX), "expr_0", equalTo(column0Handle), "t_expr_1", equalTo(column1Handle))))), anyTree(tableScan(equalTo(((IcebergTableHandle) tableHandle.get().getConnectorHandle()).withProjectedColumns(Set.of(column1Handle))), TupleDomain.all(), ImmutableMap.of("s_expr_1", equalTo(column1Handle))))))));
}
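The tableScan predicates above compare the handle's unenforced constraint against an expected TupleDomain by value. A minimal standalone sketch of that comparison, assuming trino-spi and Guava on the classpath (the String column key is hypothetical; the real test keys by IcebergColumnHandle):

import com.google.common.collect.ImmutableMap;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.TupleDomain;

import static io.trino.spi.type.BigintType.BIGINT;

public class ConstraintEqualitySketch {
    public static void main(String[] args) {
        // col0.y = 2 stays behind as an unenforced predicate on the dereferenced field
        TupleDomain<String> expected = TupleDomain.withColumnDomains(
                ImmutableMap.of("col0.y", Domain.singleValue(BIGINT, 2L)));
        TupleDomain<String> fromHandle = TupleDomain.withColumnDomains(
                ImmutableMap.of("col0.y", Domain.singleValue(BIGINT, 2L)));
        System.out.println(expected.equals(fromHandle));  // true: TupleDomain compares by value
    }
}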

Aggregations

Most frequent co-occurring imports across the BIGINT examples:

BIGINT (io.trino.spi.type.BigintType.BIGINT): 106
ImmutableList (com.google.common.collect.ImmutableList): 99
Optional (java.util.Optional): 87
Test (org.testng.annotations.Test): 86
ImmutableMap (com.google.common.collect.ImmutableMap): 84
VARCHAR (io.trino.spi.type.VarcharType.VARCHAR): 44
List (java.util.List): 44
Map (java.util.Map): 44
ColumnHandle (io.trino.spi.connector.ColumnHandle): 38
Type (io.trino.spi.type.Type): 38
Symbol (io.trino.sql.planner.Symbol): 38
QualifiedName (io.trino.sql.tree.QualifiedName): 38
ImmutableSet (com.google.common.collect.ImmutableSet): 37
TupleDomain (io.trino.spi.predicate.TupleDomain): 36
PlanMatchPattern.values (io.trino.sql.planner.assertions.PlanMatchPattern.values): 36
TableHandle (io.trino.metadata.TableHandle): 35
BaseRuleTest (io.trino.sql.planner.iterative.rule.test.BaseRuleTest): 35
Session (io.trino.Session): 34
PlanMatchPattern.filter (io.trino.sql.planner.assertions.PlanMatchPattern.filter): 33
PlanBuilder.expression (io.trino.sql.planner.iterative.rule.test.PlanBuilder.expression): 32