
Example 6 with JdbcTableHandle

Use of io.trino.plugin.jdbc.JdbcTableHandle in project trino by trinodb.

From the class PhoenixSplitManager, method getSplits.

@Override
public ConnectorSplitSource getSplits(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorTableHandle table, SplitSchedulingStrategy splitSchedulingStrategy, DynamicFilter dynamicFilter) {
    JdbcTableHandle tableHandle = (JdbcTableHandle) table;
    try (Connection connection = phoenixClient.getConnection(session)) {
        List<JdbcColumnHandle> columns = tableHandle.getColumns().map(columnSet -> columnSet.stream().map(JdbcColumnHandle.class::cast).collect(toList())).orElseGet(() -> phoenixClient.getColumns(session, tableHandle));
        PhoenixPreparedStatement inputQuery = (PhoenixPreparedStatement) phoenixClient.prepareStatement(session, connection, tableHandle, columns, Optional.empty());
        int maxScansPerSplit = session.getProperty(PhoenixSessionProperties.MAX_SCANS_PER_SPLIT, Integer.class);
        List<ConnectorSplit> splits = getSplits(inputQuery, maxScansPerSplit).stream().map(PhoenixInputSplit.class::cast).map(split -> new PhoenixSplit(getSplitAddresses(split), SerializedPhoenixInputSplit.serialize(split))).collect(toImmutableList());
        return new FixedSplitSource(splits);
    } catch (IOException | SQLException e) {
        throw new TrinoException(PHOENIX_SPLIT_ERROR, "Couldn't get Phoenix splits", e);
    }
}
Also used : ConnectorSplitManager(io.trino.spi.connector.ConnectorSplitManager) KeyRange(org.apache.phoenix.query.KeyRange) PHOENIX_INTERNAL_ERROR(io.trino.plugin.phoenix.PhoenixErrorCode.PHOENIX_INTERNAL_ERROR) Connection(java.sql.Connection) Logger(io.airlift.log.Logger) FixedSplitSource(io.trino.spi.connector.FixedSplitSource) ArrayList(java.util.ArrayList) Inject(javax.inject.Inject) PhoenixInputSplit(org.apache.phoenix.mapreduce.PhoenixInputSplit) SQLException(java.sql.SQLException) Lists(com.google.common.collect.Lists) ImmutableList(com.google.common.collect.ImmutableList) QueryPlan(org.apache.phoenix.compile.QueryPlan) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) Objects.requireNonNull(java.util.Objects.requireNonNull) Bytes(org.apache.hadoop.hbase.util.Bytes) TableName(org.apache.hadoop.hbase.TableName) InputSplit(org.apache.hadoop.mapreduce.InputSplit) JdbcColumnHandle(io.trino.plugin.jdbc.JdbcColumnHandle) ConnectorSplit(io.trino.spi.connector.ConnectorSplit) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) TrinoException(io.trino.spi.TrinoException) ConnectorSplitSource(io.trino.spi.connector.ConnectorSplitSource) IOException(java.io.IOException) ConnectorSession(io.trino.spi.connector.ConnectorSession) Scan(org.apache.hadoop.hbase.client.Scan) List(java.util.List) Collectors.toList(java.util.stream.Collectors.toList) RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) DynamicFilter(io.trino.spi.connector.DynamicFilter) Optional(java.util.Optional) PhoenixPreparedStatement(org.apache.phoenix.jdbc.PhoenixPreparedStatement) JdbcTableHandle(io.trino.plugin.jdbc.JdbcTableHandle) HostAddress(io.trino.spi.HostAddress) PHOENIX_SPLIT_ERROR(io.trino.plugin.phoenix.PhoenixErrorCode.PHOENIX_SPLIT_ERROR) EXPECTED_UPPER_REGION_KEY(org.apache.phoenix.coprocessor.BaseScannerRegionObserver.EXPECTED_UPPER_REGION_KEY) ConnectorTransactionHandle(io.trino.spi.connector.ConnectorTransactionHandle)
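The snippet calls a private getSplitAddresses helper that is not shown above. A minimal sketch of what such a helper could look like, assuming a java.util.Arrays import in addition to the ones listed; this is an illustration, not the actual PhoenixSplitManager implementation:

private List<HostAddress> getSplitAddresses(PhoenixInputSplit split) {
    try {
        // InputSplit.getLocations() exposes the preferred hosts of the underlying HBase scans;
        // translating them to HostAddress lets Trino schedule the split close to the data.
        return Arrays.stream(split.getLocations())
                .map(HostAddress::fromString)
                .collect(toImmutableList());
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new TrinoException(PHOENIX_INTERNAL_ERROR, "Interrupted while reading split locations", e);
    } catch (IOException e) {
        throw new TrinoException(PHOENIX_INTERNAL_ERROR, "Failed to read split locations", e);
    }
}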

Example 7 with JdbcTableHandle

Use of io.trino.plugin.jdbc.JdbcTableHandle in project trino by trinodb.

From the class TestPostgreSqlConnectorTest, method testPredicatePushdown.

@Test
public void testPredicatePushdown() {
    // varchar equality
    assertThat(query("SELECT regionkey, nationkey, name FROM nation WHERE name = 'ROMANIA'")).matches("VALUES (BIGINT '3', BIGINT '19', CAST('ROMANIA' AS varchar(25)))").isFullyPushedDown();
    // varchar range
    assertThat(query("SELECT regionkey, nationkey, name FROM nation WHERE name BETWEEN 'POLAND' AND 'RPA'")).matches("VALUES (BIGINT '3', BIGINT '19', CAST('ROMANIA' AS varchar(25)))").isNotFullyPushedDown(FilterNode.class);
    // varchar IN without domain compaction
    assertThat(query("SELECT regionkey, nationkey, name FROM nation WHERE name IN ('POLAND', 'ROMANIA', 'VIETNAM')")).matches("VALUES " + "(BIGINT '3', BIGINT '19', CAST('ROMANIA' AS varchar(25))), " + "(BIGINT '2', BIGINT '21', CAST('VIETNAM' AS varchar(25)))").isFullyPushedDown();
    // varchar IN with small compaction threshold
    assertThat(query(
            Session.builder(getSession()).setCatalogSessionProperty("postgresql", "domain_compaction_threshold", "1").build(),
            "SELECT regionkey, nationkey, name FROM nation WHERE name IN ('POLAND', 'ROMANIA', 'VIETNAM')"))
            .matches("VALUES " + "(BIGINT '3', BIGINT '19', CAST('ROMANIA' AS varchar(25))), " + "(BIGINT '2', BIGINT '21', CAST('VIETNAM' AS varchar(25)))")
            .isNotFullyPushedDown(node(FilterNode.class,
                    // verify that no constraint is applied by the connector
                    tableScan(tableHandle -> ((JdbcTableHandle) tableHandle).getConstraint().isAll(), TupleDomain.all(), ImmutableMap.of())));
    // varchar different case
    assertThat(query("SELECT regionkey, nationkey, name FROM nation WHERE name = 'romania'")).returnsEmptyResult().isFullyPushedDown();
    // bigint equality
    assertThat(query("SELECT regionkey, nationkey, name FROM nation WHERE nationkey = 19")).matches("VALUES (BIGINT '3', BIGINT '19', CAST('ROMANIA' AS varchar(25)))").isFullyPushedDown();
    // bigint equality with small compaction threshold
    assertThat(query(Session.builder(getSession()).setCatalogSessionProperty("postgresql", "domain_compaction_threshold", "1").build(), "SELECT regionkey, nationkey, name FROM nation WHERE nationkey IN (19, 21)")).matches("VALUES " + "(BIGINT '3', BIGINT '19', CAST('ROMANIA' AS varchar(25))), " + "(BIGINT '2', BIGINT '21', CAST('VIETNAM' AS varchar(25)))").isNotFullyPushedDown(FilterNode.class);
    // bigint range, with decimal to bigint simplification
    assertThat(query("SELECT regionkey, nationkey, name FROM nation WHERE nationkey BETWEEN 18.5 AND 19.5")).matches("VALUES (BIGINT '3', BIGINT '19', CAST('ROMANIA' AS varchar(25)))").isFullyPushedDown();
    // date equality
    assertThat(query("SELECT orderkey FROM orders WHERE orderdate = DATE '1992-09-29'")).matches("VALUES BIGINT '1250', 34406, 38436, 57570").isFullyPushedDown();
    // predicate over aggregation key (likely to be optimized before being pushed down into the connector)
    assertThat(query("SELECT * FROM (SELECT regionkey, sum(nationkey) FROM nation GROUP BY regionkey) WHERE regionkey = 3")).matches("VALUES (BIGINT '3', BIGINT '77')").isFullyPushedDown();
    // predicate over aggregation result
    assertThat(query("SELECT regionkey, sum(nationkey) FROM nation GROUP BY regionkey HAVING sum(nationkey) = 77")).matches("VALUES (BIGINT '3', BIGINT '77')").isFullyPushedDown();
    // predicate over TopN result
    assertThat(query("" + "SELECT orderkey " + "FROM (SELECT * FROM orders ORDER BY orderdate DESC, orderkey ASC LIMIT 10)" + "WHERE orderdate = DATE '1998-08-01'")).matches("VALUES BIGINT '27588', 22403, 37735").ordered().isFullyPushedDown();
    assertThat(query("" + "SELECT custkey " + "FROM (SELECT SUM(totalprice) as sum, custkey, COUNT(*) as cnt FROM orders GROUP BY custkey order by sum desc limit 10) " + "WHERE cnt > 30")).matches("VALUES BIGINT '643', 898").ordered().isFullyPushedDown();
    // predicate over join
    Session joinPushdownEnabled = joinPushdownEnabled(getSession());
    assertThat(query(joinPushdownEnabled, "SELECT c.name, n.name FROM customer c JOIN nation n ON c.custkey = n.nationkey WHERE acctbal > 8000")).isFullyPushedDown();
    // varchar predicate over join
    assertThat(query(joinPushdownEnabled, "SELECT c.name, n.name FROM customer c JOIN nation n ON c.custkey = n.nationkey WHERE address = 'TcGe5gaZNgVePxU5kRrvXBfkasDTea'")).isFullyPushedDown();
    assertThat(query(joinPushdownEnabled, "SELECT c.name, n.name FROM customer c JOIN nation n ON c.custkey = n.nationkey WHERE address < 'TcGe5gaZNgVePxU5kRrvXBfkasDTea'")).isNotFullyPushedDown(node(JoinNode.class, anyTree(node(TableScanNode.class)), anyTree(node(TableScanNode.class))));
}
Also used : TableScanNode(io.trino.sql.planner.plan.TableScanNode) JoinNode(io.trino.sql.planner.plan.JoinNode) FilterNode(io.trino.sql.planner.plan.FilterNode) JdbcTableHandle(io.trino.plugin.jdbc.JdbcTableHandle) Session(io.trino.Session) Test(org.testng.annotations.Test) BaseJdbcConnectorTest(io.trino.plugin.jdbc.BaseJdbcConnectorTest)
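The test relies on a joinPushdownEnabled helper inherited from the base test class that is not shown here. A minimal sketch of what such a helper might look like, assuming the catalog is named postgresql and that the connector exposes a join_pushdown_enabled catalog session property (both are assumptions, not taken from the test):

private Session joinPushdownEnabled(Session session) {
    // Hypothetical sketch: enable join pushdown for the JDBC catalog under test.
    return Session.builder(session)
            .setCatalogSessionProperty("postgresql", "join_pushdown_enabled", "true")
            .build();
}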

Example 8 with JdbcTableHandle

Use of io.trino.plugin.jdbc.JdbcTableHandle in project trino by trinodb.

From the class ClickHouseClient, method setTableProperties.

@Override
public void setTableProperties(ConnectorSession session, JdbcTableHandle handle, Map<String, Optional<Object>> nullableProperties) {
    // TODO: Support other table properties
    checkArgument(nullableProperties.size() == 1 && nullableProperties.containsKey(SAMPLE_BY_PROPERTY), "Only support setting 'sample_by' property");
    // TODO: Support sampling key removal when we support a newer version of ClickHouse. See https://github.com/ClickHouse/ClickHouse/pull/30180.
    checkArgument(nullableProperties.values().stream().noneMatch(Optional::isEmpty), "Setting a property to null is not supported");
    Map<String, Object> properties = nullableProperties.entrySet().stream()
            .filter(entry -> entry.getValue().isPresent())
            .collect(toImmutableMap(Entry::getKey, entry -> entry.getValue().orElseThrow()));
    ImmutableList.Builder<String> tableOptions = ImmutableList.builder();
    ClickHouseTableProperties.getSampleBy(properties).ifPresent(value -> tableOptions.add("SAMPLE BY " + value));
    try (Connection connection = connectionFactory.openConnection(session)) {
        String sql = format("ALTER TABLE %s MODIFY %s", quoted(handle.asPlainTable().getRemoteTableName()), join(" ", tableOptions.build()));
        execute(connection, sql);
    } catch (SQLException e) {
        throw new TrinoException(JDBC_ERROR, e);
    }
}
Also used : UNNECESSARY(java.math.RoundingMode.UNNECESSARY) AggregateFunction(io.trino.spi.connector.AggregateFunction) StandardColumnMappings.bigintWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction) NANOSECONDS_PER_MICROSECOND(io.trino.spi.type.Timestamps.NANOSECONDS_PER_MICROSECOND) ImplementCount(io.trino.plugin.jdbc.aggregation.ImplementCount) InetAddress(java.net.InetAddress) BigDecimal(java.math.BigDecimal) Slices.wrappedBuffer(io.airlift.slice.Slices.wrappedBuffer) NOT_SUPPORTED(io.trino.spi.StandardErrorCode.NOT_SUPPORTED) ResultSet(java.sql.ResultSet) Map(java.util.Map) StandardColumnMappings.doubleWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction) BigInteger(java.math.BigInteger) DecimalSessionSessionProperties.getDecimalDefaultScale(io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalDefaultScale) ENGLISH(java.util.Locale.ENGLISH) SMALLINT(io.trino.spi.type.SmallintType.SMALLINT) MathContext(java.math.MathContext) ImplementAvgFloatingPoint(io.trino.plugin.jdbc.aggregation.ImplementAvgFloatingPoint) LongWriteFunction(io.trino.plugin.jdbc.LongWriteFunction) ImplementCountAll(io.trino.plugin.jdbc.aggregation.ImplementCountAll) PreparedStatement(java.sql.PreparedStatement) SchemaTableName(io.trino.spi.connector.SchemaTableName) UncheckedIOException(java.io.UncheckedIOException) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) StandardColumnMappings.smallintWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction) StandardColumnMappings.longDecimalWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction) AggregateFunctionRewriter(io.trino.plugin.base.aggregation.AggregateFunctionRewriter) ConnectionFactory(io.trino.plugin.jdbc.ConnectionFactory) JdbcConnectorExpressionRewriterBuilder(io.trino.plugin.jdbc.expression.JdbcConnectorExpressionRewriterBuilder) JdbcTableHandle(io.trino.plugin.jdbc.JdbcTableHandle) DATE(io.trino.spi.type.DateType.DATE) REAL(io.trino.spi.type.RealType.REAL) StandardColumnMappings.doubleColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping) Slice(io.airlift.slice.Slice) StandardColumnMappings.timestampColumnMappingUsingSqlTimestampWithRounding(io.trino.plugin.jdbc.StandardColumnMappings.timestampColumnMappingUsingSqlTimestampWithRounding) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) TIMESTAMP_MILLIS(io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS) LocalDateTime(java.time.LocalDateTime) StandardColumnMappings.booleanWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.booleanWriteFunction) Strings.isNullOrEmpty(com.google.common.base.Strings.isNullOrEmpty) ALLOW_OVERFLOW(io.trino.plugin.jdbc.DecimalConfig.DecimalMapping.ALLOW_OVERFLOW) BOOLEAN(io.trino.spi.type.BooleanType.BOOLEAN) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata) StandardColumnMappings.bigintColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping) Enums(com.google.common.base.Enums) OptionalLong(java.util.OptionalLong) Float.floatToRawIntBits(java.lang.Float.floatToRawIntBits) SQLException(java.sql.SQLException) String.join(java.lang.String.join) UuidType.javaUuidToTrinoUuid(io.trino.spi.type.UuidType.javaUuidToTrinoUuid) StandardColumnMappings.decimalColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping) UuidType.trinoUuidToJavaUuid(io.trino.spi.type.UuidType.trinoUuidToJavaUuid) ColumnHandle(io.trino.spi.connector.ColumnHandle) 
INVALID_TABLE_PROPERTY(io.trino.spi.StandardErrorCode.INVALID_TABLE_PROPERTY) ConnectorExpressionRewriter(io.trino.plugin.base.expression.ConnectorExpressionRewriter) SAMPLE_BY_PROPERTY(io.trino.plugin.clickhouse.ClickHouseTableProperties.SAMPLE_BY_PROPERTY) Math.floorDiv(java.lang.Math.floorDiv) AggregateFunctionRule(io.trino.plugin.base.aggregation.AggregateFunctionRule) Nullable(javax.annotation.Nullable) TIMESTAMP_SECONDS(io.trino.spi.type.TimestampType.TIMESTAMP_SECONDS) Int128(io.trino.spi.type.Int128) StandardColumnMappings.realWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction) DecimalType.createDecimalType(io.trino.spi.type.DecimalType.createDecimalType) QueryBuilder(io.trino.plugin.jdbc.QueryBuilder) ClickHouseSessionProperties.isMapStringAsVarchar(io.trino.plugin.clickhouse.ClickHouseSessionProperties.isMapStringAsVarchar) StandardColumnMappings.smallintColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping) StandardTypes(io.trino.spi.type.StandardTypes) ConnectorSession(io.trino.spi.connector.ConnectorSession) UnknownHostException(java.net.UnknownHostException) DOUBLE(io.trino.spi.type.DoubleType.DOUBLE) IdentifierMapping(io.trino.plugin.jdbc.mapping.IdentifierMapping) VarbinaryType(io.trino.spi.type.VarbinaryType) CharType(io.trino.spi.type.CharType) DecimalSessionSessionProperties.getDecimalRoundingMode(io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRoundingMode) TINYINT(io.trino.spi.type.TinyintType.TINYINT) InetAddresses(com.google.common.net.InetAddresses) StandardColumnMappings.shortDecimalWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction) StandardColumnMappings.varbinaryWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.varbinaryWriteFunction) JdbcExpression(io.trino.plugin.jdbc.JdbcExpression) WriteMapping(io.trino.plugin.jdbc.WriteMapping) PRIMARY_KEY_PROPERTY(io.trino.plugin.clickhouse.ClickHouseTableProperties.PRIMARY_KEY_PROPERTY) JDBC_ERROR(io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR) System.arraycopy(java.lang.System.arraycopy) BaseJdbcConfig(io.trino.plugin.jdbc.BaseJdbcConfig) Connection(java.sql.Connection) BiFunction(java.util.function.BiFunction) ImplementSum(io.trino.plugin.jdbc.aggregation.ImplementSum) ObjectWriteFunction(io.trino.plugin.jdbc.ObjectWriteFunction) StandardColumnMappings.dateReadFunctionUsingLocalDate(io.trino.plugin.jdbc.StandardColumnMappings.dateReadFunctionUsingLocalDate) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) ColumnMapping(io.trino.plugin.jdbc.ColumnMapping) INTEGER(io.trino.spi.type.IntegerType.INTEGER) Splitter(com.google.common.base.Splitter) TypeSignature(io.trino.spi.type.TypeSignature) INVALID_ARGUMENTS(io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS) ImmutableSet(com.google.common.collect.ImmutableSet) DecimalSessionSessionProperties.getDecimalRounding(io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRounding) ImmutableMap(com.google.common.collect.ImmutableMap) ClickHouseColumn(com.clickhouse.client.ClickHouseColumn) MICROSECONDS_PER_SECOND(io.trino.spi.type.Timestamps.MICROSECONDS_PER_SECOND) TrinoException(io.trino.spi.TrinoException) UUID(java.util.UUID) String.format(java.lang.String.format) List(java.util.List) JdbcTypeHandle(io.trino.plugin.jdbc.JdbcTypeHandle) StandardColumnMappings.longDecimalReadFunction(io.trino.plugin.jdbc.StandardColumnMappings.longDecimalReadFunction) BIGINT(io.trino.spi.type.BigintType.BIGINT) LocalDate(java.time.LocalDate) 
Decimals(io.trino.spi.type.Decimals) Entry(java.util.Map.Entry) UTC(java.time.ZoneOffset.UTC) Optional(java.util.Optional) Math.max(java.lang.Math.max) StandardColumnMappings.varcharReadFunction(io.trino.plugin.jdbc.StandardColumnMappings.varcharReadFunction) StandardColumnMappings.tinyintColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.tinyintColumnMapping) DecimalType(io.trino.spi.type.DecimalType) StandardColumnMappings.integerColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping) Types(java.sql.Types) StandardColumnMappings.varbinaryColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.varbinaryColumnMapping) SliceWriteFunction(io.trino.plugin.jdbc.SliceWriteFunction) Math.floorMod(java.lang.Math.floorMod) ClickHouseDataType(com.clickhouse.client.ClickHouseDataType) StandardColumnMappings.varcharWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction) StandardColumnMappings.tinyintWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction) Type(io.trino.spi.type.Type) VarcharType.createUnboundedVarcharType(io.trino.spi.type.VarcharType.createUnboundedVarcharType) Shorts(com.google.common.primitives.Shorts) Inject(javax.inject.Inject) VarcharType(io.trino.spi.type.VarcharType) PARTITION_BY_PROPERTY(io.trino.plugin.clickhouse.ClickHouseTableProperties.PARTITION_BY_PROPERTY) ImmutableList(com.google.common.collect.ImmutableList) Verify.verify(com.google.common.base.Verify.verify) BaseJdbcClient(io.trino.plugin.jdbc.BaseJdbcClient) Math.toIntExact(java.lang.Math.toIntExact) RemoteTableName(io.trino.plugin.jdbc.RemoteTableName) ImplementMinMax(io.trino.plugin.jdbc.aggregation.ImplementMinMax) ORDER_BY_PROPERTY(io.trino.plugin.clickhouse.ClickHouseTableProperties.ORDER_BY_PROPERTY) JdbcColumnHandle(io.trino.plugin.jdbc.JdbcColumnHandle) ENGINE_PROPERTY(io.trino.plugin.clickhouse.ClickHouseTableProperties.ENGINE_PROPERTY) StandardColumnMappings.integerWriteFunction(io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction) GENERIC_INTERNAL_ERROR(io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) DISABLE_PUSHDOWN(io.trino.plugin.jdbc.PredicatePushdownController.DISABLE_PUSHDOWN) StandardColumnMappings.timestampReadFunction(io.trino.plugin.jdbc.StandardColumnMappings.timestampReadFunction) DateTimeFormatter(java.time.format.DateTimeFormatter) TypeManager(io.trino.spi.type.TypeManager)
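For context, a hedged caller-side sketch of the setTableProperties override above; all identifiers in it are illustrative rather than taken from the Trino codebase, and the "sample_by" key is assumed to be the value of SAMPLE_BY_PROPERTY:

// Hypothetical sketch: clickHouseClient, session and tableHandle are assumed to exist,
// and "user_id" is a made-up sampling column.
Map<String, Optional<Object>> properties = ImmutableMap.of("sample_by", Optional.of("user_id"));
clickHouseClient.setTableProperties(session, tableHandle, properties);
// The method then executes roughly: ALTER TABLE <remote table> MODIFY SAMPLE BY user_id,
// which is what a Trino ALTER TABLE ... SET PROPERTIES sample_by = 'user_id' ends up invoking.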

Example 9 with JdbcTableHandle

Use of io.trino.plugin.jdbc.JdbcTableHandle in project trino by trinodb.

From the class DruidJdbcClient, method getTableHandle.

// Overridden to filter out tables that don't match schemaTableName
@Override
public Optional<JdbcTableHandle> getTableHandle(ConnectorSession session, SchemaTableName schemaTableName) {
    String jdbcSchemaName = schemaTableName.getSchemaName();
    String jdbcTableName = schemaTableName.getTableName();
    try (Connection connection = connectionFactory.openConnection(session);
        ResultSet resultSet = getTables(connection, Optional.of(jdbcSchemaName), Optional.of(jdbcTableName))) {
        List<JdbcTableHandle> tableHandles = new ArrayList<>();
        while (resultSet.next()) {
            String schemaName = resultSet.getString("TABLE_SCHEM");
            String tableName = resultSet.getString("TABLE_NAME");
            if (Objects.equals(schemaName, jdbcSchemaName) && Objects.equals(tableName, jdbcTableName)) {
                tableHandles.add(new JdbcTableHandle(schemaTableName, DRUID_CATALOG, schemaName, tableName));
            }
        }
        if (tableHandles.isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(getOnlyElement(tableHandles));
    } catch (SQLException e) {
        throw new TrinoException(JDBC_ERROR, e);
    }
}
Also used : SQLException(java.sql.SQLException) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet) ArrayList(java.util.ArrayList) TrinoException(io.trino.spi.TrinoException) JdbcTableHandle(io.trino.plugin.jdbc.JdbcTableHandle)
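A short, hypothetical usage sketch: the override returns Optional.empty() when nothing matches, and getOnlyElement throws if the JDBC metadata unexpectedly reports more than one matching table.

// Hypothetical lookup; druidJdbcClient and session are assumed to exist, and the
// "druid"/"wikipedia" names are made up for illustration.
Optional<JdbcTableHandle> handle = druidJdbcClient.getTableHandle(session, new SchemaTableName("druid", "wikipedia"));
if (handle.isEmpty()) {
    // No Druid table matched the requested schema and table name exactly.
}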

Example 10 with JdbcTableHandle

Use of io.trino.plugin.jdbc.JdbcTableHandle in project trino by trinodb.

From the class PhoenixMetadata, method addColumn.

@Override
public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column) {
    JdbcTableHandle handle = (JdbcTableHandle) tableHandle;
    phoenixClient.execute(session, format(
            "ALTER TABLE %s ADD %s %s",
            getEscapedTableName(handle.getSchemaName(), handle.getTableName()),
            column.getName(),
            phoenixClient.toWriteMapping(session, column.getType()).getDataType()));
}
Also used : JdbcTableHandle(io.trino.plugin.jdbc.JdbcTableHandle)
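A hedged invocation sketch (assuming imports of ColumnMetadata and BigintType.BIGINT, which are not in the list above); the exact DDL depends on getEscapedTableName and the Phoenix write mapping, so the statement in the comment is only approximate:

// Hypothetical call; phoenixMetadata, session and tableHandle are assumed to exist,
// and "view_count" is a made-up column name.
phoenixMetadata.addColumn(session, tableHandle, new ColumnMetadata("view_count", BIGINT));
// For a handle pointing at my_schema.my_table this executes roughly:
//   ALTER TABLE "my_schema"."my_table" ADD view_count bigint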

Aggregations

JdbcTableHandle (io.trino.plugin.jdbc.JdbcTableHandle): 18 usages
JdbcColumnHandle (io.trino.plugin.jdbc.JdbcColumnHandle): 13 usages
Connection (java.sql.Connection): 7 usages
SQLException (java.sql.SQLException): 7 usages
List (java.util.List): 7 usages
ImmutableList (com.google.common.collect.ImmutableList): 6 usages
TrinoException (io.trino.spi.TrinoException): 6 usages
Optional (java.util.Optional): 6 usages
ColumnHandle (io.trino.spi.connector.ColumnHandle): 5 usages
ConnectorSession (io.trino.spi.connector.ConnectorSession): 5 usages
String.format (java.lang.String.format): 5 usages
Inject (javax.inject.Inject): 5 usages
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 4 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 4 usages
Objects.requireNonNull (java.util.Objects.requireNonNull): 4 usages
Preconditions.checkArgument (com.google.common.base.Preconditions.checkArgument): 3 usages
Slice (io.airlift.slice.Slice): 3 usages
Map (java.util.Map): 3 usages
Enums (com.google.common.base.Enums): 2 usages
Verify.verify (com.google.common.base.Verify.verify): 2 usages