Usage example of io.trino.plugin.jdbc.JdbcTableHandle from the trinodb/trino project: class PhoenixMetadata, method dropTable.
@Override
public void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle) {
    // A table created without an explicit primary key gets an auto-generated ROWKEY
    // column backed by a Phoenix sequence; that sequence must be dropped with the table.
    boolean autogeneratedRowkey = getColumnHandles(session, tableHandle).values().stream()
            .map(JdbcColumnHandle.class::cast)
            .anyMatch(column -> ROWKEY.equals(column.getColumnName()));
    JdbcTableHandle handle = (JdbcTableHandle) tableHandle;
    if (autogeneratedRowkey) {
        String sequenceName = getEscapedTableName(handle.getSchemaName(), handle.getTableName() + "_sequence");
        phoenixClient.execute(session, format("DROP SEQUENCE %s", sequenceName));
    }
    phoenixClient.dropTable(session, handle);
}
Usage example of io.trino.plugin.jdbc.JdbcTableHandle from the trinodb/trino project: class PhoenixMetadata, method getTableProperties.
@Override
public ConnectorTableProperties getTableProperties(ConnectorSession session, ConnectorTableHandle table) {
    JdbcTableHandle handle = (JdbcTableHandle) table;
    // Expose the handle's sort order (when present) as local sorting properties so the
    // engine can avoid redundant sorts; everything else is left unconstrained.
    List<LocalProperty<ColumnHandle>> sortingProperties = handle.getSortOrder()
            .map(sortItems -> sortItems.stream()
                    .map(sortItem -> (LocalProperty<ColumnHandle>) new SortingProperty<ColumnHandle>(sortItem.getColumn(), sortItem.getSortOrder()))
                    .collect(toImmutableList()))
            .orElse(ImmutableList.of());
    return new ConnectorTableProperties(
            TupleDomain.all(),
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            sortingProperties);
}
Usage example of io.trino.plugin.jdbc.JdbcTableHandle from the trinodb/trino project: class PhoenixSplitManager, method getSplits.
@Override
public ConnectorSplitSource getSplits(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorTableHandle table, SplitSchedulingStrategy splitSchedulingStrategy, DynamicFilter dynamicFilter) {
    JdbcTableHandle handle = (JdbcTableHandle) table;
    try (Connection connection = phoenixClient.getConnection(session)) {
        // Prefer the column list carried on the handle; fall back to a metadata lookup.
        List<JdbcColumnHandle> columns = handle.getColumns()
                .map(columnSet -> columnSet.stream()
                        .map(JdbcColumnHandle.class::cast)
                        .collect(toList()))
                .orElseGet(() -> phoenixClient.getColumns(session, handle));
        PhoenixPreparedStatement inputQuery =
                (PhoenixPreparedStatement) phoenixClient.prepareStatement(session, connection, handle, columns, Optional.empty());
        int maxScansPerSplit = session.getProperty(PhoenixSessionProperties.MAX_SCANS_PER_SPLIT, Integer.class);
        // Each Phoenix input split becomes one connector split, serialized so workers
        // can reconstruct it.
        List<ConnectorSplit> splits = getSplits(inputQuery, maxScansPerSplit).stream()
                .map(PhoenixInputSplit.class::cast)
                .map(phoenixSplit -> new PhoenixSplit(
                        getSplitAddresses(phoenixSplit),
                        SerializedPhoenixInputSplit.serialize(phoenixSplit)))
                .collect(toImmutableList());
        return new FixedSplitSource(splits);
    }
    catch (IOException | SQLException e) {
        throw new TrinoException(PHOENIX_SPLIT_ERROR, "Couldn't get Phoenix splits", e);
    }
}
Usage example of io.trino.plugin.jdbc.JdbcTableHandle from the trinodb/trino project: class PhoenixMetadata, method dropTable (duplicate of the first snippet).
@Override
public void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle) {
    // Tables created without an explicit primary key carry an auto-generated ROWKEY
    // column whose backing Phoenix sequence must be removed along with the table.
    boolean rowkeyGenerated = getColumnHandles(session, tableHandle).values().stream()
            .anyMatch(columnHandle -> ROWKEY.equals(((JdbcColumnHandle) columnHandle).getColumnName()));
    JdbcTableHandle handle = (JdbcTableHandle) tableHandle;
    if (rowkeyGenerated) {
        phoenixClient.execute(session, format(
                "DROP SEQUENCE %s",
                getEscapedTableName(handle.getSchemaName(), handle.getTableName() + "_sequence")));
    }
    phoenixClient.dropTable(session, handle);
}
Usage example of io.trino.plugin.jdbc.JdbcTableHandle from the trinodb/trino project: class PhoenixMetadata, method addColumn.
@Override
public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column) {
    JdbcTableHandle handle = (JdbcTableHandle) tableHandle;
    String tableName = getEscapedTableName(handle.getSchemaName(), handle.getTableName());
    String dataType = phoenixClient.toWriteMapping(session, column.getType()).getDataType();
    // NOTE(review): the column name is interpolated unquoted, so it is subject to
    // Phoenix's default identifier handling (case folding, reserved words) — confirm
    // this matches the quoting convention used elsewhere in the connector.
    phoenixClient.execute(session, format("ALTER TABLE %s ADD %s %s", tableName, column.getName(), dataType));
}
Aggregations