Example usage of io.trino.plugin.hive.metastore.Table in the Trino project (trinodb): the getPartitionNamesByFilter method of the GlueHiveMetastore class.
/**
 * Returns the names of the partitions of the given table that can match the
 * supplied partition-key filter.
 *
 * @param partitionKeysFilter predicate over partition-key column names
 * @return partition names matching the filter; an empty list when the filter is "none"
 */
@Override
public Optional<List<String>> getPartitionNamesByFilter(String databaseName, String tableName, List<String> columnNames, TupleDomain<String> partitionKeysFilter) {
    // A "none" domain can never match any partition, so skip the Glue round trip.
    if (partitionKeysFilter.isNone()) {
        return Optional.of(ImmutableList.of());
    }
    Table table = getExistingTable(databaseName, tableName);
    // Translate the Trino predicate into a Glue partition filter expression.
    String glueExpression = GlueExpressionUtil.buildGlueExpression(columnNames, partitionKeysFilter, assumeCanonicalPartitionKeys);
    List<Partition> matchingPartitions = getPartitions(table, glueExpression);
    List<String> partitionNames = buildPartitionNames(table.getPartitionColumns(), matchingPartitions);
    return Optional.of(partitionNames);
}
Example usage of io.trino.plugin.hive.metastore.Table in the Trino project (trinodb): the dropTable method of the GlueHiveMetastore class.
/**
 * Drops the table from Glue and, when requested, deletes the underlying data
 * directory of a managed table.
 *
 * @param deleteData whether to remove the table's storage directory after the drop
 * @throws TrinoException with HIVE_METASTORE_ERROR when the Glue call fails
 */
@Override
public void dropTable(String databaseName, String tableName, boolean deleteData) {
    // Fetch metadata before the drop; the storage location is needed afterwards.
    Table table = getExistingTable(databaseName, tableName);
    DeleteTableRequest deleteRequest = new DeleteTableRequest()
            .withCatalogId(catalogId)
            .withDatabaseName(databaseName)
            .withName(tableName);
    try {
        stats.getDeleteTable().call(() -> glueClient.deleteTable(deleteRequest));
    }
    catch (AmazonServiceException e) {
        throw new TrinoException(HIVE_METASTORE_ERROR, e);
    }
    // Only managed tables own their storage; external table data is left untouched.
    String location = table.getStorage().getLocation();
    if (deleteData && isManagedTable(table) && !isNullOrEmpty(location)) {
        deleteDir(hdfsContext, hdfsEnvironment, new Path(location), true);
    }
}
Example usage of io.trino.plugin.hive.metastore.Table in the Trino project (trinodb): the updateTableStatistics method of the GlueHiveMetastore class.
// Applies the given update function to the table's statistics and persists the
// result: basic statistics are folded into the table parameters and written via
// a Glue UpdateTableRequest; column-level statistics are written through the
// columnStatisticsProvider. Statement order matters here — the Glue table update
// runs before the column-statistics write, so a failure in the latter can leave
// basic and column statistics out of sync (no transaction spans both calls).
@Override
public void updateTableStatistics(String databaseName, String tableName, AcidTransaction transaction, Function<PartitionStatistics, PartitionStatistics> update) {
Table table = getExistingTable(databaseName, tableName);
// For a running ACID transaction, stamp the transaction's write id onto the
// table copy so the statistics are associated with that write.
if (transaction.isAcidTransactionRunning()) {
table = Table.builder(table).setWriteId(OptionalLong.of(transaction.getWriteId())).build();
}
PartitionStatistics currentStatistics = getTableStatistics(table);
PartitionStatistics updatedStatistics = update.apply(currentStatistics);
try {
TableInput tableInput = GlueInputConverter.convertTable(table);
// Merge the new basic statistics into the existing table parameters.
final Map<String, String> statisticsParameters = updateStatisticsParameters(table.getParameters(), updatedStatistics.getBasicStatistics());
tableInput.setParameters(statisticsParameters);
// Rebuild the local Table with the same parameters so the column-statistics
// write below sees the updated parameter map, matching what was sent to Glue.
table = Table.builder(table).setParameters(statisticsParameters).build();
stats.getUpdateTable().call(() -> glueClient.updateTable(new UpdateTableRequest().withCatalogId(catalogId).withDatabaseName(databaseName).withTableInput(tableInput)));
columnStatisticsProvider.updateTableColumnStatistics(table, updatedStatistics.getColumnStatistics());
} catch (EntityNotFoundException e) {
// The table vanished between the read and the update.
throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
} catch (AmazonServiceException e) {
throw new TrinoException(HIVE_METASTORE_ERROR, e);
}
}
Example usage of io.trino.plugin.hive.metastore.Table in the Trino project (trinodb): the renameColumn method of the GlueHiveMetastore class.
/**
 * Renames a data column of the table, keeping its type and comment.
 *
 * @throws TrinoException with NOT_SUPPORTED when the column is a partition column
 */
@Override
public void renameColumn(String databaseName, String tableName, String oldColumnName, String newColumnName) {
    Table oldTable = getExistingTable(databaseName, tableName);
    // Partition columns define the physical layout in storage, so renaming them is rejected.
    boolean renamesPartitionColumn = oldTable.getPartitionColumns().stream()
            .anyMatch(column -> column.getName().equals(oldColumnName));
    if (renamesPartitionColumn) {
        throw new TrinoException(NOT_SUPPORTED, "Renaming partition columns is not supported");
    }
    // Rebuild the data-column list, substituting the renamed column in place.
    ImmutableList.Builder<Column> updatedColumns = ImmutableList.builder();
    for (Column column : oldTable.getDataColumns()) {
        updatedColumns.add(column.getName().equals(oldColumnName)
                ? new Column(newColumnName, column.getType(), column.getComment())
                : column);
    }
    Table newTable = Table.builder(oldTable)
            .setDataColumns(updatedColumns.build())
            .build();
    replaceTable(databaseName, tableName, newTable, null);
}
Example usage of io.trino.plugin.hive.metastore.Table in the Trino project (trinodb): the fromProto method of the ProtoUtils class.
/**
 * Converts an Alluxio {@code TableInfo} proto into a Trino metastore {@link Table}.
 * Only tables with a Hive layout are supported.
 *
 * @throws TrinoException with NOT_SUPPORTED when the layout is missing or not a Hive layout
 * @throws IllegalArgumentException when the layout bytes cannot be parsed as a PartitionInfo
 */
public static Table fromProto(alluxio.grpc.table.TableInfo table) {
    if (!table.hasLayout()) {
        throw new TrinoException(NOT_SUPPORTED, "Unsupported table metadata. missing layout.: " + table.getTableName());
    }
    Layout layout = table.getLayout();
    if (!alluxio.table.ProtoUtils.isHiveLayout(layout)) {
        throw new TrinoException(NOT_SUPPORTED, "Unsupported table layout: " + layout + " for table: " + table.getTableName());
    }
    try {
        PartitionInfo partitionInfo = alluxio.table.ProtoUtils.toHiveLayout(layout);

        // Data columns are the schema columns that are not partition keys.
        Set<String> partitionColumnNames = table.getPartitionColsList().stream()
                .map(FieldSchema::getName)
                .collect(toImmutableSet());
        List<FieldSchema> dataColumns = table.getSchema().getColsList().stream()
                .filter(field -> !partitionColumnNames.contains(field.getName()))
                .collect(toImmutableList());

        Map<String, String> tableParameters = table.getParametersMap();
        Table.Builder builder = Table.builder()
                .setDatabaseName(table.getDbName())
                .setTableName(table.getTableName())
                .setOwner(Optional.ofNullable(table.getOwner()))
                .setTableType(table.getType().toString())
                .setDataColumns(dataColumns.stream().map(ProtoUtils::fromProto).collect(toImmutableList()))
                .setPartitionColumns(table.getPartitionColsList().stream().map(ProtoUtils::fromProto).collect(toImmutableList()))
                .setParameters(tableParameters)
                .setViewOriginalText(Optional.empty())
                .setViewExpandedText(Optional.empty());

        // Storage details (format, location, bucketing, serde) come from the Hive layout.
        alluxio.grpc.table.layout.hive.Storage storage = partitionInfo.getStorage();
        builder.getStorageBuilder()
                .setSkewed(storage.getSkewed())
                .setStorageFormat(fromProto(storage.getStorageFormat()))
                .setLocation(storage.getLocation())
                .setBucketProperty(storage.hasBucketProperty()
                        ? fromProto(tableParameters, storage.getBucketProperty())
                        : Optional.empty())
                .setSerdeParameters(storage.getStorageFormat().getSerdelibParametersMap());
        return builder.build();
    }
    catch (InvalidProtocolBufferException e) {
        throw new IllegalArgumentException("Failed to extract PartitionInfo from TableInfo", e);
    }
}
Aggregations