Use of io.trino.plugin.hive.HiveMetadata.TABLE_COMMENT in project trino by trinodb.
The class BridgingHiveMetastore, method commentTable:
@Override
public void commentTable(String databaseName, String tableName, Optional<String> comment) {
    Optional<org.apache.hadoop.hive.metastore.api.Table> source = delegate.getTable(identity, databaseName, tableName);
    if (source.isEmpty()) {
        throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
    }
    org.apache.hadoop.hive.metastore.api.Table table = source.get();
    Map<String, String> parameters = table.getParameters().entrySet().stream()
            .filter(entry -> !entry.getKey().equals(TABLE_COMMENT))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    comment.ifPresent(value -> parameters.put(TABLE_COMMENT, value));
    table.setParameters(parameters);
    alterTable(databaseName, tableName, table);
}
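The rewrite keeps every existing table parameter except the old comment and re-adds TABLE_COMMENT only when a new value is supplied, so passing Optional.empty() effectively clears the comment. Below is a minimal, self-contained sketch of that rule, assuming HiveMetadata.TABLE_COMMENT resolves to the plain Hive "comment" parameter key (the class and method names here are illustrative, not part of Trino):

import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

public class CommentRewriteSketch {
    // Assumption: mirrors HiveMetadata.TABLE_COMMENT, believed to be the Hive "comment" key
    private static final String TABLE_COMMENT = "comment";

    // Same rewrite rule as commentTable above: drop the old comment, add the new one if present
    static Map<String, String> rewrite(Map<String, String> existing, Optional<String> comment) {
        Map<String, String> parameters = existing.entrySet().stream()
                .filter(entry -> !entry.getKey().equals(TABLE_COMMENT))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        comment.ifPresent(value -> parameters.put(TABLE_COMMENT, value));
        return parameters;
    }

    public static void main(String[] args) {
        Map<String, String> params = Map.of("comment", "old comment", "numFiles", "3");
        System.out.println(rewrite(params, Optional.of("new comment"))); // {numFiles=3, comment=new comment} (order may vary)
        System.out.println(rewrite(params, Optional.empty()));           // {numFiles=3}
    }
}

Note that the two-argument Collectors.toMap makes no mutability promise in its contract; the original code relies on the collected map accepting the extra put, which holds for the current JDK implementation.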
Use of io.trino.plugin.hive.HiveMetadata.TABLE_COMMENT in project trino by trinodb.
The class AbstractMetastoreTableOperations, method commitNewTable:
@Override
protected final void commitNewTable(TableMetadata metadata) {
    String newMetadataLocation = writeNewMetadata(metadata, version + 1);
    Table.Builder builder = Table.builder()
            .setDatabaseName(database)
            .setTableName(tableName)
            .setOwner(owner)
            .setTableType(TableType.EXTERNAL_TABLE.name())
            .setDataColumns(toHiveColumns(metadata.schema().columns()))
            .withStorage(storage -> storage.setLocation(metadata.location()))
            .withStorage(storage -> storage.setStorageFormat(STORAGE_FORMAT))
            .setParameter("EXTERNAL", "TRUE")
            .setParameter(TABLE_TYPE_PROP, ICEBERG_TABLE_TYPE_VALUE)
            .setParameter(METADATA_LOCATION_PROP, newMetadataLocation);
    String tableComment = metadata.properties().get(TABLE_COMMENT);
    if (tableComment != null) {
        builder.setParameter(TABLE_COMMENT, tableComment);
    }
    Table table = builder.build();
    PrincipalPrivileges privileges = owner.map(MetastoreUtil::buildInitialPrivilegeSet).orElse(NO_PRIVILEGES);
    metastore.createTable(table, privileges);
}
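Here the comment travels with the Iceberg table metadata: commitNewTable copies it into the Hive table parameters only when metadata.properties() actually contains a TABLE_COMMENT entry. The following is a hedged sketch of that conditional copy; the parameter keys and constant values are spelled out as assumptions and may differ from the real Trino/Iceberg constants:

import java.util.LinkedHashMap;
import java.util.Map;

public class IcebergCommitParametersSketch {
    // Assumption: HiveMetadata.TABLE_COMMENT is the plain Hive "comment" parameter key
    private static final String TABLE_COMMENT = "comment";
    // Assumptions: illustrative values for the Iceberg constants referenced in the snippet above
    private static final String TABLE_TYPE_PROP = "table_type";
    private static final String METADATA_LOCATION_PROP = "metadata_location";
    private static final String ICEBERG_TABLE_TYPE_VALUE = "ICEBERG";

    // Same copy rule as commitNewTable above: the comment parameter is set only when the Iceberg property exists
    static Map<String, String> hiveParameters(Map<String, String> icebergProperties, String newMetadataLocation) {
        Map<String, String> parameters = new LinkedHashMap<>();
        parameters.put("EXTERNAL", "TRUE");
        parameters.put(TABLE_TYPE_PROP, ICEBERG_TABLE_TYPE_VALUE);
        parameters.put(METADATA_LOCATION_PROP, newMetadataLocation);
        String tableComment = icebergProperties.get(TABLE_COMMENT);
        if (tableComment != null) {
            parameters.put(TABLE_COMMENT, tableComment);
        }
        return parameters;
    }

    public static void main(String[] args) {
        System.out.println(hiveParameters(Map.of("comment", "daily sales snapshots"), "s3://bucket/db/table/metadata/00001.metadata.json"));
        System.out.println(hiveParameters(Map.of(), "s3://bucket/db/table/metadata/00001.metadata.json"));
    }
}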
Use of io.trino.plugin.hive.HiveMetadata.TABLE_COMMENT in project trino by trinodb.
The class TrinoHiveCatalog, method createMaterializedView:
@Override
public void createMaterializedView(ConnectorSession session, SchemaTableName schemaViewName, ConnectorMaterializedViewDefinition definition, boolean replace, boolean ignoreExisting) {
    Optional<io.trino.plugin.hive.metastore.Table> existing = metastore.getTable(schemaViewName.getSchemaName(), schemaViewName.getTableName());
    // It's a create command where the materialized view already exists and 'if not exists' clause is not specified
    if (!replace && existing.isPresent()) {
        if (ignoreExisting) {
            return;
        }
        throw new TrinoException(ALREADY_EXISTS, "Materialized view already exists: " + schemaViewName);
    }
    // Generate a storage table name and create a storage table. The properties in the definition are table properties for the
    // storage table as indicated in the materialized view definition.
    String storageTableName = "st_" + randomUUID().toString().replace("-", "");
    Map<String, Object> storageTableProperties = new HashMap<>(definition.getProperties());
    storageTableProperties.putIfAbsent(FILE_FORMAT_PROPERTY, DEFAULT_FILE_FORMAT_DEFAULT);
    SchemaTableName storageTable = new SchemaTableName(schemaViewName.getSchemaName(), storageTableName);
    List<ColumnMetadata> columns = definition.getColumns().stream()
            .map(column -> new ColumnMetadata(column.getName(), typeManager.getType(column.getType())))
            .collect(toImmutableList());
    ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(storageTable, columns, storageTableProperties, Optional.empty());
    Transaction transaction = IcebergUtil.newCreateTableTransaction(this, tableMetadata, session);
    transaction.newAppend().commit();
    transaction.commitTransaction();
    // Create a view indicating the storage table
    Map<String, String> viewProperties = ImmutableMap.<String, String>builder()
            .put(PRESTO_QUERY_ID_NAME, session.getQueryId())
            .put(STORAGE_TABLE, storageTableName)
            .put(PRESTO_VIEW_FLAG, "true")
            .put(TRINO_CREATED_BY, TRINO_CREATED_BY_VALUE)
            .put(TABLE_COMMENT, ICEBERG_MATERIALIZED_VIEW_COMMENT)
            .buildOrThrow();
    Column dummyColumn = new Column("dummy", HIVE_STRING, Optional.empty());
    io.trino.plugin.hive.metastore.Table.Builder tableBuilder = io.trino.plugin.hive.metastore.Table.builder()
            .setDatabaseName(schemaViewName.getSchemaName())
            .setTableName(schemaViewName.getTableName())
            .setOwner(isUsingSystemSecurity ? Optional.empty() : Optional.of(session.getUser()))
            .setTableType(VIRTUAL_VIEW.name())
            .setDataColumns(ImmutableList.of(dummyColumn))
            .setPartitionColumns(ImmutableList.of())
            .setParameters(viewProperties)
            .withStorage(storage -> storage.setStorageFormat(VIEW_STORAGE_FORMAT))
            .withStorage(storage -> storage.setLocation(""))
            .setViewOriginalText(Optional.of(encodeMaterializedViewData(fromConnectorMaterializedViewDefinition(definition))))
            .setViewExpandedText(Optional.of("/* Presto Materialized View */"));
    io.trino.plugin.hive.metastore.Table table = tableBuilder.build();
    PrincipalPrivileges principalPrivileges = isUsingSystemSecurity ? NO_PRIVILEGES : buildInitialPrivilegeSet(session.getUser());
    if (existing.isPresent() && replace) {
        // drop the current storage table
        String oldStorageTable = existing.get().getParameters().get(STORAGE_TABLE);
        if (oldStorageTable != null) {
            metastore.dropTable(schemaViewName.getSchemaName(), oldStorageTable, true);
        }
        // Replace the existing view definition
        metastore.replaceTable(schemaViewName.getSchemaName(), schemaViewName.getTableName(), table, principalPrivileges);
        return;
    }
    // create the view definition
    metastore.createTable(table, principalPrivileges);
}
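The view itself is persisted as a VIRTUAL_VIEW whose parameters flag it as a Trino/Presto view and record the name of the hidden Iceberg storage table; the replace branch above reads the STORAGE_TABLE parameter back from the existing entry in order to drop the old storage table first. Here is a small sketch of reading those parameters back; the key strings are assumptions standing in for the PRESTO_VIEW_FLAG and STORAGE_TABLE constants, and the class is illustrative rather than part of Trino:

import java.util.Map;
import java.util.Optional;

public class MaterializedViewParametersSketch {
    // Assumptions: illustrative values for the constants used in the snippet above
    private static final String PRESTO_VIEW_FLAG = "presto_view";
    private static final String STORAGE_TABLE = "storage_table";

    // Returns the backing storage table name when the parameters mark a materialized view
    static Optional<String> storageTableOf(Map<String, String> tableParameters) {
        if (!"true".equalsIgnoreCase(tableParameters.get(PRESTO_VIEW_FLAG))) {
            return Optional.empty();
        }
        return Optional.ofNullable(tableParameters.get(STORAGE_TABLE));
    }

    public static void main(String[] args) {
        System.out.println(storageTableOf(Map.of(PRESTO_VIEW_FLAG, "true", STORAGE_TABLE, "st_0a1b2c"))); // Optional[st_0a1b2c]
        System.out.println(storageTableOf(Map.of("comment", "a plain table")));                           // Optional.empty
    }
}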