Use of org.apache.iceberg.catalog.TableIdentifier in project presto by prestodb.
Class IcebergHadoopMetadata, method getSystemTable.
@Override
public Optional<SystemTable> getSystemTable(ConnectorSession session, SchemaTableName tableName) {
    IcebergTableName name = IcebergTableName.from(tableName.getTableName());
    TableIdentifier tableIdentifier = toIcebergTableIdentifier(tableName.getSchemaName(), name.getTableName());
    Table table;
    try {
        table = resourceFactory.getCatalog(session).loadTable(tableIdentifier);
    } catch (NoSuchTableException e) {
        return Optional.empty();
    }
    if (name.getSnapshotId().isPresent() && table.snapshot(name.getSnapshotId().get()) == null) {
        throw new PrestoException(ICEBERG_INVALID_SNAPSHOT_ID, format("Invalid snapshot [%s] for table: %s", name.getSnapshotId().get(), table));
    }
    return getIcebergSystemTable(tableName, table);
}
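For reference, the toIcebergTableIdentifier helper used above maps Presto's schema and table names onto an Iceberg TableIdentifier. A minimal sketch of such a helper, assuming it simply delegates to TableIdentifier.of (the real Presto implementation may differ):

import org.apache.iceberg.catalog.TableIdentifier;

// Hypothetical sketch: TableIdentifier.of(String...) treats all but the last
// argument as namespace levels, so this yields schemaName.tableName.
static TableIdentifier toIcebergTableIdentifier(String schemaName, String tableName) {
    return TableIdentifier.of(schemaName, tableName);
}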
Use of org.apache.iceberg.catalog.TableIdentifier in project presto by prestodb.
Class IcebergHadoopMetadata, method renameTable.
@Override
public void renameTable(ConnectorSession session, ConnectorTableHandle tableHandle, SchemaTableName newTable) {
    TableIdentifier from = toIcebergTableIdentifier(((IcebergTableHandle) tableHandle).getSchemaTableName());
    TableIdentifier to = toIcebergTableIdentifier(newTable);
    resourceFactory.getCatalog(session).renameTable(from, to);
}
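The rename goes straight through the Iceberg Catalog API. A hedged, standalone sketch of the same call against any Catalog implementation (the helper and its argument names are illustrative; note that some catalogs, such as HadoopCatalog, do not support rename and will throw):

import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.TableIdentifier;

// Illustrative helper: rename a table within one database via an Iceberg Catalog.
static void rename(Catalog catalog, String db, String oldName, String newName) {
    catalog.renameTable(TableIdentifier.of(db, oldName), TableIdentifier.of(db, newName));
}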
Use of org.apache.iceberg.catalog.TableIdentifier in project presto by prestodb.
Class IcebergHadoopMetadata, method addColumn.
@Override
public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column) {
    TableIdentifier tableIdentifier = toIcebergTableIdentifier(((IcebergTableHandle) tableHandle).getSchemaTableName());
    Table icebergTable = resourceFactory.getCatalog(session).loadTable(tableIdentifier);
    icebergTable.updateSchema().addColumn(column.getName(), toIcebergType(column.getType())).commit();
}
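The addColumn call above uses Iceberg's UpdateSchema builder: the change is staged and only applied when commit() is called. A hedged sketch of the same pattern with an explicit Iceberg type (the helper is illustrative):

import org.apache.iceberg.Table;
import org.apache.iceberg.types.Types;

// Illustrative helper: add an optional string column to a loaded Iceberg table.
static void addStringColumn(Table table, String columnName) {
    table.updateSchema()
            .addColumn(columnName, Types.StringType.get())
            .commit();
}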
Use of org.apache.iceberg.catalog.TableIdentifier in project presto by prestodb.
Class IcebergHadoopMetadata, method dropTable.
@Override
public void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle) {
    TableIdentifier tableIdentifier = toIcebergTableIdentifier(((IcebergTableHandle) tableHandle).getSchemaTableName());
    resourceFactory.getCatalog(session).dropTable(tableIdentifier);
}
Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.
Class HiveIcebergMetaHook, method getCatalogProperties.
/**
 * Calculates the properties we would like to send to the catalog.
 * <ul>
 * <li>The base of the properties is the properties stored at the Hive Metastore for the given table
 * <li>We add the {@link Catalogs#LOCATION} as the table location
 * <li>We add the {@link Catalogs#NAME} as TableIdentifier defined by the database name and table name
 * <li>We add the serdeProperties of the HMS table
 * <li>We remove some parameters that we don't want to push down to the Iceberg table props
 * </ul>
 * @param hmsTable Table for which we are calculating the properties
 * @return The properties we can provide for Iceberg functions, like {@link Catalogs}
 */
private static Properties getCatalogProperties(org.apache.hadoop.hive.metastore.api.Table hmsTable) {
    Properties properties = new Properties();
    hmsTable.getParameters().entrySet().stream()
            .filter(e -> e.getKey() != null && e.getValue() != null)
            .forEach(e -> {
                // translate key names between HMS and Iceberg where needed
                String icebergKey = HiveTableOperations.translateToIcebergProp(e.getKey());
                properties.put(icebergKey, e.getValue());
            });
    if (properties.get(Catalogs.LOCATION) == null && hmsTable.getSd() != null && hmsTable.getSd().getLocation() != null) {
        properties.put(Catalogs.LOCATION, hmsTable.getSd().getLocation());
    }
    if (properties.get(Catalogs.NAME) == null) {
        properties.put(Catalogs.NAME, TableIdentifier.of(hmsTable.getDbName(), hmsTable.getTableName()).toString());
    }
    SerDeInfo serdeInfo = hmsTable.getSd().getSerdeInfo();
    if (serdeInfo != null) {
        serdeInfo.getParameters().entrySet().stream()
                .filter(e -> e.getKey() != null && e.getValue() != null)
                .forEach(e -> {
                    String icebergKey = HiveTableOperations.translateToIcebergProp(e.getKey());
                    properties.put(icebergKey, e.getValue());
                });
    }
    // Remove HMS table parameters we don't want to propagate to Iceberg
    PROPERTIES_TO_REMOVE.forEach(properties::remove);
    return properties;
}
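The Properties assembled here are the input expected by the Catalogs utility referenced in the javadoc, with Catalogs.NAME carrying the stringified TableIdentifier. A hedged sketch of building and using such a properties object directly (the database, table, and location values are made up):

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.mr.Catalogs;

// Illustrative usage: identifier and location values are placeholders.
static Table loadWithCatalogProperties(Configuration conf) {
    Properties props = new Properties();
    props.put(Catalogs.NAME, TableIdentifier.of("default", "orders").toString());
    props.put(Catalogs.LOCATION, "hdfs:///warehouse/default/orders");
    return Catalogs.loadTable(conf, props);
}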