Use of org.apache.spark.sql.connector.catalog.CatalogPlugin in project Iceberg by Apache.
The class Spark3Util, method catalogAndIdentifier.
/**
 * A modified version of Spark's LookupCatalog.CatalogAndIdentifier.unapply.
 * Attempts to find the catalog and identifier that a multipart identifier represents.
 *
 * @param spark Spark session to use for resolution
 * @param nameParts multipart identifier representing a table
 * @param defaultCatalog catalog to use if none is specified
 * @return the CatalogPlugin and Identifier for the table
 */
public static CatalogAndIdentifier catalogAndIdentifier(SparkSession spark, List<String> nameParts,
                                                        CatalogPlugin defaultCatalog) {
  CatalogManager catalogManager = spark.sessionState().catalogManager();

  String[] currentNamespace;
  if (defaultCatalog.equals(catalogManager.currentCatalog())) {
    currentNamespace = catalogManager.currentNamespace();
  } else {
    currentNamespace = defaultCatalog.defaultNamespace();
  }

  Pair<CatalogPlugin, Identifier> catalogIdentifier = SparkUtil.catalogAndIdentifier(nameParts,
      catalogName -> {
        try {
          return catalogManager.catalog(catalogName);
        } catch (Exception e) {
          return null;
        }
      },
      Identifier::of,
      defaultCatalog,
      currentNamespace);

  return new CatalogAndIdentifier(catalogIdentifier);
}
Use of org.apache.spark.sql.connector.catalog.CatalogPlugin in project Iceberg by Apache.
The class BaseSnapshotTableSparkAction, method as.
@Override
public SnapshotTable as(String ident) {
  String ctx = "snapshot destination";
  CatalogPlugin defaultCatalog = spark().sessionState().catalogManager().currentCatalog();
  CatalogAndIdentifier catalogAndIdent = Spark3Util.catalogAndIdentifier(ctx, spark(), ident, defaultCatalog);
  this.destCatalog = checkDestinationCatalog(catalogAndIdent.catalog());
  this.destTableIdent = catalogAndIdent.identifier();
  return this;
}
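In practice the as(...) setter is reached through the SnapshotTable action chain; a hedged sketch, with both table identifiers as placeholders:

import org.apache.iceberg.spark.actions.SparkActions;
import org.apache.spark.sql.SparkSession;

SparkSession spark = SparkSession.active();
// Snapshot an existing Spark table into a new Iceberg table at the given destination.
SparkActions.get(spark)
    .snapshotTable("spark_catalog.db.source_table")   // placeholder source identifier
    .as("spark_catalog.db.iceberg_snapshot")          // placeholder destination identifier
    .execute();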
Use of org.apache.spark.sql.connector.catalog.CatalogPlugin in project Iceberg by Apache.
The class SparkActions, method migrateTable.
@Override
public MigrateTable migrateTable(String tableIdent) {
  String ctx = "migrate target";
  CatalogPlugin defaultCatalog = spark().sessionState().catalogManager().currentCatalog();
  CatalogAndIdentifier catalogAndIdent = Spark3Util.catalogAndIdentifier(ctx, spark(), tableIdent, defaultCatalog);
  return new BaseMigrateTableSparkAction(spark(), catalogAndIdent.catalog(), catalogAndIdent.identifier());
}
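A hedged sketch of calling this entry point; the table identifier is a placeholder:

import org.apache.iceberg.actions.MigrateTable;
import org.apache.iceberg.spark.actions.SparkActions;
import org.apache.spark.sql.SparkSession;

SparkSession spark = SparkSession.active();
// Migrate an existing Spark table in place to the Iceberg format.
MigrateTable.Result result = SparkActions.get(spark)
    .migrateTable("spark_catalog.db.source_table")   // placeholder identifier
    .execute();
long migratedFiles = result.migratedDataFilesCount();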
Use of org.apache.spark.sql.connector.catalog.CatalogPlugin in project Iceberg by Apache.
The class SparkActions, method snapshotTable.
@Override
public SnapshotTable snapshotTable(String tableIdent) {
  String ctx = "snapshot source";
  CatalogPlugin defaultCatalog = spark().sessionState().catalogManager().currentCatalog();
  CatalogAndIdentifier catalogAndIdent = Spark3Util.catalogAndIdentifier(ctx, spark(), tableIdent, defaultCatalog);
  return new BaseSnapshotTableSparkAction(spark(), catalogAndIdent.catalog(), catalogAndIdent.identifier());
}
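A hedged sketch of the corresponding call, paired with the as(...) setter shown earlier and capturing the action result; identifiers are placeholders:

import org.apache.iceberg.actions.SnapshotTable;
import org.apache.iceberg.spark.actions.SparkActions;
import org.apache.spark.sql.SparkSession;

SparkSession spark = SparkSession.active();
// Create an Iceberg snapshot of a Spark table and inspect how many data files were imported.
SnapshotTable.Result result = SparkActions.get(spark)
    .snapshotTable("spark_catalog.db.source_table")   // placeholder source identifier
    .as("spark_catalog.db.iceberg_snapshot")          // placeholder destination identifier
    .execute();
long importedFiles = result.importedDataFilesCount();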
Use of org.apache.spark.sql.connector.catalog.CatalogPlugin in project Iceberg by Apache.
The class IcebergSource, method getTable.
@Override
public Table getTable(StructType schema, Transform[] partitioning, Map<String, String> options) {
  Spark3Util.CatalogAndIdentifier catalogIdentifier = catalogAndIdentifier(new CaseInsensitiveStringMap(options));
  CatalogPlugin catalog = catalogIdentifier.catalog();
  Identifier ident = catalogIdentifier.identifier();

  try {
    if (catalog instanceof TableCatalog) {
      return ((TableCatalog) catalog).loadTable(ident);
    }
  } catch (NoSuchTableException e) {
    // throwing an Iceberg NoSuchTableException because the Spark one is checked and can't be thrown from this interface
    throw new org.apache.iceberg.exceptions.NoSuchTableException(e, "Cannot find table for %s.", ident);
  }

  // throwing an Iceberg NoSuchTableException because the Spark one is checked and can't be thrown from this interface
  throw new org.apache.iceberg.exceptions.NoSuchTableException("Cannot find table for %s.", ident);
}
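As a usage sketch, this path is normally reached through the DataFrame reader; the table name below is a placeholder:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

SparkSession spark = SparkSession.active();
// format("iceberg") routes table resolution through IcebergSource.getTable shown above.
Dataset<Row> df = spark.read()
    .format("iceberg")
    .load("db.table");   // placeholder multipart table name
df.show();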