Use of org.apache.iceberg.spark.PathIdentifier in project iceberg by apache.
In the class IcebergSource, method catalogAndIdentifier:
private Spark3Util.CatalogAndIdentifier catalogAndIdentifier(CaseInsensitiveStringMap options) {
  Preconditions.checkArgument(options.containsKey("path"), "Cannot open table: path is not set");
  SparkSession spark = SparkSession.active();
  setupDefaultSparkCatalog(spark);
  String path = options.get("path");

  Long snapshotId = propertyAsLong(options, SparkReadOptions.SNAPSHOT_ID);
  Long asOfTimestamp = propertyAsLong(options, SparkReadOptions.AS_OF_TIMESTAMP);
  Preconditions.checkArgument(asOfTimestamp == null || snapshotId == null,
      "Cannot specify both snapshot-id (%s) and as-of-timestamp (%s)", snapshotId, asOfTimestamp);

  String selector = null;
  if (snapshotId != null) {
    selector = SNAPSHOT_ID + snapshotId;
  }
  if (asOfTimestamp != null) {
    selector = AT_TIMESTAMP + asOfTimestamp;
  }

  CatalogManager catalogManager = spark.sessionState().catalogManager();

  if (path.contains("/")) {
    // contains a path. Return iceberg default catalog and a PathIdentifier
    String newPath = (selector == null) ? path : path + "#" + selector;
    return new Spark3Util.CatalogAndIdentifier(
        catalogManager.catalog(DEFAULT_CATALOG_NAME), new PathIdentifier(newPath));
  }

  final Spark3Util.CatalogAndIdentifier catalogAndIdentifier =
      Spark3Util.catalogAndIdentifier("path or identifier", spark, path);

  Identifier ident = identifierWithSelector(catalogAndIdentifier.identifier(), selector);
  if (catalogAndIdentifier.catalog().name().equals("spark_catalog") &&
      !(catalogAndIdentifier.catalog() instanceof SparkSessionCatalog)) {
    // catalog is a session catalog but does not support Iceberg. Use Iceberg instead.
    return new Spark3Util.CatalogAndIdentifier(catalogManager.catalog(DEFAULT_CATALOG_NAME), ident);
  } else {
    return new Spark3Util.CatalogAndIdentifier(catalogAndIdentifier.catalog(), ident);
  }
}
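For context, the PathIdentifier branch above is reached when a read supplies a filesystem path rather than a catalog identifier. A minimal sketch of such a read, assuming an active SparkSession named spark, the usual Spark SQL imports, and a purely illustrative table location and snapshot id:

// Hedged sketch, not taken from the Iceberg sources: the '/' in the load path
// makes catalogAndIdentifier() wrap it in a PathIdentifier bound to the default
// Iceberg catalog; the snapshot-id option becomes the selector appended to the
// path after '#'.
Dataset<Row> byPath = spark.read()
    .format("iceberg")
    .option(SparkReadOptions.SNAPSHOT_ID, 1234567890L)   // illustrative snapshot id
    .load("/tmp/warehouse/db/events");                    // illustrative table location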
Use of org.apache.iceberg.spark.PathIdentifier in project iceberg by apache.
In the class TestPathIdentifier, method before:
@Before
public void before() throws IOException {
  tableLocation = temp.newFolder();
  identifier = new PathIdentifier(tableLocation.getAbsolutePath());
  sparkCatalog = new SparkCatalog();
  sparkCatalog.initialize("test", new CaseInsensitiveStringMap(ImmutableMap.of()));
}
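The fixture above builds a PathIdentifier for a temporary folder and an initialized SparkCatalog. A hedged sketch of how a test could then exercise it, assuming SCHEMA is an org.apache.iceberg.Schema defined elsewhere plus the standard Spark connector imports (Table, Transform, StructType); this is not the actual test body:

// Sketch only: createTable/loadTable are Spark TableCatalog methods that
// SparkCatalog implements; both resolve the table from tableLocation because
// PathIdentifier carries a filesystem path instead of a catalog namespace.
@Test
public void createAndLoadByPathSketch() throws Exception {
  StructType sparkSchema = SparkSchemaUtil.convert(SCHEMA);  // SCHEMA is an assumed Iceberg schema
  Table created = sparkCatalog.createTable(identifier, sparkSchema, new Transform[0], ImmutableMap.of());
  Table loaded = sparkCatalog.loadTable(identifier);
  // created and loaded refer to the same table at tableLocation.
}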