
Example 1 with TableChange

Use of org.apache.spark.sql.connector.catalog.TableChange in the apache/iceberg project.

From the class SparkCatalog, method alterTable:

@Override
public SparkTable alterTable(Identifier ident, TableChange... changes) throws NoSuchTableException {
    // Location, current-snapshot-id, and cherry-pick-snapshot-id are handled specially;
    // everything else is sorted into plain property changes or schema changes.
    SetProperty setLocation = null;
    SetProperty setSnapshotId = null;
    SetProperty pickSnapshotId = null;
    List<TableChange> propertyChanges = Lists.newArrayList();
    List<TableChange> schemaChanges = Lists.newArrayList();
    for (TableChange change : changes) {
        if (change instanceof SetProperty) {
            SetProperty set = (SetProperty) change;
            if (TableCatalog.PROP_LOCATION.equalsIgnoreCase(set.property())) {
                setLocation = set;
            } else if ("current-snapshot-id".equalsIgnoreCase(set.property())) {
                setSnapshotId = set;
            } else if ("cherry-pick-snapshot-id".equalsIgnoreCase(set.property())) {
                pickSnapshotId = set;
            } else if ("sort-order".equalsIgnoreCase(set.property())) {
                throw new UnsupportedOperationException("Cannot specify the 'sort-order' because it's a reserved table " + "property. Please use the command 'ALTER TABLE ... WRITE ORDERED BY' to specify write sort-orders.");
            } else {
                propertyChanges.add(set);
            }
        } else if (change instanceof RemoveProperty) {
            propertyChanges.add(change);
        } else if (change instanceof ColumnChange) {
            schemaChanges.add(change);
        } else {
            throw new UnsupportedOperationException("Cannot apply unknown table change: " + change);
        }
    }
    try {
        // Load the Iceberg table and commit all collected changes together.
        Table table = load(ident).first();
        commitChanges(table, setLocation, setSnapshotId, pickSnapshotId, propertyChanges, schemaChanges);
        return new SparkTable(table, true);
    } catch (org.apache.iceberg.exceptions.NoSuchTableException e) {
        // Translate Iceberg's exception into Spark's NoSuchTableException.
        throw new NoSuchTableException(ident);
    }
}
Also used:
TableChange (org.apache.spark.sql.connector.catalog.TableChange)
SetProperty (org.apache.spark.sql.connector.catalog.TableChange.SetProperty)
RemoveProperty (org.apache.spark.sql.connector.catalog.TableChange.RemoveProperty)
ColumnChange (org.apache.spark.sql.connector.catalog.TableChange.ColumnChange)
NoSuchTableException (org.apache.spark.sql.catalyst.analysis.NoSuchTableException)
Table (org.apache.iceberg.Table)
SparkTable (org.apache.iceberg.spark.source.SparkTable)
StagedSparkTable (org.apache.iceberg.spark.source.StagedSparkTable)
StagedTable (org.apache.spark.sql.connector.catalog.StagedTable)
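For orientation, here is a minimal caller-side sketch of driving the alterTable method above through Spark's DataSourceV2 TableChange factory methods. The identifier, property names, and the pre-configured SparkCatalog instance are illustrative assumptions, not part of the example above.

import org.apache.iceberg.spark.SparkCatalog;
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException;
import org.apache.spark.sql.connector.catalog.Identifier;
import org.apache.spark.sql.connector.catalog.TableChange;

public class AlterTableSketch {

    // Assumes 'catalog' was created and configured elsewhere (hypothetical setup).
    static SparkTable setRetention(SparkCatalog catalog) throws NoSuchTableException {
        // Hypothetical identifier; the table db.events must already exist in the catalog.
        Identifier ident = Identifier.of(new String[] {"db"}, "events");

        // Build DSv2 change objects with TableChange's static factory methods.
        TableChange setTtl = TableChange.setProperty("history.expire.max-snapshot-age-ms", "432000000");
        TableChange dropOld = TableChange.removeProperty("obsolete-key");

        // alterTable routes each change to property or schema handling as shown above.
        return catalog.alterTable(ident, setTtl, dropOld);
    }
}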

Example 2 with TableChange

Use of org.apache.spark.sql.connector.catalog.TableChange in the apache/iceberg project.

From the class Spark3Util, method applyPropertyChanges:

/**
 * Applies a list of Spark table changes to an {@link UpdateProperties} operation.
 *
 * @param pendingUpdate an uncommitted UpdateProperties operation to configure
 * @param changes a list of Spark table changes
 * @return the UpdateProperties operation configured with the changes
 */
public static UpdateProperties applyPropertyChanges(UpdateProperties pendingUpdate, List<TableChange> changes) {
    for (TableChange change : changes) {
        if (change instanceof TableChange.SetProperty) {
            TableChange.SetProperty set = (TableChange.SetProperty) change;
            pendingUpdate.set(set.property(), set.value());
        } else if (change instanceof TableChange.RemoveProperty) {
            TableChange.RemoveProperty remove = (TableChange.RemoveProperty) change;
            pendingUpdate.remove(remove.property());
        } else {
            throw new UnsupportedOperationException("Cannot apply unknown table change: " + change);
        }
    }
    return pendingUpdate;
}
Also used:
TableChange (org.apache.spark.sql.connector.catalog.TableChange)
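A minimal usage sketch for this helper, assuming an already-loaded Iceberg Table; the enclosing class, method name, and property values are illustrative.

import java.util.Arrays;
import java.util.List;
import org.apache.iceberg.Table;
import org.apache.iceberg.UpdateProperties;
import org.apache.iceberg.spark.Spark3Util;
import org.apache.spark.sql.connector.catalog.TableChange;

public class PropertyChangeSketch {

    // 'table' is assumed to be an Iceberg table that was loaded elsewhere.
    static void updateProps(Table table) {
        List<TableChange> changes = Arrays.asList(
            TableChange.setProperty("commit.retry.num-retries", "8"),  // illustrative property
            TableChange.removeProperty("obsolete-key"));

        // Configure the uncommitted UpdateProperties, then commit it as one operation.
        UpdateProperties pending = Spark3Util.applyPropertyChanges(table.updateProperties(), changes);
        pending.commit();
    }
}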

Example 3 with TableChange

Use of org.apache.spark.sql.connector.catalog.TableChange in the apache/iceberg project.

From the class Spark3Util, method applySchemaChanges:

/**
 * Applies a list of Spark table changes to an {@link UpdateSchema} operation.
 *
 * @param pendingUpdate an uncommitted UpdateSchema operation to configure
 * @param changes a list of Spark table changes
 * @return the UpdateSchema operation configured with the changes
 */
public static UpdateSchema applySchemaChanges(UpdateSchema pendingUpdate, List<TableChange> changes) {
    for (TableChange change : changes) {
        if (change instanceof TableChange.AddColumn) {
            apply(pendingUpdate, (TableChange.AddColumn) change);
        } else if (change instanceof TableChange.UpdateColumnType) {
            TableChange.UpdateColumnType update = (TableChange.UpdateColumnType) change;
            Type newType = SparkSchemaUtil.convert(update.newDataType());
            Preconditions.checkArgument(newType.isPrimitiveType(), "Cannot update '%s', not a primitive type: %s", DOT.join(update.fieldNames()), update.newDataType());
            pendingUpdate.updateColumn(DOT.join(update.fieldNames()), newType.asPrimitiveType());
        } else if (change instanceof TableChange.UpdateColumnComment) {
            TableChange.UpdateColumnComment update = (TableChange.UpdateColumnComment) change;
            pendingUpdate.updateColumnDoc(DOT.join(update.fieldNames()), update.newComment());
        } else if (change instanceof TableChange.RenameColumn) {
            TableChange.RenameColumn rename = (TableChange.RenameColumn) change;
            pendingUpdate.renameColumn(DOT.join(rename.fieldNames()), rename.newName());
        } else if (change instanceof TableChange.DeleteColumn) {
            TableChange.DeleteColumn delete = (TableChange.DeleteColumn) change;
            pendingUpdate.deleteColumn(DOT.join(delete.fieldNames()));
        } else if (change instanceof TableChange.UpdateColumnNullability) {
            TableChange.UpdateColumnNullability update = (TableChange.UpdateColumnNullability) change;
            if (update.nullable()) {
                pendingUpdate.makeColumnOptional(DOT.join(update.fieldNames()));
            } else {
                pendingUpdate.requireColumn(DOT.join(update.fieldNames()));
            }
        } else if (change instanceof TableChange.UpdateColumnPosition) {
            apply(pendingUpdate, (TableChange.UpdateColumnPosition) change);
        } else {
            throw new UnsupportedOperationException("Cannot apply unknown table change: " + change);
        }
    }
    return pendingUpdate;
}
Also used:
TableChange (org.apache.spark.sql.connector.catalog.TableChange)
Type (org.apache.iceberg.types.Type)
MetadataTableType (org.apache.iceberg.MetadataTableType)
StructType (org.apache.spark.sql.types.StructType)
IntegerType (org.apache.spark.sql.types.IntegerType)
LongType (org.apache.spark.sql.types.LongType)
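A minimal usage sketch for applySchemaChanges, assuming an already-loaded Iceberg Table that has an event_ts column; the column names and enclosing class are illustrative.

import java.util.Arrays;
import java.util.List;
import org.apache.iceberg.Table;
import org.apache.iceberg.UpdateSchema;
import org.apache.iceberg.spark.Spark3Util;
import org.apache.spark.sql.connector.catalog.TableChange;
import org.apache.spark.sql.types.DataTypes;

public class SchemaChangeSketch {

    // 'table' is assumed to be an Iceberg table that was loaded elsewhere.
    static void evolveSchema(Table table) {
        List<TableChange> changes = Arrays.asList(
            // Add a new optional string column at the end of the schema.
            TableChange.addColumn(new String[] {"trace_id"}, DataTypes.StringType),
            // Rename an existing top-level column.
            TableChange.renameColumn(new String[] {"event_ts"}, "event_time"));

        // Configure the pending UpdateSchema, then commit all schema changes atomically.
        UpdateSchema pending = Spark3Util.applySchemaChanges(table.updateSchema(), changes);
        pending.commit();
    }
}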

Aggregations

TableChange (org.apache.spark.sql.connector.catalog.TableChange): 3
MetadataTableType (org.apache.iceberg.MetadataTableType): 1
Table (org.apache.iceberg.Table): 1
SparkTable (org.apache.iceberg.spark.source.SparkTable): 1
StagedSparkTable (org.apache.iceberg.spark.source.StagedSparkTable): 1
Type (org.apache.iceberg.types.Type): 1
NoSuchTableException (org.apache.spark.sql.catalyst.analysis.NoSuchTableException): 1
StagedTable (org.apache.spark.sql.connector.catalog.StagedTable): 1
ColumnChange (org.apache.spark.sql.connector.catalog.TableChange.ColumnChange): 1
RemoveProperty (org.apache.spark.sql.connector.catalog.TableChange.RemoveProperty): 1
SetProperty (org.apache.spark.sql.connector.catalog.TableChange.SetProperty): 1
IntegerType (org.apache.spark.sql.types.IntegerType): 1
LongType (org.apache.spark.sql.types.LongType): 1
StructType (org.apache.spark.sql.types.StructType): 1