Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveCatalog, method listPartitions.
@Override
public List<CatalogPartitionSpec> listPartitions(ObjectPath tablePath) throws TableNotExistException, TableNotPartitionedException, CatalogException {
    checkNotNull(tablePath, "Table path cannot be null");

    // Resolve the Hive table and make sure it is actually partitioned before
    // asking the metastore for partition names.
    Table hiveTable = getHiveTable(tablePath);
    ensurePartitionedTable(tablePath, hiveTable);

    try {
        // A max_parts value of -1 tells the metastore to return every partition.
        List<String> partitionNames = client.listPartitionNames(tablePath.getDatabaseName(), tablePath.getObjectName(), (short) -1);
        // Convert each raw partition-name string into a CatalogPartitionSpec.
        return partitionNames.stream().map(name -> createPartitionSpec(name)).collect(Collectors.toList());
    } catch (TException e) {
        // Metastore communication failures surface as unchecked CatalogException.
        throw new CatalogException(String.format("Failed to list partitions of table %s", tablePath), e);
    }
}
Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveCatalog, method alterTableViaProperties.
/**
 * Applies a single ALTER operation by mutating the given property map and/or
 * storage descriptor in place. When invoked for ALTER PARTITION, {@code hiveTable}
 * and {@code catalogTable} are null and only property/SD changes are allowed.
 */
private void alterTableViaProperties(AlterTableOp alterOp, Table hiveTable, CatalogTable catalogTable, Map<String, String> oldProps, Map<String, String> newProps, StorageDescriptor sd) {
    switch (alterOp) {
        case CHANGE_TBL_PROPS:
            // Merge the new properties over the existing ones.
            oldProps.putAll(newProps);
            break;
        case CHANGE_LOCATION:
            HiveTableUtil.extractLocation(sd, newProps);
            break;
        case CHANGE_FILE_FORMAT:
            // The requested format is carried as a property; consume it and
            // rewrite the storage descriptor accordingly.
            String fileFormat = newProps.remove(STORED_AS_FILE_FORMAT);
            HiveTableUtil.setStorageFormat(sd, fileFormat, hiveConf);
            break;
        case CHANGE_SERDE_PROPS:
            HiveTableUtil.extractRowFormat(sd, newProps);
            break;
        case ALTER_COLUMNS:
            // Column changes only make sense at table level, never per partition.
            if (hiveTable == null) {
                throw new CatalogException("ALTER COLUMNS cannot be done with ALTER PARTITION");
            }
            HiveTableUtil.alterColumns(hiveTable.getSd(), catalogTable);
            // Consume the CASCADE flag from the property map regardless of its value.
            if (Boolean.parseBoolean(newProps.remove(ALTER_COL_CASCADE))) {
                cascadeAlterColumns(hiveTable, catalogTable);
            }
            break;
        default:
            throw new CatalogException("Unsupported alter table operation " + alterOp);
    }
}

/** Propagates a column change to every partition of the (partitioned) table. */
private void cascadeAlterColumns(Table hiveTable, CatalogTable catalogTable) {
    if (!isTablePartitioned(hiveTable)) {
        throw new CatalogException("ALTER COLUMNS CASCADE for non-partitioned table");
    }
    try {
        ObjectPath path = new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName());
        for (CatalogPartitionSpec spec : listPartitions(path)) {
            Partition partition = getHivePartition(hiveTable, spec);
            HiveTableUtil.alterColumns(partition.getSd(), catalogTable);
            client.alter_partition(hiveTable.getDbName(), hiveTable.getTableName(), partition);
        }
    } catch (Exception e) {
        throw new CatalogException("Failed to cascade add/replace columns to partitions", e);
    }
}
Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveCatalog, method alterFunction.
@Override
public void alterFunction(ObjectPath functionPath, CatalogFunction newFunction, boolean ignoreIfNotExists) throws FunctionNotExistException, CatalogException {
    checkNotNull(functionPath, "functionPath cannot be null");
    checkNotNull(newFunction, "newFunction cannot be null");

    try {
        // Probe for existence first; getFunction throws FunctionNotExistException
        // if the function is absent, which we handle below.
        getFunction(functionPath);

        // Only the default CatalogFunctionImpl can be translated to a Hive Function.
        if (!(newFunction instanceof CatalogFunctionImpl)) {
            throw new CatalogException(String.format("Unsupported catalog function type %s", newFunction.getClass().getName()));
        }
        Function hiveFunction = instantiateHiveFunction(functionPath, newFunction);
        client.alterFunction(functionPath.getDatabaseName(), functionPath.getObjectName(), hiveFunction);
    } catch (FunctionNotExistException e) {
        if (ignoreIfNotExists) {
            return;
        }
        throw e;
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to alter function %s", functionPath.getFullName()), e);
    }
}
Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveCatalog, method alterPartition.
@Override
public void alterPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec, CatalogPartition newPartition, boolean ignoreIfNotExists) throws PartitionNotExistException, CatalogException {
    checkNotNull(tablePath, "Table path cannot be null");
    checkNotNull(partitionSpec, "CatalogPartitionSpec cannot be null");
    checkNotNull(newPartition, "New partition cannot be null");

    try {
        Table table = getHiveTable(tablePath);
        // Partition operations are only supported for genuine Hive tables,
        // not for generic tables stored through this catalog.
        if (!isHiveTable(table.getParameters())) {
            throw new CatalogException("Currently only supports partition for hive tables");
        }

        Partition existingPartition = getHivePartition(table, partitionSpec);
        if (existingPartition == null) {
            // Target partition absent: either succeed silently or report it.
            if (ignoreIfNotExists) {
                return;
            }
            throw new PartitionNotExistException(getName(), tablePath, partitionSpec);
        }

        // The concrete ALTER operation is encoded in the new partition's properties.
        AlterTableOp alterOp = HiveTableUtil.extractAlterTableOp(newPartition.getProperties());
        if (alterOp == null) {
            throw new CatalogException(ALTER_TABLE_OP + " is missing for alter table operation");
        }
        // Mutate the existing partition's parameters/SD in place, then persist.
        alterTableViaProperties(alterOp, null, null, existingPartition.getParameters(), newPartition.getProperties(), existingPartition.getSd());
        client.alter_partition(tablePath.getDatabaseName(), tablePath.getObjectName(), existingPartition);
    } catch (NoSuchObjectException e) {
        // The table (or partition) vanished; honor ignoreIfNotExists.
        if (!ignoreIfNotExists) {
            throw new PartitionNotExistException(getName(), tablePath, partitionSpec, e);
        }
    } catch (InvalidOperationException | MetaException | TableNotExistException | PartitionSpecInvalidException e) {
        throw new PartitionNotExistException(getName(), tablePath, partitionSpec, e);
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to alter existing partition with new partition %s of table %s", partitionSpec, tablePath), e);
    }
}
Use of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: class HiveCatalog, method renameTable.
@Override
public void renameTable(ObjectPath tablePath, String newTableName, boolean ignoreIfNotExists) throws TableNotExistException, TableAlreadyExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    checkArgument(!isNullOrWhitespaceOnly(newTableName), "newTableName cannot be null or empty");

    try {
        // Guard: source table must exist (checked explicitly rather than relying
        // on the metastore call to fail).
        if (!tableExists(tablePath)) {
            if (!ignoreIfNotExists) {
                throw new TableNotExistException(getName(), tablePath);
            }
            return;
        }

        // Guard: destination name must be free (also checked explicitly).
        ObjectPath targetPath = new ObjectPath(tablePath.getDatabaseName(), newTableName);
        if (tableExists(targetPath)) {
            throw new TableAlreadyExistException(getName(), targetPath);
        }

        // Rename by rewriting the table's name field and altering it in place.
        Table table = getHiveTable(tablePath);
        table.setTableName(newTableName);
        client.alter_table(tablePath.getDatabaseName(), tablePath.getObjectName(), table);
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to rename table %s", tablePath.getFullName()), e);
    }
}
Aggregations