Example usage of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: the HiveCatalog#partitionExists method.
// ------ partitions ------

/**
 * Checks whether a partition exists in the given table.
 *
 * @param tablePath path of the table whose partition is checked
 * @param partitionSpec the partition spec to look up
 * @return true if the partition can be resolved in the Hive metastore
 * @throws CatalogException if the metastore call fails for any other reason
 */
@Override
public boolean partitionExists(ObjectPath tablePath, CatalogPartitionSpec partitionSpec) throws CatalogException {
    checkNotNull(tablePath, "Table path cannot be null");
    checkNotNull(partitionSpec, "CatalogPartitionSpec cannot be null");
    try {
        // A resolvable, non-null Hive partition means the partition exists.
        Partition partition = getHivePartition(tablePath, partitionSpec);
        return partition != null;
    } catch (NoSuchObjectException | TableNotExistException | PartitionSpecInvalidException e) {
        // A missing table/partition or an unresolvable spec simply means "does not exist".
        return false;
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to get partition %s of table %s", partitionSpec, tablePath), e);
    }
}
Example usage of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: the HiveCatalog#createFunction method.
// ------ functions ------

/**
 * Creates a function in the Hive metastore.
 *
 * @param functionPath path identifying the function (database + name)
 * @param function the catalog function to create; must be a {@link CatalogFunctionImpl}
 * @param ignoreIfExists if true, silently return when the function already exists
 * @throws FunctionAlreadyExistException if the function exists and ignoreIfExists is false
 * @throws DatabaseNotExistException if the target database does not exist
 * @throws CatalogException for unsupported function types or other metastore failures
 */
@Override
public void createFunction(ObjectPath functionPath, CatalogFunction function, boolean ignoreIfExists) throws FunctionAlreadyExistException, DatabaseNotExistException, CatalogException {
    checkNotNull(functionPath, "functionPath cannot be null");
    checkNotNull(function, "function cannot be null");
    // Guard clause: only CatalogFunctionImpl can be translated into a Hive Function.
    if (!(function instanceof CatalogFunctionImpl)) {
        throw new CatalogException(String.format("Unsupported catalog function type %s", function.getClass().getName()));
    }
    Function hiveFunction = instantiateHiveFunction(functionPath, function);
    try {
        client.createFunction(hiveFunction);
    } catch (NoSuchObjectException e) {
        // The metastore reports a missing database as NoSuchObjectException here.
        throw new DatabaseNotExistException(getName(), functionPath.getDatabaseName(), e);
    } catch (AlreadyExistsException e) {
        if (!ignoreIfExists) {
            throw new FunctionAlreadyExistException(getName(), functionPath, e);
        }
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to create function %s", functionPath.getFullName()), e);
    }
}
Example usage of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: the HiveCatalog#alterPartitionStatistics method.
/**
 * Updates the table statistics stored on a partition's parameters.
 *
 * @param tablePath path of the table owning the partition
 * @param partitionSpec spec of the partition whose stats are altered
 * @param partitionStatistics the new statistics to write
 * @param ignoreIfNotExists if true, do nothing when the partition does not exist
 * @throws PartitionNotExistException if the partition is missing and ignoreIfNotExists is false
 * @throws CatalogException for any other metastore failure
 */
@Override
public void alterPartitionStatistics(ObjectPath tablePath, CatalogPartitionSpec partitionSpec, CatalogTableStatistics partitionStatistics, boolean ignoreIfNotExists) throws PartitionNotExistException, CatalogException {
    // Validate arguments up front, consistent with the other partition methods.
    checkNotNull(tablePath, "Table path cannot be null");
    checkNotNull(partitionSpec, "CatalogPartitionSpec cannot be null");
    try {
        Partition hivePartition = getHivePartition(tablePath, partitionSpec);
        // Only round-trip to the metastore when the stats actually changed.
        if (statsChanged(partitionStatistics, hivePartition.getParameters())) {
            updateStats(partitionStatistics, hivePartition.getParameters());
            client.alter_partition(tablePath.getDatabaseName(), tablePath.getObjectName(), hivePartition);
        }
    } catch (TableNotExistException | PartitionSpecInvalidException e) {
        // Fix: honor ignoreIfNotExists — previously this threw unconditionally,
        // ignoring the parameter (dropPartition gates on the same flag).
        if (!ignoreIfNotExists) {
            throw new PartitionNotExistException(getName(), tablePath, partitionSpec, e);
        }
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to alter table stats of table %s 's partition %s", tablePath.getFullName(), String.valueOf(partitionSpec)), e);
    }
}
Example usage of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: the HiveCatalog#dropPartition method.
/**
 * Drops a partition from the given table, deleting its data.
 *
 * @param tablePath path of the table owning the partition
 * @param partitionSpec spec of the partition to drop
 * @param ignoreIfNotExists if true, do nothing when the partition does not exist
 * @throws PartitionNotExistException if the partition is missing and ignoreIfNotExists is false,
 *     or if the table itself is missing or the spec is invalid
 * @throws CatalogException for any other metastore failure
 */
@Override
public void dropPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec, boolean ignoreIfNotExists) throws PartitionNotExistException, CatalogException {
    checkNotNull(tablePath, "Table path cannot be null");
    checkNotNull(partitionSpec, "CatalogPartitionSpec cannot be null");
    try {
        Table hiveTable = getHiveTable(tablePath);
        // The trailing 'true' asks the metastore to delete the partition's data as well.
        client.dropPartition(tablePath.getDatabaseName(), tablePath.getObjectName(), getOrderedFullPartitionValues(partitionSpec, getFieldNames(hiveTable.getPartitionKeys()), tablePath), true);
    } catch (NoSuchObjectException e) {
        if (!ignoreIfNotExists) {
            throw new PartitionNotExistException(getName(), tablePath, partitionSpec, e);
        }
    } catch (MetaException | TableNotExistException | PartitionSpecInvalidException e) {
        throw new PartitionNotExistException(getName(), tablePath, partitionSpec, e);
    } catch (TException e) {
        // Fix: chain the underlying TException as the cause — it was previously
        // dropped, losing the root-cause stack trace (all sibling handlers chain it).
        throw new CatalogException(String.format("Failed to drop partition %s of table %s", partitionSpec, tablePath), e);
    }
}
Example usage of org.apache.flink.table.catalog.exceptions.CatalogException in the Apache Flink project: the HiveShimV110#getFieldsFromDeserializer method.
/**
 * Retrieves a table's field schemas via its deserializer, using reflection so the
 * same code works against this Hive version's MetaStoreUtils signatures.
 *
 * @param conf Hadoop configuration used to instantiate the deserializer
 * @param table the Hive table whose schema is read
 * @param skipConfError whether deserializer configuration errors should be skipped
 * @return the field schemas reported by the table's deserializer
 * @throws CatalogException if reflection or the deserializer call fails
 */
@Override
public List<FieldSchema> getFieldsFromDeserializer(Configuration conf, Table table, boolean skipConfError) {
    try {
        // Resolve both MetaStoreUtils entry points reflectively.
        Method getDeserializer = getHiveMetaStoreUtilsClass().getMethod("getDeserializer", Configuration.class, Table.class, boolean.class);
        Method getFields = getHiveMetaStoreUtilsClass().getMethod("getFieldsFromDeserializer", String.class, Deserializer.class);
        // Both are static methods, hence the null receiver.
        Deserializer deserializer = (Deserializer) getDeserializer.invoke(null, conf, table, skipConfError);
        @SuppressWarnings("unchecked")
        List<FieldSchema> fields = (List<FieldSchema>) getFields.invoke(null, table.getTableName(), deserializer);
        return fields;
    } catch (Exception e) {
        throw new CatalogException("Failed to get table schema from deserializer", e);
    }
}
Aggregations