Use of org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException in project flink by apache.
The class HiveCatalog, method getPartitionColumnStatistics: a TableNotExistException or PartitionSpecInvalidException raised while resolving the partition is rethrown as PartitionNotExistException.
@Override
public CatalogColumnStatistics getPartitionColumnStatistics(
        ObjectPath tablePath, CatalogPartitionSpec partitionSpec)
        throws PartitionNotExistException, CatalogException {
    try {
        Partition partition = getHivePartition(tablePath, partitionSpec);
        Table hiveTable = getHiveTable(tablePath);
        String partName = getEscapedPartitionName(tablePath, partitionSpec, hiveTable);
        List<String> partNames = new ArrayList<>();
        partNames.add(partName);
        Map<String, List<ColumnStatisticsObj>> partitionColumnStatistics =
                client.getPartitionColumnStatistics(
                        partition.getDbName(),
                        partition.getTableName(),
                        partNames,
                        getFieldNames(partition.getSd().getCols()));
        List<ColumnStatisticsObj> columnStatisticsObjs = partitionColumnStatistics.get(partName);
        if (columnStatisticsObjs != null && !columnStatisticsObjs.isEmpty()) {
            return new CatalogColumnStatistics(
                    HiveStatsUtil.createCatalogColumnStats(columnStatisticsObjs, hiveVersion));
        } else {
            return CatalogColumnStatistics.UNKNOWN;
        }
    } catch (TableNotExistException | PartitionSpecInvalidException e) {
        throw new PartitionNotExistException(getName(), tablePath, partitionSpec);
    } catch (TException e) {
        throw new CatalogException(
                String.format(
                        "Failed to get table stats of table %s 's partition %s",
                        tablePath.getFullName(), String.valueOf(partitionSpec)),
                e);
    }
}
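For context, a minimal sketch of reading these statistics through the public Catalog API. The catalog name, Hive conf directory, database, table, and partition values are illustrative assumptions, not part of the Flink sources above:

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.table.catalog.stats.CatalogColumnStatistics;

public class PartitionStatsExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical catalog name, default database, and Hive conf directory.
        HiveCatalog catalog = new HiveCatalog("myhive", "default", "/opt/hive/conf");
        catalog.open();
        try {
            ObjectPath tablePath = new ObjectPath("default", "orders");
            // Keys must match the table's partition columns; the values are examples.
            Map<String, String> spec = new LinkedHashMap<>();
            spec.put("dt", "2024-01-01");
            CatalogPartitionSpec partitionSpec = new CatalogPartitionSpec(spec);

            // Returns CatalogColumnStatistics.UNKNOWN when no stats are stored;
            // throws PartitionNotExistException if the spec is invalid or missing.
            CatalogColumnStatistics stats =
                    catalog.getPartitionColumnStatistics(tablePath, partitionSpec);
            stats.getColumnStatisticsData()
                    .forEach((col, data) -> System.out.println(col + " -> " + data));
        } finally {
            catalog.close();
        }
    }
}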
Use of org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException in project flink by apache.
The class HiveCatalog, method getPartition: partition-resolution failures, including an invalid partition spec, are rethrown as PartitionNotExistException.
@Override
public CatalogPartition getPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec)
        throws PartitionNotExistException, CatalogException {
    checkNotNull(tablePath, "Table path cannot be null");
    checkNotNull(partitionSpec, "CatalogPartitionSpec cannot be null");
    try {
        Partition hivePartition = getHivePartition(tablePath, partitionSpec);
        Map<String, String> properties = hivePartition.getParameters();
        properties.put(SqlCreateHiveTable.TABLE_LOCATION_URI, hivePartition.getSd().getLocation());
        String comment = properties.remove(HiveCatalogConfig.COMMENT);
        return new CatalogPartitionImpl(properties, comment);
    } catch (NoSuchObjectException
            | MetaException
            | TableNotExistException
            | PartitionSpecInvalidException e) {
        throw new PartitionNotExistException(getName(), tablePath, partitionSpec, e);
    } catch (TException e) {
        throw new CatalogException(
                String.format("Failed to get partition %s of table %s", partitionSpec, tablePath), e);
    }
}
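A small, hedged sketch of consuming getPartition from caller code; the class and method names here are made up for illustration, and only the Catalog API calls shown above are relied on:

import java.util.Map;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogPartition;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.PartitionNotExistException;

public class ReadPartitionExample {
    // Prints the comment and properties of one partition. HiveCatalog copies the
    // storage location into the returned properties map, as the method above shows.
    static void describePartition(Catalog catalog, ObjectPath tablePath, CatalogPartitionSpec spec)
            throws PartitionNotExistException {
        CatalogPartition partition = catalog.getPartition(tablePath, spec);
        Map<String, String> props = partition.getProperties();
        System.out.println("comment:    " + partition.getComment());
        System.out.println("properties: " + props);
    }
}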
Use of org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException in project flink by apache.
The class HiveCatalog, method instantiateHivePartition: this is where PartitionSpecInvalidException is actually thrown, when a partition value is null or whitespace-only.
private Partition instantiateHivePartition(
        Table hiveTable, CatalogPartitionSpec partitionSpec, CatalogPartition catalogPartition)
        throws PartitionSpecInvalidException {
    List<String> partCols = getFieldNames(hiveTable.getPartitionKeys());
    List<String> partValues =
            getOrderedFullPartitionValues(
                    partitionSpec,
                    partCols,
                    new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()));
    // validate partition values
    for (int i = 0; i < partCols.size(); i++) {
        if (isNullOrWhitespaceOnly(partValues.get(i))) {
            throw new PartitionSpecInvalidException(
                    getName(),
                    partCols,
                    new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()),
                    partitionSpec);
        }
    }
    // TODO: handle GenericCatalogPartition
    StorageDescriptor sd = hiveTable.getSd().deepCopy();
    sd.setLocation(catalogPartition.getProperties().remove(SqlCreateHiveTable.TABLE_LOCATION_URI));
    Map<String, String> properties = new HashMap<>(catalogPartition.getProperties());
    String comment = catalogPartition.getComment();
    if (comment != null) {
        properties.put(HiveCatalogConfig.COMMENT, comment);
    }
    return HiveTableUtil.createHivePartition(
            hiveTable.getDbName(), hiveTable.getTableName(), partValues, sd, properties);
}
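instantiateHivePartition is private; in HiveCatalog it is reached through createPartition, so a caller sees the exception roughly as sketched below. The table name, partition column, and helper class are illustrative assumptions:

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogPartitionImpl;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException;

public class CreatePartitionExample {
    static void createWithBlankValue(Catalog catalog) throws Exception {
        ObjectPath tablePath = new ObjectPath("default", "orders");
        // A whitespace-only value for a partition column fails the validation loop above.
        Map<String, String> spec = new HashMap<>();
        spec.put("dt", "  ");
        try {
            catalog.createPartition(
                    tablePath,
                    new CatalogPartitionSpec(spec),
                    new CatalogPartitionImpl(new HashMap<>(), null),
                    false);
        } catch (PartitionSpecInvalidException e) {
            System.out.println("Rejected spec: " + e.getMessage());
        }
    }
}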
Use of org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException in project flink by apache.
The class HiveCatalog, method alterPartitionColumnStatistics: an invalid partition spec is rethrown as PartitionNotExistException unless ignoreIfNotExists is set.
@Override
public void alterPartitionColumnStatistics(
        ObjectPath tablePath,
        CatalogPartitionSpec partitionSpec,
        CatalogColumnStatistics columnStatistics,
        boolean ignoreIfNotExists)
        throws PartitionNotExistException, CatalogException {
    try {
        Partition hivePartition = getHivePartition(tablePath, partitionSpec);
        Table hiveTable = getHiveTable(tablePath);
        String partName = getEscapedPartitionName(tablePath, partitionSpec, hiveTable);
        client.updatePartitionColumnStatistics(
                HiveStatsUtil.createPartitionColumnStats(
                        hivePartition, partName, columnStatistics.getColumnStatisticsData(), hiveVersion));
    } catch (TableNotExistException | PartitionSpecInvalidException e) {
        if (!ignoreIfNotExists) {
            throw new PartitionNotExistException(getName(), tablePath, partitionSpec, e);
        }
    } catch (TException e) {
        throw new CatalogException(
                String.format(
                        "Failed to alter table column stats of table %s 's partition %s",
                        tablePath.getFullName(), String.valueOf(partitionSpec)),
                e);
    }
}
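A hedged usage sketch for alterPartitionColumnStatistics; the column name and statistic values are invented for the example, and CatalogColumnStatisticsDataLong stands in for whatever column type the real table has:

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.PartitionNotExistException;
import org.apache.flink.table.catalog.stats.CatalogColumnStatistics;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataBase;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataLong;

public class AlterPartitionStatsExample {
    static void writeStats(Catalog catalog, ObjectPath tablePath, CatalogPartitionSpec spec)
            throws PartitionNotExistException {
        // Column name and numbers are made up: min, max, ndv, null count.
        Map<String, CatalogColumnStatisticsDataBase> columnStats = new HashMap<>();
        columnStats.put("order_id", new CatalogColumnStatisticsDataLong(1L, 1_000L, 950L, 0L));
        // With ignoreIfNotExists=true, a missing or invalid partition is silently skipped.
        catalog.alterPartitionColumnStatistics(
                tablePath, spec, new CatalogColumnStatistics(columnStats), true);
    }
}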
Use of org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException in project flink by apache.
The class HiveCatalog, method partitionExists: an invalid partition spec simply makes the method return false.
// ------ partitions ------
@Override
public boolean partitionExists(ObjectPath tablePath, CatalogPartitionSpec partitionSpec)
        throws CatalogException {
    checkNotNull(tablePath, "Table path cannot be null");
    checkNotNull(partitionSpec, "CatalogPartitionSpec cannot be null");
    try {
        return getHivePartition(tablePath, partitionSpec) != null;
    } catch (NoSuchObjectException | TableNotExistException | PartitionSpecInvalidException e) {
        return false;
    } catch (TException e) {
        throw new CatalogException(
                String.format("Failed to get partition %s of table %s", partitionSpec, tablePath), e);
    }
}
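A brief sketch of partitionExists from the caller's side; the partition column "dt" and the helper method are assumptions for illustration:

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectPath;

public class PartitionExistsExample {
    static boolean hasPartition(Catalog catalog, String db, String table, String dt) {
        Map<String, String> spec = new LinkedHashMap<>();
        spec.put("dt", dt);
        // Returns false (rather than throwing) for unknown tables, unknown partitions,
        // or specs that do not match the table's partition columns.
        return catalog.partitionExists(new ObjectPath(db, table), new CatalogPartitionSpec(spec));
    }
}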