Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.
In the class CatalogStatisticsTest, the method alterTableStatisticsWithUnknownRowCount:
private void alterTableStatisticsWithUnknownRowCount(Catalog catalog, String tableName)
        throws TableNotExistException, TablePartitionedException {
    catalog.alterTableStatistics(
            new ObjectPath(databaseName, tableName),
            new CatalogTableStatistics(CatalogTableStatistics.UNKNOWN.getRowCount(), 1, 10000, 200000),
            true);
    catalog.alterTableColumnStatistics(new ObjectPath(databaseName, tableName), createColumnStats(), true);
}
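The helper writes table statistics whose row count is the CatalogTableStatistics.UNKNOWN sentinel while the file count and size estimates stay concrete; createColumnStats() is a further helper of the same test and is not shown here. Below is a minimal sketch, assuming the same catalog and databaseName fields are in scope and using a hypothetical helper name, of reading those statistics back through the generic Catalog API:

private void readBackStatistics(Catalog catalog, String tableName) throws Exception {
    // Hypothetical helper for illustration only; not part of CatalogStatisticsTest.
    ObjectPath tablePath = new ObjectPath(databaseName, tableName);
    // Table-level statistics: the row count reflects the UNKNOWN sentinel written above.
    CatalogTableStatistics tableStats = catalog.getTableStatistics(tablePath);
    System.out.println("row count: " + tableStats.getRowCount());
    // Column-level statistics, as written by alterTableColumnStatistics(...).
    CatalogColumnStatistics colStats = catalog.getTableColumnStatistics(tablePath);
    System.out.println("columns with stats: " + colStats.getColumnStatisticsData().keySet());
}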
Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.
In the class CatalogStatisticsTest, the method createPartitionColumnStats:
private void createPartitionColumnStats(String part1, int part2, boolean unknown) throws Exception {
    ObjectPath path = ObjectPath.fromString("default_database.PartT");
    LinkedHashMap<String, String> partSpecMap = new LinkedHashMap<>();
    partSpecMap.put("part1", part1);
    partSpecMap.put("part2", String.valueOf(part2));
    CatalogPartitionSpec partSpec = new CatalogPartitionSpec(partSpecMap);
    CatalogColumnStatisticsDataLong longColStats =
            new CatalogColumnStatisticsDataLong(-123L, 763322L, 23L, 77L);
    CatalogColumnStatisticsDataString stringColStats =
            new CatalogColumnStatisticsDataString(152L, 43.5D, 20L, 0L);
    Map<String, CatalogColumnStatisticsDataBase> colStatsMap = new HashMap<>();
    colStatsMap.put("id", unknown ? new CatalogColumnStatisticsDataLong(null, null, null, null) : longColStats);
    colStatsMap.put("name", unknown ? new CatalogColumnStatisticsDataString(null, null, null, null) : stringColStats);
    catalog.alterPartitionColumnStatistics(path, partSpec, new CatalogColumnStatistics(colStatsMap), true);
}
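The LinkedHashMap keeps the partition keys in order (part1, part2) when the CatalogPartitionSpec is built, and the unknown flag swaps in statistics objects whose fields are all null. A minimal sketch, assuming the same catalog field is in scope and using made-up partition values, of reading the per-partition column statistics back for one spec:

// Illustrative read-back; the partition values "A" and "1" are placeholders.
ObjectPath path = ObjectPath.fromString("default_database.PartT");
LinkedHashMap<String, String> partSpecMap = new LinkedHashMap<>();
partSpecMap.put("part1", "A");
partSpecMap.put("part2", "1");
CatalogPartitionSpec partSpec = new CatalogPartitionSpec(partSpecMap);
CatalogColumnStatistics colStats = catalog.getPartitionColumnStatistics(path, partSpec);
System.out.println(colStats.getColumnStatisticsData().keySet());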
Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.
In the class CatalogStatisticsTest, the method testGetStatsFromCatalogForConnectorCatalogTable:
@Test
public void testGetStatsFromCatalogForConnectorCatalogTable() throws Exception {
    catalog.createTable(
            new ObjectPath(databaseName, "T1"),
            ConnectorCatalogTable.source(new TestTableSource(true, tableSchema), true),
            false);
    catalog.createTable(
            new ObjectPath(databaseName, "T2"),
            ConnectorCatalogTable.source(new TestTableSource(true, tableSchema), true),
            false);
    alterTableStatistics(catalog, "T1");
    assertStatistics(tEnv, "T1");
    alterTableStatisticsWithUnknownRowCount(catalog, "T2");
    assertTableStatisticsWithUnknownRowCount(tEnv, "T2");
}
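The test registers both ConnectorCatalogTable sources under ObjectPaths in the same database and then exercises the known and the unknown row count paths; alterTableStatistics, assertStatistics and assertTableStatisticsWithUnknownRowCount are helpers of the same test class and are not shown here. Since every call above identifies its table through an ObjectPath, a short sketch of the two equivalent ways such a path can be built:

// Both forms identify the same catalog object; fromString() parses a "database.object" string.
ObjectPath p1 = new ObjectPath("default_database", "T1");
ObjectPath p2 = ObjectPath.fromString("default_database.T1");
System.out.println(p1.getDatabaseName()); // default_database
System.out.println(p2.getObjectName());   // T1
System.out.println(p1.getFullName());     // default_database.T1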
Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.
In the class HiveCatalog, the method alterTableViaProperties:
private void alterTableViaProperties(
        AlterTableOp alterOp,
        Table hiveTable,
        CatalogTable catalogTable,
        Map<String, String> oldProps,
        Map<String, String> newProps,
        StorageDescriptor sd) {
    switch (alterOp) {
        case CHANGE_TBL_PROPS:
            // merge the new table properties into the existing ones
            oldProps.putAll(newProps);
            break;
        case CHANGE_LOCATION:
            HiveTableUtil.extractLocation(sd, newProps);
            break;
        case CHANGE_FILE_FORMAT:
            String newFileFormat = newProps.remove(STORED_AS_FILE_FORMAT);
            HiveTableUtil.setStorageFormat(sd, newFileFormat, hiveConf);
            break;
        case CHANGE_SERDE_PROPS:
            HiveTableUtil.extractRowFormat(sd, newProps);
            break;
        case ALTER_COLUMNS:
            if (hiveTable == null) {
                throw new CatalogException("ALTER COLUMNS cannot be done with ALTER PARTITION");
            }
            HiveTableUtil.alterColumns(hiveTable.getSd(), catalogTable);
            boolean cascade = Boolean.parseBoolean(newProps.remove(ALTER_COL_CASCADE));
            if (cascade) {
                if (!isTablePartitioned(hiveTable)) {
                    throw new CatalogException("ALTER COLUMNS CASCADE for non-partitioned table");
                }
                try {
                    // propagate the column change to every existing partition of the table
                    for (CatalogPartitionSpec spec :
                            listPartitions(new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()))) {
                        Partition partition = getHivePartition(hiveTable, spec);
                        HiveTableUtil.alterColumns(partition.getSd(), catalogTable);
                        client.alter_partition(hiveTable.getDbName(), hiveTable.getTableName(), partition);
                    }
                } catch (Exception e) {
                    throw new CatalogException("Failed to cascade add/replace columns to partitions", e);
                }
            }
            break;
        default:
            throw new CatalogException("Unsupported alter table operation " + alterOp);
    }
}
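In the ALTER_COLUMNS cascade branch, the ObjectPath is rebuilt from the Hive table's database and table name so that listPartitions can enumerate every partition whose storage descriptor needs the same column change. A minimal sketch, assuming an opened HiveCatalog instance named hiveCatalog (a hypothetical variable) and a made-up table path, of doing the same enumeration from the outside through the Catalog interface:

// Hypothetical usage; "hiveCatalog" and the table path are placeholders.
ObjectPath tablePath = new ObjectPath("default", "partitioned_table");
for (CatalogPartitionSpec spec : hiveCatalog.listPartitions(tablePath)) {
    // Each spec maps partition keys to values, e.g. {part1=A, part2=1}.
    System.out.println(spec.getPartitionSpec());
}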
Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.
In the class HiveCatalog, the method renameTable:
@Override
public void renameTable(ObjectPath tablePath, String newTableName, boolean ignoreIfNotExists)
        throws TableNotExistException, TableAlreadyExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    checkArgument(!isNullOrWhitespaceOnly(newTableName), "newTableName cannot be null or empty");
    try {
        // alter_table() doesn't throw a clear exception when the renamed table doesn't exist.
        // Thus, check the table existence explicitly
        if (tableExists(tablePath)) {
            ObjectPath newPath = new ObjectPath(tablePath.getDatabaseName(), newTableName);
            // alter_table() doesn't throw a clear exception when a table with the new name already exists.
            // Thus, check the new table existence explicitly
            if (tableExists(newPath)) {
                throw new TableAlreadyExistException(getName(), newPath);
            } else {
                Table table = getHiveTable(tablePath);
                table.setTableName(newTableName);
                client.alter_table(tablePath.getDatabaseName(), tablePath.getObjectName(), table);
            }
        } else if (!ignoreIfNotExists) {
            throw new TableNotExistException(getName(), tablePath);
        }
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to rename table %s", tablePath.getFullName()), e);
    }
}
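renameTable validates the source path, refuses to overwrite an existing target, and only tolerates a missing source table when ignoreIfNotExists is true; the actual rename is delegated to Hive's alter_table with the new name set on the Thrift Table object. A minimal sketch of calling it through the Catalog interface, with a hypothetical catalog variable and table names:

// Hypothetical call; "hiveCatalog" is assumed to be an opened HiveCatalog instance.
ObjectPath oldPath = new ObjectPath("default", "orders");
// Renames default.orders to default.orders_v2; with ignoreIfNotExists = false,
// a missing source table raises TableNotExistException.
hiveCatalog.renameTable(oldPath, "orders_v2", false);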