
Example 41 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From class CatalogStatisticsTest, method alterTableStatisticsWithUnknownRowCount.

private void alterTableStatisticsWithUnknownRowCount(Catalog catalog, String tableName)
        throws TableNotExistException, TablePartitionedException {
    // Keep the row count unknown, but record file count, total size and raw data size.
    catalog.alterTableStatistics(
            new ObjectPath(databaseName, tableName),
            new CatalogTableStatistics(
                    CatalogTableStatistics.UNKNOWN.getRowCount(), 1, 10000, 200000),
            true);
    catalog.alterTableColumnStatistics(
            new ObjectPath(databaseName, tableName), createColumnStats(), true);
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogTableStatistics(org.apache.flink.table.catalog.stats.CatalogTableStatistics)
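The UNKNOWN constant carries sentinel values for statistics that have not been computed, so reusing its row count keeps the row count unmarked while the other fields are set. A minimal standalone sketch of the same construction; the exact sentinel value inside UNKNOWN (commonly -1) is an assumption, not confirmed by this listing:

import org.apache.flink.table.catalog.stats.CatalogTableStatistics;

public class UnknownRowCountSketch {
    public static void main(String[] args) {
        // Reuse the row-count sentinel from UNKNOWN, but supply a real file
        // count (1), total size (10000) and raw data size (200000), mirroring
        // the test above.
        CatalogTableStatistics stats = new CatalogTableStatistics(
                CatalogTableStatistics.UNKNOWN.getRowCount(), 1, 10000, 200000);
        System.out.println("rowCount = " + stats.getRowCount());   // sentinel, assumed -1
        System.out.println("fileCount = " + stats.getFileCount()); // 1
    }
}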

Example 42 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From class CatalogStatisticsTest, method createPartitionColumnStats.

private void createPartitionColumnStats(String part1, int part2, boolean unknown) throws Exception {
    ObjectPath path = ObjectPath.fromString("default_database.PartT");
    // Identify the target partition by its two partition keys.
    LinkedHashMap<String, String> partSpecMap = new LinkedHashMap<>();
    partSpecMap.put("part1", part1);
    partSpecMap.put("part2", String.valueOf(part2));
    CatalogPartitionSpec partSpec = new CatalogPartitionSpec(partSpecMap);
    CatalogColumnStatisticsDataLong longColStats =
            new CatalogColumnStatisticsDataLong(-123L, 763322L, 23L, 77L);
    CatalogColumnStatisticsDataString stringColStats =
            new CatalogColumnStatisticsDataString(152L, 43.5D, 20L, 0L);
    // When 'unknown' is set, store all-null statistics for both columns instead.
    Map<String, CatalogColumnStatisticsDataBase> colStatsMap = new HashMap<>();
    colStatsMap.put("id", unknown
            ? new CatalogColumnStatisticsDataLong(null, null, null, null)
            : longColStats);
    colStatsMap.put("name", unknown
            ? new CatalogColumnStatisticsDataString(null, null, null, null)
            : stringColStats);
    catalog.alterPartitionColumnStatistics(path, partSpec, new CatalogColumnStatistics(colStatsMap), true);
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogColumnStatisticsDataLong(org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataLong) CatalogColumnStatisticsDataBase(org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataBase) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) CatalogColumnStatisticsDataString(org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataString) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) CatalogColumnStatistics(org.apache.flink.table.catalog.stats.CatalogColumnStatistics)
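ObjectPath.fromString splits a dot-separated full name into its database and object parts; the accessors used below all appear elsewhere in these examples. A minimal sketch:

import org.apache.flink.table.catalog.ObjectPath;

public class ObjectPathSketch {
    public static void main(String[] args) {
        ObjectPath path = ObjectPath.fromString("default_database.PartT");
        System.out.println(path.getDatabaseName()); // default_database
        System.out.println(path.getObjectName());   // PartT
        System.out.println(path.getFullName());     // default_database.PartT

        // Equivalent explicit construction, as in Example 41 above.
        ObjectPath same = new ObjectPath("default_database", "PartT");
        System.out.println(path.equals(same));      // true, assuming value equality
    }
}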

Example 43 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From class CatalogStatisticsTest, method testGetStatsFromCatalogForConnectorCatalogTable.

@Test
public void testGetStatsFromCatalogForConnectorCatalogTable() throws Exception {
    // Register two identical connector tables, then give T1 full statistics
    // and T2 statistics with an unknown row count.
    catalog.createTable(
            new ObjectPath(databaseName, "T1"),
            ConnectorCatalogTable.source(new TestTableSource(true, tableSchema), true),
            false);
    catalog.createTable(
            new ObjectPath(databaseName, "T2"),
            ConnectorCatalogTable.source(new TestTableSource(true, tableSchema), true),
            false);
    alterTableStatistics(catalog, "T1");
    assertStatistics(tEnv, "T1");
    alterTableStatisticsWithUnknownRowCount(catalog, "T2");
    assertTableStatisticsWithUnknownRowCount(tEnv, "T2");
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) TestTableSource(org.apache.flink.table.planner.utils.TestTableSource) Test(org.junit.Test)
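createTable takes the target ObjectPath, the table definition, and an ignoreIfExists flag. A self-contained sketch of the same call against GenericInMemoryCatalog (which appears in the aggregation list below); the catalog and database names here are our own choices, and CatalogTableImpl is deprecated in newer Flink versions:

import java.util.HashMap;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogTableImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.ObjectPath;

public class CreateTableSketch {
    public static void main(String[] args) throws Exception {
        // In-memory catalog whose default database is named "default".
        Catalog catalog = new GenericInMemoryCatalog("my_catalog", "default");
        catalog.open();
        ObjectPath path = new ObjectPath("default", "T1");
        catalog.createTable(
                path,
                new CatalogTableImpl(
                        TableSchema.builder().field("id", DataTypes.BIGINT()).build(),
                        new HashMap<>(),
                        "a test table"),
                false); // ignoreIfExists = false: fail if T1 already exists
        System.out.println(catalog.tableExists(path)); // true
        catalog.close();
    }
}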

Example 44 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From class HiveCatalog, method alterTableViaProperties.

private void alterTableViaProperties(
        AlterTableOp alterOp,
        Table hiveTable,
        CatalogTable catalogTable,
        Map<String, String> oldProps,
        Map<String, String> newProps,
        StorageDescriptor sd) {
    switch (alterOp) {
        case CHANGE_TBL_PROPS:
            oldProps.putAll(newProps);
            break;
        case CHANGE_LOCATION:
            HiveTableUtil.extractLocation(sd, newProps);
            break;
        case CHANGE_FILE_FORMAT:
            String newFileFormat = newProps.remove(STORED_AS_FILE_FORMAT);
            HiveTableUtil.setStorageFormat(sd, newFileFormat, hiveConf);
            break;
        case CHANGE_SERDE_PROPS:
            HiveTableUtil.extractRowFormat(sd, newProps);
            break;
        case ALTER_COLUMNS:
            // Column changes apply at the table level, never per partition.
            if (hiveTable == null) {
                throw new CatalogException("ALTER COLUMNS cannot be done with ALTER PARTITION");
            }
            HiveTableUtil.alterColumns(hiveTable.getSd(), catalogTable);
            boolean cascade = Boolean.parseBoolean(newProps.remove(ALTER_COL_CASCADE));
            if (cascade) {
                if (!isTablePartitioned(hiveTable)) {
                    throw new CatalogException("ALTER COLUMNS CASCADE for non-partitioned table");
                }
                try {
                    // Propagate the new column layout to every existing partition.
                    for (CatalogPartitionSpec spec :
                            listPartitions(new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()))) {
                        Partition partition = getHivePartition(hiveTable, spec);
                        HiveTableUtil.alterColumns(partition.getSd(), catalogTable);
                        client.alter_partition(hiveTable.getDbName(), hiveTable.getTableName(), partition);
                    }
                } catch (Exception e) {
                    throw new CatalogException("Failed to cascade add/replace columns to partitions", e);
                }
            }
            break;
        default:
            throw new CatalogException("Unsupported alter table operation " + alterOp);
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) CatalogPartition(org.apache.flink.table.catalog.CatalogPartition) ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) PartitionNotExistException(org.apache.flink.table.catalog.exceptions.PartitionNotExistException) PartitionSpecInvalidException(org.apache.flink.table.catalog.exceptions.PartitionSpecInvalidException) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) TablePartitionedException(org.apache.flink.table.catalog.exceptions.TablePartitionedException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) FunctionNotExistException(org.apache.flink.table.catalog.exceptions.FunctionNotExistException) DatabaseNotEmptyException(org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException) DatabaseAlreadyExistException(org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException) FileNotFoundException(java.io.FileNotFoundException) TableNotPartitionedException(org.apache.flink.table.catalog.exceptions.TableNotPartitionedException) PartitionAlreadyExistsException(org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException)
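The CASCADE branch above builds an ObjectPath from the Hive table's own database and table names to enumerate its partitions. A minimal sketch of the same iteration pattern against the public Catalog interface (the Hive Metastore client call from the example is internal and omitted here); 'forEachPartition' is a hypothetical helper name:

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectPath;

public class PartitionWalkSketch {
    // Walk every partition of a table, as the CASCADE branch does; in the
    // example each spec is resolved to a Hive Partition and altered in place.
    static void forEachPartition(Catalog catalog, String db, String table) throws Exception {
        ObjectPath path = new ObjectPath(db, table);
        for (CatalogPartitionSpec spec : catalog.listPartitions(path)) {
            System.out.println("would alter partition: " + spec.getPartitionSpec());
        }
    }
}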

Example 45 with ObjectPath

Use of org.apache.flink.table.catalog.ObjectPath in the Apache Flink project.

From class HiveCatalog, method renameTable.

@Override
public void renameTable(ObjectPath tablePath, String newTableName, boolean ignoreIfNotExists)
        throws TableNotExistException, TableAlreadyExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    checkArgument(!isNullOrWhitespaceOnly(newTableName), "newTableName cannot be null or empty");
    try {
        // alter_table() doesn't throw a clear exception when the source table
        // is missing; check its existence explicitly.
        if (tableExists(tablePath)) {
            ObjectPath newPath = new ObjectPath(tablePath.getDatabaseName(), newTableName);
            // Likewise, check explicitly that no table with the new name exists yet.
            if (tableExists(newPath)) {
                throw new TableAlreadyExistException(getName(), newPath);
            } else {
                Table table = getHiveTable(tablePath);
                table.setTableName(newTableName);
                client.alter_table(tablePath.getDatabaseName(), tablePath.getObjectName(), table);
            }
        } else if (!ignoreIfNotExists) {
            throw new TableNotExistException(getName(), tablePath);
        }
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to rename table %s", tablePath.getFullName()), e);
    }
}
Also used : TException(org.apache.thrift.TException) ObjectPath(org.apache.flink.table.catalog.ObjectPath) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.metastore.api.Table) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException)
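From the caller's side, renameTable takes the old path, the bare new table name (the database stays the same), and an ignoreIfNotExists flag. A short usage sketch matching the signature above; 'renameExample' and the table names are hypothetical:

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.ObjectPath;

public class RenameTableSketch {
    static void renameExample(Catalog catalog) throws Exception {
        ObjectPath oldPath = new ObjectPath("default_database", "T1");
        // Only the object name changes; the result lives at default_database.T1_renamed.
        // ignoreIfNotExists = true: succeed silently if T1 is absent.
        catalog.renameTable(oldPath, "T1_renamed", true);
    }
}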

Aggregations

ObjectPath (org.apache.flink.table.catalog.ObjectPath): 81 usages
Test (org.junit.Test): 52 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 32 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 29 usages
HashMap (java.util.HashMap): 21 usages
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 20 usages
TableSchema (org.apache.flink.table.api.TableSchema): 19 usages
TableEnvironment (org.apache.flink.table.api.TableEnvironment): 17 usages
CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec): 12 usages
Table (org.apache.hadoop.hive.metastore.api.Table): 12 usages
Configuration (org.apache.flink.configuration.Configuration): 11 usages
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 11 usages
TableNotExistException (org.apache.flink.table.catalog.exceptions.TableNotExistException): 9 usages
ArrayList (java.util.ArrayList): 8 usages
Map (java.util.Map): 8 usages
GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog): 8 usages
LinkedHashMap (java.util.LinkedHashMap): 7 usages
Catalog (org.apache.flink.table.catalog.Catalog): 7 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 6 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 6 usages