Example 36 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.

From the class PostgresCatalogTest, method testPrimitiveDataTypes.

@Test
public void testPrimitiveDataTypes() throws TableNotExistException {
    CatalogBaseTable table = catalog.getTable(new ObjectPath(PostgresCatalog.DEFAULT_DATABASE, TABLE_PRIMITIVE_TYPE));
    assertEquals(getPrimitiveTable().schema, table.getUnresolvedSchema());
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ObjectPath(org.apache.flink.table.catalog.ObjectPath) Test(org.junit.Test)
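
This example exercises the generic lookup path: a table is addressed by database and object name through ObjectPath, and the returned CatalogBaseTable exposes its declared schema via getUnresolvedSchema(). Below is a minimal sketch of the same pattern outside the test harness; the class and method names are placeholders of mine, and any Catalog implementation can stand in for the PostgresCatalog used above.

import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.TableNotExistException;

public final class SchemaLookup {

    /** Fetches a table's declared (unresolved) schema from any Catalog implementation. */
    static Schema lookupSchema(Catalog catalog, String database, String table)
            throws TableNotExistException {
        CatalogBaseTable base = catalog.getTable(new ObjectPath(database, table));
        // The unresolved schema is the schema as declared in the catalog, before
        // Flink resolves column types and expressions against it.
        return base.getUnresolvedSchema();
    }
}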

Example 37 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterView.

private Operation convertAlterView(HiveParserASTNode ast) throws SemanticException {
    Operation operation = null;
    String[] qualified = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    CatalogBaseTable alteredTable = getAlteredTable(tableName, true);
    if (ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        // alter view as
        operation = convertCreateView(ast);
    } else {
        ast = (HiveParserASTNode) ast.getChild(1);
        switch (ast.getType()) {
            case HiveASTParser.TOK_ALTERVIEW_PROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, false);
                break;
            case HiveASTParser.TOK_ALTERVIEW_DROPPROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_RENAME:
                operation = convertAlterTableRename(tableName, ast, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_ADDPARTS:
            case HiveASTParser.TOK_ALTERVIEW_DROPPARTS:
                handleUnsupportedOperation("ADD/DROP PARTITION for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for ALTER VIEW: " + ast);
        }
    }
    return operation;
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) HiveOperation(org.apache.hadoop.hive.ql.plan.HiveOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) AlterPartitionPropertiesOperation(org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation) ShowPartitionsOperation(org.apache.flink.table.operations.ShowPartitionsOperation) AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) Operation(org.apache.flink.table.operations.Operation) DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation) ShowDatabasesOperation(org.apache.flink.table.operations.ShowDatabasesOperation) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) CreateCatalogFunctionOperation(org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation) CreateTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation)
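
For orientation, the statements below show roughly which SQL shape reaches each branch of the switch. This is a sketch under assumptions: the session's SQL dialect must be set to Hive for these statements to reach HiveParserDDLSemanticAnalyzer at all, the view names are placeholders, and the UNSET spelling is my assumption about the syntax behind TOK_ALTERVIEW_DROPPROPERTIES.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public final class AlterViewForms {

    public static void main(String[] args) {
        // Placeholder setup; a real run would also register a Hive catalog and
        // switch the dialect to Hive so these statements hit convertAlterView.
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        tEnv.executeSql("ALTER VIEW v SET TBLPROPERTIES ('k'='v')"); // TOK_ALTERVIEW_PROPERTIES
        tEnv.executeSql("ALTER VIEW v UNSET TBLPROPERTIES ('k')"); // TOK_ALTERVIEW_DROPPROPERTIES (assumed spelling)
        tEnv.executeSql("ALTER VIEW v RENAME TO v2"); // TOK_ALTERVIEW_RENAME
        tEnv.executeSql("ALTER VIEW v2 AS SELECT 1"); // TOK_QUERY, delegated to convertCreateView
    }
}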

Example 38 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.

From the class HiveDialectITCase, method testView.

@Test
public void testView() throws Exception {
    tableEnv.executeSql("create table tbl (x int,y string)");
    // create
    tableEnv.executeSql("create view v(vx) comment 'v comment' tblproperties ('k1'='v1') as select x from tbl");
    ObjectPath viewPath = new ObjectPath("default", "v");
    CatalogBaseTable catalogBaseTable = hiveCatalog.getTable(viewPath);
    assertTrue(catalogBaseTable instanceof CatalogView);
    assertEquals("vx", catalogBaseTable.getUnresolvedSchema().getColumns().get(0).getName());
    assertEquals("v1", catalogBaseTable.getOptions().get("k1"));
    // change properties
    tableEnv.executeSql("alter view v set tblproperties ('k1'='v11')");
    catalogBaseTable = hiveCatalog.getTable(viewPath);
    assertEquals("v11", catalogBaseTable.getOptions().get("k1"));
    // change query
    tableEnv.executeSql("alter view v as select y from tbl");
    catalogBaseTable = hiveCatalog.getTable(viewPath);
    assertEquals("y", catalogBaseTable.getUnresolvedSchema().getColumns().get(0).getName());
    // rename
    tableEnv.executeSql("alter view v rename to v1");
    viewPath = new ObjectPath("default", "v1");
    assertTrue(hiveCatalog.tableExists(viewPath));
    // drop
    tableEnv.executeSql("drop view v1");
    assertFalse(hiveCatalog.tableExists(viewPath));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CatalogView(org.apache.flink.table.catalog.CatalogView) Test(org.junit.Test)
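
The instanceof check on CatalogView above is one way to tell views and tables apart; CatalogBaseTable also carries this information directly as a TableKind, which is what HiveCatalog.alterTable in Example 40 compares. A small sketch of the kind-based check; the helper class and method names are mine.

import org.apache.flink.table.catalog.CatalogBaseTable;

public final class TableKinds {

    /** True if the catalog object is a view rather than a physical table. */
    static boolean isView(CatalogBaseTable t) {
        return t.getTableKind() == CatalogBaseTable.TableKind.VIEW;
    }
}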

Example 39 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.

From the class HiveCatalog, method createTable.

@Override
public void createTable(ObjectPath tablePath, CatalogBaseTable table, boolean ignoreIfExists) throws TableAlreadyExistException, DatabaseNotExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    checkNotNull(table, "table cannot be null");
    if (!databaseExists(tablePath.getDatabaseName())) {
        throw new DatabaseNotExistException(getName(), tablePath.getDatabaseName());
    }
    boolean managedTable = ManagedTableListener.isManagedTable(this, table);
    Table hiveTable = HiveTableUtil.instantiateHiveTable(tablePath, table, hiveConf, managedTable);
    UniqueConstraint pkConstraint = null;
    List<String> notNullCols = new ArrayList<>();
    boolean isHiveTable = isHiveTable(table.getOptions());
    if (isHiveTable) {
        pkConstraint = table.getSchema().getPrimaryKey().orElse(null);
        String nnColStr = hiveTable.getParameters().remove(NOT_NULL_COLS);
        if (nnColStr != null) {
            notNullCols.addAll(Arrays.asList(nnColStr.split(HiveDDLUtils.COL_DELIMITER)));
        } else {
            for (int i = 0; i < table.getSchema().getFieldDataTypes().length; i++) {
                if (!table.getSchema().getFieldDataTypes()[i].getLogicalType().isNullable()) {
                    notNullCols.add(table.getSchema().getFieldNames()[i]);
                }
            }
        }
        // remove the 'connector' option for hive table
        hiveTable.getParameters().remove(CONNECTOR.key());
    }
    try {
        if (pkConstraint != null || !notNullCols.isEmpty()) {
            // extract constraint traits from table properties
            String pkTraitStr = hiveTable.getParameters().remove(PK_CONSTRAINT_TRAIT);
            byte pkTrait = pkTraitStr == null ? HiveDDLUtils.defaultTrait() : Byte.parseByte(pkTraitStr);
            List<Byte> pkTraits = Collections.nCopies(pkConstraint == null ? 0 : pkConstraint.getColumns().size(), pkTrait);
            List<Byte> nnTraits;
            String nnTraitsStr = hiveTable.getParameters().remove(NOT_NULL_CONSTRAINT_TRAITS);
            if (nnTraitsStr != null) {
                String[] traits = nnTraitsStr.split(HiveDDLUtils.COL_DELIMITER);
                Preconditions.checkArgument(traits.length == notNullCols.size(), "Number of NOT NULL columns and constraint traits mismatch");
                nnTraits = Arrays.stream(traits).map(Byte::new).collect(Collectors.toList());
            } else {
                nnTraits = Collections.nCopies(notNullCols.size(), HiveDDLUtils.defaultTrait());
            }
            client.createTableWithConstraints(hiveTable, hiveConf, pkConstraint, pkTraits, notNullCols, nnTraits);
        } else {
            client.createTable(hiveTable);
        }
    } catch (AlreadyExistsException e) {
        if (!ignoreIfExists) {
            throw new TableAlreadyExistException(getName(), tablePath, e);
        }
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to create table %s", tablePath.getFullName()), e);
    }
}
Also used : TException(org.apache.thrift.TException) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.metastore.api.Table) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) PartitionAlreadyExistsException(org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException) ArrayList(java.util.ArrayList) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)
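
Seen from the caller's side, createTable takes a fully built CatalogBaseTable plus an ignoreIfExists flag. The sketch below goes through the generic Catalog interface; the schema, the datagen option, and all names are illustrative choices of mine, not taken from the Flink sources.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.ObjectPath;

public final class CreateTableSketch {

    /** Registers a two-column table, silently returning if it already exists. */
    static void createIfAbsent(Catalog catalog) throws Exception {
        Schema schema = Schema.newBuilder()
                // A NOT NULL column: for Hive tables this is what the notNullCols
                // handling in createTable above picks up.
                .column("id", DataTypes.INT().notNull())
                .column("name", DataTypes.STRING())
                .build();
        Map<String, String> options = new HashMap<>();
        options.put("connector", "datagen"); // placeholder; createTable strips this key for Hive tables
        CatalogTable table = CatalogTable.of(schema, "demo table", Collections.emptyList(), options);
        catalog.createTable(new ObjectPath("default", "demo"), table, /* ignoreIfExists */ true);
    }
}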

Example 40 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.

From the class HiveCatalog, method alterTable.

@Override
public void alterTable(ObjectPath tablePath, CatalogBaseTable newCatalogTable, boolean ignoreIfNotExists) throws TableNotExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    checkNotNull(newCatalogTable, "newCatalogTable cannot be null");
    Table hiveTable;
    try {
        hiveTable = getHiveTable(tablePath);
    } catch (TableNotExistException e) {
        if (!ignoreIfNotExists) {
            throw e;
        }
        return;
    }
    CatalogBaseTable existingTable = instantiateCatalogTable(hiveTable);
    if (existingTable.getTableKind() != newCatalogTable.getTableKind()) {
        throw new CatalogException(String.format("Table types don't match. Existing table is '%s' and new table is '%s'.", existingTable.getTableKind(), newCatalogTable.getTableKind()));
    }
    disallowChangeCatalogTableType(existingTable.getOptions(), newCatalogTable.getOptions());
    boolean isHiveTable = isHiveTable(hiveTable.getParameters());
    if (isHiveTable) {
        AlterTableOp op = HiveTableUtil.extractAlterTableOp(newCatalogTable.getOptions());
        if (op == null) {
            // the alter operation isn't encoded as properties
            hiveTable = HiveTableUtil.alterTableViaCatalogBaseTable(tablePath, newCatalogTable, hiveTable, hiveConf, false);
        } else {
            alterTableViaProperties(op, hiveTable, (CatalogTable) newCatalogTable, hiveTable.getParameters(), newCatalogTable.getOptions(), hiveTable.getSd());
        }
    } else {
        hiveTable = HiveTableUtil.alterTableViaCatalogBaseTable(tablePath, newCatalogTable, hiveTable, hiveConf, ManagedTableListener.isManagedTable(this, newCatalogTable));
    }
    if (isHiveTable) {
        hiveTable.getParameters().remove(CONNECTOR.key());
    }
    try {
        client.alter_table(tablePath.getDatabaseName(), tablePath.getObjectName(), hiveTable);
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to alter table %s", tablePath.getFullName()), e);
    }
}
Also used : TException(org.apache.thrift.TException) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.metastore.api.Table) AlterTableOp(org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveTable.AlterTableOp) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException)
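
From the caller's perspective, alterTable replaces the stored metadata wholesale, and the method above rejects any change of table kind. A minimal sketch that tweaks a single option while keeping everything else, assuming the path names a table rather than a view; the helper name is mine.

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.ObjectPath;

public final class AlterTableSketch {

    /** Overwrites one table option, leaving schema and table kind untouched. */
    static void setOption(Catalog catalog, ObjectPath path, String key, String value)
            throws Exception {
        CatalogTable existing = (CatalogTable) catalog.getTable(path); // would fail for a view
        Map<String, String> options = new HashMap<>(existing.getOptions());
        options.put(key, value);
        // Deriving the new table from the old one via copy() keeps the table kind
        // identical, which alterTable checks before touching the metastore.
        catalog.alterTable(path, existing.copy(options), /* ignoreIfNotExists */ false);
    }
}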

Aggregations

CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 111 usages
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 57 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 46 usages
Test (org.junit.Test): 46 usages
ValidationException (org.apache.flink.table.api.ValidationException): 33 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 30 usages
CatalogView (org.apache.flink.table.catalog.CatalogView): 27 usages
TableSchema (org.apache.flink.table.api.TableSchema): 26 usages
Table (org.apache.hadoop.hive.metastore.api.Table): 21 usages
HashMap (java.util.HashMap): 19 usages
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 18 usages
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 15 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 15 usages
Map (java.util.Map): 13 usages
LinkedHashMap (java.util.LinkedHashMap): 12 usages
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 12 usages
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation): 12 usages
DropTableOperation (org.apache.flink.table.operations.ddl.DropTableOperation): 12 usages
ArrayList (java.util.ArrayList): 9 usages
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 9 usages