Example 51 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From class HiveCatalogDataTypeTest, method testNonSupportedVarBinaryDataTypes.

@Test
public void testNonSupportedVarBinaryDataTypes() throws Exception {
    DataType[] types = new DataType[] { DataTypes.VARBINARY(20) };
    CatalogTable table = createCatalogTable(types);
    catalog.createDatabase(db1, createDb(), false);
    exception.expect(UnsupportedOperationException.class);
    catalog.createTable(path1, table, false);
}
Also used : DataType(org.apache.flink.table.types.DataType) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Test(org.junit.Test)
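
Note: the createCatalogTable helper is defined elsewhere in HiveCatalogDataTypeTest and is not part of this excerpt. A minimal sketch of what it might look like, assuming one physical column per DataType (the column names and table comment are hypothetical):

// Hypothetical sketch, not copied from the Flink source: builds a simple
// CatalogTable with one physical column per supplied type.
private CatalogTable createCatalogTable(DataType[] types) {
    TableSchema.Builder builder = TableSchema.builder();
    for (int i = 0; i < types.length; i++) {
        // column names "col0", "col1", ... are assumptions for illustration
        builder.field("col" + i, types[i]);
    }
    return new CatalogTableImpl(builder.build(), Collections.emptyMap(), "test comment");
}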

Example 52 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From class HiveCatalogTest, method testAlterFlinkNonManagedTableToHiveTable.

@Test
public void testAlterFlinkNonManagedTableToHiveTable() throws Exception {
    Map<String, String> originOptions = Collections.singletonMap(FactoryUtil.CONNECTOR.key(), DataGenTableSourceFactory.IDENTIFIER);
    CatalogTable originTable = new CatalogTableImpl(schema, originOptions, "Flink non-managed table");
    hiveCatalog.createTable(tablePath, originTable, false);
    Map<String, String> newOptions = getLegacyFileSystemConnectorOptions("/test_path");
    newOptions.put(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
    CatalogTable newTable = new CatalogTableImpl(schema, newOptions, "Hive table");
    assertThatThrownBy(() -> hiveCatalog.alterTable(tablePath, newTable, false))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining(
                    "Changing catalog table type is not allowed. "
                            + "Existing table type is 'FLINK_NON_MANAGED_TABLE', but new table type is 'HIVE_TABLE'");
}
Also used : CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Test(org.junit.Test)
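
The getLegacyFileSystemConnectorOptions helper used here and in Example 53 is not shown in this excerpt. A plausible sketch, assuming it produces the legacy descriptor-style keys that Example 55 spells out explicitly:

// Sketch under the assumption that the helper returns the legacy
// descriptor-style options ("connector.type", "connector.path", "format.type")
// seen inline in Example 55.
private static Map<String, String> getLegacyFileSystemConnectorOptions(String path) {
    Map<String, String> options = new HashMap<>();
    options.put("connector.type", "filesystem");
    options.put("connector.path", path);
    options.put("format.type", "csv");
    return options;
}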

Example 53 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From class HiveCatalogTest, method testAlterHiveTableToFlinkNonManagedTable.

@Test
public void testAlterHiveTableToFlinkNonManagedTable() throws Exception {
    Map<String, String> originOptions = getLegacyFileSystemConnectorOptions("/test_path");
    originOptions.put(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
    CatalogTable originTable = new CatalogTableImpl(schema, originOptions, "Hive table");
    hiveCatalog.createTable(tablePath, originTable, false);
    Map<String, String> newOptions = Collections.singletonMap(FactoryUtil.CONNECTOR.key(), DataGenTableSourceFactory.IDENTIFIER);
    CatalogTable newTable = new CatalogTableImpl(schema, newOptions, "Flink managed table");
    assertThatThrownBy(() -> hiveCatalog.alterTable(tablePath, newTable, false))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining(
                    "Changing catalog table type is not allowed. "
                            + "Existing table type is 'HIVE_TABLE', but new table type is 'FLINK_NON_MANAGED_TABLE'");
}
Also used : CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Test(org.junit.Test)
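
Taken together, Examples 52 and 53 show that HiveCatalog derives a table type from the connector option and rejects any ALTER that would change it, in either direction. A hedged sketch of that classification rule, for illustration only (this is not the actual HiveCatalog code):

// Illustration only: roughly how the table types in the error messages above
// could be derived from the table options.
static String tableType(Map<String, String> options) {
    // FactoryUtil.CONNECTOR.key() is "connector"
    String connector = options.get("connector");
    if (connector == null) {
        return "FLINK_MANAGED_TABLE"; // no connector option at all
    }
    // SqlCreateHiveTable.IDENTIFIER ("hive") marks a Hive table
    return "hive".equals(connector) ? "HIVE_TABLE" : "FLINK_NON_MANAGED_TABLE";
}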

Example 54 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From class HiveParserDDLSemanticAnalyzer, method convertAlterTableChangeCol.

private Operation convertAlterTableChangeCol(CatalogBaseTable alteredTable, String[] qualified, HiveParserASTNode ast) throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name]
    // [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType = HiveParserBaseSemanticAnalyzer.getTypeStringFromAST((HiveParserASTNode) ast.getChild(2));
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveASTParser.StringLiteral:
                newComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getText());
                break;
            case HiveASTParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveASTParser.KW_FIRST:
                first = true;
                break;
            case HiveASTParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveASTParser.TOK_RESTRICT:
                break;
            default:
                throw new ValidationException("Unsupported token: " + child.getToken() + " for alter table");
        }
    }
    // Validate the operation of renaming a column name.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new ValidationException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    String oldName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(oldColName);
    String newName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(newColName);
    if (oldTable.getPartitionKeys().contains(oldName)) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = oldTable.getSchema();
    TableColumn newTableColumn =
            TableColumn.physical(
                    newName,
                    HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(newType)));
    TableSchema newSchema = OperationConverterUtils.changeColumn(oldSchema, oldName, newTableColumn, first, flagCol);
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(newSchema, oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) TableSchema(org.apache.flink.table.api.TableSchema) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) TableColumn(org.apache.flink.table.api.TableColumn) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
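
The FIRST/AFTER handling above funnels into OperationConverterUtils.changeColumn. A small illustration of the two reorderings with an assumed three-column schema (the column names are hypothetical; the call signature matches the one used above):

// Assumed schema, for illustration only.
TableSchema oldSchema = TableSchema.builder()
        .field("a", DataTypes.INT())
        .field("b", DataTypes.STRING())
        .field("c", DataTypes.DOUBLE())
        .build();
// ALTER TABLE t CHANGE COLUMN b d STRING FIRST
// first = true, flagCol = null: the renamed column moves to the front -> (d, a, c)
TableSchema movedFirst = OperationConverterUtils.changeColumn(
        oldSchema, "b", TableColumn.physical("d", DataTypes.STRING()), true, null);
// ALTER TABLE t CHANGE COLUMN b d STRING AFTER c
// first = false, flagCol = "c": the column is placed after c -> (a, c, d)
TableSchema movedAfter = OperationConverterUtils.changeColumn(
        oldSchema, "b", TableColumn.physical("d", DataTypes.STRING()), false, "c");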

Example 55 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From class HiveCatalogITCase, method testCsvTableViaAPI.

@Test
public void testCsvTableViaAPI() throws Exception {
    TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
    tableEnv.getConfig().addConfiguration(new Configuration().set(CoreOptions.DEFAULT_PARALLELISM, 1));
    tableEnv.registerCatalog("myhive", hiveCatalog);
    tableEnv.useCatalog("myhive");
    final TableSchema schema =
            TableSchema.builder()
                    .field("name", DataTypes.STRING())
                    .field("age", DataTypes.INT())
                    .build();
    final Map<String, String> sourceOptions = new HashMap<>();
    sourceOptions.put("connector.type", "filesystem");
    sourceOptions.put("connector.path", getClass().getResource("/csv/test.csv").getPath());
    sourceOptions.put("format.type", "csv");
    CatalogTable source = new CatalogTableImpl(schema, sourceOptions, "Comment.");
    Path p = Paths.get(tempFolder.newFolder().getAbsolutePath(), "test.csv");
    final Map<String, String> sinkOptions = new HashMap<>();
    sinkOptions.put("connector.type", "filesystem");
    sinkOptions.put("connector.path", p.toAbsolutePath().toString());
    sinkOptions.put("format.type", "csv");
    CatalogTable sink = new CatalogTableImpl(schema, sinkOptions, "Comment.");
    hiveCatalog.createTable(new ObjectPath(HiveCatalog.DEFAULT_DB, sourceTableName), source, false);
    hiveCatalog.createTable(new ObjectPath(HiveCatalog.DEFAULT_DB, sinkTableName), sink, false);
    Table t = tableEnv.sqlQuery(String.format("select * from myhive.`default`.%s", sourceTableName));
    List<Row> result = CollectionUtil.iteratorToList(t.execute().collect());
    result.sort(Comparator.comparing(String::valueOf));
    // assert query result
    assertThat(result).containsExactly(Row.of("1", 1), Row.of("2", 2), Row.of("3", 3));
    tableEnv.executeSql(String.format("insert into myhive.`default`.%s select * from myhive.`default`.%s", sinkTableName, sourceTableName)).await();
    // assert written result
    File resultFile = new File(p.toAbsolutePath().toString());
    // try-with-resources so the reader is closed even if an assertion fails
    try (BufferedReader reader = new BufferedReader(new FileReader(resultFile))) {
        for (int i = 0; i < 3; i++) {
            String readLine = reader.readLine();
            assertThat(readLine).isEqualTo(String.format("%d,%d", i + 1, i + 1));
        }
        // no more lines expected
        assertThat(reader.readLine()).isNull();
    }
    tableEnv.executeSql(String.format("DROP TABLE %s", sourceTableName));
    tableEnv.executeSql(String.format("DROP TABLE %s", sinkTableName));
}
Also used : Path(java.nio.file.Path) ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogTable(org.apache.flink.table.catalog.CatalogTable) Table(org.apache.flink.table.api.Table) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) Configuration(org.apache.flink.configuration.Configuration) TableSchema(org.apache.flink.table.api.TableSchema) HashMap(java.util.HashMap) TableEnvironment(org.apache.flink.table.api.TableEnvironment) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) BufferedReader(java.io.BufferedReader) FileReader(java.io.FileReader) Row(org.apache.flink.types.Row) File(java.io.File) Test(org.junit.Test)
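
The "connector.type"/"format.type" keys above are the legacy descriptor properties. With the newer factory-based connector stack the same CSV source would typically be declared with flat options; a hedged sketch, assuming the current filesystem connector's "connector", "path", and "format" keys:

// Sketch only: the same source table with the newer flat option keys
// instead of the legacy descriptor keys used in the test above.
Map<String, String> modernOptions = new HashMap<>();
modernOptions.put("connector", "filesystem");
modernOptions.put("path", getClass().getResource("/csv/test.csv").getPath());
modernOptions.put("format", "csv");
CatalogTable modernSource = new CatalogTableImpl(schema, modernOptions, "Comment.");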

Aggregations

CatalogTable (org.apache.flink.table.catalog.CatalogTable): 68
Test (org.junit.Test): 35
HashMap (java.util.HashMap): 30
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 24
TableSchema (org.apache.flink.table.api.TableSchema): 17
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 17
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation): 14
ValidationException (org.apache.flink.table.api.ValidationException): 13
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 13
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 12
Operation (org.apache.flink.table.operations.Operation): 12
AlterTableAddConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation): 12
AlterTableDropConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation): 12
AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation): 12
AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation): 12
ExplainOperation (org.apache.flink.table.operations.ExplainOperation): 11
LoadModuleOperation (org.apache.flink.table.operations.LoadModuleOperation): 11
QueryOperation (org.apache.flink.table.operations.QueryOperation): 11
ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation): 11
ShowModulesOperation (org.apache.flink.table.operations.ShowModulesOperation): 11