
Example 1 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in the Apache Flink project.

From class HiveTableUtil, method instantiateHiveTable.

public static Table instantiateHiveTable(ObjectPath tablePath, CatalogBaseTable table, HiveConf hiveConf, boolean managedTable) {
    final boolean isView = table instanceof CatalogView;
    // let Hive set default parameters for us, e.g. serialization.format
    Table hiveTable = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(tablePath.getDatabaseName(), tablePath.getObjectName());
    hiveTable.setCreateTime((int) (System.currentTimeMillis() / 1000));
    Map<String, String> properties = new HashMap<>(table.getOptions());
    if (managedTable) {
        properties.put(CONNECTOR.key(), ManagedTableFactory.DEFAULT_IDENTIFIER);
    }
    // Table comment
    if (table.getComment() != null) {
        properties.put(HiveCatalogConfig.COMMENT, table.getComment());
    }
    boolean isHiveTable = HiveCatalog.isHiveTable(properties);
    // Hive table's StorageDescriptor
    StorageDescriptor sd = hiveTable.getSd();
    HiveTableUtil.setDefaultStorageFormat(sd, hiveConf);
    // Only plain Hive tables store their schema in the StorageDescriptor; a view's schema
    // goes into table properties instead (see the else branch), which also means Flink views
    // cannot be queried from Hive, because hive cannot understand the expanded query anyway
    if (isHiveTable && !isView) {
        HiveTableUtil.initiateTableFromProperties(hiveTable, properties, hiveConf);
        List<FieldSchema> allColumns = HiveTableUtil.createHiveColumns(table.getSchema());
        // Table columns and partition keys
        if (table instanceof CatalogTable) {
            CatalogTable catalogTable = (CatalogTable) table;
            if (catalogTable.isPartitioned()) {
                int partitionKeySize = catalogTable.getPartitionKeys().size();
                List<FieldSchema> regularColumns = allColumns.subList(0, allColumns.size() - partitionKeySize);
                List<FieldSchema> partitionColumns = allColumns.subList(allColumns.size() - partitionKeySize, allColumns.size());
                sd.setCols(regularColumns);
                hiveTable.setPartitionKeys(partitionColumns);
            } else {
                sd.setCols(allColumns);
                hiveTable.setPartitionKeys(new ArrayList<>());
            }
        } else {
            sd.setCols(allColumns);
        }
        // Table properties
        hiveTable.getParameters().putAll(properties);
    } else {
        DescriptorProperties tableSchemaProps = new DescriptorProperties(true);
        tableSchemaProps.putTableSchema(Schema.SCHEMA, table.getSchema());
        if (table instanceof CatalogTable) {
            tableSchemaProps.putPartitionKeys(((CatalogTable) table).getPartitionKeys());
        }
        properties.putAll(tableSchemaProps.asMap());
        properties = maskFlinkProperties(properties);
        // mark the table as generic:
        // 1. when it has no connector properties, or
        // 2. when creating views which don't have connector properties
        if (isView || (!properties.containsKey(FLINK_PROPERTY_PREFIX + CONNECTOR.key()) && !properties.containsKey(FLINK_PROPERTY_PREFIX + CONNECTOR_TYPE))) {
            properties.put(IS_GENERIC, "true");
        }
        hiveTable.setParameters(properties);
    }
    if (isView) {
        // TODO: [FLINK-12398] Support partitioned view in catalog API
        hiveTable.setPartitionKeys(new ArrayList<>());
        CatalogView view = (CatalogView) table;
        hiveTable.setViewOriginalText(view.getOriginalQuery());
        hiveTable.setViewExpandedText(view.getExpandedQuery());
        hiveTable.setTableType(TableType.VIRTUAL_VIEW.name());
    }
    return hiveTable;
}
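
For orientation, here is a minimal, self-contained sketch (not from the Flink sources) of feeding a view into the method above and observing the isView branch. The CatalogViewImpl constructor order (originalQuery, expandedQuery, schema, options, comment), the HiveTableUtil package location, and the bare default HiveConf are assumptions for illustration only.

import java.util.Collections;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogView;
import org.apache.flink.table.catalog.CatalogViewImpl;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.hive.util.HiveTableUtil;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.Table;

public class InstantiateViewSketch {
    public static void main(String[] args) {
        CatalogView view =
                new CatalogViewImpl(
                        "select x from tbl",                     // original query
                        "select `tbl`.`x` from `default`.`tbl`", // expanded query
                        TableSchema.builder().field("x", DataTypes.INT()).build(),
                        Collections.emptyMap(),
                        "example view");
        Table hiveTable =
                HiveTableUtil.instantiateHiveTable(
                        new ObjectPath("default", "v"), view, new HiveConf(), false);
        // the isView branch sets VIRTUAL_VIEW plus both query texts
        System.out.println(TableType.VIRTUAL_VIEW.name().equals(hiveTable.getTableType()));
        System.out.println(hiveTable.getViewExpandedText());
    }
}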
Also used: CatalogTable (org.apache.flink.table.catalog.CatalogTable), SqlAlterHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveTable), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), Table (org.apache.hadoop.hive.metastore.api.Table), HashMap (java.util.HashMap), DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), CatalogView (org.apache.flink.table.catalog.CatalogView)

Example 2 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in the Apache Flink project.

From class HiveCatalogHiveMetadataTest, method testViewCompatibility.

@Test
public void testViewCompatibility() throws Exception {
    // we always store view schema via properties now
    // make sure non-generic views created previously can still be used
    catalog.createDatabase(db1, createDb(), false);
    Table hiveView = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(path1.getDatabaseName(), path1.getObjectName());
    // mark as a view
    hiveView.setTableType(TableType.VIRTUAL_VIEW.name());
    final String originQuery = "view origin query";
    final String expandedQuery = "view expanded query";
    hiveView.setViewOriginalText(originQuery);
    hiveView.setViewExpandedText(expandedQuery);
    // set schema in SD
    Schema schema = Schema.newBuilder().fromFields(new String[] { "i", "s" }, new AbstractDataType[] { DataTypes.INT(), DataTypes.STRING() }).build();
    List<FieldSchema> fields = new ArrayList<>();
    for (Schema.UnresolvedColumn column : schema.getColumns()) {
        String name = column.getName();
        DataType type = (DataType) ((Schema.UnresolvedPhysicalColumn) column).getDataType();
        fields.add(new FieldSchema(name, HiveTypeUtil.toHiveTypeInfo(type, true).getTypeName(), null));
    }
    hiveView.getSd().setCols(fields);
    // test mark as non-generic with is_generic
    hiveView.getParameters().put(CatalogPropertiesUtil.IS_GENERIC, "false");
    // add some other properties
    hiveView.getParameters().put("k1", "v1");
    ((HiveCatalog) catalog).client.createTable(hiveView);
    CatalogBaseTable baseTable = catalog.getTable(path1);
    assertTrue(baseTable instanceof CatalogView);
    CatalogView catalogView = (CatalogView) baseTable;
    assertEquals(schema, catalogView.getUnresolvedSchema());
    assertEquals(originQuery, catalogView.getOriginalQuery());
    assertEquals(expandedQuery, catalogView.getExpandedQuery());
    assertEquals("v1", catalogView.getOptions().get("k1"));
    // test mark as non-generic with connector
    hiveView.setDbName(path3.getDatabaseName());
    hiveView.setTableName(path3.getObjectName());
    hiveView.getParameters().remove(CatalogPropertiesUtil.IS_GENERIC);
    hiveView.getParameters().put(CONNECTOR.key(), IDENTIFIER);
    ((HiveCatalog) catalog).client.createTable(hiveView);
    baseTable = catalog.getTable(path3);
    assertTrue(baseTable instanceof CatalogView);
    catalogView = (CatalogView) baseTable;
    assertEquals(schema, catalogView.getUnresolvedSchema());
    assertEquals(originQuery, catalogView.getOriginalQuery());
    assertEquals(expandedQuery, catalogView.getExpandedQuery());
    assertEquals("v1", catalogView.getOptions().get("k1"));
}
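
By contrast, a view created through the catalog API today takes the properties-based path that the test's opening comment alludes to. A rough sketch of that round trip, continuing in the test's context; path2 and the CatalogViewImpl constructor order (originalQuery, expandedQuery, schema, options, comment) are assumptions:

// hypothetical contrast to the legacy view above: a view created through the catalog API,
// whose schema is written to Flink properties rather than to the StorageDescriptor
CatalogView flinkView =
        new CatalogViewImpl(
                "view origin query",
                "view expanded query",
                TableSchema.builder()
                        .field("i", DataTypes.INT())
                        .field("s", DataTypes.STRING())
                        .build(),
                new HashMap<>(),
                "");
// assumes a path2 ObjectPath is available in the same database
catalog.createTable(path2, flinkView, false);
// reading it back still yields a CatalogView with the same query texts
CatalogView readBack = (CatalogView) catalog.getTable(path2);
assertEquals("view origin query", readBack.getOriginalQuery());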
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), CatalogTable (org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), SqlAlterHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveTable), Table (org.apache.hadoop.hive.metastore.api.Table), AbstractDataType (org.apache.flink.table.types.AbstractDataType), Schema (org.apache.flink.table.api.Schema), TableSchema (org.apache.flink.table.api.TableSchema), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), ArrayList (java.util.ArrayList), DataType (org.apache.flink.table.types.DataType), CatalogColumnStatisticsDataString (org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataString), CatalogView (org.apache.flink.table.catalog.CatalogView), Test (org.junit.Test)

Example 3 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in the Apache Flink project.

From class HiveParserDDLSemanticAnalyzer, method convertDropTable.

private Operation convertDropTable(HiveParserASTNode ast, TableType expectedType) {
    String tableName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    boolean ifExists = (ast.getFirstChildWithType(HiveASTParser.TOK_IFEXISTS) != null);
    ObjectIdentifier identifier = parseObjectIdentifier(tableName);
    CatalogBaseTable baseTable = getCatalogBaseTable(identifier, true);
    if (expectedType == TableType.VIRTUAL_VIEW) {
        if (baseTable instanceof CatalogTable) {
            throw new ValidationException("DROP VIEW for a table is not allowed");
        }
        return new DropViewOperation(identifier, ifExists, false);
    } else {
        if (baseTable instanceof CatalogView) {
            throw new ValidationException("DROP TABLE for a view is not allowed");
        }
        return new DropTableOperation(identifier, ifExists, false);
    }
}
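
How this guard surfaces to SQL users, sketched against the Hive dialect (illustrative only; executeSql may wrap the exception before it reaches the caller):

// assumes a tableEnv using the Hive dialect and an existing table tbl
tableEnv.executeSql("create view v2 as select x from tbl");
try {
    // DROP TABLE against a view reaches convertDropTable with expectedType != VIRTUAL_VIEW
    tableEnv.executeSql("drop table v2");
} catch (ValidationException e) {
    // "DROP TABLE for a view is not allowed"
}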
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ValidationException (org.apache.flink.table.api.ValidationException), DropViewOperation (org.apache.flink.table.operations.ddl.DropViewOperation), DropTableOperation (org.apache.flink.table.operations.ddl.DropTableOperation), CatalogTable (org.apache.flink.table.catalog.CatalogTable), CatalogView (org.apache.flink.table.catalog.CatalogView), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)

Example 4 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in the Apache Flink project.

From class HiveParserDDLSemanticAnalyzer, method getAlteredTable.

private CatalogBaseTable getAlteredTable(String tableName, boolean expectView) {
    ObjectIdentifier objectIdentifier = parseObjectIdentifier(tableName);
    CatalogBaseTable catalogBaseTable = getCatalogBaseTable(objectIdentifier);
    if (expectView) {
        if (catalogBaseTable instanceof CatalogTable) {
            throw new ValidationException("ALTER VIEW for a table is not allowed");
        }
    } else {
        if (catalogBaseTable instanceof CatalogView) {
            throw new ValidationException("ALTER TABLE for a view is not allowed");
        }
    }
    return catalogBaseTable;
}
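
The ALTER-side counterpart of the same mismatch check, again illustrative and assuming a view v2 already exists in the catalog:

try {
    // ALTER TABLE against a view reaches getAlteredTable with expectView == false
    tableEnv.executeSql("alter table v2 rename to v3");
} catch (ValidationException e) {
    // "ALTER TABLE for a view is not allowed"
}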
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ValidationException (org.apache.flink.table.api.ValidationException), CatalogTable (org.apache.flink.table.catalog.CatalogTable), CatalogView (org.apache.flink.table.catalog.CatalogView), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)

Example 5 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in the Apache Flink project.

From class HiveDialectITCase, method testView.

@Test
public void testView() throws Exception {
    tableEnv.executeSql("create table tbl (x int,y string)");
    // create
    tableEnv.executeSql("create view v(vx) comment 'v comment' tblproperties ('k1'='v1') as select x from tbl");
    ObjectPath viewPath = new ObjectPath("default", "v");
    CatalogBaseTable catalogBaseTable = hiveCatalog.getTable(viewPath);
    assertTrue(catalogBaseTable instanceof CatalogView);
    assertEquals("vx", catalogBaseTable.getUnresolvedSchema().getColumns().get(0).getName());
    assertEquals("v1", catalogBaseTable.getOptions().get("k1"));
    // change properties
    tableEnv.executeSql("alter view v set tblproperties ('k1'='v11')");
    catalogBaseTable = hiveCatalog.getTable(viewPath);
    assertEquals("v11", catalogBaseTable.getOptions().get("k1"));
    // change query
    tableEnv.executeSql("alter view v as select y from tbl");
    catalogBaseTable = hiveCatalog.getTable(viewPath);
    assertEquals("y", catalogBaseTable.getUnresolvedSchema().getColumns().get(0).getName());
    // rename
    tableEnv.executeSql("alter view v rename to v1");
    viewPath = new ObjectPath("default", "v1");
    assertTrue(hiveCatalog.tableExists(viewPath));
    // drop
    tableEnv.executeSql("drop view v1");
    assertFalse(hiveCatalog.tableExists(viewPath));
}
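
If the view is read back before the final drop, the catalog object also exposes the stored query texts through the CatalogView interface; a hypothetical extension of the test above:

// after the rename, viewPath points at default.v1 and the view still exists
CatalogView view = (CatalogView) hiveCatalog.getTable(viewPath);
System.out.println(view.getOriginalQuery()); // e.g. "select y from tbl"
System.out.println(view.getExpandedQuery()); // Hive's expanded form of the same query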
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), CatalogView (org.apache.flink.table.catalog.CatalogView), Test (org.junit.Test)

Aggregations

CatalogView (org.apache.flink.table.catalog.CatalogView): 12 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 10 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 8 usages
ValidationException (org.apache.flink.table.api.ValidationException): 7 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 7 usages
HashMap (java.util.HashMap): 6 usages
LinkedHashMap (java.util.LinkedHashMap): 5 usages
TableSchema (org.apache.flink.table.api.TableSchema): 5 usages
CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl): 4 usages
ArrayList (java.util.ArrayList): 3 usages
Map (java.util.Map): 3 usages
Schema (org.apache.flink.table.api.Schema): 3 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 3 usages
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable): 3 usages
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier): 3 usages
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation): 3 usages
AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation): 3 usages
List (java.util.List): 2 usages
RelHint (org.apache.calcite.rel.hint.RelHint): 2 usages
SqlNode (org.apache.calcite.sql.SqlNode): 2 usages