Search in sources:

Example 6 with CatalogBaseTable

Example usage of org.apache.flink.table.catalog.CatalogBaseTable in the Apache Flink project.

Source: class HiveDialectITCase, method testView.

@Test
public void testView() throws Exception {
    tableEnv.executeSql("create table tbl (x int,y string)");
    // Create a view with an explicit column name, a comment, and table properties.
    tableEnv.executeSql("create view v(vx) comment 'v comment' tblproperties ('k1'='v1') as select x from tbl");
    ObjectPath path = new ObjectPath("default", "v");
    CatalogBaseTable view = hiveCatalog.getTable(path);
    assertTrue(view instanceof CatalogView);
    assertEquals("vx", view.getUnresolvedSchema().getColumns().get(0).getName());
    assertEquals("v1", view.getOptions().get("k1"));
    // Altering properties overwrites the existing value of k1.
    tableEnv.executeSql("alter view v set tblproperties ('k1'='v11')");
    view = hiveCatalog.getTable(path);
    assertEquals("v11", view.getOptions().get("k1"));
    // Altering the query replaces the view's schema with the new projection.
    tableEnv.executeSql("alter view v as select y from tbl");
    view = hiveCatalog.getTable(path);
    assertEquals("y", view.getUnresolvedSchema().getColumns().get(0).getName());
    // Renaming moves the view to a new object path.
    tableEnv.executeSql("alter view v rename to v1");
    path = new ObjectPath("default", "v1");
    assertTrue(hiveCatalog.tableExists(path));
    // Dropping removes the view from the catalog.
    tableEnv.executeSql("drop view v1");
    assertFalse(hiveCatalog.tableExists(path));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CatalogView(org.apache.flink.table.catalog.CatalogView) Test(org.junit.Test)

Example 7 with CatalogBaseTable

Example usage of org.apache.flink.table.catalog.CatalogBaseTable in the Apache Flink project.

Source: class HiveCatalog, method getTable.

// ------ tables ------
/**
 * Looks up the Hive metastore table at the given path and converts it into a
 * Flink {@link CatalogBaseTable}.
 *
 * @param tablePath path of the table to retrieve; must not be null
 * @return the catalog representation of the Hive table
 * @throws TableNotExistException if no table exists at the given path
 * @throws CatalogException on generic catalog failures
 */
@Override
public CatalogBaseTable getTable(ObjectPath tablePath) throws TableNotExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    // Fetch the raw metastore Table, then translate it to the Flink catalog model.
    return instantiateCatalogTable(getHiveTable(tablePath));
}
Also used : CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.metastore.api.Table) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable)

Example 8 with CatalogBaseTable

Example usage of org.apache.flink.table.catalog.CatalogBaseTable in the Apache Flink project.

Source: class PostgresCatalogTest, method testPrimitiveDataTypes.

@Test
public void testPrimitiveDataTypes() throws TableNotExistException {
    // Fetching the primitive-type table should yield the expected schema.
    ObjectPath path = new ObjectPath(PostgresCatalog.DEFAULT_DATABASE, TABLE_PRIMITIVE_TYPE);
    assertEquals(getPrimitiveTable().schema, catalog.getTable(path).getUnresolvedSchema());
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ObjectPath(org.apache.flink.table.catalog.ObjectPath) Test(org.junit.Test)

Example 9 with CatalogBaseTable

Example usage of org.apache.flink.table.catalog.CatalogBaseTable in the Apache Flink project.

Source: class PostgresCatalogTest, method testGetTable.

@Test
public void testGetTable() throws org.apache.flink.table.catalog.exceptions.TableNotExistException {
    // Every path below resolves to a table sharing the simple-table schema,
    // whether addressed with or without an explicit schema qualifier.
    Schema expected = getSimpleTable().schema;
    ObjectPath[] paths = {
        // postgres.public.user1
        new ObjectPath("postgres", TABLE1),
        new ObjectPath("postgres", "public.t1"),
        // testdb.public.user2
        new ObjectPath(TEST_DB, TABLE2),
        new ObjectPath(TEST_DB, "public.t2"),
        // testdb.testschema.user2
        new ObjectPath(TEST_DB, TEST_SCHEMA + ".t3")
    };
    for (ObjectPath path : paths) {
        assertEquals(expected, catalog.getTable(path).getUnresolvedSchema());
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ObjectPath(org.apache.flink.table.catalog.ObjectPath) Schema(org.apache.flink.table.api.Schema) Test(org.junit.Test)

Example 10 with CatalogBaseTable

Example usage of org.apache.flink.table.catalog.CatalogBaseTable in the Apache Flink project.

Source: class FlinkCalciteCatalogReader, method toPreparingTable.

/**
 * Translate this {@link CatalogSchemaTable} into Flink source table.
 *
 * <p>Dispatches on the concrete runtime type of the origin table: query-operation
 * views, legacy connector tables, plain views, and plain tables each get their
 * own conversion routine.
 *
 * @param relOptSchema the Calcite schema the resulting table belongs to
 * @param names fully qualified name of the table
 * @param rowType resolved row type of the table
 * @param schemaTable the catalog table to translate
 * @return the prepared table usable by the planner
 * @throws ValidationException if the table type is unsupported or a connector
 *     table has no source
 */
private static FlinkPreparingTableBase toPreparingTable(RelOptSchema relOptSchema, List<String> names, RelDataType rowType, CatalogSchemaTable schemaTable) {
    final ResolvedCatalogBaseTable<?> resolved = schemaTable.getContextResolvedTable().getResolvedTable();
    final CatalogBaseTable origin = resolved.getOrigin();
    // NOTE(review): the order of these checks matters — QueryOperationCatalogView
    // is presumably also a CatalogView and ConnectorCatalogTable a CatalogTable,
    // so the more specific checks must come first; confirm before reordering.
    if (origin instanceof QueryOperationCatalogView) {
        return convertQueryOperationView(relOptSchema, names, rowType, (QueryOperationCatalogView) origin);
    }
    if (origin instanceof ConnectorCatalogTable) {
        final ConnectorCatalogTable<?, ?> connectorTable = (ConnectorCatalogTable<?, ?>) origin;
        // Only connector tables backed by a legacy TableSource can be converted.
        if (!connectorTable.getTableSource().isPresent()) {
            throw new ValidationException("Cannot convert a connector table without source.");
        }
        return convertLegacyTableSource(relOptSchema, rowType, schemaTable.getContextResolvedTable().getIdentifier(), connectorTable, schemaTable.getStatistic(), schemaTable.isStreamingMode());
    }
    if (origin instanceof CatalogView) {
        return convertCatalogView(relOptSchema, names, rowType, schemaTable.getStatistic(), (CatalogView) origin);
    }
    if (origin instanceof CatalogTable) {
        return convertCatalogTable(relOptSchema, names, rowType, schemaTable);
    }
    throw new ValidationException("Unsupported table type: " + origin);
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ResolvedCatalogBaseTable(org.apache.flink.table.catalog.ResolvedCatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) QueryOperationCatalogView(org.apache.flink.table.catalog.QueryOperationCatalogView) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) CatalogTable(org.apache.flink.table.catalog.CatalogTable) CatalogView(org.apache.flink.table.catalog.CatalogView) QueryOperationCatalogView(org.apache.flink.table.catalog.QueryOperationCatalogView)

Aggregations

CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)34 ObjectPath (org.apache.flink.table.catalog.ObjectPath)17 CatalogTable (org.apache.flink.table.catalog.CatalogTable)15 Test (org.junit.Test)14 ValidationException (org.apache.flink.table.api.ValidationException)11 ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)10 CatalogView (org.apache.flink.table.catalog.CatalogView)9 TableSchema (org.apache.flink.table.api.TableSchema)8 Table (org.apache.hadoop.hive.metastore.api.Table)7 HashMap (java.util.HashMap)6 SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable)6 UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint)5 ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable)5 LinkedHashMap (java.util.LinkedHashMap)4 Map (java.util.Map)4 CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl)4 AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation)4 ArrayList (java.util.ArrayList)3 TableEnvironment (org.apache.flink.table.api.TableEnvironment)3 CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException)3