
Example 76 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class HiveCatalogTest, method testGetNoSchemaGenericTable.

@Test
public void testGetNoSchemaGenericTable() throws Exception {
    ObjectPath hiveObjectPath = new ObjectPath(HiveCatalog.DEFAULT_DB, "testGetNoSchemaGenericTable");
    Map<String, String> properties = new HashMap<>();
    // the 'connector' option marks the table as generic (i.e. not a Hive table)
    properties.put(CONNECTOR.key(), "jdbc");
    // create a generic table with an empty schema and no comment
    hiveCatalog.createTable(hiveObjectPath, new CatalogTableImpl(TableSchema.builder().build(), properties, null), false);
    CatalogBaseTable catalogTable = hiveCatalog.getTable(hiveObjectPath);
    // reading the table back should still yield an empty schema
    assertThat(catalogTable.getSchema()).isEqualTo(TableSchema.builder().build());
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), HashMap (java.util.HashMap), CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl), Test (org.junit.Test)
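
The test hinges on the 'connector' option: a table whose options carry that key is treated as a generic (non-Hive) table, so its Flink schema is persisted in table properties rather than as Hive metastore columns. Below is a minimal sketch of that distinction, assuming only the static HiveCatalog.isHiveTable(Map) helper that Example 77 also calls; the class and values are illustrative, not part of the test.

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.catalog.hive.HiveCatalog;

// Illustrative sketch (not part of the test): the 'connector' option decides
// whether HiveCatalog treats a table as Hive-compatible or generic.
public class GenericVsHiveSketch {
    public static void main(String[] args) {
        Map<String, String> generic = new HashMap<>();
        generic.put("connector", "jdbc"); // any non-Hive connector
        System.out.println(HiveCatalog.isHiveTable(generic)); // expected: false

        Map<String, String> hive = new HashMap<>();
        hive.put("connector", "hive"); // the Hive connector identifier
        System.out.println(HiveCatalog.isHiveTable(hive)); // expected: true
    }
}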

Example 77 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class HiveCatalogGenericMetadataTest, method testTableSchemaCompatibility.

// NOTE: Be careful when modifying this test; it is important for backward compatibility.
@Test
public void testTableSchemaCompatibility() throws Exception {
    catalog.createDatabase(db1, createDb(), false);
    try {
        // table with numeric types
        ObjectPath tablePath = new ObjectPath(db1, "generic1");
        Table hiveTable = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(tablePath.getDatabaseName(), tablePath.getObjectName());
        hiveTable.setDbName(tablePath.getDatabaseName());
        hiveTable.setTableName(tablePath.getObjectName());
        setLegacyGeneric(hiveTable.getParameters());
        hiveTable.getParameters().put("flink.generic.table.schema.0.name", "ti");
        hiveTable.getParameters().put("flink.generic.table.schema.0.data-type", "TINYINT");
        hiveTable.getParameters().put("flink.generic.table.schema.1.name", "si");
        hiveTable.getParameters().put("flink.generic.table.schema.1.data-type", "SMALLINT");
        hiveTable.getParameters().put("flink.generic.table.schema.2.name", "i");
        hiveTable.getParameters().put("flink.generic.table.schema.2.data-type", "INT");
        hiveTable.getParameters().put("flink.generic.table.schema.3.name", "bi");
        hiveTable.getParameters().put("flink.generic.table.schema.3.data-type", "BIGINT");
        hiveTable.getParameters().put("flink.generic.table.schema.4.name", "f");
        hiveTable.getParameters().put("flink.generic.table.schema.4.data-type", "FLOAT");
        hiveTable.getParameters().put("flink.generic.table.schema.5.name", "d");
        hiveTable.getParameters().put("flink.generic.table.schema.5.data-type", "DOUBLE");
        hiveTable.getParameters().put("flink.generic.table.schema.6.name", "de");
        hiveTable.getParameters().put("flink.generic.table.schema.6.data-type", "DECIMAL(10, 5)");
        hiveTable.getParameters().put("flink.generic.table.schema.7.name", "cost");
        hiveTable.getParameters().put("flink.generic.table.schema.7.expr", "`d` * `bi`");
        hiveTable.getParameters().put("flink.generic.table.schema.7.data-type", "DOUBLE");
        ((HiveCatalog) catalog).client.createTable(hiveTable);
        CatalogBaseTable catalogBaseTable = catalog.getTable(tablePath);
        assertFalse(HiveCatalog.isHiveTable(catalogBaseTable.getOptions()));
        TableSchema expectedSchema =
                TableSchema.builder()
                        .fields(
                                new String[] {"ti", "si", "i", "bi", "f", "d", "de"},
                                new DataType[] {
                                    DataTypes.TINYINT(),
                                    DataTypes.SMALLINT(),
                                    DataTypes.INT(),
                                    DataTypes.BIGINT(),
                                    DataTypes.FLOAT(),
                                    DataTypes.DOUBLE(),
                                    DataTypes.DECIMAL(10, 5)
                                })
                        .field("cost", DataTypes.DOUBLE(), "`d` * `bi`")
                        .build();
        assertEquals(expectedSchema, catalogBaseTable.getSchema());
        // table with character types
        tablePath = new ObjectPath(db1, "generic2");
        hiveTable = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(tablePath.getDatabaseName(), tablePath.getObjectName());
        hiveTable.setDbName(tablePath.getDatabaseName());
        hiveTable.setTableName(tablePath.getObjectName());
        setLegacyGeneric(hiveTable.getParameters());
        hiveTable.getParameters().put("flink.generic.table.schema.0.name", "c");
        hiveTable.getParameters().put("flink.generic.table.schema.0.data-type", "CHAR(265)");
        hiveTable.getParameters().put("flink.generic.table.schema.1.name", "vc");
        hiveTable.getParameters().put("flink.generic.table.schema.1.data-type", "VARCHAR(65536)");
        hiveTable.getParameters().put("flink.generic.table.schema.2.name", "s");
        hiveTable.getParameters().put("flink.generic.table.schema.2.data-type", "VARCHAR(2147483647)");
        hiveTable.getParameters().put("flink.generic.table.schema.3.name", "b");
        hiveTable.getParameters().put("flink.generic.table.schema.3.data-type", "BINARY(1)");
        hiveTable.getParameters().put("flink.generic.table.schema.4.name", "vb");
        hiveTable.getParameters().put("flink.generic.table.schema.4.data-type", "VARBINARY(255)");
        hiveTable.getParameters().put("flink.generic.table.schema.5.name", "bs");
        hiveTable.getParameters().put("flink.generic.table.schema.5.data-type", "VARBINARY(2147483647)");
        hiveTable.getParameters().put("flink.generic.table.schema.6.name", "len");
        hiveTable.getParameters().put("flink.generic.table.schema.6.expr", "CHAR_LENGTH(`s`)");
        hiveTable.getParameters().put("flink.generic.table.schema.6.data-type", "INT");
        ((HiveCatalog) catalog).client.createTable(hiveTable);
        catalogBaseTable = catalog.getTable(tablePath);
        expectedSchema =
                TableSchema.builder()
                        .fields(
                                new String[] {"c", "vc", "s", "b", "vb", "bs"},
                                new DataType[] {
                                    DataTypes.CHAR(265),
                                    DataTypes.VARCHAR(65536),
                                    DataTypes.STRING(),
                                    DataTypes.BINARY(1),
                                    DataTypes.VARBINARY(255),
                                    DataTypes.BYTES()
                                })
                        .field("len", DataTypes.INT(), "CHAR_LENGTH(`s`)")
                        .build();
        assertEquals(expectedSchema, catalogBaseTable.getSchema());
        // table with date/time types
        tablePath = new ObjectPath(db1, "generic3");
        hiveTable = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(tablePath.getDatabaseName(), tablePath.getObjectName());
        hiveTable.setDbName(tablePath.getDatabaseName());
        hiveTable.setTableName(tablePath.getObjectName());
        setLegacyGeneric(hiveTable.getParameters());
        hiveTable.getParameters().put("flink.generic.table.schema.0.name", "dt");
        hiveTable.getParameters().put("flink.generic.table.schema.0.data-type", "DATE");
        hiveTable.getParameters().put("flink.generic.table.schema.1.name", "t");
        hiveTable.getParameters().put("flink.generic.table.schema.1.data-type", "TIME(0)");
        hiveTable.getParameters().put("flink.generic.table.schema.2.name", "ts");
        hiveTable.getParameters().put("flink.generic.table.schema.2.data-type", "TIMESTAMP(3)");
        hiveTable.getParameters().put("flink.generic.table.schema.3.name", "tstz");
        hiveTable.getParameters().put("flink.generic.table.schema.3.data-type", "TIMESTAMP(6) WITH LOCAL TIME ZONE");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.rowtime", "ts");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.strategy.data-type", "TIMESTAMP(3)");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.strategy.expr", "ts");
        ((HiveCatalog) catalog).client.createTable(hiveTable);
        catalogBaseTable = catalog.getTable(tablePath);
        expectedSchema =
                TableSchema.builder()
                        .fields(
                                new String[] {"dt", "t", "ts", "tstz"},
                                new DataType[] {
                                    DataTypes.DATE(),
                                    DataTypes.TIME(),
                                    DataTypes.TIMESTAMP(3),
                                    DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE()
                                })
                        .watermark("ts", "ts", DataTypes.TIMESTAMP(3))
                        .build();
        assertEquals(expectedSchema, catalogBaseTable.getSchema());
        // table with complex/misc types
        tablePath = new ObjectPath(db1, "generic4");
        hiveTable = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(tablePath.getDatabaseName(), tablePath.getObjectName());
        hiveTable.setDbName(tablePath.getDatabaseName());
        hiveTable.setTableName(tablePath.getObjectName());
        setLegacyGeneric(hiveTable.getParameters());
        hiveTable.getParameters().put("flink.generic.table.schema.0.name", "a");
        hiveTable.getParameters().put("flink.generic.table.schema.0.data-type", "ARRAY<INT>");
        hiveTable.getParameters().put("flink.generic.table.schema.1.name", "m");
        hiveTable.getParameters().put("flink.generic.table.schema.1.data-type", "MAP<BIGINT, TIMESTAMP(6)>");
        hiveTable.getParameters().put("flink.generic.table.schema.2.name", "mul");
        hiveTable.getParameters().put("flink.generic.table.schema.2.data-type", "MULTISET<DOUBLE>");
        hiveTable.getParameters().put("flink.generic.table.schema.3.name", "r");
        hiveTable.getParameters().put("flink.generic.table.schema.3.data-type", "ROW<`f1` INT, `f2` VARCHAR(2147483647)>");
        hiveTable.getParameters().put("flink.generic.table.schema.4.name", "b");
        hiveTable.getParameters().put("flink.generic.table.schema.4.data-type", "BOOLEAN");
        hiveTable.getParameters().put("flink.generic.table.schema.5.name", "ts");
        hiveTable.getParameters().put("flink.generic.table.schema.5.data-type", "TIMESTAMP(3)");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.rowtime", "ts");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.strategy.data-type", "TIMESTAMP(3)");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.strategy.expr", "`ts` - INTERVAL '5' SECOND");
        ((HiveCatalog) catalog).client.createTable(hiveTable);
        catalogBaseTable = catalog.getTable(tablePath);
        expectedSchema =
                TableSchema.builder()
                        .fields(
                                new String[] {"a", "m", "mul", "r", "b", "ts"},
                                new DataType[] {
                                    DataTypes.ARRAY(DataTypes.INT()),
                                    DataTypes.MAP(DataTypes.BIGINT(), DataTypes.TIMESTAMP()),
                                    DataTypes.MULTISET(DataTypes.DOUBLE()),
                                    DataTypes.ROW(
                                            DataTypes.FIELD("f1", DataTypes.INT()),
                                            DataTypes.FIELD("f2", DataTypes.STRING())),
                                    DataTypes.BOOLEAN(),
                                    DataTypes.TIMESTAMP(3)
                                })
                        .watermark("ts", "`ts` - INTERVAL '5' SECOND", DataTypes.TIMESTAMP(3))
                        .build();
        assertEquals(expectedSchema, catalogBaseTable.getSchema());
    } finally {
        catalog.dropDatabase(db1, true, true);
    }
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), CatalogTable (org.apache.flink.table.catalog.CatalogTable), Table (org.apache.hadoop.hive.metastore.api.Table), TableSchema (org.apache.flink.table.api.TableSchema), DataType (org.apache.flink.table.types.DataType), Test (org.junit.Test)
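
The legacy layout exercised above encodes each column as an indexed pair of keys, flink.generic.table.schema.<i>.name and flink.generic.table.schema.<i>.data-type, plus an optional .expr key for computed columns and a parallel watermark.* group. Below is a hedged sketch of a decoder for the plain name/type pairs; the key prefix is taken from the test, but the helper class itself is hypothetical.

import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical decoder for the legacy schema properties shown above. It only
// walks the <i>.name / <i>.data-type pairs; computed-column (.expr) and
// watermark keys would need the same indexed treatment.
public class LegacySchemaDecoder {
    private static final String PREFIX = "flink.generic.table.schema.";

    /** Returns column name -> serialized data type, in declaration order. */
    public static Map<String, String> decodeColumns(Map<String, String> params) {
        Map<String, String> columns = new LinkedHashMap<>();
        for (int i = 0; ; i++) {
            String name = params.get(PREFIX + i + ".name");
            String type = params.get(PREFIX + i + ".data-type");
            if (name == null || type == null) {
                break; // indices are dense, so the first gap ends the schema
            }
            columns.put(name, type);
        }
        return columns;
    }
}

Applied to the parameters of the first table, this would yield ti -> TINYINT, si -> SMALLINT, and so on, matching the expected schema the test builds.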

Example 78 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class HiveCatalogHiveMetadataTest, method testViewCompatibility.

@Test
public void testViewCompatibility() throws Exception {
    // we always store view schema via properties now
    // make sure non-generic views created previously can still be used
    catalog.createDatabase(db1, createDb(), false);
    Table hiveView = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(path1.getDatabaseName(), path1.getObjectName());
    // mark as a view
    hiveView.setTableType(TableType.VIRTUAL_VIEW.name());
    final String originQuery = "view origin query";
    final String expandedQuery = "view expanded query";
    hiveView.setViewOriginalText(originQuery);
    hiveView.setViewExpandedText(expandedQuery);
    // set schema in SD
    Schema schema = Schema.newBuilder().fromFields(new String[] { "i", "s" }, new AbstractDataType[] { DataTypes.INT(), DataTypes.STRING() }).build();
    List<FieldSchema> fields = new ArrayList<>();
    for (Schema.UnresolvedColumn column : schema.getColumns()) {
        String name = column.getName();
        DataType type = (DataType) ((Schema.UnresolvedPhysicalColumn) column).getDataType();
        fields.add(new FieldSchema(name, HiveTypeUtil.toHiveTypeInfo(type, true).getTypeName(), null));
    }
    hiveView.getSd().setCols(fields);
    // test mark as non-generic with is_generic
    hiveView.getParameters().put(CatalogPropertiesUtil.IS_GENERIC, "false");
    // add some other properties
    hiveView.getParameters().put("k1", "v1");
    ((HiveCatalog) catalog).client.createTable(hiveView);
    CatalogBaseTable baseTable = catalog.getTable(path1);
    assertTrue(baseTable instanceof CatalogView);
    CatalogView catalogView = (CatalogView) baseTable;
    assertEquals(schema, catalogView.getUnresolvedSchema());
    assertEquals(originQuery, catalogView.getOriginalQuery());
    assertEquals(expandedQuery, catalogView.getExpandedQuery());
    assertEquals("v1", catalogView.getOptions().get("k1"));
    // test mark as non-generic with connector
    hiveView.setDbName(path3.getDatabaseName());
    hiveView.setTableName(path3.getObjectName());
    hiveView.getParameters().remove(CatalogPropertiesUtil.IS_GENERIC);
    hiveView.getParameters().put(CONNECTOR.key(), IDENTIFIER);
    ((HiveCatalog) catalog).client.createTable(hiveView);
    baseTable = catalog.getTable(path3);
    assertTrue(baseTable instanceof CatalogView);
    catalogView = (CatalogView) baseTable;
    assertEquals(schema, catalogView.getUnresolvedSchema());
    assertEquals(originQuery, catalogView.getOriginalQuery());
    assertEquals(expandedQuery, catalogView.getExpandedQuery());
    assertEquals("v1", catalogView.getOptions().get("k1"));
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), CatalogTable (org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), SqlAlterHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlAlterHiveTable), Table (org.apache.hadoop.hive.metastore.api.Table), AbstractDataType (org.apache.flink.table.types.AbstractDataType), Schema (org.apache.flink.table.api.Schema), TableSchema (org.apache.flink.table.api.TableSchema), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), ArrayList (java.util.ArrayList), DataType (org.apache.flink.table.types.DataType), CatalogColumnStatisticsDataString (org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataString), CatalogView (org.apache.flink.table.catalog.CatalogView), Test (org.junit.Test)
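
The test demonstrates two ways a pre-existing metastore view is recognized as non-generic: the legacy is_generic flag set to "false", or the newer connector=hive option. Once the object is read back, its kind can also be inspected without instanceof checks; here is a short sketch under the assumption that only the public CatalogBaseTable API is available.

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.CatalogView;
import org.apache.flink.table.catalog.ObjectPath;

// Sketch: dispatch on the kind of catalog object. TableKind is the stable
// discriminator; the cast mirrors what the test asserts via instanceof.
public class ViewDispatchSketch {
    static String describe(Catalog catalog, ObjectPath path) throws Exception {
        CatalogBaseTable base = catalog.getTable(path);
        if (base.getTableKind() == CatalogBaseTable.TableKind.VIEW) {
            CatalogView view = (CatalogView) base;
            return "view, expanded query: " + view.getExpandedQuery();
        }
        return "table with options " + base.getOptions();
    }
}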

Example 79 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class PostgresCatalogTest, method testArrayDataTypes.

@Test
public void testArrayDataTypes() throws TableNotExistException {
    CatalogBaseTable table = catalog.getTable(new ObjectPath(PostgresCatalog.DEFAULT_DATABASE, TABLE_ARRAY_TYPE));
    assertEquals(getArrayTable().schema, table.getUnresolvedSchema());
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ObjectPath (org.apache.flink.table.catalog.ObjectPath), Test (org.junit.Test)

Example 80 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink-mirror by flink-ci.

From class PostgresCatalogTest, method testPrimitiveDataTypes.

@Test
public void testPrimitiveDataTypes() throws TableNotExistException {
    CatalogBaseTable table = catalog.getTable(new ObjectPath(PostgresCatalog.DEFAULT_DATABASE, TABLE_PRIMITIVE_TYPE));
    assertEquals(getPrimitiveTable().schema, table.getUnresolvedSchema());
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ObjectPath (org.apache.flink.table.catalog.ObjectPath), Test (org.junit.Test)
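
Both Postgres examples assert against getUnresolvedSchema(), the Schema-based accessor that supersedes the deprecated getSchema()/TableSchema pair used in the Hive examples above. A small sketch of walking that schema, using only calls already seen on this page:

import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogBaseTable;

// Sketch: list the column names of any table fetched from a catalog via the
// unresolved Schema (the non-deprecated counterpart of getSchema()).
public class SchemaInspectionSketch {
    static void printColumns(CatalogBaseTable table) {
        Schema schema = table.getUnresolvedSchema();
        for (Schema.UnresolvedColumn column : schema.getColumns()) {
            System.out.println(column.getName()); // e.g. "i" and "s" in Example 78
        }
    }
}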

Aggregations

CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 106 usages
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 52 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 46 usages
Test (org.junit.Test): 42 usages
ValidationException (org.apache.flink.table.api.ValidationException): 33 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 30 usages
CatalogView (org.apache.flink.table.catalog.CatalogView): 27 usages
TableSchema (org.apache.flink.table.api.TableSchema): 24 usages
Table (org.apache.hadoop.hive.metastore.api.Table): 21 usages
HashMap (java.util.HashMap): 19 usages
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 18 usages
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 15 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 15 usages
Map (java.util.Map): 13 usages
LinkedHashMap (java.util.LinkedHashMap): 12 usages
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 12 usages
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation): 12 usages
DropTableOperation (org.apache.flink.table.operations.ddl.DropTableOperation): 12 usages
ArrayList (java.util.ArrayList): 9 usages
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 9 usages