
Example 46 with CatalogBaseTable

use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by splunk.

the class FlinkCalciteCatalogReader method toPreparingTable.

/**
 * Translate this {@link CatalogSchemaTable} into a Flink source table.
 */
private static FlinkPreparingTableBase toPreparingTable(RelOptSchema relOptSchema, List<String> names, RelDataType rowType, CatalogSchemaTable schemaTable) {
    final ResolvedCatalogBaseTable<?> resolvedBaseTable = schemaTable.getContextResolvedTable().getResolvedTable();
    final CatalogBaseTable originTable = resolvedBaseTable.getOrigin();
    if (originTable instanceof QueryOperationCatalogView) {
        return convertQueryOperationView(relOptSchema, names, rowType, (QueryOperationCatalogView) originTable);
    } else if (originTable instanceof ConnectorCatalogTable) {
        ConnectorCatalogTable<?, ?> connectorTable = (ConnectorCatalogTable<?, ?>) originTable;
        if (connectorTable.getTableSource().isPresent()) {
            return convertLegacyTableSource(relOptSchema, rowType, schemaTable.getContextResolvedTable().getIdentifier(), connectorTable, schemaTable.getStatistic(), schemaTable.isStreamingMode());
        } else {
            throw new ValidationException("Cannot convert a connector table " + "without source.");
        }
    } else if (originTable instanceof CatalogView) {
        return convertCatalogView(relOptSchema, names, rowType, schemaTable.getStatistic(), (CatalogView) originTable);
    } else if (originTable instanceof CatalogTable) {
        return convertCatalogTable(relOptSchema, names, rowType, schemaTable);
    } else {
        throw new ValidationException("Unsupported table type: " + originTable);
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ResolvedCatalogBaseTable(org.apache.flink.table.catalog.ResolvedCatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) QueryOperationCatalogView(org.apache.flink.table.catalog.QueryOperationCatalogView) CatalogTable(org.apache.flink.table.catalog.CatalogTable) CatalogView(org.apache.flink.table.catalog.CatalogView)
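For context on the dispatch above: each branch corresponds to how the table was originally registered in the catalog. The following is a minimal, illustrative sketch (not taken from the example source) of how the three origin-table kinds typically arise through the Table API; the table names and the 'datagen' connector are assumptions.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class OriginTableKindsSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // Registered as a CatalogTable (handled by convertCatalogTable above).
        tEnv.executeSql("CREATE TABLE orders (id BIGINT) WITH ('connector' = 'datagen')");
        // Registered as a CatalogView, i.e. a SQL-defined view (convertCatalogView).
        tEnv.executeSql("CREATE VIEW big_orders AS SELECT * FROM orders WHERE id > 10");
        // Registered as a QueryOperationCatalogView, i.e. a Table-API-defined view
        // (convertQueryOperationView).
        tEnv.createTemporaryView("recent_orders", tEnv.from("orders"));
    }
}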

Example 47 with CatalogBaseTable

use of org.apache.flink.table.catalog.CatalogBaseTable in project hudi by apache.

the class TestHoodieCatalog method testGetTable.

@Test
public void testGetTable() throws Exception {
    ObjectPath tablePath = new ObjectPath(TEST_DEFAULT_DATABASE, "tb1");
    // create table
    catalog.createTable(tablePath, EXPECTED_CATALOG_TABLE, true);
    Map<String, String> expectedOptions = new HashMap<>(EXPECTED_OPTIONS);
    expectedOptions.put(FlinkOptions.TABLE_TYPE.key(), FlinkOptions.TABLE_TYPE_MERGE_ON_READ);
    expectedOptions.put(FlinkOptions.INDEX_GLOBAL_ENABLED.key(), "false");
    expectedOptions.put(FlinkOptions.PRE_COMBINE.key(), "true");
    expectedOptions.put("connector", "hudi");
    expectedOptions.put(FlinkOptions.PATH.key(), String.format("%s/%s/%s", tempFile.getAbsolutePath(), tablePath.getDatabaseName(), tablePath.getObjectName()));
    // test get table
    CatalogBaseTable actualTable = catalog.getTable(tablePath);
    // validate schema
    Schema actualSchema = actualTable.getUnresolvedSchema();
    Schema expectedSchema = Schema.newBuilder().fromResolvedSchema(EXPECTED_TABLE_SCHEMA).build();
    assertEquals(expectedSchema, actualSchema);
    // validate options
    Map<String, String> actualOptions = actualTable.getOptions();
    assertEquals(expectedOptions, actualOptions);
    // validate comment
    assertEquals(EXPECTED_CATALOG_TABLE.getComment(), actualTable.getComment());
    // validate partition key
    assertEquals(EXPECTED_CATALOG_TABLE.getPartitionKeys(), ((CatalogTable) actualTable).getPartitionKeys());
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HashMap(java.util.HashMap) Schema(org.apache.flink.table.api.Schema) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) Test(org.junit.jupiter.api.Test)
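The create-then-read-back pattern in this test can be tried without a Hudi setup. Below is a minimal sketch using Flink's built-in GenericInMemoryCatalog; the catalog name, database, schema, and options are illustrative assumptions and not part of the Hudi test above.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.ObjectPath;

public class CatalogRoundTripSketch {
    public static void main(String[] args) throws Exception {
        // In-memory catalog with a default database; both names are illustrative.
        GenericInMemoryCatalog catalog = new GenericInMemoryCatalog("demo_catalog", "default_db");
        ObjectPath path = new ObjectPath("default_db", "tb1");

        Map<String, String> options = new HashMap<>();
        options.put("connector", "datagen");

        Schema schema = Schema.newBuilder()
                .column("id", DataTypes.BIGINT())
                .column("name", DataTypes.STRING())
                .build();
        CatalogTable table = CatalogTable.of(schema, "demo table", Collections.emptyList(), options);

        // create table
        catalog.createTable(path, table, false);

        // read it back and inspect schema, options and comment, as the test does
        CatalogBaseTable actual = catalog.getTable(path);
        System.out.println(actual.getUnresolvedSchema());
        System.out.println(actual.getOptions());
        System.out.println(actual.getComment());
    }
}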

Example 48 with CatalogBaseTable

use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

the class HiveCatalog method createTable.

@Override
public void createTable(ObjectPath tablePath, CatalogBaseTable table, boolean ignoreIfExists) throws TableAlreadyExistException, DatabaseNotExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    checkNotNull(table, "table cannot be null");
    if (!databaseExists(tablePath.getDatabaseName())) {
        throw new DatabaseNotExistException(getName(), tablePath.getDatabaseName());
    }
    boolean managedTable = ManagedTableListener.isManagedTable(this, table);
    Table hiveTable = HiveTableUtil.instantiateHiveTable(tablePath, table, hiveConf, managedTable);
    UniqueConstraint pkConstraint = null;
    List<String> notNullCols = new ArrayList<>();
    boolean isHiveTable = isHiveTable(table.getOptions());
    if (isHiveTable) {
        pkConstraint = table.getSchema().getPrimaryKey().orElse(null);
        String nnColStr = hiveTable.getParameters().remove(NOT_NULL_COLS);
        if (nnColStr != null) {
            notNullCols.addAll(Arrays.asList(nnColStr.split(HiveDDLUtils.COL_DELIMITER)));
        } else {
            for (int i = 0; i < table.getSchema().getFieldDataTypes().length; i++) {
                if (!table.getSchema().getFieldDataTypes()[i].getLogicalType().isNullable()) {
                    notNullCols.add(table.getSchema().getFieldNames()[i]);
                }
            }
        }
        // remove the 'connector' option for hive table
        hiveTable.getParameters().remove(CONNECTOR.key());
    }
    try {
        if (pkConstraint != null || !notNullCols.isEmpty()) {
            // extract constraint traits from table properties
            String pkTraitStr = hiveTable.getParameters().remove(PK_CONSTRAINT_TRAIT);
            byte pkTrait = pkTraitStr == null ? HiveDDLUtils.defaultTrait() : Byte.parseByte(pkTraitStr);
            List<Byte> pkTraits = Collections.nCopies(pkConstraint == null ? 0 : pkConstraint.getColumns().size(), pkTrait);
            List<Byte> nnTraits;
            String nnTraitsStr = hiveTable.getParameters().remove(NOT_NULL_CONSTRAINT_TRAITS);
            if (nnTraitsStr != null) {
                String[] traits = nnTraitsStr.split(HiveDDLUtils.COL_DELIMITER);
                Preconditions.checkArgument(traits.length == notNullCols.size(), "Number of NOT NULL columns and constraint traits mismatch");
                nnTraits = Arrays.stream(traits).map(Byte::new).collect(Collectors.toList());
            } else {
                nnTraits = Collections.nCopies(notNullCols.size(), HiveDDLUtils.defaultTrait());
            }
            client.createTableWithConstraints(hiveTable, hiveConf, pkConstraint, pkTraits, notNullCols, nnTraits);
        } else {
            client.createTable(hiveTable);
        }
    } catch (AlreadyExistsException e) {
        if (!ignoreIfExists) {
            throw new TableAlreadyExistException(getName(), tablePath, e);
        }
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to create table %s", tablePath.getFullName()), e);
    }
}
Also used : TException(org.apache.thrift.TException) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.metastore.api.Table) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) PartitionAlreadyExistsException(org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException) ArrayList(java.util.ArrayList) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)
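The NOT NULL fallback loop above can be exercised on its own, without a Hive metastore. The sketch below mirrors that loop against an illustrative schema; the field names are assumptions.

import java.util.ArrayList;
import java.util.List;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;

public class NotNullColumnsSketch {
    public static void main(String[] args) {
        // Illustrative schema: primary-key column declared NOT NULL, second column nullable.
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.BIGINT().notNull())
                .field("name", DataTypes.STRING())
                .primaryKey("id")
                .build();

        // Same detection logic as the fallback branch in createTable above.
        List<String> notNullCols = new ArrayList<>();
        for (int i = 0; i < schema.getFieldDataTypes().length; i++) {
            if (!schema.getFieldDataTypes()[i].getLogicalType().isNullable()) {
                notNullCols.add(schema.getFieldNames()[i]);
            }
        }
        // Prints [id].
        System.out.println(notNullCols);
    }
}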

Example 49 with CatalogBaseTable

use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

the class HiveCatalogTest method testRetrieveFlinkProperties.

@Test
public void testRetrieveFlinkProperties() throws Exception {
    ObjectPath hiveObjectPath = new ObjectPath(HiveCatalog.DEFAULT_DB, "testRetrieveProperties");
    Map<String, String> options = getLegacyFileSystemConnectorOptions("/test_path");
    options.put(CONNECTOR.key(), "jdbc");
    options.put("url", "jdbc:clickhouse://host:port/testUrl1");
    options.put("flink.url", "jdbc:clickhouse://host:port/testUrl2");
    hiveCatalog.createTable(hiveObjectPath, new CatalogTableImpl(schema, options, null), false);
    CatalogBaseTable hiveTable = hiveCatalog.getTable(hiveObjectPath);
    assertThat(hiveTable.getOptions()).containsEntry("url", "jdbc:clickhouse://host:port/testUrl1");
    assertThat(hiveTable.getOptions()).containsEntry("flink.url", "jdbc:clickhouse://host:port/testUrl2");
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) Test(org.junit.Test)

Example 50 with CatalogBaseTable

use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

the class HiveCatalogTest method testGetNoSchemaGenericTable.

@Test
public void testGetNoSchemaGenericTable() throws Exception {
    ObjectPath hiveObjectPath = new ObjectPath(HiveCatalog.DEFAULT_DB, "testGetNoSchemaGenericTable");
    Map<String, String> properties = new HashMap<>();
    properties.put(CONNECTOR.key(), "jdbc");
    hiveCatalog.createTable(hiveObjectPath, new CatalogTableImpl(TableSchema.builder().build(), properties, null), false);
    CatalogBaseTable catalogTable = hiveCatalog.getTable(hiveObjectPath);
    assertThat(catalogTable.getSchema()).isEqualTo(TableSchema.builder().build());
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HashMap(java.util.HashMap) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) Test(org.junit.Test)

Aggregations

CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 106
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 52
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 46
Test (org.junit.Test): 42
ValidationException (org.apache.flink.table.api.ValidationException): 33
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 30
CatalogView (org.apache.flink.table.catalog.CatalogView): 27
TableSchema (org.apache.flink.table.api.TableSchema): 24
Table (org.apache.hadoop.hive.metastore.api.Table): 21
HashMap (java.util.HashMap): 19
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 18
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 15
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 15
Map (java.util.Map): 13
LinkedHashMap (java.util.LinkedHashMap): 12
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 12
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation): 12
DropTableOperation (org.apache.flink.table.operations.ddl.DropTableOperation): 12
ArrayList (java.util.ArrayList): 9
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 9