Example 6 with UniqueConstraint

Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.

From the class HiveCatalog, the method createTable:

@Override
public void createTable(ObjectPath tablePath, CatalogBaseTable table, boolean ignoreIfExists) throws TableAlreadyExistException, DatabaseNotExistException, CatalogException {
    checkNotNull(tablePath, "tablePath cannot be null");
    checkNotNull(table, "table cannot be null");
    if (!databaseExists(tablePath.getDatabaseName())) {
        throw new DatabaseNotExistException(getName(), tablePath.getDatabaseName());
    }
    boolean managedTable = ManagedTableListener.isManagedTable(this, table);
    Table hiveTable = HiveTableUtil.instantiateHiveTable(tablePath, table, hiveConf, managedTable);
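    // Collect the primary-key constraint and NOT NULL columns so they can be forwarded to the metastore.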
    UniqueConstraint pkConstraint = null;
    List<String> notNullCols = new ArrayList<>();
    boolean isHiveTable = isHiveTable(table.getOptions());
    if (isHiveTable) {
        pkConstraint = table.getSchema().getPrimaryKey().orElse(null);
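        // The DDL layer may have stashed the NOT NULL columns in the table parameters;
        // otherwise derive them from the schema's nullability.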
        String nnColStr = hiveTable.getParameters().remove(NOT_NULL_COLS);
        if (nnColStr != null) {
            notNullCols.addAll(Arrays.asList(nnColStr.split(HiveDDLUtils.COL_DELIMITER)));
        } else {
            for (int i = 0; i < table.getSchema().getFieldDataTypes().length; i++) {
                if (!table.getSchema().getFieldDataTypes()[i].getLogicalType().isNullable()) {
                    notNullCols.add(table.getSchema().getFieldNames()[i]);
                }
            }
        }
        // remove the 'connector' option for hive table
        hiveTable.getParameters().remove(CONNECTOR.key());
    }
    try {
        if (pkConstraint != null || !notNullCols.isEmpty()) {
            // extract constraint traits from table properties
            String pkTraitStr = hiveTable.getParameters().remove(PK_CONSTRAINT_TRAIT);
            byte pkTrait = pkTraitStr == null ? HiveDDLUtils.defaultTrait() : Byte.parseByte(pkTraitStr);
            List<Byte> pkTraits = Collections.nCopies(pkConstraint == null ? 0 : pkConstraint.getColumns().size(), pkTrait);
            List<Byte> nnTraits;
            String nnTraitsStr = hiveTable.getParameters().remove(NOT_NULL_CONSTRAINT_TRAITS);
            if (nnTraitsStr != null) {
                String[] traits = nnTraitsStr.split(HiveDDLUtils.COL_DELIMITER);
                Preconditions.checkArgument(traits.length == notNullCols.size(), "Number of NOT NULL columns and constraint traits mismatch");
                nnTraits = Arrays.stream(traits).map(Byte::valueOf).collect(Collectors.toList());
            } else {
                nnTraits = Collections.nCopies(notNullCols.size(), HiveDDLUtils.defaultTrait());
            }
            client.createTableWithConstraints(hiveTable, hiveConf, pkConstraint, pkTraits, notNullCols, nnTraits);
        } else {
            client.createTable(hiveTable);
        }
    } catch (AlreadyExistsException e) {
        if (!ignoreIfExists) {
            throw new TableAlreadyExistException(getName(), tablePath, e);
        }
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to create table %s", tablePath.getFullName()), e);
    }
}
Also used: TException (org.apache.thrift.TException), CatalogTable (org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), Table (org.apache.hadoop.hive.metastore.api.Table), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException), PartitionAlreadyExistsException (org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException), ArrayList (java.util.ArrayList), CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), TableAlreadyExistException (org.apache.flink.table.catalog.exceptions.TableAlreadyExistException), DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)
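
For orientation, a minimal sketch of a call site for this method. The hiveCatalog instance, the names, and the option values are illustrative assumptions, not code from the Flink project; per the 'connector' handling visible in the method above, the 'connector' = 'hive' option marks the table as a Hive table and is removed again before the table reaches the metastore:

TableSchema schema = TableSchema.builder()
        .field("id", DataTypes.INT().notNull())
        .field("name", DataTypes.STRING())
        // Names the PK constraint "pk_id" over column "id".
        .primaryKey("pk_id", new String[] { "id" })
        .build();
Map<String, String> options = Collections.singletonMap("connector", "hive");
CatalogBaseTable table = new CatalogTableImpl(schema, options, "demo table");
// hiveCatalog: an already-opened HiveCatalog instance (assumed).
// Throws TableAlreadyExistException unless ignoreIfExists is true.
hiveCatalog.createTable(new ObjectPath("db1", "tbl1"), table, false);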

Example 7 with UniqueConstraint

Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.

From the class HiveTableUtil, the method createTableSchema:

/**
 * Creates a Flink TableSchema from a Hive table's columns and partition keys.
 */
public static TableSchema createTableSchema(List<FieldSchema> cols, List<FieldSchema> partitionKeys, Set<String> notNullColumns, UniqueConstraint primaryKey) {
    List<FieldSchema> allCols = new ArrayList<>(cols);
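    // Partition keys are appended after the regular columns.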
    allCols.addAll(partitionKeys);
    String[] colNames = new String[allCols.size()];
    DataType[] colTypes = new DataType[allCols.size()];
    for (int i = 0; i < allCols.size(); i++) {
        FieldSchema fs = allCols.get(i);
        colNames[i] = fs.getName();
        colTypes[i] = HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(fs.getType()));
        if (notNullColumns.contains(colNames[i])) {
            colTypes[i] = colTypes[i].notNull();
        }
    }
    TableSchema.Builder builder = TableSchema.builder().fields(colNames, colTypes);
    if (primaryKey != null) {
        builder.primaryKey(primaryKey.getName(), primaryKey.getColumns().toArray(new String[0]));
    }
    return builder.build();
}
Also used: TableSchema (org.apache.flink.table.api.TableSchema), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), ArrayList (java.util.ArrayList), DataType (org.apache.flink.table.types.DataType), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint)
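
A short sketch of a call site, with made-up column names and types; it assumes UniqueConstraint.primaryKey(name, columns) as the factory for the constraint:

List<FieldSchema> cols = Arrays.asList(
        new FieldSchema("id", "int", null),
        new FieldSchema("name", "string", null));
List<FieldSchema> partitionKeys = Collections.singletonList(new FieldSchema("dt", "string", null));
// The PK column is also listed NOT NULL, since primary-key columns must be non-nullable.
Set<String> notNullColumns = new HashSet<>(Collections.singletonList("id"));
UniqueConstraint primaryKey = UniqueConstraint.primaryKey("pk_id", Collections.singletonList("id"));
TableSchema schema = HiveTableUtil.createTableSchema(cols, partitionKeys, notNullColumns, primaryKey);
// Resulting schema: id INT NOT NULL, name STRING, dt STRING, CONSTRAINT pk_id PRIMARY KEY (id)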

Example 8 with UniqueConstraint

Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.

From the class TableEnvHiveConnectorITCase, the test method testPKConstraint:

@Test
public void testPKConstraint() throws Exception {
    // While PK constraints are supported since Hive 2.1.0, they cannot be marked RELY in
    // 2.x versions, so only test against 3.x.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create database db1");
    try {
        // test rely PK constraints
        tableEnv.executeSql("create table db1.tbl1 (x tinyint,y smallint,z int, primary key (x,z) disable novalidate rely)");
        CatalogBaseTable catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl1"));
        TableSchema tableSchema = catalogTable.getSchema();
        assertTrue(tableSchema.getPrimaryKey().isPresent());
        UniqueConstraint pk = tableSchema.getPrimaryKey().get();
        assertEquals(2, pk.getColumns().size());
        assertTrue(pk.getColumns().containsAll(Arrays.asList("x", "z")));
        // test norely PK constraints
        tableEnv.executeSql("create table db1.tbl2 (x tinyint,y smallint, primary key (x) disable norely)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl2"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
        // test table w/o PK
        tableEnv.executeSql("create table db1.tbl3 (x tinyint)");
        catalogTable = hiveCatalog.getTable(new ObjectPath("db1", "tbl3"));
        tableSchema = catalogTable.getSchema();
        assertFalse(tableSchema.getPrimaryKey().isPresent());
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ObjectPath (org.apache.flink.table.catalog.ObjectPath), TableSchema (org.apache.flink.table.api.TableSchema), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), TableEnvironment (org.apache.flink.table.api.TableEnvironment), Test (org.junit.Test)
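
As the assertions in this test show, only a primary key declared RELY surfaces in the Flink TableSchema; the NORELY constraint on tbl2 comes back as a table without a primary key, the same as tbl3, which has none.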

Example 9 with UniqueConstraint

Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.

From the class TableSchemaUtils, the method dropConstraint:

/**
 * Creates a new schema with the constraint of the given name dropped.
 */
public static TableSchema dropConstraint(TableSchema oriSchema, String constraintName) {
    // Validate that a constraint with the given name exists (only the primary key is supported here).
    Optional<UniqueConstraint> uniqueConstraintOpt = oriSchema.getPrimaryKey();
    if (!uniqueConstraintOpt.isPresent() || !uniqueConstraintOpt.get().getName().equals(constraintName)) {
        throw new ValidationException(String.format("Constraint %s to drop does not exist", constraintName));
    }
    TableSchema.Builder builder = builderWithGivenColumns(oriSchema.getTableColumns());
    // Copy watermark specification.
    for (WatermarkSpec wms : oriSchema.getWatermarkSpecs()) {
        builder.watermark(wms.getRowtimeAttribute(), wms.getWatermarkExpr(), wms.getWatermarkExprOutputType());
    }
    return builder.build();
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), TableSchema (org.apache.flink.table.api.TableSchema), WatermarkSpec (org.apache.flink.table.api.WatermarkSpec), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint)
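
A sketch of the round trip, using a schema with a named primary key as in the earlier examples; the names are illustrative:

TableSchema withPk = TableSchema.builder()
        .field("id", DataTypes.INT().notNull())
        .field("name", DataTypes.STRING())
        .primaryKey("pk_id", new String[] { "id" })
        .build();
// Dropping by the constraint's name removes the primary key but keeps columns and watermark specs.
TableSchema withoutPk = TableSchemaUtils.dropConstraint(withPk, "pk_id");
// Any other name throws ValidationException: "Constraint %s to drop does not exist".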

Aggregations

UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 9
TableSchema (org.apache.flink.table.api.TableSchema): 5
ArrayList (java.util.ArrayList): 4
Test (org.junit.Test): 4
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 2
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 2
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 2
Constructor (java.lang.reflect.Constructor): 1
HashMap (java.util.HashMap): 1
HashSet (java.util.HashSet): 1
LinkedHashMap (java.util.LinkedHashMap): 1
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 1
TableEnvironment (org.apache.flink.table.api.TableEnvironment): 1
ValidationException (org.apache.flink.table.api.ValidationException): 1
WatermarkSpec (org.apache.flink.table.api.WatermarkSpec): 1
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 1
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 1
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 1
DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException): 1
PartitionAlreadyExistsException (org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException): 1