Example 1 with UniqueConstraint

Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.

From the class HiveCatalogHiveMetadataTest, method testCreateTableWithConstraints:

@Test
public void testCreateTableWithConstraints() throws Exception {
    // Constraint round trips through the Hive metastore are only exercised on Hive 3.1+.
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    HiveCatalog hiveCatalog = (HiveCatalog) catalog;
    hiveCatalog.createDatabase(db1, createDb(), false);
    TableSchema.Builder builder = TableSchema.builder();
    builder.fields(
            new String[] { "x", "y", "z" },
            new DataType[] {
                DataTypes.INT().notNull(), DataTypes.TIMESTAMP(9).notNull(), DataTypes.BIGINT()
            });
    builder.primaryKey("pk_name", new String[] { "x" });
    hiveCatalog.createTable(path1, new CatalogTableImpl(builder.build(), getBatchTableProperties(), null), false);
    // Read the table back and check that the PK and NOT NULL constraints survived.
    CatalogTable catalogTable = (CatalogTable) hiveCatalog.getTable(path1);
    assertTrue("PK not present", catalogTable.getSchema().getPrimaryKey().isPresent());
    UniqueConstraint pk = catalogTable.getSchema().getPrimaryKey().get();
    assertEquals("pk_name", pk.getName());
    assertEquals(Collections.singletonList("x"), pk.getColumns());
    // "x" is NOT NULL as the PK column, "y" was declared NOT NULL, "z" stays nullable.
    assertFalse(catalogTable.getSchema().getFieldDataTypes()[0].getLogicalType().isNullable());
    assertFalse(catalogTable.getSchema().getFieldDataTypes()[1].getLogicalType().isNullable());
    assertTrue(catalogTable.getSchema().getFieldDataTypes()[2].getLogicalType().isNullable());
    hiveCatalog.dropDatabase(db1, false, true);
}
Also used: TableSchema (org.apache.flink.table.api.TableSchema), CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), CatalogTable (org.apache.flink.table.catalog.CatalogTable), Test (org.junit.Test)
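
For orientation, here is a minimal standalone sketch of the same pattern without the Hive catalog: declare a primary key on a TableSchema and read it back as a UniqueConstraint. The column and constraint names are illustrative, not taken from the test.

import java.util.Arrays;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.constraints.UniqueConstraint;

public class PrimaryKeySketch {
    public static void main(String[] args) {
        // Declare a schema whose NOT NULL "id" column backs a named primary key.
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.INT().notNull())
                .field("name", DataTypes.STRING())
                .primaryKey("pk_id", new String[] { "id" })
                .build();

        // getPrimaryKey() exposes the constraint as Optional<UniqueConstraint>.
        UniqueConstraint pk = schema.getPrimaryKey().orElseThrow(IllegalStateException::new);
        System.out.println(pk.getName());    // pk_id
        System.out.println(pk.getColumns()); // [id]

        // Constraints built through the builder and through the factory compare equal,
        // which is what the TableSchemaTest examples further down rely on.
        System.out.println(pk.equals(UniqueConstraint.primaryKey("pk_id", Arrays.asList("id")))); // true
    }
}

Note that getPrimaryKey() returns an Optional, so callers must handle the no-PK case explicitly.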

Example 2 with UniqueConstraint

Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertCreateTable:

private CreateTableOperation convertCreateTable(
        String compoundName,
        boolean isExternal,
        boolean ifNotExists,
        boolean isTemporary,
        List<FieldSchema> cols,
        List<FieldSchema> partCols,
        String comment,
        String location,
        Map<String, String> tblProps,
        HiveParserRowFormatParams rowFormatParams,
        HiveParserStorageFormat storageFormat,
        List<PrimaryKey> primaryKeys,
        List<NotNullConstraint> notNullConstraints) {
    Map<String, String> props = new HashMap<>();
    if (tblProps != null) {
        props.putAll(tblProps);
    }
    markHiveConnector(props);
    // external
    if (isExternal) {
        props.put(TABLE_IS_EXTERNAL, "true");
    }
    // PK trait: pack the enable/validate/rely flags into a single byte
    // (the packing pattern is sketched after this example)
    UniqueConstraint uniqueConstraint = null;
    if (primaryKeys != null && !primaryKeys.isEmpty()) {
        PrimaryKey primaryKey = primaryKeys.get(0);
        byte trait = 0;
        if (primaryKey.isEnable()) {
            trait = HiveDDLUtils.enableConstraint(trait);
        }
        if (primaryKey.isValidate()) {
            trait = HiveDDLUtils.validateConstraint(trait);
        }
        if (primaryKey.isRely()) {
            trait = HiveDDLUtils.relyConstraint(trait);
        }
        props.put(PK_CONSTRAINT_TRAIT, String.valueOf(trait));
        List<String> pkCols = primaryKeys.stream().map(PrimaryKey::getPk).collect(Collectors.toList());
        String constraintName = primaryKey.getConstraintName();
        if (constraintName == null) {
            // no explicit name given: generate one of the form PK_<col1>_<col2>_...
            constraintName = pkCols.stream().collect(Collectors.joining("_", "PK_", ""));
        }
        uniqueConstraint = UniqueConstraint.primaryKey(constraintName, pkCols);
    }
    // NOT NULL constraints
    List<String> notNullCols = new ArrayList<>();
    if (!notNullConstraints.isEmpty()) {
        List<String> traits = new ArrayList<>();
        for (NotNullConstraint notNull : notNullConstraints) {
            byte trait = 0;
            if (notNull.isEnable()) {
                trait = HiveDDLUtils.enableConstraint(trait);
            }
            if (notNull.isValidate()) {
                trait = HiveDDLUtils.validateConstraint(trait);
            }
            if (notNull.isRely()) {
                trait = HiveDDLUtils.relyConstraint(trait);
            }
            traits.add(String.valueOf(trait));
            notNullCols.add(notNull.getColName());
        }
        props.put(NOT_NULL_CONSTRAINT_TRAITS, String.join(COL_DELIMITER, traits));
        props.put(NOT_NULL_COLS, String.join(COL_DELIMITER, notNullCols));
    }
    // row format
    if (rowFormatParams != null) {
        encodeRowFormat(rowFormatParams, props);
    }
    // storage format
    if (storageFormat != null) {
        encodeStorageFormat(storageFormat, props);
    }
    // location
    if (location != null) {
        props.put(TABLE_LOCATION_URI, location);
    }
    ObjectIdentifier identifier = parseObjectIdentifier(compoundName);
    Set<String> notNullColSet = new HashSet<>(notNullCols);
    if (uniqueConstraint != null) {
        notNullColSet.addAll(uniqueConstraint.getColumns());
    }
    TableSchema tableSchema = HiveTableUtil.createTableSchema(cols, partCols, notNullColSet, uniqueConstraint);
    return new CreateTableOperation(identifier, new CatalogTableImpl(tableSchema, HiveCatalog.getFieldNames(partCols), props, comment), ifNotExists, isTemporary);
}
Also used: TableSchema (org.apache.flink.table.api.TableSchema), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), PrimaryKey (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.PrimaryKey), CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation), CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl), NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier), HashSet (java.util.HashSet)
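
The converter above packs each constraint's enable/validate/rely flags into a single byte via HiveDDLUtils before storing them as table properties. The concrete bit layout is internal to Flink, so the positions in this sketch are assumptions; it only illustrates the packing pattern that the converter and the HiveTableUtil.requireXxxConstraint checks in Example 3 rely on.

// A sketch of packing constraint traits into one byte. The bit positions are
// assumed for illustration and need not match Flink's actual HiveDDLUtils layout.
final class ConstraintTraitSketch {
    private static final byte ENABLE = 1;        // bit 0 (assumed)
    private static final byte VALIDATE = 1 << 1; // bit 1 (assumed)
    private static final byte RELY = 1 << 2;     // bit 2 (assumed)

    // Counterpart of the HiveDDLUtils.enableConstraint/validateConstraint/relyConstraint chain.
    static byte encode(boolean enable, boolean validate, boolean rely) {
        byte trait = 0;
        if (enable) {
            trait |= ENABLE;
        }
        if (validate) {
            trait |= VALIDATE;
        }
        if (rely) {
            trait |= RELY;
        }
        return trait;
    }

    // Counterpart of the HiveTableUtil.requireEnableConstraint-style checks.
    static boolean isSet(byte trait, byte flag) {
        return (trait & flag) != 0;
    }
}

A reader on the catalog side would then re-derive the three booleans with isSet before handing them to the metastore client.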

Example 3 with UniqueConstraint

Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.

From the class HiveShimV210, method createHivePKs:

List<Object> createHivePKs(Table table, UniqueConstraint pk, List<Byte> traits)
        throws ClassNotFoundException, IllegalAccessException, InstantiationException,
                NoSuchMethodException, InvocationTargetException {
    List<Object> res = new ArrayList<>();
    if (pk != null) {
        // SQLPrimaryKey is loaded reflectively so that this shim also compiles
        // against older Hive versions whose metastore API lacks the class.
        Class pkClz = Class.forName("org.apache.hadoop.hive.metastore.api.SQLPrimaryKey");
        // The SQLPrimaryKey constructor takes dbName, tableName, colName, keySeq,
        // pkName, enable, validate, rely.
        Constructor constructor = pkClz.getConstructor(
                String.class, String.class, String.class, int.class,
                String.class, boolean.class, boolean.class, boolean.class);
        int seq = 1;
        Preconditions.checkArgument(pk.getColumns().size() == traits.size(), "Number of PK columns and traits mismatch");
        for (int i = 0; i < pk.getColumns().size(); i++) {
            String col = pk.getColumns().get(i);
            byte trait = traits.get(i);
            boolean enable = HiveTableUtil.requireEnableConstraint(trait);
            boolean validate = HiveTableUtil.requireValidateConstraint(trait);
            boolean rely = HiveTableUtil.requireRelyConstraint(trait);
            Object hivePK = constructor.newInstance(table.getDbName(), table.getTableName(), col, seq++, pk.getName(), enable, validate, rely);
            res.add(hivePK);
        }
    }
    return res;
}
Also used: Constructor (java.lang.reflect.Constructor), ArrayList (java.util.ArrayList), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint)
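
For reference, a self-contained sketch of the same reflective pattern, assuming a Hive metastore API jar (2.1 or later) on the classpath; the database, table, and constraint names are made up and no running metastore is involved:

import java.lang.reflect.Constructor;

public class ReflectivePkSketch {
    public static void main(String[] args) throws Exception {
        // Look up the class by name so the code also loads where the class is
        // absent at compile time, then select the constructor by exact signature.
        Class<?> clz = Class.forName("org.apache.hadoop.hive.metastore.api.SQLPrimaryKey");
        Constructor<?> ctor = clz.getConstructor(
                String.class, String.class, String.class, int.class,
                String.class, boolean.class, boolean.class, boolean.class);
        // Arguments: dbName, tableName, colName, keySeq, pkName, enable, validate, rely.
        Object hivePk = ctor.newInstance("db", "tbl", "id", 1, "pk_id", true, false, true);
        System.out.println(hivePk);
    }
}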

Example 4 with UniqueConstraint

Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.

From the class TableSchemaTest, method testPrimaryKeyLazilyDefinedColumns:

@Test
public void testPrimaryKeyLazilyDefinedColumns() {
    TableSchema schema = TableSchema.builder()
            .field("f0", DataTypes.BIGINT().notNull())
            // "f2" is referenced before it is declared; the constraint is only
            // validated once build() runs.
            .primaryKey("pk", new String[] { "f0", "f2" })
            .field("f1", DataTypes.STRING().notNull())
            .field("f2", DataTypes.DOUBLE().notNull())
            .build();
    UniqueConstraint expectedKey = UniqueConstraint.primaryKey("pk", Arrays.asList("f0", "f2"));
    assertThat(schema.getPrimaryKey().get(), equalTo(expectedKey));
}
Also used: UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), Test (org.junit.Test)

Example 5 with UniqueConstraint

Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.

From the class TableSchemaTest, method testPrimaryKeyColumnsIndices:

@Test
public void testPrimaryKeyColumnsIndices() {
    TableSchema schema = TableSchema.builder()
            .field("f0", DataTypes.BIGINT().notNull())
            .field("f1", DataTypes.STRING().notNull())
            .field("f2", DataTypes.DOUBLE().notNull())
            .primaryKey("pk", new String[] { "f0", "f2" })
            .build();
    UniqueConstraint expectedKey = UniqueConstraint.primaryKey("pk", Arrays.asList("f0", "f2"));
    assertThat(schema.getPrimaryKey().get(), equalTo(expectedKey));
}
Also used: UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), Test (org.junit.Test)
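
Examples 4 and 5 together show that the order of primaryKey() relative to field() on the builder does not matter. A minimal sketch combining both declaration orders, using the field and constraint names from the two tests:

import java.util.Arrays;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.constraints.UniqueConstraint;

public class PkDeclarationOrderSketch {
    public static void main(String[] args) {
        UniqueConstraint expected = UniqueConstraint.primaryKey("pk", Arrays.asList("f0", "f2"));

        // PK declared before all of its columns exist ("lazy" order, Example 4).
        TableSchema lazy = TableSchema.builder()
                .field("f0", DataTypes.BIGINT().notNull())
                .primaryKey("pk", new String[] { "f0", "f2" })
                .field("f1", DataTypes.STRING().notNull())
                .field("f2", DataTypes.DOUBLE().notNull())
                .build();

        // PK declared after all columns ("eager" order, Example 5).
        TableSchema eager = TableSchema.builder()
                .field("f0", DataTypes.BIGINT().notNull())
                .field("f1", DataTypes.STRING().notNull())
                .field("f2", DataTypes.DOUBLE().notNull())
                .primaryKey("pk", new String[] { "f0", "f2" })
                .build();

        // Both orders yield the same constraint once build() validates it.
        System.out.println(expected.equals(lazy.getPrimaryKey().get()));  // true
        System.out.println(expected.equals(eager.getPrimaryKey().get())); // true
    }
}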

Aggregations

UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 9 usages
TableSchema (org.apache.flink.table.api.TableSchema): 5 usages
ArrayList (java.util.ArrayList): 4 usages
Test (org.junit.Test): 4 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 2 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 2 usages
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 2 usages
Constructor (java.lang.reflect.Constructor): 1 usage
HashMap (java.util.HashMap): 1 usage
HashSet (java.util.HashSet): 1 usage
LinkedHashMap (java.util.LinkedHashMap): 1 usage
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 1 usage
TableEnvironment (org.apache.flink.table.api.TableEnvironment): 1 usage
ValidationException (org.apache.flink.table.api.ValidationException): 1 usage
WatermarkSpec (org.apache.flink.table.api.WatermarkSpec): 1 usage
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 1 usage
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 1 usage
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 1 usage
DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException): 1 usage
PartitionAlreadyExistsException (org.apache.flink.table.catalog.exceptions.PartitionAlreadyExistsException): 1 usage