Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.
The class HiveCatalogHiveMetadataTest, method testCreateTableWithConstraints.
@Test
public void testCreateTableWithConstraints() throws Exception {
    Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
    HiveCatalog hiveCatalog = (HiveCatalog) catalog;
    hiveCatalog.createDatabase(db1, createDb(), false);
    TableSchema.Builder builder = TableSchema.builder();
    builder.fields(
            new String[] {"x", "y", "z"},
            new DataType[] {
                DataTypes.INT().notNull(), DataTypes.TIMESTAMP(9).notNull(), DataTypes.BIGINT()
            });
    builder.primaryKey("pk_name", new String[] {"x"});
    hiveCatalog.createTable(
            path1, new CatalogTableImpl(builder.build(), getBatchTableProperties(), null), false);
    CatalogTable catalogTable = (CatalogTable) hiveCatalog.getTable(path1);
    assertTrue("PK not present", catalogTable.getSchema().getPrimaryKey().isPresent());
    UniqueConstraint pk = catalogTable.getSchema().getPrimaryKey().get();
    assertEquals("pk_name", pk.getName());
    assertEquals(Collections.singletonList("x"), pk.getColumns());
    assertFalse(catalogTable.getSchema().getFieldDataTypes()[0].getLogicalType().isNullable());
    assertFalse(catalogTable.getSchema().getFieldDataTypes()[1].getLogicalType().isNullable());
    assertTrue(catalogTable.getSchema().getFieldDataTypes()[2].getLogicalType().isNullable());
    hiveCatalog.dropDatabase(db1, false, true);
}
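As a follow-up to the assertions above, a minimal sketch of reading the persisted constraint back through the public TableSchema API (reusing the catalogTable fetched in the test):

TableSchema schema = catalogTable.getSchema();
// getPrimaryKey() returns Optional<UniqueConstraint>; name and column order
// survive the round trip through the Hive metastore.
schema.getPrimaryKey()
        .ifPresent(pk -> System.out.println(pk.getName() + " -> " + pk.getColumns()));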
Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.
The class HiveParserDDLSemanticAnalyzer, method convertCreateTable.
private CreateTableOperation convertCreateTable(
        String compoundName,
        boolean isExternal,
        boolean ifNotExists,
        boolean isTemporary,
        List<FieldSchema> cols,
        List<FieldSchema> partCols,
        String comment,
        String location,
        Map<String, String> tblProps,
        HiveParserRowFormatParams rowFormatParams,
        HiveParserStorageFormat storageFormat,
        List<PrimaryKey> primaryKeys,
        List<NotNullConstraint> notNullConstraints) {
    Map<String, String> props = new HashMap<>();
    if (tblProps != null) {
        props.putAll(tblProps);
    }
    markHiveConnector(props);
    // external
    if (isExternal) {
        props.put(TABLE_IS_EXTERNAL, "true");
    }
    // PK trait
    UniqueConstraint uniqueConstraint = null;
    if (primaryKeys != null && !primaryKeys.isEmpty()) {
        PrimaryKey primaryKey = primaryKeys.get(0);
        byte trait = 0;
        if (primaryKey.isEnable()) {
            trait = HiveDDLUtils.enableConstraint(trait);
        }
        if (primaryKey.isValidate()) {
            trait = HiveDDLUtils.validateConstraint(trait);
        }
        if (primaryKey.isRely()) {
            trait = HiveDDLUtils.relyConstraint(trait);
        }
        props.put(PK_CONSTRAINT_TRAIT, String.valueOf(trait));
        List<String> pkCols =
                primaryKeys.stream().map(PrimaryKey::getPk).collect(Collectors.toList());
        String constraintName = primaryKey.getConstraintName();
        if (constraintName == null) {
            constraintName = pkCols.stream().collect(Collectors.joining("_", "PK_", ""));
        }
        uniqueConstraint = UniqueConstraint.primaryKey(constraintName, pkCols);
    }
    // NOT NULL constraints
    List<String> notNullCols = new ArrayList<>();
    if (!notNullConstraints.isEmpty()) {
        List<String> traits = new ArrayList<>();
        for (NotNullConstraint notNull : notNullConstraints) {
            byte trait = 0;
            if (notNull.isEnable()) {
                trait = HiveDDLUtils.enableConstraint(trait);
            }
            if (notNull.isValidate()) {
                trait = HiveDDLUtils.validateConstraint(trait);
            }
            if (notNull.isRely()) {
                trait = HiveDDLUtils.relyConstraint(trait);
            }
            traits.add(String.valueOf(trait));
            notNullCols.add(notNull.getColName());
        }
        props.put(NOT_NULL_CONSTRAINT_TRAITS, String.join(COL_DELIMITER, traits));
        props.put(NOT_NULL_COLS, String.join(COL_DELIMITER, notNullCols));
    }
    // row format
    if (rowFormatParams != null) {
        encodeRowFormat(rowFormatParams, props);
    }
    // storage format
    if (storageFormat != null) {
        encodeStorageFormat(storageFormat, props);
    }
    // location
    if (location != null) {
        props.put(TABLE_LOCATION_URI, location);
    }
    ObjectIdentifier identifier = parseObjectIdentifier(compoundName);
    Set<String> notNullColSet = new HashSet<>(notNullCols);
    if (uniqueConstraint != null) {
        notNullColSet.addAll(uniqueConstraint.getColumns());
    }
    TableSchema tableSchema =
            HiveTableUtil.createTableSchema(cols, partCols, notNullColSet, uniqueConstraint);
    return new CreateTableOperation(
            identifier,
            new CatalogTableImpl(tableSchema, HiveCatalog.getFieldNames(partCols), props, comment),
            ifNotExists,
            isTemporary);
}
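When no constraint name is given, the code above derives one by joining the key columns with underscores under a PK_ prefix, e.g. PK_x_y for columns x and y. The trait byte stored in the table properties packs the constraint's ENABLE/VALIDATE/RELY flags; a minimal sketch of this style of bit-packing, with hypothetical masks (the actual encoding lives in HiveDDLUtils):

// Hypothetical bit positions; HiveDDLUtils defines the real masks.
static final byte ENABLE_MASK = 1 << 0;
static final byte VALIDATE_MASK = 1 << 1;
static final byte RELY_MASK = 1 << 2;

static byte enableConstraint(byte trait) {
    // Set the ENABLE bit, leaving the other flags untouched.
    return (byte) (trait | ENABLE_MASK);
}

static boolean requireEnableConstraint(byte trait) {
    // Test whether the ENABLE bit is set.
    return (trait & ENABLE_MASK) != 0;
}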
Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.
The class HiveShimV210, method createHivePKs.
List<Object> createHivePKs(Table table, UniqueConstraint pk, List<Byte> traits)
        throws ClassNotFoundException, IllegalAccessException, InstantiationException,
                NoSuchMethodException, InvocationTargetException {
    List<Object> res = new ArrayList<>();
    if (pk != null) {
        Class pkClz = Class.forName("org.apache.hadoop.hive.metastore.api.SQLPrimaryKey");
        // PK constructor takes dbName, tableName, colName, keySeq, pkName, enable, validate, rely
        Constructor constructor =
                pkClz.getConstructor(
                        String.class, String.class, String.class, int.class, String.class,
                        boolean.class, boolean.class, boolean.class);
        int seq = 1;
        Preconditions.checkArgument(
                pk.getColumns().size() == traits.size(),
                "Number of PK columns and traits mismatch");
        for (int i = 0; i < pk.getColumns().size(); i++) {
            String col = pk.getColumns().get(i);
            byte trait = traits.get(i);
            boolean enable = HiveTableUtil.requireEnableConstraint(trait);
            boolean validate = HiveTableUtil.requireValidateConstraint(trait);
            boolean rely = HiveTableUtil.requireRelyConstraint(trait);
            Object hivePK =
                    constructor.newInstance(
                            table.getDbName(), table.getTableName(), col, seq++, pk.getName(),
                            enable, validate, rely);
            res.add(hivePK);
        }
    }
    return res;
}
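A minimal usage sketch, assuming a HiveShimV210 instance named shim and an already-resolved metastore Table named metastoreTable (both names are placeholders, not part of the source above):

UniqueConstraint pk = UniqueConstraint.primaryKey("pk_name", Collections.singletonList("x"));
// One trait byte per key column; RELY only here, purely as an illustration.
List<Byte> traits = Collections.singletonList(HiveDDLUtils.relyConstraint((byte) 0));
List<Object> hivePKs = shim.createHivePKs(metastoreTable, pk, traits);
// hivePKs holds reflectively created SQLPrimaryKey instances, one per key column
// with keySeq numbered from 1, ready for version-specific metastore client calls.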
Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.
The class TableSchemaTest, method testPrimaryKeyLazilyDefinedColumns.
@Test
public void testPrimaryKeyLazilyDefinedColumns() {
    TableSchema schema =
            TableSchema.builder()
                    .field("f0", DataTypes.BIGINT().notNull())
                    .primaryKey("pk", new String[] {"f0", "f2"})
                    .field("f1", DataTypes.STRING().notNull())
                    .field("f2", DataTypes.DOUBLE().notNull())
                    .build();
    UniqueConstraint expectedKey = UniqueConstraint.primaryKey("pk", Arrays.asList("f0", "f2"));
    assertThat(schema.getPrimaryKey().get(), equalTo(expectedKey));
}
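The point of this test is that primaryKey may reference columns declared later in the builder chain (f2 appears after the key); the key columns are only resolved when build() is called.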
Use of org.apache.flink.table.api.constraints.UniqueConstraint in project flink by apache.
The class TableSchemaTest, method testPrimaryKeyColumnsIndices.
@Test
public void testPrimaryKeyColumnsIndices() {
    TableSchema schema =
            TableSchema.builder()
                    .field("f0", DataTypes.BIGINT().notNull())
                    .field("f1", DataTypes.STRING().notNull())
                    .field("f2", DataTypes.DOUBLE().notNull())
                    .primaryKey("pk", new String[] {"f0", "f2"})
                    .build();
    UniqueConstraint expectedKey = UniqueConstraint.primaryKey("pk", Arrays.asList("f0", "f2"));
    assertThat(schema.getPrimaryKey().get(), equalTo(expectedKey));
}
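Both tests compare against an expected key via equalTo, which relies on UniqueConstraint implementing value equality; a minimal sketch, inferred from the passing assertions rather than from the class's source:

UniqueConstraint a = UniqueConstraint.primaryKey("pk", Arrays.asList("f0", "f2"));
UniqueConstraint b = UniqueConstraint.primaryKey("pk", Arrays.asList("f0", "f2"));
// Two independently built constraints with the same name and column list compare equal.
assertEquals(a, b);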