Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.
The class ObjectStore, method generateValidPKsOrUniqueSignatures.
/**
 * Computes the set of valid "column name + type" signatures that a foreign key may
 * reference on a table: one signature covering all primary-key columns (a table has at
 * most one PK) plus one signature per unique constraint.
 *
 * @param tableCols columns of the referenced table, used to resolve each key column's type
 * @param refTablePrimaryKeys primary-key columns of the referenced table (may be empty)
 * @param refTableUniqueConstraints unique-constraint columns of the referenced table (may be empty)
 * @return set of concatenated column-name/type signatures, one per candidate key
 */
private static Set<String> generateValidPKsOrUniqueSignatures(List<MFieldSchema> tableCols, List<SQLPrimaryKey> refTablePrimaryKeys, List<SQLUniqueConstraint> refTableUniqueConstraints) {
  final Set<String> validPKsOrUnique = new HashSet<>();
  if (!refTablePrimaryKeys.isEmpty()) {
    // Sort a copy (by constraint name, then key sequence) so the caller's list is not
    // reordered as a side effect; key_seq ordering keeps multi-column keys in declared order.
    final List<SQLPrimaryKey> sortedPKs = new ArrayList<>(refTablePrimaryKeys);
    sortedPKs.sort(Comparator.comparing(SQLPrimaryKey::getPk_name)
        .thenComparingInt(SQLPrimaryKey::getKey_seq));
    // All PK columns contribute to a single signature (only one PK per table).
    StringBuilder pkSignature = new StringBuilder();
    for (SQLPrimaryKey pk : sortedPKs) {
      pkSignature.append(generateColNameTypeSignature(pk.getColumn_name(),
          getColumnFromTableColumns(tableCols, pk.getColumn_name()).getType()));
    }
    validPKsOrUnique.add(pkSignature.toString());
  }
  if (!refTableUniqueConstraints.isEmpty()) {
    // Again sort a defensive copy; grouping by uk_name lets us detect constraint boundaries.
    final List<SQLUniqueConstraint> sortedUKs = new ArrayList<>(refTableUniqueConstraints);
    sortedUKs.sort(Comparator.comparing(SQLUniqueConstraint::getUk_name)
        .thenComparingInt(SQLUniqueConstraint::getKey_seq));
    // There may be several unique constraints; emit one signature per constraint name.
    StringBuilder ukSignature = new StringBuilder();
    for (int j = 0; j < sortedUKs.size(); j++) {
      SQLUniqueConstraint uk = sortedUKs.get(j);
      ukSignature.append(generateColNameTypeSignature(uk.getColumn_name(),
          getColumnFromTableColumns(tableCols, uk.getColumn_name()).getType()));
      // Flush the accumulated signature when this is the last column of its constraint
      // (end of list, or the next entry belongs to a differently named constraint).
      final boolean lastColumnOfConstraint = j + 1 >= sortedUKs.size()
          || !sortedUKs.get(j + 1).getUk_name().equals(uk.getUk_name());
      if (lastColumnOfConstraint) {
        validPKsOrUnique.add(ukSignature.toString());
        ukSignature = new StringBuilder();
      }
    }
  }
  return validPKsOrUnique;
}
Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.
The class ObjectStore, method getUniqueConstraintsViaJdo.
/**
 * Fetches the unique constraints defined on the given table via a JDO query over
 * {@code MConstraint}, inside its own transaction.
 *
 * @param db_name database of the table whose unique constraints are requested
 * @param tbl_name table whose unique constraints are requested
 * @return unique constraints of the table (empty list if none)
 * @throws MetaException on metastore access failure
 */
private List<SQLUniqueConstraint> getUniqueConstraintsViaJdo(String db_name, String tbl_name) throws MetaException {
  boolean commited = false;
  List<SQLUniqueConstraint> uniqueConstraints = null;
  Query query = null;
  try {
    openTransaction();
    query = pm.newQuery(MConstraint.class, "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&" + " constraintType == MConstraint.UNIQUE_CONSTRAINT");
    query.declareParameters("java.lang.String tbl_name, java.lang.String db_name");
    Collection<?> constraints = (Collection<?>) query.execute(tbl_name, db_name);
    pm.retrieveAll(constraints);
    uniqueConstraints = new ArrayList<>();
    for (Object constraint : constraints) {
      MConstraint currConstraint = (MConstraint) constraint;
      // The constrained column is either a regular column (parent column descriptor set)
      // or, failing that, one of the table's partition keys.
      List<MFieldSchema> cols = currConstraint.getParentColumn() != null ? currConstraint.getParentColumn().getCols() : currConstraint.getParentTable().getPartitionKeys();
      // ENABLE / VALIDATE / RELY are bit-packed into a single int as 4 / 2 / 1.
      int enableValidateRely = currConstraint.getEnableValidateRely();
      boolean enable = (enableValidateRely & 4) != 0;
      boolean validate = (enableValidateRely & 2) != 0;
      boolean rely = (enableValidateRely & 1) != 0;
      uniqueConstraints.add(new SQLUniqueConstraint(db_name, tbl_name, cols.get(currConstraint.getParentIntegerIndex()).getName(), currConstraint.getPosition(), currConstraint.getConstraintName(), enable, validate, rely));
    }
    commited = commitTransaction();
  } finally {
    // Rolls back if commit did not happen and closes the query in all cases.
    rollbackAndCleanup(commited, query);
  }
  return uniqueConstraints;
}
Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.
The class ObjectStore, method addForeignKeys.
/**
 * Validates and persists a batch of foreign key constraints.
 *
 * The flat {@code foreignKeys} list may encode several multi-column constraints back to
 * back; a new constraint starts whenever {@code key_seq == 1}. The outer loop handles one
 * whole constraint per iteration; the inner loop (which shares and advances the same index
 * {@code i}) consumes that constraint's columns.
 *
 * @param foreignKeys flat list of FK columns, grouped by constraint and ordered by key_seq
 * @param retrieveCD whether to deep-retrieve table column descriptors (true for ALTER TABLE,
 *        false when the table is being created)
 * @param primaryKeys in-memory PKs of the table being created (used for self-references)
 * @param uniqueConstraints in-memory unique constraints of the table being created
 * @return the (possibly generated) constraint names, one entry per FK column
 * @throws InvalidObjectException if a referenced table/column is missing or a name collides
 * @throws MetaException if no matching PK/unique key exists on the referenced table
 */
private List<String> addForeignKeys(List<SQLForeignKey> foreignKeys, boolean retrieveCD, List<SQLPrimaryKey> primaryKeys, List<SQLUniqueConstraint> uniqueConstraints) throws InvalidObjectException, MetaException {
List<String> fkNames = new ArrayList<>();
if (CollectionUtils.isNotEmpty(foreignKeys)) {
List<MConstraint> mpkfks = new ArrayList<>();
String currentConstraintName = null;
// checks / adds information about each column.
// NOTE: the inner loop below advances i as well, so each outer iteration starts at the
// first column of the next constraint.
for (int i = 0; i < foreignKeys.size(); i++) {
final String fkTableDB = normalizeIdentifier(foreignKeys.get(i).getFktable_db());
final String fkTableName = normalizeIdentifier(foreignKeys.get(i).getFktable_name());
// If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
// For instance, this is the case when we are creating the table.
final AttachedMTableInfo nChildTable = getMTable(fkTableDB, fkTableName, retrieveCD);
final MTable childTable = nChildTable.mtbl;
if (childTable == null) {
throw new InvalidObjectException("Child table not found: " + fkTableName);
}
MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
// Candidate child columns: regular columns plus partition keys.
final List<MFieldSchema> childCols = childCD == null || childCD.getCols() == null ? new ArrayList<>() : new ArrayList<>(childCD.getCols());
if (childTable.getPartitionKeys() != null) {
childCols.addAll(childTable.getPartitionKeys());
}
final String pkTableDB = normalizeIdentifier(foreignKeys.get(i).getPktable_db());
final String pkTableName = normalizeIdentifier(foreignKeys.get(i).getPktable_name());
// For primary keys, we retrieve the column descriptors if retrieveCD is true (which means
// it is an alter table statement) or if it is a create table statement but we are
// referencing another table instead of self for the primary key.
final AttachedMTableInfo nParentTable;
final MTable parentTable;
MColumnDescriptor parentCD;
final List<MFieldSchema> parentCols;
final List<SQLPrimaryKey> existingTablePrimaryKeys;
final List<SQLUniqueConstraint> existingTableUniqueConstraints;
final boolean sameTable = fkTableDB.equals(pkTableDB) && fkTableName.equals(pkTableName);
if (sameTable) {
// Self-referencing FK: the referenced keys may not be persisted yet, so use the
// in-memory constraints passed in by the caller.
nParentTable = nChildTable;
parentTable = childTable;
parentCD = childCD;
parentCols = childCols;
existingTablePrimaryKeys = primaryKeys;
existingTableUniqueConstraints = uniqueConstraints;
} else {
nParentTable = getMTable(pkTableDB, pkTableName, true);
parentTable = nParentTable.mtbl;
if (parentTable == null) {
throw new InvalidObjectException("Parent table not found: " + pkTableName);
}
parentCD = nParentTable.mcd;
parentCols = parentCD == null || parentCD.getCols() == null ? new ArrayList<>() : new ArrayList<>(parentCD.getCols());
if (parentTable.getPartitionKeys() != null) {
parentCols.addAll(parentTable.getPartitionKeys());
}
existingTablePrimaryKeys = getPrimaryKeys(pkTableDB, pkTableName);
existingTableUniqueConstraints = getUniqueConstraints(pkTableDB, pkTableName);
}
// The FK must reference a full PK or unique key of the parent table, and
// the columns correspond.
if (existingTablePrimaryKeys.isEmpty() && existingTableUniqueConstraints.isEmpty()) {
throw new MetaException("Trying to define foreign key but there are no primary keys or unique keys for referenced table");
}
final Set<String> validPKsOrUnique = generateValidPKsOrUniqueSignatures(parentCols, existingTablePrimaryKeys, existingTableUniqueConstraints);
StringBuilder fkSignature = new StringBuilder();
StringBuilder referencedKSignature = new StringBuilder();
// Inner loop: consume the remaining columns of the current constraint. It deliberately
// reuses (and advances) the outer index i.
for (; i < foreignKeys.size(); i++) {
final SQLForeignKey foreignKey = foreignKeys.get(i);
final String fkColumnName = normalizeIdentifier(foreignKey.getFkcolumn_name());
int childIntegerIndex = getColumnIndexFromTableColumns(childCD.getCols(), fkColumnName);
if (childIntegerIndex == -1) {
// Not a regular column: fall back to partition keys; a null CD in the persisted
// MConstraint marks the column as a partition key.
if (childTable.getPartitionKeys() != null) {
childCD = null;
childIntegerIndex = getColumnIndexFromTableColumns(childTable.getPartitionKeys(), fkColumnName);
}
if (childIntegerIndex == -1) {
throw new InvalidObjectException("Child column not found: " + fkColumnName);
}
}
final String pkColumnName = normalizeIdentifier(foreignKey.getPkcolumn_name());
int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD.getCols(), pkColumnName);
if (parentIntegerIndex == -1) {
// Same partition-key fallback for the referenced (parent) column.
if (parentTable.getPartitionKeys() != null) {
parentCD = null;
parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), pkColumnName);
}
if (parentIntegerIndex == -1) {
throw new InvalidObjectException("Parent column not found: " + pkColumnName);
}
}
if (foreignKey.getFk_name() == null) {
// Generate a name only at the first column of the constraint; remaining columns of
// a composite key reuse currentConstraintName. Including the column names helps
// the uniqueness of the generated constraint name.
if (foreignKey.getKey_seq() == 1) {
currentConstraintName = generateConstraintName(fkTableDB, fkTableName, pkTableDB, pkTableName, pkColumnName, fkColumnName, "fk");
}
} else {
currentConstraintName = normalizeIdentifier(foreignKey.getFk_name());
if (constraintNameAlreadyExists(currentConstraintName)) {
throw new InvalidObjectException("Constraint name already exists: " + currentConstraintName);
}
}
fkNames.add(currentConstraintName);
Integer updateRule = foreignKey.getUpdate_rule();
Integer deleteRule = foreignKey.getDelete_rule();
// ENABLE / VALIDATE / RELY bit-packed as 4 / 2 / 1.
int enableValidateRely = (foreignKey.isEnable_cstr() ? 4 : 0) + (foreignKey.isValidate_cstr() ? 2 : 0) + (foreignKey.isRely_cstr() ? 1 : 0);
MConstraint mpkfk = new MConstraint(currentConstraintName, MConstraint.FOREIGN_KEY_CONSTRAINT, foreignKey.getKey_seq(), deleteRule, updateRule, enableValidateRely, parentTable, childTable, parentCD, childCD, childIntegerIndex, parentIntegerIndex);
mpkfks.add(mpkfk);
final String fkColType = getColumnFromTableColumns(childCols, fkColumnName).getType();
fkSignature.append(generateColNameTypeSignature(fkColumnName, fkColType));
// NOTE(review): fkColType (the child column's type) is used for the referenced-key
// signature too — presumably intentional so the signature match also enforces that
// the FK column type equals the referenced column type; confirm against
// generateValidPKsOrUniqueSignatures.
referencedKSignature.append(generateColNameTypeSignature(pkColumnName, fkColType));
if (i + 1 < foreignKeys.size() && foreignKeys.get(i + 1).getKey_seq() == 1) {
// Next one is a new key, we bail out from the inner loop
break;
}
}
// Validate that the referenced column set matches one of the parent's candidate keys.
String referenced = referencedKSignature.toString();
if (!validPKsOrUnique.contains(referenced)) {
throw new MetaException("Foreign key references " + referenced + " but no corresponding " + "primary key or unique key exists. Possible keys: " + validPKsOrUnique);
}
if (sameTable && fkSignature.toString().equals(referenced)) {
throw new MetaException("Cannot be both foreign key and primary/unique key on same table: " + referenced);
}
fkSignature = new StringBuilder();
referencedKSignature = new StringBuilder();
}
pm.makePersistentAll(mpkfks);
}
return fkNames;
}
Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.
The class MetaStoreDirectSql, method getUniqueConstraints.
/**
 * Retrieves unique constraints via a direct SQL query against the metastore RDBMS,
 * bypassing JDO object retrieval for speed.
 *
 * @param db_name database name filter; may be null to match any database
 * @param tbl_name table name filter; may be null to match any table
 * @return matching unique constraints (never null, possibly empty)
 * @throws MetaException if the SQL execution fails
 */
public List<SQLUniqueConstraint> getUniqueConstraints(String db_name, String tbl_name) throws MetaException {
  List<SQLUniqueConstraint> ret = new ArrayList<>();
  // The constrained column lives either in COLUMNS_V2 (regular column) or in
  // PARTITION_KEYS (partition column) — hence the CASE over the two outer joins.
  String queryText = "SELECT " + DBS + ".\"NAME\", " + TBLS + ".\"TBL_NAME\", " + "CASE WHEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" IS NOT NULL THEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" " + "ELSE " + PARTITION_KEYS + ".\"PKEY_NAME\" END, " + KEY_CONSTRAINTS + ".\"POSITION\", " + KEY_CONSTRAINTS + ".\"CONSTRAINT_NAME\", " + KEY_CONSTRAINTS + ".\"ENABLE_VALIDATE_RELY\" " + " from " + TBLS + " " + " INNER JOIN " + KEY_CONSTRAINTS + " ON " + TBLS + ".\"TBL_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_TBL_ID\" " + " INNER JOIN " + DBS + " ON " + TBLS + ".\"DB_ID\" = " + DBS + ".\"DB_ID\" " + " LEFT OUTER JOIN " + COLUMNS_V2 + " ON " + COLUMNS_V2 + ".\"CD_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_CD_ID\" AND " + " " + COLUMNS_V2 + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" " + " LEFT OUTER JOIN " + PARTITION_KEYS + " ON " + TBLS + ".\"TBL_ID\" = " + PARTITION_KEYS + ".\"TBL_ID\" AND " + " " + PARTITION_KEYS + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" " + " WHERE " + KEY_CONSTRAINTS + ".\"CONSTRAINT_TYPE\" = " + MConstraint.UNIQUE_CONSTRAINT + " AND" + (db_name == null ? "" : " " + DBS + ".\"NAME\" = ? AND") + (tbl_name == null ? "" : " " + TBLS + ".\"TBL_NAME\" = ? ");
  // The filters above are optional, so the text can end with a dangling "AND"; strip it.
  queryText = queryText.trim();
  if (queryText.endsWith("AND")) {
    queryText = queryText.substring(0, queryText.length() - 3);
  }
  // Bind only the parameters whose filters were actually appended.
  List<String> pms = new ArrayList<>();
  if (db_name != null) {
    pms.add(db_name);
  }
  if (tbl_name != null) {
    pms.add(tbl_name);
  }
  Query queryParams = pm.newQuery("javax.jdo.query.SQL", queryText);
  List<Object[]> sqlResult = ensureList(executeWithArray(queryParams, pms.toArray(), queryText));
  for (Object[] line : sqlResult) {
    // ENABLE / VALIDATE / RELY flags are bit-packed into a single int as 4 / 2 / 1.
    int enableValidateRely = extractSqlInt(line[5]);
    boolean enable = (enableValidateRely & 4) != 0;
    boolean validate = (enableValidateRely & 2) != 0;
    boolean rely = (enableValidateRely & 1) != 0;
    ret.add(new SQLUniqueConstraint(extractSqlString(line[0]), extractSqlString(line[1]), extractSqlString(line[2]), extractSqlInt(line[3]), extractSqlString(line[4]), enable, validate, rely));
  }
  return ret;
}
Use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.
The class ObjectStore, method addUniqueConstraints.
/**
 * Validates and persists a batch of unique constraints.
 *
 * The flat {@code uks} list may encode multi-column constraints; a new constraint starts
 * at {@code key_seq == 1}, and subsequent columns of the same constraint reuse the name
 * carried in {@code constraintName}.
 *
 * @param uks flat list of unique-constraint columns, ordered by constraint then key_seq
 * @param retrieveCD whether to deep-retrieve the table's column descriptor (true for
 *        ALTER TABLE, false when the table is being created)
 * @return the (possibly generated) constraint names, one entry per constraint column
 * @throws InvalidObjectException if the table/column is missing or the name collides
 * @throws MetaException on metastore access failure
 */
private List<String> addUniqueConstraints(List<SQLUniqueConstraint> uks, boolean retrieveCD) throws InvalidObjectException, MetaException {
  List<String> ukNames = new ArrayList<>();
  List<MConstraint> cstrs = new ArrayList<>();
  // Carried across iterations: columns 2..n of a composite key reuse the name from column 1.
  String constraintName = null;
  for (SQLUniqueConstraint uk : uks) {
    final String tableDB = normalizeIdentifier(uk.getTable_db());
    final String tableName = normalizeIdentifier(uk.getTable_name());
    final String columnName = normalizeIdentifier(uk.getColumn_name());
    // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
    // For instance, this is the case when we are creating the table.
    AttachedMTableInfo nParentTable = getMTable(tableDB, tableName, retrieveCD);
    MTable parentTable = nParentTable.mtbl;
    if (parentTable == null) {
      throw new InvalidObjectException("Parent table not found: " + tableName);
    }
    MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
    int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
    if (parentIntegerIndex == -1) {
      // Not a regular column: fall back to partition keys; a null CD in the persisted
      // MConstraint marks the column as a partition key.
      if (parentTable.getPartitionKeys() != null) {
        parentCD = null;
        parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
      }
      if (parentIntegerIndex == -1) {
        throw new InvalidObjectException("Parent column not found: " + columnName);
      }
    }
    if (uk.getUk_name() == null) {
      // Generate a name only at the first column of each constraint.
      if (uk.getKey_seq() == 1) {
        constraintName = generateConstraintName(tableDB, tableName, columnName, "uk");
      }
    } else {
      constraintName = normalizeIdentifier(uk.getUk_name());
      if (constraintNameAlreadyExists(constraintName)) {
        throw new InvalidObjectException("Constraint name already exists: " + constraintName);
      }
    }
    ukNames.add(constraintName);
    // ENABLE / VALIDATE / RELY flags are bit-packed into a single int as 4 / 2 / 1.
    int enableValidateRely = (uk.isEnable_cstr() ? 4 : 0) + (uk.isValidate_cstr() ? 2 : 0) + (uk.isRely_cstr() ? 1 : 0);
    MConstraint muk = new MConstraint(constraintName, MConstraint.UNIQUE_CONSTRAINT, uk.getKey_seq(), null, null, enableValidateRely, parentTable, null, parentCD, null, null, parentIntegerIndex);
    cstrs.add(muk);
  }
  pm.makePersistentAll(cstrs);
  return ukNames;
}
Aggregations