Use of org.apache.hadoop.hive.metastore.model.MColumnDescriptor in project hive by apache.
In class ObjectStore, method addNotNullConstraints.
private List<String> addNotNullConstraints(List<SQLNotNullConstraint> nns, boolean retrieveCD)
    throws InvalidObjectException, MetaException {
  List<String> nnNames = new ArrayList<>();
  List<MConstraint> cstrs = new ArrayList<>();
  String constraintName = null;
  for (int i = 0; i < nns.size(); i++) {
    final String tableDB = normalizeIdentifier(nns.get(i).getTable_db());
    final String tableName = normalizeIdentifier(nns.get(i).getTable_name());
    final String columnName = normalizeIdentifier(nns.get(i).getColumn_name());
    // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
    // For instance, this is the case when we are creating the table.
    AttachedMTableInfo nParentTable = getMTable(tableDB, tableName, retrieveCD);
    MTable parentTable = nParentTable.mtbl;
    if (parentTable == null) {
      throw new InvalidObjectException("Parent table not found: " + tableName);
    }
    MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
    int parentIntegerIndex =
        getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
    if (parentIntegerIndex == -1) {
      if (parentTable.getPartitionKeys() != null) {
        parentCD = null;
        parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
      }
      if (parentIntegerIndex == -1) {
        throw new InvalidObjectException("Parent column not found: " + columnName);
      }
    }
    if (nns.get(i).getNn_name() == null) {
      constraintName = generateConstraintName(tableDB, tableName, columnName, "nn");
    } else {
      constraintName = normalizeIdentifier(nns.get(i).getNn_name());
      if (constraintNameAlreadyExists(constraintName)) {
        throw new InvalidObjectException("Constraint name already exists: " + constraintName);
      }
    }
    nnNames.add(constraintName);
    int enableValidateRely = (nns.get(i).isEnable_cstr() ? 4 : 0)
        + (nns.get(i).isValidate_cstr() ? 2 : 0) + (nns.get(i).isRely_cstr() ? 1 : 0);
    // Not null constraint should reference a single column
    MConstraint muk = new MConstraint(constraintName, MConstraint.NOT_NULL_CONSTRAINT, 1, null, null,
        enableValidateRely, parentTable, null, parentCD, null, null, parentIntegerIndex);
    cstrs.add(muk);
  }
  pm.makePersistentAll(cstrs);
  return nnNames;
}
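The enableValidateRely value above packs the ENABLE, VALIDATE and RELY traits of the constraint into a single int. A minimal sketch of that encoding, using a hypothetical ConstraintFlags helper that is not part of ObjectStore:

// Hypothetical helper (not in Hive) mirroring the bit-packing used in addNotNullConstraints:
// bit value 4 = ENABLE, 2 = VALIDATE, 1 = RELY.
public final class ConstraintFlags {

  private ConstraintFlags() {
  }

  public static int pack(boolean enable, boolean validate, boolean rely) {
    // Same arithmetic as (isEnable_cstr() ? 4 : 0) + (isValidate_cstr() ? 2 : 0) + (isRely_cstr() ? 1 : 0)
    return (enable ? 4 : 0) + (validate ? 2 : 0) + (rely ? 1 : 0);
  }

  public static boolean isEnable(int packed) {
    return (packed & 4) != 0;
  }

  public static boolean isValidate(int packed) {
    return (packed & 2) != 0;
  }

  public static boolean isRely(int packed) {
    return (packed & 1) != 0;
  }
}

For example, pack(true, false, true) yields 5, which decodes back to ENABLE and RELY.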
Use of org.apache.hadoop.hive.metastore.model.MColumnDescriptor in project hive by apache.
In class ObjectStore, method addConstraint.
private void addConstraint(String tableDB, String tableName, String columnName, String ccName,
    boolean isEnable, boolean isRely, boolean isValidate, int constraintType, String constraintValue,
    boolean retrieveCD, List<String> nnNames, List<MConstraint> cstrs)
    throws InvalidObjectException, MetaException {
  String constraintName = null;
  // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
  // For instance, this is the case when we are creating the table.
  AttachedMTableInfo nParentTable = getMTable(tableDB, tableName, retrieveCD);
  MTable parentTable = nParentTable.mtbl;
  if (parentTable == null) {
    throw new InvalidObjectException("Parent table not found: " + tableName);
  }
  MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
  int parentIntegerIndex =
      getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
  if (parentIntegerIndex == -1) {
    if (parentTable.getPartitionKeys() != null) {
      parentCD = null;
      parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
    }
    if (parentIntegerIndex == -1) {
      throw new InvalidObjectException("Parent column not found: " + columnName);
    }
  }
  if (ccName == null) {
    constraintName = generateConstraintName(tableDB, tableName, columnName, "dc");
  } else {
    constraintName = normalizeIdentifier(ccName);
    if (constraintNameAlreadyExists(constraintName)) {
      throw new InvalidObjectException("Constraint name already exists: " + constraintName);
    }
  }
  nnNames.add(constraintName);
  int enableValidateRely = (isEnable ? 4 : 0) + (isValidate ? 2 : 0) + (isRely ? 1 : 0);
  // This constraint references a single column
  MConstraint muk = new MConstraint(constraintName, constraintType, 1, null, null, enableValidateRely,
      parentTable, null, parentCD, null, null, parentIntegerIndex, constraintValue);
  cstrs.add(muk);
}
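addConstraint is a generic single-column helper: the caller supplies the constraint type and an optional constraint value, and is responsible for persisting the accumulated MConstraint objects. A hedged sketch of such a call for a default constraint; MConstraint.DEFAULT_CONSTRAINT, the table and column names, and the surrounding lists are assumptions for illustration only:

// Illustrative caller sketch (not actual ObjectStore code). Assumes a DEFAULT constraint
// type constant exists on MConstraint; database, table and column names are made up.
List<String> constraintNames = new ArrayList<>();
List<MConstraint> constraints = new ArrayList<>();
// Parameter order follows the signature above: isEnable, isRely, isValidate.
addConstraint("default_db", "sales", "qty", /* ccName */ null,
    /* isEnable */ true, /* isRely */ false, /* isValidate */ false,
    MConstraint.DEFAULT_CONSTRAINT, /* constraintValue */ "0",
    /* retrieveCD */ false, constraintNames, constraints);
// The helper only fills the lists; the caller persists them, e.g. pm.makePersistentAll(constraints).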
Use of org.apache.hadoop.hive.metastore.model.MColumnDescriptor in project hive by apache.
In class ObjectStore, method addForeignKeys.
private List<String> addForeignKeys(List<SQLForeignKey> foreignKeys, boolean retrieveCD,
    List<SQLPrimaryKey> primaryKeys, List<SQLUniqueConstraint> uniqueConstraints)
    throws InvalidObjectException, MetaException {
  List<String> fkNames = new ArrayList<>();
  if (CollectionUtils.isNotEmpty(foreignKeys)) {
    List<MConstraint> mpkfks = new ArrayList<>();
    String currentConstraintName = null;
    // Iterate through the foreign keys. The outer loop gathers the per-key table information,
    // while the inner loop checks / adds information about each column.
    for (int i = 0; i < foreignKeys.size(); i++) {
      final String fkTableDB = normalizeIdentifier(foreignKeys.get(i).getFktable_db());
      final String fkTableName = normalizeIdentifier(foreignKeys.get(i).getFktable_name());
      // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
      // For instance, this is the case when we are creating the table.
      final AttachedMTableInfo nChildTable = getMTable(fkTableDB, fkTableName, retrieveCD);
      final MTable childTable = nChildTable.mtbl;
      if (childTable == null) {
        throw new InvalidObjectException("Child table not found: " + fkTableName);
      }
      MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
      final List<MFieldSchema> childCols = childCD == null || childCD.getCols() == null
          ? new ArrayList<>() : new ArrayList<>(childCD.getCols());
      if (childTable.getPartitionKeys() != null) {
        childCols.addAll(childTable.getPartitionKeys());
      }
      final String pkTableDB = normalizeIdentifier(foreignKeys.get(i).getPktable_db());
      final String pkTableName = normalizeIdentifier(foreignKeys.get(i).getPktable_name());
      // For primary keys, we retrieve the column descriptors if retrieveCD is true (which means
      // it is an alter table statement) or if it is a create table statement but we are
      // referencing another table instead of self for the primary key.
      final AttachedMTableInfo nParentTable;
      final MTable parentTable;
      MColumnDescriptor parentCD;
      final List<MFieldSchema> parentCols;
      final List<SQLPrimaryKey> existingTablePrimaryKeys;
      final List<SQLUniqueConstraint> existingTableUniqueConstraints;
      final boolean sameTable = fkTableDB.equals(pkTableDB) && fkTableName.equals(pkTableName);
      if (sameTable) {
        nParentTable = nChildTable;
        parentTable = childTable;
        parentCD = childCD;
        parentCols = childCols;
        existingTablePrimaryKeys = primaryKeys;
        existingTableUniqueConstraints = uniqueConstraints;
      } else {
        nParentTable = getMTable(pkTableDB, pkTableName, true);
        parentTable = nParentTable.mtbl;
        if (parentTable == null) {
          throw new InvalidObjectException("Parent table not found: " + pkTableName);
        }
        parentCD = nParentTable.mcd;
        parentCols = parentCD == null || parentCD.getCols() == null
            ? new ArrayList<>() : new ArrayList<>(parentCD.getCols());
        if (parentTable.getPartitionKeys() != null) {
          parentCols.addAll(parentTable.getPartitionKeys());
        }
        existingTablePrimaryKeys = getPrimaryKeys(pkTableDB, pkTableName);
        existingTableUniqueConstraints = getUniqueConstraints(pkTableDB, pkTableName);
      }
      // Build the set of valid primary key / unique key signatures of the referenced table;
      // it is used below to verify that the declared foreign key columns correspond to one of them.
      if (existingTablePrimaryKeys.isEmpty() && existingTableUniqueConstraints.isEmpty()) {
        throw new MetaException(
            "Trying to define foreign key but there are no primary keys or unique keys for referenced table");
      }
      final Set<String> validPKsOrUnique = generateValidPKsOrUniqueSignatures(parentCols,
          existingTablePrimaryKeys, existingTableUniqueConstraints);
      StringBuilder fkSignature = new StringBuilder();
      StringBuilder referencedKSignature = new StringBuilder();
      for (; i < foreignKeys.size(); i++) {
        final SQLForeignKey foreignKey = foreignKeys.get(i);
        final String fkColumnName = normalizeIdentifier(foreignKey.getFkcolumn_name());
        int childIntegerIndex = getColumnIndexFromTableColumns(childCD.getCols(), fkColumnName);
        if (childIntegerIndex == -1) {
          if (childTable.getPartitionKeys() != null) {
            childCD = null;
            childIntegerIndex = getColumnIndexFromTableColumns(childTable.getPartitionKeys(), fkColumnName);
          }
          if (childIntegerIndex == -1) {
            throw new InvalidObjectException("Child column not found: " + fkColumnName);
          }
        }
        final String pkColumnName = normalizeIdentifier(foreignKey.getPkcolumn_name());
        int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD.getCols(), pkColumnName);
        if (parentIntegerIndex == -1) {
          if (parentTable.getPartitionKeys() != null) {
            parentCD = null;
            parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), pkColumnName);
          }
          if (parentIntegerIndex == -1) {
            throw new InvalidObjectException("Parent column not found: " + pkColumnName);
          }
        }
        if (foreignKey.getFk_name() == null) {
          // Generate a constraint name only for the first column of the key; subsequent
          // columns of a composite key reuse the name generated here.
          if (foreignKey.getKey_seq() == 1) {
            currentConstraintName = generateConstraintName(fkTableDB, fkTableName, pkTableDB,
                pkTableName, pkColumnName, fkColumnName, "fk");
          }
        } else {
          currentConstraintName = normalizeIdentifier(foreignKey.getFk_name());
          if (constraintNameAlreadyExists(currentConstraintName)) {
            throw new InvalidObjectException("Constraint name already exists: " + currentConstraintName);
          }
        }
        fkNames.add(currentConstraintName);
        Integer updateRule = foreignKey.getUpdate_rule();
        Integer deleteRule = foreignKey.getDelete_rule();
        int enableValidateRely = (foreignKey.isEnable_cstr() ? 4 : 0)
            + (foreignKey.isValidate_cstr() ? 2 : 0) + (foreignKey.isRely_cstr() ? 1 : 0);
        MConstraint mpkfk = new MConstraint(currentConstraintName, MConstraint.FOREIGN_KEY_CONSTRAINT,
            foreignKey.getKey_seq(), deleteRule, updateRule, enableValidateRely, parentTable, childTable,
            parentCD, childCD, childIntegerIndex, parentIntegerIndex);
        mpkfks.add(mpkfk);
        final String fkColType = getColumnFromTableColumns(childCols, fkColumnName).getType();
        fkSignature.append(generateColNameTypeSignature(fkColumnName, fkColType));
        referencedKSignature.append(generateColNameTypeSignature(pkColumnName, fkColType));
        if (i + 1 < foreignKeys.size() && foreignKeys.get(i + 1).getKey_seq() == 1) {
          // Next one is a new key, we bail out from the inner loop
          break;
        }
      }
      String referenced = referencedKSignature.toString();
      if (!validPKsOrUnique.contains(referenced)) {
        throw new MetaException("Foreign key references " + referenced + " but no corresponding "
            + "primary key or unique key exists. Possible keys: " + validPKsOrUnique);
      }
      if (sameTable && fkSignature.toString().equals(referenced)) {
        throw new MetaException(
            "Cannot be both foreign key and primary/unique key on same table: " + referenced);
      }
      fkSignature = new StringBuilder();
      referencedKSignature = new StringBuilder();
    }
    pm.makePersistentAll(mpkfks);
  }
  return fkNames;
}
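The closing check compares the concatenated name/type signature of the referenced columns against the set of signatures built from the parent table's primary key and unique constraints. A simplified sketch of that idea; the helpers below are hypothetical and do not reproduce the exact format produced by generateColNameTypeSignature or generateValidPKsOrUniqueSignatures:

// Hypothetical signature helpers (illustrative only; the real format is defined by
// ObjectStore.generateColNameTypeSignature and generateValidPKsOrUniqueSignatures).
static String colNameTypeSignature(String columnName, String columnType) {
  return columnName + ":" + columnType + ";";
}

static void checkReferencedKey(Set<String> validPKsOrUnique, String referencedSignature)
    throws MetaException {
  // A foreign key is only accepted if the referenced column list matches one of the
  // declared primary key or unique constraint signatures of the parent table.
  if (!validPKsOrUnique.contains(referencedSignature)) {
    throw new MetaException("Foreign key references " + referencedSignature
        + " but no corresponding primary key or unique key exists. Possible keys: " + validPKsOrUnique);
  }
}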
Use of org.apache.hadoop.hive.metastore.model.MColumnDescriptor in project hive by apache.
In class ObjectStore, method alterPartition.
@Override
public void alterPartition(String dbname, String name, List<String> part_vals, Partition newPart)
    throws InvalidObjectException, MetaException {
  boolean success = false;
  Exception e = null;
  try {
    openTransaction();
    MColumnDescriptor oldCd = alterPartitionNoTxn(dbname, name, part_vals, newPart);
    removeUnusedColumnDescriptor(oldCd);
    // commit the changes
    success = commitTransaction();
  } catch (Exception exception) {
    e = exception;
  } finally {
    if (!success) {
      rollbackTransaction();
      MetaException metaException =
          new MetaException("The transaction for alter partition did not commit successfully.");
      if (e != null) {
        metaException.initCause(e);
      }
      throw metaException;
    }
  }
}
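alterPartition follows the metastore's usual open/commit/rollback discipline: commit on success, otherwise roll back and surface a MetaException that wraps the original failure. A hedged sketch of that pattern as a generic wrapper; runInTransaction is hypothetical, not an ObjectStore method:

// Hypothetical wrapper (not part of ObjectStore) capturing the transaction pattern above.
private <T> T runInTransaction(java.util.concurrent.Callable<T> work, String opName)
    throws MetaException {
  boolean success = false;
  Exception error = null;
  T result = null;
  try {
    openTransaction();
    result = work.call();
    // commit the changes
    success = commitTransaction();
  } catch (Exception e) {
    error = e;
  } finally {
    if (!success) {
      rollbackTransaction();
      MetaException me =
          new MetaException("The transaction for " + opName + " did not commit successfully.");
      if (error != null) {
        me.initCause(error);
      }
      throw me;
    }
  }
  return result;
}

With such a wrapper, the body of alterPartition could be expressed as runInTransaction(() -> { removeUnusedColumnDescriptor(alterPartitionNoTxn(dbname, name, part_vals, newPart)); return null; }, "alter partition").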
Use of org.apache.hadoop.hive.metastore.model.MColumnDescriptor in project hive by apache.
In class ObjectStore, method addUniqueConstraints.
private List<String> addUniqueConstraints(List<SQLUniqueConstraint> uks, boolean retrieveCD)
    throws InvalidObjectException, MetaException {
  List<String> ukNames = new ArrayList<>();
  List<MConstraint> cstrs = new ArrayList<>();
  String constraintName = null;
  for (int i = 0; i < uks.size(); i++) {
    final String tableDB = normalizeIdentifier(uks.get(i).getTable_db());
    final String tableName = normalizeIdentifier(uks.get(i).getTable_name());
    final String columnName = normalizeIdentifier(uks.get(i).getColumn_name());
    // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
    // For instance, this is the case when we are creating the table.
    AttachedMTableInfo nParentTable = getMTable(tableDB, tableName, retrieveCD);
    MTable parentTable = nParentTable.mtbl;
    if (parentTable == null) {
      throw new InvalidObjectException("Parent table not found: " + tableName);
    }
    MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
    int parentIntegerIndex =
        getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
    if (parentIntegerIndex == -1) {
      if (parentTable.getPartitionKeys() != null) {
        parentCD = null;
        parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
      }
      if (parentIntegerIndex == -1) {
        throw new InvalidObjectException("Parent column not found: " + columnName);
      }
    }
    if (uks.get(i).getUk_name() == null) {
      if (uks.get(i).getKey_seq() == 1) {
        constraintName = generateConstraintName(tableDB, tableName, columnName, "uk");
      }
    } else {
      constraintName = normalizeIdentifier(uks.get(i).getUk_name());
      if (constraintNameAlreadyExists(constraintName)) {
        throw new InvalidObjectException("Constraint name already exists: " + constraintName);
      }
    }
    ukNames.add(constraintName);
    int enableValidateRely = (uks.get(i).isEnable_cstr() ? 4 : 0)
        + (uks.get(i).isValidate_cstr() ? 2 : 0) + (uks.get(i).isRely_cstr() ? 1 : 0);
    MConstraint muk = new MConstraint(constraintName, MConstraint.UNIQUE_CONSTRAINT,
        uks.get(i).getKey_seq(), null, null, enableValidateRely, parentTable, null, parentCD,
        null, null, parentIntegerIndex);
    cstrs.add(muk);
  }
  pm.makePersistentAll(cstrs);
  return ukNames;
}
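Each of these methods resolves the constraint column via getColumnIndexFromTableColumns, first against the storage descriptor's columns and then, if not found, against the partition keys. A sketch of what such a lookup might look like; this is an assumption about its behavior (in particular the case-insensitive match), not the actual ObjectStore implementation:

// Illustrative lookup (assumed behavior): returns the position of columnName within
// the given column list, or -1 if the list is null or does not contain the column.
static int columnIndexOf(List<MFieldSchema> cols, String columnName) {
  if (cols == null) {
    return -1;
  }
  for (int i = 0; i < cols.size(); i++) {
    // Case-insensitive comparison is an assumption; identifiers are normalized upstream anyway.
    if (cols.get(i).getName().equalsIgnoreCase(columnName)) {
      return i;
    }
  }
  return -1;
}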