Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class TestObjectStore, method dropAllStoreObjects.
private static void dropAllStoreObjects(RawStore store) throws MetaException, InvalidObjectException, InvalidInputException {
  try {
    Deadline.registerIfNot(100000);
    List<Function> functions = store.getAllFunctions();
    for (Function func : functions) {
      store.dropFunction(func.getDbName(), func.getFunctionName());
    }
    List<String> dbs = store.getAllDatabases();
    for (String db : dbs) {
      List<String> tbls = store.getAllTables(db);
      for (String tbl : tbls) {
        Deadline.startTimer("getPartition");
        List<Partition> parts = store.getPartitions(db, tbl, 100);
        for (Partition part : parts) {
          store.dropPartition(db, tbl, part.getValues());
        }
        // Find any constraints and drop them
        Set<String> constraints = new HashSet<>();
        List<SQLPrimaryKey> pk = store.getPrimaryKeys(db, tbl);
        if (pk != null) {
          for (SQLPrimaryKey pkcol : pk) {
            constraints.add(pkcol.getPk_name());
          }
        }
        List<SQLForeignKey> fks = store.getForeignKeys(null, null, db, tbl);
        if (fks != null) {
          for (SQLForeignKey fkcol : fks) {
            constraints.add(fkcol.getFk_name());
          }
        }
        for (String constraint : constraints) {
          store.dropConstraint(db, tbl, constraint);
        }
        store.dropTable(db, tbl);
      }
      store.dropDatabase(db);
    }
    List<String> roles = store.listRoleNames();
    for (String role : roles) {
      store.removeRole(role);
    }
  } catch (NoSuchObjectException e) {
    // Ignore: the object is already gone, which is fine during cleanup.
  }
}
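For context, this cleanup helper is typically wired into the test lifecycle so that each case starts against an empty metastore. A minimal sketch of that wiring, assuming a JUnit 4 @After hook inside the same test class and an objectStore field initialized in a matching @Before method (both are assumptions for illustration, not part of the listing above):

// Sketch only: the objectStore field and this teardown hook are assumed.
@After
public void tearDown() throws MetaException, InvalidObjectException, InvalidInputException {
  // Drop the functions, partitions, constraints, tables, databases, and roles
  // left behind by the previous test.
  dropAllStoreObjects(objectStore);
}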
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class ObjectStore, method generateValidPKsOrUniqueSignatures.
private static Set<String> generateValidPKsOrUniqueSignatures(List<MFieldSchema> tableCols,
    List<SQLPrimaryKey> refTablePrimaryKeys, List<SQLUniqueConstraint> refTableUniqueConstraints) {
  final Set<String> validPKsOrUnique = new HashSet<>();
  if (!refTablePrimaryKeys.isEmpty()) {
    // Sort the primary key columns by constraint name and key sequence so the
    // generated signature is deterministic.
    Collections.sort(refTablePrimaryKeys, new Comparator<SQLPrimaryKey>() {
      @Override
      public int compare(SQLPrimaryKey o1, SQLPrimaryKey o2) {
        int keyNameComp = o1.getPk_name().compareTo(o2.getPk_name());
        if (keyNameComp == 0) {
          return Integer.compare(o1.getKey_seq(), o2.getKey_seq());
        }
        return keyNameComp;
      }
    });
    StringBuilder pkSignature = new StringBuilder();
    for (SQLPrimaryKey pk : refTablePrimaryKeys) {
      pkSignature.append(generateColNameTypeSignature(pk.getColumn_name(),
          getColumnFromTableColumns(tableCols, pk.getColumn_name()).getType()));
    }
    validPKsOrUnique.add(pkSignature.toString());
  }
  if (!refTableUniqueConstraints.isEmpty()) {
    // Sorting groups the columns of each unique constraint together, ordered by
    // key sequence within a constraint.
    Collections.sort(refTableUniqueConstraints, new Comparator<SQLUniqueConstraint>() {
      @Override
      public int compare(SQLUniqueConstraint o1, SQLUniqueConstraint o2) {
        int keyNameComp = o1.getUk_name().compareTo(o2.getUk_name());
        if (keyNameComp == 0) {
          return Integer.compare(o1.getKey_seq(), o2.getKey_seq());
        }
        return keyNameComp;
      }
    });
    StringBuilder ukSignature = new StringBuilder();
    for (int j = 0; j < refTableUniqueConstraints.size(); j++) {
      SQLUniqueConstraint uk = refTableUniqueConstraints.get(j);
      ukSignature.append(generateColNameTypeSignature(uk.getColumn_name(),
          getColumnFromTableColumns(tableCols, uk.getColumn_name()).getType()));
      // Emit one signature per unique constraint: flush whenever the next row
      // belongs to a different constraint, and at the end of the list.
      if (j + 1 < refTableUniqueConstraints.size()) {
        if (!refTableUniqueConstraints.get(j + 1).getUk_name().equals(refTableUniqueConstraints.get(j).getUk_name())) {
          validPKsOrUnique.add(ukSignature.toString());
          ukSignature = new StringBuilder();
        }
      } else {
        validPKsOrUnique.add(ukSignature.toString());
      }
    }
  }
  return validPKsOrUnique;
}
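To make the signature set concrete, here is a hedged sketch of how the method could be driven for a two-column primary key. The column names are hypothetical and the three-argument MFieldSchema (name, type, comment) constructor is an assumption; the SQLPrimaryKey argument order follows the constructor call in processPrimaryKeys below. Note how a composite key collapses into a single concatenated entry in the returned set:

// Sketch only: hypothetical columns and a two-column primary key pk1.
List<MFieldSchema> cols = Arrays.asList(
    new MFieldSchema("id", "int", null),       // assumed (name, type, comment) ctor
    new MFieldSchema("name", "string", null));
List<SQLPrimaryKey> pks = Arrays.asList(
    new SQLPrimaryKey("db1", "parent", "id", 1, "pk1", false, false, true),
    new SQLPrimaryKey("db1", "parent", "name", 2, "pk1", false, false, true));
// Both key columns are concatenated into one signature string, so the set
// holds exactly one entry that a foreign key's signature must match.
Set<String> sigs = generateValidPKsOrUniqueSignatures(cols, pks,
    Collections.<SQLUniqueConstraint>emptyList());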
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class ObjectStore, method addForeignKeys.
private List<String> addForeignKeys(List<SQLForeignKey> foreignKeys, boolean retrieveCD,
    List<SQLPrimaryKey> primaryKeys, List<SQLUniqueConstraint> uniqueConstraints)
    throws InvalidObjectException, MetaException {
  List<String> fkNames = new ArrayList<>();
  if (CollectionUtils.isNotEmpty(foreignKeys)) {
    List<MConstraint> mpkfks = new ArrayList<>();
    String currentConstraintName = null;
    // Iterate over the foreign key columns. The list may hold the columns of
    // several constraints back to back; each pass of this outer loop handles one
    // constraint and checks / adds information about each column.
    for (int i = 0; i < foreignKeys.size(); i++) {
      final String fkTableDB = normalizeIdentifier(foreignKeys.get(i).getFktable_db());
      final String fkTableName = normalizeIdentifier(foreignKeys.get(i).getFktable_name());
      // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
      // For instance, this is the case when we are creating the table.
      final AttachedMTableInfo nChildTable = getMTable(fkTableDB, fkTableName, retrieveCD);
      final MTable childTable = nChildTable.mtbl;
      if (childTable == null) {
        throw new InvalidObjectException("Child table not found: " + fkTableName);
      }
      MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
      final List<MFieldSchema> childCols = childCD == null || childCD.getCols() == null ?
          new ArrayList<>() : new ArrayList<>(childCD.getCols());
      if (childTable.getPartitionKeys() != null) {
        childCols.addAll(childTable.getPartitionKeys());
      }
      final String pkTableDB = normalizeIdentifier(foreignKeys.get(i).getPktable_db());
      final String pkTableName = normalizeIdentifier(foreignKeys.get(i).getPktable_name());
      // For primary keys, we retrieve the column descriptors if retrieveCD is true (which means
      // it is an alter table statement) or if it is a create table statement but we are
      // referencing another table instead of self for the primary key.
      final AttachedMTableInfo nParentTable;
      final MTable parentTable;
      MColumnDescriptor parentCD;
      final List<MFieldSchema> parentCols;
      final List<SQLPrimaryKey> existingTablePrimaryKeys;
      final List<SQLUniqueConstraint> existingTableUniqueConstraints;
      final boolean sameTable = fkTableDB.equals(pkTableDB) && fkTableName.equals(pkTableName);
      if (sameTable) {
        nParentTable = nChildTable;
        parentTable = childTable;
        parentCD = childCD;
        parentCols = childCols;
        existingTablePrimaryKeys = primaryKeys;
        existingTableUniqueConstraints = uniqueConstraints;
      } else {
        nParentTable = getMTable(pkTableDB, pkTableName, true);
        parentTable = nParentTable.mtbl;
        if (parentTable == null) {
          throw new InvalidObjectException("Parent table not found: " + pkTableName);
        }
        parentCD = nParentTable.mcd;
        parentCols = parentCD == null || parentCD.getCols() == null ?
            new ArrayList<>() : new ArrayList<>(parentCD.getCols());
        if (parentTable.getPartitionKeys() != null) {
          parentCols.addAll(parentTable.getPartitionKeys());
        }
        existingTablePrimaryKeys = getPrimaryKeys(pkTableDB, pkTableName);
        existingTableUniqueConstraints = getUniqueConstraints(pkTableDB, pkTableName);
      }
      // Check that the referenced table declares a primary key or unique
      // constraint and that the columns correspond.
      if (existingTablePrimaryKeys.isEmpty() && existingTableUniqueConstraints.isEmpty()) {
        throw new MetaException("Trying to define foreign key but there are no primary keys or unique keys for referenced table");
      }
      final Set<String> validPKsOrUnique = generateValidPKsOrUniqueSignatures(parentCols,
          existingTablePrimaryKeys, existingTableUniqueConstraints);
      StringBuilder fkSignature = new StringBuilder();
      StringBuilder referencedKSignature = new StringBuilder();
      for (; i < foreignKeys.size(); i++) {
        final SQLForeignKey foreignKey = foreignKeys.get(i);
        final String fkColumnName = normalizeIdentifier(foreignKey.getFkcolumn_name());
        int childIntegerIndex = getColumnIndexFromTableColumns(childCD.getCols(), fkColumnName);
        if (childIntegerIndex == -1) {
          if (childTable.getPartitionKeys() != null) {
            childCD = null;
            childIntegerIndex = getColumnIndexFromTableColumns(childTable.getPartitionKeys(), fkColumnName);
          }
          if (childIntegerIndex == -1) {
            throw new InvalidObjectException("Child column not found: " + fkColumnName);
          }
        }
        final String pkColumnName = normalizeIdentifier(foreignKey.getPkcolumn_name());
        int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD.getCols(), pkColumnName);
        if (parentIntegerIndex == -1) {
          if (parentTable.getPartitionKeys() != null) {
            parentCD = null;
            parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), pkColumnName);
          }
          if (parentIntegerIndex == -1) {
            throw new InvalidObjectException("Parent column not found: " + pkColumnName);
          }
        }
        if (foreignKey.getFk_name() == null) {
          // Generate a name only when we see the first column of the key; deriving
          // it from the table and column names helps ensure the uniqueness of the
          // generated constraint name.
          if (foreignKey.getKey_seq() == 1) {
            currentConstraintName = generateConstraintName(fkTableDB, fkTableName, pkTableDB, pkTableName, pkColumnName, fkColumnName, "fk");
          }
        } else {
          currentConstraintName = normalizeIdentifier(foreignKey.getFk_name());
          if (constraintNameAlreadyExists(currentConstraintName)) {
            throw new InvalidObjectException("Constraint name already exists: " + currentConstraintName);
          }
        }
        fkNames.add(currentConstraintName);
        Integer updateRule = foreignKey.getUpdate_rule();
        Integer deleteRule = foreignKey.getDelete_rule();
        // Pack the ENABLE/VALIDATE/RELY flags into a single bit mask (4/2/1).
        int enableValidateRely = (foreignKey.isEnable_cstr() ? 4 : 0)
            + (foreignKey.isValidate_cstr() ? 2 : 0) + (foreignKey.isRely_cstr() ? 1 : 0);
        MConstraint mpkfk = new MConstraint(currentConstraintName, MConstraint.FOREIGN_KEY_CONSTRAINT,
            foreignKey.getKey_seq(), deleteRule, updateRule, enableValidateRely, parentTable, childTable,
            parentCD, childCD, childIntegerIndex, parentIntegerIndex);
        mpkfks.add(mpkfk);
        final String fkColType = getColumnFromTableColumns(childCols, fkColumnName).getType();
        fkSignature.append(generateColNameTypeSignature(fkColumnName, fkColType));
        referencedKSignature.append(generateColNameTypeSignature(pkColumnName, fkColType));
        if (i + 1 < foreignKeys.size() && foreignKeys.get(i + 1).getKey_seq() == 1) {
          // Next one is a new key, we bail out from the inner loop
          break;
        }
      }
      String referenced = referencedKSignature.toString();
      if (!validPKsOrUnique.contains(referenced)) {
        throw new MetaException("Foreign key references " + referenced + " but no corresponding "
            + "primary key or unique key exists. Possible keys: " + validPKsOrUnique);
      }
      if (sameTable && fkSignature.toString().equals(referenced)) {
        throw new MetaException("Cannot be both foreign key and primary/unique key on same table: " + referenced);
      }
      fkSignature = new StringBuilder();
      referencedKSignature = new StringBuilder();
    }
    pm.makePersistentAll(mpkfks);
  }
  return fkNames;
}
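As a usage sketch, a caller could assemble the foreign key list with the Thrift setters that mirror the getters used above (getFktable_db, getFkcolumn_name, getKey_seq, and so on); the table and column names here are hypothetical:

// Sketch only: a single-column foreign key from child.parent_id to parent.id.
SQLForeignKey fk = new SQLForeignKey();
fk.setFktable_db("db1");
fk.setFktable_name("child");
fk.setFkcolumn_name("parent_id");
fk.setPktable_db("db1");
fk.setPktable_name("parent");
fk.setPkcolumn_name("id");
fk.setKey_seq(1);      // 1 marks the first column of a (possibly composite) key
fk.setFk_name(null);   // null makes addForeignKeys generate a constraint name
fk.setRely_cstr(true); // folded into the enableValidateRely bit mask above
List<String> fkNames = addForeignKeys(Collections.singletonList(fk), false,
    primaryKeys, uniqueConstraints);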
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class DDLTask, method createTable.
/**
* Create a new table.
*
* @param db
* The database in question.
* @param crtTbl
* This is the table we're creating.
* @return Returns 0 when execution succeeds and above 0 if it fails.
* @throws HiveException
* Throws this exception if an unexpected error occurs.
*/
private int createTable(Hive db, CreateTableDesc crtTbl) throws HiveException {
  // create the table
  Table tbl = crtTbl.toTable(conf);
  List<SQLPrimaryKey> primaryKeys = crtTbl.getPrimaryKeys();
  List<SQLForeignKey> foreignKeys = crtTbl.getForeignKeys();
  List<SQLUniqueConstraint> uniqueConstraints = crtTbl.getUniqueConstraints();
  List<SQLNotNullConstraint> notNullConstraints = crtTbl.getNotNullConstraints();
  List<SQLDefaultConstraint> defaultConstraints = crtTbl.getDefaultConstraints();
  List<SQLCheckConstraint> checkConstraints = crtTbl.getCheckConstraints();
  LOG.debug("creating table {} on {}", tbl.getFullyQualifiedName(), tbl.getDataLocation());
  if (crtTbl.getReplicationSpec().isInReplicationScope() && (!crtTbl.getReplaceMode())) {
    // if this is a replication spec, then replace-mode semantics might apply.
    // if we're already asking for a table replacement, then we can skip this check.
    // however, otherwise, if in replication scope, and we've not been explicitly asked
    // to replace, we should check if the object we're looking at exists, and if so,
    // trigger replace-mode semantics.
    Table existingTable = db.getTable(tbl.getDbName(), tbl.getTableName(), false);
    if (existingTable != null) {
      if (crtTbl.getReplicationSpec().allowEventReplacementInto(existingTable.getParameters())) {
        // we replace existing table.
        crtTbl.setReplaceMode(true);
      } else {
        LOG.debug("DDLTask: Create Table is skipped as table {} is newer than update", crtTbl.getTableName());
        // no replacement, the existing table state is newer than our update.
        return 0;
      }
    }
  }
  // create the table
  if (crtTbl.getReplaceMode()) {
    // replace-mode creates are really alters using CreateTableDesc.
    db.alterTable(tbl, null);
  } else {
    if ((foreignKeys != null && foreignKeys.size() > 0)
        || (primaryKeys != null && primaryKeys.size() > 0)
        || (uniqueConstraints != null && uniqueConstraints.size() > 0)
        || (notNullConstraints != null && notNullConstraints.size() > 0)
        || (checkConstraints != null && checkConstraints.size() > 0)
        || (defaultConstraints != null && defaultConstraints.size() > 0)) {
      db.createTable(tbl, crtTbl.getIfNotExists(), primaryKeys, foreignKeys,
          uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
    } else {
      db.createTable(tbl, crtTbl.getIfNotExists());
    }
    Long mmWriteId = crtTbl.getInitialMmWriteId();
    if (crtTbl.isCTAS() || mmWriteId != null) {
      Table createdTable = db.getTable(tbl.getDbName(), tbl.getTableName());
      if (crtTbl.isCTAS()) {
        DataContainer dc = new DataContainer(createdTable.getTTable());
        queryState.getLineageState().setLineage(createdTable.getPath(), dc, createdTable.getCols());
      }
    }
  }
  addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
  return 0;
}
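The long constraint-presence condition above can be written more compactly; a behaviorally equivalent sketch using CollectionUtils.isNotEmpty, the same helper ObjectStore.addForeignKeys uses:

// Equivalent to the chained null-and-size checks in the branch above.
boolean hasConstraints = CollectionUtils.isNotEmpty(primaryKeys)
    || CollectionUtils.isNotEmpty(foreignKeys)
    || CollectionUtils.isNotEmpty(uniqueConstraints)
    || CollectionUtils.isNotEmpty(notNullConstraints)
    || CollectionUtils.isNotEmpty(defaultConstraints)
    || CollectionUtils.isNotEmpty(checkConstraints);
if (hasConstraints) {
  db.createTable(tbl, crtTbl.getIfNotExists(), primaryKeys, foreignKeys,
      uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
} else {
  db.createTable(tbl, crtTbl.getIfNotExists());
}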
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class BaseSemanticAnalyzer, method processPrimaryKeys.
/**
* Process the primary keys from the ast nodes and populate the SQLPrimaryKey list.
* As of now, this is used by 'alter table add constraint' command. We expect constraint
* name to be user specified.
* @param parent Parent of the primary key token node
* @param child Child of the primary key token node containing the primary key columns details
* @param primaryKeys SQLPrimaryKey list to be populated by this function
* @throws SemanticException
*/
protected static void processPrimaryKeys(ASTNode parent, ASTNode child,
    List<SQLPrimaryKey> primaryKeys) throws SemanticException {
  int relyIndex = 2;
  int cnt = 1;
  String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
  for (int j = 0; j < child.getChild(0).getChildCount(); j++) {
    Tree grandChild = child.getChild(0).getChild(j);
    boolean rely = child.getChild(relyIndex).getType() == HiveParser.TOK_VALIDATE;
    boolean enable = child.getChild(relyIndex + 1).getType() == HiveParser.TOK_ENABLE;
    boolean validate = child.getChild(relyIndex + 2).getType() == HiveParser.TOK_VALIDATE;
    if (enable) {
      throw new SemanticException(ErrorMsg.INVALID_PK_SYNTAX.getMsg(" ENABLE feature not supported yet"));
    }
    if (validate) {
      throw new SemanticException(ErrorMsg.INVALID_PK_SYNTAX.getMsg(" VALIDATE feature not supported yet"));
    }
    primaryKeys.add(new SQLPrimaryKey(qualifiedTabName[0], qualifiedTabName[1],
        unescapeIdentifier(grandChild.getText().toLowerCase()), cnt++,
        unescapeIdentifier(child.getChild(1).getText().toLowerCase()), false, false, rely));
  }
}
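To make the populated list concrete: for a hypothetical statement ALTER TABLE db1.t1 ADD CONSTRAINT pk1 PRIMARY KEY (id, ts) DISABLE NOVALIDATE RELY, the loop above would add entries of this shape (argument order taken from the constructor call in the method; enable and validate are hard-coded to false there, and rely is shown as true for the RELY spec):

// Sketch only: the two SQLPrimaryKey rows for the hypothetical constraint pk1.
// Constructor: (dbName, tableName, columnName, keySeq, pkName, enable, validate, rely)
primaryKeys.add(new SQLPrimaryKey("db1", "t1", "id", 1, "pk1", false, false, true));
primaryKeys.add(new SQLPrimaryKey("db1", "t1", "ts", 2, "pk1", false, false, true));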