Use of org.apache.hadoop.hive.metastore.model.MConstraint in project Hive (Apache).
The class ObjectStore, method getPrimaryKeysViaJdo.
/**
 * Fetches all primary-key constraint rows for the given table via a JDO query
 * and converts them into thrift {@link SQLPrimaryKey} objects.
 *
 * @param catName catalog name
 * @param dbName  database name
 * @param tblName table name
 * @return list of primary-key columns (empty if the table has no primary key)
 */
private List<SQLPrimaryKey> getPrimaryKeysViaJdo(String catName, String dbName, String tblName) {
boolean committed = false;
List<SQLPrimaryKey> result = null;
Query query = null;
try {
openTransaction();
// Select only PRIMARY_KEY constraints belonging to the requested catalog/db/table.
query = pm.newQuery(MConstraint.class,
"parentTable.tableName == tbl_name && parentTable.database.name == db_name &&"
+ " parentTable.database.catalogName == cat_name &&"
+ " constraintType == MConstraint.PRIMARY_KEY_CONSTRAINT");
query.declareParameters("java.lang.String tbl_name, java.lang.String db_name, "
+ "java.lang.String cat_name");
Collection<?> constraints = (Collection<?>) query.execute(tblName, dbName, catName);
pm.retrieveAll(constraints);
result = new ArrayList<>();
for (Object obj : constraints) {
MConstraint pk = (MConstraint) obj;
// Key columns come from the parent column descriptor when present;
// otherwise the constraint refers to one of the table's partition keys.
List<MFieldSchema> cols = pk.getParentColumn() != null
? pk.getParentColumn().getCols()
: pk.getParentTable().getPartitionKeys();
// Bit-packed flags: 4 = ENABLE, 2 = VALIDATE, 1 = RELY.
int flags = pk.getEnableValidateRely();
boolean enable = (flags & 4) != 0;
boolean validate = (flags & 2) != 0;
boolean rely = (flags & 1) != 0;
SQLPrimaryKey keyCol = new SQLPrimaryKey(dbName, tblName,
cols.get(pk.getParentIntegerIndex()).getName(), pk.getPosition(),
pk.getConstraintName(), enable, validate, rely);
keyCol.setCatName(catName);
result.add(keyCol);
}
committed = commitTransaction();
} finally {
// Rolls back if commit did not happen and always closes the query.
rollbackAndCleanup(committed, query);
}
return result;
}
Use of org.apache.hadoop.hive.metastore.model.MConstraint in project Hive (Apache).
The class ObjectStore, method dropTable.
@Override
public boolean dropTable(String catName, String dbName, String tableName) throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException {
// Drops the table's metadata and every dependent object (grants, column
// statistics, constraints, storage descriptor, materialized-view creation
// metadata) inside a single transaction. Returns true only if the commit
// succeeded; on any failure the transaction is rolled back in the finally
// block and false is returned.
boolean materializedView = false;
boolean success = false;
try {
openTransaction();
MTable tbl = getMTable(catName, dbName, tableName);
pm.retrieve(tbl);
// If the table does not exist, fall through and just commit the empty
// transaction — this method does not throw for a missing table.
if (tbl != null) {
// Remember whether this is a materialized view so its creation
// metadata can be dropped below, before the table row itself.
materializedView = TableType.MATERIALIZED_VIEW.toString().equals(tbl.getTableType());
// first remove all the grants
List<MTablePrivilege> tabGrants = listAllTableGrants(catName, dbName, tableName);
if (CollectionUtils.isNotEmpty(tabGrants)) {
pm.deletePersistentAll(tabGrants);
}
List<MTableColumnPrivilege> tblColGrants = listTableAllColumnGrants(catName, dbName, tableName);
if (CollectionUtils.isNotEmpty(tblColGrants)) {
pm.deletePersistentAll(tblColGrants);
}
List<MPartitionPrivilege> partGrants = this.listTableAllPartitionGrants(catName, dbName, tableName);
if (CollectionUtils.isNotEmpty(partGrants)) {
pm.deletePersistentAll(partGrants);
}
List<MPartitionColumnPrivilege> partColGrants = listTableAllPartitionColumnGrants(catName, dbName, tableName);
if (CollectionUtils.isNotEmpty(partColGrants)) {
pm.deletePersistentAll(partColGrants);
}
// delete column statistics if present
try {
deleteTableColumnStatistics(catName, dbName, tableName, null, null);
} catch (NoSuchObjectException e) {
// Deliberately ignored: absent statistics are not an error for a drop.
LOG.info("Found no table level column statistics associated with {} to delete", TableName.getQualified(catName, dbName, tableName));
}
// Remove every constraint (PK/FK/unique/not-null/default/check) owned by
// the table; null constraint name means "all constraints".
List<MConstraint> tabConstraints = listAllTableConstraintsWithOptionalConstraintName(catName, dbName, tableName, null);
if (CollectionUtils.isNotEmpty(tabConstraints)) {
pm.deletePersistentAll(tabConstraints);
}
// Detach/clean the storage descriptor before the table row is removed.
preDropStorageDescriptor(tbl.getSd());
if (materializedView) {
dropCreationMetadata(tbl.getDatabase().getCatalogName(), tbl.getDatabase().getName(), tbl.getTableName());
}
// then remove the table
pm.deletePersistentAll(tbl);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
Use of org.apache.hadoop.hive.metastore.model.MConstraint in project Hive (Apache).
The class ObjectStore, method getPrimaryKeyConstraintName.
/**
 * Returns the name of the primary-key constraint on the given table, or
 * {@code null} if the table has no primary key.
 *
 * <p>Only the first matching constraint row is inspected; for a composite
 * primary key all rows share the same constraint name.
 *
 * @param catName catalog name
 * @param dbName  database name
 * @param tblName table name
 * @return the PK constraint name, or {@code null} when none exists
 */
private String getPrimaryKeyConstraintName(String catName, String dbName, String tblName) {
boolean commited = false;
String ret = null;
Query query = null;
try {
openTransaction();
// Use the JDOQL parameter name "cat_name" (not "catName") so the query
// parameter cannot be confused with the Java variable of the same name,
// matching the convention used by getPrimaryKeysViaJdo.
query = pm.newQuery(MConstraint.class, "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&" + " parentTable.database.catalogName == cat_name &&" + " constraintType == MConstraint.PRIMARY_KEY_CONSTRAINT");
query.declareParameters("java.lang.String tbl_name, java.lang.String db_name, " + "java.lang.String cat_name");
Collection<?> constraints = (Collection<?>) query.execute(tblName, dbName, catName);
pm.retrieveAll(constraints);
// Take the name from the first row only; remaining rows (composite key
// columns) carry the same constraint name.
for (Iterator<?> i = constraints.iterator(); i.hasNext(); ) {
MConstraint currPK = (MConstraint) i.next();
ret = currPK.getConstraintName();
break;
}
commited = commitTransaction();
} finally {
// Rolls back if the commit did not happen and always closes the query.
rollbackAndCleanup(commited, query);
}
return ret;
}
Use of org.apache.hadoop.hive.metastore.model.MConstraint in project Hive (Apache).
The class ObjectStore, method addPrimaryKeys.
// Persists the given primary-key columns as MConstraint rows and returns the
// input list with all identifiers normalized (lower-cased) in place.
// Note: constraintName is declared OUTSIDE the loop on purpose — for a
// composite key it is generated (or taken from pk_name) on the first column
// and reused for the remaining columns, so all rows share one constraint name.
// Throws InvalidObjectException for a missing table/column or duplicate
// constraint name, MetaException if a primary key already exists.
private List<SQLPrimaryKey> addPrimaryKeys(List<SQLPrimaryKey> pks, boolean retrieveCD) throws InvalidObjectException, MetaException {
List<MConstraint> mpks = new ArrayList<>();
String constraintName = null;
for (SQLPrimaryKey pk : pks) {
final String catName = normalizeIdentifier(pk.getCatName());
final String tableDB = normalizeIdentifier(pk.getTable_db());
final String tableName = normalizeIdentifier(pk.getTable_name());
final String columnName = normalizeIdentifier(pk.getColumn_name());
// If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
// For instance, this is the case when we are creating the table.
AttachedMTableInfo nParentTable = getMTable(catName, tableDB, tableName, retrieveCD);
MTable parentTable = nParentTable.mtbl;
if (parentTable == null) {
throw new InvalidObjectException("Parent table not found: " + tableName);
}
MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
// Look for the key column among the regular columns first ...
int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
if (parentIntegerIndex == -1) {
// ... then fall back to the partition keys; in that case the column
// descriptor is nulled so the MConstraint references a partition key.
if (parentTable.getPartitionKeys() != null) {
parentCD = null;
parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
}
if (parentIntegerIndex == -1) {
throw new InvalidObjectException("Parent column not found: " + columnName);
}
}
// A table can have at most one primary key (pending rows in mpks are not
// yet persisted, so this only detects a pre-existing PK).
if (getPrimaryKeyConstraintName(parentTable.getDatabase().getCatalogName(), parentTable.getDatabase().getName(), parentTable.getTableName()) != null) {
throw new MetaException(" Primary key already exists for: " + TableName.getQualified(catName, tableDB, tableName));
}
if (pk.getPk_name() == null) {
// Auto-generate a name only for the first key column (key_seq == 1);
// later columns of a composite key reuse the carried-over name.
if (pk.getKey_seq() == 1) {
constraintName = generateConstraintName(parentTable, tableDB, tableName, columnName, "pk");
}
} else {
constraintName = normalizeIdentifier(pk.getPk_name());
if (constraintNameAlreadyExists(parentTable, constraintName)) {
String fqConstraintName = String.format("%s.%s.%s", parentTable.getDatabase().getName(), parentTable.getTableName(), constraintName);
throw new InvalidObjectException("Constraint name already exists: " + fqConstraintName);
}
}
// Bit-packed flags: 4 = ENABLE, 2 = VALIDATE, 1 = RELY.
int enableValidateRely = (pk.isEnable_cstr() ? 4 : 0) + (pk.isValidate_cstr() ? 2 : 0) + (pk.isRely_cstr() ? 1 : 0);
MConstraint mpk = new MConstraint(constraintName, pk.getKey_seq(), MConstraint.PRIMARY_KEY_CONSTRAINT, null, null, enableValidateRely, parentTable, null, parentCD, null, null, parentIntegerIndex);
mpks.add(mpk);
// Add normalized identifier back to result
pk.setCatName(catName);
pk.setTable_db(tableDB);
pk.setTable_name(tableName);
pk.setColumn_name(columnName);
pk.setPk_name(constraintName);
}
pm.makePersistentAll(mpks);
return pks;
}
Use of org.apache.hadoop.hive.metastore.model.MConstraint in project Hive (Apache).
The class ObjectStore, method addCheckConstraints.
/**
 * Persists the given CHECK constraints as MConstraint rows and returns the
 * input list with its identifiers normalized (lower-cased) in place.
 *
 * @param ccs        check constraints to add
 * @param retrieveCD whether a deep retrieval of the table's column descriptor
 *                   is required (false while the table itself is being created)
 * @return the same list, with catalog/db/table/column names normalized and the
 *         constraint name filled in from the persisted row
 */
private List<SQLCheckConstraint> addCheckConstraints(List<SQLCheckConstraint> ccs, boolean retrieveCD) throws InvalidObjectException, MetaException {
List<MConstraint> mConstraints = new ArrayList<>();
for (SQLCheckConstraint cc : ccs) {
final String catName = normalizeIdentifier(cc.getCatName());
final String tableDB = normalizeIdentifier(cc.getTable_db());
final String tableName = normalizeIdentifier(cc.getTable_name());
// Table-level check constraints have no column; keep null in that case.
final String columnName = cc.getColumn_name() == null ? null : normalizeIdentifier(cc.getColumn_name());
// The requested constraint name is passed through as-is (not normalized);
// addConstraint normalizes or generates the final name.
MConstraint mConstraint = addConstraint(catName, tableDB, tableName, columnName,
cc.getDc_name(), cc.isEnable_cstr(), cc.isRely_cstr(), cc.isValidate_cstr(),
MConstraint.CHECK_CONSTRAINT, cc.getCheck_expression(), retrieveCD);
mConstraints.add(mConstraint);
// Add normalized identifier back to result
cc.setCatName(catName);
cc.setTable_db(tableDB);
cc.setTable_name(tableName);
cc.setColumn_name(columnName);
cc.setDc_name(mConstraint.getConstraintName());
}
pm.makePersistentAll(mConstraints);
return ccs;
}
Aggregations