Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project Hive (Apache).
Class ObjectStore, method getForeignKeysViaJdo:
/**
 * Fetches foreign key constraints via a JDO query over MConstraint objects.
 * All filter arguments except catName are optional: a null db/table name
 * simply omits that predicate from the query filter.
 *
 * @param catName        catalog name; applied to both parent and child tables
 * @param parentDbName   parent (referenced) database name, or null for any
 * @param parentTblName  parent (referenced) table name, or null for any
 * @param foreignDbName  child (referencing) database name, or null for any
 * @param foreignTblName child (referencing) table name, or null for any
 * @return one SQLForeignKey per matching MConstraint row
 */
private List<SQLForeignKey> getForeignKeysViaJdo(String catName, String parentDbName, String parentTblName, String foreignDbName, String foreignTblName) {
boolean commited = false;
List<SQLForeignKey> foreignKeys = null;
Collection<?> constraints = null;
Query query = null;
// Cache of "db.table" -> primary key constraint name so the PK name lookup
// runs at most once per distinct parent table.
Map<String, String> tblToConstraint = new HashMap<>();
try {
openTransaction();
// Build the JDOQL filter dynamically, including only the predicates whose
// arguments were supplied.
// NOTE(review): the first two fragments concatenate to "catName1 &&childTable..."
// with no space after "&&"; JDOQL appears to tolerate this, but confirm.
String queryText = " parentTable.database.catalogName == catName1 &&" + "childTable.database.catalogName == catName2 && " + (parentTblName != null ? "parentTable.tableName == parent_tbl_name && " : "") + (parentDbName != null ? " parentTable.database.name == parent_db_name && " : "") + (foreignTblName != null ? " childTable.tableName == foreign_tbl_name && " : "") + (foreignDbName != null ? " childTable.database.name == foreign_db_name && " : "") + " constraintType == MConstraint.FOREIGN_KEY_CONSTRAINT";
queryText = queryText.trim();
query = pm.newQuery(MConstraint.class, queryText);
// Parameter declaration must list parameters in the same order as the
// predicates above, and the positional params below must match it.
String paramText = "java.lang.String catName1, java.lang.String catName2" + (parentTblName == null ? "" : ", java.lang.String parent_tbl_name") + (parentDbName == null ? "" : " , java.lang.String parent_db_name") + (foreignTblName == null ? "" : ", java.lang.String foreign_tbl_name") + (foreignDbName == null ? "" : " , java.lang.String foreign_db_name");
query.declareParameters(paramText);
List<String> params = new ArrayList<>();
params.add(catName);
// This is not a mistake, catName is in the where clause twice
params.add(catName);
if (parentTblName != null) {
params.add(parentTblName);
}
if (parentDbName != null) {
params.add(parentDbName);
}
if (foreignTblName != null) {
params.add(foreignTblName);
}
if (foreignDbName != null) {
params.add(foreignDbName);
}
constraints = (Collection<?>) query.executeWithArray(params.toArray(new String[0]));
pm.retrieveAll(constraints);
foreignKeys = new ArrayList<>();
for (Iterator<?> i = constraints.iterator(); i.hasNext(); ) {
MConstraint currPKFK = (MConstraint) i.next();
// When getParentColumn()/getChildColumn() is null, fall back to the
// table's partition keys -- presumably the constraint column is a
// partition column in that case (TODO confirm).
List<MFieldSchema> parentCols = currPKFK.getParentColumn() != null ? currPKFK.getParentColumn().getCols() : currPKFK.getParentTable().getPartitionKeys();
List<MFieldSchema> childCols = currPKFK.getChildColumn() != null ? currPKFK.getChildColumn().getCols() : currPKFK.getChildTable().getPartitionKeys();
// enableValidateRely bit-packs three flags:
// bit 2 (0x4) = enable, bit 1 (0x2) = validate, bit 0 (0x1) = rely.
int enableValidateRely = currPKFK.getEnableValidateRely();
boolean enable = (enableValidateRely & 4) != 0;
boolean validate = (enableValidateRely & 2) != 0;
boolean rely = (enableValidateRely & 1) != 0;
String consolidatedtblName = currPKFK.getParentTable().getDatabase().getName() + "." + currPKFK.getParentTable().getTableName();
String pkName;
if (tblToConstraint.containsKey(consolidatedtblName)) {
pkName = tblToConstraint.get(consolidatedtblName);
} else {
pkName = getPrimaryKeyConstraintName(currPKFK.getParentTable().getDatabase().getCatalogName(), currPKFK.getParentTable().getDatabase().getName(), currPKFK.getParentTable().getTableName());
tblToConstraint.put(consolidatedtblName, pkName);
}
SQLForeignKey fk = new SQLForeignKey(currPKFK.getParentTable().getDatabase().getName(), currPKFK.getParentTable().getTableName(), parentCols.get(currPKFK.getParentIntegerIndex()).getName(), currPKFK.getChildTable().getDatabase().getName(), currPKFK.getChildTable().getTableName(), childCols.get(currPKFK.getChildIntegerIndex()).getName(), currPKFK.getPosition(), currPKFK.getUpdateRule(), currPKFK.getDeleteRule(), currPKFK.getConstraintName(), pkName, enable, validate, rely);
fk.setCatName(catName);
foreignKeys.add(fk);
}
commited = commitTransaction();
} finally {
// Rolls back if the commit never happened and closes the query either way.
rollbackAndCleanup(commited, query);
}
return foreignKeys;
}
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project Hive (Apache).
Class SQLForeignKeyBuilder, method build:
/**
 * Builds the foreign key definition accumulated by this builder: one
 * SQLForeignKey per (primary key column, foreign column) pair, all sharing
 * the same constraint name and sequenced via getNextSeq().
 *
 * @param conf configuration consulted by checkBuildable
 * @return the list of foreign key columns making up the constraint
 * @throws MetaException if the referenced primary key information is
 *         incomplete or the column counts do not line up
 */
public List<SQLForeignKey> build(Configuration conf) throws MetaException {
// Validate the primary-key side before deriving the default constraint name,
// so a missing pkTable fails with a clear message instead of producing a
// nonsensical "to_null_foreign_key" name inside checkBuildable.
if (pkTable == null || pkColumns.isEmpty() || pkName == null) {
throw new MetaException("You must provide the primary key table, columns, and name");
}
if (columns.size() != pkColumns.size()) {
throw new MetaException("The number of foreign columns must match the number of primary key" + " columns");
}
checkBuildable("to_" + pkTable + "_foreign_key", conf);
// One SQLForeignKey object per column pair; presize to avoid growth.
List<SQLForeignKey> fk = new ArrayList<>(columns.size());
for (int i = 0; i < columns.size(); i++) {
SQLForeignKey keyCol = new SQLForeignKey(pkDb, pkTable, pkColumns.get(i), dbName, tableName, columns.get(i), getNextSeq(), updateRule, deleteRule, constraintName, pkName, enable, validate, rely);
keyCol.setCatName(catName);
fk.add(keyCol);
}
return fk;
}
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project Hive (Apache).
Class TestObjectStore, method dropAllStoreObjects:
/**
 * Best-effort removal of every object in the store: functions, partitions,
 * constraints, tables, databases, catalogs, and roles. Used to reset state
 * between tests.
 *
 * Fix: the per-catalog loop previously queried and dropped tables,
 * partitions, and constraints with DEFAULT_CATALOG_NAME even while iterating
 * over all catalogs, so objects in non-default catalogs were never cleaned
 * up (inconsistent with dropDatabase/dropCatalog in the same loop, which
 * already used catName). The loop now uses the iterated catalog throughout.
 */
@Deprecated
private static void dropAllStoreObjects(RawStore store) throws MetaException, InvalidObjectException, InvalidInputException {
try {
// Functions are only dropped from the default catalog, matching the
// original behavior. NOTE(review): consider looping over catalogs here too.
List<Function> functions = store.getAllFunctions(DEFAULT_CATALOG_NAME);
for (Function func : functions) {
store.dropFunction(DEFAULT_CATALOG_NAME, func.getDbName(), func.getFunctionName());
}
for (String catName : store.getCatalogs()) {
List<String> dbs = store.getAllDatabases(catName);
for (String db : dbs) {
List<String> tbls = store.getAllTables(catName, db);
for (String tbl : tbls) {
List<Partition> parts = store.getPartitions(catName, db, tbl, 100);
for (Partition part : parts) {
store.dropPartition(catName, db, tbl, part.getValues());
}
// Find any constraints and drop them before the table itself.
Set<String> constraints = new HashSet<>();
List<SQLPrimaryKey> pk = store.getPrimaryKeys(catName, db, tbl);
if (pk != null) {
for (SQLPrimaryKey pkcol : pk) {
constraints.add(pkcol.getPk_name());
}
}
List<SQLForeignKey> fks = store.getForeignKeys(catName, null, null, db, tbl);
if (fks != null) {
for (SQLForeignKey fkcol : fks) {
constraints.add(fkcol.getFk_name());
}
}
for (String constraint : constraints) {
store.dropConstraint(catName, db, tbl, constraint);
}
store.dropTable(catName, db, tbl);
}
store.dropDatabase(catName, db);
}
store.dropCatalog(catName);
}
List<String> roles = store.listRoleNames();
for (String role : roles) {
store.removeRole(role);
}
} catch (NoSuchObjectException ignored) {
// Best-effort cleanup: an object disappearing mid-drop is acceptable.
}
}
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project Hive (Apache).
Class GetCrossReferenceOperation, method runInternal:
/**
 * Runs the cross-reference (foreign key) metadata fetch: asks the metastore
 * for foreign keys matching the configured parent/foreign schema and table
 * names, then adds one row per key column to the operation's result set.
 *
 * @throws HiveSQLException if the metastore lookup fails
 */
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
log.info("Fetching cross reference metadata");
try {
IMetaStoreClient client = getParentSession().getMetaStoreClient();
ForeignKeysRequest request = new ForeignKeysRequest(parentSchemaName, parentTableName, foreignSchemaName, foreignTableName);
List<SQLForeignKey> foreignKeys = client.getForeignKeys(request);
if (foreignKeys == null) {
return;
}
for (SQLForeignKey key : foreignKeys) {
// Column order matches RESULT_SET_SCHEMA for cross-reference rows.
Object[] row = { parentCatalogName, key.getPktable_db(), key.getPktable_name(), key.getPkcolumn_name(), foreignCatalogName, key.getFktable_db(), key.getFktable_name(), key.getFkcolumn_name(), key.getKey_seq(), key.getUpdate_rule(), key.getDelete_rule(), key.getFk_name(), key.getPk_name(), 0 };
rowSet.addRow(row);
if (log.isDebugEnabled()) {
log.debug(getDebugMessage("cross reference", RESULT_SET_SCHEMA), row);
}
}
if (log.isDebugEnabled() && rowSet.numRows() == 0) {
log.debug("No cross reference metadata has been returned.");
}
setState(OperationState.FINISHED);
log.info("Fetching cross reference metadata has been successfully finished");
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException(e);
}
}
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project Hive (Apache).
Class TestForeignKey, method inOtherCatalog:
/**
 * Verifies the foreign key lifecycle in a non-default catalog: create the
 * key, fetch it back, drop it, confirm it is gone, and re-create it.
 */
@Test
public void inOtherCatalog() throws TException {
Table referenced = testTables[2];
Table referencing = testTables[3];
String fkName = "othercatfk";
// Single column unnamed primary key in default catalog and database
List<SQLPrimaryKey> primaryKey = new SQLPrimaryKeyBuilder().onTable(referenced).addColumn("col1").build(metaStore.getConf());
client.addPrimaryKey(primaryKey);
List<SQLForeignKey> foreignKey = new SQLForeignKeyBuilder().fromPrimaryKey(primaryKey).onTable(referencing).addColumn("col1").setConstraintName(fkName).build(metaStore.getConf());
client.addForeignKey(foreignKey);
ForeignKeysRequest request = new ForeignKeysRequest(referenced.getDbName(), referenced.getTableName(), referencing.getDbName(), referencing.getTableName());
request.setCatName(referencing.getCatName());
List<SQLForeignKey> fetched = client.getForeignKeys(request);
// Copy the server-assigned fk name into the expected object before comparing.
foreignKey.get(0).setFk_name(fetched.get(0).getFk_name());
Assert.assertEquals(foreignKey, fetched);
// Drop a foreign key
client.dropConstraint(referencing.getCatName(), referencing.getDbName(), referencing.getTableName(), fkName);
request = new ForeignKeysRequest(referenced.getDbName(), referenced.getTableName(), referencing.getDbName(), referencing.getTableName());
request.setCatName(referencing.getCatName());
fetched = client.getForeignKeys(request);
Assert.assertTrue(fetched.isEmpty());
// Make sure I can add it back
client.addForeignKey(foreignKey);
}
Aggregations