Search in sources :

Example 16 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class ObjectStore, the method getForeignKeysViaJdo:

/**
 * Fetches foreign key constraints via a JDO query over {@code MConstraint}, filtered by
 * catalog and by the optional parent (referenced) and child (referencing) database/table
 * names. Null filter arguments are omitted from both the query text and the bound
 * parameter list — the two are assembled in lockstep, so their ordering must stay in sync.
 *
 * @param catName        catalog name; applied to both the parent and child table
 * @param parentDbName   parent (referenced) database name, or null to match any
 * @param parentTblName  parent (referenced) table name, or null to match any
 * @param foreignDbName  child (referencing) database name, or null to match any
 * @param foreignTblName child (referencing) table name, or null to match any
 * @return the matching foreign keys; null if the query failed before results were built
 */
private List<SQLForeignKey> getForeignKeysViaJdo(String catName, String parentDbName, String parentTblName, String foreignDbName, String foreignTblName) {
    boolean commited = false;
    List<SQLForeignKey> foreignKeys = null;
    Collection<?> constraints = null;
    Query query = null;
    // Cache of "db.table" -> primary key constraint name, so the PK name lookup is done
    // at most once per distinct parent table even when many FK rows reference it.
    Map<String, String> tblToConstraint = new HashMap<>();
    try {
        openTransaction();
        // Build the JDOQL filter; each optional predicate is only appended when its
        // corresponding argument is non-null. Parameter declaration and binding below
        // must follow exactly this same conditional order.
        String queryText = " parentTable.database.catalogName == catName1 &&" + "childTable.database.catalogName == catName2 && " + (parentTblName != null ? "parentTable.tableName == parent_tbl_name && " : "") + (parentDbName != null ? " parentTable.database.name == parent_db_name && " : "") + (foreignTblName != null ? " childTable.tableName == foreign_tbl_name && " : "") + (foreignDbName != null ? " childTable.database.name == foreign_db_name && " : "") + " constraintType == MConstraint.FOREIGN_KEY_CONSTRAINT";
        queryText = queryText.trim();
        query = pm.newQuery(MConstraint.class, queryText);
        // Declared parameters mirror the optional predicates added above, in the same order.
        String paramText = "java.lang.String catName1, java.lang.String catName2" + (parentTblName == null ? "" : ", java.lang.String parent_tbl_name") + (parentDbName == null ? "" : " , java.lang.String parent_db_name") + (foreignTblName == null ? "" : ", java.lang.String foreign_tbl_name") + (foreignDbName == null ? "" : " , java.lang.String foreign_db_name");
        query.declareParameters(paramText);
        List<String> params = new ArrayList<>();
        params.add(catName);
        // This is not a mistake, catName is in the where clause twice
        params.add(catName);
        if (parentTblName != null) {
            params.add(parentTblName);
        }
        if (parentDbName != null) {
            params.add(parentDbName);
        }
        if (foreignTblName != null) {
            params.add(foreignTblName);
        }
        if (foreignDbName != null) {
            params.add(foreignDbName);
        }
        constraints = (Collection<?>) query.executeWithArray(params.toArray(new String[0]));
        pm.retrieveAll(constraints);
        foreignKeys = new ArrayList<>();
        for (Iterator<?> i = constraints.iterator(); i.hasNext(); ) {
            MConstraint currPKFK = (MConstraint) i.next();
            // When the constraint has no explicit column reference, fall back to the
            // table's partition keys (the constraint is on a partition column).
            List<MFieldSchema> parentCols = currPKFK.getParentColumn() != null ? currPKFK.getParentColumn().getCols() : currPKFK.getParentTable().getPartitionKeys();
            List<MFieldSchema> childCols = currPKFK.getChildColumn() != null ? currPKFK.getChildColumn().getCols() : currPKFK.getChildTable().getPartitionKeys();
            // enableValidateRely packs three flags into one int: bit 2 = ENABLE,
            // bit 1 = VALIDATE, bit 0 = RELY.
            int enableValidateRely = currPKFK.getEnableValidateRely();
            boolean enable = (enableValidateRely & 4) != 0;
            boolean validate = (enableValidateRely & 2) != 0;
            boolean rely = (enableValidateRely & 1) != 0;
            String consolidatedtblName = currPKFK.getParentTable().getDatabase().getName() + "." + currPKFK.getParentTable().getTableName();
            String pkName;
            // Reuse a previously resolved PK constraint name for this parent table if available.
            if (tblToConstraint.containsKey(consolidatedtblName)) {
                pkName = tblToConstraint.get(consolidatedtblName);
            } else {
                pkName = getPrimaryKeyConstraintName(currPKFK.getParentTable().getDatabase().getCatalogName(), currPKFK.getParentTable().getDatabase().getName(), currPKFK.getParentTable().getTableName());
                tblToConstraint.put(consolidatedtblName, pkName);
            }
            SQLForeignKey fk = new SQLForeignKey(currPKFK.getParentTable().getDatabase().getName(), currPKFK.getParentTable().getTableName(), parentCols.get(currPKFK.getParentIntegerIndex()).getName(), currPKFK.getChildTable().getDatabase().getName(), currPKFK.getChildTable().getTableName(), childCols.get(currPKFK.getChildIntegerIndex()).getName(), currPKFK.getPosition(), currPKFK.getUpdateRule(), currPKFK.getDeleteRule(), currPKFK.getConstraintName(), pkName, enable, validate, rely);
            fk.setCatName(catName);
            foreignKeys.add(fk);
        }
        commited = commitTransaction();
    } finally {
        // Rolls back if commit did not happen and closes the query in either case.
        rollbackAndCleanup(commited, query);
    }
    return foreignKeys;
}
Also used : SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery) Query(javax.jdo.Query) MScheduledQuery(org.apache.hadoop.hive.metastore.model.MScheduledQuery) HashMap(java.util.HashMap) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) ArrayList(java.util.ArrayList) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)

Example 17 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class SQLForeignKeyBuilder, the method build:

/**
 * Builds one {@code SQLForeignKey} per foreign-key column, pairing each foreign column
 * with the primary-key column at the same position.
 *
 * @param conf configuration consulted by the buildability check
 * @return the constructed foreign key column entries, in column order
 * @throws MetaException if required primary-key information is missing or the column
 *         counts of the two sides differ
 */
public List<SQLForeignKey> build(Configuration conf) throws MetaException {
    checkBuildable("to_" + pkTable + "_foreign_key", conf);
    // The referenced primary key must be fully specified before any rows are built.
    if (pkTable == null || pkColumns.isEmpty() || pkName == null) {
        throw new MetaException("You must provide the primary key table, columns, and name");
    }
    if (columns.size() != pkColumns.size()) {
        throw new MetaException("The number of foreign columns must match the number of primary key columns");
    }
    List<SQLForeignKey> foreignKeys = new ArrayList<>(columns.size());
    for (int col = 0; col < columns.size(); col++) {
        // Each entry links foreign column `col` to the primary key column at the same index.
        SQLForeignKey key = new SQLForeignKey(pkDb, pkTable, pkColumns.get(col), dbName, tableName, columns.get(col), getNextSeq(), updateRule, deleteRule, constraintName, pkName, enable, validate, rely);
        key.setCatName(catName);
        foreignKeys.add(key);
    }
    return foreignKeys;
}
Also used : SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)

Example 18 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class TestObjectStore, the method dropAllStoreObjects:

@Deprecated
/**
 * Best-effort teardown helper: removes all functions, then for every catalog drops each
 * table's partitions, constraints and the table itself, then the database and the catalog,
 * and finally removes all roles.
 *
 * Fix: the per-catalog loop previously queried and dropped tables, partitions and
 * constraints using DEFAULT_CATALOG_NAME while iterating over all catalogs, so objects in
 * non-default catalogs were never enumerated and their databases were dropped while still
 * holding tables. All operations inside the loop now use the loop's {@code catName}.
 *
 * @param store the metastore implementation to purge
 */
private static void dropAllStoreObjects(RawStore store) throws MetaException, InvalidObjectException, InvalidInputException {
    try {
        // Functions live outside the catalog loop; only the default catalog's are registered here.
        List<Function> functions = store.getAllFunctions(DEFAULT_CATALOG_NAME);
        for (Function func : functions) {
            store.dropFunction(DEFAULT_CATALOG_NAME, func.getDbName(), func.getFunctionName());
        }
        for (String catName : store.getCatalogs()) {
            List<String> dbs = store.getAllDatabases(catName);
            for (String db : dbs) {
                List<String> tbls = store.getAllTables(catName, db);
                for (String tbl : tbls) {
                    List<Partition> parts = store.getPartitions(catName, db, tbl, 100);
                    for (Partition part : parts) {
                        store.dropPartition(catName, db, tbl, part.getValues());
                    }
                    // Find any constraints and drop them before the table itself.
                    Set<String> constraints = new HashSet<>();
                    List<SQLPrimaryKey> pk = store.getPrimaryKeys(catName, db, tbl);
                    if (pk != null) {
                        for (SQLPrimaryKey pkcol : pk) {
                            constraints.add(pkcol.getPk_name());
                        }
                    }
                    List<SQLForeignKey> fks = store.getForeignKeys(catName, null, null, db, tbl);
                    if (fks != null) {
                        for (SQLForeignKey fkcol : fks) {
                            constraints.add(fkcol.getFk_name());
                        }
                    }
                    for (String constraint : constraints) {
                        store.dropConstraint(catName, db, tbl, constraint);
                    }
                    store.dropTable(catName, db, tbl);
                }
                store.dropDatabase(catName, db);
            }
            store.dropCatalog(catName);
        }
        List<String> roles = store.listRoleNames();
        for (String role : roles) {
            store.removeRole(role);
        }
    } catch (NoSuchObjectException ignored) {
        // Best-effort cleanup: an already-missing object is fine to skip.
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Function(org.apache.hadoop.hive.metastore.api.Function) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) HashSet(java.util.HashSet)

Example 19 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class GetCrossReferenceOperation, the method runInternal:

@Override
/**
 * Populates the result set with one row per foreign key linking the configured parent
 * (primary key side) and foreign (referencing side) schema/table pair, as returned by
 * the metastore. Transitions the operation to FINISHED on success or ERROR on failure.
 *
 * @throws HiveSQLException wrapping any failure while fetching or materializing rows
 */
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    log.info("Fetching cross reference metadata");
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        ForeignKeysRequest request = new ForeignKeysRequest(parentSchemaName, parentTableName, foreignSchemaName, foreignTableName);
        List<SQLForeignKey> foreignKeys = metastoreClient.getForeignKeys(request);
        // A null result means no metadata is available; leave the row set untouched.
        if (foreignKeys == null) {
            return;
        }
        for (SQLForeignKey key : foreignKeys) {
            // Column order follows the JDBC getCrossReference result-set contract;
            // the trailing 0 is the DEFERRABILITY placeholder.
            Object[] row = new Object[] { parentCatalogName, key.getPktable_db(), key.getPktable_name(), key.getPkcolumn_name(), foreignCatalogName, key.getFktable_db(), key.getFktable_name(), key.getFkcolumn_name(), key.getKey_seq(), key.getUpdate_rule(), key.getDelete_rule(), key.getFk_name(), key.getPk_name(), 0 };
            rowSet.addRow(row);
            if (log.isDebugEnabled()) {
                String msg = getDebugMessage("cross reference", RESULT_SET_SCHEMA);
                log.debug(msg, row);
            }
        }
        if (log.isDebugEnabled() && rowSet.numRows() == 0) {
            log.debug("No cross reference metadata has been returned.");
        }
        setState(OperationState.FINISHED);
        log.info("Fetching cross reference metadata has been successfully finished");
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used : ForeignKeysRequest(org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) HiveSQLException(org.apache.hive.service.cli.HiveSQLException)

Example 20 with SQLForeignKey

use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class TestForeignKey, the test method inOtherCatalog:

@Test
/**
 * Exercises the foreign key lifecycle in a non-default catalog: create a primary key and
 * a referencing foreign key, fetch and compare, drop the constraint, verify it is gone,
 * and confirm the same key can be re-added.
 */
public void inOtherCatalog() throws TException {
    Table referencedTable = testTables[2];
    Table referencingTable = testTables[3];
    String fkName = "othercatfk";
    // Single column unnamed primary key in default catalog and database
    List<SQLPrimaryKey> primaryKey = new SQLPrimaryKeyBuilder().onTable(referencedTable).addColumn("col1").build(metaStore.getConf());
    client.addPrimaryKey(primaryKey);
    List<SQLForeignKey> foreignKey = new SQLForeignKeyBuilder().fromPrimaryKey(primaryKey).onTable(referencingTable).addColumn("col1").setConstraintName(fkName).build(metaStore.getConf());
    client.addForeignKey(foreignKey);
    ForeignKeysRequest request = new ForeignKeysRequest(referencedTable.getDbName(), referencedTable.getTableName(), referencingTable.getDbName(), referencingTable.getTableName());
    request.setCatName(referencingTable.getCatName());
    List<SQLForeignKey> actual = client.getForeignKeys(request);
    // Copy the server-assigned FK name so the full-object comparison can succeed.
    foreignKey.get(0).setFk_name(actual.get(0).getFk_name());
    Assert.assertEquals(foreignKey, actual);
    // Drop a foreign key
    client.dropConstraint(referencingTable.getCatName(), referencingTable.getDbName(), referencingTable.getTableName(), fkName);
    request = new ForeignKeysRequest(referencedTable.getDbName(), referencedTable.getTableName(), referencingTable.getDbName(), referencingTable.getTableName());
    request.setCatName(referencingTable.getCatName());
    actual = client.getForeignKeys(request);
    Assert.assertTrue(actual.isEmpty());
    // Make sure I can add it back
    client.addForeignKey(foreignKey);
}
Also used : SQLPrimaryKeyBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder) SQLForeignKeyBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder) ForeignKeysRequest(org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Aggregations

SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey)46 SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey)28 Test (org.junit.Test)20 Table (org.apache.hadoop.hive.metastore.api.Table)19 SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)15 ArrayList (java.util.ArrayList)13 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)13 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)13 SQLForeignKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder)13 SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder)13 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)12 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)11 ForeignKeysRequest (org.apache.hadoop.hive.metastore.api.ForeignKeysRequest)9 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)6 IOException (java.io.IOException)5 HashMap (java.util.HashMap)5 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)5 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)5 TApplicationException (org.apache.thrift.TApplicationException)5 Tree (org.antlr.runtime.tree.Tree)3