Search in sources:

Example 16 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in the Apache Hive project.

From the class ObjectStore, method getPrimaryKeysViaJdo.

/**
 * Retrieves the primary key columns of the given table with a direct JDO query
 * (the JDO fallback path, as opposed to direct-SQL). One {@link SQLPrimaryKey}
 * is built per MConstraint row of type PRIMARY_KEY_CONSTRAINT on the table.
 *
 * @param catName catalog of the table
 * @param dbName  database of the table
 * @param tblName table whose primary key is fetched
 * @return the primary key columns; empty when the table declares no primary key
 */
private List<SQLPrimaryKey> getPrimaryKeysViaJdo(String catName, String dbName, String tblName) {
    boolean commited = false;
    List<SQLPrimaryKey> primaryKeys = null;
    Query query = null;
    try {
        openTransaction();
        query = pm.newQuery(MConstraint.class,
            "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&"
                + " parentTable.database.catalogName == cat_name &&"
                + " constraintType == MConstraint.PRIMARY_KEY_CONSTRAINT");
        query.declareParameters(
            "java.lang.String tbl_name, java.lang.String db_name, " + "java.lang.String cat_name");
        Collection<?> constraints = (Collection<?>) query.execute(tblName, dbName, catName);
        pm.retrieveAll(constraints);
        primaryKeys = new ArrayList<>();
        for (Object element : constraints) {
            MConstraint pkConstraint = (MConstraint) element;
            // A null parent column descriptor means the key column is a partition key,
            // so resolve the column list from the table's partition keys instead.
            List<MFieldSchema> columns = pkConstraint.getParentColumn() != null
                ? pkConstraint.getParentColumn().getCols()
                : pkConstraint.getParentTable().getPartitionKeys();
            // enable/validate/rely are bit-packed into a single int:
            // bit 4 = enable, bit 2 = validate, bit 1 = rely.
            int evrBits = pkConstraint.getEnableValidateRely();
            SQLPrimaryKey key = new SQLPrimaryKey(dbName, tblName,
                columns.get(pkConstraint.getParentIntegerIndex()).getName(),
                pkConstraint.getPosition(), pkConstraint.getConstraintName(),
                (evrBits & 4) != 0, (evrBits & 2) != 0, (evrBits & 1) != 0);
            key.setCatName(catName);
            primaryKeys.add(key);
        }
        commited = commitTransaction();
    } finally {
        // Rolls back when the commit did not happen and always closes the query.
        rollbackAndCleanup(commited, query);
    }
    return primaryKeys;
}
Also used : SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery) Query(javax.jdo.Query) MScheduledQuery(org.apache.hadoop.hive.metastore.model.MScheduledQuery) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) Collection(java.util.Collection) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)

Example 17 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in the Apache Hive project.

From the class ObjectStore, method dropTable.

/**
 * Drops the given table inside a single metastore transaction. Before the table
 * row itself is deleted, every dependent object visible here is removed first:
 * table/column/partition privilege grants, table-level column statistics, all
 * table constraints, and (for materialized views) the creation metadata; the
 * storage descriptor is pre-processed via preDropStorageDescriptor.
 *
 * @param catName   catalog of the table
 * @param dbName    database of the table
 * @param tableName table to drop
 * @return true when the transaction committed successfully
 */
@Override
public boolean dropTable(String catName, String dbName, String tableName) throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException {
    boolean materializedView = false;
    boolean success = false;
    try {
        openTransaction();
        MTable tbl = getMTable(catName, dbName, tableName);
        pm.retrieve(tbl);
        // A missing table is not an error here: the method simply commits and
        // reports whether the commit succeeded.
        if (tbl != null) {
            materializedView = TableType.MATERIALIZED_VIEW.toString().equals(tbl.getTableType());
            // first remove all the grants
            List<MTablePrivilege> tabGrants = listAllTableGrants(catName, dbName, tableName);
            if (CollectionUtils.isNotEmpty(tabGrants)) {
                pm.deletePersistentAll(tabGrants);
            }
            List<MTableColumnPrivilege> tblColGrants = listTableAllColumnGrants(catName, dbName, tableName);
            if (CollectionUtils.isNotEmpty(tblColGrants)) {
                pm.deletePersistentAll(tblColGrants);
            }
            List<MPartitionPrivilege> partGrants = this.listTableAllPartitionGrants(catName, dbName, tableName);
            if (CollectionUtils.isNotEmpty(partGrants)) {
                pm.deletePersistentAll(partGrants);
            }
            List<MPartitionColumnPrivilege> partColGrants = listTableAllPartitionColumnGrants(catName, dbName, tableName);
            if (CollectionUtils.isNotEmpty(partColGrants)) {
                pm.deletePersistentAll(partColGrants);
            }
            // delete column statistics if present
            try {
                deleteTableColumnStatistics(catName, dbName, tableName, null, null);
            } catch (NoSuchObjectException e) {
                // Missing stats are expected for tables that were never analyzed;
                // log and continue rather than failing the drop.
                LOG.info("Found no table level column statistics associated with {} to delete", TableName.getQualified(catName, dbName, tableName));
            }
            // Null constraint name means "all constraints on this table".
            List<MConstraint> tabConstraints = listAllTableConstraintsWithOptionalConstraintName(catName, dbName, tableName, null);
            if (CollectionUtils.isNotEmpty(tabConstraints)) {
                pm.deletePersistentAll(tabConstraints);
            }
            // NOTE(review): presumably detaches/cleans up descriptors shared with
            // other tables before the table row disappears — confirm against helper.
            preDropStorageDescriptor(tbl.getSd());
            if (materializedView) {
                dropCreationMetadata(tbl.getDatabase().getCatalogName(), tbl.getDatabase().getName(), tbl.getTableName());
            }
            // then remove the table
            pm.deletePersistentAll(tbl);
        }
        success = commitTransaction();
    } finally {
        if (!success) {
            rollbackTransaction();
        }
    }
    return success;
}
Also used : MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) MPartitionColumnPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege) MTable(org.apache.hadoop.hive.metastore.model.MTable) MPartitionPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionPrivilege) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MTablePrivilege(org.apache.hadoop.hive.metastore.model.MTablePrivilege) MTableColumnPrivilege(org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege)

Example 18 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in the Apache Hive project.

From the class ObjectStore, method getPrimaryKeyConstraintName.

/**
 * Looks up the name of the primary key constraint declared on the given table.
 *
 * @param catName catalog of the table
 * @param dbName  database of the table
 * @param tblName table to inspect
 * @return the primary key constraint name, or null when none exists
 */
private String getPrimaryKeyConstraintName(String catName, String dbName, String tblName) {
    boolean commited = false;
    String ret = null;
    Query query = null;
    try {
        openTransaction();
        query = pm.newQuery(MConstraint.class,
            "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&"
                + " parentTable.database.catalogName == catName &&"
                + " constraintType == MConstraint.PRIMARY_KEY_CONSTRAINT");
        query.declareParameters(
            "java.lang.String tbl_name, java.lang.String db_name, " + "java.lang.String catName");
        Collection<?> constraints = (Collection<?>) query.execute(tblName, dbName, catName);
        pm.retrieveAll(constraints);
        // Only the first match matters; the original loop broke after one element.
        Iterator<?> it = constraints.iterator();
        if (it.hasNext()) {
            ret = ((MConstraint) it.next()).getConstraintName();
        }
        commited = commitTransaction();
    } finally {
        // Rolls back when the commit did not happen and always closes the query.
        rollbackAndCleanup(commited, query);
    }
    return ret;
}
Also used : ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery) Query(javax.jdo.Query) MScheduledQuery(org.apache.hadoop.hive.metastore.model.MScheduledQuery) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) Collection(java.util.Collection)

Example 19 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in the Apache Hive project.

From the class ObjectStore, method addPrimaryKeys.

/**
 * Persists the given primary key columns as MConstraint rows and writes the
 * normalized identifiers (catalog/db/table/column and the resolved constraint
 * name) back into the input objects.
 *
 * Note: {@code constraintName} is deliberately declared outside the loop — for a
 * multi-column primary key, the name generated (or supplied) for the column with
 * key_seq == 1 is reused for every subsequent column of the same key.
 *
 * @param pks        primary key columns to persist
 * @param retrieveCD whether to deep-retrieve the table's column descriptor
 * @return the input list, with normalized identifiers filled in
 * @throws InvalidObjectException when the parent table/column is missing or the
 *         constraint name already exists
 * @throws MetaException when the table already has a primary key
 */
private List<SQLPrimaryKey> addPrimaryKeys(List<SQLPrimaryKey> pks, boolean retrieveCD) throws InvalidObjectException, MetaException {
    List<MConstraint> mpks = new ArrayList<>();
    String constraintName = null;
    for (SQLPrimaryKey pk : pks) {
        final String catName = normalizeIdentifier(pk.getCatName());
        final String tableDB = normalizeIdentifier(pk.getTable_db());
        final String tableName = normalizeIdentifier(pk.getTable_name());
        final String columnName = normalizeIdentifier(pk.getColumn_name());
        // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
        // For instance, this is the case when we are creating the table.
        AttachedMTableInfo nParentTable = getMTable(catName, tableDB, tableName, retrieveCD);
        MTable parentTable = nParentTable.mtbl;
        if (parentTable == null) {
            throw new InvalidObjectException("Parent table not found: " + tableName);
        }
        MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
        int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
        // Not found among the regular columns: fall back to the partition keys
        // (a null parentCD on the MConstraint marks a partition-key column).
        if (parentIntegerIndex == -1) {
            if (parentTable.getPartitionKeys() != null) {
                parentCD = null;
                parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
            }
            if (parentIntegerIndex == -1) {
                throw new InvalidObjectException("Parent column not found: " + columnName);
            }
        }
        // A table may hold at most one primary key.
        if (getPrimaryKeyConstraintName(parentTable.getDatabase().getCatalogName(), parentTable.getDatabase().getName(), parentTable.getTableName()) != null) {
            throw new MetaException(" Primary key already exists for: " + TableName.getQualified(catName, tableDB, tableName));
        }
        if (pk.getPk_name() == null) {
            // Generate a name only for the first column; later columns of the
            // same key reuse the constraintName carried over from this iteration.
            if (pk.getKey_seq() == 1) {
                constraintName = generateConstraintName(parentTable, tableDB, tableName, columnName, "pk");
            }
        } else {
            constraintName = normalizeIdentifier(pk.getPk_name());
            if (constraintNameAlreadyExists(parentTable, constraintName)) {
                String fqConstraintName = String.format("%s.%s.%s", parentTable.getDatabase().getName(), parentTable.getTableName(), constraintName);
                throw new InvalidObjectException("Constraint name already exists: " + fqConstraintName);
            }
        }
        // Bit-pack the three flags into one int: 4 = enable, 2 = validate, 1 = rely.
        int enableValidateRely = (pk.isEnable_cstr() ? 4 : 0) + (pk.isValidate_cstr() ? 2 : 0) + (pk.isRely_cstr() ? 1 : 0);
        MConstraint mpk = new MConstraint(constraintName, pk.getKey_seq(), MConstraint.PRIMARY_KEY_CONSTRAINT, null, null, enableValidateRely, parentTable, null, parentCD, null, null, parentIntegerIndex);
        mpks.add(mpk);
        // Add normalized identifier back to result
        pk.setCatName(catName);
        pk.setTable_db(tableDB);
        pk.setTable_name(tableName);
        pk.setColumn_name(columnName);
        pk.setPk_name(constraintName);
    }
    pm.makePersistentAll(mpks);
    return pks;
}
Also used : SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) MTable(org.apache.hadoop.hive.metastore.model.MTable) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) ArrayList(java.util.ArrayList) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MColumnDescriptor(org.apache.hadoop.hive.metastore.model.MColumnDescriptor) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)

Example 20 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in the Apache Hive project.

From the class ObjectStore, method addCheckConstraints.

/**
 * Persists the given check constraints as MConstraint rows via addConstraint and
 * writes the normalized identifiers (and the resolved constraint name) back into
 * the input objects.
 *
 * @param ccs        check constraints to persist
 * @param retrieveCD whether to deep-retrieve the table's column descriptor
 * @return the input list, with normalized identifiers filled in
 * @throws InvalidObjectException propagated from addConstraint
 * @throws MetaException          propagated from addConstraint
 */
private List<SQLCheckConstraint> addCheckConstraints(List<SQLCheckConstraint> ccs, boolean retrieveCD) throws InvalidObjectException, MetaException {
    List<MConstraint> persisted = new ArrayList<>();
    for (SQLCheckConstraint cc : ccs) {
        final String catName = normalizeIdentifier(cc.getCatName());
        final String tableDB = normalizeIdentifier(cc.getTable_db());
        final String tableName = normalizeIdentifier(cc.getTable_name());
        // The column is optional: a table-level check constraint has no column.
        final String columnName = cc.getColumn_name() == null ? null : normalizeIdentifier(cc.getColumn_name());
        MConstraint mConstraint = addConstraint(catName, tableDB, tableName, columnName,
            cc.getDc_name(), cc.isEnable_cstr(), cc.isRely_cstr(), cc.isValidate_cstr(),
            MConstraint.CHECK_CONSTRAINT, cc.getCheck_expression(), retrieveCD);
        persisted.add(mConstraint);
        // Add normalized identifier back to result
        cc.setCatName(catName);
        cc.setTable_db(tableDB);
        cc.setTable_name(tableName);
        cc.setColumn_name(columnName);
        cc.setDc_name(mConstraint.getConstraintName());
    }
    pm.makePersistentAll(persisted);
    return ccs;
}
Also used : SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) ArrayList(java.util.ArrayList)

Aggregations

MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint)21 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)14 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)14 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)13 SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)13 ArrayList (java.util.ArrayList)11 MFieldSchema (org.apache.hadoop.hive.metastore.model.MFieldSchema)9 Query (javax.jdo.Query)8 ScheduledQuery (org.apache.hadoop.hive.metastore.api.ScheduledQuery)8 MScheduledQuery (org.apache.hadoop.hive.metastore.model.MScheduledQuery)8 MTable (org.apache.hadoop.hive.metastore.model.MTable)8 Collection (java.util.Collection)7 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)7 MColumnDescriptor (org.apache.hadoop.hive.metastore.model.MColumnDescriptor)7 SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey)3 HashMap (java.util.HashMap)2 LinkedList (java.util.LinkedList)2 List (java.util.List)2 ValidReaderWriteIdList (org.apache.hadoop.hive.common.ValidReaderWriteIdList)2 ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList)2