
Example 6 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method removeUnusedColumnDescriptor.

/**
 * Checks if a column descriptor has any remaining references by storage descriptors
 * in the db.  If it does not, then delete the CD.  If it does, then do nothing.
 * @param oldCD the column descriptor to delete if it is no longer referenced anywhere
 */
private void removeUnusedColumnDescriptor(MColumnDescriptor oldCD) {
    if (oldCD == null) {
        return;
    }
    Query query = null;
    boolean success = false;
    LOG.debug("execute removeUnusedColumnDescriptor");
    try {
        openTransaction();
        if (!hasRemainingCDReference(oldCD)) {
            // First remove any constraints that may be associated with this CD
            query = pm.newQuery(MConstraint.class, "parentColumn == inCD || childColumn == inCD");
            query.declareParameters("MColumnDescriptor inCD");
            List<MConstraint> mConstraintsList = (List<MConstraint>) query.execute(oldCD);
            if (CollectionUtils.isNotEmpty(mConstraintsList)) {
                pm.deletePersistentAll(mConstraintsList);
            }
            // Finally remove CD
            pm.retrieve(oldCD);
            pm.deletePersistent(oldCD);
            LOG.debug("successfully deleted a CD in removeUnusedColumnDescriptor");
        }
        success = commitTransaction();
    } finally {
        rollbackAndCleanup(success, query);
    }
}
Also used : ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery) Query(javax.jdo.Query) MScheduledQuery(org.apache.hadoop.hive.metastore.model.MScheduledQuery) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) ValidWriteIdList(org.apache.hadoop.hive.common.ValidWriteIdList) ReplicationMetricList(org.apache.hadoop.hive.metastore.api.ReplicationMetricList) LinkedList(java.util.LinkedList) MStringList(org.apache.hadoop.hive.metastore.model.MStringList) ArrayList(java.util.ArrayList) ValidReaderWriteIdList(org.apache.hadoop.hive.common.ValidReaderWriteIdList) List(java.util.List)
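
The hasRemainingCDReference helper called above is not part of this example. A minimal sketch of such a reference-existence check, assuming an MStorageDescriptor model class whose column-descriptor field is named "cd" and reusing the same PersistenceManager (both are assumptions for illustration, not the actual Hive helper), could look like this:

import java.util.Collection;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import org.apache.hadoop.hive.metastore.model.MColumnDescriptor;
import org.apache.hadoop.hive.metastore.model.MStorageDescriptor;

public class ColumnDescriptorRefCheck {

    // Hypothetical helper: returns true if at least one storage descriptor still references the CD.
    // The field name "cd" on MStorageDescriptor is assumed for illustration.
    static boolean hasRemainingCDReference(PersistenceManager pm, MColumnDescriptor oldCD) {
        Query query = pm.newQuery(MStorageDescriptor.class, "this.cd == inCD");
        query.declareParameters("MColumnDescriptor inCD");
        try {
            // Existence is all we need, so stop after the first match.
            query.setRange(0, 1);
            Collection<?> refs = (Collection<?>) query.execute(oldCD);
            return !refs.isEmpty();
        } finally {
            query.closeAll();
        }
    }
}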

Example 7 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method getForeignKeysViaJdo.

private List<SQLForeignKey> getForeignKeysViaJdo(String catName, String parentDbName, String parentTblName, String foreignDbName, String foreignTblName) {
    boolean commited = false;
    List<SQLForeignKey> foreignKeys = null;
    Collection<?> constraints = null;
    Query query = null;
    Map<String, String> tblToConstraint = new HashMap<>();
    try {
        openTransaction();
        String queryText = " parentTable.database.catalogName == catName1 &&" + "childTable.database.catalogName == catName2 && " + (parentTblName != null ? "parentTable.tableName == parent_tbl_name && " : "") + (parentDbName != null ? " parentTable.database.name == parent_db_name && " : "") + (foreignTblName != null ? " childTable.tableName == foreign_tbl_name && " : "") + (foreignDbName != null ? " childTable.database.name == foreign_db_name && " : "") + " constraintType == MConstraint.FOREIGN_KEY_CONSTRAINT";
        queryText = queryText.trim();
        query = pm.newQuery(MConstraint.class, queryText);
        String paramText = "java.lang.String catName1, java.lang.String catName2" + (parentTblName == null ? "" : ", java.lang.String parent_tbl_name") + (parentDbName == null ? "" : " , java.lang.String parent_db_name") + (foreignTblName == null ? "" : ", java.lang.String foreign_tbl_name") + (foreignDbName == null ? "" : " , java.lang.String foreign_db_name");
        query.declareParameters(paramText);
        List<String> params = new ArrayList<>();
        params.add(catName);
        // This is not a mistake, catName is in the where clause twice
        params.add(catName);
        if (parentTblName != null) {
            params.add(parentTblName);
        }
        if (parentDbName != null) {
            params.add(parentDbName);
        }
        if (foreignTblName != null) {
            params.add(foreignTblName);
        }
        if (foreignDbName != null) {
            params.add(foreignDbName);
        }
        constraints = (Collection<?>) query.executeWithArray(params.toArray(new String[0]));
        pm.retrieveAll(constraints);
        foreignKeys = new ArrayList<>();
        for (Iterator<?> i = constraints.iterator(); i.hasNext(); ) {
            MConstraint currPKFK = (MConstraint) i.next();
            List<MFieldSchema> parentCols = currPKFK.getParentColumn() != null ? currPKFK.getParentColumn().getCols() : currPKFK.getParentTable().getPartitionKeys();
            List<MFieldSchema> childCols = currPKFK.getChildColumn() != null ? currPKFK.getChildColumn().getCols() : currPKFK.getChildTable().getPartitionKeys();
            int enableValidateRely = currPKFK.getEnableValidateRely();
            boolean enable = (enableValidateRely & 4) != 0;
            boolean validate = (enableValidateRely & 2) != 0;
            boolean rely = (enableValidateRely & 1) != 0;
            String consolidatedtblName = currPKFK.getParentTable().getDatabase().getName() + "." + currPKFK.getParentTable().getTableName();
            String pkName;
            if (tblToConstraint.containsKey(consolidatedtblName)) {
                pkName = tblToConstraint.get(consolidatedtblName);
            } else {
                pkName = getPrimaryKeyConstraintName(currPKFK.getParentTable().getDatabase().getCatalogName(), currPKFK.getParentTable().getDatabase().getName(), currPKFK.getParentTable().getTableName());
                tblToConstraint.put(consolidatedtblName, pkName);
            }
            SQLForeignKey fk = new SQLForeignKey(currPKFK.getParentTable().getDatabase().getName(), currPKFK.getParentTable().getTableName(), parentCols.get(currPKFK.getParentIntegerIndex()).getName(), currPKFK.getChildTable().getDatabase().getName(), currPKFK.getChildTable().getTableName(), childCols.get(currPKFK.getChildIntegerIndex()).getName(), currPKFK.getPosition(), currPKFK.getUpdateRule(), currPKFK.getDeleteRule(), currPKFK.getConstraintName(), pkName, enable, validate, rely);
            fk.setCatName(catName);
            foreignKeys.add(fk);
        }
        commited = commitTransaction();
    } finally {
        rollbackAndCleanup(commited, query);
    }
    return foreignKeys;
}
Also used : SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery) Query(javax.jdo.Query) MScheduledQuery(org.apache.hadoop.hive.metastore.model.MScheduledQuery) HashMap(java.util.HashMap) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) ArrayList(java.util.ArrayList) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)
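
The filter text, the parameter declaration, and the argument list in this method are built in lock-step: every optional clause appended to queryText has a matching entry in paramText and a matching value added to params, so executeWithArray receives exactly as many arguments as were declared. A small standalone sketch of that pattern, using hypothetical names rather than the Hive API:

import java.util.ArrayList;
import java.util.List;

public class OptionalFilterSketch {

    // Build a JDOQL-style filter, its parameter declaration, and the value list together,
    // so optional criteria can be dropped without leaving dangling parameters.
    static void buildForeignKeyFilter(String parentTblName, String parentDbName) {
        StringBuilder filter = new StringBuilder(
                "parentTable.database.catalogName == catName1 && "
              + "childTable.database.catalogName == catName2");
        StringBuilder decl = new StringBuilder("java.lang.String catName1, java.lang.String catName2");
        List<String> values = new ArrayList<>();
        values.add("hive");
        // Not a mistake: the catalog name is compared twice, once per side of the constraint.
        values.add("hive");

        if (parentTblName != null) {
            filter.append(" && parentTable.tableName == parent_tbl_name");
            decl.append(", java.lang.String parent_tbl_name");
            values.add(parentTblName);
        }
        if (parentDbName != null) {
            filter.append(" && parentTable.database.name == parent_db_name");
            decl.append(", java.lang.String parent_db_name");
            values.add(parentDbName);
        }
        System.out.println(filter + "\n" + decl + "\n" + values);
    }

    public static void main(String[] args) {
        // Only the table-name clause and its parameter are added here.
        buildForeignKeyFilter("orders", null);
    }
}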

Example 8 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method addUniqueConstraints.

private List<SQLUniqueConstraint> addUniqueConstraints(List<SQLUniqueConstraint> uks, boolean retrieveCD) throws InvalidObjectException, MetaException {
    List<MConstraint> cstrs = new ArrayList<>();
    String constraintName = null;
    for (SQLUniqueConstraint uk : uks) {
        final String catName = normalizeIdentifier(uk.getCatName());
        final String tableDB = normalizeIdentifier(uk.getTable_db());
        final String tableName = normalizeIdentifier(uk.getTable_name());
        final String columnName = normalizeIdentifier(uk.getColumn_name());
        // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
        // For instance, this is the case when we are creating the table.
        AttachedMTableInfo nParentTable = getMTable(catName, tableDB, tableName, retrieveCD);
        MTable parentTable = nParentTable.mtbl;
        if (parentTable == null) {
            throw new InvalidObjectException("Parent table not found: " + tableName);
        }
        MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
        int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
        if (parentIntegerIndex == -1) {
            if (parentTable.getPartitionKeys() != null) {
                parentCD = null;
                parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
            }
            if (parentIntegerIndex == -1) {
                throw new InvalidObjectException("Parent column not found: " + columnName);
            }
        }
        if (uk.getUk_name() == null) {
            if (uk.getKey_seq() == 1) {
                constraintName = generateConstraintName(parentTable, tableDB, tableName, columnName, "uk");
            }
        } else {
            constraintName = normalizeIdentifier(uk.getUk_name());
            if (constraintNameAlreadyExists(parentTable, constraintName)) {
                String fqConstraintName = String.format("%s.%s.%s", parentTable.getDatabase().getName(), parentTable.getTableName(), constraintName);
                throw new InvalidObjectException("Constraint name already exists: " + fqConstraintName);
            }
        }
        int enableValidateRely = (uk.isEnable_cstr() ? 4 : 0) + (uk.isValidate_cstr() ? 2 : 0) + (uk.isRely_cstr() ? 1 : 0);
        MConstraint muk = new MConstraint(constraintName, uk.getKey_seq(), MConstraint.UNIQUE_CONSTRAINT, null, null, enableValidateRely, parentTable, null, parentCD, null, null, parentIntegerIndex);
        cstrs.add(muk);
        // Add normalized identifier back to result
        uk.setCatName(catName);
        uk.setTable_db(tableDB);
        uk.setTable_name(tableName);
        uk.setColumn_name(columnName);
        uk.setUk_name(constraintName);
    }
    pm.makePersistentAll(cstrs);
    return uks;
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) MTable(org.apache.hadoop.hive.metastore.model.MTable) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) ArrayList(java.util.ArrayList) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MColumnDescriptor(org.apache.hadoop.hive.metastore.model.MColumnDescriptor) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)
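
The enableValidateRely integer packs the three constraint flags into one value: ENABLE contributes 4, VALIDATE contributes 2, and RELY contributes 1, matching the bitwise decoding seen in getForeignKeysViaJdo above. A minimal sketch of both directions of the mapping:

public final class ConstraintFlags {

    // Encode the three flags using the bit layout stored in MConstraint.enableValidateRely:
    // ENABLE -> 4, VALIDATE -> 2, RELY -> 1.
    static int encode(boolean enable, boolean validate, boolean rely) {
        return (enable ? 4 : 0) + (validate ? 2 : 0) + (rely ? 1 : 0);
    }

    static boolean isEnable(int enableValidateRely) {
        return (enableValidateRely & 4) != 0;
    }

    static boolean isValidate(int enableValidateRely) {
        return (enableValidateRely & 2) != 0;
    }

    static boolean isRely(int enableValidateRely) {
        return (enableValidateRely & 1) != 0;
    }

    public static void main(String[] args) {
        int packed = encode(true, false, true); // ENABLE + RELY
        System.out.println(packed);             // 5
        System.out.println(isValidate(packed)); // false
    }
}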

Example 9 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method getDefaultConstraintsViaJdo.

private List<SQLDefaultConstraint> getDefaultConstraintsViaJdo(String catName, String dbName, String tblName) {
    boolean commited = false;
    List<SQLDefaultConstraint> defaultConstraints = null;
    Query query = null;
    try {
        openTransaction();
        query = pm.newQuery(MConstraint.class, "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&" + " parentTable.database.catalogName == catName &&" + " constraintType == MConstraint.DEFAULT_CONSTRAINT");
        query.declareParameters("java.lang.String tbl_name, java.lang.String db_name, java.lang.String catName");
        Collection<?> constraints = (Collection<?>) query.execute(tblName, dbName, catName);
        pm.retrieveAll(constraints);
        defaultConstraints = new ArrayList<>();
        for (Iterator<?> i = constraints.iterator(); i.hasNext(); ) {
            MConstraint currConstraint = (MConstraint) i.next();
            List<MFieldSchema> cols = currConstraint.getParentColumn() != null ? currConstraint.getParentColumn().getCols() : currConstraint.getParentTable().getPartitionKeys();
            int enableValidateRely = currConstraint.getEnableValidateRely();
            boolean enable = (enableValidateRely & 4) != 0;
            boolean validate = (enableValidateRely & 2) != 0;
            boolean rely = (enableValidateRely & 1) != 0;
            defaultConstraints.add(new SQLDefaultConstraint(catName, dbName, tblName, cols.get(currConstraint.getParentIntegerIndex()).getName(), currConstraint.getDefaultValue(), currConstraint.getConstraintName(), enable, validate, rely));
        }
        commited = commitTransaction();
    } finally {
        rollbackAndCleanup(commited, query);
    }
    return defaultConstraints;
}
Also used : ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery) Query(javax.jdo.Query) MScheduledQuery(org.apache.hadoop.hive.metastore.model.MScheduledQuery) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) Collection(java.util.Collection) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)
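
All of these read methods follow the same transaction discipline: open a transaction, run the query, force-load the results with retrieveAll before committing, and in the finally block roll back if the commit never happened and close the query. The sketch below restates that pattern with plain JDO calls; the rollbackAndCleanup helper in ObjectStore presumably wraps similar logic, but the code here is only an illustrative stand-in:

import java.util.Collection;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;

public class JdoReadPattern {

    // Generic read-inside-a-transaction pattern mirroring the structure of the methods above.
    static Collection<?> runRead(PersistenceManager pm, Class<?> candidateClass, String filter,
                                 String paramDecl, Object... args) {
        boolean committed = false;
        Query query = null;
        try {
            pm.currentTransaction().begin();
            query = pm.newQuery(candidateClass, filter);
            query.declareParameters(paramDecl);
            Collection<?> result = (Collection<?>) query.executeWithArray(args);
            // Force-load the persistent objects while the transaction is still open.
            pm.retrieveAll(result);
            pm.currentTransaction().commit();
            committed = true;
            return result;
        } finally {
            if (!committed && pm.currentTransaction().isActive()) {
                pm.currentTransaction().rollback(); // undo on any failure path
            }
            if (query != null) {
                query.closeAll(); // release datastore resources held by the query
            }
        }
    }
}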

Example 10 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method addForeignKeys.

// TODO: clean up this method
private List<SQLForeignKey> addForeignKeys(List<SQLForeignKey> foreignKeys, boolean retrieveCD, List<SQLPrimaryKey> primaryKeys, List<SQLUniqueConstraint> uniqueConstraints) throws InvalidObjectException, MetaException {
    if (CollectionUtils.isNotEmpty(foreignKeys)) {
        List<MConstraint> mpkfks = new ArrayList<>();
        String currentConstraintName = null;
        String catName = null;
        // The outer loop advances one (possibly multi-column) foreign key at a time;
        // the inner loop below checks / adds information about each column of that key.
        for (int i = 0; i < foreignKeys.size(); i++) {
            if (catName == null) {
                catName = normalizeIdentifier(foreignKeys.get(i).isSetCatName() ? foreignKeys.get(i).getCatName() : getDefaultCatalog(conf));
            } else {
                String tmpCatName = normalizeIdentifier(foreignKeys.get(i).isSetCatName() ? foreignKeys.get(i).getCatName() : getDefaultCatalog(conf));
                if (!catName.equals(tmpCatName)) {
                    throw new InvalidObjectException("Foreign keys cannot span catalogs");
                }
            }
            final String fkTableDB = normalizeIdentifier(foreignKeys.get(i).getFktable_db());
            final String fkTableName = normalizeIdentifier(foreignKeys.get(i).getFktable_name());
            // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
            // For instance, this is the case when we are creating the table.
            final AttachedMTableInfo nChildTable = getMTable(catName, fkTableDB, fkTableName, retrieveCD);
            final MTable childTable = nChildTable.mtbl;
            if (childTable == null) {
                throw new InvalidObjectException("Child table not found: " + fkTableName);
            }
            MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
            final List<MFieldSchema> childCols = childCD == null || childCD.getCols() == null ? new ArrayList<>() : new ArrayList<>(childCD.getCols());
            if (childTable.getPartitionKeys() != null) {
                childCols.addAll(childTable.getPartitionKeys());
            }
            final String pkTableDB = normalizeIdentifier(foreignKeys.get(i).getPktable_db());
            final String pkTableName = normalizeIdentifier(foreignKeys.get(i).getPktable_name());
            // For primary keys, we retrieve the column descriptors if retrieveCD is true (which means
            // it is an alter table statement) or if it is a create table statement but we are
            // referencing another table instead of self for the primary key.
            final AttachedMTableInfo nParentTable;
            final MTable parentTable;
            MColumnDescriptor parentCD;
            final List<MFieldSchema> parentCols;
            final List<SQLPrimaryKey> existingTablePrimaryKeys;
            final List<SQLUniqueConstraint> existingTableUniqueConstraints;
            final boolean sameTable = fkTableDB.equals(pkTableDB) && fkTableName.equals(pkTableName);
            if (sameTable) {
                nParentTable = nChildTable;
                parentTable = childTable;
                parentCD = childCD;
                parentCols = childCols;
                existingTablePrimaryKeys = primaryKeys;
                existingTableUniqueConstraints = uniqueConstraints;
            } else {
                nParentTable = getMTable(catName, pkTableDB, pkTableName, true);
                parentTable = nParentTable.mtbl;
                if (parentTable == null) {
                    throw new InvalidObjectException("Parent table not found: " + pkTableName);
                }
                parentCD = nParentTable.mcd;
                parentCols = parentCD == null || parentCD.getCols() == null ? new ArrayList<>() : new ArrayList<>(parentCD.getCols());
                if (parentTable.getPartitionKeys() != null) {
                    parentCols.addAll(parentTable.getPartitionKeys());
                }
                existingTablePrimaryKeys = getPrimaryKeys(catName, pkTableDB, pkTableName);
                existingTableUniqueConstraints = getUniqueConstraints(catName, pkTableDB, pkTableName);
            }
            // The referenced table must expose a primary key or unique key to which the foreign key columns correspond.
            if (existingTablePrimaryKeys.isEmpty() && existingTableUniqueConstraints.isEmpty()) {
                throw new MetaException("Trying to define foreign key but there are no primary keys or unique keys for referenced table");
            }
            final Set<String> validPKsOrUnique = generateValidPKsOrUniqueSignatures(parentCols, existingTablePrimaryKeys, existingTableUniqueConstraints);
            StringBuilder fkSignature = new StringBuilder();
            StringBuilder referencedKSignature = new StringBuilder();
            for (; i < foreignKeys.size(); i++) {
                SQLForeignKey foreignKey = foreignKeys.get(i);
                final String fkColumnName = normalizeIdentifier(foreignKey.getFkcolumn_name());
                int childIntegerIndex = getColumnIndexFromTableColumns(childCD.getCols(), fkColumnName);
                if (childIntegerIndex == -1) {
                    if (childTable.getPartitionKeys() != null) {
                        childCD = null;
                        childIntegerIndex = getColumnIndexFromTableColumns(childTable.getPartitionKeys(), fkColumnName);
                    }
                    if (childIntegerIndex == -1) {
                        throw new InvalidObjectException("Child column not found: " + fkColumnName);
                    }
                }
                final String pkColumnName = normalizeIdentifier(foreignKey.getPkcolumn_name());
                int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD.getCols(), pkColumnName);
                if (parentIntegerIndex == -1) {
                    if (parentTable.getPartitionKeys() != null) {
                        parentCD = null;
                        parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), pkColumnName);
                    }
                    if (parentIntegerIndex == -1) {
                        throw new InvalidObjectException("Parent column not found: " + pkColumnName);
                    }
                }
                if (foreignKey.getFk_name() == null) {
                    // Only auto-generate a name for the first column of the key (key_seq == 1);
                    // subsequent columns reuse it, preserving the uniqueness of the generated constraint name.
                    if (foreignKey.getKey_seq() == 1) {
                        currentConstraintName = generateConstraintName(parentTable, fkTableDB, fkTableName, pkTableDB, pkTableName, pkColumnName, fkColumnName, "fk");
                    }
                } else {
                    currentConstraintName = normalizeIdentifier(foreignKey.getFk_name());
                    if (constraintNameAlreadyExists(parentTable, currentConstraintName)) {
                        String fqConstraintName = String.format("%s.%s.%s", parentTable.getDatabase().getName(), parentTable.getTableName(), currentConstraintName);
                        throw new InvalidObjectException("Constraint name already exists: " + fqConstraintName);
                    }
                }
                // Update Column, keys, table, database, catalog name
                foreignKey.setFk_name(currentConstraintName);
                foreignKey.setCatName(catName);
                foreignKey.setFktable_db(fkTableDB);
                foreignKey.setFktable_name(fkTableName);
                foreignKey.setPktable_db(pkTableDB);
                foreignKey.setPktable_name(pkTableName);
                foreignKey.setFkcolumn_name(fkColumnName);
                foreignKey.setPkcolumn_name(pkColumnName);
                Integer updateRule = foreignKey.getUpdate_rule();
                Integer deleteRule = foreignKey.getDelete_rule();
                int enableValidateRely = (foreignKey.isEnable_cstr() ? 4 : 0) + (foreignKey.isValidate_cstr() ? 2 : 0) + (foreignKey.isRely_cstr() ? 1 : 0);
                MConstraint mpkfk = new MConstraint(currentConstraintName, foreignKey.getKey_seq(), MConstraint.FOREIGN_KEY_CONSTRAINT, deleteRule, updateRule, enableValidateRely, parentTable, childTable, parentCD, childCD, childIntegerIndex, parentIntegerIndex);
                mpkfks.add(mpkfk);
                final String fkColType = getColumnFromTableColumns(childCols, fkColumnName).getType();
                fkSignature.append(generateColNameTypeSignature(fkColumnName, fkColType));
                referencedKSignature.append(generateColNameTypeSignature(pkColumnName, fkColType));
                if (i + 1 < foreignKeys.size() && foreignKeys.get(i + 1).getKey_seq() == 1) {
                    // Next one is a new key, we bail out from the inner loop
                    break;
                }
            }
            String referenced = referencedKSignature.toString();
            if (!validPKsOrUnique.contains(referenced)) {
                throw new MetaException("Foreign key references " + referenced + " but no corresponding " + "primary key or unique key exists. Possible keys: " + validPKsOrUnique);
            }
            if (sameTable && fkSignature.toString().equals(referenced)) {
                throw new MetaException("Cannot be both foreign key and primary/unique key on same table: " + referenced);
            }
            fkSignature = new StringBuilder();
            referencedKSignature = new StringBuilder();
        }
        pm.makePersistentAll(mpkfks);
    }
    return foreignKeys;
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) ArrayList(java.util.ArrayList) MColumnDescriptor(org.apache.hadoop.hive.metastore.model.MColumnDescriptor) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) MTable(org.apache.hadoop.hive.metastore.model.MTable) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
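
The signature check at the end of the outer loop is what makes composite keys work: the concatenated name/type signature of the referenced columns must equal the signature of an existing primary key or unique key on the parent table, and a self-referencing constraint may not use exactly its own key columns. The generateColNameTypeSignature and generateValidPKsOrUniqueSignatures helpers are not shown in this example, so the sketch below uses a made-up signature format purely to illustrate the matching idea:

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class KeySignatureSketch {

    // Made-up signature format: "name:type;" per column, concatenated in key order.
    // The real signature helpers in ObjectStore are not shown here and may differ.
    static String colSignature(String name, String type) {
        return name + ":" + type + ";";
    }

    public static void main(String[] args) {
        // Referenced table: a composite unique key over (a, b).
        Map<String, String> referencedKeyCols = new LinkedHashMap<>();
        referencedKeyCols.put("a", "int");
        referencedKeyCols.put("b", "string");

        StringBuilder uniqueKeySig = new StringBuilder();
        referencedKeyCols.forEach((name, type) -> uniqueKeySig.append(colSignature(name, type)));
        Set<String> validPKsOrUnique = new HashSet<>(Arrays.asList(uniqueKeySig.toString()));

        // The foreign key must reference the whole key, with matching types, in order.
        StringBuilder referencedKSignature = new StringBuilder();
        referencedKSignature.append(colSignature("a", "int"));
        referencedKSignature.append(colSignature("b", "string"));

        // Same containment check as at the end of the outer loop in addForeignKeys.
        System.out.println(validPKsOrUnique.contains(referencedKSignature.toString())); // true
    }
}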

Aggregations

MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint): 21
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 14
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 14
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 13
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 13
ArrayList (java.util.ArrayList): 11
MFieldSchema (org.apache.hadoop.hive.metastore.model.MFieldSchema): 9
Query (javax.jdo.Query): 8
ScheduledQuery (org.apache.hadoop.hive.metastore.api.ScheduledQuery): 8
MScheduledQuery (org.apache.hadoop.hive.metastore.model.MScheduledQuery): 8
MTable (org.apache.hadoop.hive.metastore.model.MTable): 8
Collection (java.util.Collection): 7
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 7
MColumnDescriptor (org.apache.hadoop.hive.metastore.model.MColumnDescriptor): 7
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 3
HashMap (java.util.HashMap): 2
LinkedList (java.util.LinkedList): 2
List (java.util.List): 2
ValidReaderWriteIdList (org.apache.hadoop.hive.common.ValidReaderWriteIdList): 2
ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList): 2