
Example 1 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

The class ObjectStore, method addForeignKeys:

private void addForeignKeys(List<SQLForeignKey> fks, boolean retrieveCD) throws InvalidObjectException, MetaException {
    List<MConstraint> mpkfks = new ArrayList<>();
    String currentConstraintName = null;
    for (SQLForeignKey fk : fks) {
        AttachedMTableInfo nParentTable = getMTable(fk.getPktable_db(), fk.getPktable_name(), retrieveCD);
        MTable parentTable = nParentTable.mtbl;
        if (parentTable == null) {
            throw new InvalidObjectException("Parent table not found: " + fk.getPktable_name());
        }
        AttachedMTableInfo nChildTable = getMTable(fk.getFktable_db(), fk.getFktable_name(), retrieveCD);
        MTable childTable = nChildTable.mtbl;
        if (childTable == null) {
            throw new InvalidObjectException("Child table not found: " + fk.getFktable_name());
        }
        MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
        List<MFieldSchema> parentCols = parentCD == null ? null : parentCD.getCols();
        int parentIntegerIndex = getColumnIndexFromTableColumns(parentCols, fk.getPkcolumn_name());
        if (parentIntegerIndex == -1) {
            throw new InvalidObjectException("Parent column not found: " + fk.getPkcolumn_name());
        }
        MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
        // Guard against a null child CD, mirroring the parent-side handling above.
        List<MFieldSchema> childCols = childCD == null ? null : childCD.getCols();
        int childIntegerIndex = getColumnIndexFromTableColumns(childCols, fk.getFkcolumn_name());
        if (childIntegerIndex == -1) {
            throw new InvalidObjectException("Child column not found: " + fk.getFkcolumn_name());
        }
        if (fk.getFk_name() == null) {
            // No name was supplied: generate one on the first column of the (possibly
            // composite) key; generateConstraintName is responsible for
            // the uniqueness of the generated constraint name.
            if (fk.getKey_seq() == 1) {
                currentConstraintName = generateConstraintName(fk.getFktable_db(), fk.getFktable_name(), fk.getPktable_db(), fk.getPktable_name(), fk.getPkcolumn_name(), fk.getFkcolumn_name(), "fk");
            }
        } else {
            currentConstraintName = fk.getFk_name();
        }
        Integer updateRule = fk.getUpdate_rule();
        Integer deleteRule = fk.getDelete_rule();
        // Pack the ENABLE (4), VALIDATE (2), and RELY (1) traits into one integer.
        int enableValidateRely = (fk.isEnable_cstr() ? 4 : 0) + (fk.isValidate_cstr() ? 2 : 0) + (fk.isRely_cstr() ? 1 : 0);
        MConstraint mpkfk = new MConstraint(currentConstraintName, MConstraint.FOREIGN_KEY_CONSTRAINT, fk.getKey_seq(), deleteRule, updateRule, enableValidateRely, parentTable, childTable, parentCD, childCD, childIntegerIndex, parentIntegerIndex);
        mpkfks.add(mpkfk);
    }
    pm.makePersistentAll(mpkfks);
}
Also used: MTable (org.apache.hadoop.hive.metastore.model.MTable), MFieldSchema (org.apache.hadoop.hive.metastore.model.MFieldSchema), ArrayList (java.util.ArrayList), MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint), InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException), MColumnDescriptor (org.apache.hadoop.hive.metastore.model.MColumnDescriptor)
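
The enableValidateRely computation above packs the three constraint traits into a single integer: ENABLE contributes 4, VALIDATE contributes 2, and RELY contributes 1. A minimal standalone sketch of the encoding and its inverse (the class and method names here are invented for illustration, not part of the Hive metastore API):

// Sketch of the 4/2/1 bit-packing used for enableValidateRely above.
public final class ConstraintTraits {
    static int encode(boolean enable, boolean validate, boolean rely) {
        return (enable ? 4 : 0) + (validate ? 2 : 0) + (rely ? 1 : 0);
    }

    static boolean isEnable(int packed)   { return (packed & 4) != 0; }
    static boolean isValidate(int packed) { return (packed & 2) != 0; }
    static boolean isRely(int packed)     { return (packed & 1) != 0; }

    public static void main(String[] args) {
        int packed = encode(true, false, true); // ENABLE + RELY
        System.out.println(packed);             // 5
        System.out.println(isValidate(packed)); // false
    }
}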

Example 2 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

The class ObjectStore, method addConstraint:

private void addConstraint(String tableDB, String tableName, String columnName, String ccName, boolean isEnable, boolean isRely, boolean isValidate, int constraintType, String constraintValue, boolean retrieveCD, List<String> nnNames, List<MConstraint> cstrs) throws InvalidObjectException, MetaException {
    String constraintName = null;
    // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
    // For instance, this is the case when we are creating the table.
    AttachedMTableInfo nParentTable = getMTable(tableDB, tableName, retrieveCD);
    MTable parentTable = nParentTable.mtbl;
    if (parentTable == null) {
        throw new InvalidObjectException("Parent table not found: " + tableName);
    }
    MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
    int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
    if (parentIntegerIndex == -1) {
        if (parentTable.getPartitionKeys() != null) {
            parentCD = null;
            parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
        }
        if (parentIntegerIndex == -1) {
            throw new InvalidObjectException("Parent column not found: " + columnName);
        }
    }
    if (ccName == null) {
        constraintName = generateConstraintName(tableDB, tableName, columnName, "dc");
    } else {
        constraintName = normalizeIdentifier(ccName);
        if (constraintNameAlreadyExists(constraintName)) {
            throw new InvalidObjectException("Constraint name already exists: " + constraintName);
        }
    }
    nnNames.add(constraintName);
    int enableValidateRely = (isEnable ? 4 : 0) + (isValidate ? 2 : 0) + (isRely ? 1 : 0);
    // The position argument is 1: this constraint kind references a single column.
    MConstraint muk = new MConstraint(constraintName, constraintType, 1, null, null, enableValidateRely, parentTable, null, parentCD, null, null, parentIntegerIndex, constraintValue);
    cstrs.add(muk);
}
Also used: MTable (org.apache.hadoop.hive.metastore.model.MTable), MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint), InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException), MColumnDescriptor (org.apache.hadoop.hive.metastore.model.MColumnDescriptor), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)
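
The naming logic above follows a resolve-or-generate pattern: with no caller-supplied name, one is derived from the table and column; otherwise the name is normalized and rejected if it already exists. A standalone sketch of that pattern, assuming a lowercase/trim normalization and an in-memory registry as stand-ins for normalizeIdentifier and constraintNameAlreadyExists:

import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

// Sketch only; the helpers are simplified stand-ins, not Hive's implementations.
public final class ConstraintNaming {
    private final Set<String> existing = new HashSet<>();

    String resolve(String requested, String table, String column, String suffix) {
        if (requested == null) {
            // Loosely mirrors generateConstraintName(): derive a name from the parts.
            return normalize(table + "_" + column + "_" + suffix);
        }
        String name = normalize(requested);
        // Only explicit names are checked for collisions, as in addConstraint above.
        if (!existing.add(name)) {
            throw new IllegalArgumentException("Constraint name already exists: " + name);
        }
        return name;
    }

    // Assumption: identifier normalization lower-cases and trims.
    private static String normalize(String id) {
        return id.trim().toLowerCase(Locale.ROOT);
    }
}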

Example 3 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

The class ObjectStore, method addDefaultConstraints:

private List<SQLDefaultConstraint> addDefaultConstraints(List<SQLDefaultConstraint> dcs, boolean retrieveCD) throws InvalidObjectException, MetaException {
    List<MConstraint> cstrs = new ArrayList<>();
    for (SQLDefaultConstraint dc : dcs) {
        final String catName = normalizeIdentifier(dc.getCatName());
        final String tableDB = normalizeIdentifier(dc.getTable_db());
        final String tableName = normalizeIdentifier(dc.getTable_name());
        final String columnName = normalizeIdentifier(dc.getColumn_name());
        final String dcName = dc.getDc_name();
        boolean isEnable = dc.isEnable_cstr();
        boolean isValidate = dc.isValidate_cstr();
        boolean isRely = dc.isRely_cstr();
        String constraintValue = dc.getDefault_value();
        MConstraint muk = addConstraint(catName, tableDB, tableName, columnName, dcName, isEnable, isRely, isValidate, MConstraint.DEFAULT_CONSTRAINT, constraintValue, retrieveCD);
        cstrs.add(muk);
        // Add normalized identifier back to result
        dc.setCatName(catName);
        dc.setTable_db(tableDB);
        dc.setTable_name(tableName);
        dc.setColumn_name(columnName);
        dc.setDc_name(muk.getConstraintName());
    }
    pm.makePersistentAll(cstrs);
    return dcs;
}
Also used: MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint), ArrayList (java.util.ArrayList), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
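
Because the loop writes the normalized values back into each SQLDefaultConstraint, the caller receives the identifiers exactly as stored, including any generated constraint name. An illustrative fragment of the caller-side view (not runnable in isolation, since addDefaultConstraints is a private ObjectStore method; the field values and the lower-casing behavior of normalizeIdentifier are assumptions):

SQLDefaultConstraint dc = new SQLDefaultConstraint();
dc.setCatName("Hive");           // mixed case, as a client might send it
dc.setTable_db("Sales");
dc.setTable_name("Orders");
dc.setColumn_name("Qty");
dc.setDefault_value("0");
dc.setDc_name(null);             // no name supplied: one will be generated

List<SQLDefaultConstraint> out = addDefaultConstraints(Collections.singletonList(dc), false);
// out.get(0).getTable_db()  -> "sales" (normalized in place)
// out.get(0).getDc_name()   -> the generated constraint name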

Example 4 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

The class ObjectStore, method copyMSD:

private void copyMSD(MStorageDescriptor newSd, MStorageDescriptor oldSd) {
    oldSd.setLocation(newSd.getLocation());
    // If the columns of the old column descriptor != the columns of the new one,
    // then change the old storage descriptor's column descriptor.
    // Convert the MFieldSchemas to their thrift object counterparts, because we maintain
    // datastore identity (i.e., identity of the model objects are managed by JDO,
    // not the application).
    List<FieldSchema> oldCols = oldSd.getCD() != null && oldSd.getCD().getCols() != null ? convertToFieldSchemas(oldSd.getCD().getCols()) : null;
    List<FieldSchema> newCols = newSd.getCD() != null && newSd.getCD().getCols() != null ? convertToFieldSchemas(newSd.getCD().getCols()) : null;
    if (oldCols == null || !oldCols.equals(newCols)) {
        // Create mapping from old col indexes to new col indexes
        if (oldCols != null && newCols != null) {
            Map<Integer, Integer> mapping = new HashMap<>();
            for (int i = 0; i < oldCols.size(); i++) {
                // indexOf() returns the first column equal to oldCols.get(i), or -1
                // if that column was dropped, matching the original inner loop.
                int newIdx = newCols.indexOf(oldCols.get(i));
                if (newIdx >= 0) {
                    mapping.put(i, newIdx);
                }
            }
            // Repoint constraints that reference the old CD at the new one; if the old
            // CD ends up unreferenced, we let the logic in removeUnusedColumnDescriptor
            // take care of it.
            try (QueryWrapper query = new QueryWrapper(pm.newQuery(MConstraint.class, "parentColumn == inCD || childColumn == inCD"))) {
                query.declareParameters("MColumnDescriptor inCD");
                List<MConstraint> mConstraintsList = (List<MConstraint>) query.execute(oldSd.getCD());
                pm.retrieveAll(mConstraintsList);
                for (MConstraint mConstraint : mConstraintsList) {
                    if (oldSd.getCD().equals(mConstraint.getParentColumn())) {
                        Integer newIdx = mapping.get(mConstraint.getParentIntegerIndex());
                        if (newIdx != null) {
                            mConstraint.setParentColumn(newSd.getCD());
                            mConstraint.setParentIntegerIndex(newIdx);
                        }
                    }
                    if (oldSd.getCD().equals(mConstraint.getChildColumn())) {
                        Integer newIdx = mapping.get(mConstraint.getChildIntegerIndex());
                        if (newIdx != null) {
                            mConstraint.setChildColumn(newSd.getCD());
                            mConstraint.setChildIntegerIndex(newIdx);
                        }
                    }
                }
                pm.makePersistentAll(mConstraintsList);
            }
            // Finally replace CD
            oldSd.setCD(newSd.getCD());
        }
    }
    oldSd.setBucketCols(newSd.getBucketCols());
    oldSd.setIsCompressed(newSd.isCompressed());
    oldSd.setInputFormat(newSd.getInputFormat());
    oldSd.setOutputFormat(newSd.getOutputFormat());
    oldSd.setNumBuckets(newSd.getNumBuckets());
    oldSd.getSerDeInfo().setName(newSd.getSerDeInfo().getName());
    oldSd.getSerDeInfo().setSerializationLib(newSd.getSerDeInfo().getSerializationLib());
    oldSd.getSerDeInfo().setParameters(newSd.getSerDeInfo().getParameters());
    oldSd.setSkewedColNames(newSd.getSkewedColNames());
    oldSd.setSkewedColValues(newSd.getSkewedColValues());
    oldSd.setSkewedColValueLocationMaps(newSd.getSkewedColValueLocationMaps());
    oldSd.setSortCols(newSd.getSortCols());
    oldSd.setParameters(newSd.getParameters());
    oldSd.setStoredAsSubDirectories(newSd.isStoredAsSubDirectories());
}
Also used: HashMap (java.util.HashMap), MFieldSchema (org.apache.hadoop.hive.metastore.model.MFieldSchema), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint), ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList), ReplicationMetricList (org.apache.hadoop.hive.metastore.api.ReplicationMetricList), LinkedList (java.util.LinkedList), MStringList (org.apache.hadoop.hive.metastore.model.MStringList), ArrayList (java.util.ArrayList), ValidReaderWriteIdList (org.apache.hadoop.hive.common.ValidReaderWriteIdList), List (java.util.List), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)
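
The index-remapping step is the crux of copyMSD: constraints store column positions, so when the column descriptor is replaced, each surviving column's old position must be translated to its new one. A standalone sketch of that mapping, generic over any element type with a value-based equals (FieldSchema is the Hive case):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class ColumnIndexMapping {
    // Map each old column index to its index in the new column list. Dropped
    // columns get no entry, so their constraints are left untouched, matching
    // the null check on mapping.get(...) in copyMSD.
    static <T> Map<Integer, Integer> buildIndexMapping(List<T> oldCols, List<T> newCols) {
        Map<Integer, Integer> mapping = new HashMap<>();
        for (int i = 0; i < oldCols.size(); i++) {
            int j = newCols.indexOf(oldCols.get(i)); // first equal column, or -1
            if (j >= 0) {
                mapping.put(i, j);
            }
        }
        return mapping;
    }

    public static void main(String[] args) {
        // "b" was dropped and "c" moved to the front: prints {0=1, 2=0}
        System.out.println(buildIndexMapping(List.of("a", "b", "c"), List.of("c", "a")));
    }
}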

Example 5 with MConstraint

Use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

The class ObjectStore, method addNotNullConstraints:

private List<SQLNotNullConstraint> addNotNullConstraints(List<SQLNotNullConstraint> nns, boolean retrieveCD) throws InvalidObjectException, MetaException {
    List<MConstraint> cstrs = new ArrayList<>();
    String constraintName;
    for (SQLNotNullConstraint nn : nns) {
        final String catName = normalizeIdentifier(nn.getCatName());
        final String tableDB = normalizeIdentifier(nn.getTable_db());
        final String tableName = normalizeIdentifier(nn.getTable_name());
        final String columnName = normalizeIdentifier(nn.getColumn_name());
        // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
        // For instance, this is the case when we are creating the table.
        AttachedMTableInfo nParentTable = getMTable(catName, tableDB, tableName, retrieveCD);
        MTable parentTable = nParentTable.mtbl;
        if (parentTable == null) {
            throw new InvalidObjectException("Parent table not found: " + tableName);
        }
        MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
        int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), columnName);
        if (parentIntegerIndex == -1) {
            if (parentTable.getPartitionKeys() != null) {
                parentCD = null;
                parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), columnName);
            }
            if (parentIntegerIndex == -1) {
                throw new InvalidObjectException("Parent column not found: " + columnName);
            }
        }
        if (nn.getNn_name() == null) {
            constraintName = generateConstraintName(parentTable, tableDB, tableName, columnName, "nn");
        } else {
            constraintName = normalizeIdentifier(nn.getNn_name());
            if (constraintNameAlreadyExists(parentTable, constraintName)) {
                String fqConstraintName = String.format("%s.%s.%s", parentTable.getDatabase().getName(), parentTable.getTableName(), constraintName);
                throw new InvalidObjectException("Constraint name already exists: " + fqConstraintName);
            }
        }
        int enableValidateRely = (nn.isEnable_cstr() ? 4 : 0) + (nn.isValidate_cstr() ? 2 : 0) + (nn.isRely_cstr() ? 1 : 0);
        // The position argument is 1: a NOT NULL constraint references a single column.
        MConstraint muk = new MConstraint(constraintName, MConstraint.NOT_NULL_CONSTRAINT, 1, null, null, enableValidateRely, parentTable, null, parentCD, null, null, parentIntegerIndex);
        cstrs.add(muk);
        // Add normalized identifier back to result
        nn.setCatName(catName);
        nn.setTable_db(tableDB);
        nn.setTable_name(tableName);
        nn.setColumn_name(columnName);
        nn.setNn_name(constraintName);
    }
    pm.makePersistentAll(cstrs);
    return nns;
}
Also used: SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), MTable (org.apache.hadoop.hive.metastore.model.MTable), MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint), ArrayList (java.util.ArrayList), InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException), MColumnDescriptor (org.apache.hadoop.hive.metastore.model.MColumnDescriptor), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
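
Taken together, the examples pin down a single argument order for the 12-argument MConstraint constructor. As a reading aid, here is the NOT NULL call restated with each argument annotated (the labels are inferred from the surrounding examples, not the model class's declared parameter names):

MConstraint muk = new MConstraint(
    constraintName,                   // normalized or generated name
    MConstraint.NOT_NULL_CONSTRAINT,  // constraint type constant
    1,                                // key position; a single column for NOT NULL
    null,                             // delete rule (foreign keys only)
    null,                             // update rule (foreign keys only)
    enableValidateRely,               // packed ENABLE(4)/VALIDATE(2)/RELY(1) bits
    parentTable,                      // parent MTable (the referenced table for FKs)
    null,                             // child MTable (foreign keys only)
    parentCD,                         // parent MColumnDescriptor
    null,                             // child MColumnDescriptor (foreign keys only)
    null,                             // child column index (foreign keys only)
    parentIntegerIndex);              // parent column index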

Aggregations

MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint): 21 usages
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 14 usages
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 14 usages
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 13 usages
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 13 usages
ArrayList (java.util.ArrayList): 11 usages
MFieldSchema (org.apache.hadoop.hive.metastore.model.MFieldSchema): 9 usages
Query (javax.jdo.Query): 8 usages
ScheduledQuery (org.apache.hadoop.hive.metastore.api.ScheduledQuery): 8 usages
MScheduledQuery (org.apache.hadoop.hive.metastore.model.MScheduledQuery): 8 usages
MTable (org.apache.hadoop.hive.metastore.model.MTable): 8 usages
Collection (java.util.Collection): 7 usages
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 7 usages
MColumnDescriptor (org.apache.hadoop.hive.metastore.model.MColumnDescriptor): 7 usages
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 3 usages
HashMap (java.util.HashMap): 2 usages
LinkedList (java.util.LinkedList): 2 usages
List (java.util.List): 2 usages
ValidReaderWriteIdList (org.apache.hadoop.hive.common.ValidReaderWriteIdList): 2 usages
ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList): 2 usages