Search in sources:

Example 11 with MConstraint

use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method getUniqueConstraintsViaJdo.

/**
 * Retrieves all UNIQUE constraints defined on the given table via a JDO query
 * and maps each {@code MConstraint} row to a Thrift-level {@code SQLUniqueConstraint}.
 *
 * @param catName catalog name of the table
 * @param dbName  database name of the table
 * @param tblName table name
 * @return the unique constraints for the table (empty list when none exist)
 */
private List<SQLUniqueConstraint> getUniqueConstraintsViaJdo(String catName, String dbName, String tblName) {
    List<SQLUniqueConstraint> result = null;
    boolean committed = false;
    Query query = null;
    try {
        openTransaction();
        query = pm.newQuery(MConstraint.class, "parentTable.tableName == tbl_name && parentTable.database.name == db_name && parentTable.database.catalogName == catName &&" + " constraintType == MConstraint.UNIQUE_CONSTRAINT");
        query.declareParameters("java.lang.String tbl_name, java.lang.String db_name, java.lang.String catName");
        Collection<?> rows = (Collection<?>) query.execute(tblName, dbName, catName);
        // Force full materialization of the lazily-loaded JDO results.
        pm.retrieveAll(rows);
        result = new ArrayList<>();
        for (Object row : rows) {
            MConstraint mc = (MConstraint) row;
            // The referenced columns come from the parent column descriptor when
            // present; otherwise the constraint targets a partition key.
            List<MFieldSchema> cols = mc.getParentColumn() != null ? mc.getParentColumn().getCols() : mc.getParentTable().getPartitionKeys();
            int evr = mc.getEnableValidateRely();
            // Bit-packed flags: 4 = ENABLE, 2 = VALIDATE, 1 = RELY.
            boolean enable = (evr & 4) != 0;
            boolean validate = (evr & 2) != 0;
            boolean rely = (evr & 1) != 0;
            result.add(new SQLUniqueConstraint(catName, dbName, tblName, cols.get(mc.getParentIntegerIndex()).getName(), mc.getPosition(), mc.getConstraintName(), enable, validate, rely));
        }
        committed = commitTransaction();
    } finally {
        // Rolls back when the commit did not happen and closes the query.
        rollbackAndCleanup(committed, query);
    }
    return result;
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery) Query(javax.jdo.Query) MScheduledQuery(org.apache.hadoop.hive.metastore.model.MScheduledQuery) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) Collection(java.util.Collection) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)

Example 12 with MConstraint

use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method listAllTableConstraintsWithOptionalConstraintName.

// Lists every constraint in which the given table participates, either as the
// parent or as the child table. When constraintname is non-null the result is
// narrowed to constraints with that (normalized) name. Runs two queries: one to
// collect matching constraint names, then one to fetch the MConstraint objects
// whose names are in that set. Both queries are closed via try-with-resources.
private List<MConstraint> listAllTableConstraintsWithOptionalConstraintName(String catName, String dbName, String tableName, String constraintname) {
    // Normalize all identifiers so lookups are case-consistent with storage.
    catName = normalizeIdentifier(catName);
    dbName = normalizeIdentifier(dbName);
    tableName = normalizeIdentifier(tableName);
    constraintname = constraintname != null ? normalizeIdentifier(constraintname) : null;
    List<MConstraint> mConstraints = null;
    List<String> constraintNames = new ArrayList<>();
    // The name filter clause and its parameter declaration are both appended
    // conditionally; the two must stay in sync or the query will not parse.
    try (QueryWrapper queryForConstraintName = new QueryWrapper(pm.newQuery("select constraintName from org.apache.hadoop.hive.metastore.model.MConstraint  where " + "((parentTable.tableName == ptblname && parentTable.database.name == pdbname && " + "parentTable.database.catalogName == pcatname) || " + "(childTable != null && childTable.tableName == ctblname &&" + "childTable.database.name == cdbname && childTable.database.catalogName == ccatname)) " + (constraintname != null ? " && constraintName == constraintname" : "")));
        QueryWrapper queryForMConstraint = new QueryWrapper(pm.newQuery(MConstraint.class))) {
        queryForConstraintName.declareParameters("java.lang.String ptblname, java.lang.String pdbname," + "java.lang.String pcatname, java.lang.String ctblname, java.lang.String cdbname," + "java.lang.String ccatname" + (constraintname != null ? ", java.lang.String constraintname" : ""));
        // The table is bound as both parent and child, so constraints on either
        // side are found; the optional name is passed only when declared above.
        Collection<?> constraintNamesColl = constraintname != null ? ((Collection<?>) queryForConstraintName.executeWithArray(tableName, dbName, catName, tableName, dbName, catName, constraintname)) : ((Collection<?>) queryForConstraintName.executeWithArray(tableName, dbName, catName, tableName, dbName, catName));
        for (Iterator<?> i = constraintNamesColl.iterator(); i.hasNext(); ) {
            String currName = (String) i.next();
            constraintNames.add(currName);
        }
        // Second query: fetch the full MConstraint rows whose names were found.
        queryForMConstraint.setFilter("param.contains(constraintName)");
        queryForMConstraint.declareParameters("java.util.Collection param");
        Collection<?> constraints = (Collection<?>) queryForMConstraint.execute(constraintNames);
        mConstraints = new ArrayList<>();
        for (Iterator<?> i = constraints.iterator(); i.hasNext(); ) {
            MConstraint currConstraint = (MConstraint) i.next();
            mConstraints.add(currConstraint);
        }
    }
    return mConstraints;
}
Also used : MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) ArrayList(java.util.ArrayList) Collection(java.util.Collection)

Example 13 with MConstraint

use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method addConstraint.

/**
 * Builds (but does not persist) an {@code MConstraint} of the given type on a
 * single column of the specified table.
 *
 * @param catName         catalog of the parent table
 * @param tableDB         database of the parent table
 * @param tableName       parent table name
 * @param columnName      the single column the constraint references
 * @param ccName          user-supplied constraint name, or null to auto-generate one
 * @param isEnable        ENABLE flag
 * @param isRely          RELY flag
 * @param isValidate      VALIDATE flag
 * @param constraintType  the MConstraint type code
 * @param constraintValue constraint payload (e.g. a default or check expression)
 * @param retrieveCD      whether to deep-retrieve the table's column descriptor;
 *                        false e.g. while the table is still being created
 * @return the constructed constraint
 * @throws InvalidObjectException if the table is missing or the name is taken
 */
private MConstraint addConstraint(String catName, String tableDB, String tableName, String columnName, String ccName, boolean isEnable, boolean isRely, boolean isValidate, int constraintType, String constraintValue, boolean retrieveCD) throws InvalidObjectException, MetaException {
    AttachedMTableInfo tableInfo = getMTable(catName, tableDB, tableName, retrieveCD);
    MTable parent = tableInfo.mtbl;
    if (parent == null) {
        throw new InvalidObjectException("Parent table not found: " + tableName);
    }
    MColumnDescriptor cd = retrieveCD ? tableInfo.mcd : parent.getSd().getCD();
    int colIndex = getColumnIndexFromTableColumns(cd == null ? null : cd.getCols(), columnName);
    if (colIndex == -1 && parent.getPartitionKeys() != null) {
        // Not a regular column; fall back to the partition key list.
        cd = null;
        colIndex = getColumnIndexFromTableColumns(parent.getPartitionKeys(), columnName);
    }
    String name;
    if (ccName != null) {
        name = normalizeIdentifier(ccName);
        if (constraintNameAlreadyExists(parent, name)) {
            String fqConstraintName = String.format("%s.%s.%s", parent.getDatabase().getName(), parent.getTableName(), name);
            throw new InvalidObjectException("Constraint name already exists: " + fqConstraintName);
        }
    } else {
        name = generateConstraintName(parent, tableDB, tableName, columnName, "dc");
    }
    // Pack ENABLE (4), VALIDATE (2), RELY (1) into a single bit field.
    int enableValidateRely = (isEnable ? 4 : 0) + (isValidate ? 2 : 0) + (isRely ? 1 : 0);
    // Position 1: this kind of constraint references exactly one column.
    return new MConstraint(name, 1, constraintType, null, null, enableValidateRely, parent, null, cd, null, null, colIndex, constraintValue);
}
Also used : MTable(org.apache.hadoop.hive.metastore.model.MTable) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MColumnDescriptor(org.apache.hadoop.hive.metastore.model.MColumnDescriptor) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)

Example 14 with MConstraint

use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method dropConstraint.

/**
 * Drops every constraint on the given table matching {@code constraintName}.
 * Inside a metastore transaction: looks up the matching constraints and deletes
 * them all; when none are found and {@code missingOk} is false, throws.
 *
 * @throws NoSuchObjectException if no matching constraint exists and missingOk is false
 */
@Override
public void dropConstraint(String catName, String dbName, String tableName, String constraintName, boolean missingOk) throws NoSuchObjectException {
    boolean committed = false;
    try {
        openTransaction();
        List<MConstraint> matches = listAllTableConstraintsWithOptionalConstraintName(catName, dbName, tableName, constraintName);
        if (CollectionUtils.isNotEmpty(matches)) {
            pm.deletePersistentAll(matches);
        } else if (!missingOk) {
            throw new NoSuchObjectException("The constraint: " + constraintName + " does not exist for the associated table: " + dbName + "." + tableName);
        }
        committed = commitTransaction();
    } finally {
        // Commit never happened (exception or failed commit): undo the transaction.
        if (!committed) {
            rollbackTransaction();
        }
    }
}
Also used : MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)

Example 15 with MConstraint

use of org.apache.hadoop.hive.metastore.model.MConstraint in project hive by apache.

In class ObjectStore, method getCheckConstraintsViaJdo.

/**
 * Retrieves all CHECK constraints defined on the given table via a JDO query
 * and maps each {@code MConstraint} row to a Thrift-level {@code SQLCheckConstraint}.
 *
 * @param catName catalog name of the table
 * @param dbName  database name of the table
 * @param tblName table name
 * @return the check constraints for the table (empty list when none exist)
 */
private List<SQLCheckConstraint> getCheckConstraintsViaJdo(String catName, String dbName, String tblName) {
    List<SQLCheckConstraint> result = null;
    boolean committed = false;
    Query query = null;
    try {
        openTransaction();
        query = pm.newQuery(MConstraint.class, "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&" + " parentTable.database.catalogName == catName && constraintType == MConstraint.CHECK_CONSTRAINT");
        query.declareParameters("java.lang.String tbl_name, java.lang.String db_name, java.lang.String catName");
        Collection<?> rows = (Collection<?>) query.execute(tblName, dbName, catName);
        // Force full materialization of the lazily-loaded JDO results.
        pm.retrieveAll(rows);
        result = new ArrayList<>();
        for (Object row : rows) {
            MConstraint mc = (MConstraint) row;
            // The referenced columns come from the parent column descriptor when
            // present; otherwise the constraint targets a partition key.
            List<MFieldSchema> cols = mc.getParentColumn() != null ? mc.getParentColumn().getCols() : mc.getParentTable().getPartitionKeys();
            int evr = mc.getEnableValidateRely();
            // Bit-packed flags: 4 = ENABLE, 2 = VALIDATE, 1 = RELY.
            boolean enable = (evr & 4) != 0;
            boolean validate = (evr & 2) != 0;
            boolean rely = (evr & 1) != 0;
            result.add(new SQLCheckConstraint(catName, dbName, tblName, cols.get(mc.getParentIntegerIndex()).getName(), mc.getDefaultValue(), mc.getConstraintName(), enable, validate, rely));
        }
        committed = commitTransaction();
    } finally {
        // Rolls back when the commit did not happen and closes the query.
        rollbackAndCleanup(committed, query);
    }
    return result;
}
Also used : SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery) Query(javax.jdo.Query) MScheduledQuery(org.apache.hadoop.hive.metastore.model.MScheduledQuery) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) Collection(java.util.Collection) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)

Aggregations

MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint)21 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)14 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)14 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)13 SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)13 ArrayList (java.util.ArrayList)11 MFieldSchema (org.apache.hadoop.hive.metastore.model.MFieldSchema)9 Query (javax.jdo.Query)8 ScheduledQuery (org.apache.hadoop.hive.metastore.api.ScheduledQuery)8 MScheduledQuery (org.apache.hadoop.hive.metastore.model.MScheduledQuery)8 MTable (org.apache.hadoop.hive.metastore.model.MTable)8 Collection (java.util.Collection)7 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)7 MColumnDescriptor (org.apache.hadoop.hive.metastore.model.MColumnDescriptor)7 SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey)3 HashMap (java.util.HashMap)2 LinkedList (java.util.LinkedList)2 List (java.util.List)2 ValidReaderWriteIdList (org.apache.hadoop.hive.common.ValidReaderWriteIdList)2 ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList)2