
Example 16 with MPartition

use of org.apache.hadoop.hive.metastore.model.MPartition in project hive by apache.

the class ObjectStore method getPartitionsViaOrmFilter.

/**
 * Gets partitions from the table via ORM (JDOQL) name filter.
 * @param dbName Database name.
 * @param tblName Table name.
 * @param partNames Partition names to get the objects for.
 * @return Resulting partitions.
 */
private List<Partition> getPartitionsViaOrmFilter(String dbName, String tblName, List<String> partNames) throws MetaException {
    if (partNames.isEmpty()) {
        return new ArrayList<>();
    }
    ObjectPair<Query, Map<String, String>> queryWithParams = getPartQueryWithParams(dbName, tblName, partNames);
    Query query = queryWithParams.getFirst();
    query.setResultClass(MPartition.class);
    query.setClass(MPartition.class);
    query.setOrdering("partitionName ascending");
    @SuppressWarnings("unchecked") List<MPartition> mparts = (List<MPartition>) query.executeWithMap(queryWithParams.getSecond());
    List<Partition> partitions = convertToParts(dbName, tblName, mparts);
    if (query != null) {
        query.closeAll();
    }
    return partitions;
}
Also used : MPartition(org.apache.hadoop.hive.metastore.model.MPartition) Partition(org.apache.hadoop.hive.metastore.api.Partition) Query(javax.jdo.Query) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) MStringList(org.apache.hadoop.hive.metastore.model.MStringList) List(java.util.List) Map(java.util.Map) WeakValueMap(org.datanucleus.util.WeakValueMap) MRoleMap(org.apache.hadoop.hive.metastore.model.MRoleMap) HashMap(java.util.HashMap)
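
The method above delegates the filter construction to getPartQueryWithParams and only wires up the result class, ordering, and parameter map. For readers unfamiliar with the javax.jdo Query API it relies on, here is a minimal standalone sketch of the same kind of name-based lookup issued directly against a PersistenceManager; the filter string, parameter names, and helper class are assumptions for illustration, not the filter ObjectStore actually builds.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import org.apache.hadoop.hive.metastore.model.MPartition;

public class PartitionNameLookupSketch {

    @SuppressWarnings("unchecked")
    public static List<MPartition> fetchByNames(PersistenceManager pm, String dbName,
            String tblName, List<String> partNames) {
        // Candidate class: every row of the PARTITIONS table maps to an MPartition.
        Query query = pm.newQuery(MPartition.class);
        // Hypothetical filter playing the role of getPartQueryWithParams: match the
        // owning table and database, plus an IN-style check on the partition name.
        query.setFilter("table.tableName == t1 && table.database.name == t2"
                + " && names.contains(partitionName)");
        query.declareParameters(
                "java.lang.String t1, java.lang.String t2, java.util.Collection names");
        query.setOrdering("partitionName ascending");

        Map<String, Object> params = new HashMap<>();
        params.put("t1", tblName);
        params.put("t2", dbName);
        params.put("names", partNames);

        List<MPartition> mparts = (List<MPartition>) query.executeWithMap(params);
        // Copy the lazily loaded result before closeAll() releases the underlying
        // result set; ObjectStore does the equivalent by converting to Partition first.
        List<MPartition> copy = new ArrayList<>(mparts);
        query.closeAll();
        return copy;
    }
}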

Example 17 with MPartition

use of org.apache.hadoop.hive.metastore.model.MPartition in project hive by apache.

the class ObjectStore method grantPrivileges.

@Override
public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectException, MetaException, NoSuchObjectException {
    boolean committed = false;
    int now = (int) (System.currentTimeMillis() / 1000);
    try {
        openTransaction();
        List<Object> persistentObjs = new ArrayList<>();
        List<HiveObjectPrivilege> privilegeList = privileges.getPrivileges();
        if (CollectionUtils.isNotEmpty(privilegeList)) {
            Iterator<HiveObjectPrivilege> privIter = privilegeList.iterator();
            Set<String> privSet = new HashSet<>();
            while (privIter.hasNext()) {
                HiveObjectPrivilege privDef = privIter.next();
                HiveObjectRef hiveObject = privDef.getHiveObject();
                String privilegeStr = privDef.getGrantInfo().getPrivilege();
                String[] privs = privilegeStr.split(",");
                String userName = privDef.getPrincipalName();
                PrincipalType principalType = privDef.getPrincipalType();
                String grantor = privDef.getGrantInfo().getGrantor();
                String grantorType = privDef.getGrantInfo().getGrantorType().toString();
                boolean grantOption = privDef.getGrantInfo().isGrantOption();
                privSet.clear();
                if (principalType == PrincipalType.ROLE) {
                    validateRole(userName);
                }
                if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) {
                    List<MGlobalPrivilege> globalPrivs = this.listPrincipalMGlobalGrants(userName, principalType);
                    if (globalPrivs != null) {
                        for (MGlobalPrivilege priv : globalPrivs) {
                            if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                                privSet.add(priv.getPrivilege());
                            }
                        }
                    }
                    for (String privilege : privs) {
                        if (privSet.contains(privilege)) {
                            throw new InvalidObjectException(privilege + " is already granted by " + grantor);
                        }
                        MGlobalPrivilege mGlobalPrivs = new MGlobalPrivilege(userName, principalType.toString(), privilege, now, grantor, grantorType, grantOption);
                        persistentObjs.add(mGlobalPrivs);
                    }
                } else if (hiveObject.getObjectType() == HiveObjectType.DATABASE) {
                    MDatabase dbObj = getMDatabase(hiveObject.getDbName());
                    if (dbObj != null) {
                        List<MDBPrivilege> dbPrivs = this.listPrincipalMDBGrants(userName, principalType, hiveObject.getDbName());
                        if (dbPrivs != null) {
                            for (MDBPrivilege priv : dbPrivs) {
                                if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                                    privSet.add(priv.getPrivilege());
                                }
                            }
                        }
                        for (String privilege : privs) {
                            if (privSet.contains(privilege)) {
                                throw new InvalidObjectException(privilege + " is already granted on database " + hiveObject.getDbName() + " by " + grantor);
                            }
                            MDBPrivilege mDb = new MDBPrivilege(userName, principalType.toString(), dbObj, privilege, now, grantor, grantorType, grantOption);
                            persistentObjs.add(mDb);
                        }
                    }
                } else if (hiveObject.getObjectType() == HiveObjectType.TABLE) {
                    MTable tblObj = getMTable(hiveObject.getDbName(), hiveObject.getObjectName());
                    if (tblObj != null) {
                        List<MTablePrivilege> tablePrivs = this.listAllMTableGrants(userName, principalType, hiveObject.getDbName(), hiveObject.getObjectName());
                        if (tablePrivs != null) {
                            for (MTablePrivilege priv : tablePrivs) {
                                if (priv.getGrantor() != null && priv.getGrantor().equalsIgnoreCase(grantor)) {
                                    privSet.add(priv.getPrivilege());
                                }
                            }
                        }
                        for (String privilege : privs) {
                            if (privSet.contains(privilege)) {
                                throw new InvalidObjectException(privilege + " is already granted on table [" + hiveObject.getDbName() + "," + hiveObject.getObjectName() + "] by " + grantor);
                            }
                            MTablePrivilege mTab = new MTablePrivilege(userName, principalType.toString(), tblObj, privilege, now, grantor, grantorType, grantOption);
                            persistentObjs.add(mTab);
                        }
                    }
                } else if (hiveObject.getObjectType() == HiveObjectType.PARTITION) {
                    MPartition partObj = this.getMPartition(hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject.getPartValues());
                    String partName = null;
                    if (partObj != null) {
                        partName = partObj.getPartitionName();
                        List<MPartitionPrivilege> partPrivs = this.listPrincipalMPartitionGrants(userName, principalType, hiveObject.getDbName(), hiveObject.getObjectName(), partObj.getPartitionName());
                        if (partPrivs != null) {
                            for (MPartitionPrivilege priv : partPrivs) {
                                if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                                    privSet.add(priv.getPrivilege());
                                }
                            }
                        }
                        for (String privilege : privs) {
                            if (privSet.contains(privilege)) {
                                throw new InvalidObjectException(privilege + " is already granted on partition [" + hiveObject.getDbName() + "," + hiveObject.getObjectName() + "," + partName + "] by " + grantor);
                            }
                            MPartitionPrivilege mTab = new MPartitionPrivilege(userName, principalType.toString(), partObj, privilege, now, grantor, grantorType, grantOption);
                            persistentObjs.add(mTab);
                        }
                    }
                } else if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
                    MTable tblObj = getMTable(hiveObject.getDbName(), hiveObject.getObjectName());
                    if (tblObj != null) {
                        if (hiveObject.getPartValues() != null) {
                            MPartition partObj = null;
                            List<MPartitionColumnPrivilege> colPrivs = null;
                            partObj = this.getMPartition(hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject.getPartValues());
                            if (partObj == null) {
                                continue;
                            }
                            colPrivs = this.listPrincipalMPartitionColumnGrants(userName, principalType, hiveObject.getDbName(), hiveObject.getObjectName(), partObj.getPartitionName(), hiveObject.getColumnName());
                            if (colPrivs != null) {
                                for (MPartitionColumnPrivilege priv : colPrivs) {
                                    if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                                        privSet.add(priv.getPrivilege());
                                    }
                                }
                            }
                            for (String privilege : privs) {
                                if (privSet.contains(privilege)) {
                                    throw new InvalidObjectException(privilege + " is already granted on column " + hiveObject.getColumnName() + " [" + hiveObject.getDbName() + "," + hiveObject.getObjectName() + "," + partObj.getPartitionName() + "] by " + grantor);
                                }
                                MPartitionColumnPrivilege mCol = new MPartitionColumnPrivilege(userName, principalType.toString(), partObj, hiveObject.getColumnName(), privilege, now, grantor, grantorType, grantOption);
                                persistentObjs.add(mCol);
                            }
                        } else {
                            List<MTableColumnPrivilege> colPrivs = null;
                            colPrivs = this.listPrincipalMTableColumnGrants(userName, principalType, hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject.getColumnName());
                            if (colPrivs != null) {
                                for (MTableColumnPrivilege priv : colPrivs) {
                                    if (priv.getGrantor().equalsIgnoreCase(grantor)) {
                                        privSet.add(priv.getPrivilege());
                                    }
                                }
                            }
                            for (String privilege : privs) {
                                if (privSet.contains(privilege)) {
                                    throw new InvalidObjectException(privilege + " is already granted on column " + hiveObject.getColumnName() + " [" + hiveObject.getDbName() + "," + hiveObject.getObjectName() + "] by " + grantor);
                                }
                                MTableColumnPrivilege mCol = new MTableColumnPrivilege(userName, principalType.toString(), tblObj, hiveObject.getColumnName(), privilege, now, grantor, grantorType, grantOption);
                                persistentObjs.add(mCol);
                            }
                        }
                    }
                }
            }
        }
        if (CollectionUtils.isNotEmpty(persistentObjs)) {
            pm.makePersistentAll(persistentObjs);
        }
        committed = commitTransaction();
    } finally {
        if (!committed) {
            rollbackTransaction();
        }
    }
    return committed;
}
Also used : ArrayList(java.util.ArrayList) MPartitionColumnPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) LinkedList(java.util.LinkedList) MStringList(org.apache.hadoop.hive.metastore.model.MStringList) List(java.util.List) MTableColumnPrivilege(org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege) HashSet(java.util.HashSet) MPartition(org.apache.hadoop.hive.metastore.model.MPartition) HiveObjectRef(org.apache.hadoop.hive.metastore.api.HiveObjectRef) MDBPrivilege(org.apache.hadoop.hive.metastore.model.MDBPrivilege) MGlobalPrivilege(org.apache.hadoop.hive.metastore.model.MGlobalPrivilege) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) MDatabase(org.apache.hadoop.hive.metastore.model.MDatabase) HiveObjectPrivilege(org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege) MTable(org.apache.hadoop.hive.metastore.model.MTable) MPartitionPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionPrivilege) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType) MTablePrivilege(org.apache.hadoop.hive.metastore.model.MTablePrivilege)
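
grantPrivileges() only consumes Thrift objects, so the interesting part for a caller is how the PrivilegeBag is assembled. Below is an illustrative sketch, assuming a single table-level SELECT grant; the database, table, user, and grantor names are made up, and the objects are populated through the Thrift-generated setters.

import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

public class GrantPrivilegesSketch {

    public static PrivilegeBag selectOnTable(String dbName, String tableName, String user) {
        // The object the privilege is attached to: a table in dbName.
        HiveObjectRef ref = new HiveObjectRef();
        ref.setObjectType(HiveObjectType.TABLE);
        ref.setDbName(dbName);
        ref.setObjectName(tableName);

        // grantPrivileges() splits this string on ",", so several privileges can be
        // packed into one grant, e.g. "SELECT,INSERT".
        PrivilegeGrantInfo grantInfo = new PrivilegeGrantInfo();
        grantInfo.setPrivilege("SELECT");
        grantInfo.setCreateTime((int) (System.currentTimeMillis() / 1000));
        grantInfo.setGrantor("admin");
        grantInfo.setGrantorType(PrincipalType.USER);
        grantInfo.setGrantOption(false);

        HiveObjectPrivilege priv = new HiveObjectPrivilege();
        priv.setHiveObject(ref);
        priv.setPrincipalName(user);
        priv.setPrincipalType(PrincipalType.USER);
        priv.setGrantInfo(grantInfo);

        // Passing this bag to grantPrivileges() persists one MTablePrivilege per
        // privilege string, following the TABLE branch shown above.
        PrivilegeBag bag = new PrivilegeBag();
        bag.addToPrivileges(priv);
        return bag;
    }
}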

Example 18 with MPartition

use of org.apache.hadoop.hive.metastore.model.MPartition in project hive by apache.

the class ObjectStore method getPartitionPsQueryResults.

/**
 * Retrieves a Collection of partition-related results from the database that match
 *  the partial specification given for a specific table.
 * @param dbName the name of the database
 * @param tableName the name of the table
 * @param part_vals the partial specification values
 * @param max_parts the maximum number of partitions to return
 * @param resultsCol the metadata column of the data to return, e.g. partitionName;
 *        if resultsCol is empty or null, a collection of MPartition objects is returned
 * @throws NoSuchObjectException
 * @return A Collection of partition-related items from the db that match the partial spec
 *         for a table. The type of each item in the collection corresponds to the column
 *         requested: e.g., if resultsCol is partitionName, the Collection contains
 *         Strings, and if resultsCol is null, it contains MPartition objects.
 */
private Collection getPartitionPsQueryResults(String dbName, String tableName, List<String> part_vals, short max_parts, String resultsCol, QueryWrapper queryWrapper) throws MetaException, NoSuchObjectException {
    dbName = normalizeIdentifier(dbName);
    tableName = normalizeIdentifier(tableName);
    Table table = getTable(dbName, tableName);
    if (table == null) {
        throw new NoSuchObjectException(dbName + "." + tableName + " table not found");
    }
    List<FieldSchema> partCols = table.getPartitionKeys();
    int numPartKeys = partCols.size();
    if (part_vals.size() > numPartKeys) {
        throw new MetaException("Incorrect number of partition values." + " numPartKeys=" + numPartKeys + ", part_val=" + part_vals.size());
    }
    partCols = partCols.subList(0, part_vals.size());
    // Construct a pattern of the form: partKey=partVal/partKey2=partVal2/...
    // where partVal is either the escaped partition value given as input,
    // or a regex of the form ".*"
    // This works because the "=" and "/" separating key names and partition key/values
    // are not escaped.
    String partNameMatcher = Warehouse.makePartName(partCols, part_vals, ".*");
    // add ".*" to the regex to match anything else afterwards the partial spec.
    if (part_vals.size() < numPartKeys) {
        partNameMatcher += ".*";
    }
    Query query = queryWrapper.query = pm.newQuery(MPartition.class);
    StringBuilder queryFilter = new StringBuilder("table.database.name == dbName");
    queryFilter.append(" && table.tableName == tableName");
    queryFilter.append(" && partitionName.matches(partialRegex)");
    query.setFilter(queryFilter.toString());
    query.declareParameters("java.lang.String dbName, " + "java.lang.String tableName, java.lang.String partialRegex");
    if (max_parts >= 0) {
        // User specified a row limit, set it on the Query
        query.setRange(0, max_parts);
    }
    if (resultsCol != null && !resultsCol.isEmpty()) {
        query.setResult(resultsCol);
    }
    return (Collection) query.execute(dbName, tableName, partNameMatcher);
}
Also used : MVersionTable(org.apache.hadoop.hive.metastore.model.MVersionTable) Table(org.apache.hadoop.hive.metastore.api.Table) MTable(org.apache.hadoop.hive.metastore.model.MTable) Query(javax.jdo.Query) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Collection(java.util.Collection) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) MPartition(org.apache.hadoop.hive.metastore.model.MPartition)
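
The filter above hinges on the partitionName.matches(partialRegex) clause, and the regex itself is built from the partial specification by Warehouse.makePartName plus a trailing ".*". The sketch below reproduces that pattern-building step in isolation so the matching behaviour is easier to see; it skips the value escaping that Warehouse performs, and the key and value strings in main are assumptions.

import java.util.Arrays;
import java.util.List;

public class PartialSpecMatcherSketch {

    public static String buildMatcher(List<String> partKeys, List<String> partVals) {
        StringBuilder matcher = new StringBuilder();
        for (int i = 0; i < partVals.size(); i++) {
            if (i > 0) {
                matcher.append('/');
            }
            String val = partVals.get(i);
            // An empty value in the partial spec matches any value for that key.
            matcher.append(partKeys.get(i)).append('=')
                   .append(val == null || val.isEmpty() ? ".*" : val);
        }
        // Trailing keys that were not specified at all are covered by a final ".*".
        if (partVals.size() < partKeys.size()) {
            matcher.append(".*");
        }
        return matcher.toString();
    }

    public static void main(String[] args) {
        // Hypothetical table partitioned by (ds, hr); only ds is pinned.
        System.out.println(buildMatcher(Arrays.asList("ds", "hr"),
                Arrays.asList("2024-01-01")));
        // Prints: ds=2024-01-01.*
    }
}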

Example 19 with MPartition

use of org.apache.hadoop.hive.metastore.model.MPartition in project hive by apache.

the class ObjectStore method updatePartitionColumnStatistics.

@Override
public boolean updatePartitionColumnStatistics(ColumnStatistics colStats, List<String> partVals) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
    boolean committed = false;
    try {
        openTransaction();
        List<ColumnStatisticsObj> statsObjs = colStats.getStatsObj();
        ColumnStatisticsDesc statsDesc = colStats.getStatsDesc();
        Table table = ensureGetTable(statsDesc.getDbName(), statsDesc.getTableName());
        Partition partition = convertToPart(getMPartition(statsDesc.getDbName(), statsDesc.getTableName(), partVals));
        List<String> colNames = new ArrayList<>();
        for (ColumnStatisticsObj statsObj : statsObjs) {
            colNames.add(statsObj.getColName());
        }
        Map<String, MPartitionColumnStatistics> oldStats = getPartitionColStats(table, statsDesc.getPartName(), colNames);
        MPartition mPartition = getMPartition(statsDesc.getDbName(), statsDesc.getTableName(), partVals);
        if (partition == null) {
            throw new NoSuchObjectException("Partition for which stats is gathered doesn't exist.");
        }
        for (ColumnStatisticsObj statsObj : statsObjs) {
            MPartitionColumnStatistics mStatsObj = StatObjectConverter.convertToMPartitionColumnStatistics(mPartition, statsDesc, statsObj);
            writeMPartitionColumnStatistics(table, partition, mStatsObj, oldStats.get(statsObj.getColName()));
        }
        Map<String, String> parameters = mPartition.getParameters();
        StatsSetupConst.setColumnStatsState(parameters, colNames);
        mPartition.setParameters(parameters);
        committed = commitTransaction();
        return committed;
    } finally {
        if (!committed) {
            rollbackTransaction();
        }
    }
}
Also used : MPartition(org.apache.hadoop.hive.metastore.model.MPartition) Partition(org.apache.hadoop.hive.metastore.api.Partition) MVersionTable(org.apache.hadoop.hive.metastore.model.MVersionTable) Table(org.apache.hadoop.hive.metastore.api.Table) MTable(org.apache.hadoop.hive.metastore.model.MTable) ArrayList(java.util.ArrayList) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) ColumnStatisticsDesc(org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MPartitionColumnStatistics(org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics)
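
A caller of updatePartitionColumnStatistics() has to supply a fully populated ColumnStatistics object whose descriptor points at one partition. The sketch below shows one way to assemble it for a single long-typed column; the column, table, and statistic values are invented for illustration and set through the Thrift-generated setters.

import java.util.Arrays;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;

public class PartitionColumnStatsSketch {

    public static ColumnStatistics forPartition(String dbName, String tableName,
            String partName) {
        ColumnStatisticsDesc desc = new ColumnStatisticsDesc();
        desc.setIsTblLevel(false);       // partition-level, not table-level, stats
        desc.setDbName(dbName);
        desc.setTableName(tableName);
        desc.setPartName(partName);      // e.g. "ds=2024-01-01/hr=00"

        // Invented numbers for one bigint column.
        LongColumnStatsData longStats = new LongColumnStatsData();
        longStats.setLowValue(0);
        longStats.setHighValue(10000);
        longStats.setNumNulls(0);
        longStats.setNumDVs(873);

        ColumnStatisticsData data = new ColumnStatisticsData();
        data.setLongStats(longStats);

        ColumnStatisticsObj obj = new ColumnStatisticsObj();
        obj.setColName("user_id");
        obj.setColType("bigint");
        obj.setStatsData(data);

        // updatePartitionColumnStatistics(colStats, partVals) converts each
        // ColumnStatisticsObj into an MPartitionColumnStatistics row, as shown above.
        ColumnStatistics colStats = new ColumnStatistics();
        colStats.setStatsDesc(desc);
        colStats.setStatsObj(Arrays.asList(obj));
        return colStats;
    }
}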

Example 20 with MPartition

use of org.apache.hadoop.hive.metastore.model.MPartition in project hive by apache.

the class ObjectStore method listPartitionsPsWithAuth.

@Override
public List<Partition> listPartitionsPsWithAuth(String db_name, String tbl_name, List<String> part_vals, short max_parts, String userName, List<String> groupNames) throws MetaException, InvalidObjectException, NoSuchObjectException {
    List<Partition> partitions = new ArrayList<>();
    boolean success = false;
    QueryWrapper queryWrapper = new QueryWrapper();
    try {
        openTransaction();
        LOG.debug("executing listPartitionNamesPsWithAuth");
        Collection parts = getPartitionPsQueryResults(db_name, tbl_name, part_vals, max_parts, null, queryWrapper);
        MTable mtbl = getMTable(db_name, tbl_name);
        for (Object o : parts) {
            Partition part = convertToPart((MPartition) o);
            // set auth privileges
            if (null != userName && null != groupNames && "TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
                String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl.getPartitionKeys()), part.getValues());
                PrincipalPrivilegeSet partAuth = getPartitionPrivilegeSet(db_name, tbl_name, partName, userName, groupNames);
                part.setPrivileges(partAuth);
            }
            partitions.add(part);
        }
        success = commitTransaction();
    } finally {
        rollbackAndCleanup(success, queryWrapper);
    }
    return partitions;
}
Also used : MPartition(org.apache.hadoop.hive.metastore.model.MPartition) Partition(org.apache.hadoop.hive.metastore.api.Partition) MTable(org.apache.hadoop.hive.metastore.model.MTable) PrincipalPrivilegeSet(org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet) ArrayList(java.util.ArrayList) Collection(java.util.Collection)
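
To round off the example, here is a hypothetical caller of listPartitionsPsWithAuth(): it pins only the first partition key, caps the result at 100 partitions, and asks for partition-level privileges for one user. Every name in the call (database, table, user, group) is an assumption for illustration.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.metastore.ObjectStore;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;

public class ListPartitionsPsWithAuthSketch {

    public static void printPartitionGrants(ObjectStore store) throws Exception {
        List<Partition> parts = store.listPartitionsPsWithAuth(
                "analytics",                  // db_name
                "page_views",                 // tbl_name
                Arrays.asList("2024-01-01"),  // partial spec: only the first key is pinned
                (short) 100,                  // max_parts
                "alice",                      // userName
                Arrays.asList("analysts"));   // groupNames
        for (Partition p : parts) {
            // Privileges are only filled in when the table parameter
            // PARTITION_LEVEL_PRIVILEGE is set to "TRUE", as the loop above shows.
            PrincipalPrivilegeSet auth = p.getPrivileges();
            System.out.println(p.getValues() + " -> " + auth);
        }
    }
}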

Aggregations

MPartition (org.apache.hadoop.hive.metastore.model.MPartition): 21
MTable (org.apache.hadoop.hive.metastore.model.MTable): 14
ArrayList (java.util.ArrayList): 11
Partition (org.apache.hadoop.hive.metastore.api.Partition): 9
Query (javax.jdo.Query): 6
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 5
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 5
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 5
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 5
MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint): 5
MPartitionColumnPrivilege (org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege): 5
MPartitionPrivilege (org.apache.hadoop.hive.metastore.model.MPartitionPrivilege): 5
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 4
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 4
MTableColumnPrivilege (org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege): 4
LinkedList (java.util.LinkedList): 3
List (java.util.List): 3
HiveObjectPrivilege (org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege): 3
HiveObjectRef (org.apache.hadoop.hive.metastore.api.HiveObjectRef): 3
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 3