Example 1 with MWMPool

Use of org.apache.hadoop.hive.metastore.model.MWMPool in the Apache Hive project.

From class ObjectStore, method alterPool.

@Override
public void alterPool(WMNullablePool pool, String poolPath) throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, MetaException {
    boolean commited = false;
    try {
        openTransaction();
        MWMResourcePlan resourcePlan = getMWMResourcePlan(pool.getResourcePlanName(), true);
        MWMPool mPool = getPool(resourcePlan, poolPath);
        pm.retrieve(mPool);
        if (pool.isSetAllocFraction()) {
            mPool.setAllocFraction(pool.getAllocFraction());
        }
        if (pool.isSetQueryParallelism()) {
            mPool.setQueryParallelism(pool.getQueryParallelism());
        }
        if (pool.isSetIsSetSchedulingPolicy() && pool.isIsSetSchedulingPolicy()) {
            if (pool.isSetSchedulingPolicy()) {
                String policy = pool.getSchedulingPolicy();
                if (!MetaStoreUtils.isValidSchedulingPolicy(policy)) {
                    throw new InvalidOperationException("Invalid scheduling policy " + policy);
                }
                mPool.setSchedulingPolicy(pool.getSchedulingPolicy());
            } else {
                mPool.setSchedulingPolicy(null);
            }
        }
        if (pool.isSetPoolPath() && !pool.getPoolPath().equals(mPool.getPath())) {
            moveDescendents(resourcePlan, mPool.getPath(), pool.getPoolPath());
            mPool.setPath(pool.getPoolPath());
        }
        commited = commitTransaction();
    } finally {
        rollbackAndCleanup(commited, (Query) null);
    }
}
Also used: MWMPool(org.apache.hadoop.hive.metastore.model.MWMPool), InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException), MWMResourcePlan(org.apache.hadoop.hive.metastore.model.MWMResourcePlan)
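
For orientation, a hypothetical caller-side sketch of the method above: renaming a pool while resetting its scheduling policy. The WMNullablePool setters are assumed from the Thrift-generated metastore API that the snippet reads; the plan and pool names are invented for illustration.

import org.apache.hadoop.hive.metastore.ObjectStore;
import org.apache.hadoop.hive.metastore.api.WMNullablePool;

// Hypothetical helper (not from the Hive sources): rename pool "etl" of plan "plan1" to
// "batch" and reset its scheduling policy with a single alterPool call.
public static void renamePoolAndResetPolicy(ObjectStore store) throws Exception {
    WMNullablePool change = new WMNullablePool();
    change.setResourcePlanName("plan1");
    // A new path that differs from the current one triggers the moveDescendents branch.
    change.setPoolPath("batch");
    // Assumed Thrift setter: marks the scheduling-policy field as meaningful. No policy
    // string is supplied, so the method takes the else branch above and stores null.
    change.setIsSetSchedulingPolicy(true);
    // The second argument is the pool's current path, used to look up the MWMPool row.
    store.alterPool(change, "etl");
}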

Example 2 with MWMPool

Use of org.apache.hadoop.hive.metastore.model.MWMPool in the Apache Hive project.

From class ObjectStore, method getResourcePlanErrors.

private WMValidateResourcePlanResponse getResourcePlanErrors(MWMResourcePlan mResourcePlan) {
    WMValidateResourcePlanResponse response = new WMValidateResourcePlanResponse();
    response.setErrors(new ArrayList());
    response.setWarnings(new ArrayList());
    Integer rpParallelism = mResourcePlan.getQueryParallelism();
    if (rpParallelism != null && rpParallelism < 1) {
        response.addToErrors("Query parallelism should for resource plan be positive. Got: " + rpParallelism);
    }
    int totalQueryParallelism = 0;
    Map<String, PoolData> poolInfo = new HashMap<>();
    for (MWMPool pool : mResourcePlan.getPools()) {
        PoolData currentPoolData = getPoolData(poolInfo, pool.getPath());
        currentPoolData.found = true;
        String parent = getParentPath(pool.getPath(), "");
        PoolData parentPoolData = getPoolData(poolInfo, parent);
        parentPoolData.hasChildren = true;
        parentPoolData.totalChildrenAllocFraction += pool.getAllocFraction();
        if (pool.getQueryParallelism() != null && pool.getQueryParallelism() < 1) {
            response.addToErrors("Invalid query parallelism for pool: " + pool.getPath());
        } else {
            totalQueryParallelism += pool.getQueryParallelism();
        }
        if (!MetaStoreUtils.isValidSchedulingPolicy(pool.getSchedulingPolicy())) {
            response.addToErrors("Invalid scheduling policy " + pool.getSchedulingPolicy() + " for pool: " + pool.getPath());
        }
    }
    if (rpParallelism != null) {
        if (rpParallelism < totalQueryParallelism) {
            response.addToErrors("Sum of all pools' query parallelism: " + totalQueryParallelism + " exceeds resource plan query parallelism: " + rpParallelism);
        } else if (rpParallelism != totalQueryParallelism) {
            response.addToWarnings("Sum of all pools' query parallelism: " + totalQueryParallelism + " is less than resource plan query parallelism: " + rpParallelism);
        }
    }
    for (Entry<String, PoolData> entry : poolInfo.entrySet()) {
        final PoolData poolData = entry.getValue();
        final boolean isRoot = entry.getKey().isEmpty();
        // Special case for root parent
        if (isRoot) {
            poolData.found = true;
            if (!poolData.hasChildren) {
                response.addToErrors("Root has no children");
            } else if (Math.abs(1.0 - poolData.totalChildrenAllocFraction) > 0.001) {
                response.addToErrors("Sum of root children pools' alloc fraction should be 1.0 got: " + poolData.totalChildrenAllocFraction + " for pool: " + entry.getKey());
            }
        }
        if (!poolData.found) {
            response.addToErrors("Pool does not exists but has children: " + entry.getKey());
        }
        if (poolData.hasChildren) {
            if (!isRoot && 1.0 <= poolData.totalChildrenAllocFraction) {
                response.addToErrors("Sum of children pools' alloc fraction should be less than 1 got: " + poolData.totalChildrenAllocFraction + " for pool: " + entry.getKey());
            }
        }
    }
    // Trigger expressions and actions are not validated here, since counter information is not
    // available and grammar check is there in the language itself.
    return response;
}
Also used: HashMap(java.util.HashMap), MWMPool(org.apache.hadoop.hive.metastore.model.MWMPool), ArrayList(java.util.ArrayList), WMValidateResourcePlanResponse(org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse), MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint), SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)
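
The validation above relies on a PoolData accumulator and the getPoolData / getParentPath helpers, which the excerpt does not include. The following minimal sketch is reconstructed from how those names are used in the loop; the names and fields come from the snippet, the bodies are assumptions.

import java.util.Map;

// Per-pool bookkeeping gathered while walking the plan's pools (fields as used above).
private static class PoolData {
    boolean found = false;                    // a pool with this exact path exists
    boolean hasChildren = false;              // at least one pool names this path as its parent
    double totalChildrenAllocFraction = 0.0;  // sum of the direct children's alloc fractions
}

// Fetch-or-create the accumulator for a given pool path.
private static PoolData getPoolData(Map<String, PoolData> poolInfo, String path) {
    return poolInfo.computeIfAbsent(path, k -> new PoolData());
}

// Parent of "etl.high.adhoc" is "etl.high"; a top-level pool has no '.' and maps to the
// supplied default ("" above), which the final loop treats as the synthetic root entry.
private static String getParentPath(String path, String defaultValue) {
    int ix = path.lastIndexOf('.');
    return ix < 0 ? defaultValue : path.substring(0, ix);
}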

Example 3 with MWMPool

Use of org.apache.hadoop.hive.metastore.model.MWMPool in the Apache Hive project.

From class ObjectStore, method createOrUpdateWMMapping.

@Override
public void createOrUpdateWMMapping(WMMapping mapping, boolean update) throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, MetaException {
    EntityType entityType = EntityType.valueOf(mapping.getEntityType().trim().toUpperCase());
    String entityName = normalizeIdentifier(mapping.getEntityName());
    boolean commited = false;
    Query query = null;
    try {
        openTransaction();
        MWMResourcePlan resourcePlan = getMWMResourcePlan(mapping.getResourcePlanName(), true);
        MWMPool pool = null;
        if (mapping.isSetPoolPath()) {
            pool = getPool(resourcePlan, mapping.getPoolPath());
        }
        if (!update) {
            MWMMapping mMapping = new MWMMapping(resourcePlan, entityType, entityName, pool, mapping.getOrdering());
            pm.makePersistent(mMapping);
        } else {
            query = pm.newQuery(MWMMapping.class, "resourcePlan == rp && entityType == type " + "&& entityName == name");
            query.declareParameters("MWMResourcePlan rp, java.lang.String type, java.lang.String name");
            query.setUnique(true);
            MWMMapping mMapping = (MWMMapping) query.execute(resourcePlan, entityType.toString(), entityName);
            mMapping.setPool(pool);
        }
        commited = commitTransaction();
    } finally {
        rollbackAndCleanup(commited, query);
    }
}
Also used: EntityType(org.apache.hadoop.hive.metastore.model.MWMMapping.EntityType), Query(javax.jdo.Query), MWMPool(org.apache.hadoop.hive.metastore.model.MWMPool), MWMMapping(org.apache.hadoop.hive.metastore.model.MWMMapping), MWMResourcePlan(org.apache.hadoop.hive.metastore.model.MWMResourcePlan)
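
A hypothetical caller-side sketch for the method above, mapping a user to a pool. The WMMapping setters are assumed from the Thrift-generated metastore API referenced in the snippet; the plan, user, and pool names are invented.

import org.apache.hadoop.hive.metastore.ObjectStore;
import org.apache.hadoop.hive.metastore.api.WMMapping;

// Hypothetical helper (not from the Hive sources): route queries from user "alice" to the
// pool "etl.high" of resource plan "plan1".
public static void mapUserToPool(ObjectStore store) throws Exception {
    WMMapping mapping = new WMMapping();
    mapping.setResourcePlanName("plan1");
    mapping.setEntityType("USER");    // parsed above with EntityType.valueOf(...)
    mapping.setEntityName("alice");
    mapping.setPoolPath("etl.high");  // optional; when unset the mapping carries no pool
    // update == false persists a new MWMMapping; update == true looks the row up by
    // (resource plan, entity type, entity name) and only rewrites its pool.
    store.createOrUpdateWMMapping(mapping, false);
}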

Example 4 with MWMPool

Use of org.apache.hadoop.hive.metastore.model.MWMPool in the Apache Hive project.

From class ObjectStore, method moveDescendents.

private void moveDescendents(MWMResourcePlan resourcePlan, String path, String newPoolPath) throws NoSuchObjectException {
    if (!poolParentExists(resourcePlan, newPoolPath)) {
        throw new NoSuchObjectException("Pool path is invalid, the parent does not exist");
    }
    boolean commited = false;
    Query query = null;
    openTransaction();
    try {
        query = pm.newQuery(MWMPool.class, "resourcePlan == rp && path.startsWith(poolPath)");
        query.declareParameters("MWMResourcePlan rp, java.lang.String poolPath");
        List<MWMPool> descPools = (List<MWMPool>) query.execute(resourcePlan, path + ".");
        pm.retrieveAll(descPools);
        for (MWMPool pool : descPools) {
            pool.setPath(newPoolPath + pool.getPath().substring(path.length()));
        }
        commited = commitTransaction();
    } finally {
        rollbackAndCleanup(commited, query);
    }
}
Also used: Query(javax.jdo.Query), MWMPool(org.apache.hadoop.hive.metastore.model.MWMPool), NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException), LinkedList(java.util.LinkedList), MStringList(org.apache.hadoop.hive.metastore.model.MStringList), ArrayList(java.util.ArrayList), List(java.util.List)
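
The loop body is a plain prefix rewrite on each descendant's pool path. A standalone illustration of that string arithmetic (no metastore involved; the paths are invented):

// Standalone demo of the path rewrite performed in the loop above: descendants of "etl"
// are re-rooted under "prod.etl" by swapping the leading prefix.
public class PoolPathRewriteDemo {

    static String rewrite(String descendantPath, String oldPath, String newPoolPath) {
        // Same arithmetic as pool.setPath(newPoolPath + pool.getPath().substring(path.length()))
        return newPoolPath + descendantPath.substring(oldPath.length());
    }

    public static void main(String[] args) {
        // The JDOQL filter above matches "etl." (note the trailing dot appended to path),
        // so only descendants are rewritten here; the pool "etl" itself is renamed by the
        // caller, e.g. alterPool in Example 1.
        System.out.println(rewrite("etl.high", "etl", "prod.etl"));        // prod.etl.high
        System.out.println(rewrite("etl.high.adhoc", "etl", "prod.etl"));  // prod.etl.high.adhoc
    }
}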

Example 5 with MWMPool

Use of org.apache.hadoop.hive.metastore.model.MWMPool in the Apache Hive project.

From class ObjectStore, method createWMTriggerToPoolMapping.

@Override
public void createWMTriggerToPoolMapping(String resourcePlanName, String triggerName, String poolPath) throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, MetaException {
    boolean commited = false;
    try {
        openTransaction();
        MWMResourcePlan resourcePlan = getMWMResourcePlan(resourcePlanName, true);
        MWMPool pool = getPool(resourcePlan, poolPath);
        MWMTrigger trigger = getTrigger(resourcePlan, triggerName);
        pool.getTriggers().add(trigger);
        trigger.getPools().add(pool);
        pm.makePersistent(pool);
        pm.makePersistent(trigger);
        commited = commitTransaction();
    } finally {
        rollbackAndCleanup(commited, (Query) null);
    }
}
Also used: MWMPool(org.apache.hadoop.hive.metastore.model.MWMPool), MWMTrigger(org.apache.hadoop.hive.metastore.model.MWMTrigger), MWMResourcePlan(org.apache.hadoop.hive.metastore.model.MWMResourcePlan)
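
A hypothetical usage sketch for the method above; the plan, trigger, and pool names are invented. The snippet adds the link on both sides (pool.getTriggers() and trigger.getPools()) because the pool-to-trigger relation is many-to-many, so both collections are kept consistent before persisting.

import org.apache.hadoop.hive.metastore.ObjectStore;

// Hypothetical caller (not from the Hive sources): attach trigger "slow_query_move" of
// resource plan "plan1" to its pool "etl.high".
public static void attachTriggerToPool(ObjectStore store) throws Exception {
    store.createWMTriggerToPoolMapping("plan1", "slow_query_move", "etl.high");
}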

Aggregations

MWMPool (org.apache.hadoop.hive.metastore.model.MWMPool): 13 usages
MWMResourcePlan (org.apache.hadoop.hive.metastore.model.MWMResourcePlan): 7 usages
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 4 usages
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 4 usages
MWMTrigger (org.apache.hadoop.hive.metastore.model.MWMTrigger): 4 usages
Query (javax.jdo.Query): 3 usages
MWMMapping (org.apache.hadoop.hive.metastore.model.MWMMapping): 3 usages
IOException (java.io.IOException): 2 usages
SQLException (java.sql.SQLException): 2 usages
SQLIntegrityConstraintViolationException (java.sql.SQLIntegrityConstraintViolationException): 2 usages
ArrayList (java.util.ArrayList): 2 usages
HashMap (java.util.HashMap): 2 usages
JDOCanRetryException (javax.jdo.JDOCanRetryException): 2 usages
JDODataStoreException (javax.jdo.JDODataStoreException): 2 usages
JDOException (javax.jdo.JDOException): 2 usages
JDOObjectNotFoundException (javax.jdo.JDOObjectNotFoundException): 2 usages
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 2 usages
InvalidInputException (org.apache.hadoop.hive.metastore.api.InvalidInputException): 2 usages
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 2 usages
InvalidPartitionException (org.apache.hadoop.hive.metastore.api.InvalidPartitionException): 2 usages