
Example 1 with HiveLock

Use of org.apache.hadoop.hive.ql.lockmgr.HiveLock in the project hive by apache.

From the class DDLTask, method showLocks.

/**
   * Write a list of the current locks to a file.
   * @param db
   *          the Hive database handle, used to resolve the table and partition
   *          when locks for a specific table are requested.
   * @param showLocks
   *          the locks we're interested in.
   * @return Returns 0 when execution succeeds and above 0 if it fails.
   * @throws HiveException
   *           Throws this exception if an unexpected error occurs.
   */
private int showLocks(Hive db, ShowLocksDesc showLocks) throws HiveException {
    Context ctx = driverContext.getCtx();
    HiveTxnManager txnManager = ctx.getHiveTxnManager();
    HiveLockManager lockMgr = txnManager.getLockManager();
    if (txnManager.useNewShowLocksFormat()) {
        return showLocksNewFormat(showLocks, lockMgr);
    }
    boolean isExt = showLocks.isExt();
    if (lockMgr == null) {
        throw new HiveException("show Locks LockManager not specified");
    }
    // write the results in the file
    DataOutputStream outStream = getOutputStream(showLocks.getResFile());
    try {
        List<HiveLock> locks = null;
        if (showLocks.getTableName() == null) {
            // TODO should be doing security check here.  Users should not be
            // able to see each other's locks.
            locks = lockMgr.getLocks(false, isExt);
        } else {
            locks = lockMgr.getLocks(HiveLockObject.createFrom(db, showLocks.getTableName(), showLocks.getPartSpec()), true, isExt);
        }
        Collections.sort(locks, new Comparator<HiveLock>() {

            @Override
            public int compare(HiveLock o1, HiveLock o2) {
                int cmp = o1.getHiveLockObject().getName().compareTo(o2.getHiveLockObject().getName());
                if (cmp == 0) {
                    if (o1.getHiveLockMode() == o2.getHiveLockMode()) {
                        return cmp;
                    }
                    // EXCLUSIVE locks occur before SHARED locks
                    if (o1.getHiveLockMode() == HiveLockMode.EXCLUSIVE) {
                        return -1;
                    }
                    return +1;
                }
                return cmp;
            }
        });
        Iterator<HiveLock> locksIter = locks.iterator();
        while (locksIter.hasNext()) {
            HiveLock lock = locksIter.next();
            outStream.writeBytes(lock.getHiveLockObject().getDisplayName());
            outStream.write(separator);
            outStream.writeBytes(lock.getHiveLockMode().toString());
            if (isExt) {
                HiveLockObjectData lockData = lock.getHiveLockObject().getData();
                if (lockData != null) {
                    outStream.write(terminator);
                    outStream.writeBytes("LOCK_QUERYID:" + lockData.getQueryId());
                    outStream.write(terminator);
                    outStream.writeBytes("LOCK_TIME:" + lockData.getLockTime());
                    outStream.write(terminator);
                    outStream.writeBytes("LOCK_MODE:" + lockData.getLockMode());
                    outStream.write(terminator);
                    outStream.writeBytes("LOCK_QUERYSTRING:" + lockData.getQueryStr());
                }
            }
            outStream.write(terminator);
        }
    } catch (FileNotFoundException e) {
        LOG.warn("show function: " + stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.warn("show function: " + stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString(), e);
    } finally {
        IOUtils.closeStream(outStream);
    }
    return 0;
}
Also used: EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext), Context (org.apache.hadoop.hive.ql.Context), CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext), DriverContext (org.apache.hadoop.hive.ql.DriverContext), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), DataOutputStream (java.io.DataOutputStream), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), FileNotFoundException (java.io.FileNotFoundException), HiveLock (org.apache.hadoop.hive.ql.lockmgr.HiveLock), IOException (java.io.IOException), HiveLockManager (org.apache.hadoop.hive.ql.lockmgr.HiveLockManager), AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException), InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), URISyntaxException (java.net.URISyntaxException), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException), SQLException (java.sql.SQLException), HiveAuthzPluginException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException), InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException), HiveLockObjectData (org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData), HiveTxnManager (org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager)
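
A detail worth noting in this example is the sort applied before the locks are written out: locks are ordered by the lock object's name, and when two locks cover the same object the EXCLUSIVE lock is listed before the SHARED one. The sketch below mirrors that ordering rule in isolation; FakeLock and Mode are simplified stand-ins invented for illustration and are not the real HiveLock/HiveLockMode classes.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

// Self-contained illustration of the ordering rule used in DDLTask.showLocks():
// sort by lock object name, then EXCLUSIVE before SHARED for the same name.
public class LockSortSketch {

    enum Mode { EXCLUSIVE, SHARED }   // simplified stand-in for HiveLockMode

    static final class FakeLock {     // simplified stand-in for HiveLock
        final String name;            // analogous to getHiveLockObject().getName()
        final Mode mode;              // analogous to getHiveLockMode()
        FakeLock(String name, Mode mode) { this.name = name; this.mode = mode; }
        @Override public String toString() { return name + "/" + mode; }
    }

    public static void main(String[] args) {
        List<FakeLock> locks = new ArrayList<>();
        locks.add(new FakeLock("default/t1", Mode.SHARED));
        locks.add(new FakeLock("default/t1", Mode.EXCLUSIVE));
        locks.add(new FakeLock("default/a", Mode.SHARED));

        // Same rule as the anonymous Comparator<HiveLock> in the excerpt above.
        locks.sort(Comparator
                .comparing((FakeLock l) -> l.name)
                .thenComparing(l -> l.mode == Mode.EXCLUSIVE ? 0 : 1));

        // Prints: [default/a/SHARED, default/t1/EXCLUSIVE, default/t1/SHARED]
        System.out.println(locks);
    }
}

The anonymous Comparator from the excerpt expresses the same two-level sort; Comparator.comparing/thenComparing is simply a more compact way to state it.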

Example 2 with HiveLock

Use of org.apache.hadoop.hive.ql.lockmgr.HiveLock in the project hive by apache.

From the class MoveTask, method releaseLocks.

// Release all the locks acquired for this object
// This becomes important for multi-table inserts when one branch may take much more
// time than the others. It is better to release the lock for this particular insert.
// The other option is to wait for all the branches to finish, or set
// hive.multi.insert.move.tasks.share.dependencies to true, which will mean that the
// first multi-insert results will be available when all of the branches of multi-table
// inserts are done.
private void releaseLocks(LoadTableDesc ltd) throws HiveException {
    // nothing needs to be done
    if (!conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY)) {
        return;
    }
    Context ctx = driverContext.getCtx();
    if (ctx.getHiveTxnManager().supportsAcid()) {
        // Acid LM doesn't maintain getOutputLockObjects(); this 'if' just makes it more explicit
        return;
    }
    HiveLockManager lockMgr = ctx.getHiveTxnManager().getLockManager();
    WriteEntity output = ctx.getLoadTableOutputMap().get(ltd);
    List<HiveLockObj> lockObjects = ctx.getOutputLockObjects().get(output);
    if (lockObjects == null) {
        return;
    }
    for (HiveLockObj lockObj : lockObjects) {
        List<HiveLock> locks = lockMgr.getLocks(lockObj.getObj(), false, true);
        for (HiveLock lock : locks) {
            if (lock.getHiveLockMode() == lockObj.getMode()) {
                if (ctx.getHiveLocks().remove(lock)) {
                    LOG.info("about to release lock for output: " + output.toString() + " lock: " + lock.getHiveLockObject().getName());
                    try {
                        lockMgr.unlock(lock);
                    } catch (LockException le) {
                        // should be OK since the lock is ephemeral and will eventually be deleted
                        // when the query finishes and zookeeper session is closed.
                        LOG.warn("Could not release lock " + lock.getHiveLockObject().getName());
                    }
                }
            }
        }
    }
}
Also used: Context (org.apache.hadoop.hive.ql.Context), DriverContext (org.apache.hadoop.hive.ql.DriverContext), LockException (org.apache.hadoop.hive.ql.lockmgr.LockException), HiveLockObj (org.apache.hadoop.hive.ql.lockmgr.HiveLockObj), HiveLock (org.apache.hadoop.hive.ql.lockmgr.HiveLock), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), HiveLockManager (org.apache.hadoop.hive.ql.lockmgr.HiveLockManager)
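
The two early returns in releaseLocks are driven by configuration: hive.support.concurrency must be enabled for any lock handling at all, and the comment at the top of the method mentions hive.multi.insert.move.tasks.share.dependencies as the alternative to releasing locks per branch. Below is a minimal sketch, assuming only that a HiveConf can be constructed locally, that reads both settings the way the excerpt does (getBoolVar for the ConfVars constant, plus the generic Configuration getter for the property named in the comment); it is illustrative and not part of MoveTask.

import org.apache.hadoop.hive.conf.HiveConf;

// Sketch: inspect the two settings that govern the releaseLocks() fast paths.
public class LockConfSketch {
    public static void main(String[] args) {
        HiveConf conf = new HiveConf();

        // If concurrency support is off, releaseLocks() returns immediately:
        // no locks were taken in the first place.
        boolean concurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);

        // The alternative discussed in the comment: make all branches of a
        // multi-table insert share dependencies, so results only become visible
        // once every branch has finished. Read via the generic Configuration
        // getter to avoid assuming a particular ConfVars constant name.
        boolean shareDeps = conf.getBoolean(
                "hive.multi.insert.move.tasks.share.dependencies", false);

        System.out.println("hive.support.concurrency = " + concurrency);
        System.out.println("hive.multi.insert.move.tasks.share.dependencies = " + shareDeps);
    }
}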

Aggregations

Context (org.apache.hadoop.hive.ql.Context): 2
DriverContext (org.apache.hadoop.hive.ql.DriverContext): 2
HiveLock (org.apache.hadoop.hive.ql.lockmgr.HiveLock): 2
HiveLockManager (org.apache.hadoop.hive.ql.lockmgr.HiveLockManager): 2
DataOutputStream (java.io.DataOutputStream): 1
FileNotFoundException (java.io.FileNotFoundException): 1
IOException (java.io.IOException): 1
URISyntaxException (java.net.URISyntaxException): 1
SQLException (java.sql.SQLException): 1
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 1
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 1
EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext): 1
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 1
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 1
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 1
CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext): 1
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 1
HiveLockObj (org.apache.hadoop.hive.ql.lockmgr.HiveLockObj): 1
HiveLockObjectData (org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData): 1
HiveTxnManager (org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager): 1