
Example 1 with ShowLocksResponse

use of org.apache.hadoop.hive.metastore.api.ShowLocksResponse in project hive by apache.

In the class DDLTask, the method showLocksNewFormat:

private int showLocksNewFormat(ShowLocksDesc showLocks, HiveLockManager lm) throws HiveException {
    DbLockManager lockMgr;
    if (!(lm instanceof DbLockManager)) {
        throw new RuntimeException("New lock format only supported with db lock manager.");
    }
    lockMgr = (DbLockManager) lm;
    String dbName = showLocks.getDbName();
    String tblName = showLocks.getTableName();
    Map<String, String> partSpec = showLocks.getPartSpec();
    if (dbName == null && tblName != null) {
        dbName = SessionState.get().getCurrentDatabase();
    }
    ShowLocksRequest rqst = new ShowLocksRequest();
    rqst.setDbname(dbName);
    rqst.setTablename(tblName);
    if (partSpec != null) {
        List<String> keyList = new ArrayList<String>();
        List<String> valList = new ArrayList<String>();
        for (Map.Entry<String, String> part : partSpec.entrySet()) {
            // collect partition keys and values without mutating the map while iterating it
            keyList.add(part.getKey());
            valList.add(part.getValue());
        }
        String partName = FileUtils.makePartName(keyList, valList);
        rqst.setPartname(partName);
    }
    ShowLocksResponse rsp = lockMgr.getLocks(rqst);
    // write the results in the file
    DataOutputStream os = getOutputStream(showLocks.getResFile());
    try {
        dumpLockInfo(os, rsp);
    } catch (FileNotFoundException e) {
        LOG.warn("show function: ", e);
        return 1;
    } catch (IOException e) {
        LOG.warn("show function: ", e);
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString());
    } finally {
        IOUtils.closeStream(os);
    }
    return 0;
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) DbLockManager(org.apache.hadoop.hive.ql.lockmgr.DbLockManager) DataOutputStream(java.io.DataOutputStream) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) ArrayList(java.util.ArrayList) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) URISyntaxException(java.net.URISyntaxException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) SQLException(java.sql.SQLException) FileNotFoundException(java.io.FileNotFoundException) HiveAuthzPluginException(org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException) ShowLocksRequest(org.apache.hadoop.hive.metastore.api.ShowLocksRequest) ShowLocksResponse(org.apache.hadoop.hive.metastore.api.ShowLocksResponse)
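
For context, here is a minimal sketch of the same request/response pattern on its own, assuming a DbLockManager is already available. The ShowLocksCountSketch class, the countLocks helper, and its parameters are illustrative names, and the Hive-common FileUtils import is an assumption about where makePartName lives; only the ShowLocksRequest/ShowLocksResponse calls shown above are taken from the snippet.

import java.util.List;

import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
import org.apache.hadoop.hive.ql.lockmgr.DbLockManager;

public class ShowLocksCountSketch {

    // Hypothetical helper: builds the same kind of filtered request that
    // showLocksNewFormat assembles and returns how many locks match it.
    static int countLocks(DbLockManager lockMgr, String db, String table,
                          List<String> partKeys, List<String> partVals) throws Exception {
        ShowLocksRequest rqst = new ShowLocksRequest();
        rqst.setDbname(db);
        rqst.setTablename(table);
        if (partKeys != null && !partKeys.isEmpty()) {
            // Encode the partition filter the same way the snippet above does
            // (assumed: org.apache.hadoop.hive.common.FileUtils provides makePartName).
            rqst.setPartname(FileUtils.makePartName(partKeys, partVals));
        }
        ShowLocksResponse rsp = lockMgr.getLocks(rqst);
        return rsp.getLocks() == null ? 0 : rsp.getLocks().size();
    }
}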

Example 2 with ShowLocksResponse

use of org.apache.hadoop.hive.metastore.api.ShowLocksResponse in project hive by apache.

In the class TestStreaming, the method testHeartbeat:

@Test
public void testHeartbeat() throws Exception {
    HiveEndPoint endPt = new HiveEndPoint(metaStoreURI, dbName2, tblName2, null);
    StreamingConnection connection = endPt.newConnection(false, "UT_" + Thread.currentThread().getName());
    DelimitedInputWriter writer = new DelimitedInputWriter(fieldNames2, ",", endPt, connection);
    TransactionBatch txnBatch = connection.fetchTransactionBatch(5, writer);
    txnBatch.beginNextTransaction();
    // todo: this should ideally check Transaction heartbeat as well, but heartbeat
    // timestamp is not reported yet
    // GetOpenTxnsInfoResponse txnresp = msClient.showTxns();
    ShowLocksRequest request = new ShowLocksRequest();
    request.setDbname(dbName2);
    request.setTablename(tblName2);
    ShowLocksResponse response = msClient.showLocks(request);
    Assert.assertEquals("Wrong nubmer of locks: " + response, 1, response.getLocks().size());
    ShowLocksResponseElement lock = response.getLocks().get(0);
    long acquiredAt = lock.getAcquiredat();
    long heartbeatAt = lock.getLastheartbeat();
    txnBatch.heartbeat();
    response = msClient.showLocks(request);
    Assert.assertEquals("Wrong number of locks2: " + response, 1, response.getLocks().size());
    lock = response.getLocks().get(0);
    Assert.assertEquals("Acquired timestamp didn't match", acquiredAt, lock.getAcquiredat());
    Assert.assertTrue("Expected new heartbeat (" + lock.getLastheartbeat() + ") == old heartbeat(" + heartbeatAt + ")", lock.getLastheartbeat() == heartbeatAt);
    txnBatch.close();
    int txnBatchSize = 200;
    txnBatch = connection.fetchTransactionBatch(txnBatchSize, writer);
    for (int i = 0; i < txnBatchSize; i++) {
        txnBatch.beginNextTransaction();
        if (i % 47 == 0) {
            txnBatch.heartbeat();
        }
        if (i % 10 == 0) {
            txnBatch.abort();
        } else {
            txnBatch.commit();
        }
        if (i % 37 == 0) {
            txnBatch.heartbeat();
        }
    }
}
Also used : ShowLocksRequest(org.apache.hadoop.hive.metastore.api.ShowLocksRequest) ShowLocksResponse(org.apache.hadoop.hive.metastore.api.ShowLocksResponse) ShowLocksResponseElement(org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement) Test(org.junit.Test)
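
The same lookup can be wrapped as a small probe. A minimal sketch, assuming a connected IMetaStoreClient; the HeartbeatProbeSketch class and the lastHeartbeat helper are illustrative names, not part of the test.

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
import org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement;

public class HeartbeatProbeSketch {

    // Hypothetical helper: reads the last-heartbeat timestamp of the single lock
    // held on the given db/table, mirroring what testHeartbeat checks.
    static long lastHeartbeat(IMetaStoreClient msClient, String db, String table) throws Exception {
        ShowLocksRequest request = new ShowLocksRequest();
        request.setDbname(db);
        request.setTablename(table);
        ShowLocksResponse response = msClient.showLocks(request);
        // The streaming test above expects exactly one lock for the connection.
        ShowLocksResponseElement lock = response.getLocks().get(0);
        return lock.getLastheartbeat();
    }
}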

Example 3 with ShowLocksResponse

use of org.apache.hadoop.hive.metastore.api.ShowLocksResponse in project hive by apache.

In the class TestTxnCommands, the method testTimeOutReaper:

@Test
public void testTimeOutReaper() throws Exception {
    runStatementOnDriver("start transaction");
    runStatementOnDriver("delete from " + Table.ACIDTBL + " where a = 5");
    // make sure currently running txn is considered aborted by housekeeper
    hiveConf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 2, TimeUnit.MILLISECONDS);
    MetastoreTaskThread houseKeeperService = new AcidHouseKeeperService();
    houseKeeperService.setConf(hiveConf);
    // this will abort the txn
    houseKeeperService.run();
    // this should fail because txn aborted due to timeout
    CommandProcessorException e = runStatementOnDriverNegative("delete from " + Table.ACIDTBL + " where a = 5");
    Assert.assertTrue("Actual: " + e.getMessage(), e.getMessage().contains("Transaction manager has aborted the transaction txnid:1"));
    // now test that we don't timeout locks we should not
    // heartbeater should be running in the background every 1/2 second
    hiveConf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 1, TimeUnit.SECONDS);
    // Have to reset the conf when we change it so that the change takes effect
    houseKeeperService.setConf(hiveConf);
    runStatementOnDriver("start transaction");
    runStatementOnDriver("select count(*) from " + Table.ACIDTBL + " where a = 17");
    pause(750);
    TxnStore txnHandler = TxnUtils.getTxnStore(hiveConf);
    // since there is txn open, we are heartbeating the txn not individual locks
    GetOpenTxnsInfoResponse txnsInfoResponse = txnHandler.getOpenTxnsInfo();
    Assert.assertEquals(2, txnsInfoResponse.getOpen_txns().size());
    TxnInfo txnInfo = null;
    for (TxnInfo ti : txnsInfoResponse.getOpen_txns()) {
        if (ti.getState() == TxnState.OPEN) {
            txnInfo = ti;
            break;
        }
    }
    Assert.assertNotNull(txnInfo);
    Assert.assertEquals(16, txnInfo.getId());
    Assert.assertEquals(TxnState.OPEN, txnInfo.getState());
    String s = TestTxnDbUtil.queryToString(hiveConf, "select TXN_STARTED, TXN_LAST_HEARTBEAT from TXNS where TXN_ID = " + txnInfo.getId(), false);
    String[] vals = s.split("\\s+");
    Assert.assertEquals("Didn't get expected timestamps", 2, vals.length);
    long lastHeartbeat = Long.parseLong(vals[1]);
    // these 2 values are equal when TXN entry is made.  Should never be equal after 1st heartbeat, which we
    // expect to have happened by now since HIVE_TXN_TIMEOUT=1sec
    Assert.assertNotEquals("Didn't see heartbeat happen", Long.parseLong(vals[0]), lastHeartbeat);
    ShowLocksResponse slr = txnHandler.showLocks(new ShowLocksRequest());
    TestDbTxnManager2.checkLock(LockType.SHARED_READ, LockState.ACQUIRED, "default", Table.ACIDTBL.name, null, slr.getLocks());
    pause(750);
    houseKeeperService.run();
    pause(750);
    slr = txnHandler.showLocks(new ShowLocksRequest());
    Assert.assertEquals("Unexpected lock count: " + slr, 1, slr.getLocks().size());
    TestDbTxnManager2.checkLock(LockType.SHARED_READ, LockState.ACQUIRED, "default", Table.ACIDTBL.name, null, slr.getLocks());
    pause(750);
    houseKeeperService.run();
    slr = txnHandler.showLocks(new ShowLocksRequest());
    Assert.assertEquals("Unexpected lock count: " + slr, 1, slr.getLocks().size());
    TestDbTxnManager2.checkLock(LockType.SHARED_READ, LockState.ACQUIRED, "default", Table.ACIDTBL.name, null, slr.getLocks());
    // should've done several heartbeats
    s = TestTxnDbUtil.queryToString(hiveConf, "select TXN_STARTED, TXN_LAST_HEARTBEAT from TXNS where TXN_ID = " + txnInfo.getId(), false);
    vals = s.split("\\s+");
    Assert.assertEquals("Didn't get expected timestamps", 2, vals.length);
    Assert.assertTrue("Heartbeat didn't progress: (old,new) (" + lastHeartbeat + "," + vals[1] + ")", lastHeartbeat < Long.parseLong(vals[1]));
    runStatementOnDriver("rollback");
    slr = txnHandler.showLocks(new ShowLocksRequest());
    Assert.assertEquals("Unexpected lock count", 0, slr.getLocks().size());
}
Also used : ShowLocksRequest(org.apache.hadoop.hive.metastore.api.ShowLocksRequest) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) TxnInfo(org.apache.hadoop.hive.metastore.api.TxnInfo) MetastoreTaskThread(org.apache.hadoop.hive.metastore.MetastoreTaskThread) AcidHouseKeeperService(org.apache.hadoop.hive.metastore.txn.AcidHouseKeeperService) TxnStore(org.apache.hadoop.hive.metastore.txn.TxnStore) ShowLocksResponse(org.apache.hadoop.hive.metastore.api.ShowLocksResponse) GetOpenTxnsInfoResponse(org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse) Test(org.junit.Test)
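
A minimal sketch of the same TxnStore query used for the checks above, filtered down to a single table. The LockFilterSketch class and the locksOnTable helper are illustrative names; only the showLocks call and the ShowLocksResponseElement accessors seen in these examples are assumed.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
import org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement;
import org.apache.hadoop.hive.metastore.txn.TxnStore;

public class LockFilterSketch {

    // Hypothetical helper: fetches all locks from the TxnStore and keeps only
    // those on the given table, the same data testTimeOutReaper inspects.
    static List<ShowLocksResponseElement> locksOnTable(TxnStore txnHandler, String table) throws MetaException {
        ShowLocksResponse slr = txnHandler.showLocks(new ShowLocksRequest());
        List<ShowLocksResponseElement> onTable = new ArrayList<>();
        for (ShowLocksResponseElement e : slr.getLocks()) {
            if (table.equalsIgnoreCase(e.getTablename())) {
                onTable.add(e);
            }
        }
        return onTable;
    }
}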

Example 4 with ShowLocksResponse

use of org.apache.hadoop.hive.metastore.api.ShowLocksResponse in project hive by apache.

In the class TestDbTxnManager, the method testWriteDynamicPartition:

@Test
public void testWriteDynamicPartition() throws Exception {
    WriteEntity we = addDynamicPartitionedOutput(newTable(true), WriteEntity.WriteType.INSERT);
    QueryPlan qp = new MockQueryPlan(this, HiveOperation.QUERY);
    txnMgr.openTxn(ctx, "fred");
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    /*Assert.assertEquals(1,
        TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
    */
    // Make sure we're locking the whole table, since this is dynamic partitioning
    ShowLocksResponse rsp = ((DbLockManager) txnMgr.getLockManager()).getLocks();
    List<ShowLocksResponseElement> elms = rsp.getLocks();
    Assert.assertEquals(1, elms.size());
    Assert.assertNotNull(elms.get(0).getTablename());
    Assert.assertNull(elms.get(0).getPartname());
    txnMgr.commitTxn();
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}
Also used : QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) ShowLocksResponse(org.apache.hadoop.hive.metastore.api.ShowLocksResponse) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) ShowLocksResponseElement(org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement) Test(org.junit.Test)
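
A minimal sketch of the table-level check this test performs, assuming a DbLockManager obtained from the transaction manager; the class and helper names are illustrative.

import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
import org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement;
import org.apache.hadoop.hive.ql.lockmgr.DbLockManager;

public class TableLockCheckSketch {

    // Hypothetical helper: true when the lock manager reports exactly one lock
    // and it is table-level (table name set, no partition name), which is what
    // the dynamic-partition test asserts.
    static boolean holdsSingleTableLevelLock(DbLockManager lockMgr) throws Exception {
        ShowLocksResponse rsp = lockMgr.getLocks();
        if (rsp.getLocks() == null || rsp.getLocks().size() != 1) {
            return false;
        }
        ShowLocksResponseElement elm = rsp.getLocks().get(0);
        return elm.getTablename() != null && elm.getPartname() == null;
    }
}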

Example 5 with ShowLocksResponse

use of org.apache.hadoop.hive.metastore.api.ShowLocksResponse in project hive by apache.

In the class TxnHandler, the method showLocks:

@RetrySemantics.ReadOnly
public ShowLocksResponse showLocks(ShowLocksRequest rqst) throws MetaException {
    try {
        Connection dbConn = null;
        ShowLocksResponse rsp = new ShowLocksResponse();
        List<ShowLocksResponseElement> elems = new ArrayList<>();
        List<LockInfoExt> sortedList = new ArrayList<>();
        PreparedStatement pst = null;
        try {
            dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
            String s = "SELECT \"HL_LOCK_EXT_ID\", \"HL_TXNID\", \"HL_DB\", \"HL_TABLE\", \"HL_PARTITION\", \"HL_LOCK_STATE\", " + "\"HL_LOCK_TYPE\", \"HL_LAST_HEARTBEAT\", \"HL_ACQUIRED_AT\", \"HL_USER\", \"HL_HOST\", \"HL_LOCK_INT_ID\"," + "\"HL_BLOCKEDBY_EXT_ID\", \"HL_BLOCKEDBY_INT_ID\", \"HL_AGENT_INFO\" FROM \"HIVE_LOCKS\"";
            // Some filters may have been specified in the SHOW LOCKS statement. Add them to the query.
            String dbName = rqst.getDbname();
            String tableName = rqst.getTablename();
            String partName = rqst.getPartname();
            List<String> params = new ArrayList<>();
            StringBuilder filter = new StringBuilder();
            if (dbName != null && !dbName.isEmpty()) {
                filter.append("\"HL_DB\"=?");
                params.add(dbName);
            }
            if (tableName != null && !tableName.isEmpty()) {
                if (filter.length() > 0) {
                    filter.append(" and ");
                }
                filter.append("\"HL_TABLE\"=?");
                params.add(tableName);
            }
            if (partName != null && !partName.isEmpty()) {
                if (filter.length() > 0) {
                    filter.append(" and ");
                }
                filter.append("\"HL_PARTITION\"=?");
                params.add(partName);
            }
            if (rqst.isSetTxnid()) {
                if (filter.length() > 0) {
                    filter.append(" and ");
                }
                filter.append("\"HL_TXNID\"=" + rqst.getTxnid());
            }
            String whereClause = filter.toString();
            if (!whereClause.isEmpty()) {
                s = s + " where " + whereClause;
            }
            pst = sqlGenerator.prepareStmtWithParameters(dbConn, s, params);
            LOG.debug("Going to execute query <" + s + ">");
            ResultSet rs = pst.executeQuery();
            while (rs.next()) {
                ShowLocksResponseElement e = new ShowLocksResponseElement();
                e.setLockid(rs.getLong(1));
                long txnid = rs.getLong(2);
                if (!rs.wasNull())
                    e.setTxnid(txnid);
                e.setDbname(rs.getString(3));
                e.setTablename(rs.getString(4));
                String partition = rs.getString(5);
                if (partition != null)
                    e.setPartname(partition);
                switch(rs.getString(6).charAt(0)) {
                    case LOCK_ACQUIRED:
                        e.setState(LockState.ACQUIRED);
                        break;
                    case LOCK_WAITING:
                        e.setState(LockState.WAITING);
                        break;
                    default:
                        throw new MetaException("Unknown lock state " + rs.getString(6).charAt(0));
                }
                char lockChar = rs.getString(7).charAt(0);
                LockType lockType = LockTypeUtil.getLockTypeFromEncoding(lockChar).orElseThrow(() -> new MetaException("Unknown lock type: " + lockChar));
                e.setType(lockType);
                e.setLastheartbeat(rs.getLong(8));
                long acquiredAt = rs.getLong(9);
                if (!rs.wasNull())
                    e.setAcquiredat(acquiredAt);
                e.setUser(rs.getString(10));
                e.setHostname(rs.getString(11));
                e.setLockIdInternal(rs.getLong(12));
                long id = rs.getLong(13);
                if (!rs.wasNull()) {
                    e.setBlockedByExtId(id);
                }
                id = rs.getLong(14);
                if (!rs.wasNull()) {
                    e.setBlockedByIntId(id);
                }
                e.setAgentInfo(rs.getString(15));
                sortedList.add(new LockInfoExt(e));
            }
        } catch (SQLException e) {
            checkRetryable(e, "showLocks(" + rqst + ")");
            throw new MetaException("Unable to select from transaction database " + StringUtils.stringifyException(e));
        } finally {
            closeStmt(pst);
            closeDbConn(dbConn);
        }
        // this ensures that "SHOW LOCKS" prints the locks in the same order as they are examined
        // by checkLock() - makes diagnostics easier.
        Collections.sort(sortedList, new LockInfoComparator());
        for (LockInfoExt lockInfoExt : sortedList) {
            elems.add(lockInfoExt.e);
        }
        rsp.setLocks(elems);
        return rsp;
    } catch (RetryException e) {
        return showLocks(rqst);
    }
}
Also used : SQLException(java.sql.SQLException) Connection(java.sql.Connection) ArrayList(java.util.ArrayList) PreparedStatement(java.sql.PreparedStatement) LockType(org.apache.hadoop.hive.metastore.api.LockType) ShowLocksResponseElement(org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement) ResultSet(java.sql.ResultSet) ShowLocksResponse(org.apache.hadoop.hive.metastore.api.ShowLocksResponse) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
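
A minimal sketch of driving this method with the transaction-id filter; the class and helper names are illustrative, and setTxnid is assumed to be the Thrift-generated setter that pairs with the isSetTxnid()/getTxnid() accessors used above.

import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
import org.apache.hadoop.hive.metastore.txn.TxnStore;

public class TxnLockQuerySketch {

    // Hypothetical helper: asks the store only for locks belonging to one
    // transaction, exercising the HL_TXNID filter branch shown above.
    static ShowLocksResponse locksForTxn(TxnStore txnHandler, long txnId) throws MetaException {
        ShowLocksRequest rqst = new ShowLocksRequest();
        // Assumed Thrift setter; it makes rqst.isSetTxnid() true so the
        // WHERE clause gains the "HL_TXNID"= filter.
        rqst.setTxnid(txnId);
        return txnHandler.showLocks(rqst);
    }
}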

Aggregations

ShowLocksResponse (org.apache.hadoop.hive.metastore.api.ShowLocksResponse) 16
ShowLocksRequest (org.apache.hadoop.hive.metastore.api.ShowLocksRequest) 13
Test (org.junit.Test) 10
ShowLocksResponseElement (org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement) 7
ArrayList (java.util.ArrayList) 4
TxnStore (org.apache.hadoop.hive.metastore.txn.TxnStore) 4
DbLockManager (org.apache.hadoop.hive.ql.lockmgr.DbLockManager) 3
SQLException (java.sql.SQLException) 2
HashSet (java.util.HashSet) 2
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 2
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 2
HiveLockManager (org.apache.hadoop.hive.ql.lockmgr.HiveLockManager) 2
DataOutputStream (java.io.DataOutputStream) 1
FileNotFoundException (java.io.FileNotFoundException) 1
IOException (java.io.IOException) 1
URISyntaxException (java.net.URISyntaxException) 1
Connection (java.sql.Connection) 1
PreparedStatement (java.sql.PreparedStatement) 1
ResultSet (java.sql.ResultSet) 1
ExecutionException (java.util.concurrent.ExecutionException) 1