Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
The class TestHiveDecimalParse, method testDecimalType4.
  @Test
  public void testDecimalType4() throws ParseException {
    String query = "create table `dec` (d decimal(0,7))";
    Driver driver = createDriver();
    try {
      driver.compile(query, true, false);
    } catch (CommandProcessorException cpe) {
      Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
      Assert.assertTrue(cpe.getMessage(), cpe.getMessage().contains("Decimal precision out of allowed range [1,38]"));
      return;
    }
    Assert.assertTrue("Expected to receive an exception", false);
  }
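
The test above drives compile() directly and inspects the thrown CommandProcessorException. The same negative check can be written more compactly with JUnit's Assert.assertThrows (available from JUnit 4.13); the sketch below reuses the snippet's createDriver() helper and is illustrative rather than the project's actual code:

  // Sketch only: equivalent check using Assert.assertThrows (JUnit 4.13+), reusing createDriver() from above.
  @Test
  public void testDecimalType4WithAssertThrows() throws Exception {
    Driver driver = createDriver();
    CommandProcessorException cpe = Assert.assertThrows(CommandProcessorException.class,
        () -> driver.compile("create table `dec` (d decimal(0,7))", true, false));
    Assert.assertTrue(cpe.getMessage(),
        cpe.getMessage().contains("Decimal precision out of allowed range [1,38]"));
  }
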
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
The class TestTxnCommands, method testTimeOutReaper.
  @Test
  public void testTimeOutReaper() throws Exception {
    runStatementOnDriver("start transaction");
    runStatementOnDriver("delete from " + Table.ACIDTBL + " where a = 5");
    // make sure currently running txn is considered aborted by housekeeper
    hiveConf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 2, TimeUnit.MILLISECONDS);
    MetastoreTaskThread houseKeeperService = new AcidHouseKeeperService();
    houseKeeperService.setConf(hiveConf);
    // this will abort the txn
    houseKeeperService.run();
    // this should fail because txn aborted due to timeout
    CommandProcessorException e = runStatementOnDriverNegative("delete from " + Table.ACIDTBL + " where a = 5");
    Assert.assertTrue("Actual: " + e.getMessage(), e.getMessage().contains("Transaction manager has aborted the transaction txnid:1"));
    // now test that we don't timeout locks we should not
    // heartbeater should be running in the background every 1/2 second
    hiveConf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 1, TimeUnit.SECONDS);
    // Have to reset the conf when we change it so that the change takes effect
    houseKeeperService.setConf(hiveConf);
    runStatementOnDriver("start transaction");
    runStatementOnDriver("select count(*) from " + Table.ACIDTBL + " where a = 17");
    pause(750);
    TxnStore txnHandler = TxnUtils.getTxnStore(hiveConf);
    // since there is txn open, we are heartbeating the txn not individual locks
    GetOpenTxnsInfoResponse txnsInfoResponse = txnHandler.getOpenTxnsInfo();
    Assert.assertEquals(2, txnsInfoResponse.getOpen_txns().size());
    TxnInfo txnInfo = null;
    for (TxnInfo ti : txnsInfoResponse.getOpen_txns()) {
      if (ti.getState() == TxnState.OPEN) {
        txnInfo = ti;
        break;
      }
    }
    Assert.assertNotNull(txnInfo);
    Assert.assertEquals(16, txnInfo.getId());
    Assert.assertEquals(TxnState.OPEN, txnInfo.getState());
    String s = TestTxnDbUtil.queryToString(hiveConf, "select TXN_STARTED, TXN_LAST_HEARTBEAT from TXNS where TXN_ID = " + txnInfo.getId(), false);
    String[] vals = s.split("\\s+");
    Assert.assertEquals("Didn't get expected timestamps", 2, vals.length);
    long lastHeartbeat = Long.parseLong(vals[1]);
    // these 2 values are equal when TXN entry is made. Should never be equal after 1st heartbeat, which we
    // expect to have happened by now since HIVE_TXN_TIMEOUT=1sec
    Assert.assertNotEquals("Didn't see heartbeat happen", Long.parseLong(vals[0]), lastHeartbeat);
    ShowLocksResponse slr = txnHandler.showLocks(new ShowLocksRequest());
    TestDbTxnManager2.checkLock(LockType.SHARED_READ, LockState.ACQUIRED, "default", Table.ACIDTBL.name, null, slr.getLocks());
    pause(750);
    houseKeeperService.run();
    pause(750);
    slr = txnHandler.showLocks(new ShowLocksRequest());
    Assert.assertEquals("Unexpected lock count: " + slr, 1, slr.getLocks().size());
    TestDbTxnManager2.checkLock(LockType.SHARED_READ, LockState.ACQUIRED, "default", Table.ACIDTBL.name, null, slr.getLocks());
    pause(750);
    houseKeeperService.run();
    slr = txnHandler.showLocks(new ShowLocksRequest());
    Assert.assertEquals("Unexpected lock count: " + slr, 1, slr.getLocks().size());
    TestDbTxnManager2.checkLock(LockType.SHARED_READ, LockState.ACQUIRED, "default", Table.ACIDTBL.name, null, slr.getLocks());
    // should've done several heartbeats
    s = TestTxnDbUtil.queryToString(hiveConf, "select TXN_STARTED, TXN_LAST_HEARTBEAT from TXNS where TXN_ID = " + txnInfo.getId(), false);
    vals = s.split("\\s+");
    Assert.assertEquals("Didn't get expected timestamps", 2, vals.length);
    Assert.assertTrue("Heartbeat didn't progress: (old,new) (" + lastHeartbeat + "," + vals[1] + ")", lastHeartbeat < Long.parseLong(vals[1]));
    runStatementOnDriver("rollback");
    slr = txnHandler.showLocks(new ShowLocksRequest());
    Assert.assertEquals("Unexpected lock count", 0, slr.getLocks().size());
  }
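
The snippet leans on helpers from the test class that are not shown here: runStatementOnDriverNegative runs a statement that is expected to fail and returns the resulting CommandProcessorException, and pause() sleeps for the given number of milliseconds. A rough sketch of what they could look like, assuming a Driver field named d (the field name and messages are assumptions, not the project's actual code):

  // Hypothetical sketches of helpers used above; the real implementations live in the test base class.
  private CommandProcessorException runStatementOnDriverNegative(String stmt) {
    try {
      d.run(stmt);  // assumed Driver field named d; Driver.run throws CommandProcessorException on failure
    } catch (CommandProcessorException e) {
      return e;
    }
    throw new RuntimeException("Expected statement to fail: " + stmt);
  }

  private static void pause(long millis) throws InterruptedException {
    Thread.sleep(millis);
  }
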
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
The class TestTxnCommands, method testErrors.
  /**
   * add tests for all transitions - AC=t, AC=t, AC=f, commit (for example)
   * @throws Exception
   */
  @Test
  public void testErrors() throws Exception {
    runStatementOnDriver("start transaction");
    CommandProcessorException e1 = runStatementOnDriverNegative("create table foo(x int, y int)");
    Assert.assertEquals("Expected DDL to fail in an open txn", ErrorMsg.OP_NOT_ALLOWED_IN_TXN.getErrorCode(), e1.getErrorCode());
    CommandProcessorException e2 = runStatementOnDriverNegative("update " + Table.ACIDTBL + " set a = 1 where b != 1");
    Assert.assertEquals("Expected update of bucket column to fail", "FAILED: SemanticException [Error 10302]: Updating values of bucketing columns is not supported. Column a.", e2.getMessage());
    Assert.assertEquals("Expected update of bucket column to fail", ErrorMsg.UPDATE_CANNOT_UPDATE_BUCKET_VALUE.getErrorCode(), e2.getErrorCode());
    // not allowed w/o an open txn
    CommandProcessorException e3 = runStatementOnDriverNegative("commit");
    Assert.assertEquals("Error didn't match: " + e3, ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN.getErrorCode(), e3.getErrorCode());
    // not allowed w/o an open txn
    CommandProcessorException e4 = runStatementOnDriverNegative("rollback");
    Assert.assertEquals("Error didn't match: " + e4, ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN.getErrorCode(), e4.getErrorCode());
    runStatementOnDriver("start transaction");
    // not allowed in a txn
    CommandProcessorException e5 = runStatementOnDriverNegative("start transaction");
    Assert.assertEquals("Expected start transaction to fail", ErrorMsg.OP_NOT_ALLOWED_IN_TXN.getErrorCode(), e5.getErrorCode());
    // ok since the previously opened txn was killed
    runStatementOnDriver("start transaction");
    runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) values(1,2)");
    List<String> rs0 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
    Assert.assertEquals("Can't see my own write", 1, rs0.size());
    runStatementOnDriver("commit work");
    rs0 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
    Assert.assertEquals("Can't see my own write", 1, rs0.size());
  }
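
For completeness, the positive helper runStatementOnDriver presumably runs the statement and fetches the result rows. A sketch, again assuming a Driver field named d and a raw use of getResults; this is an illustration, not the project's actual helper:

  // Hypothetical sketch of the positive helper used above: run a statement and return its rows as strings.
  private List<String> runStatementOnDriver(String stmt) throws Exception {
    d.run(stmt);                       // assumed Driver field named d
    List<String> rs = new ArrayList<>();
    d.getResults(rs);                  // fetch whatever the statement produced (empty for DML/DDL)
    return rs;
  }
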
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
The class TestTxnCommands, method testSimpleAcidInsert.
  @Test
  public void testSimpleAcidInsert() throws Exception {
    int[][] rows1 = { { 1, 2 }, { 3, 4 } };
    runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(rows1));
    // List<String> rs = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
    // Assert.assertEquals("Data didn't match in autocommit=true (rs)", stringifyValues(rows1), rs);
    runStatementOnDriver("START TRANSACTION");
    int[][] rows2 = { { 5, 6 }, { 7, 8 } };
    runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(rows2));
    List<String> allData = stringifyValues(rows1);
    allData.addAll(stringifyValues(rows2));
    List<String> rs0 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
    Assert.assertEquals("Data didn't match inside tx (rs0)", allData, rs0);
    runStatementOnDriver("COMMIT WORK");
    dumpTableData(Table.ACIDTBL, 1, 0);
    dumpTableData(Table.ACIDTBL, 2, 0);
    runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
    // txn started implicitly by previous statement
    CommandProcessorException e = runStatementOnDriverNegative("COMMIT");
    Assert.assertEquals("Error didn't match: " + e, ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN.getErrorCode(), e.getErrorCode());
    List<String> rs1 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
    Assert.assertEquals("Data didn't match (rs1)", allData, rs1);
  }
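
makeValuesClause and stringifyValues are small data helpers from the test class and are not shown in these snippets. Plausible sketches follow; the real helpers may differ in detail (in particular, the exact formatting and ordering of the result rows are assumptions):

  // Hypothetical sketches of the data helpers used above.
  private static String makeValuesClause(int[][] rows) {
    // turns {{1,2},{3,4}} into "values(1,2),(3,4)"
    StringBuilder sb = new StringBuilder("values");
    for (int i = 0; i < rows.length; i++) {
      sb.append(i == 0 ? "(" : ",(");
      for (int j = 0; j < rows[i].length; j++) {
        if (j > 0) {
          sb.append(',');
        }
        sb.append(rows[i][j]);
      }
      sb.append(')');
    }
    return sb.toString();
  }

  private static List<String> stringifyValues(int[][] rows) {
    // turns {{1,2},{3,4}} into ["1\t2", "3\t4"], matching Hive's tab-separated result rows
    List<String> result = new ArrayList<>();
    for (int[] row : rows) {
      StringBuilder sb = new StringBuilder();
      for (int i = 0; i < row.length; i++) {
        if (i > 0) {
          sb.append('\t');
        }
        sb.append(row[i]);
      }
      result.add(sb.toString());
    }
    return result;
  }
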
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.
The class TestCompileLock, method compileAndRespond.
  private List<Integer> compileAndRespond(String query, boolean reuseSession, int threadCount) throws Exception {
    List<Integer> responseList = new ArrayList<>();
    SessionState sessionState = new SessionState(conf);
    List<Callable<CommandProcessorResponse>> callables = new ArrayList<>();
    for (int i = 0; i < threadCount; i++) {
      callables.add(() -> {
        SessionState ss = (reuseSession) ? sessionState : new SessionState(conf);
        SessionState.setCurrentSessionState(ss);
        CommandProcessorResponse response;
        try {
          response = driver.compileAndRespond(query);
        } finally {
          SessionState.detachSession();
        }
        return response;
      });
    }
    ExecutorService pool = Executors.newFixedThreadPool(callables.size());
    try {
      List<Future<CommandProcessorResponse>> futures = pool.invokeAll(callables);
      for (Future<CommandProcessorResponse> future : futures) {
        try {
          future.get();
          responseList.add(0);
        } catch (ExecutionException ex) {
          responseList.add(ex.getCause() instanceof CommandProcessorException
              ? ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode() : CONCURRENT_COMPILATION);
        }
      }
    } finally {
      pool.shutdown();
    }
    return responseList;
  }
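
A hypothetical caller of this helper might look like the following: it compiles the same query from several threads in separate sessions and verifies each thread either succeeded (0) or was rejected with one of the two codes the helper can report. The query and expectations are illustrative, not taken from the real test class:

  // Sketch only: example usage of compileAndRespond, relying on the class's driver field,
  // the CONCURRENT_COMPILATION constant, and ErrorMsg.COMPILE_LOCK_TIMED_OUT seen above.
  @Test
  public void testParallelCompilationSketch() throws Exception {
    List<Integer> responses = compileAndRespond("select 1", false, 4);
    Assert.assertEquals(4, responses.size());
    for (Integer code : responses) {
      Assert.assertTrue("Unexpected response code: " + code,
          code == 0
              || code == ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode()
              || code == CONCURRENT_COMPILATION);
    }
  }
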