Usage example of org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse in the Apache Hive project:
method testErrorHandling of class TestStreaming.
/**
 * Exercises the error-handling paths of {@code HiveStreamingConnection}:
 * <ul>
 *   <li>beginTransaction() on an already-closed connection must fail;</li>
 *   <li>write()/commitTransaction() without a prior beginTransaction() must fail;</li>
 *   <li>a simulated writer fault during write() and during commitTransaction()
 *       must surface as {@code StreamingIOFailure} and abort the transaction;</li>
 *   <li>after each failure the metastore's open-transaction list and high water
 *       mark reflect the expected ABORTED transactions.</li>
 * </ul>
 * The expected HWM values (17, 19, 21) depend on the transactions opened by the
 * preceding tests in this class, so this test is order-sensitive.
 */
@Test
public void testErrorHandling() throws Exception {
  String agentInfo = "UT_" + Thread.currentThread().getName();
  runCmdOnDriver("create database testErrors");
  runCmdOnDriver("use testErrors");
  runCmdOnDriver("create table T(a int, b int) clustered by (b) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')");
  StrictDelimitedInputWriter innerWriter = StrictDelimitedInputWriter.newBuilder().withFieldDelimiter(',').build();
  HiveStreamingConnection connection = HiveStreamingConnection.newBuilder().withDatabase("testErrors").withTable("T").withAgentInfo(agentInfo).withTransactionBatchSize(2).withRecordWriter(innerWriter).withHiveConf(conf).connect();
  connection.beginTransaction();
  // FaultyWriter wraps the real writer and can inject simulated I/O failures on demand.
  FaultyWriter writer = new FaultyWriter(innerWriter);
  // Close immediately: the open txn batch (size 2) is abandoned, so both txns abort.
  connection.close();
  Exception expectedEx = null;
  GetOpenTxnsInfoResponse r = msClient.showTxns();
  Assert.assertEquals("HWM didn't match", 17, r.getTxn_high_water_mark());
  List<TxnInfo> ti = r.getOpen_txns();
  Assert.assertEquals("wrong status ti(0)", org.apache.hadoop.hive.metastore.api.TxnState.ABORTED, ti.get(0).getState());
  Assert.assertEquals("wrong status ti(1)", org.apache.hadoop.hive.metastore.api.TxnState.ABORTED, ti.get(1).getState());
  // Operating on a closed connection must be rejected.
  try {
    connection.beginTransaction();
  } catch (StreamingException ex) {
    expectedEx = ex;
  }
  Assert.assertTrue("beginTransaction() should have failed", expectedEx != null && expectedEx.getMessage().contains("Streaming connection is closed already."));
  connection = HiveStreamingConnection.newBuilder().withDatabase("testErrors").withTable("T").withAgentInfo(agentInfo).withTransactionBatchSize(2).withRecordWriter(innerWriter).withHiveConf(conf).connect();
  // write() before beginTransaction() must be rejected.
  expectedEx = null;
  try {
    connection.write("name0,1,Hello streaming".getBytes());
  } catch (StreamingException ex) {
    expectedEx = ex;
  }
  Assert.assertTrue("write() should have failed", expectedEx != null && expectedEx.getMessage().equals("Transaction batch is null. Missing beginTransaction?"));
  // commitTransaction() before beginTransaction() must be rejected.
  expectedEx = null;
  try {
    connection.commitTransaction();
  } catch (StreamingException ex) {
    expectedEx = ex;
  }
  Assert.assertTrue("commitTransaction() should have failed", expectedEx != null && expectedEx.getMessage().equals("Transaction batch is null. Missing beginTransaction?"));
  // Happy path through the faulty writer with errors disabled: commit succeeds.
  connection = HiveStreamingConnection.newBuilder().withDatabase("testErrors").withTable("T").withAgentInfo(agentInfo).withTransactionBatchSize(2).withRecordWriter(writer).withHiveConf(conf).connect();
  connection.beginTransaction();
  connection.write("name2,2,Welcome to streaming".getBytes());
  connection.write("name4,2,more Streaming unlimited".getBytes());
  connection.write("name5,2,even more Streaming unlimited".getBytes());
  connection.commitTransaction();
  // test toString()
  String s = connection.toTransactionString();
  Assert.assertTrue("Actual: " + s, s.contains("LastUsed " + JavaUtils.txnIdToString(connection.getCurrentTxnId())));
  Assert.assertTrue("Actual: " + s, s.contains("TxnStatus[CO]"));
  // Inject a fault during write(): expect StreamingIOFailure and the txn to abort.
  expectedEx = null;
  connection.beginTransaction();
  writer.enableErrors();
  try {
    connection.write("name6,2,Doh!".getBytes());
  } catch (StreamingIOFailure ex) {
    expectedEx = ex;
  }
  Assert.assertTrue("Wrong exception: " + (expectedEx != null ? expectedEx.getMessage() : "?"), expectedEx != null && expectedEx.getMessage().contains("Simulated fault occurred"));
  // The failed write aborted the txn, so a subsequent commit must be rejected.
  expectedEx = null;
  try {
    connection.commitTransaction();
  } catch (StreamingException ex) {
    expectedEx = ex;
  }
  Assert.assertTrue("commitTransaction() should have failed", expectedEx != null && expectedEx.getMessage().equals("Transaction state is not OPEN. Missing beginTransaction?"));
  // test toString()
  s = connection.toTransactionString();
  Assert.assertTrue("Actual: " + s, s.contains("LastUsed " + JavaUtils.txnIdToString(connection.getCurrentTxnId())));
  Assert.assertTrue("Actual: " + s, s.contains("TxnStatus[CA]"));
  r = msClient.showTxns();
  Assert.assertEquals("HWM didn't match", 19, r.getTxn_high_water_mark());
  ti = r.getOpen_txns();
  Assert.assertEquals("wrong status ti(0)", org.apache.hadoop.hive.metastore.api.TxnState.ABORTED, ti.get(0).getState());
  Assert.assertEquals("wrong status ti(1)", org.apache.hadoop.hive.metastore.api.TxnState.ABORTED, ti.get(1).getState());
  // txnid 3 was committed and thus not open
  Assert.assertEquals("wrong status ti(2)", org.apache.hadoop.hive.metastore.api.TxnState.ABORTED, ti.get(2).getState());
  connection.close();
  // Inject a fault during commitTransaction() instead of write().
  writer.disableErrors();
  connection = HiveStreamingConnection.newBuilder().withDatabase("testErrors").withTable("T").withAgentInfo(agentInfo).withTransactionBatchSize(2).withRecordWriter(writer).withHiveConf(conf).connect();
  connection.beginTransaction();
  connection.write("name2,2,Welcome to streaming".getBytes());
  writer.enableErrors();
  expectedEx = null;
  try {
    connection.commitTransaction();
  } catch (StreamingIOFailure ex) {
    expectedEx = ex;
  }
  Assert.assertTrue("Wrong exception: " + (expectedEx != null ? expectedEx.getMessage() : "?"), expectedEx != null && expectedEx.getMessage().contains("Simulated fault occurred"));
  r = msClient.showTxns();
  Assert.assertEquals("HWM didn't match", 21, r.getTxn_high_water_mark());
  ti = r.getOpen_txns();
  Assert.assertEquals("wrong status ti(3)", org.apache.hadoop.hive.metastore.api.TxnState.ABORTED, ti.get(3).getState());
  Assert.assertEquals("wrong status ti(4)", org.apache.hadoop.hive.metastore.api.TxnState.ABORTED, ti.get(4).getState());
}
Aggregations