Use of org.apache.hive.hcatalog.streaming.StreamingException in project nifi by apache: the class HiveWriter, method nextTxnBatch.
/**
 * Fetches a fresh transaction batch from the Hive streaming connection and
 * positions it on its first transaction.
 *
 * @param recordWriter the writer the new batch will use for records
 * @return the newly acquired batch, already switched to its first transaction
 * @throws InterruptedException if the timed call is interrupted
 * @throws TxnBatchFailure if fetching the batch fails or times out
 */
protected TransactionBatch nextTxnBatch(final RecordWriter recordWriter) throws InterruptedException, TxnBatchFailure {
LOG.debug("Fetching new Txn Batch for {}", endPoint);
try {
// fetchTransactionBatch could block, so run it under the timeout helper
final TransactionBatch newBatch =
callWithTimeout(() -> connection.fetchTransactionBatch(txnsPerBatch, recordWriter));
newBatch.beginNextTransaction();
LOG.debug("Acquired {}. Switching to first txn", newBatch);
return newBatch;
} catch (TimeoutException | StreamingException e) {
// wrap both failure modes in the caller-facing batch failure
throw new TxnBatchFailure(endPoint, e);
}
}
Use of org.apache.hive.hcatalog.streaming.StreamingException in project nifi by apache: the class HiveWriter, method write.
/**
 * Write the record data to Hive within the current transaction batch.
 *
 * @param record the serialized record bytes to write
 * @throws WriteFailure if the underlying streaming write fails or times out
 * @throws SerializationError if the record cannot be serialized for the target endpoint
 * @throws InterruptedException if the write operation is interrupted
 * @throws IllegalStateException if this writer has already been closed
 */
public synchronized void write(final byte[] record) throws WriteFailure, SerializationError, InterruptedException {
if (closed) {
throw new IllegalStateException("This hive streaming writer was closed " + "and thus no longer able to write : " + endPoint);
}
// write the tuple
try {
LOG.debug("Writing event to {}", endPoint);
// could block until the timeout; lambda matches the CallRunner usage in nextTxnBatch
callWithTimeout(() -> {
txnBatch.write(record);
totalRecords++;
return null;
});
} catch (SerializationError se) {
// re-wrap with endpoint context so the failure can be traced to its target
throw new SerializationError(endPoint.toString() + " SerializationError", se);
} catch (StreamingException | TimeoutException e) {
throw new WriteFailure(endPoint, txnBatch.getCurrentTxnId(), e);
}
}
Use of org.apache.hive.hcatalog.streaming.StreamingException in project nifi by apache: the class HiveWriterTest, method testRecordWriterStreamingException.
// Verifies that a StreamingException thrown while creating the record writer
// surfaces as a ConnectFailure whose cause is the original streaming exception.
@Test(expected = HiveWriter.ConnectFailure.class)
public void testRecordWriterStreamingException() throws Exception {
recordWriterCallable = mock(Callable.class);
final StreamingException expectedCause = new StreamingException("Test Exception");
when(recordWriterCallable.call()).thenThrow(expectedCause);
try {
initWriter();
} catch (HiveWriter.ConnectFailure e) {
// the wrapper must preserve the original exception as its cause
assertEquals(expectedCause, e.getCause());
throw e;
}
}
Use of org.apache.hive.hcatalog.streaming.StreamingException in project storm by apache: the class HiveWriter, method write.
/**
 * Write the record data to Hive within the current transaction batch.
 *
 * @param record the serialized record bytes to write
 * @throws WriteFailure if the underlying streaming write fails or times out
 * @throws SerializationError if the record cannot be serialized for the target endpoint
 * @throws InterruptedException if the write operation is interrupted
 * @throws IllegalStateException if this writer has already been closed
 */
public synchronized void write(final byte[] record) throws WriteFailure, SerializationError, InterruptedException {
if (closed) {
throw new IllegalStateException("This hive streaming writer was closed " + "and thus no longer able to write : " + endPoint);
}
// write the tuple
try {
LOG.debug("Writing event to {}", endPoint);
callWithTimeout(new CallRunner<Void>() {
@Override
public Void call() throws StreamingException, InterruptedException {
txnBatch.write(record);
totalRecords++;
return null;
}
});
} catch (SerializationError se) {
// re-wrap with endpoint context so the failure can be traced to its target
throw new SerializationError(endPoint.toString() + " SerializationError", se);
} catch (StreamingException | TimeoutException e) {
// both failure modes map to the same caller-facing WriteFailure
throw new WriteFailure(endPoint, txnBatch.getCurrentTxnId(), e);
}
}
Use of org.apache.hive.hcatalog.streaming.StreamingException in project storm by apache: the class HiveWriter, method nextTxnBatch.
/**
 * Fetches a fresh transaction batch from the Hive streaming connection and
 * positions it on its first transaction.
 *
 * @param recordWriter the writer the new batch will use for records
 * @return the newly acquired batch, already switched to its first transaction
 * @throws InterruptedException if the timed call is interrupted
 * @throws TxnBatchFailure if fetching the batch fails or times out
 */
private TransactionBatch nextTxnBatch(final RecordWriter recordWriter) throws InterruptedException, TxnBatchFailure {
LOG.debug("Fetching new Txn Batch for {}", endPoint);
TransactionBatch batch = null;
try {
batch = callWithTimeout(new CallRunner<TransactionBatch>() {
@Override
public TransactionBatch call() throws Exception {
// could block
return connection.fetchTransactionBatch(txnsPerBatch, recordWriter);
}
});
batch.beginNextTransaction();
LOG.debug("Acquired {}. Switching to first txn", batch);
} catch (TimeoutException | StreamingException e) {
// identical handling for both failure modes, collapsed into one multi-catch
throw new TxnBatchFailure(endPoint, e);
}
return batch;
}
Aggregations