Use of org.apache.hive.hcatalog.streaming.TransactionBatch in the Apache NiFi project.
Class HiveWriter, method nextTxnBatch:
/**
 * Fetches a fresh {@link TransactionBatch} from the streaming connection and
 * positions it on its first transaction.
 *
 * @param recordWriter the writer the new batch will serialize records with
 * @return a batch that has already begun its first transaction
 * @throws InterruptedException if interrupted while waiting on the fetch
 * @throws TxnBatchFailure if the fetch times out or the streaming layer fails
 */
protected TransactionBatch nextTxnBatch(final RecordWriter recordWriter) throws InterruptedException, TxnBatchFailure {
    LOG.debug("Fetching new Txn Batch for {}", endPoint);
    final TransactionBatch txnBatch;
    try {
        // fetchTransactionBatch can block, so it runs under the call timeout.
        txnBatch = callWithTimeout(() -> connection.fetchTransactionBatch(txnsPerBatch, recordWriter));
        txnBatch.beginNextTransaction();
        LOG.debug("Acquired {}. Switching to first txn", txnBatch);
    } catch (TimeoutException | StreamingException e) {
        // Wrap both failure modes with the endpoint for caller context.
        throw new TxnBatchFailure(endPoint, e);
    }
    return txnBatch;
}
Use of org.apache.hive.hcatalog.streaming.TransactionBatch in the Apache NiFi project.
Class HiveWriterTest, method setup:
// Builds a fully mocked collaborator graph for HiveWriter so tests run with
// no real Hive metastore, HDFS, or thread pool.
@Before
public void setup() throws Exception {
// Mock every external dependency the writer touches.
hiveEndPoint = mock(HiveEndPoint.class);
txnsPerBatch = 100;
autoCreatePartitions = true;
// Zero timeout: callWithTimeout paths run synchronously in the test.
callTimeout = 0;
executorService = mock(ExecutorService.class);
streamingConnection = mock(StreamingConnection.class);
transactionBatch = mock(TransactionBatch.class);
userGroupInformation = mock(UserGroupInformation.class);
hiveConf = mock(HiveConf.class);
recordWriter = mock(RecordWriter.class);
recordWriterCallable = mock(Callable.class);
when(recordWriterCallable.call()).thenReturn(recordWriter);
// Wire the happy path: endpoint -> connection -> transaction batch.
when(hiveEndPoint.newConnection(autoCreatePartitions, hiveConf, userGroupInformation)).thenReturn(streamingConnection);
when(streamingConnection.fetchTransactionBatch(txnsPerBatch, recordWriter)).thenReturn(transactionBatch);
// Make the executor run submitted Callables inline: the returned Future's
// get()/get(timeout) answers invoke the original Callable on the test thread.
when(executorService.submit(isA(Callable.class))).thenAnswer(invocation -> {
Future future = mock(Future.class);
Answer<Object> answer = i -> ((Callable) invocation.getArguments()[0]).call();
when(future.get()).thenAnswer(answer);
when(future.get(anyLong(), any(TimeUnit.class))).thenAnswer(answer);
return future;
});
// doAs simply runs the privileged action on the calling thread, mirroring
// UserGroupInformation's checked-exception wrapping semantics.
when(userGroupInformation.doAs(isA(PrivilegedExceptionAction.class))).thenAnswer(invocation -> {
try {
try {
return ((PrivilegedExceptionAction) invocation.getArguments()[0]).run();
} catch (UncheckedExecutionException e) {
// Creation of strict json writer will fail due to external deps, this gives us chance to catch it
for (StackTraceElement stackTraceElement : e.getStackTrace()) {
if (stackTraceElement.toString().startsWith("org.apache.hive.hcatalog.streaming.StrictJsonWriter.<init>(")) {
// Substitute the mocked writer when the real one cannot be built.
return recordWriterCallable.call();
}
}
throw e;
}
} catch (IOException | Error | RuntimeException | InterruptedException e) {
// These propagate unchanged, as real doAs would let them.
throw e;
} catch (Throwable e) {
// Anything else is wrapped, matching doAs's undeclared-throwable contract.
throw new UndeclaredThrowableException(e);
}
});
initWriter();
}
Use of org.apache.hive.hcatalog.streaming.TransactionBatch in the Apache Hive project.
Class TestCompactor, method writeBatch:
/**
 * Writes sample rows through the streaming API in a two-transaction batch.
 * When {@code closeEarly} is true only the first transaction is committed and
 * the batch is left open.
 * NOTE(review): the batch is never closed on the closeEarly path — presumably
 * deliberate, to exercise compaction against an open batch; confirm.
 *
 * @param connection open streaming connection to fetch the batch from
 * @param writer     delimited writer used to encode the rows
 * @param closeEarly if true, stop after the first committed transaction
 */
private void writeBatch(StreamingConnection connection, DelimitedInputWriter writer, boolean closeEarly) throws InterruptedException, StreamingException {
    final TransactionBatch txnBatch = connection.fetchTransactionBatch(2, writer);
    txnBatch.beginNextTransaction();
    for (final String row : new String[]{"50,Kiev", "51,St. Petersburg", "44,Boston"}) {
        txnBatch.write(row.getBytes());
    }
    txnBatch.commit();
    if (closeEarly) {
        return;
    }
    txnBatch.beginNextTransaction();
    for (final String row : new String[]{"52,Tel Aviv", "53,Atlantis", "53,Boston"}) {
        txnBatch.write(row.getBytes());
    }
    txnBatch.commit();
    txnBatch.close();
}
Use of org.apache.hive.hcatalog.streaming.TransactionBatch in the Apache Storm project.
Class HiveWriter, method nextTxnBatch:
/**
 * Fetches a fresh {@link TransactionBatch} from the streaming connection and
 * positions it on its first transaction.
 *
 * @param recordWriter the writer the new batch will serialize records with
 * @return a batch that has already begun its first transaction
 * @throws InterruptedException if interrupted while waiting on the fetch
 * @throws TxnBatchFailure if the fetch times out or the streaming layer fails
 */
private TransactionBatch nextTxnBatch(final RecordWriter recordWriter) throws InterruptedException, TxnBatchFailure {
    LOG.debug("Fetching new Txn Batch for {}", endPoint);
    TransactionBatch batch = null;
    try {
        batch = callWithTimeout(new CallRunner<TransactionBatch>() {
            @Override
            public TransactionBatch call() throws Exception {
                // fetchTransactionBatch could block, hence the timeout wrapper.
                return connection.fetchTransactionBatch(txnsPerBatch, recordWriter);
            }
        });
        batch.beginNextTransaction();
        LOG.debug("Acquired {}. Switching to first txn", batch);
    } catch (TimeoutException | StreamingException e) {
        // Both failure modes were handled identically in separate catch blocks;
        // multi-catch removes the duplication and wraps with endpoint context.
        throw new TxnBatchFailure(endPoint, e);
    }
    return batch;
}
Aggregations