Example usage of org.apache.storm.tuple.Tuple in the Apache Storm project.
From the class OpenTsdbBolt, method execute:
@Override
public void execute(Tuple tuple) {
    // Buffers incoming tuples into a batch and, when the batch is due to flush,
    // writes all collected datapoints to OpenTSDB in one call. Tuples are only
    // acked/failed once the write outcome for their datapoints is known.
    try {
        if (batchHelper.shouldHandle(tuple)) {
            final List<OpenTsdbMetricDatapoint> metricDataPoints = getMetricPoints(tuple);
            // A single tuple can yield multiple datapoints; remember each
            // datapoint's source tuple so write failures can be attributed.
            for (OpenTsdbMetricDatapoint metricDataPoint : metricDataPoints) {
                metricPointsWithTuple.put(metricDataPoint, tuple);
            }
            batchHelper.addBatch(tuple);
        }
        if (batchHelper.shouldFlush()) {
            LOG.debug("Sending metrics of size [{}]", metricPointsWithTuple.size());
            ClientResponse.Details clientResponse = openTsdbClient.writeMetricPoints(metricPointsWithTuple.keySet());
            // Per-tuple failure handling only applies when configured and the
            // client actually reported failed datapoints.
            if (failTupleForFailedMetrics && clientResponse != null && clientResponse.getFailed() > 0) {
                final List<ClientResponse.Details.Error> errors = clientResponse.getErrors();
                LOG.error("Some of the metric points failed with errors: [{}]", clientResponse);
                if (errors != null && !errors.isEmpty()) {
                    // Collect the tuples whose datapoints were rejected...
                    Set<Tuple> failedTuples = new HashSet<>();
                    for (ClientResponse.Details.Error error : errors) {
                        final Tuple failedTuple = metricPointsWithTuple.get(error.getDatapoint());
                        if (failedTuple != null) {
                            failedTuples.add(failedTuple);
                        }
                    }
                    // ...then fail exactly those and ack the rest of the batch.
                    for (Tuple batchedTuple : batchHelper.getBatchTuples()) {
                        if (failedTuples.contains(batchedTuple)) {
                            collector.fail(batchedTuple);
                        } else {
                            collector.ack(batchedTuple);
                        }
                    }
                } else {
                    // NOTE(review): reached when failures were reported but no error
                    // details are available, so 'errors' renders as "null" or "[]" in
                    // the message. The throw is caught below and fails the whole batch.
                    throw new RuntimeException("Some of the metric points failed with details: " + errors);
                }
            } else {
                LOG.debug("Acknowledging batched tuples");
                batchHelper.ack();
            }
            metricPointsWithTuple.clear();
        }
    } catch (Exception e) {
        // Any error fails the entire current batch; clear the datapoint map so
        // the next batch starts from a clean state.
        batchHelper.fail(e);
        metricPointsWithTuple.clear();
    }
}
Example usage of org.apache.storm.tuple.Tuple in the Apache Storm project.
From the class MongoInsertBolt, method flushTuples:
/**
 * Converts every batched tuple into a BSON document and writes them to
 * MongoDB with a single bulk insert, honoring the configured {@code ordered} flag.
 */
private void flushTuples() {
    // ArrayList instead of LinkedList: append-only growth plus sequential reads
    // is exactly the access pattern ArrayList is fastest (and most compact) at.
    List<Document> docs = new java.util.ArrayList<>();
    for (Tuple t : batchHelper.getBatchTuples()) {
        docs.add(mapper.toDocument(t));
    }
    mongoClient.insert(docs, ordered);
}
Example usage of org.apache.storm.tuple.Tuple in the Apache Storm project.
From the class KafkaBoltTest, method executeWithByteArrayKeyAndMessageAsync:
/* test asynchronous sending (default) */
@Test
public void executeWithByteArrayKeyAndMessageAsync() {
    final String expectedKey = "test-key";
    final String expectedMessage = "test-message";
    final byte[] keyBytes = expectedKey.getBytes();
    final byte[] messageBytes = expectedMessage.getBytes();

    final Tuple tuple = generateTestTuple(keyBytes, messageBytes);
    final ByteBufferMessageSet mockedMessages = mockSingleMessage(keyBytes, messageBytes);
    simpleConsumer.close();
    simpleConsumer = mockSimpleConsumer(mockedMessages);

    KafkaProducer<?, ?> producer = mock(KafkaProducer.class);
    // Simulate an immediately-successful async send: invoke the caller's
    // completion callback inline before returning a mock Future.
    when(producer.send(any(ProducerRecord.class), any(Callback.class))).thenAnswer(invocation -> {
        ((Callback) invocation.getArguments()[1]).onCompletion(null, null);
        return mock(Future.class);
    });

    bolt = generateDefaultSerializerBolt(/* async */ true, /* fireAndForget */ false, producer);
    bolt.execute(tuple);

    verify(collector).ack(tuple);
    verifyMessage(expectedKey, expectedMessage);
}
Example usage of org.apache.storm.tuple.Tuple in the Apache Storm project.
From the class KafkaBoltTest, method executeWithByteArrayKeyAndMessageSync:
/* test synchronous sending */
@Test
public void executeWithByteArrayKeyAndMessageSync() {
    final String expectedKey = "test-key";
    final String expectedMessage = "test-message";
    final Tuple tuple = generateTestTuple(expectedKey.getBytes(), expectedMessage.getBytes());

    // Sync mode: execute() blocks until the send completes, so the ack must
    // already have happened by the time execute() returns.
    bolt = generateDefaultSerializerBolt(/* async */ false, /* fireAndForget */ false, null);
    bolt.execute(tuple);

    verify(collector).ack(tuple);
    verifyMessage(expectedKey, expectedMessage);
}
Example usage of org.apache.storm.tuple.Tuple in the Apache Storm project.
From the class JoinResult, method execute:
public void execute(Tuple tuple) {
    // Pairs a result tuple with its matching return-info tuple by request id;
    // once both halves have arrived, emits the joined values anchored on both.
    Object requestId = tuple.getValue(0);
    // Route the tuple into the pending map matching its source component.
    if (tuple.getSourceComponent().equals(returnComponent)) {
        returns.put(requestId, tuple);
    } else {
        results.put(requestId, tuple);
    }
    // Guard: nothing to emit until both halves of the pair are present.
    if (!returns.containsKey(requestId) || !results.containsKey(requestId)) {
        return;
    }
    Tuple resultTuple = results.remove(requestId);
    Tuple returnTuple = returns.remove(requestId);
    LOG.debug(resultTuple.getValue(1).toString());
    List<Tuple> anchors = new ArrayList<>(2);
    anchors.add(resultTuple);
    anchors.add(returnTuple);
    // The result value is emitted in string form alongside the raw return info.
    _collector.emit(anchors, new Values(String.valueOf(resultTuple.getValue(1)), returnTuple.getValue(1)));
    _collector.ack(resultTuple);
    _collector.ack(returnTuple);
}
Aggregations