Search in sources :

Example 26 with FailedException

Usage of org.apache.storm.topology.FailedException in the Apache Storm project.

From the class TridentKafkaState, method updateState:

/**
 * Write the given tuples to Kafka.
 *
 * <p>Each tuple is mapped to a key/message pair and sent asynchronously; this method then
 * waits for every pending send so a failed batch can be replayed by Trident. Any failure is
 * surfaced as a {@link FailedException}.
 *
 * @param tuples    The tuples to write.
 * @param collector The Trident collector (unused; failures are signaled by throwing).
 * @throws FailedException if any message could not be sent or acknowledged.
 */
public void updateState(List<TridentTuple> tuples, TridentCollector collector) {
    String topic = null;
    try {
        final long startTime = System.currentTimeMillis();
        int numberOfRecords = tuples.size();
        List<Future<RecordMetadata>> futures = new ArrayList<>(numberOfRecords);
        for (TridentTuple tuple : tuples) {
            topic = topicSelector.getTopic(tuple);
            V messageFromTuple = mapper.getMessageFromTuple(tuple);
            K keyFromTuple = mapper.getKeyFromTuple(tuple);
            if (topic == null) {
                LOG.warn("skipping key = {}, topic selector returned null.", keyFromTuple);
            } else if (messageFromTuple == null) {
                LOG.warn("skipping Message with Key {} as message was null", keyFromTuple);
            } else {
                futures.add(producer.send(new ProducerRecord<>(topic, keyFromTuple, messageFromTuple)));
            }
        }
        int emittedRecords = futures.size();
        // Block until every send has been acknowledged, collecting failures so a single
        // error can report all of them at once.
        List<ExecutionException> exceptions = new ArrayList<>(emittedRecords);
        for (Future<RecordMetadata> future : futures) {
            try {
                future.get();
            } catch (ExecutionException e) {
                exceptions.add(e);
            }
        }
        if (!exceptions.isEmpty()) {
            StringBuilder errorMsg = new StringBuilder("Could not retrieve result for messages ");
            errorMsg.append(tuples).append(" from topic = ").append(topic)
                    .append(" because of the following exceptions:").append(System.lineSeparator());
            for (ExecutionException exception : exceptions) {
                errorMsg.append(exception.getMessage()).append(System.lineSeparator());
            }
            String message = errorMsg.toString();
            LOG.error(message);
            throw new FailedException(message);
        }
        long latestTime = System.currentTimeMillis();
        LOG.info("Emitted {} records successfully in {} ms to topic {}", emittedRecords, latestTime - startTime, topic);
    } catch (FailedException ex) {
        // Already logged and constructed above; rethrow as-is rather than letting the broad
        // catch below double-wrap (and double-log) it.
        throw ex;
    } catch (InterruptedException ex) {
        // Restore the interrupt flag so the worker thread can observe the interruption.
        Thread.currentThread().interrupt();
        String errorMsg = "Could not send messages " + tuples + " to topic = " + topic;
        LOG.warn(errorMsg, ex);
        throw new FailedException(errorMsg, ex);
    } catch (Exception ex) {
        String errorMsg = "Could not send messages " + tuples + " to topic = " + topic;
        LOG.warn(errorMsg, ex);
        throw new FailedException(errorMsg, ex);
    }
}
Also used : ArrayList(java.util.ArrayList) FailedException(org.apache.storm.topology.FailedException) ExecutionException(java.util.concurrent.ExecutionException) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) FailedException(org.apache.storm.topology.FailedException) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException) TridentTuple(org.apache.storm.trident.tuple.TridentTuple)

Example 27 with FailedException

Usage of org.apache.storm.topology.FailedException in the Apache Storm project.

From the class OpenTsdbState, method update:

/**
 * Maps the incoming batch of tuples to OpenTSDB data points and writes them in one call.
 * If the write reports any failed points, the error is logged, reported to the collector,
 * and rethrown as a FailedException so Trident replays the batch.
 */
public void update(List<TridentTuple> tridentTuples, TridentCollector collector) {
    try {
        // Each tuple may map to several data points, one per configured mapper.
        final List<OpenTsdbMetricDatapoint> dataPoints = new ArrayList<>();
        for (TridentTuple tuple : tridentTuples) {
            for (ITupleOpenTsdbDatapointMapper pointMapper : tupleMetricPointMappers) {
                dataPoints.add(pointMapper.getMetricPoint(tuple));
            }
        }
        final ClientResponse.Details details = openTsdbClient.writeMetricPoints(dataPoints);
        final boolean anyFailed = details != null && details.getFailed() > 0;
        if (anyFailed) {
            final String errorMsg = "Failed in writing metrics to TSDB with details: " + details;
            LOG.error(errorMsg);
            throw new RuntimeException(errorMsg);
        }
    } catch (Exception e) {
        collector.reportError(e);
        throw new FailedException(e);
    }
}
Also used : ClientResponse(org.apache.storm.opentsdb.client.ClientResponse) OpenTsdbMetricDatapoint(org.apache.storm.opentsdb.OpenTsdbMetricDatapoint) FailedException(org.apache.storm.topology.FailedException) ArrayList(java.util.ArrayList) ITupleOpenTsdbDatapointMapper(org.apache.storm.opentsdb.bolt.ITupleOpenTsdbDatapointMapper) FailedException(org.apache.storm.topology.FailedException) TridentTuple(org.apache.storm.trident.tuple.TridentTuple)

Aggregations

FailedException (org.apache.storm.topology.FailedException)27 TridentTuple (org.apache.storm.trident.tuple.TridentTuple)11 ArrayList (java.util.ArrayList)10 Values (org.apache.storm.tuple.Values)8 List (java.util.List)5 IOException (java.io.IOException)4 Document (org.bson.Document)4 Statement (com.datastax.driver.core.Statement)3 Bson (org.bson.conversions.Bson)3 BatchStatement (com.datastax.driver.core.BatchStatement)2 InterruptedIOException (java.io.InterruptedIOException)2 BigInteger (java.math.BigInteger)2 Get (org.apache.hadoop.hbase.client.Get)2 Result (org.apache.hadoop.hbase.client.Result)2 ColumnList (org.apache.storm.hbase.common.ColumnList)2 ReportedFailedException (org.apache.storm.topology.ReportedFailedException)2 ResultSet (com.datastax.driver.core.ResultSet)1 Row (com.datastax.driver.core.Row)1 BufferedWriter (java.io.BufferedWriter)1 OutputStreamWriter (java.io.OutputStreamWriter)1