Example 6 with ReceiveTimeoutTransportException

Use of org.elasticsearch.transport.ReceiveTimeoutTransportException in project nifi by apache.

From the class FetchElasticsearch, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
    final String docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
    final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
    final Charset charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(flowFile).getValue());
    final ComponentLog logger = getLogger();
    try {
        logger.debug("Fetching {}/{}/{} from Elasticsearch", new Object[] { index, docType, docId });
        final long startNanos = System.nanoTime();
        GetRequestBuilder getRequestBuilder = esClient.get().prepareGet(index, docType, docId);
        if (authToken != null) {
            getRequestBuilder.putHeader("Authorization", authToken);
        }
        final GetResponse getResponse = getRequestBuilder.execute().actionGet();
        if (getResponse == null || !getResponse.isExists()) {
            logger.debug("Failed to read {}/{}/{} from Elasticsearch: Document not found", new Object[] { index, docType, docId });
            // The document was not found, so penalize the flow file and route it to "not found"
            flowFile = session.penalize(flowFile);
            session.transfer(flowFile, REL_NOT_FOUND);
        } else {
            flowFile = session.putAttribute(flowFile, "filename", docId);
            flowFile = session.putAttribute(flowFile, "es.index", index);
            flowFile = session.putAttribute(flowFile, "es.type", docType);
            flowFile = session.write(flowFile, new OutputStreamCallback() {

                @Override
                public void process(OutputStream out) throws IOException {
                    out.write(getResponse.getSourceAsString().getBytes(charset));
                }
            });
            logger.debug("Elasticsearch document " + docId + " fetched, routing to success");
            final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
            final String uri = context.getProperty(HOSTS).evaluateAttributeExpressions().getValue() + "/" + index + "/" + docType + "/" + docId;
            session.getProvenanceReporter().fetch(flowFile, uri, millis);
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (NoNodeAvailableException | ElasticsearchTimeoutException | ReceiveTimeoutTransportException | NodeClosedException exceptionToRetry) {
        logger.error("Failed to read into Elasticsearch due to {}, this may indicate an error in configuration " + "(hosts, username/password, etc.). Routing to retry", new Object[] { exceptionToRetry.getLocalizedMessage() }, exceptionToRetry);
        session.transfer(flowFile, REL_RETRY);
        context.yield();
    } catch (Exception e) {
        logger.error("Failed to read {} from Elasticsearch due to {}", new Object[] { flowFile, e.getLocalizedMessage() }, e);
        session.transfer(flowFile, REL_FAILURE);
        context.yield();
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) OutputStream(java.io.OutputStream) Charset(java.nio.charset.Charset) NoNodeAvailableException(org.elasticsearch.client.transport.NoNodeAvailableException) ComponentLog(org.apache.nifi.logging.ComponentLog) GetResponse(org.elasticsearch.action.get.GetResponse) NodeClosedException(org.elasticsearch.node.NodeClosedException) ProcessException(org.apache.nifi.processor.exception.ProcessException) ElasticsearchTimeoutException(org.elasticsearch.ElasticsearchTimeoutException) ReceiveTimeoutTransportException(org.elasticsearch.transport.ReceiveTimeoutTransportException) IOException(java.io.IOException) OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) GetRequestBuilder(org.elasticsearch.action.get.GetRequestBuilder)
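
The NiFi processors in these examples all treat the same four exception types as transient and route the flow file to REL_RETRY. As a minimal, hedged sketch (the helper class and method name below are illustrative, not part of NiFi or Elasticsearch; only the exception types come from the code above), the pattern distills to a single predicate:

import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.transport.ReceiveTimeoutTransportException;

final class RetryableExceptions {

    private RetryableExceptions() {
    }

    // True for the transient conditions the processors route to REL_RETRY;
    // anything else falls through to the generic catch and REL_FAILURE.
    static boolean isRetryable(final Throwable t) {
        return t instanceof NoNodeAvailableException
                || t instanceof ElasticsearchTimeoutException
                || t instanceof ReceiveTimeoutTransportException
                || t instanceof NodeClosedException;
    }
}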

Example 7 with ReceiveTimeoutTransportException

Use of org.elasticsearch.transport.ReceiveTimeoutTransportException in project nifi by apache.

From the class PutElasticsearch, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final String id_attribute = context.getProperty(ID_ATTRIBUTE).getValue();
    final int batchSize = context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger();
    final List<FlowFile> flowFiles = session.get(batchSize);
    if (flowFiles.isEmpty()) {
        return;
    }
    // Keep track of the list of flow files that need to be transferred. As they are transferred, remove them from the list.
    List<FlowFile> flowFilesToTransfer = new LinkedList<>(flowFiles);
    try {
        final BulkRequestBuilder bulk = esClient.get().prepareBulk();
        if (authToken != null) {
            bulk.putHeader("Authorization", authToken);
        }
        for (FlowFile file : flowFiles) {
            final String index = context.getProperty(INDEX).evaluateAttributeExpressions(file).getValue();
            final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(file).getValue();
            final String indexOp = context.getProperty(INDEX_OP).evaluateAttributeExpressions(file).getValue();
            final Charset charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(file).getValue());
            final String id = file.getAttribute(id_attribute);
            if (id == null) {
                logger.error("No value in identifier attribute {} for {}, transferring to failure", new Object[] { id_attribute, file });
                flowFilesToTransfer.remove(file);
                session.transfer(file, REL_FAILURE);
            } else {
                session.read(file, new InputStreamCallback() {

                    @Override
                    public void process(final InputStream in) throws IOException {
                        String json = IOUtils.toString(in, charset).replace("\r\n", " ").replace('\n', ' ').replace('\r', ' ');
                        if (indexOp.equalsIgnoreCase("index")) {
                            bulk.add(esClient.get().prepareIndex(index, docType, id).setSource(json.getBytes(charset)));
                        } else if (indexOp.equalsIgnoreCase("upsert")) {
                            bulk.add(esClient.get().prepareUpdate(index, docType, id).setDoc(json.getBytes(charset)).setDocAsUpsert(true));
                        } else if (indexOp.equalsIgnoreCase("update")) {
                            bulk.add(esClient.get().prepareUpdate(index, docType, id).setDoc(json.getBytes(charset)));
                        } else {
                            throw new IOException("Index operation: " + indexOp + " not supported.");
                        }
                    }
                });
            }
        }
        final BulkResponse response = bulk.execute().actionGet();
        if (response.hasFailures()) {
            // Responses are guaranteed to be in order, so remove them in reverse order to keep indices stable
            BulkItemResponse[] responses = response.getItems();
            if (responses != null && responses.length > 0) {
                for (int i = responses.length - 1; i >= 0; i--) {
                    final FlowFile flowFile = flowFilesToTransfer.get(i);
                    if (responses[i].isFailed()) {
                        logger.error("Failed to insert {} into Elasticsearch due to {}, transferring to failure", new Object[] { flowFile, responses[i].getFailure().getMessage() });
                        session.transfer(flowFile, REL_FAILURE);
                    } else {
                        session.getProvenanceReporter().send(flowFile, context.getProperty(HOSTS).evaluateAttributeExpressions().getValue() + "/" + responses[i].getIndex());
                        session.transfer(flowFile, REL_SUCCESS);
                    }
                    flowFilesToTransfer.remove(flowFile);
                }
            }
        }
        // Transfer any remaining flowfiles to success
        flowFilesToTransfer.forEach(file -> {
            session.transfer(file, REL_SUCCESS);
            // Record provenance event
            session.getProvenanceReporter().send(file, context.getProperty(HOSTS).evaluateAttributeExpressions().getValue() + "/" + context.getProperty(INDEX).evaluateAttributeExpressions(file).getValue());
        });
    } catch (NoNodeAvailableException | ElasticsearchTimeoutException | ReceiveTimeoutTransportException | NodeClosedException exceptionToRetry) {
        // Authorization errors and other problems are often returned as NoNodeAvailableExceptions without a
        // traceable cause. However, the cause does seem to be logged, just not available to this caught exception.
        // Since the error message will show up as a bulletin, we make specific mention to check the logs for
        // more details.
        logger.error("Failed to insert into Elasticsearch due to {}. More detailed information may be available in the NiFi logs.", new Object[] { exceptionToRetry.getLocalizedMessage() }, exceptionToRetry);
        session.transfer(flowFilesToTransfer, REL_RETRY);
        context.yield();
    } catch (Exception exceptionToFail) {
        logger.error("Failed to insert into Elasticsearch due to {}, transferring to failure", new Object[] { exceptionToFail.getLocalizedMessage() }, exceptionToFail);
        session.transfer(flowFilesToTransfer, REL_FAILURE);
        context.yield();
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) InputStream(java.io.InputStream) Charset(java.nio.charset.Charset) BulkItemResponse(org.elasticsearch.action.bulk.BulkItemResponse) BulkResponse(org.elasticsearch.action.bulk.BulkResponse) IOException(java.io.IOException) NoNodeAvailableException(org.elasticsearch.client.transport.NoNodeAvailableException) ComponentLog(org.apache.nifi.logging.ComponentLog) LinkedList(java.util.LinkedList) NodeClosedException(org.elasticsearch.node.NodeClosedException) ProcessException(org.apache.nifi.processor.exception.ProcessException) ElasticsearchTimeoutException(org.elasticsearch.ElasticsearchTimeoutException) ReceiveTimeoutTransportException(org.elasticsearch.transport.ReceiveTimeoutTransportException) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) BulkRequestBuilder(org.elasticsearch.action.bulk.BulkRequestBuilder)
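
One detail in Example 7 deserves a note: the newline stripping before each document is added to the bulk request. The Elasticsearch _bulk protocol is newline-delimited, so a raw line break inside a source document would be read as a record separator. A standalone sketch of that sanitizing step (the class and method names are illustrative, not from the NiFi source; the replace chain is the one used above):

final class BulkJson {

    private BulkJson() {
    }

    // Collapses all line breaks so the JSON document occupies a single line,
    // as each source entry in a newline-delimited _bulk request must.
    static String toSingleLineJson(final String json) {
        return json.replace("\r\n", " ").replace('\n', ' ').replace('\r', ' ');
    }
}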

Example 8 with ReceiveTimeoutTransportException

Use of org.elasticsearch.transport.ReceiveTimeoutTransportException in project elasticsearch by elastic.

From the class AsyncShardFetch, method processAsyncFetch:

/**
 * Called by the response handler of the async action to fetch data. Verifies that it is still working
 * on the same cache generation, otherwise the results are discarded. It then fills in the relevant data for
 * the shard (response + failures), issuing a reroute at the end to make sure there will be another round
 * of allocations taking this new data into account.
 */
protected synchronized void processAsyncFetch(ShardId shardId, List<T> responses, List<FailedNodeException> failures) {
    if (closed) {
        // we are closed, no need to process this async fetch at all
        logger.trace("{} ignoring fetched [{}] results, already closed", shardId, type);
        return;
    }
    logger.trace("{} processing fetched [{}] results", shardId, type);
    if (responses != null) {
        for (T response : responses) {
            NodeEntry<T> nodeEntry = cache.get(response.getNode().getId());
            // if the entry is there, and not marked as failed already, process it
            if (nodeEntry == null) {
                continue;
            }
            if (nodeEntry.isFailed()) {
                logger.trace("{} node {} has failed for [{}] (failure [{}])", shardId, nodeEntry.getNodeId(), type, nodeEntry.getFailure());
            } else {
                logger.trace("{} marking {} as done for [{}], result is [{}]", shardId, nodeEntry.getNodeId(), type, response);
                nodeEntry.doneFetching(response);
            }
        }
    }
    if (failures != null) {
        for (FailedNodeException failure : failures) {
            logger.trace("{} processing failure {} for [{}]", shardId, failure, type);
            NodeEntry<T> nodeEntry = cache.get(failure.nodeId());
            // if the entry is there, and not marked as failed already, process it
            if (nodeEntry != null && nodeEntry.isFailed() == false) {
                Throwable unwrappedCause = ExceptionsHelper.unwrapCause(failure.getCause());
                // if the request got rejected or timed out, we need to try it again next time...
                if (unwrappedCause instanceof EsRejectedExecutionException || unwrappedCause instanceof ReceiveTimeoutTransportException || unwrappedCause instanceof ElasticsearchTimeoutException) {
                    nodeEntry.restartFetching();
                } else {
                    logger.warn((Supplier<?>) () -> new ParameterizedMessage("{}: failed to list shard for {} on node [{}]", shardId, type, failure.nodeId()), failure);
                    nodeEntry.doneFetching(failure.getCause());
                }
            }
        }
    }
    reroute(shardId, "post_response");
}
Also used : ReceiveTimeoutTransportException(org.elasticsearch.transport.ReceiveTimeoutTransportException) ElasticsearchTimeoutException(org.elasticsearch.ElasticsearchTimeoutException) FailedNodeException(org.elasticsearch.action.FailedNodeException) ParameterizedMessage(org.apache.logging.log4j.message.ParameterizedMessage) EsRejectedExecutionException(org.elasticsearch.common.util.concurrent.EsRejectedExecutionException)
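
The key move in Example 8 is unwrapping the FailedNodeException before the instanceof checks, since the interesting rejection or timeout arrives wrapped inside the node-level failure. A hedged sketch of that check in isolation (the class and method names are illustrative; ExceptionsHelper.unwrapCause and the exception types come from the code above):

import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.transport.ReceiveTimeoutTransportException;

final class FetchFailures {

    private FetchFailures() {
    }

    // A rejected or timed-out fetch is transient: restart it on the next round
    // instead of recording the node as permanently failed.
    static boolean shouldRestartFetching(final FailedNodeException failure) {
        final Throwable cause = ExceptionsHelper.unwrapCause(failure.getCause());
        return cause instanceof EsRejectedExecutionException
                || cause instanceof ReceiveTimeoutTransportException
                || cause instanceof ElasticsearchTimeoutException;
    }
}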

Example 9 with ReceiveTimeoutTransportException

Use of org.elasticsearch.transport.ReceiveTimeoutTransportException in project nifi by apache.

From the class DeleteElasticsearch5, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    synchronized (esClient) {
        if (esClient.get() == null) {
            setup(context);
        }
    }
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
    final String documentId = context.getProperty(DOCUMENT_ID).evaluateAttributeExpressions(flowFile).getValue();
    final String documentType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
    final ComponentLog logger = getLogger();
    if (StringUtils.isBlank(index)) {
        logger.debug("Index is required but was empty {}", new Object[] { index });
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, "Index is required but was empty");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    if (StringUtils.isBlank(documentType)) {
        logger.debug("Document type is required but was empty {}", new Object[] { documentType });
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, "Document type is required but was empty");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    if (StringUtils.isBlank(documentId)) {
        logger.debug("Document id is required but was empty {}", new Object[] { documentId });
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, "Document id is required but was empty");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    flowFile = session.putAllAttributes(flowFile, new HashMap<String, String>() {

        {
            put(ES_FILENAME, documentId);
            put(ES_INDEX, index);
            put(ES_TYPE, documentType);
        }
    });
    try {
        logger.debug("Deleting document {}/{}/{} from Elasticsearch", new Object[] { index, documentType, documentId });
        DeleteRequestBuilder requestBuilder = prepareDeleteRequest(index, documentId, documentType);
        final DeleteResponse response = doDelete(requestBuilder);
        if (response.status() != RestStatus.OK) {
            logger.warn("Failed to delete document {}/{}/{} from Elasticsearch: Status {}", new Object[] { index, documentType, documentId, response.status() });
            flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, UNABLE_TO_DELETE_DOCUMENT_MESSAGE);
            flowFile = session.putAttribute(flowFile, ES_REST_STATUS, response.status().toString());
            context.yield();
            if (response.status() == RestStatus.NOT_FOUND) {
                session.transfer(flowFile, REL_NOT_FOUND);
            } else {
                session.transfer(flowFile, REL_FAILURE);
            }
        } else {
            logger.debug("Elasticsearch document " + documentId + " deleted");
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (ElasticsearchTimeoutException | ReceiveTimeoutTransportException exception) {
        logger.error("Failed to delete document {} from Elasticsearch due to {}", new Object[] { documentId, exception.getLocalizedMessage() }, exception);
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, exception.getLocalizedMessage());
        session.transfer(flowFile, REL_RETRY);
        context.yield();
    } catch (Exception e) {
        logger.error("Failed to delete document {} from Elasticsearch due to {}", new Object[] { documentId, e.getLocalizedMessage() }, e);
        flowFile = session.putAttribute(flowFile, ES_ERROR_MESSAGE, e.getLocalizedMessage());
        session.transfer(flowFile, REL_FAILURE);
        context.yield();
    }
}
Also used : DeleteRequestBuilder(org.elasticsearch.action.delete.DeleteRequestBuilder) FlowFile(org.apache.nifi.flowfile.FlowFile) ReceiveTimeoutTransportException(org.elasticsearch.transport.ReceiveTimeoutTransportException) DeleteResponse(org.elasticsearch.action.delete.DeleteResponse) ElasticsearchTimeoutException(org.elasticsearch.ElasticsearchTimeoutException) HashMap(java.util.HashMap) ComponentLog(org.apache.nifi.logging.ComponentLog) ProcessException(org.apache.nifi.processor.exception.ProcessException) ExecutionException(java.util.concurrent.ExecutionException)
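
A small design note on Example 9: the double-brace initialization passed to putAllAttributes creates an anonymous HashMap subclass that captures a reference to the enclosing processor instance. A plain map is equivalent and avoids the capture; a minimal alternative sketch (assumes java.util.Map and java.util.HashMap imports; ES_FILENAME, ES_INDEX, and ES_TYPE are the constants used above):

// Equivalent to the double-brace block above, without the anonymous subclass.
final Map<String, String> attributes = new HashMap<>();
attributes.put(ES_FILENAME, documentId);
attributes.put(ES_INDEX, index);
attributes.put(ES_TYPE, documentType);
flowFile = session.putAllAttributes(flowFile, attributes);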

Example 10 with ReceiveTimeoutTransportException

Use of org.elasticsearch.transport.ReceiveTimeoutTransportException in project nifi by apache.

From the class TestPutElasticsearch5, method testPutElasticsearch5OnTriggerWithExceptions:

@Test
public void testPutElasticsearch5OnTriggerWithExceptions() throws IOException {
    PutElasticsearch5TestProcessor processor = new PutElasticsearch5TestProcessor(false);
    runner = TestRunners.newTestRunner(processor);
    runner.setProperty(AbstractElasticsearch5TransportClientProcessor.CLUSTER_NAME, "elasticsearch");
    runner.setProperty(AbstractElasticsearch5TransportClientProcessor.HOSTS, "127.0.0.1:9300");
    runner.setProperty(AbstractElasticsearch5TransportClientProcessor.PING_TIMEOUT, "5s");
    runner.setProperty(AbstractElasticsearch5TransportClientProcessor.SAMPLER_INTERVAL, "5s");
    runner.setProperty(PutElasticsearch5.INDEX, "doc");
    runner.setProperty(PutElasticsearch5.TYPE, "status");
    runner.setValidateExpressionUsage(true);
    runner.setProperty(PutElasticsearch5.ID_ATTRIBUTE, "doc_id");
    // No Node Available exception
    processor.setExceptionToThrow(new NoNodeAvailableException("test"));
    runner.enqueue(docExample, new HashMap<String, String>() {

        {
            put("doc_id", "28039652140");
        }
    });
    runner.run(1, true, true);
    runner.assertAllFlowFilesTransferred(FetchElasticsearch5.REL_RETRY, 1);
    runner.clearTransferState();
    // Elasticsearch5 Timeout exception
    processor.setExceptionToThrow(new ElasticsearchTimeoutException("test"));
    runner.enqueue(docExample, new HashMap<String, String>() {

        {
            put("doc_id", "28039652141");
        }
    });
    runner.run(1, true, true);
    runner.assertAllFlowFilesTransferred(FetchElasticsearch5.REL_RETRY, 1);
    runner.clearTransferState();
    // Receive Timeout Transport exception
    processor.setExceptionToThrow(new ReceiveTimeoutTransportException(mock(StreamInput.class)));
    runner.enqueue(docExample, new HashMap<String, String>() {

        {
            put("doc_id", "28039652142");
        }
    });
    runner.run(1, true, true);
    runner.assertAllFlowFilesTransferred(FetchElasticsearch5.REL_RETRY, 1);
    runner.clearTransferState();
    // Node Closed exception
    processor.setExceptionToThrow(new NodeClosedException(mock(StreamInput.class)));
    runner.enqueue(docExample, new HashMap<String, String>() {

        {
            put("doc_id", "28039652143");
        }
    });
    runner.run(1, true, true);
    runner.assertAllFlowFilesTransferred(FetchElasticsearch5.REL_RETRY, 1);
    runner.clearTransferState();
    // Elasticsearch5 Parse exception
    processor.setExceptionToThrow(new ElasticsearchParseException("test"));
    runner.enqueue(docExample, new HashMap<String, String>() {

        {
            put("doc_id", "28039652144");
        }
    });
    runner.run(1, true, true);
    // This test generates an exception on execute(), routing to failure
    runner.assertTransferCount(PutElasticsearch5.REL_FAILURE, 1);
}
Also used : ReceiveTimeoutTransportException(org.elasticsearch.transport.ReceiveTimeoutTransportException) ElasticsearchTimeoutException(org.elasticsearch.ElasticsearchTimeoutException) ElasticsearchParseException(org.elasticsearch.ElasticsearchParseException) NodeClosedException(org.elasticsearch.node.NodeClosedException) Matchers.anyString(org.mockito.Matchers.anyString) NoNodeAvailableException(org.elasticsearch.client.transport.NoNodeAvailableException) Test(org.junit.Test)
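
Example 10 builds each exception through its StreamInput (deserialization) constructor with a Mockito mock, which is sufficient for routing assertions. ReceiveTimeoutTransportException also exposes a (node, action, message) constructor; a hedged sketch of one more block that could be appended inside the same test method (the action string, message, and doc_id value are made up for illustration, and org.elasticsearch.cluster.node.DiscoveryNode is assumed to be imported):

    // Receive Timeout Transport exception, constructed directly rather than from a mocked StreamInput
    processor.setExceptionToThrow(new ReceiveTimeoutTransportException(mock(DiscoveryNode.class), "indices:data/write/bulk", "request timed out"));
    runner.enqueue(docExample, new HashMap<String, String>() {

        {
            put("doc_id", "28039652145");
        }
    });
    runner.run(1, true, true);
    runner.assertAllFlowFilesTransferred(FetchElasticsearch5.REL_RETRY, 1);
    runner.clearTransferState();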

Aggregations

ReceiveTimeoutTransportException (org.elasticsearch.transport.ReceiveTimeoutTransportException): 14
ElasticsearchTimeoutException (org.elasticsearch.ElasticsearchTimeoutException): 11
NoNodeAvailableException (org.elasticsearch.client.transport.NoNodeAvailableException): 8
NodeClosedException (org.elasticsearch.node.NodeClosedException): 8
FlowFile (org.apache.nifi.flowfile.FlowFile): 5
ComponentLog (org.apache.nifi.logging.ComponentLog): 5
ProcessException (org.apache.nifi.processor.exception.ProcessException): 5
IOException (java.io.IOException): 4
Charset (java.nio.charset.Charset): 4
ElasticsearchParseException (org.elasticsearch.ElasticsearchParseException): 4
EsRejectedExecutionException (org.elasticsearch.common.util.concurrent.EsRejectedExecutionException): 4
Test (org.junit.Test): 4
Matchers.anyString (org.mockito.Matchers.anyString): 4
ParameterizedMessage (org.apache.logging.log4j.message.ParameterizedMessage): 3
InputStream (java.io.InputStream): 2
OutputStream (java.io.OutputStream): 2
HashMap (java.util.HashMap): 2
LinkedList (java.util.LinkedList): 2
CountDownLatch (java.util.concurrent.CountDownLatch): 2
InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback): 2