
Example 96 with ProcessSession

use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

the class MonitorActivity method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final long thresholdMillis = context.getProperty(THRESHOLD).asTimePeriod(TimeUnit.MILLISECONDS);
    final long now = System.currentTimeMillis();
    final ComponentLog logger = getLogger();
    final boolean copyAttributes = context.getProperty(COPY_ATTRIBUTES).asBoolean();
    final boolean isClusterScope = isClusterScope(context, false);
    final boolean shouldReportOnlyOnPrimary = shouldReportOnlyOnPrimary(isClusterScope, context);
    final List<FlowFile> flowFiles = session.get(50);
    boolean isInactive = false;
    long updatedLatestSuccessTransfer = -1;
    StateMap clusterState = null;
    if (flowFiles.isEmpty()) {
        final long previousSuccessMillis = latestSuccessTransfer.get();
        boolean sendInactiveMarker = false;
        isInactive = (now >= previousSuccessMillis + thresholdMillis);
        logger.debug("isInactive={}, previousSuccessMillis={}, now={}", new Object[] { isInactive, previousSuccessMillis, now });
        if (isInactive && isClusterScope) {
            // Even though this node has been inactive, another node may have handled flow more recently,
            // so consult cluster state. (If this node is active, we don't have to look at cluster state.)
            try {
                clusterState = context.getStateManager().getState(Scope.CLUSTER);
                if (clusterState != null && !StringUtils.isEmpty(clusterState.get(STATE_KEY_LATEST_SUCCESS_TRANSFER))) {
                    final long latestReportedClusterActivity = Long.valueOf(clusterState.get(STATE_KEY_LATEST_SUCCESS_TRANSFER));
                    isInactive = (now >= latestReportedClusterActivity + thresholdMillis);
                    if (!isInactive) {
                        // This node has been inactive, but another node has more recent activity.
                        updatedLatestSuccessTransfer = latestReportedClusterActivity;
                    }
                    logger.debug("isInactive={}, latestReportedClusterActivity={}", new Object[] { isInactive, latestReportedClusterActivity });
                }
            } catch (IOException e) {
                logger.error("Failed to access cluster state. Activity will not be monitored properly until this is addressed.", e);
            }
        }
        if (isInactive) {
            final boolean continual = context.getProperty(CONTINUALLY_SEND_MESSAGES).asBoolean();
            sendInactiveMarker = !inactive.getAndSet(true) || (continual && (now > lastInactiveMessage.get() + thresholdMillis));
        }
        if (sendInactiveMarker && shouldThisNodeReport(isClusterScope, shouldReportOnlyOnPrimary)) {
            lastInactiveMessage.set(System.currentTimeMillis());
            FlowFile inactiveFlowFile = session.create();
            inactiveFlowFile = session.putAttribute(inactiveFlowFile, "inactivityStartMillis", String.valueOf(previousSuccessMillis));
            inactiveFlowFile = session.putAttribute(inactiveFlowFile, "inactivityDurationMillis", String.valueOf(now - previousSuccessMillis));
            final byte[] outBytes = context.getProperty(INACTIVITY_MESSAGE).evaluateAttributeExpressions(inactiveFlowFile).getValue().getBytes(UTF8);
            inactiveFlowFile = session.write(inactiveFlowFile, new OutputStreamCallback() {

                @Override
                public void process(final OutputStream out) throws IOException {
                    out.write(outBytes);
                }
            });
            session.getProvenanceReporter().create(inactiveFlowFile);
            session.transfer(inactiveFlowFile, REL_INACTIVE);
            logger.info("Transferred {} to 'inactive'", new Object[] { inactiveFlowFile });
        } else {
            // no need to dominate CPU checking times; let other processors run for a bit.
            context.yield();
        }
    } else {
        session.transfer(flowFiles, REL_SUCCESS);
        updatedLatestSuccessTransfer = now;
        logger.info("Transferred {} FlowFiles to 'success'", new Object[] { flowFiles.size() });
        final long latestStateReportTimestamp = latestReportedNodeState.get();
        if (isClusterScope && (now - latestStateReportTimestamp) > (thresholdMillis / 3)) {
            // We don't want to hit the state manager on every onTrigger(), only often enough to detect activity.
            try {
                final StateManager stateManager = context.getStateManager();
                final StateMap state = stateManager.getState(Scope.CLUSTER);
                final Map<String, String> newValues = new HashMap<>();
                // Persist the attributes so that other nodes can copy them
                if (copyAttributes) {
                    newValues.putAll(flowFiles.get(0).getAttributes());
                }
                newValues.put(STATE_KEY_LATEST_SUCCESS_TRANSFER, String.valueOf(now));
                if (state == null || state.getVersion() == -1) {
                    stateManager.setState(newValues, Scope.CLUSTER);
                } else {
                    final String existingTimestamp = state.get(STATE_KEY_LATEST_SUCCESS_TRANSFER);
                    if (StringUtils.isEmpty(existingTimestamp) || Long.parseLong(existingTimestamp) < now) {
                        // If this returns false due to race condition, it's not a problem since we just need
                        // the latest active timestamp.
                        stateManager.replace(state, newValues, Scope.CLUSTER);
                    } else {
                        logger.debug("Existing state has more recent timestamp, didn't update state.");
                    }
                }
                latestReportedNodeState.set(now);
            } catch (IOException e) {
                logger.error("Failed to access cluster state. Activity will not be monitored properly until this is addressed.", e);
            }
        }
    }
    if (!isInactive) {
        final long inactivityStartMillis = latestSuccessTransfer.get();
        if (updatedLatestSuccessTransfer > -1) {
            latestSuccessTransfer.set(updatedLatestSuccessTransfer);
        }
        if (inactive.getAndSet(false) && shouldThisNodeReport(isClusterScope, shouldReportOnlyOnPrimary)) {
            FlowFile activityRestoredFlowFile = session.create();
            if (copyAttributes) {
                final Map<String, String> attributes = new HashMap<>();
                if (flowFiles.size() > 0) {
                    // copy attributes from the first flow file in the list
                    attributes.putAll(flowFiles.get(0).getAttributes());
                } else if (clusterState != null) {
                    attributes.putAll(clusterState.toMap());
                    attributes.remove(STATE_KEY_LATEST_SUCCESS_TRANSFER);
                }
                // don't copy the UUID
                attributes.remove(CoreAttributes.UUID.key());
                activityRestoredFlowFile = session.putAllAttributes(activityRestoredFlowFile, attributes);
            }
            activityRestoredFlowFile = session.putAttribute(activityRestoredFlowFile, "inactivityStartMillis", String.valueOf(inactivityStartMillis));
            activityRestoredFlowFile = session.putAttribute(activityRestoredFlowFile, "inactivityDurationMillis", String.valueOf(now - inactivityStartMillis));
            final byte[] outBytes = context.getProperty(ACTIVITY_RESTORED_MESSAGE).evaluateAttributeExpressions(activityRestoredFlowFile).getValue().getBytes(UTF8);
            activityRestoredFlowFile = session.write(activityRestoredFlowFile, out -> out.write(outBytes));
            session.getProvenanceReporter().create(activityRestoredFlowFile);
            session.transfer(activityRestoredFlowFile, REL_ACTIVITY_RESTORED);
            logger.info("Transferred {} to 'activity.restored'", new Object[] { activityRestoredFlowFile });
        }
    }
}
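
For orientation, here is a minimal sketch of how a processor like this is typically exercised with NiFi's mock framework. It assumes the nifi-mock test artifact and a test method that declares throws Exception; the property and relationship constants are the static fields referenced in the code above.

import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

final TestRunner runner = TestRunners.newTestRunner(MonitorActivity.class);
runner.setProperty(MonitorActivity.THRESHOLD, "1 sec");

// Active path: an incoming FlowFile is passed straight through to 'success'.
runner.enqueue("hello".getBytes());
runner.run();
runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS, 1);

// Inactive path: once the threshold elapses with an empty queue, a run
// emits a single marker FlowFile to 'inactive'.
runner.clearTransferState();
Thread.sleep(1500L);
runner.run();
runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE, 1);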
Also used : OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) StandardValidators(org.apache.nifi.processor.util.StandardValidators) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) ComponentLog(org.apache.nifi.logging.ComponentLog) SideEffectFree(org.apache.nifi.annotation.behavior.SideEffectFree) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) Charset(java.nio.charset.Charset) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) Scope(org.apache.nifi.components.state.Scope) Relationship(org.apache.nifi.processor.Relationship) Map(java.util.Map) Requirement(org.apache.nifi.annotation.behavior.InputRequirement.Requirement) TriggerSerially(org.apache.nifi.annotation.behavior.TriggerSerially) ValidationResult(org.apache.nifi.components.ValidationResult) OutputStream(java.io.OutputStream) TriggerWhenEmpty(org.apache.nifi.annotation.behavior.TriggerWhenEmpty) FlowFile(org.apache.nifi.flowfile.FlowFile) StateManager(org.apache.nifi.components.state.StateManager) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) IOException(java.io.IOException) ProcessSession(org.apache.nifi.processor.ProcessSession) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) StringUtils(org.apache.nifi.util.StringUtils) AllowableValue(org.apache.nifi.components.AllowableValue) StateMap(org.apache.nifi.components.state.StateMap) TimeUnit(java.util.concurrent.TimeUnit) AtomicLong(java.util.concurrent.atomic.AtomicLong) List(java.util.List) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) Stateful(org.apache.nifi.annotation.behavior.Stateful) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled) AbstractProcessor(org.apache.nifi.processor.AbstractProcessor) Tags(org.apache.nifi.annotation.documentation.Tags) CoreAttributes(org.apache.nifi.flowfile.attributes.CoreAttributes) Collections(java.util.Collections) OnStopped(org.apache.nifi.annotation.lifecycle.OnStopped) ProcessorInitializationContext(org.apache.nifi.processor.ProcessorInitializationContext)

Example 97 with ProcessSession

use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

the class PartitionRecord method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    final Map<String, RecordPath> recordPaths;
    try {
        recordPaths = context.getProperties().keySet().stream().filter(prop -> prop.isDynamic()).collect(Collectors.toMap(prop -> prop.getName(), prop -> getRecordPath(context, prop, flowFile)));
    } catch (final Exception e) {
        getLogger().error("Failed to compile RecordPath for {}; routing to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final Map<RecordValueMap, RecordSetWriter> writerMap = new HashMap<>();
    try (final InputStream in = session.read(flowFile)) {
        final Map<String, String> originalAttributes = flowFile.getAttributes();
        final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger());
        final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
        Record record;
        while ((record = reader.nextRecord()) != null) {
            final Map<String, List<ValueWrapper>> recordMap = new HashMap<>();
            // Evaluate all of the RecordPath's for this Record
            for (final Map.Entry<String, RecordPath> entry : recordPaths.entrySet()) {
                final String propName = entry.getKey();
                final RecordPath recordPath = entry.getValue();
                final Stream<FieldValue> fieldValueStream = recordPath.evaluate(record).getSelectedFields();
                final List<ValueWrapper> fieldValues = fieldValueStream.map(fieldVal -> new ValueWrapper(fieldVal.getValue())).collect(Collectors.toList());
                recordMap.put(propName, fieldValues);
            }
            final RecordValueMap recordValueMap = new RecordValueMap(recordMap);
            // Get the RecordSetWriter that contains the same values for all RecordPaths - or create one if none exists.
            RecordSetWriter writer = writerMap.get(recordValueMap);
            if (writer == null) {
                final FlowFile childFlowFile = session.create(flowFile);
                recordValueMap.setFlowFile(childFlowFile);
                final OutputStream out = session.write(childFlowFile);
                writer = writerFactory.createWriter(getLogger(), writeSchema, out);
                writer.beginRecordSet();
                writerMap.put(recordValueMap, writer);
            }
            writer.write(record);
        }
        // For each RecordSetWriter, finish the record set and close the writer.
        for (final Map.Entry<RecordValueMap, RecordSetWriter> entry : writerMap.entrySet()) {
            final RecordValueMap valueMap = entry.getKey();
            final RecordSetWriter writer = entry.getValue();
            final WriteResult writeResult = writer.finishRecordSet();
            writer.close();
            final Map<String, String> attributes = new HashMap<>();
            attributes.putAll(valueMap.getAttributes());
            attributes.putAll(writeResult.getAttributes());
            attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            FlowFile childFlowFile = valueMap.getFlowFile();
            childFlowFile = session.putAllAttributes(childFlowFile, attributes);
            // Store the updated reference back so that the transfer loop below uses the latest version.
            valueMap.setFlowFile(childFlowFile);
            session.adjustCounter("Record Processed", writeResult.getRecordCount(), false);
        }
    } catch (final Exception e) {
        for (final Map.Entry<RecordValueMap, RecordSetWriter> entry : writerMap.entrySet()) {
            final RecordValueMap valueMap = entry.getKey();
            final RecordSetWriter writer = entry.getValue();
            try {
                writer.close();
            } catch (final IOException e1) {
                getLogger().warn("Failed to close Record Writer for {}; some resources may not be cleaned up appropriately", new Object[] { flowFile, e1 });
            }
            session.remove(valueMap.getFlowFile());
        }
        getLogger().error("Failed to partition {}", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    // Transfer the child FlowFiles only after everything has been written successfully,
    // because we want to ensure that we are able to remove the child flowfiles in case of a failure.
    for (final RecordValueMap valueMap : writerMap.keySet()) {
        session.transfer(valueMap.getFlowFile(), REL_SUCCESS);
    }
    session.transfer(flowFile, REL_ORIGINAL);
}
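
A hedged sketch of driving this processor in a test, assuming NiFi's mock record services (MockRecordParser / MockRecordWriter from the nifi-mock-record-utils test artifact) and a test method that declares throws Exception; the property and relationship constants are the static fields referenced above.

import org.apache.nifi.serialization.record.MockRecordParser;
import org.apache.nifi.serialization.record.MockRecordWriter;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

final TestRunner runner = TestRunners.newTestRunner(PartitionRecord.class);
final MockRecordParser readerService = new MockRecordParser();
final MockRecordWriter writerService = new MockRecordWriter(null, false);
runner.addControllerService("reader", readerService);
runner.enableControllerService(readerService);
runner.addControllerService("writer", writerService);
runner.enableControllerService(writerService);
runner.setProperty(PartitionRecord.RECORD_READER, "reader");
runner.setProperty(PartitionRecord.RECORD_WRITER, "writer");
// Dynamic property: partition on the 'country' field via a RecordPath.
runner.setProperty("country", "/country");

readerService.addSchemaField("name", RecordFieldType.STRING);
readerService.addSchemaField("country", RecordFieldType.STRING);
readerService.addRecord("Alice", "US");
readerService.addRecord("Bob", "JP");
readerService.addRecord("Carol", "US");

runner.enqueue(new byte[0]);
runner.run();
// One child FlowFile per distinct RecordPath value, plus the original.
runner.assertTransferCount(PartitionRecord.REL_SUCCESS, 2);
runner.assertTransferCount(PartitionRecord.REL_ORIGINAL, 1);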
Also used : Arrays(java.util.Arrays) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) ValidationContext(org.apache.nifi.components.ValidationContext) HashMap(java.util.HashMap) EventDriven(org.apache.nifi.annotation.behavior.EventDriven) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ProcessException(org.apache.nifi.processor.exception.ProcessException) RecordPath(org.apache.nifi.record.path.RecordPath) ArrayList(java.util.ArrayList) RecordPathValidator(org.apache.nifi.record.path.validation.RecordPathValidator) HashSet(java.util.HashSet) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) Relationship(org.apache.nifi.processor.Relationship) RecordReader(org.apache.nifi.serialization.RecordReader) Map(java.util.Map) Requirement(org.apache.nifi.annotation.behavior.InputRequirement.Requirement) ValidationResult(org.apache.nifi.components.ValidationResult) Record(org.apache.nifi.serialization.record.Record) OutputStream(java.io.OutputStream) FlowFile(org.apache.nifi.flowfile.FlowFile) Collection(java.util.Collection) WriteResult(org.apache.nifi.serialization.WriteResult) DataTypeUtils(org.apache.nifi.serialization.record.util.DataTypeUtils) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) IOException(java.io.IOException) ProcessSession(org.apache.nifi.processor.ProcessSession) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) SeeAlso(org.apache.nifi.annotation.documentation.SeeAlso) Collectors(java.util.stream.Collectors) List(java.util.List) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) Stream(java.util.stream.Stream) DynamicProperty(org.apache.nifi.annotation.behavior.DynamicProperty) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) SupportsBatching(org.apache.nifi.annotation.behavior.SupportsBatching) RecordPathCache(org.apache.nifi.record.path.util.RecordPathCache) AbstractProcessor(org.apache.nifi.processor.AbstractProcessor) Tags(org.apache.nifi.annotation.documentation.Tags) CoreAttributes(org.apache.nifi.flowfile.attributes.CoreAttributes) FieldValue(org.apache.nifi.record.path.FieldValue) Collections(java.util.Collections) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) InputStream(java.io.InputStream)

Example 98 with ProcessSession

use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

the class EvaluateJsonPath method onTrigger.

@Override
public void onTrigger(final ProcessContext processContext, final ProcessSession processSession) throws ProcessException {
    FlowFile flowFile = processSession.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    DocumentContext documentContext;
    try {
        documentContext = validateAndEstablishJsonContext(processSession, flowFile);
    } catch (InvalidJsonException e) {
        logger.error("FlowFile {} did not have valid JSON content.", new Object[] { flowFile });
        processSession.transfer(flowFile, REL_FAILURE);
        return;
    }
    Set<Map.Entry<String, JsonPath>> attributeJsonPathEntries = attributeToJsonPathEntrySetQueue.poll();
    if (attributeJsonPathEntries == null) {
        attributeJsonPathEntries = processContext.getProperties().entrySet().stream().filter(e -> e.getKey().isDynamic()).collect(Collectors.toMap(e -> e.getKey().getName(), e -> JsonPath.compile(e.getValue()))).entrySet();
    }
    try {
        // We'll only be using this map if destinationIsAttribute == true
        final Map<String, String> jsonPathResults = destinationIsAttribute ? new HashMap<>(attributeJsonPathEntries.size()) : Collections.EMPTY_MAP;
        for (final Map.Entry<String, JsonPath> attributeJsonPathEntry : attributeJsonPathEntries) {
            final String jsonPathAttrKey = attributeJsonPathEntry.getKey();
            final JsonPath jsonPathExp = attributeJsonPathEntry.getValue();
            Object result;
            try {
                Object potentialResult = documentContext.read(jsonPathExp);
                if (returnType.equals(RETURN_TYPE_SCALAR) && !isJsonScalar(potentialResult)) {
                    logger.error("Unable to return a scalar value for the expression {} for FlowFile {}. Evaluated value was {}. Transferring to {}.", new Object[] { jsonPathExp.getPath(), flowFile.getId(), potentialResult.toString(), REL_FAILURE.getName() });
                    processSession.transfer(flowFile, REL_FAILURE);
                    return;
                }
                result = potentialResult;
            } catch (PathNotFoundException e) {
                if (pathNotFound.equals(PATH_NOT_FOUND_WARN)) {
                    logger.warn("FlowFile {} could not find path {} for attribute key {}.", new Object[] { flowFile.getId(), jsonPathExp.getPath(), jsonPathAttrKey }, e);
                }
                if (destinationIsAttribute) {
                    jsonPathResults.put(jsonPathAttrKey, StringUtils.EMPTY);
                    continue;
                } else {
                    processSession.transfer(flowFile, REL_NO_MATCH);
                    return;
                }
            }
            final String resultRepresentation = getResultRepresentation(result, nullDefaultValue);
            if (destinationIsAttribute) {
                jsonPathResults.put(jsonPathAttrKey, resultRepresentation);
            } else {
                flowFile = processSession.write(flowFile, out -> {
                    try (OutputStream outputStream = new BufferedOutputStream(out)) {
                        outputStream.write(resultRepresentation.getBytes(StandardCharsets.UTF_8));
                    }
                });
                processSession.getProvenanceReporter().modifyContent(flowFile, "Replaced content with result of expression " + jsonPathExp.getPath());
            }
        }
        // jsonPathResults map will be empty if this is false
        if (destinationIsAttribute) {
            flowFile = processSession.putAllAttributes(flowFile, jsonPathResults);
        }
        processSession.transfer(flowFile, REL_MATCH);
    } finally {
        attributeToJsonPathEntrySetQueue.offer(attributeJsonPathEntries);
    }
}
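
A brief usage sketch, assuming nifi-mock; note that EvaluateJsonPath.DESTINATION and its "flowfile-attribute" allowable value come from the full processor definition rather than the excerpt above (the excerpt only shows the derived destinationIsAttribute flag).

import java.nio.charset.StandardCharsets;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

final TestRunner runner = TestRunners.newTestRunner(EvaluateJsonPath.class);
runner.setProperty(EvaluateJsonPath.DESTINATION, "flowfile-attribute");
// Dynamic property: attribute name -> JsonPath expression.
runner.setProperty("user.name", "$.name");

runner.enqueue("{\"name\":\"alice\"}".getBytes(StandardCharsets.UTF_8));
runner.run();

runner.assertAllFlowFilesTransferred(EvaluateJsonPath.REL_MATCH, 1);
runner.getFlowFilesForRelationship(EvaluateJsonPath.REL_MATCH).get(0)
        .assertAttributeEquals("user.name", "alice");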
Also used : CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) OnRemoved(org.apache.nifi.annotation.lifecycle.OnRemoved) ValidationContext(org.apache.nifi.components.ValidationContext) HashMap(java.util.HashMap) EventDriven(org.apache.nifi.annotation.behavior.EventDriven) ComponentLog(org.apache.nifi.logging.ComponentLog) StringUtils(org.apache.commons.lang3.StringUtils) SideEffectFree(org.apache.nifi.annotation.behavior.SideEffectFree) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ProcessException(org.apache.nifi.processor.exception.ProcessException) BufferedOutputStream(java.io.BufferedOutputStream) ArrayList(java.util.ArrayList) ConcurrentMap(java.util.concurrent.ConcurrentMap) InvalidJsonException(com.jayway.jsonpath.InvalidJsonException) HashSet(java.util.HashSet) Relationship(org.apache.nifi.processor.Relationship) Map(java.util.Map) Requirement(org.apache.nifi.annotation.behavior.InputRequirement.Requirement) ValidationResult(org.apache.nifi.components.ValidationResult) OutputStream(java.io.OutputStream) OnUnscheduled(org.apache.nifi.annotation.lifecycle.OnUnscheduled) FlowFile(org.apache.nifi.flowfile.FlowFile) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) ProcessSession(org.apache.nifi.processor.ProcessSession) JsonPath(com.jayway.jsonpath.JsonPath) Collectors(java.util.stream.Collectors) StandardCharsets(java.nio.charset.StandardCharsets) List(java.util.List) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled) DynamicProperty(org.apache.nifi.annotation.behavior.DynamicProperty) SupportsBatching(org.apache.nifi.annotation.behavior.SupportsBatching) DocumentContext(com.jayway.jsonpath.DocumentContext) Queue(java.util.Queue) Tags(org.apache.nifi.annotation.documentation.Tags) PathNotFoundException(com.jayway.jsonpath.PathNotFoundException) Collections(java.util.Collections) ProcessorInitializationContext(org.apache.nifi.processor.ProcessorInitializationContext) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue)

Example 99 with ProcessSession

use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

the class GenerateTableFetch method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
    // Fetch the column/table info once (if the table name and max value columns are not dynamic). Otherwise do the setup later
    if (!isDynamicTableName && !isDynamicMaxValues && !setupComplete.get()) {
        super.setup(context);
    }
    ProcessSession session = sessionFactory.createSession();
    FlowFile fileToProcess = null;
    if (context.hasIncomingConnection()) {
        fileToProcess = session.get();
        if (fileToProcess == null) {
            // Incoming connection with no flow file available, do no work (see capability description)
            return;
        }
    }
    final ComponentLog logger = getLogger();
    final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    final DatabaseAdapter dbAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue());
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(fileToProcess).getValue();
    final String columnNames = context.getProperty(COLUMN_NAMES).evaluateAttributeExpressions(fileToProcess).getValue();
    final String maxValueColumnNames = context.getProperty(MAX_VALUE_COLUMN_NAMES).evaluateAttributeExpressions(fileToProcess).getValue();
    final int partitionSize = context.getProperty(PARTITION_SIZE).evaluateAttributeExpressions(fileToProcess).asInteger();
    final String customWhereClause = context.getProperty(WHERE_CLAUSE).evaluateAttributeExpressions(fileToProcess).getValue();
    final StateManager stateManager = context.getStateManager();
    final StateMap stateMap;
    FlowFile finalFileToProcess = fileToProcess;
    try {
        stateMap = stateManager.getState(Scope.CLUSTER);
    } catch (final IOException ioe) {
        logger.error("Failed to retrieve observed maximum values from the State Manager. Will not perform " + "query until this is accomplished.", ioe);
        context.yield();
        return;
    }
    try {
        // Make a mutable copy of the current state property map. This will be updated by the result row callback, and eventually
        // set as the current state map (after the session has been committed)
        final Map<String, String> statePropertyMap = new HashMap<>(stateMap.toMap());
        // If an initial max value for column(s) has been specified using properties, and this column is not in the state manager, sync them to the state property map
        for (final Map.Entry<String, String> maxProp : maxValueProperties.entrySet()) {
            String maxPropKey = maxProp.getKey().toLowerCase();
            String fullyQualifiedMaxPropKey = getStateKey(tableName, maxPropKey);
            if (!statePropertyMap.containsKey(fullyQualifiedMaxPropKey)) {
                String newMaxPropValue;
                // Use a value previously stored under the plain column-name key (a legacy scheme) if one exists,
                // but store the new initial max value under the fully-qualified key.
                if (statePropertyMap.containsKey(maxPropKey)) {
                    newMaxPropValue = statePropertyMap.get(maxPropKey);
                } else {
                    newMaxPropValue = maxProp.getValue();
                }
                statePropertyMap.put(fullyQualifiedMaxPropKey, newMaxPropValue);
            }
        }
        // Build a WHERE clause with maximum-value columns (if they exist), and a list of column names that will contain MAX(<column>) aliases. The
        // executed SQL query will retrieve the count of all records after the filter(s) have been applied, as well as the new maximum values for the
        // specified columns. This allows the processor to generate the correctly partitioned SQL statements as well as to update the state with the
        // latest observed maximum values.
        String whereClause = null;
        List<String> maxValueColumnNameList = StringUtils.isEmpty(maxValueColumnNames) ? new ArrayList<>(0) : Arrays.asList(maxValueColumnNames.split("\\s*,\\s*"));
        List<String> maxValueClauses = new ArrayList<>(maxValueColumnNameList.size());
        String columnsClause = null;
        List<String> maxValueSelectColumns = new ArrayList<>(maxValueColumnNameList.size() + 1);
        maxValueSelectColumns.add("COUNT(*)");
        // For each maximum-value column, get a WHERE filter and a MAX(column) alias
        IntStream.range(0, maxValueColumnNameList.size()).forEach((index) -> {
            String colName = maxValueColumnNameList.get(index);
            maxValueSelectColumns.add("MAX(" + colName + ") " + colName);
            String maxValue = getColumnStateMaxValue(tableName, statePropertyMap, colName);
            if (!StringUtils.isEmpty(maxValue)) {
                if (columnTypeMap.isEmpty() || getColumnType(tableName, colName) == null) {
                    // The column type cache is empty (e.g. after a restart), so re-cache the column types
                    super.setup(context, false, finalFileToProcess);
                }
                Integer type = getColumnType(tableName, colName);
                // Add a condition for the WHERE clause
                maxValueClauses.add(colName + (index == 0 ? " > " : " >= ") + getLiteralByType(type, maxValue, dbAdapter.getName()));
            }
        });
        if (customWhereClause != null) {
            // adding the custom WHERE clause (if defined) to the list of existing clauses.
            maxValueClauses.add("(" + customWhereClause + ")");
        }
        whereClause = StringUtils.join(maxValueClauses, " AND ");
        columnsClause = StringUtils.join(maxValueSelectColumns, ", ");
        // Build a SELECT query with maximum-value columns (if present)
        final String selectQuery = dbAdapter.getSelectStatement(tableName, columnsClause, whereClause, null, null, null);
        long rowCount = 0;
        try (final Connection con = dbcpService.getConnection();
            final Statement st = con.createStatement()) {
            final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).evaluateAttributeExpressions(fileToProcess).asTimePeriod(TimeUnit.SECONDS).intValue();
            // timeout in seconds
            st.setQueryTimeout(queryTimeout);
            logger.debug("Executing {}", new Object[] { selectQuery });
            ResultSet resultSet;
            resultSet = st.executeQuery(selectQuery);
            if (resultSet.next()) {
                // Total row count is in the first column
                rowCount = resultSet.getLong(1);
                // Update the state map with the newly-observed maximum values
                ResultSetMetaData rsmd = resultSet.getMetaData();
                for (int i = 2; i <= rsmd.getColumnCount(); i++) {
                    // Some JDBC drivers consider a column's name and label to be very different things.
                    // Since this column has been aliased, let's check the label first;
                    // if there is no label, we'll use the column name.
                    String resultColumnName = (StringUtils.isNotEmpty(rsmd.getColumnLabel(i)) ? rsmd.getColumnLabel(i) : rsmd.getColumnName(i)).toLowerCase();
                    String fullyQualifiedStateKey = getStateKey(tableName, resultColumnName);
                    String resultColumnCurrentMax = statePropertyMap.get(fullyQualifiedStateKey);
                    if (StringUtils.isEmpty(resultColumnCurrentMax) && !isDynamicTableName) {
                        // If we can't find the value at the fully-qualified key name and the table name is static, it is possible (under a previous scheme)
                        // the value has been stored under a key that is only the column name. Fall back to check the column name; either way, when a new
                        // maximum value is observed, it will be stored under the fully-qualified key from then on.
                        resultColumnCurrentMax = statePropertyMap.get(resultColumnName);
                    }
                    int type = rsmd.getColumnType(i);
                    if (isDynamicTableName) {
                        // We haven't pre-populated the column type map if the table name is dynamic, so do it here
                        columnTypeMap.put(fullyQualifiedStateKey, type);
                    }
                    try {
                        String newMaxValue = getMaxValueFromRow(resultSet, i, type, resultColumnCurrentMax, dbAdapter.getName());
                        if (newMaxValue != null) {
                            statePropertyMap.put(fullyQualifiedStateKey, newMaxValue);
                        }
                    } catch (ParseException | IOException pie) {
                        // Fail the whole thing here before we start creating flow files and such
                        throw new ProcessException(pie);
                    }
                }
            } else {
                // Something is very wrong here, one row (even if count is zero) should be returned
                throw new SQLException("No rows returned from metadata query: " + selectQuery);
            }
            // for each maximum-value column get a right bounding WHERE condition
            IntStream.range(0, maxValueColumnNameList.size()).forEach((index) -> {
                String colName = maxValueColumnNameList.get(index);
                maxValueSelectColumns.add("MAX(" + colName + ") " + colName);
                String maxValue = getColumnStateMaxValue(tableName, statePropertyMap, colName);
                if (!StringUtils.isEmpty(maxValue)) {
                    if (columnTypeMap.isEmpty() || getColumnType(tableName, colName) == null) {
                        // The column type cache is empty (e.g. after a restart), so re-cache the column types
                        super.setup(context, false, finalFileToProcess);
                    }
                    Integer type = getColumnType(tableName, colName);
                    // Add a condition for the WHERE clause
                    maxValueClauses.add(colName + " <= " + getLiteralByType(type, maxValue, dbAdapter.getName()));
                }
            });
            // Update WHERE list to include new right hand boundaries
            whereClause = StringUtils.join(maxValueClauses, " AND ");
            final long numberOfFetches = (partitionSize == 0) ? 1 : (rowCount / partitionSize) + (rowCount % partitionSize == 0 ? 0 : 1);
            // Generate SQL statements to read "pages" of data
            for (long i = 0; i < numberOfFetches; i++) {
                Long limit = partitionSize == 0 ? null : (long) partitionSize;
                Long offset = partitionSize == 0 ? null : i * partitionSize;
                final String maxColumnNames = StringUtils.join(maxValueColumnNameList, ", ");
                final String query = dbAdapter.getSelectStatement(tableName, columnNames, whereClause, maxColumnNames, limit, offset);
                FlowFile sqlFlowFile = (fileToProcess == null) ? session.create() : session.create(fileToProcess);
                sqlFlowFile = session.write(sqlFlowFile, out -> out.write(query.getBytes()));
                sqlFlowFile = session.putAttribute(sqlFlowFile, "generatetablefetch.tableName", tableName);
                if (columnNames != null) {
                    sqlFlowFile = session.putAttribute(sqlFlowFile, "generatetablefetch.columnNames", columnNames);
                }
                if (StringUtils.isNotBlank(whereClause)) {
                    sqlFlowFile = session.putAttribute(sqlFlowFile, "generatetablefetch.whereClause", whereClause);
                }
                if (StringUtils.isNotBlank(maxColumnNames)) {
                    sqlFlowFile = session.putAttribute(sqlFlowFile, "generatetablefetch.maxColumnNames", maxColumnNames);
                }
                sqlFlowFile = session.putAttribute(sqlFlowFile, "generatetablefetch.limit", String.valueOf(limit));
                if (partitionSize != 0) {
                    sqlFlowFile = session.putAttribute(sqlFlowFile, "generatetablefetch.offset", String.valueOf(offset));
                }
                session.transfer(sqlFlowFile, REL_SUCCESS);
            }
            if (fileToProcess != null) {
                session.remove(fileToProcess);
            }
        } catch (SQLException e) {
            if (fileToProcess != null) {
                logger.error("Unable to execute SQL select query {} due to {}, routing {} to failure", new Object[] { selectQuery, e, fileToProcess });
                fileToProcess = session.putAttribute(fileToProcess, "generatetablefetch.sql.error", e.getMessage());
                session.transfer(fileToProcess, REL_FAILURE);
            } else {
                logger.error("Unable to execute SQL select query {} due to {}", new Object[] { selectQuery, e });
                throw new ProcessException(e);
            }
        }
        session.commit();
        try {
            // Update the state
            stateManager.setState(statePropertyMap, Scope.CLUSTER);
        } catch (IOException ioe) {
            logger.error("{} failed to update State Manager, observed maximum values will not be recorded. " + "Also, any generated SQL statements may be duplicated.", new Object[] { this, ioe });
        }
    } catch (final ProcessException pe) {
        // Log the cause of the ProcessException if it is available
        Throwable t = (pe.getCause() == null ? pe : pe.getCause());
        logger.error("Error during processing: {}", new Object[] { t.getMessage() }, t);
        session.rollback();
        context.yield();
    }
}
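
This processor needs a DBCPService controller service before it can run. A hedged sketch of wiring one up in a test follows; DBCPServiceSimpleImpl is a hypothetical stub backed by an embedded database (NiFi's own tests use a similar Derby-backed stub), and the property constants are the ones referenced in the code above.

final TestRunner runner = TestRunners.newTestRunner(GenerateTableFetch.class);
final DBCPService dbcp = new DBCPServiceSimpleImpl(); // hypothetical embedded-DB stub
runner.addControllerService("dbcp", dbcp);
runner.enableControllerService(dbcp);
runner.setProperty(GenerateTableFetch.DBCP_SERVICE, "dbcp");
runner.setProperty(GenerateTableFetch.TABLE_NAME, "TEST_QUERY_DB_TABLE");
runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "id");
runner.setProperty(GenerateTableFetch.PARTITION_SIZE, "1000");

runner.run();
// Each FlowFile routed to REL_SUCCESS carries one paged SELECT statement in its
// content, along with the generatetablefetch.* attributes set in the code above.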
Also used : ProcessSession(org.apache.nifi.processor.ProcessSession) StandardValidators(org.apache.nifi.processor.util.StandardValidators) IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) Connection(java.sql.Connection) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) ValidationContext(org.apache.nifi.components.ValidationContext) HashMap(java.util.HashMap) ComponentLog(org.apache.nifi.logging.ComponentLog) StringUtils(org.apache.commons.lang3.StringUtils) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) SQLException(java.sql.SQLException) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) Scope(org.apache.nifi.components.state.Scope) Relationship(org.apache.nifi.processor.Relationship) ResultSet(java.sql.ResultSet) Map(java.util.Map) Requirement(org.apache.nifi.annotation.behavior.InputRequirement.Requirement) ParseException(java.text.ParseException) TriggerSerially(org.apache.nifi.annotation.behavior.TriggerSerially) ValidationResult(org.apache.nifi.components.ValidationResult) DatabaseAdapter(org.apache.nifi.processors.standard.db.DatabaseAdapter) FlowFile(org.apache.nifi.flowfile.FlowFile) StateManager(org.apache.nifi.components.state.StateManager) Collection(java.util.Collection) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) IOException(java.io.IOException) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) SeeAlso(org.apache.nifi.annotation.documentation.SeeAlso) ProcessSessionFactory(org.apache.nifi.processor.ProcessSessionFactory) StateMap(org.apache.nifi.components.state.StateMap) TimeUnit(java.util.concurrent.TimeUnit) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) Stateful(org.apache.nifi.annotation.behavior.Stateful) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled) List(java.util.List) DynamicProperty(org.apache.nifi.annotation.behavior.DynamicProperty) Statement(java.sql.Statement) Tags(org.apache.nifi.annotation.documentation.Tags) DBCPService(org.apache.nifi.dbcp.DBCPService) Collections(java.util.Collections) ResultSetMetaData(java.sql.ResultSetMetaData)

Example 100 with ProcessSession

use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

the class GetHTTP method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
    final ComponentLog logger = getLogger();
    final ProcessSession session = sessionFactory.createSession();
    final FlowFile incomingFlowFile = session.get();
    if (incomingFlowFile != null) {
        session.transfer(incomingFlowFile, REL_SUCCESS);
        logger.warn("found FlowFile {} in input queue; transferring to success", new Object[] { incomingFlowFile });
    }
    // get the URL
    final String url = context.getProperty(URL).evaluateAttributeExpressions().getValue();
    final URI uri;
    String source = url;
    try {
        uri = new URI(url);
        source = uri.getHost();
    } catch (final URISyntaxException swallow) {
    // this won't happen as the url has already been validated
    }
    // get the ssl context service
    final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
    // create the connection manager
    final HttpClientConnectionManager conMan;
    if (sslContextService == null) {
        conMan = new BasicHttpClientConnectionManager();
    } else {
        final SSLContext sslContext;
        try {
            sslContext = createSSLContext(sslContextService);
        } catch (final Exception e) {
            throw new ProcessException(e);
        }
        final SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(sslContext);
        // Also include a plain socket factory for regular http connections (especially proxies)
        final Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create().register("https", sslsf).register("http", PlainConnectionSocketFactory.getSocketFactory()).build();
        conMan = new BasicHttpClientConnectionManager(socketFactoryRegistry);
    }
    try {
        // build the request configuration
        final RequestConfig.Builder requestConfigBuilder = RequestConfig.custom();
        requestConfigBuilder.setConnectionRequestTimeout(context.getProperty(DATA_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue());
        requestConfigBuilder.setConnectTimeout(context.getProperty(CONNECTION_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue());
        requestConfigBuilder.setSocketTimeout(context.getProperty(DATA_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue());
        requestConfigBuilder.setRedirectsEnabled(context.getProperty(FOLLOW_REDIRECTS).asBoolean());
        switch(context.getProperty(REDIRECT_COOKIE_POLICY).getValue()) {
            case STANDARD_COOKIE_POLICY_STR:
                requestConfigBuilder.setCookieSpec(CookieSpecs.STANDARD);
                break;
            case STRICT_COOKIE_POLICY_STR:
                requestConfigBuilder.setCookieSpec(CookieSpecs.STANDARD_STRICT);
                break;
            case NETSCAPE_COOKIE_POLICY_STR:
                requestConfigBuilder.setCookieSpec(CookieSpecs.NETSCAPE);
                break;
            case IGNORE_COOKIE_POLICY_STR:
                requestConfigBuilder.setCookieSpec(CookieSpecs.IGNORE_COOKIES);
                break;
            case DEFAULT_COOKIE_POLICY_STR:
            default:
                requestConfigBuilder.setCookieSpec(CookieSpecs.DEFAULT);
        }
        // build the http client
        final HttpClientBuilder clientBuilder = HttpClientBuilder.create();
        clientBuilder.setConnectionManager(conMan);
        // include the user agent
        final String userAgent = context.getProperty(USER_AGENT).getValue();
        if (userAgent != null) {
            clientBuilder.setUserAgent(userAgent);
        }
        // set the ssl context if necessary
        if (sslContextService != null) {
            clientBuilder.setSslcontext(sslContextService.createSSLContext(ClientAuth.REQUIRED));
        }
        final String username = context.getProperty(USERNAME).getValue();
        final String password = context.getProperty(PASSWORD).getValue();
        // set the credentials if appropriate
        if (username != null) {
            final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
            if (password == null) {
                credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username));
            } else {
                credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password));
            }
            clientBuilder.setDefaultCredentialsProvider(credentialsProvider);
        }
        // Set the proxy if specified
        if (context.getProperty(PROXY_HOST).isSet() && context.getProperty(PROXY_PORT).isSet()) {
            final String host = context.getProperty(PROXY_HOST).getValue();
            final int port = context.getProperty(PROXY_PORT).asInteger();
            clientBuilder.setProxy(new HttpHost(host, port));
        }
        // create request
        final HttpGet get = new HttpGet(url);
        get.setConfig(requestConfigBuilder.build());
        final StateMap beforeStateMap;
        try {
            beforeStateMap = context.getStateManager().getState(Scope.LOCAL);
            final String lastModified = beforeStateMap.get(LAST_MODIFIED + ":" + url);
            if (lastModified != null) {
                get.addHeader(HEADER_IF_MODIFIED_SINCE, parseStateValue(lastModified).getValue());
            }
            final String etag = beforeStateMap.get(ETAG + ":" + url);
            if (etag != null) {
                get.addHeader(HEADER_IF_NONE_MATCH, parseStateValue(etag).getValue());
            }
        } catch (final IOException ioe) {
            throw new ProcessException(ioe);
        }
        final String accept = context.getProperty(ACCEPT_CONTENT_TYPE).getValue();
        if (accept != null) {
            get.addHeader(HEADER_ACCEPT, accept);
        }
        // Add dynamic headers
        PropertyValue customHeaderValue;
        for (PropertyDescriptor customProperty : customHeaders) {
            customHeaderValue = context.getProperty(customProperty).evaluateAttributeExpressions();
            if (StringUtils.isNotBlank(customHeaderValue.getValue())) {
                get.addHeader(customProperty.getName(), customHeaderValue.getValue());
            }
        }
        // create the http client
        try (final CloseableHttpClient client = clientBuilder.build()) {
            // NOTE: including this inner try in order to swallow exceptions on close
            try {
                final StopWatch stopWatch = new StopWatch(true);
                final HttpResponse response = client.execute(get);
                final int statusCode = response.getStatusLine().getStatusCode();
                if (statusCode == NOT_MODIFIED) {
                    logger.info("content not retrieved because server returned HTTP Status Code {}: Not Modified", new Object[] { NOT_MODIFIED });
                    context.yield();
                    // doing a commit in case there were flow files in the input queue
                    session.commit();
                    return;
                }
                final String statusExplanation = response.getStatusLine().getReasonPhrase();
                if ((statusCode >= 300) || (statusCode == 204)) {
                    logger.error("received status code {}:{} from {}", new Object[] { statusCode, statusExplanation, url });
                    // doing a commit in case there were flow files in the input queue
                    session.commit();
                    return;
                }
                FlowFile flowFile = session.create();
                flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), context.getProperty(FILENAME).evaluateAttributeExpressions().getValue());
                flowFile = session.putAttribute(flowFile, this.getClass().getSimpleName().toLowerCase() + ".remote.source", source);
                flowFile = session.importFrom(response.getEntity().getContent(), flowFile);
                final Header contentTypeHeader = response.getFirstHeader("Content-Type");
                if (contentTypeHeader != null) {
                    final String contentType = contentTypeHeader.getValue();
                    if (!contentType.trim().isEmpty()) {
                        flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), contentType.trim());
                    }
                }
                final long flowFileSize = flowFile.getSize();
                stopWatch.stop();
                final String dataRate = stopWatch.calculateDataRate(flowFileSize);
                session.getProvenanceReporter().receive(flowFile, url, stopWatch.getDuration(TimeUnit.MILLISECONDS));
                session.transfer(flowFile, REL_SUCCESS);
                logger.info("Successfully received {} from {} at a rate of {}; transferred to success", new Object[] { flowFile, url, dataRate });
                session.commit();
                updateStateMap(context, response, beforeStateMap, url);
            } catch (final IOException e) {
                context.yield();
                session.rollback();
                logger.error("Failed to retrieve file from {} due to {}; rolling back session", new Object[] { url, e.getMessage() }, e);
                throw new ProcessException(e);
            } catch (final Throwable t) {
                context.yield();
                session.rollback();
                logger.error("Failed to process due to {}; rolling back session", new Object[] { t.getMessage() }, t);
                throw t;
            }
        } catch (final IOException e) {
            logger.debug("Error closing client due to {}, continuing.", new Object[] { e.getMessage() });
        }
    } finally {
        conMan.shutdown();
    }
}
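
A minimal configuration sketch, assuming nifi-mock and an HTTP endpoint reachable from the test (http://localhost:8080/data.json is a placeholder, not a real service); the property and relationship constants are the static fields referenced in the code above.

final TestRunner runner = TestRunners.newTestRunner(GetHTTP.class);
runner.setProperty(GetHTTP.URL, "http://localhost:8080/data.json"); // placeholder endpoint
runner.setProperty(GetHTTP.FILENAME, "data.json");
runner.setProperty(GetHTTP.CONNECTION_TIMEOUT, "5 sec");

runner.run();
// On a 2xx response the body lands in a FlowFile on REL_SUCCESS with the
// configured filename and, if the server sent one, its Content-Type as mime.type;
// a 304 Not Modified (driven by the ETag/Last-Modified state above) yields nothing.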
Also used : ProcessSession(org.apache.nifi.processor.ProcessSession) BasicCredentialsProvider(org.apache.http.impl.client.BasicCredentialsProvider) HttpGet(org.apache.http.client.methods.HttpGet) StateMap(org.apache.nifi.components.state.StateMap) URISyntaxException(java.net.URISyntaxException) HttpClientBuilder(org.apache.http.impl.client.HttpClientBuilder) URI(java.net.URI) SSLConnectionSocketFactory(org.apache.http.conn.ssl.SSLConnectionSocketFactory) ConnectionSocketFactory(org.apache.http.conn.socket.ConnectionSocketFactory) PlainConnectionSocketFactory(org.apache.http.conn.socket.PlainConnectionSocketFactory) HttpHost(org.apache.http.HttpHost) HttpClientConnectionManager(org.apache.http.conn.HttpClientConnectionManager) BasicHttpClientConnectionManager(org.apache.http.impl.conn.BasicHttpClientConnectionManager) FlowFile(org.apache.nifi.flowfile.FlowFile) RequestConfig(org.apache.http.client.config.RequestConfig) CloseableHttpClient(org.apache.http.impl.client.CloseableHttpClient) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) PropertyValue(org.apache.nifi.components.PropertyValue) HttpResponse(org.apache.http.HttpResponse) SSLContext(javax.net.ssl.SSLContext) CredentialsProvider(org.apache.http.client.CredentialsProvider) IOException(java.io.IOException) ComponentLog(org.apache.nifi.logging.ComponentLog) KeyStoreException(java.security.KeyStoreException) KeyManagementException(java.security.KeyManagementException) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) ProcessException(org.apache.nifi.processor.exception.ProcessException) UnrecoverableKeyException(java.security.UnrecoverableKeyException) CertificateException(java.security.cert.CertificateException) UsernamePasswordCredentials(org.apache.http.auth.UsernamePasswordCredentials) StopWatch(org.apache.nifi.util.StopWatch) Header(org.apache.http.Header) SSLContextService(org.apache.nifi.ssl.SSLContextService)

Aggregations

ProcessSession (org.apache.nifi.processor.ProcessSession)129 FlowFile (org.apache.nifi.flowfile.FlowFile)96 ProcessContext (org.apache.nifi.processor.ProcessContext)55 IOException (java.io.IOException)54 ProcessException (org.apache.nifi.processor.exception.ProcessException)51 Test (org.junit.Test)47 Relationship (org.apache.nifi.processor.Relationship)45 List (java.util.List)42 ArrayList (java.util.ArrayList)41 Map (java.util.Map)39 PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor)39 ComponentLog (org.apache.nifi.logging.ComponentLog)39 HashSet (java.util.HashSet)38 Set (java.util.Set)38 HashMap (java.util.HashMap)35 Collections (java.util.Collections)33 CapabilityDescription (org.apache.nifi.annotation.documentation.CapabilityDescription)33 Tags (org.apache.nifi.annotation.documentation.Tags)33 InputRequirement (org.apache.nifi.annotation.behavior.InputRequirement)31 MockFlowFile (org.apache.nifi.util.MockFlowFile)31