Example 21 with ProcessContext

Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

From the class ListDatabaseTables, method onTrigger:

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    final String catalog = context.getProperty(CATALOG).getValue();
    final String schemaPattern = context.getProperty(SCHEMA_PATTERN).getValue();
    final String tableNamePattern = context.getProperty(TABLE_NAME_PATTERN).getValue();
    final String[] tableTypes = context.getProperty(TABLE_TYPES).isSet() ? context.getProperty(TABLE_TYPES).getValue().split("\\s*,\\s*") : null;
    final boolean includeCount = context.getProperty(INCLUDE_COUNT).asBoolean();
    final long refreshInterval = context.getProperty(REFRESH_INTERVAL).asTimePeriod(TimeUnit.MILLISECONDS);
    final StateManager stateManager = context.getStateManager();
    final StateMap stateMap;
    final Map<String, String> stateMapProperties;
    try {
        stateMap = stateManager.getState(Scope.CLUSTER);
        stateMapProperties = new HashMap<>(stateMap.toMap());
    } catch (IOException ioe) {
        throw new ProcessException(ioe);
    }
    try (final Connection con = dbcpService.getConnection()) {
        DatabaseMetaData dbMetaData = con.getMetaData();
        ResultSet rs = dbMetaData.getTables(catalog, schemaPattern, tableNamePattern, tableTypes);
        while (rs.next()) {
            final String tableCatalog = rs.getString(1);
            final String tableSchema = rs.getString(2);
            final String tableName = rs.getString(3);
            final String tableType = rs.getString(4);
            final String tableRemarks = rs.getString(5);
            // Build fully-qualified name
            String fqn = Stream.of(tableCatalog, tableSchema, tableName).filter(segment -> !StringUtils.isEmpty(segment)).collect(Collectors.joining("."));
            String lastTimestampForTable = stateMapProperties.get(fqn);
            boolean refreshTable = true;
            try {
                // Refresh state if the interval has elapsed
                long lastRefreshed = -1;
                final long currentTime = System.currentTimeMillis();
                if (!StringUtils.isEmpty(lastTimestampForTable)) {
                    lastRefreshed = Long.parseLong(lastTimestampForTable);
                }
                if (lastRefreshed == -1 || (refreshInterval > 0 && currentTime >= (lastRefreshed + refreshInterval))) {
                    // Remove by key (the fqn), not by the timestamp value, so the table will be re-listed
                    stateMapProperties.remove(fqn);
                } else {
                    refreshTable = false;
                }
            } catch (final NumberFormatException nfe) {
                getLogger().error("Failed to retrieve observed last table fetches from the State Manager. Will not perform " + "query until this is accomplished.", nfe);
                context.yield();
                return;
            }
            if (refreshTable) {
                FlowFile flowFile = session.create();
                logger.info("Found {}: {}", new Object[] { tableType, fqn });
                if (includeCount) {
                    try (Statement st = con.createStatement()) {
                        final String countQuery = "SELECT COUNT(1) FROM " + fqn;
                        logger.debug("Executing query: {}", new Object[] { countQuery });
                        ResultSet countResult = st.executeQuery(countQuery);
                        if (countResult.next()) {
                            flowFile = session.putAttribute(flowFile, DB_TABLE_COUNT, Long.toString(countResult.getLong(1)));
                        }
                    } catch (SQLException se) {
                        logger.error("Couldn't get row count for {}", new Object[] { fqn });
                        session.remove(flowFile);
                        continue;
                    }
                }
                if (tableCatalog != null) {
                    flowFile = session.putAttribute(flowFile, DB_TABLE_CATALOG, tableCatalog);
                }
                if (tableSchema != null) {
                    flowFile = session.putAttribute(flowFile, DB_TABLE_SCHEMA, tableSchema);
                }
                flowFile = session.putAttribute(flowFile, DB_TABLE_NAME, tableName);
                flowFile = session.putAttribute(flowFile, DB_TABLE_FULLNAME, fqn);
                flowFile = session.putAttribute(flowFile, DB_TABLE_TYPE, tableType);
                if (tableRemarks != null) {
                    flowFile = session.putAttribute(flowFile, DB_TABLE_REMARKS, tableRemarks);
                }
                String transitUri;
                try {
                    transitUri = dbMetaData.getURL();
                } catch (SQLException sqle) {
                    transitUri = "<unknown>";
                }
                session.getProvenanceReporter().receive(flowFile, transitUri);
                session.transfer(flowFile, REL_SUCCESS);
                stateMapProperties.put(fqn, Long.toString(System.currentTimeMillis()));
            }
        }
        // Update the timestamps for listed tables
        if (stateMap.getVersion() == -1) {
            stateManager.setState(stateMapProperties, Scope.CLUSTER);
        } else {
            stateManager.replace(stateMap, stateMapProperties, Scope.CLUSTER);
        }
    } catch (final SQLException | IOException e) {
        throw new ProcessException(e);
    }
}
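A quick way to see this onTrigger in action is NiFi's TestRunner harness. The sketch below is illustrative rather than taken from the NiFi test suite: the SimpleDBCPService stub, the in-memory H2 JDBC URL, and the assumption that DBCP_SERVICE, INCLUDE_COUNT, and REL_SUCCESS are public constants on the processor (the usual NiFi convention) are all mine.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processors.standard.ListDatabaseTables;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

public class ListDatabaseTablesSketch {

    // Stub service handing out connections to an in-memory database (assumes an H2 driver on the classpath)
    static class SimpleDBCPService extends AbstractControllerService implements DBCPService {
        @Override
        public String getIdentifier() {
            return "dbcp";
        }

        @Override
        public Connection getConnection() throws ProcessException {
            try {
                return DriverManager.getConnection("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1");
            } catch (SQLException e) {
                throw new ProcessException(e);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(ListDatabaseTables.class);
        final DBCPService dbcp = new SimpleDBCPService();
        runner.addControllerService("dbcp", dbcp);
        runner.enableControllerService(dbcp);
        runner.setProperty(ListDatabaseTables.DBCP_SERVICE, "dbcp");
        runner.setProperty(ListDatabaseTables.INCLUDE_COUNT, "true");
        runner.run();
        // One FlowFile per listed table; attribute name inferred from DB_TABLE_FULLNAME used above
        runner.getFlowFilesForRelationship(ListDatabaseTables.REL_SUCCESS)
                .forEach(ff -> System.out.println(ff.getAttribute("db.table.fullname")));
    }
}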

Example 22 with ProcessContext

Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

From the class MergeRecord, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
    RecordBinManager manager = binManager.get();
    while (manager == null) {
        manager = new RecordBinManager(context, sessionFactory, getLogger());
        manager.setMaxBinAge(context.getProperty(MAX_BIN_AGE).asTimePeriod(TimeUnit.NANOSECONDS), TimeUnit.NANOSECONDS);
        final boolean updated = binManager.compareAndSet(null, manager);
        if (!updated) {
            manager = binManager.get();
        }
    }
    final ProcessSession session = sessionFactory.createSession();
    final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(250, DataUnit.KB, 250));
    if (getLogger().isDebugEnabled()) {
        final List<String> ids = flowFiles.stream().map(ff -> "id=" + ff.getId()).collect(Collectors.toList());
        getLogger().debug("Pulled {} FlowFiles from queue: {}", new Object[] { ids.size(), ids });
    }
    final String mergeStrategy = context.getProperty(MERGE_STRATEGY).getValue();
    final boolean block;
    if (MERGE_STRATEGY_DEFRAGMENT.equals(mergeStrategy)) {
        block = true;
    } else if (context.getProperty(CORRELATION_ATTRIBUTE_NAME).isSet()) {
        block = true;
    } else {
        block = false;
    }
    try {
        for (final FlowFile flowFile : flowFiles) {
            try {
                binFlowFile(context, flowFile, session, manager, block);
            } catch (final Exception e) {
                getLogger().error("Failed to bin {} due to {}", new Object[] { flowFile, e });
                session.transfer(flowFile, REL_FAILURE);
            }
        }
    } finally {
        session.commit();
    }
    try {
        manager.completeExpiredBins();
    } catch (final Exception e) {
        getLogger().error("Failed to merge FlowFiles to create new bin due to " + e, e);
    }
    if (flowFiles.isEmpty()) {
        getLogger().debug("No FlowFiles to bin; will yield");
        context.yield();
    }
}
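The while/compareAndSet loop at the top is a lock-free lazy initialization of the shared RecordBinManager: any thread may construct a candidate, but only the first compareAndSet wins and every other thread adopts the winner's instance. The loop, rather than a single attempt, matters because the reference can presumably return to null when the processor is stopped. A standalone sketch of the pattern, with ExpensiveResource standing in for RecordBinManager:

import java.util.concurrent.atomic.AtomicReference;

public class CasLazyInit {

    static class ExpensiveResource { }  // placeholder for RecordBinManager

    private final AtomicReference<ExpensiveResource> ref = new AtomicReference<>();

    ExpensiveResource get() {
        ExpensiveResource r = ref.get();
        while (r == null) {
            final ExpensiveResource candidate = new ExpensiveResource();
            if (ref.compareAndSet(null, candidate)) {
                r = candidate;            // this thread's candidate was installed
            } else {
                r = ref.get();            // another thread won; use its instance
            }
        }
        return r;
    }

    public static void main(String[] args) {
        final CasLazyInit holder = new CasLazyInit();
        System.out.println(holder.get() == holder.get());  // true: one shared instance
    }
}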

Example 23 with ProcessContext

Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

From the class Notify, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final PropertyValue signalIdProperty = context.getProperty(RELEASE_SIGNAL_IDENTIFIER);
    final PropertyValue counterNameProperty = context.getProperty(SIGNAL_COUNTER_NAME);
    final PropertyValue deltaProperty = context.getProperty(SIGNAL_COUNTER_DELTA);
    final String attributeCacheRegex = context.getProperty(ATTRIBUTE_CACHE_REGEX).getValue();
    final Integer bufferCount = context.getProperty(SIGNAL_BUFFER_COUNT).asInteger();
    // the cache client used to interact with the distributed cache.
    final AtomicDistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(AtomicDistributedMapCacheClient.class);
    final WaitNotifyProtocol protocol = new WaitNotifyProtocol(cache);
    final Map<String, SignalBuffer> signalBuffers = new HashMap<>();
    for (int i = 0; i < bufferCount; i++) {
        final FlowFile flowFile = session.get();
        if (flowFile == null) {
            break;
        }
        // Signal id is computed from attribute 'RELEASE_SIGNAL_IDENTIFIER' with expression language support
        final String signalId = signalIdProperty.evaluateAttributeExpressions(flowFile).getValue();
        // if the computed value is null, or empty, we transfer the flow file to failure relationship
        if (StringUtils.isBlank(signalId)) {
            logger.error("FlowFile {} has no attribute for given Release Signal Identifier", new Object[] { flowFile });
            // set 'notified' attribute
            session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
            continue;
        }
        String counterName = counterNameProperty.evaluateAttributeExpressions(flowFile).getValue();
        if (StringUtils.isEmpty(counterName)) {
            counterName = WaitNotifyProtocol.DEFAULT_COUNT_NAME;
        }
        int delta = 1;
        if (deltaProperty.isSet()) {
            final String deltaStr = deltaProperty.evaluateAttributeExpressions(flowFile).getValue();
            try {
                delta = Integer.parseInt(deltaStr);
            } catch (final NumberFormatException e) {
                logger.error("Failed to calculate delta for FlowFile {} due to {}", new Object[] { flowFile, e }, e);
                session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
                continue;
            }
        }
        final SignalBuffer signalBuffer = signalBuffers.computeIfAbsent(signalId, id -> new SignalBuffer());
        if (StringUtils.isNotEmpty(attributeCacheRegex)) {
            flowFile.getAttributes().entrySet().stream().filter(e -> (!e.getKey().equals("uuid") && e.getKey().matches(attributeCacheRegex))).forEach(e -> signalBuffer.attributesToCache.put(e.getKey(), e.getValue()));
        }
        signalBuffer.incrementDelta(counterName, delta);
        signalBuffer.flowFiles.add(flowFile);
        if (logger.isDebugEnabled()) {
            logger.debug("Cached release signal identifier {} counterName {} from FlowFile {}", new Object[] { signalId, counterName, flowFile });
        }
    }
    signalBuffers.forEach((signalId, signalBuffer) -> {
        // On cache failure, propagate the exception so the session rolls back and the processor can retry after yielding for a while.
        try {
            protocol.notify(signalId, signalBuffer.deltas, signalBuffer.attributesToCache);
            signalBuffer.flowFiles.forEach(flowFile -> session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(true)), REL_SUCCESS));
        } catch (IOException e) {
            throw new RuntimeException(String.format("Unable to communicate with cache when processing %s due to %s", signalId, e), e);
        }
    });
}
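Worth noting is the two-phase shape of this method: FlowFiles are first folded into per-signal SignalBuffers, so each release signal costs one protocol.notify round-trip no matter how many FlowFiles reference it. A toy illustration of the accumulation step (class and method names here are illustrative, not the NiFi API):

import java.util.HashMap;
import java.util.Map;

public class SignalBatchingSketch {

    // Simplified stand-in for Notify's SignalBuffer: counter name -> accumulated delta
    static class Buffer {
        final Map<String, Integer> deltas = new HashMap<>();

        void increment(String counterName, int delta) {
            deltas.merge(counterName, delta, Integer::sum);
        }
    }

    public static void main(String[] args) {
        final Map<String, Buffer> buffers = new HashMap<>();

        // Three "FlowFiles" touching two signals collapse into two buffered updates
        buffers.computeIfAbsent("signal-a", id -> new Buffer()).increment("counter", 1);
        buffers.computeIfAbsent("signal-a", id -> new Buffer()).increment("counter", 2);
        buffers.computeIfAbsent("signal-b", id -> new Buffer()).increment("counter", 5);

        // One cache round-trip per signal id, mirroring the forEach over signalBuffers above
        buffers.forEach((id, buf) -> System.out.println(id + " -> " + buf.deltas));
    }
}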

Example 24 with ProcessContext

Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

From the class ListenHTTPServlet, method doPost:

@Override
protected void doPost(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
    final ProcessContext context = processContext;
    ProcessSessionFactory sessionFactory;
    do {
        sessionFactory = sessionFactoryHolder.get();
        if (sessionFactory == null) {
            try {
                Thread.sleep(10);
            } catch (final InterruptedException e) {
                // ignore and keep polling until a session factory becomes available
            }
        }
    } while (sessionFactory == null);
    final ProcessSession session = sessionFactory.createSession();
    FlowFile flowFile = null;
    String holdUuid = null;
    String foundSubject = null;
    try {
        final long n = filesReceived.getAndIncrement() % FILES_BEFORE_CHECKING_DESTINATION_SPACE;
        if (n == 0 || !spaceAvailable.get()) {
            if (context.getAvailableRelationships().isEmpty()) {
                spaceAvailable.set(false);
                if (logger.isDebugEnabled()) {
                    logger.debug("Received request from " + request.getRemoteHost() + " but no space available; Indicating Service Unavailable");
                }
                response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
                return;
            } else {
                spaceAvailable.set(true);
            }
        }
        response.setHeader("Content-Type", MediaType.TEXT_PLAIN);
        final boolean contentGzipped = Boolean.parseBoolean(request.getHeader(GZIPPED_HEADER));
        final X509Certificate[] certs = (X509Certificate[]) request.getAttribute("javax.servlet.request.X509Certificate");
        foundSubject = DEFAULT_FOUND_SUBJECT;
        if (certs != null && certs.length > 0) {
            for (final X509Certificate cert : certs) {
                foundSubject = cert.getSubjectDN().getName();
                if (authorizedPattern.matcher(foundSubject).matches()) {
                    break;
                } else {
                    logger.warn("Rejecting transfer attempt from " + foundSubject + " because the DN is not authorized, host=" + request.getRemoteHost());
                    response.sendError(HttpServletResponse.SC_FORBIDDEN, "not allowed based on dn");
                    return;
                }
            }
        }
        final String destinationVersion = request.getHeader(PROTOCOL_VERSION_HEADER);
        Integer protocolVersion = null;
        if (destinationVersion != null) {
            try {
                protocolVersion = Integer.valueOf(destinationVersion);
            } catch (final NumberFormatException e) {
                // Value was invalid. Treat as if the header were missing.
            }
        }
        final boolean destinationIsLegacyNiFi = (protocolVersion == null);
        final boolean createHold = Boolean.parseBoolean(request.getHeader(FLOWFILE_CONFIRMATION_HEADER));
        final String contentType = request.getContentType();
        final InputStream unthrottled = contentGzipped ? new GZIPInputStream(request.getInputStream()) : request.getInputStream();
        final InputStream in = (streamThrottler == null) ? unthrottled : streamThrottler.newThrottledInputStream(unthrottled);
        if (logger.isDebugEnabled()) {
            logger.debug("Received request from " + request.getRemoteHost() + ", createHold=" + createHold + ", content-type=" + contentType + ", gzip=" + contentGzipped);
        }
        final AtomicBoolean hasMoreData = new AtomicBoolean(false);
        final FlowFileUnpackager unpackager;
        if (APPLICATION_FLOW_FILE_V3.equals(contentType)) {
            unpackager = new FlowFileUnpackagerV3();
        } else if (APPLICATION_FLOW_FILE_V2.equals(contentType)) {
            unpackager = new FlowFileUnpackagerV2();
        } else if (APPLICATION_FLOW_FILE_V1.equals(contentType)) {
            unpackager = new FlowFileUnpackagerV1();
        } else {
            unpackager = null;
        }
        final Set<FlowFile> flowFileSet = new HashSet<>();
        do {
            final long startNanos = System.nanoTime();
            final Map<String, String> attributes = new HashMap<>();
            flowFile = session.create();
            flowFile = session.write(flowFile, new OutputStreamCallback() {

                @Override
                public void process(final OutputStream rawOut) throws IOException {
                    try (final BufferedOutputStream bos = new BufferedOutputStream(rawOut, 65536)) {
                        if (unpackager == null) {
                            IOUtils.copy(in, bos);
                            hasMoreData.set(false);
                        } else {
                            attributes.putAll(unpackager.unpackageFlowFile(in, bos));
                            if (destinationIsLegacyNiFi) {
                                if (attributes.containsKey("nf.file.name")) {
                                    // for backward compatibility with old nifi...
                                    attributes.put(CoreAttributes.FILENAME.key(), attributes.remove("nf.file.name"));
                                }
                                if (attributes.containsKey("nf.file.path")) {
                                    attributes.put(CoreAttributes.PATH.key(), attributes.remove("nf.file.path"));
                                }
                            }
                            hasMoreData.set(unpackager.hasMoreData());
                        }
                    }
                }
            });
            final long transferNanos = System.nanoTime() - startNanos;
            final long transferMillis = TimeUnit.MILLISECONDS.convert(transferNanos, TimeUnit.NANOSECONDS);
            // put metadata on flowfile
            final String nameVal = request.getHeader(CoreAttributes.FILENAME.key());
            if (StringUtils.isNotBlank(nameVal)) {
                attributes.put(CoreAttributes.FILENAME.key(), nameVal);
            }
            // put arbitrary headers on flow file
            for (Enumeration<String> headerEnum = request.getHeaderNames(); headerEnum.hasMoreElements(); ) {
                String headerName = headerEnum.nextElement();
                if (headerPattern != null && headerPattern.matcher(headerName).matches()) {
                    String headerValue = request.getHeader(headerName);
                    attributes.put(headerName, headerValue);
                }
            }
            String sourceSystemFlowFileIdentifier = attributes.get(CoreAttributes.UUID.key());
            if (sourceSystemFlowFileIdentifier != null) {
                sourceSystemFlowFileIdentifier = "urn:nifi:" + sourceSystemFlowFileIdentifier;
                // If we received a UUID, we want to give the FlowFile a new UUID and register the sending system's
                // identifier as the SourceSystemFlowFileIdentifier field in the Provenance RECEIVE event
                attributes.put(CoreAttributes.UUID.key(), UUID.randomUUID().toString());
            }
            flowFile = session.putAllAttributes(flowFile, attributes);
            session.getProvenanceReporter().receive(flowFile, request.getRequestURL().toString(), sourceSystemFlowFileIdentifier, "Remote DN=" + foundSubject, transferMillis);
            flowFile = session.putAttribute(flowFile, "restlistener.remote.source.host", request.getRemoteHost());
            flowFile = session.putAttribute(flowFile, "restlistener.request.uri", request.getRequestURI());
            flowFile = session.putAttribute(flowFile, "restlistener.remote.user.dn", foundSubject);
            flowFileSet.add(flowFile);
            if (holdUuid == null) {
                holdUuid = flowFile.getAttribute(CoreAttributes.UUID.key());
            }
        } while (hasMoreData.get());
        if (createHold) {
            String uuid = (holdUuid == null) ? UUID.randomUUID().toString() : holdUuid;
            if (flowFileMap.containsKey(uuid)) {
                uuid = UUID.randomUUID().toString();
            }
            final FlowFileEntryTimeWrapper wrapper = new FlowFileEntryTimeWrapper(session, flowFileSet, System.currentTimeMillis(), request.getRemoteHost());
            FlowFileEntryTimeWrapper previousWrapper;
            do {
                previousWrapper = flowFileMap.putIfAbsent(uuid, wrapper);
                if (previousWrapper != null) {
                    uuid = UUID.randomUUID().toString();
                }
            } while (previousWrapper != null);
            response.setStatus(HttpServletResponse.SC_SEE_OTHER);
            final String ackUri = "/" + basePath + "/holds/" + uuid;
            response.addHeader(LOCATION_HEADER_NAME, ackUri);
            response.addHeader(LOCATION_URI_INTENT_NAME, LOCATION_URI_INTENT_VALUE);
            response.getOutputStream().write(ackUri.getBytes("UTF-8"));
            if (logger.isDebugEnabled()) {
                logger.debug("Ingested {} from Remote Host: [{}] Port [{}] SubjectDN [{}]; placed hold on these {} files with ID {}", new Object[] { flowFileSet, request.getRemoteHost(), request.getRemotePort(), foundSubject, flowFileSet.size(), uuid });
            }
        } else {
            response.setStatus(this.returnCode);
            logger.info("Received from Remote Host: [{}] Port [{}] SubjectDN [{}]; transferring to 'success' {}", new Object[] { request.getRemoteHost(), request.getRemotePort(), foundSubject, flowFile });
            session.transfer(flowFileSet, ListenHTTP.RELATIONSHIP_SUCCESS);
            session.commit();
        }
    } catch (final Throwable t) {
        session.rollback();
        if (flowFile == null) {
            logger.error("Unable to receive file from Remote Host: [{}] SubjectDN [{}] due to {}", new Object[] { request.getRemoteHost(), foundSubject, t });
        } else {
            logger.error("Unable to receive file {} from Remote Host: [{}] SubjectDN [{}] due to {}", new Object[] { flowFile, request.getRemoteHost(), foundSubject, t });
        }
        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.toString());
    }
}
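From the client side, a plain HTTP POST is enough to exercise the simplest path through doPost (no FlowFile packaging, no hold). A hypothetical client sketch follows; the host, port, and contentListener base path are assumptions about how ListenHTTP was configured, while the filename header corresponds to the CoreAttributes.FILENAME lookup above:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class PostToListenHttpSketch {

    public static void main(String[] args) throws Exception {
        // Assumed endpoint: ListenHTTP configured with port 8081 and base path "contentListener"
        final URL url = new URL("http://localhost:8081/contentListener");
        final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "text/plain");
        // Picked up by doPost via request.getHeader(CoreAttributes.FILENAME.key())
        conn.setRequestProperty("filename", "hello.txt");

        try (OutputStream out = conn.getOutputStream()) {
            out.write("hello nifi".getBytes(StandardCharsets.UTF_8));
        }
        // With no confirmation (hold) requested, the servlet commits the session immediately
        // and responds with its configured return code
        System.out.println("HTTP status: " + conn.getResponseCode());
    }
}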

Example 25 with ProcessContext

Use of org.apache.nifi.processor.ProcessContext in project nifi by apache.

From the class ITListenAndPutSyslog, method run:

/**
 * Sends numMessages from PutSyslog to ListenSyslog.
 */
private void run(String protocol, int numMessages, int expectedMessages) throws IOException, InterruptedException {
    // set the same protocol on both processors
    putSyslogRunner.setProperty(PutSyslog.PROTOCOL, protocol);
    listenSyslogRunner.setProperty(ListenSyslog.PROTOCOL, protocol);
    // set a listening port of 0 to get a random available port
    listenSyslogRunner.setProperty(ListenSyslog.PORT, "0");
    // call onScheduled to start ListenSyslog listening
    final ProcessSessionFactory processSessionFactory = listenSyslogRunner.getProcessSessionFactory();
    final ProcessContext context = listenSyslogRunner.getProcessContext();
    listenSyslog.onScheduled(context);
    // get the real port it is listening on and set that in PutSyslog
    final int listeningPort = listenSyslog.getPort();
    putSyslogRunner.setProperty(PutSyslog.PORT, String.valueOf(listeningPort));
    // configure the message properties on PutSyslog
    final String pri = "34";
    final String version = "1";
    final String stamp = "2016-02-05T22:14:15.003Z";
    final String host = "localhost";
    final String body = "some message";
    final String expectedMessage = "<" + pri + ">" + version + " " + stamp + " " + host + " " + body;
    putSyslogRunner.setProperty(PutSyslog.MSG_PRIORITY, pri);
    putSyslogRunner.setProperty(PutSyslog.MSG_VERSION, version);
    putSyslogRunner.setProperty(PutSyslog.MSG_TIMESTAMP, stamp);
    putSyslogRunner.setProperty(PutSyslog.MSG_HOSTNAME, host);
    putSyslogRunner.setProperty(PutSyslog.MSG_BODY, body);
    // send the messages
    for (int i = 0; i < numMessages; i++) {
        putSyslogRunner.enqueue("incoming data".getBytes(Charset.forName("UTF-8")));
    }
    putSyslogRunner.run(numMessages, false);
    // trigger ListenSyslog until we've seen all the messages
    int numTransferred = 0;
    long timeout = System.currentTimeMillis() + 30000;
    while (numTransferred < expectedMessages && System.currentTimeMillis() < timeout) {
        Thread.sleep(10);
        listenSyslog.onTrigger(context, processSessionFactory);
        numTransferred = listenSyslogRunner.getFlowFilesForRelationship(ListenSyslog.REL_SUCCESS).size();
    }
    Assert.assertEquals("Did not process all the messages", expectedMessages, numTransferred);
    if (expectedMessages > 0) {
        // check that one of flow files has the expected content
        MockFlowFile mockFlowFile = listenSyslogRunner.getFlowFilesForRelationship(ListenSyslog.REL_SUCCESS).get(0);
        mockFlowFile.assertContentEquals(expectedMessage);
    }
}
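The while loop that drives listenSyslog.onTrigger is a general poll-until-count-or-timeout idiom. A minimal standalone sketch of it, with illustrative names:

import java.util.function.IntSupplier;

public class PollUntilSketch {

    // Poll counter.getAsInt() every 10 ms until it reaches expected or the timeout elapses
    static int awaitCount(IntSupplier counter, int expected, long timeoutMillis) throws InterruptedException {
        final long deadline = System.currentTimeMillis() + timeoutMillis;
        int seen = counter.getAsInt();
        while (seen < expected && System.currentTimeMillis() < deadline) {
            Thread.sleep(10);
            seen = counter.getAsInt();
        }
        return seen;
    }

    public static void main(String[] args) throws InterruptedException {
        final long start = System.currentTimeMillis();
        // Toy counter that "delivers" three messages after roughly 100 ms
        final int seen = awaitCount(() -> System.currentTimeMillis() - start > 100 ? 3 : 0, 3, 1000);
        System.out.println("saw " + seen + " messages");
    }
}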
