
Example 36 with ProcessSession

Use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

The class ExecuteScript, method onTrigger.

/**
 * Evaluates the given script body (or file) using the current session, context, and flowfile. The script
 * evaluation expects a FlowFile to be returned, in which case it will route the FlowFile to success. If a script
 * error occurs, the original FlowFile will be routed to failure. If the script succeeds but does not return a
 * FlowFile, the original FlowFile will be routed to no-flowfile
 *
 * @param context        the current process context
 * @param sessionFactory provides access to a {@link ProcessSessionFactory}, which
 *                       can be used for accessing FlowFiles, etc.
 * @throws ProcessException if the scripted processor's onTrigger() method throws an exception
 */
@Override
public void onTrigger(ProcessContext context, ProcessSessionFactory sessionFactory) throws ProcessException {
    synchronized (scriptingComponentHelper.isInitialized) {
        if (!scriptingComponentHelper.isInitialized.get()) {
            scriptingComponentHelper.createResources();
        }
    }
    ScriptEngine scriptEngine = scriptingComponentHelper.engineQ.poll();
    ComponentLog log = getLogger();
    if (scriptEngine == null) {
        // No engine available so nothing more to do here
        return;
    }
    ProcessSession session = sessionFactory.createSession();
    try {
        try {
            Bindings bindings = scriptEngine.getBindings(ScriptContext.ENGINE_SCOPE);
            if (bindings == null) {
                bindings = new SimpleBindings();
            }
            bindings.put("session", session);
            bindings.put("context", context);
            bindings.put("log", log);
            bindings.put("REL_SUCCESS", REL_SUCCESS);
            bindings.put("REL_FAILURE", REL_FAILURE);
            // Find the user-added properties and set them on the script
            for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) {
                if (property.getKey().isDynamic()) {
                    // Add the dynamic property bound to its full PropertyValue to the script engine
                    if (property.getValue() != null) {
                        bindings.put(property.getKey().getName(), context.getProperty(property.getKey()));
                    }
                }
            }
            scriptEngine.setBindings(bindings, ScriptContext.ENGINE_SCOPE);
            // Execute any engine-specific configuration before the script is evaluated
            ScriptEngineConfigurator configurator = scriptingComponentHelper.scriptEngineConfiguratorMap.get(scriptingComponentHelper.getScriptEngineName().toLowerCase());
            // Evaluate the script with the configurator (if it exists) or the engine
            if (configurator != null) {
                configurator.eval(scriptEngine, scriptToRun, scriptingComponentHelper.getModules());
            } else {
                scriptEngine.eval(scriptToRun);
            }
            // Commit this session for the user. This plus the outermost catch statement mimics the behavior
            // of AbstractProcessor. This class doesn't extend AbstractProcessor in order to share a base
            // class with InvokeScriptedProcessor
            session.commit();
        } catch (ScriptException e) {
            throw new ProcessException(e);
        }
    } catch (final Throwable t) {
        // Mimic AbstractProcessor behavior here
        getLogger().error("{} failed to process due to {}; rolling back session", new Object[] { this, t });
        session.rollback(true);
        throw t;
    } finally {
        scriptingComponentHelper.engineQ.offer(scriptEngine);
    }
}
Also used : ProcessSession(org.apache.nifi.processor.ProcessSession) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ComponentLog(org.apache.nifi.logging.ComponentLog) Bindings(javax.script.Bindings) SimpleBindings(javax.script.SimpleBindings) ScriptEngine(javax.script.ScriptEngine) ScriptException(javax.script.ScriptException) ProcessException(org.apache.nifi.processor.exception.ProcessException) SimpleBindings(javax.script.SimpleBindings) Map(java.util.Map)
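
A hedged usage sketch, not taken from the project: the onTrigger above can be exercised through the nifi-mock TestRunner, which supplies the ProcessSessionFactory and ProcessContext. The property names "Script Engine" and "Script Body", and the availability of a Groovy engine on the classpath, are assumptions here.

import java.nio.charset.StandardCharsets;

import org.apache.nifi.processors.script.ExecuteScript;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

@Test
public void testScriptBodyRoutesToSuccess() {
    final TestRunner runner = TestRunners.newTestRunner(new ExecuteScript());
    // "Script Engine" and "Script Body" are assumed property names; a Groovy engine must be available.
    runner.setProperty("Script Engine", "Groovy");
    // The script sees the bindings installed in onTrigger above: session, context, log, REL_SUCCESS, REL_FAILURE.
    runner.setProperty("Script Body",
            "def flowFile = session.get()\n"
            + "if (flowFile != null) { session.transfer(flowFile, REL_SUCCESS) }");
    runner.enqueue("hello".getBytes(StandardCharsets.UTF_8));
    runner.run();
    runner.assertAllFlowFilesTransferred("success", 1);
}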

Example 37 with ProcessSession

Use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

The class ContentAcknowledgmentServlet, method doDelete.

@Override
protected void doDelete(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
    final X509Certificate[] certs = (X509Certificate[]) request.getAttribute("javax.servlet.request.X509Certificate");
    String foundSubject = DEFAULT_FOUND_SUBJECT;
    if (certs != null && certs.length > 0) {
        for (final X509Certificate cert : certs) {
            foundSubject = cert.getSubjectDN().getName();
            if (authorizedPattern.matcher(foundSubject).matches()) {
                break;
            } else {
                logger.warn(processor + " rejecting transfer attempt from " + foundSubject + " because the DN is not authorized");
                response.sendError(HttpServletResponse.SC_FORBIDDEN, "not allowed based on dn");
                return;
            }
        }
    }
    final String uri = request.getRequestURI();
    final int slashIndex = uri.lastIndexOf("/");
    int questionIndex = uri.indexOf("?");
    if (questionIndex < 0) {
        questionIndex = uri.length();
    }
    final String uuid = uri.substring(slashIndex + 1, questionIndex);
    final FlowFileEntryTimeWrapper timeWrapper = flowFileMap.remove(uuid);
    if (timeWrapper == null) {
        logger.warn("received DELETE for HOLD with ID " + uuid + " from Remote Host: [" + request.getRemoteHost() + "] Port [" + request.getRemotePort() + "] SubjectDN [" + foundSubject + "], but no HOLD exists with that ID; sending response with Status Code 404");
        response.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }
    try {
        final Set<FlowFile> flowFiles = timeWrapper.getFlowFiles();
        final long transferTime = System.currentTimeMillis() - timeWrapper.getEntryTime();
        long totalFlowFileSize = 0;
        for (final FlowFile flowFile : flowFiles) {
            totalFlowFileSize += flowFile.getSize();
        }
        double seconds = (double) transferTime / 1000D;
        if (seconds <= 0D) {
            seconds = .00000001D;
        }
        final double bytesPerSecond = ((double) totalFlowFileSize / seconds);
        final String transferRate = FormatUtils.formatDataSize(bytesPerSecond) + "/sec";
        logger.info("received {} files/{} bytes from Remote Host: [{}] Port [{}] SubjectDN [{}] in {} milliseconds at a rate of {}; " + "transferring to 'success': {}", new Object[] { flowFiles.size(), totalFlowFileSize, request.getRemoteHost(), request.getRemotePort(), foundSubject, transferTime, transferRate, flowFiles });
        final ProcessSession session = timeWrapper.getSession();
        session.transfer(flowFiles, ListenHTTP.RELATIONSHIP_SUCCESS);
        session.commit();
        response.setStatus(HttpServletResponse.SC_OK);
        response.flushBuffer();
    } catch (final Throwable t) {
        timeWrapper.getSession().rollback();
        logger.error("received DELETE for HOLD with ID {} from Remote Host: [{}] Port [{}] SubjectDN [{}], but failed to process the request due to {}", new Object[] { uuid, request.getRemoteHost(), request.getRemotePort(), foundSubject, t.toString() });
        if (logger.isDebugEnabled()) {
            logger.error("", t);
        }
        response.sendError(HttpServletResponse.SC_NOT_FOUND);
    }
}
Also used : ProcessSession(org.apache.nifi.processor.ProcessSession) FlowFile(org.apache.nifi.flowfile.FlowFile) X509Certificate(java.security.cert.X509Certificate) FlowFileEntryTimeWrapper(org.apache.nifi.processors.standard.ListenHTTP.FlowFileEntryTimeWrapper)
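
For orientation, and not part of the project sources: the hold released by doDelete is addressed by the URI that the companion doPost (Example 38) returns in its Location header, so a client can acknowledge it with a plain java.net DELETE. The host, port, and hold URI below are placeholder assumptions.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public static int acknowledgeHold(final String holdUri) throws IOException {
    // holdUri is the Location header value from the earlier POST, e.g. "/contentListener/holds/<uuid>" (hypothetical).
    final URL url = new URL("http://nifi-host:8080" + holdUri);
    final HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod("DELETE");
    try {
        // 200 means the hold was found and its FlowFiles were transferred to success; 404 means no such hold exists.
        return connection.getResponseCode();
    } finally {
        connection.disconnect();
    }
}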

Example 38 with ProcessSession

Use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

The class ListenHTTPServlet, method doPost.

@Override
protected void doPost(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
    final ProcessContext context = processContext;
    ProcessSessionFactory sessionFactory;
    do {
        sessionFactory = sessionFactoryHolder.get();
        if (sessionFactory == null) {
            try {
                Thread.sleep(10);
            } catch (final InterruptedException e) {
            }
        }
    } while (sessionFactory == null);
    final ProcessSession session = sessionFactory.createSession();
    FlowFile flowFile = null;
    String holdUuid = null;
    String foundSubject = null;
    try {
        final long n = filesReceived.getAndIncrement() % FILES_BEFORE_CHECKING_DESTINATION_SPACE;
        if (n == 0 || !spaceAvailable.get()) {
            if (context.getAvailableRelationships().isEmpty()) {
                spaceAvailable.set(false);
                if (logger.isDebugEnabled()) {
                    logger.debug("Received request from " + request.getRemoteHost() + " but no space available; Indicating Service Unavailable");
                }
                response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
                return;
            } else {
                spaceAvailable.set(true);
            }
        }
        response.setHeader("Content-Type", MediaType.TEXT_PLAIN);
        final boolean contentGzipped = Boolean.parseBoolean(request.getHeader(GZIPPED_HEADER));
        final X509Certificate[] certs = (X509Certificate[]) request.getAttribute("javax.servlet.request.X509Certificate");
        foundSubject = DEFAULT_FOUND_SUBJECT;
        if (certs != null && certs.length > 0) {
            for (final X509Certificate cert : certs) {
                foundSubject = cert.getSubjectDN().getName();
                if (authorizedPattern.matcher(foundSubject).matches()) {
                    break;
                } else {
                    logger.warn("Rejecting transfer attempt from " + foundSubject + " because the DN is not authorized, host=" + request.getRemoteHost());
                    response.sendError(HttpServletResponse.SC_FORBIDDEN, "not allowed based on dn");
                    return;
                }
            }
        }
        final String destinationVersion = request.getHeader(PROTOCOL_VERSION_HEADER);
        Integer protocolVersion = null;
        if (destinationVersion != null) {
            try {
                protocolVersion = Integer.valueOf(destinationVersion);
            } catch (final NumberFormatException e) {
                // Value was invalid. Treat as if the header were missing.
            }
        }
        final boolean destinationIsLegacyNiFi = (protocolVersion == null);
        final boolean createHold = Boolean.parseBoolean(request.getHeader(FLOWFILE_CONFIRMATION_HEADER));
        final String contentType = request.getContentType();
        final InputStream unthrottled = contentGzipped ? new GZIPInputStream(request.getInputStream()) : request.getInputStream();
        final InputStream in = (streamThrottler == null) ? unthrottled : streamThrottler.newThrottledInputStream(unthrottled);
        if (logger.isDebugEnabled()) {
            logger.debug("Received request from " + request.getRemoteHost() + ", createHold=" + createHold + ", content-type=" + contentType + ", gzip=" + contentGzipped);
        }
        final AtomicBoolean hasMoreData = new AtomicBoolean(false);
        final FlowFileUnpackager unpackager;
        if (APPLICATION_FLOW_FILE_V3.equals(contentType)) {
            unpackager = new FlowFileUnpackagerV3();
        } else if (APPLICATION_FLOW_FILE_V2.equals(contentType)) {
            unpackager = new FlowFileUnpackagerV2();
        } else if (APPLICATION_FLOW_FILE_V1.equals(contentType)) {
            unpackager = new FlowFileUnpackagerV1();
        } else {
            unpackager = null;
        }
        final Set<FlowFile> flowFileSet = new HashSet<>();
        do {
            final long startNanos = System.nanoTime();
            final Map<String, String> attributes = new HashMap<>();
            flowFile = session.create();
            flowFile = session.write(flowFile, new OutputStreamCallback() {

                @Override
                public void process(final OutputStream rawOut) throws IOException {
                    try (final BufferedOutputStream bos = new BufferedOutputStream(rawOut, 65536)) {
                        if (unpackager == null) {
                            IOUtils.copy(in, bos);
                            hasMoreData.set(false);
                        } else {
                            attributes.putAll(unpackager.unpackageFlowFile(in, bos));
                            if (destinationIsLegacyNiFi) {
                                if (attributes.containsKey("nf.file.name")) {
                                    // for backward compatibility with old nifi...
                                    attributes.put(CoreAttributes.FILENAME.key(), attributes.remove("nf.file.name"));
                                }
                                if (attributes.containsKey("nf.file.path")) {
                                    attributes.put(CoreAttributes.PATH.key(), attributes.remove("nf.file.path"));
                                }
                            }
                            hasMoreData.set(unpackager.hasMoreData());
                        }
                    }
                }
            });
            final long transferNanos = System.nanoTime() - startNanos;
            final long transferMillis = TimeUnit.MILLISECONDS.convert(transferNanos, TimeUnit.NANOSECONDS);
            // put metadata on flowfile
            final String nameVal = request.getHeader(CoreAttributes.FILENAME.key());
            if (StringUtils.isNotBlank(nameVal)) {
                attributes.put(CoreAttributes.FILENAME.key(), nameVal);
            }
            // put arbitrary headers on flow file
            for (Enumeration<String> headerEnum = request.getHeaderNames(); headerEnum.hasMoreElements(); ) {
                String headerName = headerEnum.nextElement();
                if (headerPattern != null && headerPattern.matcher(headerName).matches()) {
                    String headerValue = request.getHeader(headerName);
                    attributes.put(headerName, headerValue);
                }
            }
            String sourceSystemFlowFileIdentifier = attributes.get(CoreAttributes.UUID.key());
            if (sourceSystemFlowFileIdentifier != null) {
                sourceSystemFlowFileIdentifier = "urn:nifi:" + sourceSystemFlowFileIdentifier;
                // If we received a UUID, we want to give the FlowFile a new UUID and register the sending system's
                // identifier as the SourceSystemFlowFileIdentifier field in the Provenance RECEIVE event
                attributes.put(CoreAttributes.UUID.key(), UUID.randomUUID().toString());
            }
            flowFile = session.putAllAttributes(flowFile, attributes);
            session.getProvenanceReporter().receive(flowFile, request.getRequestURL().toString(), sourceSystemFlowFileIdentifier, "Remote DN=" + foundSubject, transferMillis);
            flowFile = session.putAttribute(flowFile, "restlistener.remote.source.host", request.getRemoteHost());
            flowFile = session.putAttribute(flowFile, "restlistener.request.uri", request.getRequestURI());
            flowFile = session.putAttribute(flowFile, "restlistener.remote.user.dn", foundSubject);
            flowFileSet.add(flowFile);
            if (holdUuid == null) {
                holdUuid = flowFile.getAttribute(CoreAttributes.UUID.key());
            }
        } while (hasMoreData.get());
        if (createHold) {
            String uuid = (holdUuid == null) ? UUID.randomUUID().toString() : holdUuid;
            if (flowFileMap.containsKey(uuid)) {
                uuid = UUID.randomUUID().toString();
            }
            final FlowFileEntryTimeWrapper wrapper = new FlowFileEntryTimeWrapper(session, flowFileSet, System.currentTimeMillis(), request.getRemoteHost());
            FlowFileEntryTimeWrapper previousWrapper;
            do {
                previousWrapper = flowFileMap.putIfAbsent(uuid, wrapper);
                if (previousWrapper != null) {
                    uuid = UUID.randomUUID().toString();
                }
            } while (previousWrapper != null);
            response.setStatus(HttpServletResponse.SC_SEE_OTHER);
            final String ackUri = "/" + basePath + "/holds/" + uuid;
            response.addHeader(LOCATION_HEADER_NAME, ackUri);
            response.addHeader(LOCATION_URI_INTENT_NAME, LOCATION_URI_INTENT_VALUE);
            response.getOutputStream().write(ackUri.getBytes("UTF-8"));
            if (logger.isDebugEnabled()) {
                logger.debug("Ingested {} from Remote Host: [{}] Port [{}] SubjectDN [{}]; placed hold on these {} files with ID {}", new Object[] { flowFileSet, request.getRemoteHost(), request.getRemotePort(), foundSubject, flowFileSet.size(), uuid });
            }
        } else {
            response.setStatus(this.returnCode);
            logger.info("Received from Remote Host: [{}] Port [{}] SubjectDN [{}]; transferring to 'success' {}", new Object[] { request.getRemoteHost(), request.getRemotePort(), foundSubject, flowFile });
            session.transfer(flowFileSet, ListenHTTP.RELATIONSHIP_SUCCESS);
            session.commit();
        }
    } catch (final Throwable t) {
        session.rollback();
        if (flowFile == null) {
            logger.error("Unable to receive file from Remote Host: [{}] SubjectDN [{}] due to {}", new Object[] { request.getRemoteHost(), foundSubject, t });
        } else {
            logger.error("Unable to receive file {} from Remote Host: [{}] SubjectDN [{}] due to {}", new Object[] { flowFile, request.getRemoteHost(), foundSubject, t });
        }
        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.toString());
    }
}
Also used : ProcessSession(org.apache.nifi.processor.ProcessSession) HashMap(java.util.HashMap) OutputStream(java.io.OutputStream) BufferedOutputStream(org.apache.nifi.stream.io.BufferedOutputStream) ProcessContext(org.apache.nifi.processor.ProcessContext) GZIPInputStream(java.util.zip.GZIPInputStream) FlowFileUnpackagerV3(org.apache.nifi.util.FlowFileUnpackagerV3) FlowFileUnpackagerV2(org.apache.nifi.util.FlowFileUnpackagerV2) FlowFileUnpackagerV1(org.apache.nifi.util.FlowFileUnpackagerV1) ProcessSessionFactory(org.apache.nifi.processor.ProcessSessionFactory) OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) BufferedOutputStream(org.apache.nifi.stream.io.BufferedOutputStream) HashSet(java.util.HashSet) FlowFile(org.apache.nifi.flowfile.FlowFile) GZIPInputStream(java.util.zip.GZIPInputStream) InputStream(java.io.InputStream) FlowFileUnpackager(org.apache.nifi.util.FlowFileUnpackager) X509Certificate(java.security.cert.X509Certificate) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) FlowFileEntryTimeWrapper(org.apache.nifi.processors.standard.ListenHTTP.FlowFileEntryTimeWrapper)
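
A hedged sketch of the client side of the unpackager branch above: packaging content plus attributes with FlowFilePackagerV3, the packaging counterpart of the FlowFileUnpackagerV3 imported in this example, so the result can be POSTed with a Content-Type of application/flowfile-v3. Treat the exact content-type string and the packager signature as assumptions mirroring the constants used here.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;

import org.apache.nifi.util.FlowFilePackagerV3;

public static byte[] packageForListenHttp(final byte[] content, final Map<String, String> attributes) throws IOException {
    final ByteArrayOutputStream packaged = new ByteArrayOutputStream();
    // Writes the attribute map followed by the content in the framing that FlowFileUnpackagerV3 reads back above.
    new FlowFilePackagerV3().packageFlowFile(new ByteArrayInputStream(content), packaged, attributes, content.length);
    // POST these bytes with Content-Type: application/flowfile-v3 (assumed header value).
    return packaged.toByteArray();
}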

Example 39 with ProcessSession

Use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

The class ExecuteSQL, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile fileToProcess = null;
    if (context.hasIncomingConnection()) {
        fileToProcess = session.get();
        // If there is no FlowFile and all incoming connections are self-loops, we can continue; however, if
        // connections come from other processors, we should run only when we actually have a FlowFile.
        if (fileToProcess == null && context.hasNonLoopConnection()) {
            return;
        }
    }
    final ComponentLog logger = getLogger();
    final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).asTimePeriod(TimeUnit.SECONDS).intValue();
    final boolean convertNamesForAvro = context.getProperty(NORMALIZE_NAMES_FOR_AVRO).asBoolean();
    final Boolean useAvroLogicalTypes = context.getProperty(USE_AVRO_LOGICAL_TYPES).asBoolean();
    final Integer defaultPrecision = context.getProperty(DEFAULT_PRECISION).evaluateAttributeExpressions().asInteger();
    final Integer defaultScale = context.getProperty(DEFAULT_SCALE).evaluateAttributeExpressions().asInteger();
    final StopWatch stopWatch = new StopWatch(true);
    final String selectQuery;
    if (context.getProperty(SQL_SELECT_QUERY).isSet()) {
        selectQuery = context.getProperty(SQL_SELECT_QUERY).evaluateAttributeExpressions(fileToProcess).getValue();
    } else {
        // If the query is not set, then an incoming flow file is required, and expected to contain a valid SQL select query.
        // If there is no incoming connection, onTrigger will not be called as the processor will fail when scheduled.
        final StringBuilder queryContents = new StringBuilder();
        session.read(fileToProcess, in -> queryContents.append(IOUtils.toString(in, Charset.defaultCharset())));
        selectQuery = queryContents.toString();
    }
    int resultCount = 0;
    try (final Connection con = dbcpService.getConnection();
        final PreparedStatement st = con.prepareStatement(selectQuery)) {
        // timeout in seconds
        st.setQueryTimeout(queryTimeout);
        if (fileToProcess != null) {
            JdbcCommon.setParameters(st, fileToProcess.getAttributes());
        }
        logger.debug("Executing query {}", new Object[] { selectQuery });
        boolean results = st.execute();
        while (results) {
            FlowFile resultSetFF;
            if (fileToProcess == null) {
                resultSetFF = session.create();
            } else {
                resultSetFF = session.create(fileToProcess);
                resultSetFF = session.putAllAttributes(resultSetFF, fileToProcess.getAttributes());
            }
            final AtomicLong nrOfRows = new AtomicLong(0L);
            resultSetFF = session.write(resultSetFF, out -> {
                try {
                    final ResultSet resultSet = st.getResultSet();
                    final JdbcCommon.AvroConversionOptions options = JdbcCommon.AvroConversionOptions.builder().convertNames(convertNamesForAvro).useLogicalTypes(useAvroLogicalTypes).defaultPrecision(defaultPrecision).defaultScale(defaultScale).build();
                    nrOfRows.set(JdbcCommon.convertToAvroStream(resultSet, out, options, null));
                } catch (final SQLException e) {
                    throw new ProcessException(e);
                }
            });
            long duration = stopWatch.getElapsed(TimeUnit.MILLISECONDS);
            // set attribute how many rows were selected
            resultSetFF = session.putAttribute(resultSetFF, RESULT_ROW_COUNT, String.valueOf(nrOfRows.get()));
            resultSetFF = session.putAttribute(resultSetFF, RESULT_QUERY_DURATION, String.valueOf(duration));
            resultSetFF = session.putAttribute(resultSetFF, CoreAttributes.MIME_TYPE.key(), JdbcCommon.MIME_TYPE_AVRO_BINARY);
            logger.info("{} contains {} Avro records; transferring to 'success'", new Object[] { resultSetFF, nrOfRows.get() });
            session.getProvenanceReporter().modifyContent(resultSetFF, "Retrieved " + nrOfRows.get() + " rows", duration);
            session.transfer(resultSetFF, REL_SUCCESS);
            resultCount++;
            // are there any more result sets?
            try {
                results = st.getMoreResults();
            } catch (SQLException ex) {
                results = false;
            }
        }
        // pass the original flow file down the line to trigger downstream processors
        if (fileToProcess != null) {
            if (resultCount > 0) {
                session.remove(fileToProcess);
            } else {
                fileToProcess = session.write(fileToProcess, JdbcCommon::createEmptyAvroStream);
                session.transfer(fileToProcess, REL_SUCCESS);
            }
        }
    } catch (final ProcessException | SQLException e) {
        // pass the original flow file down the line to trigger downstream processors
        if (fileToProcess == null) {
            // This can happen if any exceptions occur while setting up the connection, statement, etc.
            logger.error("Unable to execute SQL select query {} due to {}. No FlowFile to route to failure", new Object[] { selectQuery, e });
            context.yield();
        } else {
            if (context.hasIncomingConnection()) {
                logger.error("Unable to execute SQL select query {} for {} due to {}; routing to failure", new Object[] { selectQuery, fileToProcess, e });
                fileToProcess = session.penalize(fileToProcess);
            } else {
                logger.error("Unable to execute SQL select query {} due to {}; routing to failure", new Object[] { selectQuery, e });
                context.yield();
            }
            session.transfer(fileToProcess, REL_FAILURE);
        }
    }
}
Also used : StandardValidators(org.apache.nifi.processor.util.StandardValidators) Connection(java.sql.Connection) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) USE_AVRO_LOGICAL_TYPES(org.apache.nifi.processors.standard.util.JdbcCommon.USE_AVRO_LOGICAL_TYPES) EventDriven(org.apache.nifi.annotation.behavior.EventDriven) ComponentLog(org.apache.nifi.logging.ComponentLog) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) DEFAULT_SCALE(org.apache.nifi.processors.standard.util.JdbcCommon.DEFAULT_SCALE) HashSet(java.util.HashSet) SQLException(java.sql.SQLException) Charset(java.nio.charset.Charset) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) Relationship(org.apache.nifi.processor.Relationship) ResultSet(java.sql.ResultSet) DEFAULT_PRECISION(org.apache.nifi.processors.standard.util.JdbcCommon.DEFAULT_PRECISION) Requirement(org.apache.nifi.annotation.behavior.InputRequirement.Requirement) ReadsAttributes(org.apache.nifi.annotation.behavior.ReadsAttributes) FlowFile(org.apache.nifi.flowfile.FlowFile) NORMALIZE_NAMES_FOR_AVRO(org.apache.nifi.processors.standard.util.JdbcCommon.NORMALIZE_NAMES_FOR_AVRO) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) ProcessSession(org.apache.nifi.processor.ProcessSession) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) PreparedStatement(java.sql.PreparedStatement) TimeUnit(java.util.concurrent.TimeUnit) AtomicLong(java.util.concurrent.atomic.AtomicLong) IOUtils(org.apache.commons.io.IOUtils) List(java.util.List) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled) JdbcCommon(org.apache.nifi.processors.standard.util.JdbcCommon) StopWatch(org.apache.nifi.util.StopWatch) AbstractProcessor(org.apache.nifi.processor.AbstractProcessor) Tags(org.apache.nifi.annotation.documentation.Tags) DBCPService(org.apache.nifi.dbcp.DBCPService) CoreAttributes(org.apache.nifi.flowfile.attributes.CoreAttributes) Collections(java.util.Collections) ReadsAttribute(org.apache.nifi.annotation.behavior.ReadsAttribute) FlowFile(org.apache.nifi.flowfile.FlowFile) SQLException(java.sql.SQLException) Connection(java.sql.Connection) PreparedStatement(java.sql.PreparedStatement) ComponentLog(org.apache.nifi.logging.ComponentLog) StopWatch(org.apache.nifi.util.StopWatch) AtomicLong(java.util.concurrent.atomic.AtomicLong) ProcessException(org.apache.nifi.processor.exception.ProcessException) DBCPService(org.apache.nifi.dbcp.DBCPService) ResultSet(java.sql.ResultSet)
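
Downstream of this processor, the FlowFile content is an Avro data stream (see the MIME type attribute set above). A minimal consumer sketch using the standard Avro API, assuming the FlowFile bytes have already been read into memory:

import java.io.ByteArrayInputStream;
import java.io.IOException;

import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;

public static long countAvroRows(final byte[] flowFileContent) throws IOException {
    long rows = 0;
    try (DataFileStream<GenericRecord> reader =
            new DataFileStream<>(new ByteArrayInputStream(flowFileContent), new GenericDatumReader<>())) {
        // Each Avro record corresponds to one row returned by the executed SELECT statement.
        while (reader.hasNext()) {
            reader.next();
            rows++;
        }
    }
    return rows;
}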

Example 40 with ProcessSession

Use of org.apache.nifi.processor.ProcessSession in project nifi by apache.

The class FlattenJson, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String mode = context.getProperty(FLATTEN_MODE).getValue();
    final FlattenMode flattenMode = getFlattenMode(mode);
    String separator = context.getProperty(SEPARATOR).evaluateAttributeExpressions(flowFile).getValue();
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        session.exportTo(flowFile, bos);
        bos.close();
        String raw = new String(bos.toByteArray());
        final String flattened = new JsonFlattener(raw).withFlattenMode(flattenMode).withSeparator(separator.charAt(0)).withStringEscapePolicy(() -> StringEscapeUtils.ESCAPE_JAVA).flatten();
        flowFile = session.write(flowFile, os -> os.write(flattened.getBytes()));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (Exception ex) {
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used : CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ExpressionLanguageCompiler(org.apache.nifi.expression.ExpressionLanguageCompiler) SideEffectFree(org.apache.nifi.annotation.behavior.SideEffectFree) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) Relationship(org.apache.nifi.processor.Relationship) Map(java.util.Map) ValidationResult(org.apache.nifi.components.ValidationResult) FlattenMode(com.github.wnameless.json.flattener.FlattenMode) FlowFile(org.apache.nifi.flowfile.FlowFile) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) StringEscapeUtils(org.apache.commons.text.StringEscapeUtils) ProcessSession(org.apache.nifi.processor.ProcessSession) IOException(java.io.IOException) AllowableValue(org.apache.nifi.components.AllowableValue) List(java.util.List) JsonFlattener(com.github.wnameless.json.flattener.JsonFlattener) AbstractProcessor(org.apache.nifi.processor.AbstractProcessor) Tags(org.apache.nifi.annotation.documentation.Tags) ProcessorInitializationContext(org.apache.nifi.processor.ProcessorInitializationContext) Collections(java.util.Collections) FlowFile(org.apache.nifi.flowfile.FlowFile) JsonFlattener(com.github.wnameless.json.flattener.JsonFlattener) ByteArrayOutputStream(java.io.ByteArrayOutputStream) FlattenMode(com.github.wnameless.json.flattener.FlattenMode) ProcessException(org.apache.nifi.processor.exception.ProcessException) IOException(java.io.IOException)
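
For a sense of what the transform emits, the same json-flattener call can be run standalone. The output shown in the comment is illustrative, based on the library's default bracket notation for array elements, rather than taken from the project.

import com.github.wnameless.json.flattener.FlattenMode;
import com.github.wnameless.json.flattener.JsonFlattener;

public static void main(final String[] args) {
    final String nested = "{\"person\":{\"name\":\"Ada\",\"langs\":[\"en\",\"fr\"]}}";
    final String flat = new JsonFlattener(nested)
            .withFlattenMode(FlattenMode.NORMAL)
            .withSeparator('.')
            .flatten();
    // Expected (illustrative): {"person.name":"Ada","person.langs[0]":"en","person.langs[1]":"fr"}
    System.out.println(flat);
}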

Aggregations

ProcessSession (org.apache.nifi.processor.ProcessSession): 129
FlowFile (org.apache.nifi.flowfile.FlowFile): 96
ProcessContext (org.apache.nifi.processor.ProcessContext): 55
IOException (java.io.IOException): 54
ProcessException (org.apache.nifi.processor.exception.ProcessException): 51
Test (org.junit.Test): 47
Relationship (org.apache.nifi.processor.Relationship): 45
List (java.util.List): 42
ArrayList (java.util.ArrayList): 41
Map (java.util.Map): 39
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 39
ComponentLog (org.apache.nifi.logging.ComponentLog): 39
HashSet (java.util.HashSet): 38
Set (java.util.Set): 38
HashMap (java.util.HashMap): 35
Collections (java.util.Collections): 33
CapabilityDescription (org.apache.nifi.annotation.documentation.CapabilityDescription): 33
Tags (org.apache.nifi.annotation.documentation.Tags): 33
InputRequirement (org.apache.nifi.annotation.behavior.InputRequirement): 31
MockFlowFile (org.apache.nifi.util.MockFlowFile): 31