Example 41 with Relationship

Use of org.apache.nifi.processor.Relationship in project nifi by apache.

The class PutHiveStreaming, method onTrigger:

private void onTrigger(ProcessContext context, ProcessSession session, FunctionContext functionContext) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String dbName = context.getProperty(DB_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
    // Only allow one thread to work on a DB/table at a time
    final Semaphore newSemaphore = new Semaphore(1);
    Semaphore semaphore = tableSemaphoreMap.putIfAbsent(dbName + "." + tableName, newSemaphore);
    if (semaphore == null) {
        semaphore = newSemaphore;
    }
    boolean gotSemaphore = false;
    try {
        gotSemaphore = semaphore.tryAcquire(0, TimeUnit.SECONDS);
    } catch (InterruptedException ie) {
    // Nothing to do, gotSemaphore defaults to false
    }
    if (!gotSemaphore) {
        // We didn't get a chance to acquire, so rollback the session and try again next time
        session.rollback();
        return;
    }
    final ComponentLog log = getLogger();
    final String metastoreUri = context.getProperty(METASTORE_URI).evaluateAttributeExpressions(flowFile).getValue();
    final boolean autoCreatePartitions = context.getProperty(AUTOCREATE_PARTITIONS).asBoolean();
    final Integer maxConnections = context.getProperty(MAX_OPEN_CONNECTIONS).asInteger();
    final Integer heartbeatInterval = context.getProperty(HEARTBEAT_INTERVAL).evaluateAttributeExpressions().asInteger();
    final Integer txnsPerBatch = context.getProperty(TXNS_PER_BATCH).evaluateAttributeExpressions(flowFile).asInteger();
    final Integer recordsPerTxn = context.getProperty(RECORDS_PER_TXN).evaluateAttributeExpressions(flowFile).asInteger();
    final Map<HiveEndPoint, HiveWriter> myWriters = new ConcurrentHashMap<>();
    threadWriterList.add(myWriters);
    HiveOptions o = new HiveOptions(metastoreUri, dbName, tableName)
            .withTxnsPerBatch(txnsPerBatch)
            .withAutoCreatePartitions(autoCreatePartitions)
            .withMaxOpenConnections(maxConnections)
            .withHeartBeatInterval(heartbeatInterval)
            .withCallTimeout(callTimeout);
    if (SecurityUtil.isSecurityEnabled(hiveConfig)) {
        final String explicitPrincipal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
        final String explicitKeytab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
        final String resolvedPrincipal;
        final String resolvedKeytab;
        if (credentialsService == null) {
            resolvedPrincipal = explicitPrincipal;
            resolvedKeytab = explicitKeytab;
        } else {
            resolvedPrincipal = credentialsService.getPrincipal();
            resolvedKeytab = credentialsService.getKeytab();
        }
        o = o.withKerberosPrincipal(resolvedPrincipal).withKerberosKeytab(resolvedKeytab);
    }
    final HiveOptions options = o;
    // Store the original class loader, then explicitly set it to this class's classloader (for use by the Hive Metastore)
    ClassLoader originalClassloader = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
    final List<String> partitionColumnList;
    final String partitionColumns = context.getProperty(PARTITION_COLUMNS).evaluateAttributeExpressions().getValue();
    if (partitionColumns == null || partitionColumns.isEmpty()) {
        partitionColumnList = Collections.emptyList();
    } else {
        String[] partitionCols = partitionColumns.split(",");
        partitionColumnList = new ArrayList<>(partitionCols.length);
        for (String col : partitionCols) {
            partitionColumnList.add(col.trim());
        }
    }
    final AtomicReference<List<HiveStreamingRecord>> successfulRecords = new AtomicReference<>();
    successfulRecords.set(new ArrayList<>());
    final FlowFile inputFlowFile = flowFile;
    final RoutingResult result = new RoutingResult();
    final ExceptionHandler<FunctionContext> exceptionHandler = new ExceptionHandler<>();
    exceptionHandler.mapException(s -> {
        try {
            if (s == null) {
                return ErrorTypes.PersistentFailure;
            }
            throw s;
        } catch (IllegalArgumentException | HiveWriter.WriteFailure | SerializationError inputError) {
            return ErrorTypes.InvalidInput;
        } catch (HiveWriter.CommitFailure | HiveWriter.TxnBatchFailure | HiveWriter.TxnFailure writerTxError) {
            return ErrorTypes.TemporalInputFailure;
        } catch (ConnectionError | HiveWriter.ConnectFailure connectionError) {
            // Can't connect to Hive endpoint.
            log.error("Error connecting to Hive endpoint: table {} at {}", new Object[] { options.getTableName(), options.getMetaStoreURI() });
            return ErrorTypes.TemporalFailure;
        } catch (IOException | InterruptedException tempError) {
            return ErrorTypes.TemporalFailure;
        } catch (Exception t) {
            return ErrorTypes.UnknownFailure;
        }
    });
    final BiFunction<FunctionContext, ErrorTypes, ErrorTypes.Result> adjustError = RollbackOnFailure.createAdjustError(getLogger());
    exceptionHandler.adjustError(adjustError);
    // Create output flow files and their Avro writers
    functionContext.setFlowFiles(session.create(inputFlowFile), session.create(inputFlowFile));
    try {
        session.read(inputFlowFile, new InputStreamCallback() {

            @Override
            public void process(InputStream in) throws IOException {
                try (final DataFileStream<GenericRecord> reader = new DataFileStream<>(in, new GenericDatumReader<GenericRecord>())) {
                    GenericRecord currRecord = null;
                    // Copy codec and schema information to all writers
                    final String codec = reader.getMetaString(DataFileConstants.CODEC) == null ? DataFileConstants.NULL_CODEC : reader.getMetaString(DataFileConstants.CODEC);
                    functionContext.initAvroWriters(session, codec, reader);
                    Runnable flushSuccessfulRecords = () -> {
                        // Now send the records to the successful FlowFile and update the success count
                        functionContext.appendRecordsToSuccess(session, successfulRecords.get());
                        // Clear the list of successful records, we'll use it at the end when we flush whatever records are left
                        successfulRecords.set(new ArrayList<>());
                    };
                    while (reader.hasNext()) {
                        // We can NOT reuse currRecord here, because currRecord is accumulated in successful records.
                        // If we use the same GenericRecord instance, every record ends up having the same contents.
                        // To avoid this, we need to create a brand new GenericRecord instance here each time.
                        currRecord = reader.next();
                        functionContext.recordCount.incrementAndGet();
                        // Extract the partition values (they must be put separately into the Hive Streaming API)
                        List<String> partitionValues = new ArrayList<>();
                        if (!exceptionHandler.execute(functionContext, currRecord, input -> {
                            for (String partition : partitionColumnList) {
                                Object partitionValue = input.get(partition);
                                if (partitionValue == null) {
                                    throw new IllegalArgumentException("Partition column '" + partition + "' not found in Avro record");
                                }
                                partitionValues.add(partitionValue.toString());
                            }
                        }, onRecordError(context, session, myWriters))) {
                            continue;
                        }
                        final HiveStreamingRecord record = new HiveStreamingRecord(partitionValues, currRecord);
                        final AtomicReference<HiveWriter> hiveWriterRef = new AtomicReference<>();
                        // Write record to Hive streaming
                        if (!exceptionHandler.execute(functionContext, record, input -> {
                            final HiveEndPoint endPoint = makeHiveEndPoint(record.getPartitionValues(), options);
                            final HiveWriter hiveWriter = getOrCreateWriter(myWriters, options, endPoint);
                            hiveWriterRef.set(hiveWriter);
                            hiveWriter.write(record.getRecord().toString().getBytes(StandardCharsets.UTF_8));
                            successfulRecords.get().add(record);
                        }, onHiveRecordError(context, session, myWriters))) {
                            continue;
                        }
                        // If we've reached the records-per-transaction limit, flush the Hive Writer and update the Avro Writer for successful records
                        final HiveWriter hiveWriter = hiveWriterRef.get();
                        if (hiveWriter.getTotalRecords() >= recordsPerTxn) {
                            exceptionHandler.execute(functionContext, successfulRecords.get(), input -> {
                                hiveWriter.flush(true);
                                // Proceed with the function context; the process session can no longer be rolled back.
                                functionContext.proceed();
                                // Now send the records to the success relationship and update the success count
                                flushSuccessfulRecords.run();
                            }, onHiveRecordsError(context, session, myWriters).andThen((fc, input, res, commitException) -> {
                                // Reset hiveWriter for succeeding records.
                                switch(res.destination()) {
                                    case Retry:
                                    case Failure:
                                        try {
                                            // Abort current tx and move to next.
                                            hiveWriter.abort();
                                        } catch (Exception e) {
                                            // Can't even abort properly, throw a process exception
                                            throw new ProcessException(e);
                                        }
                                }
                            }));
                        }
                    }
                    exceptionHandler.execute(functionContext, successfulRecords.get(), input -> {
                        // Finish any transactions
                        flushAllWriters(myWriters, true);
                        closeAllWriters(myWriters);
                        // Now send any remaining records to the success relationship and update the count
                        flushSuccessfulRecords.run();
                    // Append successfulRecords on failure.
                    }, onHiveRecordsError(context, session, myWriters));
                } catch (IOException ioe) {
                    // The Avro file is invalid (or may not be an Avro file at all), send it to failure
                    final ErrorTypes.Result adjusted = adjustError.apply(functionContext, ErrorTypes.InvalidInput);
                    final String msg = "The incoming flow file can not be read as an Avro file";
                    switch(adjusted.destination()) {
                        case Failure:
                            log.error(msg, ioe);
                            result.routeTo(inputFlowFile, REL_FAILURE);
                            break;
                        case ProcessException:
                            throw new ProcessException(msg, ioe);
                    }
                }
            }
        });
        // If we got here, we've processed the outgoing flow files correctly, so remove the incoming one if necessary
        if (result.getRoutedFlowFiles().values().stream().noneMatch(routed -> routed.contains(inputFlowFile))) {
            session.remove(inputFlowFile);
        }
    } catch (DiscontinuedException e) {
        // The input FlowFile processing is discontinued. Keep it in the input queue.
        getLogger().warn("Discontinued processing for {} due to {}", new Object[] { flowFile, e }, e);
        result.routeTo(flowFile, Relationship.SELF);
    } catch (ShouldRetryException e) {
        // This exception is already a result of adjusting an error, so simply transfer the FlowFile to retry.
        getLogger().error(e.getMessage(), e);
        flowFile = session.penalize(flowFile);
        result.routeTo(flowFile, REL_RETRY);
    } finally {
        threadWriterList.remove(myWriters);
        functionContext.transferFlowFiles(session, result, options);
        // Restore original class loader, might not be necessary but is good practice since the processor task changed it
        Thread.currentThread().setContextClassLoader(originalClassloader);
        semaphore.release();
    }
}
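
The putIfAbsent dance at the top of onTrigger (create a Semaphore, then publish it only if no other thread won the race) is a general idiom for per-key mutual exclusion over a ConcurrentHashMap. A minimal standalone sketch of the same idiom, using computeIfAbsent for brevity; PerKeyLock and its method names are illustrative, not NiFi API:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Semaphore;

public class PerKeyLock {
    private final ConcurrentMap<String, Semaphore> locks = new ConcurrentHashMap<>();

    /** Returns true if the caller now holds the single permit for this key. */
    public boolean tryLock(String key) {
        // computeIfAbsent has the same effect as the putIfAbsent dance above:
        // exactly one Semaphore instance ever exists per key, even under contention.
        Semaphore semaphore = locks.computeIfAbsent(key, k -> new Semaphore(1));
        return semaphore.tryAcquire();
    }

    public void unlock(String key) {
        Semaphore semaphore = locks.get(key);
        if (semaphore != null) {
            semaphore.release();
        }
    }
}

The non-blocking tryAcquire() also sidesteps the InterruptedException handling that the tryAcquire(0, TimeUnit.SECONDS) variant forces on the processor above; a caller would guard with tryLock(dbName + "." + tableName) and release in a finally block, mirroring the semaphore.release() at the end of onTrigger.
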
Also used : StandardValidators(org.apache.nifi.processor.util.StandardValidators) ConnectionError(org.apache.hive.hcatalog.streaming.ConnectionError) BiFunction(java.util.function.BiFunction) Timer(java.util.Timer) StreamingException(org.apache.hive.hcatalog.streaming.StreamingException) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ErrorTypes(org.apache.nifi.processor.util.pattern.ErrorTypes) HiveEndPoint(org.apache.hive.hcatalog.streaming.HiveEndPoint) Snappy(org.xerial.snappy.Snappy) RoutingResult(org.apache.nifi.processor.util.pattern.RoutingResult) DiscontinuedException(org.apache.nifi.processor.util.pattern.DiscontinuedException) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) AuthenticationFailedException(org.apache.nifi.util.hive.AuthenticationFailedException) Map(java.util.Map) ExceptionHandler(org.apache.nifi.processor.util.pattern.ExceptionHandler) CodecFactory(org.apache.avro.file.CodecFactory) TimerTask(java.util.TimerTask) DataFileConstants(org.apache.avro.file.DataFileConstants) HiveConfigurator(org.apache.nifi.util.hive.HiveConfigurator) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) SecurityUtil(org.apache.nifi.hadoop.SecurityUtil) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) RequiresInstanceClassLoading(org.apache.nifi.annotation.behavior.RequiresInstanceClassLoading) FlowFile(org.apache.nifi.flowfile.FlowFile) KerberosProperties(org.apache.nifi.hadoop.KerberosProperties) SerializationError(org.apache.hive.hcatalog.streaming.SerializationError) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) DataFileWriter(org.apache.avro.file.DataFileWriter) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) StandardCharsets(java.nio.charset.StandardCharsets) Executors(java.util.concurrent.Executors) List(java.util.List) SeekableByteArrayInput(org.apache.avro.file.SeekableByteArrayInput) Tags(org.apache.nifi.annotation.documentation.Tags) Pattern(java.util.regex.Pattern) ValidationResources(org.apache.nifi.util.hive.ValidationResources) ProcessorInitializationContext(org.apache.nifi.processor.ProcessorInitializationContext) GenericDatumReader(org.apache.avro.generic.GenericDatumReader) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) ThreadFactoryBuilder(com.google.common.util.concurrent.ThreadFactoryBuilder) RollbackOnFailure(org.apache.nifi.processor.util.pattern.RollbackOnFailure) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ValidationContext(org.apache.nifi.components.ValidationContext) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) ComponentLog(org.apache.nifi.logging.ComponentLog) AtomicReference(java.util.concurrent.atomic.AtomicReference) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) Relationship(org.apache.nifi.processor.Relationship) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) AbstractSessionFactoryProcessor(org.apache.nifi.processor.AbstractSessionFactoryProcessor) ValidationResult(org.apache.nifi.components.ValidationResult) ExecutorService(java.util.concurrent.ExecutorService) GenericRecord(org.apache.avro.generic.GenericRecord) Validator(org.apache.nifi.components.Validator) 
Semaphore(java.util.concurrent.Semaphore) HiveConf(org.apache.hadoop.hive.conf.HiveConf) ProcessContext(org.apache.nifi.processor.ProcessContext) DataFileStream(org.apache.avro.file.DataFileStream) ProcessSession(org.apache.nifi.processor.ProcessSession) IOException(java.io.IOException) ProcessSessionFactory(org.apache.nifi.processor.ProcessSessionFactory) File(java.io.File) TimeUnit(java.util.concurrent.TimeUnit) HiveUtils(org.apache.nifi.util.hive.HiveUtils) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled) KerberosCredentialsService(org.apache.nifi.kerberos.KerberosCredentialsService) HiveOptions(org.apache.nifi.util.hive.HiveOptions) HiveWriter(org.apache.nifi.util.hive.HiveWriter) OnStopped(org.apache.nifi.annotation.lifecycle.OnStopped) Collections(java.util.Collections) InputStream(java.io.InputStream)

Example 42 with Relationship

Use of org.apache.nifi.processor.Relationship in project nifi by apache.

The class RouteHL7, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).evaluateAttributeExpressions(flowFile).getValue());
    final byte[] buffer = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {

        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, buffer);
        }
    });
    @SuppressWarnings("resource") final HapiContext hapiContext = new DefaultHapiContext();
    hapiContext.setValidationContext((ca.uhn.hl7v2.validation.ValidationContext) ValidationContextFactory.noValidation());
    final PipeParser parser = hapiContext.getPipeParser();
    final String hl7Text = new String(buffer, charset);
    final HL7Message message;
    try {
        final Message hapiMessage = parser.parse(hl7Text);
        message = new HapiMessage(hapiMessage);
    } catch (final Exception e) {
        getLogger().error("Failed to parse {} as HL7 due to {}; routing to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final Set<String> matchingRels = new HashSet<>();
    final Map<Relationship, HL7Query> queryMap = queries;
    for (final Map.Entry<Relationship, HL7Query> entry : queryMap.entrySet()) {
        final Relationship relationship = entry.getKey();
        final HL7Query query = entry.getValue();
        final QueryResult result = query.evaluate(message);
        if (result.isMatch()) {
            FlowFile clone = session.clone(flowFile);
            clone = session.putAttribute(clone, "RouteHL7.Route", relationship.getName());
            session.transfer(clone, relationship);
            session.getProvenanceReporter().route(clone, relationship);
            matchingRels.add(relationship.getName());
        }
    }
    session.transfer(flowFile, REL_ORIGINAL);
    getLogger().info("Routed a copy of {} to {} relationships: {}", new Object[] { flowFile, matchingRels.size(), matchingRels });
}
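
The parsing half of RouteHL7 is plain HAPI. A standalone sketch of the same parse-without-validation setup, assuming the HAPI structures library for the message version (v2.3 here) is on the classpath; the sample message and class name are illustrative:

import ca.uhn.hl7v2.DefaultHapiContext;
import ca.uhn.hl7v2.model.Message;
import ca.uhn.hl7v2.parser.PipeParser;
import ca.uhn.hl7v2.util.Terser;
import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;

public class Hl7ParseSketch {
    public static void main(String[] args) throws Exception {
        try (DefaultHapiContext ctx = new DefaultHapiContext()) {
            // Same setting as the processor: skip validation and let routing decide
            // what to do with odd messages.
            ctx.setValidationContext(ValidationContextFactory.noValidation());
            PipeParser parser = ctx.getPipeParser();
            Message msg = parser.parse("MSH|^~\\&|APP|FAC|||20240101||ADT^A01|1|P|2.3\r");
            // Terser gives path-style access to fields, e.g. the message type from MSH-9-1.
            System.out.println(new Terser(msg).get("/MSH-9-1"));   // prints: ADT
        }
    }
}

From there the processor's routing loop is straightforward: each matching HL7Query gets its own clone of the FlowFile, so a single message can be routed to several relationships while the original always goes to REL_ORIGINAL.
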
Also used : HapiMessage(org.apache.nifi.hl7.hapi.HapiMessage) Message(ca.uhn.hl7v2.model.Message) HL7Message(org.apache.nifi.hl7.model.HL7Message) QueryResult(org.apache.nifi.hl7.query.QueryResult) DefaultHapiContext(ca.uhn.hl7v2.DefaultHapiContext) HapiContext(ca.uhn.hl7v2.HapiContext) HashSet(java.util.HashSet) FlowFile(org.apache.nifi.flowfile.FlowFile) HL7Query(org.apache.nifi.hl7.query.HL7Query) PipeParser(ca.uhn.hl7v2.parser.PipeParser) InputStream(java.io.InputStream) Charset(java.nio.charset.Charset) IOException(java.io.IOException) ProcessException(org.apache.nifi.processor.exception.ProcessException) HL7QueryParsingException(org.apache.nifi.hl7.query.exception.HL7QueryParsingException) Relationship(org.apache.nifi.processor.Relationship) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) HashMap(java.util.HashMap) Map(java.util.Map)

Example 43 with Relationship

Use of org.apache.nifi.processor.Relationship in project nifi by apache.

The class ScanHBase, method finalizeFlowFile:

private void finalizeFlowFile(final ProcessSession session, final HBaseClientService hBaseClientService, FlowFile flowFile, final String tableName, Long rowsPulled, Exception e) {
    Relationship rel = REL_SUCCESS;
    flowFile = session.putAttribute(flowFile, HBASE_ROWS_COUNT_ATTR, rowsPulled.toString());
    final AtomicReference<IOException> ioe = new AtomicReference<>(null);
    flowFile = session.append(flowFile, (out) -> {
        try {
            out.write("]".getBytes());
        } catch (IOException ei) {
            ioe.set(ei);
        }
    });
    if (e != null || ioe.get() != null) {
        flowFile = session.putAttribute(flowFile, "scanhbase.error", (e == null ? e : ioe.get()).toString());
        rel = REL_FAILURE;
    } else {
        session.getProvenanceReporter().receive(flowFile, hBaseClientService.toTransitUri(tableName, "{ids}"));
    }
    session.transfer(flowFile, rel);
}
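
The AtomicReference<IOException> above captures a failure inside the append callback so that finalizeFlowFile can route the FlowFile to REL_FAILURE itself instead of letting the exception propagate out of the session call. The same stash-and-rethrow pattern is the standard way to get a checked exception out of any callback, including functional interfaces that cannot declare one. A minimal sketch independent of NiFi; forEachLine is a made-up callback API used only for illustration:

import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;

public class CallbackErrorCapture {
    // Stand-in for an API like session.append(flowFile, out -> ...) where we
    // want to handle the failure ourselves rather than let it escape the call.
    static void forEachLine(Consumer<String> callback) {
        callback.accept("line 1");
        callback.accept("");
    }

    public static void main(String[] args) throws IOException {
        final AtomicReference<IOException> error = new AtomicReference<>(null);
        forEachLine(line -> {
            try {
                if (line.isEmpty()) {
                    throw new IOException("empty line");
                }
            } catch (IOException e) {
                error.set(e);  // stash it; a Consumer cannot throw a checked exception
            }
        });
        if (error.get() != null) {
            throw error.get();  // re-surface after the callback returns, as finalizeFlowFile does
        }
    }
}
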
Also used : StandardValidators(org.apache.nifi.processor.util.StandardValidators) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) ValidationContext(org.apache.nifi.components.ValidationContext) JsonFullRowSerializer(org.apache.nifi.hbase.io.JsonFullRowSerializer) AtomicReference(java.util.concurrent.atomic.AtomicReference) StringUtils(org.apache.commons.lang3.StringUtils) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) Charset(java.nio.charset.Charset) WritesAttributes(org.apache.nifi.annotation.behavior.WritesAttributes) RowSerializer(org.apache.nifi.hbase.io.RowSerializer) Relationship(org.apache.nifi.processor.Relationship) Column(org.apache.nifi.hbase.scan.Column) ResultHandler(org.apache.nifi.hbase.scan.ResultHandler) ValidationResult(org.apache.nifi.components.ValidationResult) ResultCell(org.apache.nifi.hbase.scan.ResultCell) FlowFile(org.apache.nifi.flowfile.FlowFile) Collection(java.util.Collection) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) IOException(java.io.IOException) ProcessSession(org.apache.nifi.processor.ProcessSession) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) AllowableValue(org.apache.nifi.components.AllowableValue) StandardCharsets(java.nio.charset.StandardCharsets) List(java.util.List) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled) JsonQualifierAndValueRowSerializer(org.apache.nifi.hbase.io.JsonQualifierAndValueRowSerializer) AbstractProcessor(org.apache.nifi.processor.AbstractProcessor) Pattern(java.util.regex.Pattern) Tags(org.apache.nifi.annotation.documentation.Tags) CoreAttributes(org.apache.nifi.flowfile.attributes.CoreAttributes) Collections(java.util.Collections)

Example 44 with Relationship

Use of org.apache.nifi.processor.Relationship in project nifi by apache.

The class StandardProcessGroup, method updateProcessGroup:

private void updateProcessGroup(final ProcessGroup group, final VersionedProcessGroup proposed, final String componentIdSeed, final Set<String> updatedVersionedComponentIds, final boolean updatePosition, final boolean updateName, final boolean updateDescendantVersionedGroups, final Set<String> variablesToSkip) throws ProcessorInstantiationException {
    group.setComments(proposed.getComments());
    if (updateName) {
        group.setName(proposed.getName());
    }
    if (updatePosition && proposed.getPosition() != null) {
        group.setPosition(new Position(proposed.getPosition().getX(), proposed.getPosition().getY()));
    }
    // Determine which variables have been added/removed and add/remove them from this group's variable registry.
    // We don't worry about if a variable value has changed, because variables are designed to be 'environment specific.'
    // As a result, once imported, we won't update variables to match the remote flow, but we will add any missing variables
    // and remove any variables that are no longer part of the remote flow.
    final Set<String> existingVariableNames = group.getVariableRegistry().getVariableMap().keySet().stream().map(VariableDescriptor::getName).collect(Collectors.toSet());
    final Map<String, String> updatedVariableMap = new HashMap<>();
    // If any new variables exist in the proposed flow, add those to the variable registry.
    for (final Map.Entry<String, String> entry : proposed.getVariables().entrySet()) {
        if (!existingVariableNames.contains(entry.getKey()) && !variablesToSkip.contains(entry.getKey())) {
            updatedVariableMap.put(entry.getKey(), entry.getValue());
        }
    }
    group.setVariables(updatedVariableMap);
    final VersionedFlowCoordinates remoteCoordinates = proposed.getVersionedFlowCoordinates();
    if (remoteCoordinates == null) {
        group.disconnectVersionControl(false);
    } else {
        final String registryId = flowController.getFlowRegistryClient().getFlowRegistryId(remoteCoordinates.getRegistryUrl());
        final String bucketId = remoteCoordinates.getBucketId();
        final String flowId = remoteCoordinates.getFlowId();
        final int version = remoteCoordinates.getVersion();
        final FlowRegistry flowRegistry = flowController.getFlowRegistryClient().getFlowRegistry(registryId);
        final String registryName = flowRegistry == null ? registryId : flowRegistry.getName();
        final VersionedFlowState flowState = remoteCoordinates.getLatest() ? VersionedFlowState.UP_TO_DATE : VersionedFlowState.STALE;
        final VersionControlInformation vci = new StandardVersionControlInformation.Builder()
                .registryId(registryId)
                .registryName(registryName)
                .bucketId(bucketId)
                .bucketName(bucketId)
                .flowId(flowId)
                .flowName(flowId)
                .version(version)
                .flowSnapshot(proposed)
                .status(new StandardVersionedFlowStatus(flowState, flowState.getDescription()))
                .build();
        group.setVersionControlInformation(vci, Collections.emptyMap());
    }
    // Controller Services
    // Controller Services have to be handled a bit differently than other components. This is because Processors and Controller
    // Services may reference other Controller Services. Since we may be adding Service A, which depends on Service B, before adding
    // Service B, we need to ensure that we create all Controller Services first and then call updateControllerService for each
    // Controller Service. This way, we ensure that all services have been created before setting the properties. This allows us to
    // properly obtain the correct mapping of Controller Service VersionedComponentID to Controller Service instance id.
    final Map<String, ControllerServiceNode> servicesByVersionedId = group.getControllerServices(false).stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> controllerServicesRemoved = new HashSet<>(servicesByVersionedId.keySet());
    final Map<ControllerServiceNode, VersionedControllerService> services = new HashMap<>();
    // Add any Controller Service that does not yet exist.
    for (final VersionedControllerService proposedService : proposed.getControllerServices()) {
        ControllerServiceNode service = servicesByVersionedId.get(proposedService.getIdentifier());
        if (service == null) {
            service = addControllerService(group, proposedService, componentIdSeed);
            LOG.info("Added {} to {}", service, this);
        }
        services.put(service, proposedService);
    }
    // Update all of the Controller Services to match the VersionedControllerService
    for (final Map.Entry<ControllerServiceNode, VersionedControllerService> entry : services.entrySet()) {
        final ControllerServiceNode service = entry.getKey();
        final VersionedControllerService proposedService = entry.getValue();
        if (updatedVersionedComponentIds.contains(proposedService.getIdentifier())) {
            updateControllerService(service, proposedService);
            LOG.info("Updated {}", service);
        }
        controllerServicesRemoved.remove(proposedService.getIdentifier());
    }
    // Child groups
    final Map<String, ProcessGroup> childGroupsByVersionedId = group.getProcessGroups().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> childGroupsRemoved = new HashSet<>(childGroupsByVersionedId.keySet());
    for (final VersionedProcessGroup proposedChildGroup : proposed.getProcessGroups()) {
        final ProcessGroup childGroup = childGroupsByVersionedId.get(proposedChildGroup.getIdentifier());
        final VersionedFlowCoordinates childCoordinates = proposedChildGroup.getVersionedFlowCoordinates();
        if (childGroup == null) {
            final ProcessGroup added = addProcessGroup(group, proposedChildGroup, componentIdSeed, variablesToSkip);
            flowController.onProcessGroupAdded(added);
            added.findAllRemoteProcessGroups().stream().forEach(RemoteProcessGroup::initialize);
            LOG.info("Added {} to {}", added, this);
        } else if (childCoordinates == null || updateDescendantVersionedGroups) {
            updateProcessGroup(childGroup, proposedChildGroup, componentIdSeed, updatedVersionedComponentIds, true, true, updateDescendantVersionedGroups, variablesToSkip);
            LOG.info("Updated {}", childGroup);
        }
        childGroupsRemoved.remove(proposedChildGroup.getIdentifier());
    }
    // Funnels
    final Map<String, Funnel> funnelsByVersionedId = group.getFunnels().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> funnelsRemoved = new HashSet<>(funnelsByVersionedId.keySet());
    for (final VersionedFunnel proposedFunnel : proposed.getFunnels()) {
        final Funnel funnel = funnelsByVersionedId.get(proposedFunnel.getIdentifier());
        if (funnel == null) {
            final Funnel added = addFunnel(group, proposedFunnel, componentIdSeed);
            flowController.onFunnelAdded(added);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedFunnel.getIdentifier())) {
            updateFunnel(funnel, proposedFunnel);
            LOG.info("Updated {}", funnel);
        } else {
            funnel.setPosition(new Position(proposedFunnel.getPosition().getX(), proposedFunnel.getPosition().getY()));
        }
        funnelsRemoved.remove(proposedFunnel.getIdentifier());
    }
    // Input Ports
    final Map<String, Port> inputPortsByVersionedId = group.getInputPorts().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> inputPortsRemoved = new HashSet<>(inputPortsByVersionedId.keySet());
    for (final VersionedPort proposedPort : proposed.getInputPorts()) {
        final Port port = inputPortsByVersionedId.get(proposedPort.getIdentifier());
        if (port == null) {
            final Port added = addInputPort(group, proposedPort, componentIdSeed);
            flowController.onInputPortAdded(added);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedPort.getIdentifier())) {
            updatePort(port, proposedPort);
            LOG.info("Updated {}", port);
        } else {
            port.setPosition(new Position(proposedPort.getPosition().getX(), proposedPort.getPosition().getY()));
        }
        inputPortsRemoved.remove(proposedPort.getIdentifier());
    }
    // Output Ports
    final Map<String, Port> outputPortsByVersionedId = group.getOutputPorts().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> outputPortsRemoved = new HashSet<>(outputPortsByVersionedId.keySet());
    for (final VersionedPort proposedPort : proposed.getOutputPorts()) {
        final Port port = outputPortsByVersionedId.get(proposedPort.getIdentifier());
        if (port == null) {
            final Port added = addOutputPort(group, proposedPort, componentIdSeed);
            flowController.onOutputPortAdded(added);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedPort.getIdentifier())) {
            updatePort(port, proposedPort);
            LOG.info("Updated {}", port);
        } else {
            port.setPosition(new Position(proposedPort.getPosition().getX(), proposedPort.getPosition().getY()));
        }
        outputPortsRemoved.remove(proposedPort.getIdentifier());
    }
    // Labels
    final Map<String, Label> labelsByVersionedId = group.getLabels().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> labelsRemoved = new HashSet<>(labelsByVersionedId.keySet());
    for (final VersionedLabel proposedLabel : proposed.getLabels()) {
        final Label label = labelsByVersionedId.get(proposedLabel.getIdentifier());
        if (label == null) {
            final Label added = addLabel(group, proposedLabel, componentIdSeed);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedLabel.getIdentifier())) {
            updateLabel(label, proposedLabel);
            LOG.info("Updated {}", label);
        } else {
            label.setPosition(new Position(proposedLabel.getPosition().getX(), proposedLabel.getPosition().getY()));
        }
        labelsRemoved.remove(proposedLabel.getIdentifier());
    }
    // Processors
    final Map<String, ProcessorNode> processorsByVersionedId = group.getProcessors().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> processorsRemoved = new HashSet<>(processorsByVersionedId.keySet());
    final Map<ProcessorNode, Set<Relationship>> autoTerminatedRelationships = new HashMap<>();
    for (final VersionedProcessor proposedProcessor : proposed.getProcessors()) {
        final ProcessorNode processor = processorsByVersionedId.get(proposedProcessor.getIdentifier());
        if (processor == null) {
            final ProcessorNode added = addProcessor(group, proposedProcessor, componentIdSeed);
            flowController.onProcessorAdded(added);
            final Set<Relationship> proposedAutoTerminated = proposedProcessor.getAutoTerminatedRelationships() == null ? Collections.emptySet() : proposedProcessor.getAutoTerminatedRelationships().stream().map(relName -> added.getRelationship(relName)).collect(Collectors.toSet());
            autoTerminatedRelationships.put(added, proposedAutoTerminated);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedProcessor.getIdentifier())) {
            updateProcessor(processor, proposedProcessor);
            final Set<Relationship> proposedAutoTerminated = proposedProcessor.getAutoTerminatedRelationships() == null ? Collections.emptySet() : proposedProcessor.getAutoTerminatedRelationships().stream().map(relName -> processor.getRelationship(relName)).collect(Collectors.toSet());
            if (!processor.getAutoTerminatedRelationships().equals(proposedAutoTerminated)) {
                autoTerminatedRelationships.put(processor, proposedAutoTerminated);
            }
            LOG.info("Updated {}", processor);
        } else {
            processor.setPosition(new Position(proposedProcessor.getPosition().getX(), proposedProcessor.getPosition().getY()));
        }
        processorsRemoved.remove(proposedProcessor.getIdentifier());
    }
    // Remote Groups
    final Map<String, RemoteProcessGroup> rpgsByVersionedId = group.getRemoteProcessGroups().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> rpgsRemoved = new HashSet<>(rpgsByVersionedId.keySet());
    for (final VersionedRemoteProcessGroup proposedRpg : proposed.getRemoteProcessGroups()) {
        final RemoteProcessGroup rpg = rpgsByVersionedId.get(proposedRpg.getIdentifier());
        if (rpg == null) {
            final RemoteProcessGroup added = addRemoteProcessGroup(group, proposedRpg, componentIdSeed);
            LOG.info("Added {} to {}", added, this);
        } else if (updatedVersionedComponentIds.contains(proposedRpg.getIdentifier())) {
            updateRemoteProcessGroup(rpg, proposedRpg, componentIdSeed);
            LOG.info("Updated {}", rpg);
        } else {
            rpg.setPosition(new Position(proposedRpg.getPosition().getX(), proposedRpg.getPosition().getY()));
        }
        rpgsRemoved.remove(proposedRpg.getIdentifier());
    }
    // Connections
    final Map<String, Connection> connectionsByVersionedId = group.getConnections().stream().collect(Collectors.toMap(component -> component.getVersionedComponentId().orElse(component.getIdentifier()), Function.identity()));
    final Set<String> connectionsRemoved = new HashSet<>(connectionsByVersionedId.keySet());
    for (final VersionedConnection proposedConnection : proposed.getConnections()) {
        final Connection connection = connectionsByVersionedId.get(proposedConnection.getIdentifier());
        if (connection == null) {
            final Connection added = addConnection(group, proposedConnection, componentIdSeed);
            flowController.onConnectionAdded(added);
            LOG.info("Added {} to {}", added, this);
        } else if (isUpdateable(connection)) {
            // If the connection needs to be updated, then the source and destination will already have
            // been stopped (else, the validation above would fail). So if the source or the destination is running,
            // then we know that we don't need to update the connection.
            updateConnection(connection, proposedConnection);
            LOG.info("Updated {}", connection);
        }
        connectionsRemoved.remove(proposedConnection.getIdentifier());
    }
    // Remove connections first; otherwise we would fail to remove a component if it has a connection going to it!
    for (final String removedVersionedId : connectionsRemoved) {
        final Connection connection = connectionsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", connection, group);
        group.removeConnection(connection);
        flowController.onConnectionRemoved(connection);
    }
    // Once the appropriate connections have been removed, we may now update Processors' auto-terminated relationships.
    // We cannot do this above, in the 'updateProcessor' call because if a connection is removed and changed to auto-terminated,
    // then updating this in the updateProcessor call above would attempt to set the Relationship to being auto-terminated while a
    // Connection for that relationship exists. This will throw an Exception.
    autoTerminatedRelationships.forEach((proc, rels) -> proc.setAutoTerminatedRelationships(rels));
    // Remove all controller services no longer in use
    for (final String removedVersionedId : controllerServicesRemoved) {
        final ControllerServiceNode service = servicesByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", service, group);
        // Must remove Controller Service through Flow Controller in order to remove from cache
        flowController.removeControllerService(service);
    }
    for (final String removedVersionedId : funnelsRemoved) {
        final Funnel funnel = funnelsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", funnel, group);
        group.removeFunnel(funnel);
    }
    for (final String removedVersionedId : inputPortsRemoved) {
        final Port port = inputPortsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", port, group);
        group.removeInputPort(port);
    }
    for (final String removedVersionedId : outputPortsRemoved) {
        final Port port = outputPortsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", port, group);
        group.removeOutputPort(port);
    }
    for (final String removedVersionedId : labelsRemoved) {
        final Label label = labelsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", label, group);
        group.removeLabel(label);
    }
    for (final String removedVersionedId : processorsRemoved) {
        final ProcessorNode processor = processorsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", processor, group);
        group.removeProcessor(processor);
    }
    for (final String removedVersionedId : rpgsRemoved) {
        final RemoteProcessGroup rpg = rpgsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", rpg, group);
        group.removeRemoteProcessGroup(rpg);
    }
    for (final String removedVersionedId : childGroupsRemoved) {
        final ProcessGroup childGroup = childGroupsByVersionedId.get(removedVersionedId);
        LOG.info("Removing {} from {}", childGroup, group);
        group.removeProcessGroup(childGroup);
    }
}
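
Every component family in this method follows the same reconciliation shape: index the group's current components by versioned id (falling back to the instance id), walk the proposed components to add or update, and remove whatever id was never visited. The comments also call out two ordering constraints: connections are removed before any other component, and auto-terminated relationships are applied only after those removals, since a relationship cannot be auto-terminated while a connection still exists for it. A generic sketch of the shape; Component, Proposed, and reconcile are placeholders, not NiFi classes:

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ReconcileSketch {
    interface Component { String versionedId(); }
    interface Proposed { String identifier(); }

    static <C extends Component, P extends Proposed> void reconcile(
            List<C> current, List<P> proposed,
            Function<P, C> add,            // create a component for a new proposed entry
            BiConsumer<C, P> update,       // apply proposed state to an existing component
            Consumer<C> remove) {          // drop a component absent from the proposed flow
        final Map<String, C> byId = current.stream()
                .collect(Collectors.toMap(Component::versionedId, Function.identity()));
        final Set<String> removed = new HashSet<>(byId.keySet());
        for (final P p : proposed) {
            final C existing = byId.get(p.identifier());
            if (existing == null) {
                add.apply(p);
            } else {
                update.accept(existing, p);
            }
            // Anything still in this set after the loop was not in the proposed flow.
            removed.remove(p.identifier());
        }
        for (final String id : removed) {
            remove.accept(byId.get(id));
        }
    }
}
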
Also used : OnRemoved(org.apache.nifi.annotation.lifecycle.OnRemoved) ConfigurationContext(org.apache.nifi.controller.ConfigurationContext) Size(org.apache.nifi.connectable.Size) FlowComparison(org.apache.nifi.registry.flow.diff.FlowComparison) StringUtils(org.apache.commons.lang3.StringUtils) ReflectionUtils(org.apache.nifi.util.ReflectionUtils) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) SecureRandom(java.security.SecureRandom) NiFiRegistryException(org.apache.nifi.registry.client.NiFiRegistryException) ComponentType(org.apache.nifi.registry.flow.ComponentType) VersionedProcessGroup(org.apache.nifi.registry.flow.VersionedProcessGroup) SnippetUtils(org.apache.nifi.util.SnippetUtils) Map(java.util.Map) HashCodeBuilder(org.apache.commons.lang3.builder.HashCodeBuilder) RootGroupPort(org.apache.nifi.remote.RootGroupPort) Connectable(org.apache.nifi.connectable.Connectable) Connection(org.apache.nifi.connectable.Connection) Bundle(org.apache.nifi.registry.flow.Bundle) FlowFilePrioritizer(org.apache.nifi.flowfile.FlowFilePrioritizer) FlowDifferenceFilters(org.apache.nifi.util.FlowDifferenceFilters) VersionedFlowStatus(org.apache.nifi.registry.flow.VersionedFlowStatus) Set(java.util.Set) VersionedFlowCoordinates(org.apache.nifi.registry.flow.VersionedFlowCoordinates) VersionedRemoteGroupPort(org.apache.nifi.registry.flow.VersionedRemoteGroupPort) FlowController(org.apache.nifi.controller.FlowController) StandardCharsets(java.nio.charset.StandardCharsets) StateManagerProvider(org.apache.nifi.components.state.StateManagerProvider) Position(org.apache.nifi.connectable.Position) ScheduledState(org.apache.nifi.controller.ScheduledState) ControllerService(org.apache.nifi.controller.ControllerService) ExtensionManager(org.apache.nifi.nar.ExtensionManager) StandardVersionControlInformation(org.apache.nifi.registry.flow.StandardVersionControlInformation) Resource(org.apache.nifi.authorization.Resource) FlowComparator(org.apache.nifi.registry.flow.diff.FlowComparator) StaticDifferenceDescriptor(org.apache.nifi.registry.flow.diff.StaticDifferenceDescriptor) StandardComparableDataFlow(org.apache.nifi.registry.flow.diff.StandardComparableDataFlow) SiteToSiteTransportProtocol(org.apache.nifi.remote.protocol.SiteToSiteTransportProtocol) ReentrantReadWriteLock(java.util.concurrent.locks.ReentrantReadWriteLock) ArrayList(java.util.ArrayList) Relationship(org.apache.nifi.processor.Relationship) ControllerServiceReference(org.apache.nifi.controller.service.ControllerServiceReference) ControllerServiceProvider(org.apache.nifi.controller.service.ControllerServiceProvider) VersionedLabel(org.apache.nifi.registry.flow.VersionedLabel) LinkedHashSet(java.util.LinkedHashSet) VersionedFlowState(org.apache.nifi.registry.flow.VersionedFlowState) EvolvingDifferenceDescriptor(org.apache.nifi.registry.flow.diff.EvolvingDifferenceDescriptor) ConfiguredComponent(org.apache.nifi.controller.ConfiguredComponent) Positionable(org.apache.nifi.connectable.Positionable) ExecutionNode(org.apache.nifi.scheduling.ExecutionNode) IOException(java.io.IOException) VersionedFlowSnapshot(org.apache.nifi.registry.flow.VersionedFlowSnapshot) NiFiRegistryFlowMapper(org.apache.nifi.registry.flow.mapping.NiFiRegistryFlowMapper) Lock(java.util.concurrent.locks.Lock) NiFiProperties(org.apache.nifi.util.NiFiProperties) VariableImpact(org.apache.nifi.attribute.expression.language.VariableImpact) FlowFileQueue(org.apache.nifi.controller.queue.FlowFileQueue) 
ProcessorInstantiationException(org.apache.nifi.controller.exception.ProcessorInstantiationException) BundleCoordinate(org.apache.nifi.bundle.BundleCoordinate) URL(java.net.URL) ConnectableType(org.apache.nifi.connectable.ConnectableType) ConnectableComponent(org.apache.nifi.registry.flow.ConnectableComponent) VariableDescriptor(org.apache.nifi.registry.VariableDescriptor) LoggerFactory(org.slf4j.LoggerFactory) Port(org.apache.nifi.connectable.Port) StandardFlowComparator(org.apache.nifi.registry.flow.diff.StandardFlowComparator) Query(org.apache.nifi.attribute.expression.language.Query) ResourceType(org.apache.nifi.authorization.resource.ResourceType) TemplateDTO(org.apache.nifi.web.api.dto.TemplateDTO) SchedulingStrategy(org.apache.nifi.scheduling.SchedulingStrategy) VersionedPort(org.apache.nifi.registry.flow.VersionedPort) VersionedRemoteProcessGroup(org.apache.nifi.registry.flow.VersionedRemoteProcessGroup) StandardProcessScheduler(org.apache.nifi.controller.scheduling.StandardProcessScheduler) VersionedComponent(org.apache.nifi.registry.flow.VersionedComponent) DifferenceType(org.apache.nifi.registry.flow.diff.DifferenceType) VersionedConnection(org.apache.nifi.registry.flow.VersionedConnection) Template(org.apache.nifi.controller.Template) Label(org.apache.nifi.controller.label.Label) FlowRegistryClient(org.apache.nifi.registry.flow.FlowRegistryClient) OnShutdown(org.apache.nifi.annotation.lifecycle.OnShutdown) MutableVariableRegistry(org.apache.nifi.registry.variable.MutableVariableRegistry) Authorizable(org.apache.nifi.authorization.resource.Authorizable) UUID(java.util.UUID) ComponentLifeCycleException(org.apache.nifi.controller.exception.ComponentLifeCycleException) Snippet(org.apache.nifi.controller.Snippet) Collectors(java.util.stream.Collectors) ResourceFactory(org.apache.nifi.authorization.resource.ResourceFactory) Objects(java.util.Objects) List(java.util.List) BatchSize(org.apache.nifi.registry.flow.BatchSize) VersionedFunnel(org.apache.nifi.registry.flow.VersionedFunnel) ToStringBuilder(org.apache.commons.lang3.builder.ToStringBuilder) VersionControlInformation(org.apache.nifi.registry.flow.VersionControlInformation) Optional(java.util.Optional) LocalPort(org.apache.nifi.connectable.LocalPort) StandardProcessContext(org.apache.nifi.processor.StandardProcessContext) ProcessorNode(org.apache.nifi.controller.ProcessorNode) Revision(org.apache.nifi.web.Revision) Funnel(org.apache.nifi.connectable.Funnel) ControllerServiceNode(org.apache.nifi.controller.service.ControllerServiceNode) ToStringStyle(org.apache.commons.lang3.builder.ToStringStyle) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) AtomicReference(java.util.concurrent.atomic.AtomicReference) Function(java.util.function.Function) FlowRegistry(org.apache.nifi.registry.flow.FlowRegistry) HashSet(java.util.HashSet) StringEncryptor(org.apache.nifi.encrypt.StringEncryptor) ComparableDataFlow(org.apache.nifi.registry.flow.diff.ComparableDataFlow) Objects.requireNonNull(java.util.Objects.requireNonNull) StandardConfigurationContext(org.apache.nifi.controller.service.StandardConfigurationContext) NarCloseable(org.apache.nifi.nar.NarCloseable) LogLevel(org.apache.nifi.logging.LogLevel) VersionedProcessor(org.apache.nifi.registry.flow.VersionedProcessor) Logger(org.slf4j.Logger) StateManager(org.apache.nifi.components.state.StateManager) RemoteGroupPort(org.apache.nifi.remote.RemoteGroupPort) 
StandardRemoteProcessGroupPortDescriptor(org.apache.nifi.remote.StandardRemoteProcessGroupPortDescriptor) VersionedFlow(org.apache.nifi.registry.flow.VersionedFlow) VersionedControllerService(org.apache.nifi.registry.flow.VersionedControllerService) TimeUnit(java.util.concurrent.TimeUnit) ComponentVariableRegistry(org.apache.nifi.registry.ComponentVariableRegistry) FlowDifference(org.apache.nifi.registry.flow.diff.FlowDifference) VersionedPropertyDescriptor(org.apache.nifi.registry.flow.VersionedPropertyDescriptor) Collections(java.util.Collections) LogRepositoryFactory(org.apache.nifi.logging.LogRepositoryFactory)

Example 45 with Relationship

Use of org.apache.nifi.processor.Relationship in project nifi by apache.

The class NiFiRegistryFlowMapper, method mapConnection:

public VersionedConnection mapConnection(final Connection connection) {
    final FlowFileQueue queue = connection.getFlowFileQueue();
    final VersionedConnection versionedConnection = new InstantiatedVersionedConnection(connection.getIdentifier(), connection.getProcessGroup().getIdentifier());
    versionedConnection.setIdentifier(getId(connection.getVersionedComponentId(), connection.getIdentifier()));
    versionedConnection.setGroupIdentifier(getGroupId(connection.getProcessGroup().getIdentifier()));
    versionedConnection.setName(connection.getName());
    versionedConnection.setBackPressureDataSizeThreshold(queue.getBackPressureDataSizeThreshold());
    versionedConnection.setBackPressureObjectThreshold(queue.getBackPressureObjectThreshold());
    versionedConnection.setFlowFileExpiration(queue.getFlowFileExpiration());
    versionedConnection.setLabelIndex(connection.getLabelIndex());
    versionedConnection.setPrioritizers(queue.getPriorities().stream().map(p -> p.getClass().getName()).collect(Collectors.toList()));
    versionedConnection.setSelectedRelationships(connection.getRelationships().stream().map(Relationship::getName).collect(Collectors.toSet()));
    versionedConnection.setzIndex(connection.getZIndex());
    versionedConnection.setBends(connection.getBendPoints().stream().map(this::mapPosition).collect(Collectors.toList()));
    versionedConnection.setSource(mapConnectable(connection.getSource()));
    versionedConnection.setDestination(mapConnectable(connection.getDestination()));
    return versionedConnection;
}
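
The mapper persists a connection's selected relationships by name only (Relationship::getName). For context, a relationship is declared in a processor with the standard builder, and it is this name that round-trips through the registry:

import org.apache.nifi.processor.Relationship;

public class RelationshipSketch {
    // The conventional way a NiFi processor declares a relationship.
    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("FlowFiles that were processed successfully")
            .build();

    public static void main(String[] args) {
        // Only this name is stored in a VersionedConnection's selected relationships;
        // on import, the name is resolved back to the processor's Relationship instance,
        // as updateProcessGroup does with getRelationship(relName) above.
        System.out.println(REL_SUCCESS.getName());   // prints: success
    }
}
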
Also used : Relationship(org.apache.nifi.processor.Relationship) VersionedConnection(org.apache.nifi.registry.flow.VersionedConnection) FlowFileQueue(org.apache.nifi.controller.queue.FlowFileQueue)

Aggregations

Relationship (org.apache.nifi.processor.Relationship): 106
ArrayList (java.util.ArrayList): 41
HashSet (java.util.HashSet): 40
HashMap (java.util.HashMap): 32
FlowFile (org.apache.nifi.flowfile.FlowFile): 32
Map (java.util.Map): 31
IOException (java.io.IOException): 26
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 26
Test (org.junit.Test): 23
List (java.util.List): 20
Set (java.util.Set): 19
Connection (org.apache.nifi.connectable.Connection): 18
TestRunner (org.apache.nifi.util.TestRunner): 18
ProcessException (org.apache.nifi.processor.exception.ProcessException): 17
ProcessSession (org.apache.nifi.processor.ProcessSession): 15
InputStream (java.io.InputStream): 14
DynamicRelationship (org.apache.nifi.annotation.behavior.DynamicRelationship): 12
Processor (org.apache.nifi.processor.Processor): 12
Collections (java.util.Collections): 11
AtomicLong (java.util.concurrent.atomic.AtomicLong): 10