Example 66 with StateMap

use of org.apache.nifi.components.state.StateMap in project nifi by apache.

The class MockStateManager, method assertStateNotEquals:

/**
 * Ensures that the state is not equal to the given values
 *
 * @param stateValues the unexpected values
 * @param scope the scope to compare the stateValues against
 */
public void assertStateNotEquals(final Map<String, String> stateValues, final Scope scope) {
    final StateMap stateMap = retrieveState(scope);
    Assert.assertNotSame(stateValues, stateMap.toMap());
}
Also used : StateMap(org.apache.nifi.components.state.StateMap)
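
In processor tests this assertion is normally reached through the MockStateManager that TestRunner exposes. A minimal sketch of that usage, assuming a hypothetical stateful processor class MyStatefulProcessor that updates a "count" key in LOCAL state when triggered:

import java.util.Collections;

import org.apache.nifi.components.state.Scope;
import org.apache.nifi.state.MockStateManager;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

public class MyStatefulProcessorStateTest {

    @Test
    public void testStateIsUpdated() throws Exception {
        // MyStatefulProcessor is a hypothetical processor that updates the "count" key in LOCAL state
        final TestRunner runner = TestRunners.newTestRunner(MyStatefulProcessor.class);
        final MockStateManager stateManager = runner.getStateManager();

        // Seed the local state with a known starting value
        stateManager.setState(Collections.singletonMap("count", "0"), Scope.LOCAL);

        runner.run();

        // Check the stored state against the seeded values using the assertion shown above
        stateManager.assertStateNotEquals(Collections.singletonMap("count", "0"), Scope.LOCAL);
    }
}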

Example 67 with StateMap

use of org.apache.nifi.components.state.StateMap in project nifi by apache.

The class UpdateAttribute, method onScheduled:

@OnScheduled
public void onScheduled(final ProcessContext context) throws IOException {
    criteriaCache.set(CriteriaSerDe.deserialize(context.getAnnotationData()));
    propertyValues.clear();
    if (stateful) {
        StateManager stateManager = context.getStateManager();
        StateMap state = stateManager.getState(Scope.LOCAL);
        HashMap<String, String> tempMap = new HashMap<>();
        tempMap.putAll(state.toMap());
        String initValue = context.getProperty(STATEFUL_VARIABLES_INIT_VALUE).getValue();
        // Initialize the stateful default actions
        for (PropertyDescriptor entry : context.getProperties().keySet()) {
            if (entry.isDynamic()) {
                if (!tempMap.containsKey(entry.getName())) {
                    tempMap.put(entry.getName(), initValue);
                }
            }
        }
        // Initialize the stateful actions if the criteria exists
        final Criteria criteria = criteriaCache.get();
        if (criteria != null) {
            for (Rule rule : criteria.getRules()) {
                for (Action action : rule.getActions()) {
                    if (!tempMap.containsKey(action.getAttribute())) {
                        tempMap.put(action.getAttribute(), initValue);
                    }
                }
            }
        }
        context.getStateManager().setState(tempMap, Scope.LOCAL);
    }
    defaultActions = getDefaultActions(context.getProperties());
    debugEnabled = getLogger().isDebugEnabled();
}
Also used : Action(org.apache.nifi.update.attributes.Action) StateManager(org.apache.nifi.components.state.StateManager) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) StateMap(org.apache.nifi.components.state.StateMap) Criteria(org.apache.nifi.update.attributes.Criteria) Rule(org.apache.nifi.update.attributes.Rule) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled)
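
The pattern above reads the existing local state, adds a default value for every key that is missing, and writes the merged map back. A minimal sketch of that pattern in isolation, assuming a hypothetical StateInitializer helper to which the caller supplies the key names and the default value:

import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;
import org.apache.nifi.components.state.StateMap;

public final class StateInitializer {

    private StateInitializer() {
    }

    /**
     * Ensures that every given key exists in LOCAL state, seeding missing keys with the default value.
     * Keys that are already stored are left untouched.
     */
    public static void initializeState(final StateManager stateManager, final Collection<String> keys,
            final String defaultValue) throws IOException {
        final StateMap state = stateManager.getState(Scope.LOCAL);

        // Start from whatever is already stored so that existing values survive
        final Map<String, String> merged = new HashMap<>(state.toMap());
        for (final String key : keys) {
            merged.putIfAbsent(key, defaultValue);
        }

        stateManager.setState(merged, Scope.LOCAL);
    }
}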

Example 68 with StateMap

use of org.apache.nifi.components.state.StateMap in project nifi by apache.

The class StandardNiFiServiceFacade, method getReportingTaskState:

@Override
public ComponentStateDTO getReportingTaskState(final String reportingTaskId) {
    final StateMap clusterState = isClustered() ? reportingTaskDAO.getState(reportingTaskId, Scope.CLUSTER) : null;
    final StateMap localState = reportingTaskDAO.getState(reportingTaskId, Scope.LOCAL);
    // reporting task will be non null as it was already found when getting the state
    final ReportingTaskNode reportingTask = reportingTaskDAO.getReportingTask(reportingTaskId);
    return dtoFactory.createComponentStateDTO(reportingTaskId, reportingTask.getReportingTask().getClass(), localState, clusterState);
}
Also used : ReportingTaskNode(org.apache.nifi.controller.ReportingTaskNode) StateMap(org.apache.nifi.components.state.StateMap)
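
The two-scope lookup, cluster state only when the node is clustered and local state always, can be reused outside the facade. A minimal sketch, assuming a hypothetical ComponentState holder rather than NiFi's ComponentStateDTO:

import java.io.IOException;

import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;
import org.apache.nifi.components.state.StateMap;

/** Hypothetical value object pairing the two state scopes of a component. */
public class ComponentState {

    private final StateMap localState;
    private final StateMap clusterState;

    private ComponentState(final StateMap localState, final StateMap clusterState) {
        this.localState = localState;
        this.clusterState = clusterState;
    }

    /**
     * Reads LOCAL state unconditionally and CLUSTER state only when the instance is clustered,
     * mirroring the lookup performed by getReportingTaskState above.
     */
    public static ComponentState fromStateManager(final StateManager stateManager, final boolean clustered) throws IOException {
        final StateMap cluster = clustered ? stateManager.getState(Scope.CLUSTER) : null;
        final StateMap local = stateManager.getState(Scope.LOCAL);
        return new ComponentState(local, cluster);
    }

    public StateMap getLocalState() {
        return localState;
    }

    public StateMap getClusterState() {
        return clusterState;
    }
}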

Example 69 with StateMap

use of org.apache.nifi.components.state.StateMap in project nifi by apache.

The class GetHDFSEvents, method onTrigger:

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final StateManager stateManager = context.getStateManager();
    try {
        StateMap state = stateManager.getState(Scope.CLUSTER);
        String txIdAsString = state.get(LAST_TX_ID);
        if (txIdAsString != null && !"".equals(txIdAsString)) {
            lastTxId = Long.parseLong(txIdAsString);
        }
    } catch (IOException e) {
        getLogger().error("Unable to retrieve last transaction ID. Must retrieve last processed transaction ID before processing can occur.", e);
        context.yield();
        return;
    }
    try {
        final int retries = context.getProperty(NUMBER_OF_RETRIES_FOR_POLL).asInteger();
        final TimeUnit pollDurationTimeUnit = TimeUnit.MICROSECONDS;
        final long pollDuration = context.getProperty(POLL_DURATION).asTimePeriod(pollDurationTimeUnit);
        final DFSInotifyEventInputStream eventStream = lastTxId == -1L ? getHdfsAdmin().getInotifyEventStream() : getHdfsAdmin().getInotifyEventStream(lastTxId);
        final EventBatch eventBatch = getEventBatch(eventStream, pollDuration, pollDurationTimeUnit, retries);
        if (eventBatch != null && eventBatch.getEvents() != null) {
            if (eventBatch.getEvents().length > 0) {
                List<FlowFile> flowFiles = new ArrayList<>(eventBatch.getEvents().length);
                for (Event e : eventBatch.getEvents()) {
                    if (toProcessEvent(context, e)) {
                        getLogger().debug("Creating flow file for event: {}.", new Object[] { e });
                        final String path = getPath(e);
                        FlowFile flowFile = session.create();
                        flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/json");
                        flowFile = session.putAttribute(flowFile, EventAttributes.EVENT_TYPE, e.getEventType().name());
                        flowFile = session.putAttribute(flowFile, EventAttributes.EVENT_PATH, path);
                        flowFile = session.write(flowFile, new OutputStreamCallback() {

                            @Override
                            public void process(OutputStream out) throws IOException {
                                out.write(OBJECT_MAPPER.writeValueAsBytes(e));
                            }
                        });
                        flowFiles.add(flowFile);
                    }
                }
                for (FlowFile flowFile : flowFiles) {
                    final String path = flowFile.getAttribute(EventAttributes.EVENT_PATH);
                    final String transitUri = path.startsWith("/") ? "hdfs:/" + path : "hdfs://" + path;
                    getLogger().debug("Transferring flow file {} and creating provenance event with URI {}.", new Object[] { flowFile, transitUri });
                    session.transfer(flowFile, REL_SUCCESS);
                    session.getProvenanceReporter().receive(flowFile, transitUri);
                }
            }
            lastTxId = eventBatch.getTxid();
        }
    } catch (IOException | InterruptedException e) {
        getLogger().error("Unable to get notification information: {}", new Object[] { e });
        context.yield();
        return;
    } catch (MissingEventsException e) {
        // set lastTxId to -1 and update state. This may cause events not to be processed. The reason this exception is thrown is described in the
        // org.apache.hadoop.hdfs.client.HdfsAdmin#getInotifyEventStream API. It suggests tuning a couple of parameters if this API is used.
        lastTxId = -1L;
        getLogger().error("Unable to get notification information. Setting transaction id to -1. This may cause some events to get missed. " + "Please see javadoc for org.apache.hadoop.hdfs.client.HdfsAdmin#getInotifyEventStream: {}", new Object[] { e });
    }
    updateClusterStateForTxId(stateManager);
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) StateMap(org.apache.nifi.components.state.StateMap) OutputStream(java.io.OutputStream) ArrayList(java.util.ArrayList) IOException(java.io.IOException) MissingEventsException(org.apache.hadoop.hdfs.inotify.MissingEventsException) StateManager(org.apache.nifi.components.state.StateManager) TimeUnit(java.util.concurrent.TimeUnit) Event(org.apache.hadoop.hdfs.inotify.Event) DFSInotifyEventInputStream(org.apache.hadoop.hdfs.DFSInotifyEventInputStream) OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) EventBatch(org.apache.hadoop.hdfs.inotify.EventBatch)
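
onTrigger finishes by calling updateClusterStateForTxId(stateManager), which is not shown in the snippet. A minimal sketch of what persisting the transaction id back to cluster state could look like, written here as a standalone helper; the key literal and the method body are assumptions, not the project's exact implementation:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;

public final class TxIdStateUtil {

    /** State key mirroring the LAST_TX_ID constant used in the snippet above (literal value assumed). */
    public static final String LAST_TX_ID = "last.tx.id";

    private TxIdStateUtil() {
    }

    /**
     * Persists the last processed transaction id under the LAST_TX_ID key in CLUSTER scope,
     * preserving any other keys that are already stored.
     */
    public static void updateClusterStateForTxId(final StateManager stateManager, final long lastTxId) throws IOException {
        final Map<String, String> newState = new HashMap<>(stateManager.getState(Scope.CLUSTER).toMap());
        newState.put(LAST_TX_ID, String.valueOf(lastTxId));
        stateManager.setState(newState, Scope.CLUSTER);
    }
}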

Example 70 with StateMap

use of org.apache.nifi.components.state.StateMap in project nifi by apache.

The class ListGCSBucketTest, method testPersistState:

@Test
public void testPersistState() throws Exception {
    reset(storage);
    final ListGCSBucket processor = getProcessor();
    final TestRunner runner = buildNewRunner(processor);
    addRequiredPropertiesToRunner(runner);
    runner.assertValid();
    assertEquals("Cluster StateMap should be fresh (version -1L)", -1L, runner.getProcessContext().getStateManager().getState(Scope.CLUSTER).getVersion());
    processor.currentKeys = ImmutableSet.of("test-key-0", "test-key-1");
    processor.currentTimestamp = 4L;
    processor.persistState(runner.getProcessContext());
    final StateMap stateMap = runner.getStateManager().getState(Scope.CLUSTER);
    assertEquals("Cluster StateMap should have been written to", 1L, stateMap.getVersion());
    assertEquals(ImmutableMap.of(ListGCSBucket.CURRENT_TIMESTAMP, String.valueOf(4L), ListGCSBucket.CURRENT_KEY_PREFIX + "0", "test-key-0", ListGCSBucket.CURRENT_KEY_PREFIX + "1", "test-key-1"), stateMap.toMap());
}
Also used : TestRunner(org.apache.nifi.util.TestRunner) StateMap(org.apache.nifi.components.state.StateMap) Test(org.junit.Test)
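
The test depends on two MockStateManager behaviours: a state map that has never been written reports version -1, and the version rises once state is stored. A minimal sketch of those checks in isolation, assuming a hypothetical placeholder processor class MyProcessor:

import java.util.Collections;

import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.state.MockStateManager;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Assert;
import org.junit.Test;

public class StateVersionTest {

    @Test
    public void testStateVersionStartsAtMinusOne() throws Exception {
        // MyProcessor is a hypothetical placeholder; any processor class works for exercising the mock state manager
        final TestRunner runner = TestRunners.newTestRunner(MyProcessor.class);
        final MockStateManager stateManager = runner.getStateManager();

        // A state map that has never been written reports version -1
        final StateMap fresh = stateManager.getState(Scope.CLUSTER);
        Assert.assertEquals(-1L, fresh.getVersion());

        // After the first write the mock reports a higher version, as testPersistState above expects
        stateManager.setState(Collections.singletonMap("key", "value"), Scope.CLUSTER);
        final StateMap written = stateManager.getState(Scope.CLUSTER);
        Assert.assertEquals(1L, written.getVersion());
    }
}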

Aggregations

StateMap (org.apache.nifi.components.state.StateMap): 70
HashMap (java.util.HashMap): 31
Test (org.junit.Test): 29
IOException (java.io.IOException): 18
StateProvider (org.apache.nifi.components.state.StateProvider): 14
ArrayList (java.util.ArrayList): 11
StateManager (org.apache.nifi.components.state.StateManager): 11
FlowFile (org.apache.nifi.flowfile.FlowFile): 10
TestRunner (org.apache.nifi.util.TestRunner): 10
OnScheduled (org.apache.nifi.annotation.lifecycle.OnScheduled): 9
ComponentLog (org.apache.nifi.logging.ComponentLog): 8
Map (java.util.Map): 7
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 7
ProcessException (org.apache.nifi.processor.exception.ProcessException): 7
Date (java.util.Date): 6
List (java.util.List): 6
TimeUnit (java.util.concurrent.TimeUnit): 6
Scope (org.apache.nifi.components.state.Scope): 6
ProcessSession (org.apache.nifi.processor.ProcessSession): 6
Collections (java.util.Collections): 5