Search in sources :

Example 21 with StateManager

use of org.apache.nifi.components.state.StateManager in project nifi by apache.

From the class TestGenerateTableFetch, the method testBackwardsCompatibilityStateKeyVariableRegistry:

@Test
public void testBackwardsCompatibilityStateKeyVariableRegistry() throws Exception {
    // Load test data into the database. try-with-resources ensures the Statement is
    // always closed, even if one of the DDL/DML statements throws.
    final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
    try (final Statement stmt = con.createStatement()) {
        try {
            stmt.execute("drop table TEST_QUERY_DB_TABLE");
        } catch (final SQLException ignored) {
            // Ignore this error, probably a "table does not exist" since Derby doesn't yet support DROP IF EXISTS [DERBY-4842]
        }
        stmt.execute("create table TEST_QUERY_DB_TABLE (id integer not null, bucket integer not null)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (0, 0)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (1, 0)");
    }
    runner.setProperty(GenerateTableFetch.TABLE_NAME, "${tableName}");
    runner.setIncomingConnection(false);
    runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "${maxValueCol}");
    runner.setVariable("tableName", "TEST_QUERY_DB_TABLE");
    runner.setVariable("maxValueCol", "id");
    // Pre-populate the state with a key for column name (not fully-qualified).
    // A plain HashMap replaces the double-brace initializer, which creates an anonymous
    // subclass holding a hidden reference to the enclosing test instance.
    final StateManager stateManager = runner.getStateManager();
    final HashMap<String, String> initialState = new HashMap<>();
    initialState.put("id", "0");
    stateManager.setState(initialState, Scope.CLUSTER);
    // Pre-populate the column type map with an entry for id (not fully-qualified)
    processor.columnTypeMap.put("id", 4);
    runner.run();
    runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1);
    MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
    // Note there is no WHERE clause here. Because we are using dynamic tables (i.e. Expression Language,
    // even when not referring to flow file attributes), the old state key/value is not retrieved
    assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id <= 1 ORDER BY id FETCH NEXT 10000 ROWS ONLY", new String(flowFile.toByteArray()));
}
Also used : MockFlowFile(org.apache.nifi.util.MockFlowFile) StateManager(org.apache.nifi.components.state.StateManager) SQLException(java.sql.SQLException) Statement(java.sql.Statement) Connection(java.sql.Connection) DBCPService(org.apache.nifi.dbcp.DBCPService) Matchers.anyString(org.mockito.Matchers.anyString) Test(org.junit.Test)

Example 22 with StateManager

use of org.apache.nifi.components.state.StateManager in project nifi by apache.

From the class StandardProcessorTestRunner, the method assertValid:

@Override
public void assertValid(final ControllerService service) {
    // Validate the given Controller Service; fail the test on the first invalid result.
    final String serviceId = service.getIdentifier();
    final StateManager stateManagerForService = controllerServiceStateManagers.get(serviceId);
    if (stateManagerForService == null) {
        throw new IllegalStateException("Controller Service has not been added to this TestRunner via the #addControllerService method");
    }
    // Build a validation context scoped to this service, then run its validators.
    final MockValidationContext mockValidationContext = new MockValidationContext(context, stateManagerForService, variableRegistry);
    final ValidationContext serviceValidationContext = mockValidationContext.getControllerServiceValidationContext(service);
    for (final ValidationResult validationResult : context.getControllerService(serviceId).validate(serviceValidationContext)) {
        if (!validationResult.isValid()) {
            Assert.fail("Expected Controller Service to be valid but it is invalid due to: " + validationResult.toString());
        }
    }
}
Also used : MockStateManager(org.apache.nifi.state.MockStateManager) StateManager(org.apache.nifi.components.state.StateManager) ValidationResult(org.apache.nifi.components.ValidationResult) ValidationContext(org.apache.nifi.components.ValidationContext)

Example 23 with StateManager

use of org.apache.nifi.components.state.StateManager in project nifi by apache.

From the class AttributeRollingWindow, the method microBatch:

/**
 * Processes one FlowFile in micro-batch mode: expired batch entries are evicted from
 * state, the current batch's value/count are updated with this FlowFile's tracked value,
 * and rolling-window aggregate attributes (value, count, mean) are added to the FlowFile.
 */
private void microBatch(ProcessContext context, ProcessSession session, FlowFile flowFile, Long currTime) {
    final StateManager stateManager = context.getStateManager();
    Map<String, String> state = null;
    try {
        state = new HashMap<>(stateManager.getState(SCOPE).toMap());
    } catch (IOException e) {
        // State could not be read; nothing has been counted yet, so re-queue the FlowFile.
        getLogger().error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", new Object[] { flowFile }, e);
        session.transfer(flowFile);
        context.yield();
        return;
    }
    // Start a new micro-batch when none exists yet or the current one has expired.
    String currBatchStart = state.get(CURRENT_MICRO_BATCH_STATE_TS_KEY);
    boolean newBatch = false;
    if (currBatchStart == null || currTime - Long.valueOf(currBatchStart) > microBatchTime) {
        newBatch = true;
        currBatchStart = String.valueOf(currTime);
        state.put(CURRENT_MICRO_BATCH_STATE_TS_KEY, currBatchStart);
    }
    // Running count starts at 1 to include the FlowFile currently being processed.
    long count = 1L;
    // Evict batch/count entries whose timestamp has aged out of the rolling window,
    // summing the counts of the batches that survive.
    Set<String> keysToRemove = new HashSet<>();
    for (String key : state.keySet()) {
        if (key.endsWith(BATCH_APPEND_KEY)) {
            // Strip the suffix by its own length; the previous code used
            // COUNT_APPEND_KEY_LENGTH here, which is only correct while both
            // suffixes happen to be the same length.
            String timeStampString = key.substring(0, key.length() - BATCH_APPEND_KEY.length());
            Long timeStamp = Long.decode(timeStampString);
            if (currTime - timeStamp > timeWindow) {
                keysToRemove.add(key);
            }
        } else if (key.endsWith(COUNT_APPEND_KEY)) {
            String timeStampString = key.substring(0, key.length() - COUNT_APPEND_KEY.length());
            Long timeStamp = Long.decode(timeStampString);
            if (currTime - timeStamp > timeWindow) {
                keysToRemove.add(key);
            } else {
                count += Long.valueOf(state.get(key));
            }
        }
    }
    for (String key : keysToRemove) {
        state.remove(key);
    }
    // Sum the surviving batch values. If the current batch is still open, fold this
    // FlowFile's tracked value into it; otherwise the value is added below via newBatch.
    Double aggregateValue = 0.0D;
    Double currentBatchValue = 0.0D;
    Long currentBatchCount = 0L;
    for (Map.Entry<String, String> entry : state.entrySet()) {
        String key = entry.getKey();
        if (key.endsWith(BATCH_APPEND_KEY)) {
            String timeStampString = key.substring(0, key.length() - BATCH_APPEND_KEY.length());
            Double batchValue = Double.valueOf(entry.getValue());
            Long batchCount = Long.valueOf(state.get(timeStampString + COUNT_APPEND_KEY));
            if (!newBatch && timeStampString.equals(currBatchStart)) {
                final Double currentFlowFileValue = context.getProperty(VALUE_TO_TRACK).evaluateAttributeExpressions(flowFile).asDouble();
                batchCount++;
                batchValue += currentFlowFileValue;
                currentBatchValue = batchValue;
                currentBatchCount = batchCount;
            }
            aggregateValue += batchValue;
        }
    }
    if (newBatch) {
        final Double currentFlowFileValue = context.getProperty(VALUE_TO_TRACK).evaluateAttributeExpressions(flowFile).asDouble();
        currentBatchValue += currentFlowFileValue;
        currentBatchCount = 1L;
        aggregateValue += currentBatchValue;
    }
    state.put(currBatchStart + BATCH_APPEND_KEY, String.valueOf(currentBatchValue));
    state.put(currBatchStart + COUNT_APPEND_KEY, String.valueOf(currentBatchCount));
    try {
        stateManager.setState(state, SCOPE);
    } catch (IOException e) {
        // Mirrors noMicroBatch(): the state was read but could not be saved, so route to
        // the failed-set-state relationship. (The previous code logged the getState
        // message and re-queued the FlowFile, which would double-count it on retry.)
        getLogger().error("Failed to set the state after successfully processing {} due a failure when setting the state. Transferring to '{}'", new Object[] { flowFile, REL_FAILED_SET_STATE.getName() }, e);
        session.transfer(flowFile, REL_FAILED_SET_STATE);
        context.yield();
        return;
    }
    Double mean = aggregateValue / count;
    Map<String, String> attributesToAdd = new HashMap<>();
    attributesToAdd.put(ROLLING_WINDOW_VALUE_KEY, String.valueOf(aggregateValue));
    attributesToAdd.put(ROLLING_WINDOW_COUNT_KEY, String.valueOf(count));
    attributesToAdd.put(ROLLING_WINDOW_MEAN_KEY, String.valueOf(mean));
    flowFile = session.putAllAttributes(flowFile, attributesToAdd);
    session.transfer(flowFile, REL_SUCCESS);
}
Also used : HashMap(java.util.HashMap) IOException(java.io.IOException) StateManager(org.apache.nifi.components.state.StateManager) HashMap(java.util.HashMap) Map(java.util.Map) StateMap(org.apache.nifi.components.state.StateMap) HashSet(java.util.HashSet)

Example 24 with StateManager

use of org.apache.nifi.components.state.StateManager in project nifi by apache.

From the class AttributeRollingWindow, the method noMicroBatch:

/*
 * Rolling window without micro-batching: each FlowFile's tracked value is stored in
 * state under its own timestamp key, expired entries are evicted, and the aggregate
 * value/count/mean attributes are placed on the FlowFile.
 */
private void noMicroBatch(ProcessContext context, ProcessSession session, FlowFile flowFile, Long currTime) {
    final StateManager stateManager = context.getStateManager();
    final Map<String, String> state;
    try {
        state = new HashMap<>(stateManager.getState(SCOPE).toMap());
    } catch (IOException e) {
        getLogger().error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", new Object[] { flowFile }, e);
        session.transfer(flowFile);
        context.yield();
        return;
    }
    // The running count includes the FlowFile currently being processed.
    Long count = Long.valueOf(state.get(COUNT_KEY)) + 1;
    // Collect the timestamp keys that have aged out of the window, then drop them
    // and reduce the count accordingly.
    final Set<String> expiredKeys = new HashSet<>();
    for (final String key : state.keySet()) {
        if (!key.equals(COUNT_KEY) && currTime - Long.decode(key) > timeWindow) {
            expiredKeys.add(key);
        }
    }
    count -= expiredKeys.size();
    final String countString = String.valueOf(count);
    state.keySet().removeAll(expiredKeys);
    // Sum the surviving per-timestamp values, then fold in the current FlowFile's value.
    Double aggregateValue = 0.0D;
    for (final Map.Entry<String, String> entry : state.entrySet()) {
        if (!entry.getKey().equals(COUNT_KEY)) {
            aggregateValue += Double.valueOf(entry.getValue());
        }
    }
    final Double currentFlowFileValue = context.getProperty(VALUE_TO_TRACK).evaluateAttributeExpressions(flowFile).asDouble();
    aggregateValue += currentFlowFileValue;
    state.put(String.valueOf(currTime), String.valueOf(currentFlowFileValue));
    state.put(COUNT_KEY, countString);
    try {
        stateManager.setState(state, SCOPE);
    } catch (IOException e) {
        getLogger().error("Failed to set the state after successfully processing {} due a failure when setting the state. Transferring to '{}'", new Object[] { flowFile, REL_FAILED_SET_STATE.getName() }, e);
        session.transfer(flowFile, REL_FAILED_SET_STATE);
        context.yield();
        return;
    }
    final Double mean = aggregateValue / count;
    final Map<String, String> windowAttributes = new HashMap<>();
    windowAttributes.put(ROLLING_WINDOW_VALUE_KEY, String.valueOf(aggregateValue));
    windowAttributes.put(ROLLING_WINDOW_COUNT_KEY, String.valueOf(count));
    windowAttributes.put(ROLLING_WINDOW_MEAN_KEY, String.valueOf(mean));
    flowFile = session.putAllAttributes(flowFile, windowAttributes);
    session.transfer(flowFile, REL_SUCCESS);
}
Also used : HashMap(java.util.HashMap) IOException(java.io.IOException) StateManager(org.apache.nifi.components.state.StateManager) HashMap(java.util.HashMap) Map(java.util.Map) StateMap(org.apache.nifi.components.state.StateMap) HashSet(java.util.HashSet)

Example 25 with StateManager

use of org.apache.nifi.components.state.StateManager in project nifi by apache.

From the class UpdateAttribute, the method onScheduled:

@OnScheduled
public void onScheduled(final ProcessContext context) throws IOException {
    // Deserialize the rule criteria from the annotation data and reset cached property values.
    criteriaCache.set(CriteriaSerDe.deserialize(context.getAnnotationData()));
    propertyValues.clear();
    if (stateful) {
        final StateManager stateManager = context.getStateManager();
        final StateMap state = stateManager.getState(Scope.LOCAL);
        // Start from the existing state so previously stored variable values survive.
        final HashMap<String, String> tempMap = new HashMap<>(state.toMap());
        final String initValue = context.getProperty(STATEFUL_VARIABLES_INIT_VALUE).getValue();
        // Initialize the stateful default actions: every dynamic property gets the
        // configured initial value unless the state already holds one.
        for (final PropertyDescriptor entry : context.getProperties().keySet()) {
            if (entry.isDynamic() && !tempMap.containsKey(entry.getName())) {
                tempMap.put(entry.getName(), initValue);
            }
        }
        // Initialize the stateful actions if the criteria exists
        final Criteria criteria = criteriaCache.get();
        if (criteria != null) {
            for (final Rule rule : criteria.getRules()) {
                for (final Action action : rule.getActions()) {
                    if (!tempMap.containsKey(action.getAttribute())) {
                        tempMap.put(action.getAttribute(), initValue);
                    }
                }
            }
        }
        // Reuse the StateManager fetched above instead of retrieving it a second time.
        stateManager.setState(tempMap, Scope.LOCAL);
    }
    defaultActions = getDefaultActions(context.getProperties());
    debugEnabled = getLogger().isDebugEnabled();
}
Also used : Action(org.apache.nifi.update.attributes.Action) StateManager(org.apache.nifi.components.state.StateManager) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) StateMap(org.apache.nifi.components.state.StateMap) Criteria(org.apache.nifi.update.attributes.Criteria) Rule(org.apache.nifi.update.attributes.Rule) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled)

Aggregations

StateManager (org.apache.nifi.components.state.StateManager)26 IOException (java.io.IOException)13 StateMap (org.apache.nifi.components.state.StateMap)12 HashMap (java.util.HashMap)11 SQLException (java.sql.SQLException)8 ComponentLog (org.apache.nifi.logging.ComponentLog)8 Connection (java.sql.Connection)7 Statement (java.sql.Statement)7 HashSet (java.util.HashSet)7 DBCPService (org.apache.nifi.dbcp.DBCPService)7 ProcessException (org.apache.nifi.processor.exception.ProcessException)7 Map (java.util.Map)6 ArrayList (java.util.ArrayList)5 TimeUnit (java.util.concurrent.TimeUnit)5 OnScheduled (org.apache.nifi.annotation.lifecycle.OnScheduled)5 PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor)5 ValidationResult (org.apache.nifi.components.ValidationResult)5 FlowFile (org.apache.nifi.flowfile.FlowFile)5 Test (org.junit.Test)5 Collections (java.util.Collections)4