Usage of org.apache.nifi.components.PropertyValue in project kylo by Teradata: the onTrigger method of the SetSavepoint class.
// Framework entry point, invoked once per scheduling cycle.
// Finds the next FlowFile eligible for this savepoint processor and acts on the
// savepoint's recorded state for this processor id: registers a new savepoint,
// commits a release (success/failure), commits a retry, waits, or re-queues the
// FlowFile when the distributed lock is busy. All provider mutations happen
// under a per-savepoint lock, which is always released in the finally block.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
final SavepointController controller = context.getProperty(SAVEPOINT_SERVICE).asControllerService(SavepointController.class);
final SavepointProvider provider = controller.getProvider();
// Savepoint id supports Expression Language; evaluated per FlowFile below.
final PropertyValue pvSavepointId = context.getProperty(SAVEPOINT_ID);
final String processorId = getIdentifier();
FlowFile flowFile = null;
// Time the queue scan purely for the diagnostic log line below.
long start = System.currentTimeMillis();
Optional<FlowFile> nextFlowfile = getNextFlowFile(context, session, controller, provider, pvSavepointId);
long stop = System.currentTimeMillis();
if (!nextFlowfile.isPresent()) {
// Nothing eligible this cycle; yield until the next trigger.
return;
} else {
flowFile = nextFlowfile.get();
}
getLogger().info("Time to iterate over {} flow files: {} ms, {} ", new Object[] { session.getQueueSize(), (stop - start), nextFlowfile.isPresent() ? nextFlowfile.get() : " Nothing found " });
final ComponentLog logger = getLogger();
// We do processing on each flowfile here
final String savepointIdStr = pvSavepointId.evaluateAttributeExpressions(flowFile).getValue();
final String flowfileId = flowFile.getAttribute(CoreAttributes.UUID.key());
Lock lock = null;
try {
// Acquire the distributed lock for this savepoint id; null means another
// process currently holds it (handled in the else branch below).
lock = provider.lock(savepointIdStr);
if (lock != null) {
SavepointEntry entry = provider.lookupEntry(savepointIdStr);
// isExpired routes/cleans up expired FlowFiles itself; nothing more to do.
// The lock is still released by the finally block.
if (isExpired(context, session, provider, flowFile, savepointIdStr, lock)) {
return;
}
String waitStartTimestamp;
// add the processor id for the current savepoint
// this will be used to check on the next save point if the flow file should be examined and processed.
flowFile = session.putAttribute(flowFile, SAVEPOINT_PROCESSOR_ID, getIdentifier());
if (entry == null || entry.getState(processorId) == null) {
// Register new savepoint: first time this processor sees this savepoint id.
provider.register(savepointIdStr, processorId, flowfileId, lock);
// "-1" marks the initial (pre-retry) attempt.
flowFile = tryFlowFile(session, flowFile, "-1");
// add in timestamps
// Set wait start timestamp if it's not set yet
waitStartTimestamp = flowFile.getAttribute(SAVEPOINT_START_TIMESTAMP);
if (waitStartTimestamp == null) {
waitStartTimestamp = String.valueOf(System.currentTimeMillis());
flowFile = session.putAttribute(flowFile, SAVEPOINT_START_TIMESTAMP, waitStartTimestamp);
}
// Transfer with no explicit relationship — presumably routes to the
// processor's default/wait path; confirm against tryFlowFile's contract.
session.transfer(flowFile);
} else {
// Savepoint already known for this processor: dispatch on its state.
SavepointEntry.SavePointState state = entry.getState(processorId);
switch(state) {
case RELEASE_SUCCESS:
provider.commitRelease(savepointIdStr, processorId, lock);
// add provenance to indicate success
flowFile = session.putAttribute(flowFile, SavepointProvenanceProperties.RELEASE_STATUS_KEY, SavepointProvenanceProperties.RELEASE_STATUS.SUCCESS.name());
session.transfer(flowFile, REL_RELEASE_SUCCESS);
break;
case RELEASE_FAILURE:
provider.commitRelease(savepointIdStr, processorId, lock);
// add provenance to indicate failure
flowFile = session.putAttribute(flowFile, SavepointProvenanceProperties.RELEASE_STATUS_KEY, SavepointProvenanceProperties.RELEASE_STATUS.FAILURE.name());
session.transfer(flowFile, REL_RELEASE_FAILURE);
break;
case RETRY:
// Missing retry-count attribute defaults to "0" (first retry).
String retryCount = flowFile.getAttribute(SAVEPOINT_RETRY_COUNT);
if (retryCount == null) {
retryCount = "0";
}
provider.commitRetry(savepointIdStr, processorId, lock);
flowFile = tryFlowFile(session, flowFile, retryCount);
session.transfer(flowFile);
break;
case WAIT:
// Not yet released/retried: keep the FlowFile on this processor's queue.
session.transfer(flowFile, REL_SELF);
break;
default:
logger.warn("Unexpected savepoint state.");
session.transfer(flowFile, REL_FAILURE);
}
}
} else {
// Lock busy so try again later
// add it back to cache
controller.putFlowfileBack(processorId, flowfileId);
logger.info("Unable to obtain lock. It is already locked by another process. Adding back to queue {} ", new Object[] { flowfileId });
session.transfer(flowFile, REL_SELF);
}
} catch (IOException | InvalidLockException | InvalidSetpointException e) {
// Record the failure on the FlowFile itself and route to failure.
logger.warn("Failed to process flowfile {} for savepoint {}", new String[] { flowfileId, savepointIdStr }, e);
flowFile = session.putAttribute(flowFile, SAVEPOINT_EXCEPTION, "Failed to process flowfile " + flowfileId + " for savepoint " + savepointIdStr + ". " + e.getMessage());
session.transfer(flowFile, REL_FAILURE);
} finally {
// Always release the distributed lock, even on early return or exception.
if (lock != null) {
try {
provider.unlock(lock);
} catch (IOException e) {
logger.warn("Unable to unlock {}", new String[] { savepointIdStr });
}
}
}
}
Usage of org.apache.nifi.components.PropertyValue in project kylo by Teradata: the onTrigger method of the TriggerSavepoint class.
// Framework entry point, invoked once per scheduling cycle.
// Applies the configured BEHAVIOR to the incoming FlowFile's savepoint:
// FAIL routes straight to failure; RELEASE releases the savepoint; RETRY
// re-triggers it subject to a max-retry cap and a one-time penalty. Errors
// from the provider are counted per FlowFile and escalate to failure after
// MAX_FAILURES_ALLOWED. The distributed lock is always released in finally.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
FlowFile flowFile = session.get();
if (flowFile == null) {
return;
}
// Fetch the controller
final SavepointController controller = context.getProperty(SAVEPOINT_SERVICE).asControllerService(SavepointController.class);
final SavepointProvider provider = controller.getProvider();
final ComponentLog logger = getLogger();
// Savepoint id supports Expression Language; evaluated per FlowFile below.
final PropertyValue pvSavepointId = context.getProperty(SAVEPOINT_ID);
final PropertyValue pvBehavior = context.getProperty(BEHAVIOR);
final PropertyValue pvMaxRetries = context.getProperty(MAX_RETRIES);
// We do processing on each flowfile here
String behavior = pvBehavior.getValue();
if (!FAIL.equals(behavior)) {
final String savepointIdStr = pvSavepointId.evaluateAttributeExpressions(flowFile).getValue();
Lock lock = null;
try {
// Acquire the distributed lock for this savepoint id; null means it is
// currently held elsewhere (handled in the else branch below).
lock = provider.lock(savepointIdStr);
if (lock != null) {
if (RELEASE.equals(behavior)) {
provider.release(savepointIdStr, lock, true);
flowFile = session.putAttribute(flowFile, SavepointProvenanceProperties.SAVE_POINT_BEHAVIOR_STATUS, behavior);
session.transfer(flowFile, REL_SUCCESS);
} else if (RETRY.equals(behavior)) {
// NOTE(review): assumes BEHAVIOR is constrained by property validation
// to FAIL/RELEASE/RETRY — TODO confirm; any other value would leave the
// FlowFile untransferred here.
// Check the retry count from the SetSavepoint
String sRetryCount = flowFile.getAttribute(SetSavepoint.SAVEPOINT_RETRY_COUNT);
int retryCount = 0;
try {
if (sRetryCount != null) {
retryCount = Integer.parseInt(sRetryCount);
}
} catch (NumberFormatException nfe) {
// Unparseable count: treat as 0 and continue with the retry.
logger.warn("{} has an invalid value '{}' on FlowFile {}", new Object[] { SetSavepoint.SAVEPOINT_RETRY_COUNT, sRetryCount, flowFile });
}
// Check retries
// NOTE(review): if this branch is ever taken with sRetryCount == null
// (only possible with a negative MAX_RETRIES), putAttribute would
// receive a null value — confirm MAX_RETRIES is validated non-negative.
if (retryCount > pvMaxRetries.asInteger()) {
flowFile = session.putAttribute(flowFile, TriggerSavepoint.SAVE_POINT_MAX_RETRIES_EXCEEDED, sRetryCount);
session.transfer(flowFile, REL_MAX_RETRIES_EXCEEDED);
return;
}
// Penalize the flowfile once before retry is processed
// (the marker attribute ensures the penalty is applied only once).
String sRetryMarker = flowFile.getAttribute(SAVEPOINT_RETRY_MARKER);
if (StringUtils.isEmpty(sRetryMarker)) {
flowFile = session.penalize(flowFile);
flowFile = session.putAttribute(flowFile, SAVEPOINT_RETRY_MARKER, "1");
session.transfer(flowFile, REL_SELF);
return;
}
provider.retry(savepointIdStr, lock);
session.transfer(flowFile, REL_SUCCESS);
}
} else {
// Unable to obtain lock. Try again
session.transfer(flowFile, REL_SELF);
}
} catch (IOException | InvalidLockException | InvalidSetpointException e) {
logger.info("Exception occurred for FlowFile {} exception {}", new Object[] { flowFile, e.getLocalizedMessage() }, e);
// Check the retry count from the SetSavepoint
// Count consecutive trigger failures on the FlowFile itself so transient
// provider errors are retried, but persistent ones eventually fail.
String sTriggerFailureCount = flowFile.getAttribute(TriggerSavepoint.SAVEPOINT_TRIGGER_FAILURE_COUNT);
int triggerFailureCount = 1;
try {
triggerFailureCount = (sTriggerFailureCount == null ? 0 : Integer.parseInt(sTriggerFailureCount));
triggerFailureCount += 1;
} catch (NumberFormatException nfe) {
// Unparseable counter: fall back to the initial value of 1.
logger.info("Invalid attribute {}", new Object[] { TriggerSavepoint.SAVEPOINT_TRIGGER_FAILURE_COUNT });
}
flowFile = session.putAttribute(flowFile, TriggerSavepoint.SAVEPOINT_TRIGGER_FAILURE_COUNT, String.valueOf(triggerFailureCount));
if (triggerFailureCount > MAX_FAILURES_ALLOWED) {
logger.info("Maximum failures reached for sp {}, will route to fail.", new String[] { savepointIdStr });
flowFile = session.putAttribute(flowFile, SavepointProvenanceProperties.SAVE_POINT_BEHAVIOR_STATUS, FAIL);
flowFile = session.putAttribute(flowFile, TriggerSavepoint.SAVE_POINT_BEHAVIOR_STATUS_DESC, "Maximum failures at " + triggerFailureCount + " were reached. Failing the flow");
// add in the trigger flow id so ops manager can get the key to retry if needed
String triggerFlowFile = flowFile.getAttribute(SavepointProvenanceProperties.PARENT_FLOWFILE_ID);
if (StringUtils.isNotBlank(triggerFlowFile)) {
flowFile = session.putAttribute(flowFile, SavepointProvenanceProperties.SAVE_POINT_TRIGGER_FLOWFILE, triggerFlowFile);
}
session.transfer(flowFile, REL_FAILURE);
} else {
// Below the failure cap: penalize and loop back to self for another attempt.
logger.info("Failed to process flowfile for savepoint {}", new String[] { savepointIdStr }, e);
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_SELF);
}
} finally {
// Always release the distributed lock, even on early return or exception.
if (lock != null) {
try {
provider.unlock(lock);
} catch (IOException e) {
logger.warn("Unable to unlock {}", new String[] { savepointIdStr });
}
}
}
} else {
// Route to failure
// BEHAVIOR == FAIL: stamp the status (and trigger FlowFile id, if present)
// and route directly to failure without touching the savepoint.
flowFile = session.putAttribute(flowFile, SavepointProvenanceProperties.SAVE_POINT_BEHAVIOR_STATUS, behavior);
String triggerFlowFile = flowFile.getAttribute(SavepointProvenanceProperties.PARENT_FLOWFILE_ID);
if (StringUtils.isNotBlank(triggerFlowFile)) {
flowFile = session.putAttribute(flowFile, SavepointProvenanceProperties.SAVE_POINT_TRIGGER_FLOWFILE, triggerFlowFile);
}
session.transfer(flowFile, REL_FAILURE);
}
}
Usage of org.apache.nifi.components.PropertyValue in project nifi by apache: the testEscaped method of the TestStandardPropertyValue class.
/**
 * Verifies Expression Language escaping: a doubled dollar sign ("$${audience}")
 * must not be substituted, so evaluation yields the literal "${audience}".
 */
@Test
public void testEscaped() {
    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put("audience", "World");
    final PropertyValue escaped = new StandardPropertyValue("Hello, $${audience}!", lookup);
    final String evaluated = escaped.evaluateAttributeExpressions(createFlowFile(flowFileAttributes)).getValue();
    assertEquals("Hello, ${audience}!", evaluated);
}
Usage of org.apache.nifi.components.PropertyValue in project nifi by apache: the testGetValueAsIntegerAfterSubstitute method of the TestStandardPropertyValue class.
/**
 * Ensures attribute substitution happens before numeric conversion:
 * "1${value}" with value=39 evaluates to "139", which asInteger() parses as 139.
 */
@Test
public void testGetValueAsIntegerAfterSubstitute() {
    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put("value", "39");
    final PropertyValue concatenated = new StandardPropertyValue("1${value}", lookup);
    final int parsed = concatenated.evaluateAttributeExpressions(createFlowFile(flowFileAttributes)).asInteger().intValue();
    assertEquals(139, parsed);
}
Usage of org.apache.nifi.components.PropertyValue in project nifi by apache: the testFlowFileEntryYear method of the TestStandardPropertyValue class.
/**
 * Confirms that ${entryDate} is exposed to Expression Language: the FlowFile's
 * entry timestamp, formatted as 'yyyy', must equal the current year.
 */
@Test
public void testFlowFileEntryYear() {
    final Calendar now = Calendar.getInstance();
    final FlowFile flowFile = new StandardFlowFileRecord.Builder().entryDate(now.getTimeInMillis()).build();
    final PropertyValue yearExpression = new StandardPropertyValue("${entryDate:toNumber():toDate():format('yyyy')}", lookup);
    final int evaluatedYear = yearExpression.evaluateAttributeExpressions(flowFile).asInteger().intValue();
    assertEquals(now.get(Calendar.YEAR), evaluatedYear);
}
Aggregations