Example usage of org.apache.nifi.processor.ProcessSession in the Apache NiFi project: the onTrigger method of the ExecuteSparkInteractive class.
@Override
public void onTrigger(ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog log = getLogger();
    final LivySessionService livySessionService = context.getProperty(LIVY_CONTROLLER_SERVICE).asControllerService(LivySessionService.class);
    final Map<String, String> livyController = livySessionService.getSession();
    if (livyController == null || livyController.isEmpty()) {
        // No session yet; park the flow file on the wait relationship until Livy is ready.
        log.debug("No Spark session available (yet), routing flowfile to wait");
        session.transfer(flowFile, REL_WAIT);
        return;
    }
    final long statusCheckInterval = context.getProperty(STATUS_CHECK_INTERVAL).evaluateAttributeExpressions(flowFile).asTimePeriod(TimeUnit.MILLISECONDS);
    Charset charset;
    try {
        charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(flowFile).getValue());
    } catch (Exception e) {
        log.warn("Illegal character set name specified, defaulting to UTF-8");
        charset = StandardCharsets.UTF_8;
    }
    // Effectively-final copy so the charset can be used inside the write() lambdas below.
    final Charset resolvedCharset = charset;
    String sessionId = livyController.get("sessionId");
    String livyUrl = livyController.get("livyUrl");
    String code = context.getProperty(CODE).evaluateAttributeExpressions(flowFile).getValue();
    if (StringUtils.isEmpty(code)) {
        // If no code was provided, assume it is in the content of the incoming flow file
        try (InputStream inputStream = session.read(flowFile)) {
            code = IOUtils.toString(inputStream, charset);
        } catch (IOException ioe) {
            log.error("Error reading input flowfile {}, penalizing and routing to failure: {}", new Object[] { flowFile, ioe.getMessage() }, ioe);
            flowFile = session.penalize(flowFile);
            session.transfer(flowFile, REL_FAILURE);
            return;
        }
    }
    // Escape the code so it can be embedded safely inside the JSON payload string.
    code = StringEscapeUtils.escapeJavaScript(code);
    String payload = "{\"code\":\"" + code + "\"}";
    try {
        final JSONObject result = submitAndHandleJob(livyUrl, livySessionService, sessionId, payload, statusCheckInterval);
        log.debug("ExecuteSparkInteractive Result of Job Submit: {}", new Object[] { result });
        if (result == null) {
            session.transfer(flowFile, REL_FAILURE);
        } else {
            try {
                final JSONObject output = result.getJSONObject("data");
                // Write the job output using the configured charset rather than the platform default.
                flowFile = session.write(flowFile, out -> out.write(output.toString().getBytes(resolvedCharset)));
                flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), LivySessionService.APPLICATION_JSON);
                session.transfer(flowFile, REL_SUCCESS);
            } catch (JSONException je) {
                // The result doesn't contain the data, just send the output object as the flow file content to failure (after penalizing)
                log.error("Spark Session returned an error, sending the output JSON object as the flow file content to failure (after penalizing)");
                flowFile = session.write(flowFile, out -> out.write(result.toString().getBytes(resolvedCharset)));
                flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), LivySessionService.APPLICATION_JSON);
                flowFile = session.penalize(flowFile);
                session.transfer(flowFile, REL_FAILURE);
            }
        }
    } catch (IOException ioe) {
        log.error("Failure processing flowfile {} due to {}, penalizing and routing to failure", new Object[] { flowFile, ioe.getMessage() }, ioe);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }
}
Example usage of org.apache.nifi.processor.ProcessSession in the Apache NiFi project: the onTrigger method of the PutSplunk class.
@Override
public void onTrigger(ProcessContext context, ProcessSessionFactory sessionFactory) throws ProcessException {
    // Finish off any message batches that earlier executions left behind.
    for (FlowFileMessageBatch pending = completeBatches.poll(); pending != null; pending = completeBatches.poll()) {
        pending.completeSession();
    }
    // Open a session and try to pull a FlowFile; with nothing to do, prune idle senders instead.
    final ProcessSession session = sessionFactory.createSession();
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        final long idleMillis = context.getProperty(IDLE_EXPIRATION).asTimePeriod(TimeUnit.MILLISECONDS).longValue();
        final PruneResult pruned = pruneIdleSenders(idleMillis);
        final boolean closedSomething = pruned.getNumClosed() > 0;
        final boolean nothingOpen = pruned.getNumClosed() == 0 && pruned.getNumConsidered() == 0;
        // Yield when an idle connection was closed, or when there were no connections at all.
        if (closedSomething || nothingOpen) {
            context.yield();
        }
        return;
    }
    // Borrow a sender from the pool (or create one). On failure, acquireSender has
    // already routed the flow file to failure and yielded, so just bail out.
    final ChannelSender sender = acquireSender(context, session, flowFile);
    if (sender == null) {
        return;
    }
    try {
        final String rawDelimiter = context.getProperty(MESSAGE_DELIMITER).evaluateAttributeExpressions(flowFile).getValue();
        if (rawDelimiter == null) {
            // No delimiter configured: the entire FlowFile is one message.
            processSingleMessage(context, session, flowFile, sender);
        } else {
            // Translate escaped whitespace sequences into their literal characters.
            final String delimiter = rawDelimiter.replace("\\n", "\n").replace("\\r", "\r").replace("\\t", "\t");
            processDelimitedMessages(context, session, flowFile, sender, delimiter);
        }
    } finally {
        relinquishSender(sender);
    }
}
Example usage of org.apache.nifi.processor.ProcessSession in the Apache NiFi project: the testNullValueForEmptyAttribute method of the TestAttributesToJSON class.
@Test
public void testNullValueForEmptyAttribute() throws Exception {
    final String missingAttributeKey = "NonExistingAttributeKey";
    final TestRunner runner = TestRunners.newTestRunner(new AttributesToJSON());
    runner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE);
    runner.setProperty(AttributesToJSON.ATTRIBUTES_LIST, missingAttributeKey);
    runner.setProperty(AttributesToJSON.NULL_VALUE_FOR_EMPTY_STRING, "true");
    final ProcessSession session = runner.getProcessSessionFactory().createSession();
    runner.enqueue(session.create());
    runner.run();
    // Expecting success transition because Jackson is taking care of escaping the bad JSON characters
    runner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0).assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    runner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);
    runner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);
    // The absent attribute must come through as a genuine JSON null, not the string "null".
    final String jsonResult = runner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    final Map<String, String> parsed = new ObjectMapper().readValue(jsonResult, HashMap.class);
    assertNull(parsed.get(missingAttributeKey));
}
Example usage of org.apache.nifi.processor.ProcessSession in the Apache NiFi project: the testInvalidJSONValueInAttribute method of the TestAttributesToJSON class.
@Test
public void testInvalidJSONValueInAttribute() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(new AttributesToJSON());
    runner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE);
    final ProcessSession session = runner.getProcessSessionFactory().createSession();
    // Seed an attribute whose value contains characters that are invalid in raw JSON.
    FlowFile inputFlowFile = session.create();
    inputFlowFile = session.putAttribute(inputFlowFile, TEST_ATTRIBUTE_KEY, "'badjson'");
    runner.enqueue(inputFlowFile);
    runner.run();
    // Expecting success transition because Jackson is taking care of escaping the bad JSON characters
    runner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0).assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    runner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);
    runner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);
}
Example usage of org.apache.nifi.processor.ProcessSession in the Apache NiFi project: the testContent_emptyListUserSpecifiedAttributes method of the TestAttributesToJSON class.
@Test
public void testContent_emptyListUserSpecifiedAttributes() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(new AttributesToJSON());
    runner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_CONTENT);
    runner.setProperty(AttributesToJSON.INCLUDE_CORE_ATTRIBUTES, "false");
    final ProcessSession session = runner.getProcessSessionFactory().createSession();
    runner.enqueue(session.create());
    runner.run();
    // With DESTINATION_CONTENT the JSON goes into the body, so no JSON attribute is set
    // and the content is an empty object when no attributes are selected.
    final MockFlowFile output = runner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0);
    output.assertAttributeNotExists(AttributesToJSON.JSON_ATTRIBUTE_NAME);
    runner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1);
    runner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0);
    output.assertContentEquals("{}");
}
Aggregations