Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class EncryptContent, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    final String method = context.getProperty(ENCRYPTION_ALGORITHM).getValue();
    final EncryptionMethod encryptionMethod = EncryptionMethod.valueOf(method);
    final String providerName = encryptionMethod.getProvider();
    final String algorithm = encryptionMethod.getAlgorithm();
    final String password = context.getProperty(PASSWORD).getValue();
    final KeyDerivationFunction kdf = KeyDerivationFunction.valueOf(context.getProperty(KEY_DERIVATION_FUNCTION).getValue());
    final boolean encrypt = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCRYPT_MODE);
    Encryptor encryptor;
    StreamCallback callback;
    try {
        if (isPGPAlgorithm(algorithm)) {
            // PGP: use key-based encryption when a keyring is configured, otherwise fall back to password-based encryption
            final String filename = flowFile.getAttribute(CoreAttributes.FILENAME.key());
            final String publicKeyring = context.getProperty(PUBLIC_KEYRING).getValue();
            final String privateKeyring = context.getProperty(PRIVATE_KEYRING).getValue();
            if (encrypt && publicKeyring != null) {
                final String publicUserId = context.getProperty(PUBLIC_KEY_USERID).getValue();
                encryptor = new OpenPGPKeyBasedEncryptor(algorithm, providerName, publicKeyring, publicUserId, null, filename);
            } else if (!encrypt && privateKeyring != null) {
                final char[] keyringPassphrase = context.getProperty(PRIVATE_KEYRING_PASSPHRASE).evaluateAttributeExpressions().getValue().toCharArray();
                encryptor = new OpenPGPKeyBasedEncryptor(algorithm, providerName, privateKeyring, null, keyringPassphrase, filename);
            } else {
                final char[] passphrase = Normalizer.normalize(password, Normalizer.Form.NFC).toCharArray();
                encryptor = new OpenPGPPasswordBasedEncryptor(algorithm, providerName, passphrase, filename);
            }
        } else if (kdf.equals(KeyDerivationFunction.NONE)) {
            // Raw key
            final String keyHex = context.getProperty(RAW_KEY_HEX).getValue();
            encryptor = new KeyedEncryptor(encryptionMethod, Hex.decodeHex(keyHex.toCharArray()));
        } else {
            // PBE; the passphrase is NFC-normalized so equivalent Unicode forms derive the same key
            final char[] passphrase = Normalizer.normalize(password, Normalizer.Form.NFC).toCharArray();
            encryptor = new PasswordBasedEncryptor(encryptionMethod, passphrase, kdf);
        }
        if (encrypt) {
            callback = encryptor.getEncryptionCallback();
        } else {
            callback = encryptor.getDecryptionCallback();
        }
    } catch (final Exception e) {
        logger.error("Failed to initialize {}cryption algorithm because - ", new Object[] { encrypt ? "en" : "de", e });
        session.rollback();
        context.yield();
        return;
    }
    try {
        final StopWatch stopWatch = new StopWatch(true);
        flowFile = session.write(flowFile, callback);
        logger.info("successfully {}crypted {}", new Object[] { encrypt ? "en" : "de", flowFile });
        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final ProcessException e) {
        logger.error("Cannot {}crypt {} - ", new Object[] { encrypt ? "en" : "de", flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
    }
}
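For context, a minimal sketch of driving this method through NiFi's nifi-mock TestRunner harness (imports omitted). The property descriptors, ENCRYPT_MODE, and REL_SUCCESS come from the code above; the specific algorithm and KDF values are assumptions for illustration only:

// Hypothetical test sketch; assumes nifi-mock and illustrative algorithm/KDF values.
final TestRunner runner = TestRunners.newTestRunner(new EncryptContent());
runner.setProperty(EncryptContent.MODE, "Encrypt"); // the ENCRYPT_MODE value checked above
runner.setProperty(EncryptContent.ENCRYPTION_ALGORITHM, "MD5_128AES"); // assumed EncryptionMethod name
runner.setProperty(EncryptContent.KEY_DERIVATION_FUNCTION, "NIFI_LEGACY"); // assumed KDF name
runner.setProperty(EncryptContent.PASSWORD, "s3cret");
runner.enqueue("plaintext".getBytes(StandardCharsets.UTF_8));
runner.run();
runner.assertAllFlowFilesTransferred(EncryptContent.REL_SUCCESS, 1);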
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class ExecuteSQL, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile fileToProcess = null;
    if (context.hasIncomingConnection()) {
        fileToProcess = session.get();
        // If we have no FlowFile and all incoming connections are self-loops, we can continue on; but if any
        // connection comes from another processor, we know that we should run only if we have a FlowFile.
        if (fileToProcess == null && context.hasNonLoopConnection()) {
            return;
        }
    }
    final ComponentLog logger = getLogger();
    final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).asTimePeriod(TimeUnit.SECONDS).intValue();
    final boolean convertNamesForAvro = context.getProperty(NORMALIZE_NAMES_FOR_AVRO).asBoolean();
    final Boolean useAvroLogicalTypes = context.getProperty(USE_AVRO_LOGICAL_TYPES).asBoolean();
    final Integer defaultPrecision = context.getProperty(DEFAULT_PRECISION).evaluateAttributeExpressions().asInteger();
    final Integer defaultScale = context.getProperty(DEFAULT_SCALE).evaluateAttributeExpressions().asInteger();
    final StopWatch stopWatch = new StopWatch(true);
    final String selectQuery;
    if (context.getProperty(SQL_SELECT_QUERY).isSet()) {
        selectQuery = context.getProperty(SQL_SELECT_QUERY).evaluateAttributeExpressions(fileToProcess).getValue();
    } else {
        // If the query is not set, an incoming FlowFile is required and is expected to contain a valid SQL select query.
        // If there is no incoming connection, onTrigger will not be called, as the processor will fail when scheduled.
        final StringBuilder queryContents = new StringBuilder();
        session.read(fileToProcess, in -> queryContents.append(IOUtils.toString(in, Charset.defaultCharset())));
        selectQuery = queryContents.toString();
    }
    int resultCount = 0;
    try (final Connection con = dbcpService.getConnection();
         final PreparedStatement st = con.prepareStatement(selectQuery)) {
        // timeout in seconds
        st.setQueryTimeout(queryTimeout);
        if (fileToProcess != null) {
            JdbcCommon.setParameters(st, fileToProcess.getAttributes());
        }
        logger.debug("Executing query {}", new Object[] { selectQuery });
        // execute() returns true when the first result is a ResultSet; loop over every result set the statement produces
        boolean results = st.execute();
        while (results) {
            FlowFile resultSetFF;
            if (fileToProcess == null) {
                resultSetFF = session.create();
            } else {
                resultSetFF = session.create(fileToProcess);
                resultSetFF = session.putAllAttributes(resultSetFF, fileToProcess.getAttributes());
            }
            final AtomicLong nrOfRows = new AtomicLong(0L);
            resultSetFF = session.write(resultSetFF, out -> {
                try {
                    final ResultSet resultSet = st.getResultSet();
                    final JdbcCommon.AvroConversionOptions options = JdbcCommon.AvroConversionOptions.builder()
                            .convertNames(convertNamesForAvro)
                            .useLogicalTypes(useAvroLogicalTypes)
                            .defaultPrecision(defaultPrecision)
                            .defaultScale(defaultScale)
                            .build();
                    nrOfRows.set(JdbcCommon.convertToAvroStream(resultSet, out, options, null));
                } catch (final SQLException e) {
                    throw new ProcessException(e);
                }
            });
            long duration = stopWatch.getElapsed(TimeUnit.MILLISECONDS);
            // set an attribute recording how many rows were selected
            resultSetFF = session.putAttribute(resultSetFF, RESULT_ROW_COUNT, String.valueOf(nrOfRows.get()));
            resultSetFF = session.putAttribute(resultSetFF, RESULT_QUERY_DURATION, String.valueOf(duration));
            resultSetFF = session.putAttribute(resultSetFF, CoreAttributes.MIME_TYPE.key(), JdbcCommon.MIME_TYPE_AVRO_BINARY);
            logger.info("{} contains {} Avro records; transferring to 'success'", new Object[] { resultSetFF, nrOfRows.get() });
            session.getProvenanceReporter().modifyContent(resultSetFF, "Retrieved " + nrOfRows.get() + " rows", duration);
            session.transfer(resultSetFF, REL_SUCCESS);
            resultCount++;
            // are there any more result sets?
            try {
                results = st.getMoreResults();
            } catch (SQLException ex) {
                results = false;
            }
        }
        // pass the original FlowFile down the line to trigger downstream processors
        if (fileToProcess != null) {
            if (resultCount > 0) {
                session.remove(fileToProcess);
            } else {
                // no result sets were produced: emit the original FlowFile with an empty Avro stream
                fileToProcess = session.write(fileToProcess, JdbcCommon::createEmptyAvroStream);
                session.transfer(fileToProcess, REL_SUCCESS);
            }
        }
    } catch (final ProcessException | SQLException e) {
        if (fileToProcess == null) {
            // This can happen if any exceptions occur while setting up the connection, statement, etc.
            logger.error("Unable to execute SQL select query {} due to {}. No FlowFile to route to failure", new Object[] { selectQuery, e });
            context.yield();
        } else {
            if (context.hasIncomingConnection()) {
                logger.error("Unable to execute SQL select query {} for {} due to {}; routing to failure", new Object[] { selectQuery, fileToProcess, e });
                fileToProcess = session.penalize(fileToProcess);
            } else {
                logger.error("Unable to execute SQL select query {} due to {}; routing to failure", new Object[] { selectQuery, e });
                context.yield();
            }
            session.transfer(fileToProcess, REL_FAILURE);
        }
    }
}
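The st.execute()/st.getMoreResults() loop above is the standard JDBC pattern for statements that can return several result sets. A stripped-down sketch outside NiFi, where the connection URL and SQL are placeholders:

// Plain-JDBC sketch of the multiple-result-set loop; URL and SQL are placeholders,
// and an enclosing method that throws SQLException is assumed.
try (Connection con = DriverManager.getConnection("jdbc:yourdb://localhost/demo");
     Statement st = con.createStatement()) {
    boolean isResultSet = st.execute("CALL some_procedure()"); // true if the first result is a ResultSet
    while (isResultSet) {
        try (ResultSet rs = st.getResultSet()) {
            while (rs.next()) {
                System.out.println(rs.getObject(1));
            }
        }
        isResultSet = st.getMoreResults(); // advances to the next result, implicitly closing the current one
    }
}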
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class ExecuteStreamCommand, method onTrigger:
@Override
public void onTrigger(ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile inputFlowFile = session.get();
    if (null == inputFlowFile) {
        return;
    }
    final ArrayList<String> args = new ArrayList<>();
    final boolean putToAttribute = context.getProperty(PUT_OUTPUT_IN_ATTRIBUTE).isSet();
    final Integer attributeSize = context.getProperty(PUT_ATTRIBUTE_MAX_LENGTH).asInteger();
    final String attributeName = context.getProperty(PUT_OUTPUT_IN_ATTRIBUTE).getValue();
    final String executeCommand = context.getProperty(EXECUTION_COMMAND).evaluateAttributeExpressions(inputFlowFile).getValue();
    args.add(executeCommand);
    final String commandArguments = context.getProperty(EXECUTION_ARGUMENTS).evaluateAttributeExpressions(inputFlowFile).getValue();
    final boolean ignoreStdin = Boolean.parseBoolean(context.getProperty(IGNORE_STDIN).getValue());
    if (!StringUtils.isBlank(commandArguments)) {
        for (String arg : ArgumentUtils.splitArgs(commandArguments, context.getProperty(ARG_DELIMITER).getValue().charAt(0))) {
            args.add(arg);
        }
    }
    final String workingDir = context.getProperty(WORKING_DIR).evaluateAttributeExpressions(inputFlowFile).getValue();
    final ProcessBuilder builder = new ProcessBuilder();
    logger.debug("Executing and waiting for command {} with arguments {}", new Object[] { executeCommand, commandArguments });
    File dir = null;
    if (!StringUtils.isBlank(workingDir)) {
        dir = new File(workingDir);
        if (!dir.exists() && !dir.mkdirs()) {
            logger.warn("Failed to create working directory {}, using current working directory {}", new Object[] { workingDir, System.getProperty("user.dir") });
        }
    }
    // dynamic properties are passed to the command as environment variables
    final Map<String, String> environment = new HashMap<>();
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        if (entry.getKey().isDynamic()) {
            environment.put(entry.getKey().getName(), entry.getValue());
        }
    }
    builder.environment().putAll(environment);
    builder.command(args);
    builder.directory(dir);
    builder.redirectInput(Redirect.PIPE);
    builder.redirectOutput(Redirect.PIPE);
    final Process process;
    try {
        process = builder.start();
    } catch (IOException e) {
        logger.error("Could not create external process to run command", e);
        throw new ProcessException(e);
    }
    try (final OutputStream pos = process.getOutputStream();
         final InputStream pis = process.getInputStream();
         final InputStream pes = process.getErrorStream();
         final BufferedInputStream bis = new BufferedInputStream(pis);
         final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(pes))) {
        int exitCode = -1;
        final BufferedOutputStream bos = new BufferedOutputStream(pos);
        FlowFile outputFlowFile = putToAttribute ? inputFlowFile : session.create(inputFlowFile);
        ProcessStreamWriterCallback callback = new ProcessStreamWriterCallback(ignoreStdin, bos, bis, logger, attributeName, session, outputFlowFile, process, putToAttribute, attributeSize);
        session.read(inputFlowFile, callback);
        outputFlowFile = callback.outputFlowFile;
        if (putToAttribute) {
            outputFlowFile = session.putAttribute(outputFlowFile, attributeName, new String(callback.outputBuffer, 0, callback.size));
        }
        exitCode = callback.exitCode;
        logger.debug("Execution complete for command: {}. Exited with code: {}", new Object[] { executeCommand, exitCode });
        Map<String, String> attributes = new HashMap<>();
        // capture stderr (up to 4000 characters) into the execution.error attribute
        final StringBuilder strBldr = new StringBuilder();
        try {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                strBldr.append(line).append("\n");
            }
        } catch (IOException e) {
            strBldr.append("Unknown...could not read Process's Std Error");
        }
        int length = strBldr.length() > 4000 ? 4000 : strBldr.length();
        attributes.put("execution.error", strBldr.substring(0, length));
        final Relationship outputFlowFileRelationship = putToAttribute ? ORIGINAL_RELATIONSHIP : (exitCode != 0) ? NONZERO_STATUS_RELATIONSHIP : OUTPUT_STREAM_RELATIONSHIP;
        if (exitCode == 0) {
            logger.info("Transferring flow file {} to {}", new Object[] { outputFlowFile, outputFlowFileRelationship.getName() });
        } else {
            logger.error("Transferring flow file {} to {}. Executable command {} ended in an error: {}", new Object[] { outputFlowFile, outputFlowFileRelationship.getName(), executeCommand, strBldr.toString() });
        }
        attributes.put("execution.status", Integer.toString(exitCode));
        attributes.put("execution.command", executeCommand);
        attributes.put("execution.command.args", commandArguments);
        outputFlowFile = session.putAllAttributes(outputFlowFile, attributes);
        if (NONZERO_STATUS_RELATIONSHIP.equals(outputFlowFileRelationship)) {
            outputFlowFile = session.penalize(outputFlowFile);
        }
        // This will transfer the FlowFile that received the stream output to its destined relationship.
        // In the event the stream is put to an attribute of the original, it will be transferred here.
        session.transfer(outputFlowFile, outputFlowFileRelationship);
        if (!putToAttribute) {
            logger.info("Transferring flow file {} to original", new Object[] { inputFlowFile });
            inputFlowFile = session.putAllAttributes(inputFlowFile, attributes);
            session.transfer(inputFlowFile, ORIGINAL_RELATIONSHIP);
        }
    } catch (final IOException ex) {
        // could not close Process-related streams
        logger.warn("Problem terminating Process {}", new Object[] { process }, ex);
    } finally {
        // last-ditch effort to clean up the process
        process.destroy();
    }
}
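For readers unfamiliar with ProcessBuilder, a self-contained sketch of the launch-and-drain pattern used above; the command and environment variable are placeholders:

// Minimal ProcessBuilder sketch mirroring the pattern above; "echo" is a placeholder command,
// and an enclosing method that throws IOException and InterruptedException is assumed.
final ProcessBuilder builder = new ProcessBuilder("echo", "hello");
builder.environment().put("EXAMPLE_VAR", "value"); // analogous to the dynamic properties above
builder.redirectInput(ProcessBuilder.Redirect.PIPE);
builder.redirectOutput(ProcessBuilder.Redirect.PIPE);
final Process process = builder.start();
try (BufferedReader out = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
    String line;
    while ((line = out.readLine()) != null) {
        System.out.println(line);
    }
}
final int exitCode = process.waitFor(); // 0 indicates success, mirroring the exit-code routing above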
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class FlattenJson, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String mode = context.getProperty(FLATTEN_MODE).getValue();
    final FlattenMode flattenMode = getFlattenMode(mode);
    String separator = context.getProperty(SEPARATOR).evaluateAttributeExpressions(flowFile).getValue();
    try {
        // read the full content into memory, flatten it, then overwrite the FlowFile content
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        session.exportTo(flowFile, bos);
        bos.close();
        String raw = new String(bos.toByteArray());
        final String flattened = new JsonFlattener(raw)
                .withFlattenMode(flattenMode)
                .withSeparator(separator.charAt(0))
                .withStringEscapePolicy(() -> StringEscapeUtils.ESCAPE_JAVA)
                .flatten();
        flowFile = session.write(flowFile, os -> os.write(flattened.getBytes()));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (Exception ex) {
        session.transfer(flowFile, REL_FAILURE);
    }
}
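The flattening itself comes from the json-flattener library (com.github.wnameless). A standalone sketch of the builder calls used above, with an illustrative input document; the printed output reflects the library's NORMAL mode as I understand it:

// Standalone json-flattener sketch; the input document is illustrative.
final String raw = "{\"a\":{\"b\":1,\"c\":[2,3]}}";
final String flattened = new JsonFlattener(raw)
        .withFlattenMode(FlattenMode.NORMAL)
        .withSeparator('.')
        .flatten();
System.out.println(flattened); // {"a.b":1,"a.c[0]":2,"a.c[1]":3}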
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class GetFile, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final File directory = new File(context.getProperty(DIRECTORY).evaluateAttributeExpressions().getValue());
    final boolean keepingSourceFile = context.getProperty(KEEP_SOURCE_FILE).asBoolean();
    final ComponentLog logger = getLogger();
    // refresh the file queue from a directory listing when it is running low and the polling interval has elapsed
    if (fileQueue.size() < 100) {
        final long pollingMillis = context.getProperty(POLLING_INTERVAL).asTimePeriod(TimeUnit.MILLISECONDS);
        if ((queueLastUpdated.get() < System.currentTimeMillis() - pollingMillis) && listingLock.tryLock()) {
            try {
                final Set<File> listing = performListing(directory, fileFilterRef.get(), context.getProperty(RECURSE).asBoolean().booleanValue());
                queueLock.lock();
                try {
                    listing.removeAll(inProcess);
                    if (!keepingSourceFile) {
                        listing.removeAll(recentlyProcessed);
                    }
                    fileQueue.clear();
                    fileQueue.addAll(listing);
                    queueLastUpdated.set(System.currentTimeMillis());
                    recentlyProcessed.clear();
                    if (listing.isEmpty()) {
                        context.yield();
                    }
                } finally {
                    queueLock.unlock();
                }
            } finally {
                listingLock.unlock();
            }
        }
    }
    final int batchSize = context.getProperty(BATCH_SIZE).asInteger();
    final List<File> files = new ArrayList<>(batchSize);
    queueLock.lock();
    try {
        fileQueue.drainTo(files, batchSize);
        if (files.isEmpty()) {
            return;
        } else {
            inProcess.addAll(files);
        }
    } finally {
        queueLock.unlock();
    }
    final ListIterator<File> itr = files.listIterator();
    FlowFile flowFile = null;
    try {
        final Path directoryPath = directory.toPath();
        while (itr.hasNext()) {
            final File file = itr.next();
            final Path filePath = file.toPath();
            final Path relativePath = directoryPath.relativize(filePath.getParent());
            // check for an empty relative path before appending the separator; appending first would make the check dead code
            String relativePathString = relativePath.toString();
            relativePathString = relativePathString.isEmpty() ? "./" : relativePathString + "/";
            final Path absPath = filePath.toAbsolutePath();
            final String absPathString = absPath.getParent().toString() + "/";
            flowFile = session.create();
            final long importStart = System.nanoTime();
            flowFile = session.importFrom(filePath, keepingSourceFile, flowFile);
            final long importNanos = System.nanoTime() - importStart;
            final long importMillis = TimeUnit.MILLISECONDS.convert(importNanos, TimeUnit.NANOSECONDS);
            flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), file.getName());
            flowFile = session.putAttribute(flowFile, CoreAttributes.PATH.key(), relativePathString);
            flowFile = session.putAttribute(flowFile, CoreAttributes.ABSOLUTE_PATH.key(), absPathString);
            Map<String, String> attributes = getAttributesFromFile(filePath);
            if (attributes.size() > 0) {
                flowFile = session.putAllAttributes(flowFile, attributes);
            }
            session.getProvenanceReporter().receive(flowFile, file.toURI().toString(), importMillis);
            session.transfer(flowFile, REL_SUCCESS);
            logger.info("added {} to flow", new Object[] { flowFile });
            if (!isScheduled()) {
                // if the processor was stopped, put the rest of the files back on the queue
                queueLock.lock();
                try {
                    while (itr.hasNext()) {
                        final File nextFile = itr.next();
                        fileQueue.add(nextFile);
                        inProcess.remove(nextFile);
                    }
                } finally {
                    queueLock.unlock();
                }
            }
        }
        session.commit();
    } catch (final Exception e) {
        logger.error("Failed to retrieve files due to {}", e);
        // anything that we've not already processed needs to be put back on the queue
        if (flowFile != null) {
            session.remove(flowFile);
        }
    } finally {
        queueLock.lock();
        try {
            inProcess.removeAll(files);
            recentlyProcessed.addAll(files);
        } finally {
            queueLock.unlock();
        }
    }
}
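A small java.nio.file sketch of how the "path" and "absolute.path" attributes above are derived; the paths are illustrative:

// Sketch of the path-attribute computation used above; paths are illustrative.
final Path directoryPath = Paths.get("/data/in");
final Path filePath = Paths.get("/data/in/sub/dir/file.txt");
final Path relativePath = directoryPath.relativize(filePath.getParent());
System.out.println(relativePath + "/"); // sub/dir/ -> becomes the "path" attribute
System.out.println(filePath.toAbsolutePath().getParent() + "/"); // /data/in/sub/dir/ -> "absolute.path"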