Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class ControlRate, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    List<FlowFile> flowFiles = session.get(new ThrottleFilter(MAX_FLOW_FILES_PER_BATCH));
    if (flowFiles.isEmpty()) {
        context.yield();
        return;
    }
    // Periodically clear any Throttle that has not been used in more than 2 throttling periods
    final long lastClearTime = lastThrottleClearTime.get();
    final long throttleExpirationMillis = System.currentTimeMillis() - 2 * context.getProperty(TIME_PERIOD).asTimePeriod(TimeUnit.MILLISECONDS);
    if (lastClearTime < throttleExpirationMillis) {
        if (lastThrottleClearTime.compareAndSet(lastClearTime, System.currentTimeMillis())) {
            final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
            while (itr.hasNext()) {
                final Map.Entry<String, Throttle> entry = itr.next();
                final Throttle throttle = entry.getValue();
                if (throttle.tryLock()) {
                    try {
                        if (throttle.lastUpdateTime() < lastClearTime) {
                            itr.remove();
                        }
                    } finally {
                        throttle.unlock();
                    }
                }
            }
        }
    }
    final ComponentLog logger = getLogger();
    for (FlowFile flowFile : flowFiles) {
        // compute the accrual amount first; a negative value signals a missing or invalid rate-controlling attribute
        final long accrualAmount = getFlowFileAccrual(flowFile);
        if (accrualAmount < 0) {
            logger.error("Routing {} to 'failure' due to missing or invalid attribute", new Object[] { flowFile });
            session.transfer(flowFile, REL_FAILURE);
        } else {
            logger.info("transferring {} to 'success'", new Object[] { flowFile });
            session.transfer(flowFile, REL_SUCCESS);
        }
    }
}
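The interesting part of this snippet is the lock-free sweep: only the thread that wins the compareAndSet on lastThrottleClearTime walks the map, and it skips any Throttle another thread currently holds (the tryLock/unlock calls above show that a Throttle acts as a lock). Below is a minimal, self-contained sketch of that pattern; the ThrottleSweep class, its fields, and the sweep signature are illustrative assumptions, not NiFi API.

import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;

class ThrottleSweep {
    // hypothetical stand-in for ControlRate's Throttle: a lock with a last-use timestamp
    static final class Throttle extends ReentrantLock {
        volatile long lastUpdateTime;
        long lastUpdateTime() { return lastUpdateTime; }
    }

    private final ConcurrentHashMap<String, Throttle> throttleMap = new ConcurrentHashMap<>();
    private final AtomicLong lastClear = new AtomicLong();

    void sweep(final long expirationMillis) {
        final long lastClearTime = lastClear.get();
        if (lastClearTime >= System.currentTimeMillis() - expirationMillis) {
            return; // swept recently enough; nothing to do
        }
        // only the thread that wins the CAS performs the sweep
        if (!lastClear.compareAndSet(lastClearTime, System.currentTimeMillis())) {
            return;
        }
        final Iterator<Map.Entry<String, Throttle>> itr = throttleMap.entrySet().iterator();
        while (itr.hasNext()) {
            final Throttle throttle = itr.next().getValue();
            if (throttle.tryLock()) { // skip throttles another thread is actively using
                try {
                    if (throttle.lastUpdateTime() < lastClearTime) {
                        itr.remove(); // unused for a full sweep interval: evict
                    }
                } finally {
                    throttle.unlock();
                }
            }
        }
    }
}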
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class EncodeContent, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    boolean encode = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCODE_MODE);
    String encoding = context.getProperty(ENCODING).getValue();
    StreamCallback encoder = null;
    // Select the encoder/decoder to use
    if (encode) {
        if (encoding.equalsIgnoreCase(BASE64_ENCODING)) {
            encoder = new EncodeBase64();
        } else if (encoding.equalsIgnoreCase(BASE32_ENCODING)) {
            encoder = new EncodeBase32();
        } else if (encoding.equalsIgnoreCase(HEX_ENCODING)) {
            encoder = new EncodeHex();
        }
    } else {
        if (encoding.equalsIgnoreCase(BASE64_ENCODING)) {
            encoder = new DecodeBase64();
        } else if (encoding.equalsIgnoreCase(BASE32_ENCODING)) {
            encoder = new DecodeBase32();
        } else if (encoding.equalsIgnoreCase(HEX_ENCODING)) {
            encoder = new DecodeHex();
        }
    }
    if (encoder == null) {
        logger.warn("Unknown operation: {} {}", new Object[] { encode ? "encode" : "decode", encoding });
        return;
    }
    try {
        final StopWatch stopWatch = new StopWatch(true);
        flowFile = session.write(flowFile, encoder);
        logger.info("Successfully {} {}", new Object[] { encode ? "encoded" : "decoded", flowFile });
        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (Exception e) {
        logger.error("Failed to {} {} due to {}", new Object[] { encode ? "encode" : "decode", flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
    }
}
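Here encoder is a StreamCallback, NiFi's hook for rewriting a FlowFile's content inside session.write. As a hypothetical illustration of what one of these helpers could look like, the sketch below implements Base64 encoding with commons-codec's Base64OutputStream and commons-io's IOUtils; the actual EncodeBase64 in NiFi may be implemented differently.

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.codec.binary.Base64OutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.nifi.processor.io.StreamCallback;

class EncodeBase64 implements StreamCallback {
    @Override
    public void process(final InputStream in, final OutputStream out) throws IOException {
        // wrap the FlowFile's output stream so everything copied through it is Base64-encoded
        try (Base64OutputStream base64 = new Base64OutputStream(out)) {
            IOUtils.copy(in, base64);
        }
    }
}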
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class EncryptContent, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    final String method = context.getProperty(ENCRYPTION_ALGORITHM).getValue();
    final EncryptionMethod encryptionMethod = EncryptionMethod.valueOf(method);
    final String providerName = encryptionMethod.getProvider();
    final String algorithm = encryptionMethod.getAlgorithm();
    final String password = context.getProperty(PASSWORD).getValue();
    final KeyDerivationFunction kdf = KeyDerivationFunction.valueOf(context.getProperty(KEY_DERIVATION_FUNCTION).getValue());
    final boolean encrypt = context.getProperty(MODE).getValue().equalsIgnoreCase(ENCRYPT_MODE);
    Encryptor encryptor;
    StreamCallback callback;
    try {
        if (isPGPAlgorithm(algorithm)) {
            final String filename = flowFile.getAttribute(CoreAttributes.FILENAME.key());
            final String publicKeyring = context.getProperty(PUBLIC_KEYRING).getValue();
            final String privateKeyring = context.getProperty(PRIVATE_KEYRING).getValue();
            if (encrypt && publicKeyring != null) {
                final String publicUserId = context.getProperty(PUBLIC_KEY_USERID).getValue();
                encryptor = new OpenPGPKeyBasedEncryptor(algorithm, providerName, publicKeyring, publicUserId, null, filename);
            } else if (!encrypt && privateKeyring != null) {
                final char[] keyringPassphrase = context.getProperty(PRIVATE_KEYRING_PASSPHRASE).evaluateAttributeExpressions().getValue().toCharArray();
                encryptor = new OpenPGPKeyBasedEncryptor(algorithm, providerName, privateKeyring, null, keyringPassphrase, filename);
            } else {
                final char[] passphrase = Normalizer.normalize(password, Normalizer.Form.NFC).toCharArray();
                encryptor = new OpenPGPPasswordBasedEncryptor(algorithm, providerName, passphrase, filename);
            }
        } else if (kdf.equals(KeyDerivationFunction.NONE)) {
            // Raw key
            final String keyHex = context.getProperty(RAW_KEY_HEX).getValue();
            encryptor = new KeyedEncryptor(encryptionMethod, Hex.decodeHex(keyHex.toCharArray()));
        } else {
            // PBE
            final char[] passphrase = Normalizer.normalize(password, Normalizer.Form.NFC).toCharArray();
            encryptor = new PasswordBasedEncryptor(encryptionMethod, passphrase, kdf);
        }
        if (encrypt) {
            callback = encryptor.getEncryptionCallback();
        } else {
            callback = encryptor.getDecryptionCallback();
        }
    } catch (final Exception e) {
        logger.error("Failed to initialize {}cryption algorithm because - ", new Object[] { encrypt ? "en" : "de", e });
        session.rollback();
        context.yield();
        return;
    }
    try {
        final StopWatch stopWatch = new StopWatch(true);
        flowFile = session.write(flowFile, callback);
        logger.info("successfully {}crypted {}", new Object[] { encrypt ? "en" : "de", flowFile });
        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final ProcessException e) {
        logger.error("Cannot {}crypt {} - ", new Object[] { encrypt ? "en" : "de", flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
    }
}
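Note that both password-based branches normalize the password to NFC before turning it into a passphrase. This matters because the same visible string can arrive in different Unicode forms and would otherwise derive different keys. A small standalone illustration (class and variable names are mine):

import java.text.Normalizer;

public class NfcDemo {
    public static void main(String[] args) {
        String precomposed = "\u00e9";   // "é" as a single code point
        String decomposed = "e\u0301";   // "e" followed by a combining acute accent
        System.out.println(precomposed.equals(decomposed)); // false: different code points
        // after NFC normalization the two forms compare equal, so they derive the same key
        System.out.println(Normalizer.normalize(decomposed, Normalizer.Form.NFC).equals(precomposed)); // true
    }
}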
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class ExecuteSQL, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile fileToProcess = null;
    if (context.hasIncomingConnection()) {
        fileToProcess = session.get();
        // if there is no FlowFile but a non-loop connection exists, run only once a FlowFile arrives
        if (fileToProcess == null && context.hasNonLoopConnection()) {
            return;
        }
    }
    final ComponentLog logger = getLogger();
    final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).asTimePeriod(TimeUnit.SECONDS).intValue();
    final boolean convertNamesForAvro = context.getProperty(NORMALIZE_NAMES_FOR_AVRO).asBoolean();
    final Boolean useAvroLogicalTypes = context.getProperty(USE_AVRO_LOGICAL_TYPES).asBoolean();
    final Integer defaultPrecision = context.getProperty(DEFAULT_PRECISION).evaluateAttributeExpressions().asInteger();
    final Integer defaultScale = context.getProperty(DEFAULT_SCALE).evaluateAttributeExpressions().asInteger();
    final StopWatch stopWatch = new StopWatch(true);
    final String selectQuery;
    if (context.getProperty(SQL_SELECT_QUERY).isSet()) {
        selectQuery = context.getProperty(SQL_SELECT_QUERY).evaluateAttributeExpressions(fileToProcess).getValue();
    } else {
        // If the query is not set, then an incoming flow file is required, and expected to contain a valid SQL select query.
        // If there is no incoming connection, onTrigger will not be called as the processor will fail when scheduled.
        final StringBuilder queryContents = new StringBuilder();
        session.read(fileToProcess, in -> queryContents.append(IOUtils.toString(in, Charset.defaultCharset())));
        selectQuery = queryContents.toString();
    }
    int resultCount = 0;
    try (final Connection con = dbcpService.getConnection();
         final PreparedStatement st = con.prepareStatement(selectQuery)) {
        // timeout in seconds
        st.setQueryTimeout(queryTimeout);
        if (fileToProcess != null) {
            JdbcCommon.setParameters(st, fileToProcess.getAttributes());
        }
        logger.debug("Executing query {}", new Object[] { selectQuery });
        boolean results = st.execute();
        while (results) {
            FlowFile resultSetFF;
            if (fileToProcess == null) {
                resultSetFF = session.create();
            } else {
                resultSetFF = session.create(fileToProcess);
                resultSetFF = session.putAllAttributes(resultSetFF, fileToProcess.getAttributes());
            }
            final AtomicLong nrOfRows = new AtomicLong(0L);
            resultSetFF = session.write(resultSetFF, out -> {
                try {
                    final ResultSet resultSet = st.getResultSet();
                    final JdbcCommon.AvroConversionOptions options = JdbcCommon.AvroConversionOptions.builder()
                            .convertNames(convertNamesForAvro)
                            .useLogicalTypes(useAvroLogicalTypes)
                            .defaultPrecision(defaultPrecision)
                            .defaultScale(defaultScale)
                            .build();
                    nrOfRows.set(JdbcCommon.convertToAvroStream(resultSet, out, options, null));
                } catch (final SQLException e) {
                    throw new ProcessException(e);
                }
            });
            long duration = stopWatch.getElapsed(TimeUnit.MILLISECONDS);
            // record how many rows were selected as an attribute
            resultSetFF = session.putAttribute(resultSetFF, RESULT_ROW_COUNT, String.valueOf(nrOfRows.get()));
            resultSetFF = session.putAttribute(resultSetFF, RESULT_QUERY_DURATION, String.valueOf(duration));
            resultSetFF = session.putAttribute(resultSetFF, CoreAttributes.MIME_TYPE.key(), JdbcCommon.MIME_TYPE_AVRO_BINARY);
            logger.info("{} contains {} Avro records; transferring to 'success'", new Object[] { resultSetFF, nrOfRows.get() });
            session.getProvenanceReporter().modifyContent(resultSetFF, "Retrieved " + nrOfRows.get() + " rows", duration);
            session.transfer(resultSetFF, REL_SUCCESS);
            resultCount++;
            // are there any more result sets?
            try {
                results = st.getMoreResults();
            } catch (SQLException ex) {
                results = false;
            }
        }
        // pass the original flow file down the line to trigger downstream processors
        if (fileToProcess != null) {
            if (resultCount > 0) {
                session.remove(fileToProcess);
            } else {
                fileToProcess = session.write(fileToProcess, JdbcCommon::createEmptyAvroStream);
                session.transfer(fileToProcess, REL_SUCCESS);
            }
        }
    } catch (final ProcessException | SQLException e) {
        if (fileToProcess == null) {
            // This can happen if any exceptions occur while setting up the connection, statement, etc.
            logger.error("Unable to execute SQL select query {} due to {}. No FlowFile to route to failure", new Object[] { selectQuery, e });
            context.yield();
        } else {
            if (context.hasIncomingConnection()) {
                logger.error("Unable to execute SQL select query {} for {} due to {}; routing to failure", new Object[] { selectQuery, fileToProcess, e });
                fileToProcess = session.penalize(fileToProcess);
            } else {
                logger.error("Unable to execute SQL select query {} due to {}; routing to failure", new Object[] { selectQuery, e });
                context.yield();
            }
            session.transfer(fileToProcess, REL_FAILURE);
        }
    }
}
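The while (results) / getMoreResults() loop is plain JDBC rather than anything NiFi-specific: a single statement may yield several result sets (a stored procedure, for example), and each one becomes its own FlowFile above. A stripped-down sketch of the same iteration, with hypothetical class, method, and variable names:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class MultiResultSetDemo {
    // iterate every result set a statement produces, printing the first column of each row
    static void printFirstColumns(final Connection con, final String sql) throws SQLException {
        try (Statement st = con.createStatement()) {
            boolean results = st.execute(sql); // true if the first result is a ResultSet
            while (results) {
                try (ResultSet rs = st.getResultSet()) {
                    while (rs.next()) {
                        System.out.println(rs.getString(1));
                    }
                }
                // false once the next result is an update count or nothing remains
                results = st.getMoreResults();
            }
        }
    }
}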
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class GetFile, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final File directory = new File(context.getProperty(DIRECTORY).evaluateAttributeExpressions().getValue());
    final boolean keepingSourceFile = context.getProperty(KEEP_SOURCE_FILE).asBoolean();
    final ComponentLog logger = getLogger();
    if (fileQueue.size() < 100) {
        final long pollingMillis = context.getProperty(POLLING_INTERVAL).asTimePeriod(TimeUnit.MILLISECONDS);
        if ((queueLastUpdated.get() < System.currentTimeMillis() - pollingMillis) && listingLock.tryLock()) {
            try {
                final Set<File> listing = performListing(directory, fileFilterRef.get(), context.getProperty(RECURSE).asBoolean().booleanValue());
                queueLock.lock();
                try {
                    listing.removeAll(inProcess);
                    if (!keepingSourceFile) {
                        listing.removeAll(recentlyProcessed);
                    }
                    fileQueue.clear();
                    fileQueue.addAll(listing);
                    queueLastUpdated.set(System.currentTimeMillis());
                    recentlyProcessed.clear();
                    if (listing.isEmpty()) {
                        context.yield();
                    }
                } finally {
                    queueLock.unlock();
                }
            } finally {
                listingLock.unlock();
            }
        }
    }
    final int batchSize = context.getProperty(BATCH_SIZE).asInteger();
    final List<File> files = new ArrayList<>(batchSize);
    queueLock.lock();
    try {
        fileQueue.drainTo(files, batchSize);
        if (files.isEmpty()) {
            return;
        } else {
            inProcess.addAll(files);
        }
    } finally {
        queueLock.unlock();
    }
    final ListIterator<File> itr = files.listIterator();
    FlowFile flowFile = null;
    try {
        final Path directoryPath = directory.toPath();
        while (itr.hasNext()) {
            final File file = itr.next();
            final Path filePath = file.toPath();
            final Path relativePath = directoryPath.relativize(filePath.getParent());
            String relativePathString = relativePath.toString() + "/";
            if (relativePathString.isEmpty()) {
                relativePathString = "./";
            }
            final Path absPath = filePath.toAbsolutePath();
            final String absPathString = absPath.getParent().toString() + "/";
            flowFile = session.create();
            final long importStart = System.nanoTime();
            flowFile = session.importFrom(filePath, keepingSourceFile, flowFile);
            final long importNanos = System.nanoTime() - importStart;
            final long importMillis = TimeUnit.MILLISECONDS.convert(importNanos, TimeUnit.NANOSECONDS);
            flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), file.getName());
            flowFile = session.putAttribute(flowFile, CoreAttributes.PATH.key(), relativePathString);
            flowFile = session.putAttribute(flowFile, CoreAttributes.ABSOLUTE_PATH.key(), absPathString);
            Map<String, String> attributes = getAttributesFromFile(filePath);
            if (attributes.size() > 0) {
                flowFile = session.putAllAttributes(flowFile, attributes);
            }
            session.getProvenanceReporter().receive(flowFile, file.toURI().toString(), importMillis);
            session.transfer(flowFile, REL_SUCCESS);
            logger.info("added {} to flow", new Object[] { flowFile });
            if (!isScheduled()) {
                // if processor stopped, put the rest of the files back on the queue.
                queueLock.lock();
                try {
                    while (itr.hasNext()) {
                        final File nextFile = itr.next();
                        fileQueue.add(nextFile);
                        inProcess.remove(nextFile);
                    }
                } finally {
                    queueLock.unlock();
                }
            }
        }
        session.commit();
    } catch (final Exception e) {
        logger.error("Failed to retrieve files due to {}", e);
        // anything that we've not already processed needs to be put back on the queue
        if (flowFile != null) {
            session.remove(flowFile);
        }
    } finally {
        queueLock.lock();
        try {
            inProcess.removeAll(files);
            recentlyProcessed.addAll(files);
        } finally {
            queueLock.unlock();
        }
    }
}
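performListing is the processor's own helper, refreshed at most once per polling interval and only by the thread that wins listingLock.tryLock(). Its implementation isn't shown here; a plausible recursive sketch using only the JDK might look like the following, with the real method assumed to also apply checks such as file readability.

import java.io.File;
import java.io.FileFilter;
import java.util.HashSet;
import java.util.Set;

class ListingSketch {
    // recursively collect regular files under a directory that match the filter
    static Set<File> performListing(final File directory, final FileFilter filter, final boolean recurse) {
        final Set<File> queue = new HashSet<>();
        final File[] children = directory.listFiles();
        if (children == null) {
            return queue; // not a directory, or it could not be read
        }
        for (final File child : children) {
            if (child.isDirectory()) {
                if (recurse) {
                    queue.addAll(performListing(child, filter, true));
                }
            } else if (filter.accept(child)) {
                queue.add(child);
            }
        }
        return queue;
    }
}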