Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class MergeRecord, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
    // Lazily initialize the RecordBinManager; compareAndSet guards against
    // another thread creating it first.
    RecordBinManager manager = binManager.get();
    while (manager == null) {
        manager = new RecordBinManager(context, sessionFactory, getLogger());
        manager.setMaxBinAge(context.getProperty(MAX_BIN_AGE).asTimePeriod(TimeUnit.NANOSECONDS), TimeUnit.NANOSECONDS);

        final boolean updated = binManager.compareAndSet(null, manager);
        if (!updated) {
            manager = binManager.get();
        }
    }

    final ProcessSession session = sessionFactory.createSession();
    final List<FlowFile> flowFiles = session.get(FlowFileFilters.newSizeBasedFilter(250, DataUnit.KB, 250));
    if (getLogger().isDebugEnabled()) {
        final List<String> ids = flowFiles.stream().map(ff -> "id=" + ff.getId()).collect(Collectors.toList());
        getLogger().debug("Pulled {} FlowFiles from queue: {}", new Object[] {ids.size(), ids});
    }

    // Defragmenting, or correlating on an attribute, requires that a bin block
    // until it is complete rather than being merged with unrelated FlowFiles.
    final String mergeStrategy = context.getProperty(MERGE_STRATEGY).getValue();
    final boolean block;
    if (MERGE_STRATEGY_DEFRAGMENT.equals(mergeStrategy)) {
        block = true;
    } else if (context.getProperty(CORRELATION_ATTRIBUTE_NAME).isSet()) {
        block = true;
    } else {
        block = false;
    }

    try {
        for (final FlowFile flowFile : flowFiles) {
            try {
                binFlowFile(context, flowFile, session, manager, block);
            } catch (final Exception e) {
                getLogger().error("Failed to bin {} due to {}", new Object[] {flowFile, e});
                session.transfer(flowFile, REL_FAILURE);
            }
        }
    } finally {
        session.commit();
    }

    try {
        manager.completeExpiredBins();
    } catch (final Exception e) {
        getLogger().error("Failed to merge FlowFiles to create new bin due to " + e, e);
    }

    if (flowFiles.isEmpty()) {
        getLogger().debug("No FlowFiles to bin; will yield");
        context.yield();
    }
}
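The initialization loop at the top is a lock-free lazy-init pattern: each thread builds a candidate manager, and compareAndSet lets exactly one win the race. The same pattern in isolation, as a minimal self-contained sketch (the LazyHolder and ExpensiveResource names are illustrative, not part of NiFi):

import java.util.concurrent.atomic.AtomicReference;

class LazyHolder {
    private final AtomicReference<ExpensiveResource> ref = new AtomicReference<>();

    ExpensiveResource getOrCreate() {
        ExpensiveResource resource = ref.get();
        while (resource == null) {
            // Build a candidate; several threads may do this concurrently.
            final ExpensiveResource candidate = new ExpensiveResource();
            if (ref.compareAndSet(null, candidate)) {
                resource = candidate;   // this thread won the race
            } else {
                resource = ref.get();   // another thread won; use its instance
            }
        }
        return resource;
    }

    // Illustrative placeholder for whatever is expensive to construct.
    static class ExpensiveResource { }
}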
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class Notify, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final PropertyValue signalIdProperty = context.getProperty(RELEASE_SIGNAL_IDENTIFIER);
    final PropertyValue counterNameProperty = context.getProperty(SIGNAL_COUNTER_NAME);
    final PropertyValue deltaProperty = context.getProperty(SIGNAL_COUNTER_DELTA);
    final String attributeCacheRegex = context.getProperty(ATTRIBUTE_CACHE_REGEX).getValue();
    final Integer bufferCount = context.getProperty(SIGNAL_BUFFER_COUNT).asInteger();

    // The cache client used to interact with the distributed cache.
    final AtomicDistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(AtomicDistributedMapCacheClient.class);
    final WaitNotifyProtocol protocol = new WaitNotifyProtocol(cache);

    final Map<String, SignalBuffer> signalBuffers = new HashMap<>();
    for (int i = 0; i < bufferCount; i++) {
        final FlowFile flowFile = session.get();
        if (flowFile == null) {
            break;
        }

        // The signal id is computed from the 'Release Signal Identifier' property, with Expression Language support.
        final String signalId = signalIdProperty.evaluateAttributeExpressions(flowFile).getValue();

        // If the computed value is null or empty, transfer the FlowFile to the failure relationship.
        if (StringUtils.isBlank(signalId)) {
            logger.error("FlowFile {} has no attribute for given Release Signal Identifier", new Object[] {flowFile});
            // Set the 'notified' attribute to false before routing to failure.
            session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
            continue;
        }

        String counterName = counterNameProperty.evaluateAttributeExpressions(flowFile).getValue();
        if (StringUtils.isEmpty(counterName)) {
            counterName = WaitNotifyProtocol.DEFAULT_COUNT_NAME;
        }

        int delta = 1;
        if (deltaProperty.isSet()) {
            final String deltaStr = deltaProperty.evaluateAttributeExpressions(flowFile).getValue();
            try {
                delta = Integer.parseInt(deltaStr);
            } catch (final NumberFormatException e) {
                logger.error("Failed to calculate delta for FlowFile {} due to {}", new Object[] {flowFile, e}, e);
                session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
                continue;
            }
        }

        if (!signalBuffers.containsKey(signalId)) {
            signalBuffers.put(signalId, new SignalBuffer());
        }
        final SignalBuffer signalBuffer = signalBuffers.get(signalId);

        if (StringUtils.isNotEmpty(attributeCacheRegex)) {
            flowFile.getAttributes().entrySet().stream()
                    .filter(e -> !e.getKey().equals("uuid") && e.getKey().matches(attributeCacheRegex))
                    .forEach(e -> signalBuffer.attributesToCache.put(e.getKey(), e.getValue()));
        }

        signalBuffer.incrementDelta(counterName, delta);
        signalBuffer.flowFiles.add(flowFile);
        if (logger.isDebugEnabled()) {
            logger.debug("Cached release signal identifier {} counterName {} from FlowFile {}", new Object[] {signalId, counterName, flowFile});
        }
    }

    signalBuffers.forEach((signalId, signalBuffer) -> {
        // If the cache update fails, simply propagate the exception so the processor
        // can retry after yielding for a while.
        try {
            protocol.notify(signalId, signalBuffer.deltas, signalBuffer.attributesToCache);
            signalBuffer.flowFiles.forEach(flowFile ->
                    session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(true)), REL_SUCCESS));
        } catch (IOException e) {
            throw new RuntimeException(String.format("Unable to communicate with cache when processing %s due to %s", signalId, e), e);
        }
    });
}
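SignalBuffer is a small per-signal accumulator that this excerpt uses but does not define. A plausible reconstruction based only on the usage above — the member names (deltas, attributesToCache, flowFiles, incrementDelta) come from the call sites, while the internals are assumptions:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.nifi.flowfile.FlowFile;

// Nested inside Notify in the real source; internals beyond the referenced names are assumptions.
private static class SignalBuffer {

    final Map<String, Integer> deltas = new HashMap<>();
    final Map<String, String> attributesToCache = new HashMap<>();
    final List<FlowFile> flowFiles = new ArrayList<>();

    void incrementDelta(final String counterName, final int delta) {
        // Accumulate one running delta per counter name; merge() handles the first increment.
        deltas.merge(counterName, delta, Integer::sum);
    }
}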
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class ParseCEF, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final CEFParser parser = new CEFParser(validator);

    // Read the entire FlowFile content into a byte buffer for parsing.
    final byte[] buffer = new byte[(int) flowFile.getSize()];
    session.read(flowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            StreamUtils.fillBuffer(in, buffer);
        }
    });

    CommonEvent event;
    try {
        // parcefoneLocale defaults to en_US, so this should not fail. But we force a
        // failure in case the custom validator failed to identify an invalid Locale.
        final Locale parcefoneLocale = Locale.forLanguageTag(context.getProperty(DATETIME_REPRESENTATION).getValue());
        event = parser.parse(buffer, true, parcefoneLocale);
    } catch (Exception e) {
        // This should never trigger, but it is kept as a fencing mechanism against
        // possible ParCEFone bugs.
        getLogger().error("Parser returned unexpected Exception {} while processing {}; routing to failure", new Object[] {e, flowFile});
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    // The parser returns null when the content cannot be parsed as a CEF event, so we test for that.
    if (event == null) {
        getLogger().error("Failed to parse {} as a CEF message: it does not conform to the CEF standard; routing to failure", new Object[] {flowFile});
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    try {
        final String destination = context.getProperty(FIELDS_DESTINATION).getValue();
        switch (destination) {
            case DESTINATION_ATTRIBUTES:
                final Map<String, String> attributes = new HashMap<>();

                // Process KVs of the Header field
                for (Map.Entry<String, Object> entry : event.getHeader().entrySet()) {
                    attributes.put("cef.header." + entry.getKey(), prettyResult(entry.getValue(), tzId));
                }

                // Process KVs composing the Extension field
                for (Map.Entry<String, Object> entry : event.getExtension(true).entrySet()) {
                    attributes.put("cef.extension." + entry.getKey(), prettyResult(entry.getValue(), tzId));
                }

                // Apply all collected attributes in a single update. (Doing this inside the
                // loop above, as the original did, churns FlowFile revisions and skips the
                // header attributes entirely when an event has no extension fields.)
                flowFile = session.putAllAttributes(flowFile, attributes);
                break;

            case DESTINATION_CONTENT:
                ObjectNode results = mapper.createObjectNode();

                // Add two JSON objects containing one CEF field each
                results.set("header", mapper.valueToTree(event.getHeader()));
                results.set("extension", mapper.valueToTree(event.getExtension(true)));

                // Optionally append the original raw message to the resulting JSON
                if (context.getProperty(APPEND_RAW_MESSAGE_TO_JSON).asBoolean()) {
                    results.set("_raw", mapper.valueToTree(new String(buffer)));
                }

                flowFile = session.write(flowFile, new OutputStreamCallback() {
                    @Override
                    public void process(OutputStream out) throws IOException {
                        try (OutputStream outputStream = new BufferedOutputStream(out)) {
                            outputStream.write(mapper.writeValueAsBytes(results));
                        }
                    }
                });

                // Adjust the FlowFile mime.type attribute
                flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/json");

                // Update the provenance for good measure
                session.getProvenanceReporter().modifyContent(flowFile, "Replaced content with parsed CEF fields and values");
                break;
        }

        // Whichever destination was chosen, the FlowFile is ready to transfer to success and commit.
        session.transfer(flowFile, REL_SUCCESS);
        session.commit();
    } catch (CEFHandlingException e) {
        // The FlowFile has failed parsing and validation; route to failure and commit.
        getLogger().error("Failed to parse {} as a CEF message due to {}; routing to failure", new Object[] {flowFile, e});
        // Create a provenance event recording the routing to failure
        session.getProvenanceReporter().route(flowFile, REL_FAILURE);
        session.transfer(flowFile, REL_FAILURE);
        session.commit();
        return;
    } finally {
        // A no-op when the session has already been committed; only takes effect if an
        // unexpected error prevented the commit above.
        session.rollback();
    }
}
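Both prettyResult and tzId are ParseCEF members that fall outside this excerpt; judging from the call sites, prettyResult renders a parsed CEF value as a string, formatting timestamps in the time zone identified by tzId. A hypothetical sketch of that idea (the signature is inferred from usage and the body is an assumption, not the actual NiFi implementation):

// Hypothetical sketch inferred from the call sites above; not the real ParseCEF code.
private String prettyResult(final Object value, final String tzId) {
    if (value instanceof java.util.Date) {
        // Render timestamps in the configured zone rather than the JVM default.
        return java.time.OffsetDateTime
                .ofInstant(((java.util.Date) value).toInstant(), java.time.ZoneId.of(tzId))
                .toString();
    }
    return String.valueOf(value);
}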
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class PutRiemann, method onScheduled:
@OnScheduled
public void onScheduled(ProcessContext context) throws ProcessException {
    if (batchSize == -1) {
        batchSize = context.getProperty(BATCH_SIZE).asInteger();
    }

    if (riemannClient == null || !riemannClient.isConnected()) {
        transport = Transport.valueOf(context.getProperty(TRANSPORT_PROTOCOL).getValue());
        String host = context.getProperty(RIEMANN_HOST).getValue().trim();
        int port = context.getProperty(RIEMANN_PORT).asInteger();
        writeTimeout = context.getProperty(TIMEOUT).asLong();

        RiemannClient client = null;
        try {
            switch (transport) {
                case TCP:
                    client = RiemannClient.tcp(host, port);
                    break;
                case UDP:
                    client = RiemannClient.udp(host, port);
                    break;
            }
            client.connect();
            riemannClient = client;
        } catch (IOException e) {
            if (client != null) {
                client.close();
            }
            context.yield();
            throw new ProcessException(String.format("Unable to connect to Riemann [%s:%d] (%s)\n%s", host, port, transport, e.getMessage()));
        }
    }

    if (customAttributes.isEmpty()) {
        for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) {
            // only custom defined (dynamic) properties
            if (!getSupportedPropertyDescriptors().contains(property.getKey())) {
                customAttributes.add(property.getKey());
            }
        }
    }
}
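The final loop collects user-defined (dynamic) properties, which a NiFi processor only receives if it advertises support for them. A minimal sketch of the usual override that enables dynamic properties, shown for context rather than copied from PutRiemann:

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.processor.util.StandardValidators;

// Typical shape of dynamic-property support in a NiFi processor; PutRiemann's
// actual override may differ.
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    return new PropertyDescriptor.Builder()
            .name(propertyDescriptorName)
            .dynamic(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
}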
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class PutRiemann, method onTrigger:
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    // The connection may have dropped since the processor was scheduled; if so,
    // clean up the client and attempt to re-initialize the processor.
    if (riemannClient == null || !riemannClient.isConnected()) {
        cleanUpClient();
        onScheduled(context);
    }

    List<FlowFile> incomingFlowFiles = session.get(batchSize);
    List<FlowFile> successfulFlowFiles = new ArrayList<>(incomingFlowFiles.size());
    List<Event> eventsQueue = new ArrayList<>(incomingFlowFiles.size());
    for (FlowFile flowFile : incomingFlowFiles) {
        try {
            eventsQueue.add(FlowFileToEvent.fromAttributes(context, customAttributes, flowFile));
            successfulFlowFiles.add(flowFile);
        } catch (NumberFormatException e) {
            getLogger().warn("Unable to create Riemann event.", e);
            session.transfer(flowFile, REL_FAILURE);
        }
    }

    try {
        if (transport == Transport.TCP) {
            // TCP is acknowledged: dereference the response and treat a null reply as a timeout.
            Proto.Msg returnMessage = riemannClient.sendEvents(eventsQueue).deref(writeTimeout, TimeUnit.MILLISECONDS);
            if (returnMessage == null) {
                context.yield();
                throw new ProcessException("Timed out writing to Riemann!");
            }
        } else {
            // UDP is fire-and-forget; there is no acknowledgement to wait for.
            riemannClient.sendEvents(eventsQueue);
        }
        riemannClient.flush();
        session.transfer(successfulFlowFiles, REL_SUCCESS);
        session.commit();
    } catch (Exception e) {
        context.yield();
        // Transfer without a relationship returns the FlowFiles to the input queue for retry.
        session.transfer(incomingFlowFiles);
        session.commit();
        throw new ProcessException("Failed writing to Riemann\n" + e.getMessage());
    }
}
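FlowFileToEvent.fromAttributes is a helper from the same bundle that builds a Riemann Proto.Event out of FlowFile attributes and evaluated properties; the NumberFormatException it can throw while parsing a metric is what the loop above catches. A hypothetical sketch of the core idea, using illustrative attribute names that are not taken from the real class (the com.aphyr package name matches the riemann-java-client generation NiFi used at the time; newer releases live under io.riemann):

import java.util.Map;
import com.aphyr.riemann.Proto;

// Hypothetical sketch of the attribute-to-event mapping; the attribute names and
// parsing below are illustrative, not PutRiemann's actual implementation.
static Proto.Event fromAttributesSketch(final Map<String, String> attributes) {
    final Proto.Event.Builder builder = Proto.Event.newBuilder();
    if (attributes.containsKey("riemann.service")) {   // illustrative attribute name
        builder.setService(attributes.get("riemann.service"));
    }
    if (attributes.containsKey("riemann.metric")) {    // illustrative attribute name
        // Parsing the metric is the step that would raise the NumberFormatException
        // handled in onTrigger above.
        builder.setMetricF(Float.parseFloat(attributes.get("riemann.metric")));
    }
    return builder.build();
}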