Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class PutCassandraQLTest, method testProcessorProcessException.
@Test
public void testProcessorProcessException() {
    setUpStandardTestConfig();
    processor.setExceptionToThrow(new ProcessException());
    testRunner.enqueue("UPDATE users SET cities = [ 'New York', 'Los Angeles' ] WHERE user_id = 'coast2coast';");
    testRunner.run(1, true, true);
    testRunner.assertAllFlowFilesTransferred(PutCassandraQL.REL_FAILURE, 1);
}
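The setUpStandardTestConfig() call and the setExceptionToThrow(...) hook belong to the test harness rather than the processor itself. A minimal sketch of what that setup might look like, assuming a MockPutCassandraQL test double and nifi-mock's TestRunners; the mock class, field names, and property values here are assumptions, not the actual test code:

// Hypothetical test double: the real one would surface exceptionToThrow from
// the Cassandra session it builds, so onTrigger hits the failure path.
static class MockPutCassandraQL extends PutCassandraQL {
    private Exception exceptionToThrow;

    void setExceptionToThrow(final Exception e) {
        this.exceptionToThrow = e;
    }
}

private void setUpStandardTestConfig() {
    processor = new MockPutCassandraQL();               // assumed test double
    testRunner = TestRunners.newTestRunner(processor);  // nifi-mock factory
    testRunner.setProperty(PutCassandraQL.CONTACT_POINTS, "localhost:9042"); // assumed value
}

The run(1, true, true) call initializes the processor, executes a single onTrigger iteration, and stops it afterwards, so the injected ProcessException is thrown during that one run and the flow file is routed to REL_FAILURE.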
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class ExtractCCDAAttributes, method loadMappings.
protected void loadMappings() {
    ClassLoader classloader = Thread.currentThread().getContextClassLoader();
    Properties mappings = new Properties();
    try (InputStream is = classloader.getResourceAsStream("mapping.properties")) {
        mappings.load(is);
        // each child element is key#value and multiple elements are separated by @
        for (String property : mappings.stringPropertyNames()) {
            String[] variables = StringUtils.split(mappings.getProperty(property), FIELD_SEPARATOR);
            Map<String, String> map = new LinkedHashMap<String, String>();
            for (String variable : variables) {
                String[] keyvalue = StringUtils.split(variable, KEY_VALUE_SEPARATOR);
                map.put(keyvalue[0], keyvalue[1]);
            }
            processMap.put(property, map);
        }
    } catch (IOException e) {
        getLogger().error("Failed to load mappings", e);
        throw new ProcessException("Failed to load mappings", e);
    }
}
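Per the comment above, FIELD_SEPARATOR splits a property value into elements with '@' and KEY_VALUE_SEPARATOR splits each element with '#'. A small standalone demonstration of that parsing rule on an invented entry (the property value below is illustrative only):

// Hypothetical mapping entry, e.g. patientRole=id#patient.id@addr#patient.address
String value = "id#patient.id@addr#patient.address";
Map<String, String> map = new LinkedHashMap<>();
for (String variable : StringUtils.split(value, "@")) {    // FIELD_SEPARATOR
    String[] keyvalue = StringUtils.split(variable, "#");  // KEY_VALUE_SEPARATOR
    map.put(keyvalue[0], keyvalue[1]);
}
// map now contains {id=patient.id, addr=patient.address}

Note that a malformed element with no '#' would make keyvalue[1] throw ArrayIndexOutOfBoundsException; the method only guards against IOException from loading the resource.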
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class ExtractCCDAAttributes, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    // stores CDA attributes
    Map<String, String> attributes = new TreeMap<String, String>();
    getLogger().info("Processing CCDA");
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    if (processMap.isEmpty()) {
        getLogger().error("Process Mapping is not loaded");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    final Boolean skipValidation = context.getProperty(SKIP_VALIDATION).asBoolean();
    final StopWatch stopWatch = new StopWatch(true);
    ClinicalDocument cd = null;
    try {
        // Load and optionally validate CDA document
        cd = loadDocument(session.read(flowFile), skipValidation);
    } catch (ProcessException e) {
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    getLogger().debug("Loaded document for {} in {}", new Object[] { flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS) });
    getLogger().debug("Processing elements");
    // Process CDA element using mapping data
    processElement(null, cd, attributes);
    flowFile = session.putAllAttributes(flowFile, attributes);
    stopWatch.stop();
    getLogger().debug("Successfully processed {} in {}", new Object[] { flowFile, stopWatch.getDuration(TimeUnit.MILLISECONDS) });
    if (getLogger().isDebugEnabled()) {
        for (Entry<String, String> entry : attributes.entrySet()) {
            getLogger().debug("Attribute: {}={}", new Object[] { entry.getKey(), entry.getValue() });
        }
    }
    session.transfer(flowFile, REL_SUCCESS);
}
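A sketch of exercising this processor with the nifi-mock framework; the sample file path and the property name string are assumptions, since this snippet does not show how SKIP_VALIDATION is declared:

TestRunner runner = TestRunners.newTestRunner(new ExtractCCDAAttributes());
runner.setProperty("skip-validation", "true");                    // assumed property name
runner.enqueue(Paths.get("src/test/resources/ccda-sample.xml"));  // hypothetical sample document
runner.run();
runner.assertAllFlowFilesTransferred(ExtractCCDAAttributes.REL_SUCCESS, 1);
// extracted CDA values appear as flow file attributes keyed by the mapping hierarchy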
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class ConsumeAzureEventHub, method onTrigger.
@Override
public void onTrigger(ProcessContext context, ProcessSessionFactory sessionFactory) throws ProcessException {
    if (eventProcessorHost == null) {
        try {
            registerEventProcessor(context);
        } catch (IllegalArgumentException e) {
            // In order to show a simple error message without wrapping it in another ProcessException, rethrow it as it is.
            throw e;
        } catch (Exception e) {
            throw new ProcessException("Failed to register the event processor due to " + e, e);
        }
        processSessionFactory = sessionFactory;
        readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
        writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
    }
    // After an EventProcessor is registered successfully, nothing has to be done at onTrigger
    // because new sessions are created by the EventProcessor when new messages arrive.
    context.yield();
}
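The factories stored on the first invocation are consumed on the event processor's callback threads, not in onTrigger. A schematic, hypothetical and simplified from the record handling the real processor performs, of how such a callback might use processSessionFactory:

// Hypothetical callback body: each event batch gets its own session,
// created and committed independently of the onTrigger thread.
void onEvents(final Iterable<EventData> events) {
    final ProcessSession session = processSessionFactory.createSession();
    try {
        for (final EventData eventData : events) {
            FlowFile flowFile = session.create();
            flowFile = session.write(flowFile, out -> out.write(eventData.getBytes()));
            session.transfer(flowFile, REL_SUCCESS);
        }
        session.commit();
    } catch (final Exception e) {
        session.rollback();
        throw new ProcessException(e);
    }
}

This is why onTrigger only yields after registration: the processor thread has no polling work of its own to do.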
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class GetAzureEventHub, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final BlockingQueue<String> partitionIds = this.partitionNames;
    final String partitionId = partitionIds.poll();
    if (partitionId == null) {
        getLogger().debug("No partitions available");
        return;
    }
    final StopWatch stopWatch = new StopWatch(true);
    try {
        final Iterable<EventData> receivedEvents = receiveEvents(context, partitionId);
        if (receivedEvents == null) {
            return;
        }
        for (final EventData eventData : receivedEvents) {
            if (null != eventData) {
                final Map<String, String> attributes = new HashMap<>();
                FlowFile flowFile = session.create();
                final EventData.SystemProperties systemProperties = eventData.getSystemProperties();
                if (null != systemProperties) {
                    attributes.put("eventhub.enqueued.timestamp", String.valueOf(systemProperties.getEnqueuedTime()));
                    attributes.put("eventhub.offset", systemProperties.getOffset());
                    attributes.put("eventhub.sequence", String.valueOf(systemProperties.getSequenceNumber()));
                }
                attributes.put("eventhub.name", context.getProperty(EVENT_HUB_NAME).getValue());
                attributes.put("eventhub.partition", partitionId);
                flowFile = session.putAllAttributes(flowFile, attributes);
                flowFile = session.write(flowFile, out -> {
                    out.write(eventData.getBytes());
                });
                session.transfer(flowFile, REL_SUCCESS);
                final String namespace = context.getProperty(NAMESPACE).getValue();
                final String eventHubName = context.getProperty(EVENT_HUB_NAME).getValue();
                final String consumerGroup = context.getProperty(CONSUMER_GROUP).getValue();
                final String serviceBusEndPoint = context.getProperty(SERVICE_BUS_ENDPOINT).getValue();
                final String transitUri = "amqps://" + namespace + serviceBusEndPoint + "/" + eventHubName + "/ConsumerGroups/" + consumerGroup + "/Partitions/" + partitionId;
                session.getProvenanceReporter().receive(flowFile, transitUri, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
            }
        }
    } finally {
        partitionIds.offer(partitionId);
    }
}
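The poll/offer-in-finally pair gives each concurrent task an exclusive lease on one partition id and guarantees the id is returned even if receiveEvents throws. The same idiom in isolation:

// Lease one id from a shared pool; always return it in finally.
BlockingQueue<String> ids = new LinkedBlockingQueue<>(Arrays.asList("0", "1", "2"));
String id = ids.poll();    // null means every partition is currently leased
if (id != null) {
    try {
        // ... read from partition `id` ...
    } finally {
        ids.offer(id);     // return the lease even on failure
    }
}

Since the unbounded queue started with every partition id and each poll() frees a slot before the matching offer(), the offer always succeeds without blocking.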