use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
the class JoltTransformJSON method onTrigger.
@Override
public void onTrigger(final ProcessContext context, ProcessSession session) throws ProcessException {
    final FlowFile original = session.get();
    if (original == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    final StopWatch stopWatch = new StopWatch(true);
    final Object inputJson;
    try (final InputStream in = session.read(original)) {
        inputJson = JsonUtils.jsonToObject(in);
    } catch (final Exception e) {
        logger.error("Failed to transform {}; routing to failure", new Object[] { original, e });
        session.transfer(original, REL_FAILURE);
        return;
    }
    final String jsonString;
    final ClassLoader originalContextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        final JoltTransform transform = getTransform(context, original);
        if (customClassLoader != null) {
            Thread.currentThread().setContextClassLoader(customClassLoader);
        }
        final Object transformedJson = TransformUtils.transform(transform, inputJson);
        jsonString = JsonUtils.toJsonString(transformedJson);
    } catch (final Exception ex) {
        logger.error("Unable to transform {} due to {}", new Object[] { original, ex.toString(), ex });
        session.transfer(original, REL_FAILURE);
        return;
    } finally {
        if (customClassLoader != null && originalContextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(originalContextClassLoader);
        }
    }
    FlowFile transformed = session.write(original, new OutputStreamCallback() {
        @Override
        public void process(OutputStream out) throws IOException {
            out.write(jsonString.getBytes(DEFAULT_CHARSET));
        }
    });
    final String transformType = context.getProperty(JOLT_TRANSFORM).getValue();
    transformed = session.putAttribute(transformed, CoreAttributes.MIME_TYPE.key(), "application/json");
    session.transfer(transformed, REL_SUCCESS);
    session.getProvenanceReporter().modifyContent(transformed, "Modified With " + transformType, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
    logger.info("Transformed {}", new Object[] { original });
}
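A minimal test sketch of this transform path using NiFi's mock framework; the chain-style shift spec is illustrative, and the property and relationship names (JOLT_SPEC, REL_SUCCESS) are taken from the JoltTransformJSON processor rather than from the snippet above, so verify them against your NiFi version.

@Test
public void testTransformsValidJson() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(JoltTransformJSON.class);
    // a trivial chain spec that shifts field "a" to field "b"
    runner.setProperty(JoltTransformJSON.JOLT_SPEC, "[{\"operation\":\"shift\",\"spec\":{\"a\":\"b\"}}]");
    runner.enqueue("{\"a\":1}".getBytes(StandardCharsets.UTF_8));
    runner.run();
    // valid JSON plus a valid spec should land on REL_SUCCESS with the JSON MIME type
    runner.assertAllFlowFilesTransferred(JoltTransformJSON.REL_SUCCESS, 1);
    final MockFlowFile out = runner.getFlowFilesForRelationship(JoltTransformJSON.REL_SUCCESS).get(0);
    out.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/json");
}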
use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
the class ListDatabaseTables method onTrigger.
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    final String catalog = context.getProperty(CATALOG).getValue();
    final String schemaPattern = context.getProperty(SCHEMA_PATTERN).getValue();
    final String tableNamePattern = context.getProperty(TABLE_NAME_PATTERN).getValue();
    final String[] tableTypes = context.getProperty(TABLE_TYPES).isSet() ? context.getProperty(TABLE_TYPES).getValue().split("\\s*,\\s*") : null;
    final boolean includeCount = context.getProperty(INCLUDE_COUNT).asBoolean();
    final long refreshInterval = context.getProperty(REFRESH_INTERVAL).asTimePeriod(TimeUnit.MILLISECONDS);
    final StateManager stateManager = context.getStateManager();
    final StateMap stateMap;
    final Map<String, String> stateMapProperties;
    try {
        stateMap = stateManager.getState(Scope.CLUSTER);
        stateMapProperties = new HashMap<>(stateMap.toMap());
    } catch (IOException ioe) {
        throw new ProcessException(ioe);
    }
    try (final Connection con = dbcpService.getConnection()) {
        DatabaseMetaData dbMetaData = con.getMetaData();
        ResultSet rs = dbMetaData.getTables(catalog, schemaPattern, tableNamePattern, tableTypes);
        while (rs.next()) {
            final String tableCatalog = rs.getString(1);
            final String tableSchema = rs.getString(2);
            final String tableName = rs.getString(3);
            final String tableType = rs.getString(4);
            final String tableRemarks = rs.getString(5);
            // Build fully-qualified name
            String fqn = Stream.of(tableCatalog, tableSchema, tableName).filter(segment -> !StringUtils.isEmpty(segment)).collect(Collectors.joining("."));
            String lastTimestampForTable = stateMapProperties.get(fqn);
            boolean refreshTable = true;
            try {
                // Refresh state if the interval has elapsed
                long lastRefreshed = -1;
                final long currentTime = System.currentTimeMillis();
                if (!StringUtils.isEmpty(lastTimestampForTable)) {
                    lastRefreshed = Long.parseLong(lastTimestampForTable);
                }
                if (lastRefreshed == -1 || (refreshInterval > 0 && currentTime >= (lastRefreshed + refreshInterval))) {
                    // Stale (or never-listed) entry: remove it by its key so the table is re-listed below
                    stateMapProperties.remove(fqn);
                } else {
                    refreshTable = false;
                }
            } catch (final NumberFormatException nfe) {
                logger.error("Failed to retrieve observed last table fetches from the State Manager. Will not perform query until this is accomplished.", nfe);
                context.yield();
                return;
            }
            if (refreshTable) {
                FlowFile flowFile = session.create();
                logger.info("Found {}: {}", new Object[] { tableType, fqn });
                if (includeCount) {
                    try (Statement st = con.createStatement()) {
                        final String countQuery = "SELECT COUNT(1) FROM " + fqn;
                        logger.debug("Executing query: {}", new Object[] { countQuery });
                        ResultSet countResult = st.executeQuery(countQuery);
                        if (countResult.next()) {
                            flowFile = session.putAttribute(flowFile, DB_TABLE_COUNT, Long.toString(countResult.getLong(1)));
                        }
                    } catch (SQLException se) {
                        logger.error("Couldn't get row count for {}", new Object[] { fqn });
                        session.remove(flowFile);
                        continue;
                    }
                }
                if (tableCatalog != null) {
                    flowFile = session.putAttribute(flowFile, DB_TABLE_CATALOG, tableCatalog);
                }
                if (tableSchema != null) {
                    flowFile = session.putAttribute(flowFile, DB_TABLE_SCHEMA, tableSchema);
                }
                flowFile = session.putAttribute(flowFile, DB_TABLE_NAME, tableName);
                flowFile = session.putAttribute(flowFile, DB_TABLE_FULLNAME, fqn);
                flowFile = session.putAttribute(flowFile, DB_TABLE_TYPE, tableType);
                if (tableRemarks != null) {
                    flowFile = session.putAttribute(flowFile, DB_TABLE_REMARKS, tableRemarks);
                }
                String transitUri;
                try {
                    transitUri = dbMetaData.getURL();
                } catch (SQLException sqle) {
                    transitUri = "<unknown>";
                }
                session.getProvenanceReporter().receive(flowFile, transitUri);
                session.transfer(flowFile, REL_SUCCESS);
                stateMapProperties.put(fqn, Long.toString(System.currentTimeMillis()));
            }
        }
        // Update the timestamps for listed tables
        if (stateMap.getVersion() == -1) {
            stateManager.setState(stateMapProperties, Scope.CLUSTER);
        } else {
            stateManager.replace(stateMap, stateMapProperties, Scope.CLUSTER);
        }
    } catch (final SQLException | IOException e) {
        throw new ProcessException(e);
    }
}
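The fully-qualified-name construction above is worth reading in isolation; a minimal sketch of the same Stream idiom (buildFqn is a hypothetical helper, not part of the processor):

// null or empty segments (e.g. a database with no schema) are skipped,
// so the join never produces leading, trailing, or doubled dots
private static String buildFqn(final String catalog, final String schema, final String table) {
    return Stream.of(catalog, schema, table)
            .filter(segment -> !StringUtils.isEmpty(segment))
            .collect(Collectors.joining("."));
}
// buildFqn("MYCAT", null, "USERS") -> "MYCAT.USERS"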
use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
the class ListenSyslog method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // poll the queue with a small timeout to avoid unnecessarily yielding below
    RawSyslogEvent rawSyslogEvent = getMessage(true, true, session);
    // if nothing is available, return immediately; the short blocking poll above
    // provides throttling even when no data is available
    if (rawSyslogEvent == null) {
        return;
    }
    final int maxBatchSize = context.getProperty(MAX_BATCH_SIZE).asInteger();
    final String port = context.getProperty(PORT).evaluateAttributeExpressions().getValue();
    final String protocol = context.getProperty(PROTOCOL).getValue();
    final Map<String, String> defaultAttributes = new HashMap<>(4);
    defaultAttributes.put(SyslogAttributes.PROTOCOL.key(), protocol);
    defaultAttributes.put(SyslogAttributes.PORT.key(), port);
    defaultAttributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain");
    final int numAttributes = SyslogAttributes.values().length + 2;
    final boolean shouldParse = context.getProperty(PARSE_MESSAGES).asBoolean();
    final Map<String, FlowFile> flowFilePerSender = new HashMap<>();
    final SyslogParser parser = getParser();
    for (int i = 0; i < maxBatchSize; i++) {
        SyslogEvent event = null;
        // If this is our first iteration, we have already polled our queues. Otherwise, poll on each iteration.
        if (i > 0) {
            rawSyslogEvent = getMessage(true, false, session);
            if (rawSyslogEvent == null) {
                break;
            }
        }
        final String sender = rawSyslogEvent.getSender();
        FlowFile flowFile = flowFilePerSender.computeIfAbsent(sender, k -> session.create());
        if (shouldParse) {
            boolean valid = true;
            try {
                event = parser.parseEvent(rawSyslogEvent.getData(), sender);
            } catch (final ProcessException pe) {
                getLogger().warn("Failed to parse Syslog event; routing to invalid");
                valid = false;
            }
            // Write the raw message to a freshly created FlowFile rather than reusing 'flowFile',
            // because the 'flowFile' object may already have data written to it.
            if (!valid || event == null || !event.isValid()) {
                FlowFile invalidFlowFile = session.create();
                invalidFlowFile = session.putAllAttributes(invalidFlowFile, defaultAttributes);
                if (sender != null) {
                    invalidFlowFile = session.putAttribute(invalidFlowFile, SyslogAttributes.SENDER.key(), sender);
                }
                try {
                    final byte[] rawBytes = rawSyslogEvent.getData();
                    invalidFlowFile = session.write(invalidFlowFile, new OutputStreamCallback() {
                        @Override
                        public void process(final OutputStream out) throws IOException {
                            out.write(rawBytes);
                        }
                    });
                } catch (final Exception e) {
                    getLogger().error("Failed to write contents of Syslog message to FlowFile due to {}; will re-queue message and try again", e);
                    errorEvents.offer(rawSyslogEvent);
                    session.remove(invalidFlowFile);
                    break;
                }
                session.transfer(invalidFlowFile, REL_INVALID);
                break;
            }
            getLogger().trace(event.getFullMessage());
            final Map<String, String> attributes = new HashMap<>(numAttributes);
            attributes.put(SyslogAttributes.PRIORITY.key(), event.getPriority());
            attributes.put(SyslogAttributes.SEVERITY.key(), event.getSeverity());
            attributes.put(SyslogAttributes.FACILITY.key(), event.getFacility());
            attributes.put(SyslogAttributes.VERSION.key(), event.getVersion());
            attributes.put(SyslogAttributes.TIMESTAMP.key(), event.getTimeStamp());
            attributes.put(SyslogAttributes.HOSTNAME.key(), event.getHostName());
            attributes.put(SyslogAttributes.BODY.key(), event.getMsgBody());
            attributes.put(SyslogAttributes.VALID.key(), String.valueOf(event.isValid()));
            flowFile = session.putAllAttributes(flowFile, attributes);
        }
        // figure out if we should write the bytes from the raw event or parsed event
        final boolean writeDemarcator = (i > 0);
        try {
            // write the raw bytes of the message as the FlowFile content
            final byte[] rawMessage = (event == null) ? rawSyslogEvent.getData() : event.getRawMessage();
            flowFile = session.append(flowFile, new OutputStreamCallback() {
                @Override
                public void process(final OutputStream out) throws IOException {
                    if (writeDemarcator) {
                        out.write(messageDemarcatorBytes);
                    }
                    out.write(rawMessage);
                }
            });
        } catch (final Exception e) {
            getLogger().error("Failed to write contents of Syslog message to FlowFile due to {}; will re-queue message and try again", e);
            errorEvents.offer(rawSyslogEvent);
            break;
        }
        flowFilePerSender.put(sender, flowFile);
    }
    for (final Map.Entry<String, FlowFile> entry : flowFilePerSender.entrySet()) {
        final String sender = entry.getKey();
        FlowFile flowFile = entry.getValue();
        if (flowFile.getSize() == 0L) {
            session.remove(flowFile);
            getLogger().debug("No data written to FlowFile from Sender {}; removing FlowFile", new Object[] { sender });
            continue;
        }
        final Map<String, String> newAttributes = new HashMap<>(defaultAttributes.size() + 1);
        newAttributes.putAll(defaultAttributes);
        newAttributes.put(SyslogAttributes.SENDER.key(), sender);
        flowFile = session.putAllAttributes(flowFile, newAttributes);
        getLogger().debug("Transferring {} to success", new Object[] { flowFile });
        session.transfer(flowFile, REL_SUCCESS);
        session.adjustCounter("FlowFiles Transferred to Success", 1L, false);
        final String senderHost = sender.startsWith("/") && sender.length() > 1 ? sender.substring(1) : sender;
        final String transitUri = new StringBuilder().append(protocol.toLowerCase()).append("://").append(senderHost).append(":").append(port).toString();
        session.getProvenanceReporter().receive(flowFile, transitUri);
    }
}
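The per-sender batching hinges on Map.computeIfAbsent: every message from the same sender is appended to a single FlowFile per onTrigger invocation. A minimal generic sketch of that grouping idiom, with a hypothetical Message type standing in for RawSyslogEvent:

// hypothetical stand-in for RawSyslogEvent; only the grouping idiom matters here
interface Message {
    String getSender();
    byte[] getData();
}

static Map<String, List<byte[]>> groupBySender(final List<Message> polled) {
    final Map<String, List<byte[]>> bySender = new HashMap<>();
    for (final Message message : polled) {
        // one bucket per sender, created lazily on first sight, mirroring
        // flowFilePerSender.computeIfAbsent(sender, k -> session.create()) above
        bySender.computeIfAbsent(message.getSender(), sender -> new ArrayList<>()).add(message.getData());
    }
    return bySender;
}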
use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
the class LookupAttribute method onTrigger.
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final LookupService lookupService = context.getProperty(LOOKUP_SERVICE).asControllerService(LookupService.class);
    final boolean includeEmptyValues = context.getProperty(INCLUDE_EMPTY_VALUES).asBoolean();
    for (FlowFile flowFile : session.get(50)) {
        try {
            onTrigger(logger, lookupService, includeEmptyValues, flowFile, session);
        } catch (final IOException e) {
            throw new ProcessException(e.getMessage(), e);
        }
    }
}
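The shape here generalizes to any processor: pull a bounded batch with session.get(50), and rethrow checked IOExceptions as the unchecked ProcessException so the framework can roll back the session. A minimal sketch, where processOne is a hypothetical checked helper:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    // a bounded batch keeps one onTrigger call from monopolizing the thread
    for (final FlowFile flowFile : session.get(50)) {
        try {
            // hypothetical helper declared as "throws IOException"
            processOne(flowFile, session);
        } catch (final IOException e) {
            // ProcessException is unchecked; throwing it lets the framework roll back
            throw new ProcessException(e.getMessage(), e);
        }
    }
}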
use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
the class MergeRecord method binFlowFile.
private void binFlowFile(final ProcessContext context, final FlowFile flowFile, final ProcessSession session, final RecordBinManager binManager, final boolean block) {
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
    try (final InputStream in = session.read(flowFile);
            final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {
        final RecordSchema schema = reader.getSchema();
        final String groupId = getGroupId(context, flowFile, schema, session);
        getLogger().debug("Got Group ID {} for {}", new Object[] { groupId, flowFile });
        binManager.add(groupId, flowFile, reader, session, block);
    } catch (MalformedRecordException | IOException | SchemaNotFoundException e) {
        throw new ProcessException(e);
    }
}
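binFlowFile shows the standard record-reader idiom: open the FlowFile content and the RecordReader in a single try-with-resources so both close on every path, then collapse the three checked exception types into ProcessException. A hedged sketch of draining a reader with the same resource and error handling (the loop body is illustrative, not taken from MergeRecord):

try (final InputStream in = session.read(flowFile);
        final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {
    Record record;
    while ((record = reader.nextRecord()) != null) {
        getLogger().debug("Read record with schema {}", new Object[] { record.getSchema() });
    }
} catch (MalformedRecordException | IOException | SchemaNotFoundException e) {
    // wrap checked exceptions so callers only ever see the framework's unchecked type
    throw new ProcessException(e);
}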