Use of org.apache.nifi.flowfile.FlowFile in project kylo by Teradata:
the onTrigger method of the InitializeFeed class.
/* (non-Javadoc)
 * @see org.apache.nifi.processor.AbstractProcessor#onTrigger(org.apache.nifi.processor.ProcessContext, org.apache.nifi.processor.ProcessSession)
 */
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile inputFF = session.get();
    // Nothing queued for this processor; let the framework call us again later.
    if (inputFF == null) {
        return;
    }
    inputFF = initialize(context, session, inputFF);
    // When no initialization status has been recorded for this feed yet,
    // treat it as PENDING.
    InitializationStatus status = getMetadataRecorder()
            .getInitializationStatus(getFeedId(context, inputFF))
            .orElse(new InitializationStatus(State.PENDING));
    // Dispatch the flow file to the handler for the feed's current init state.
    switch (status.getState()) {
        case PENDING:
            pending(context, session, inputFF);
            break;
        case IN_PROGRESS:
            inProgress(context, session, inputFF);
            break;
        case FAILED:
            failed(context, session, inputFF, status.getTimestamp(), false);
            break;
        case REINITIALIZE:
            reinitialize(context, session, inputFF);
            break;
        case REINITIALIZE_FAILED:
            reinitializeFailed(context, session, inputFF, status.getTimestamp());
            break;
        case SUCCESS:
            success(context, session, inputFF);
            break;
    }
}
Use of org.apache.nifi.flowfile.FlowFile in project kylo by Teradata:
the onTrigger method of the BeginFeed class.
/* (non-Javadoc)
 * @see org.apache.nifi.processor.AbstractProcessor#onTrigger(org.apache.nifi.processor.ProcessContext, org.apache.nifi.processor.ProcessSession)
 */
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile current = produceFlowFile(session);
    // Drain every available flow file, stamping each with the feed id and the
    // operation start timestamp before routing it to SUCCESS.
    while (current != null) {
        // TODO Remove when we do more intelligent handling when the feed and datasource info has been
        // removed from the metadata store.
        Feed feed = ensureFeedMetadata(context);
        current = session.putAttribute(current, MetadataConstants.FEED_ID_PROP, feed.getId().toString());
        current = session.putAttribute(current, OPERATON_START_PROP, Formatters.print(new DateTime()));
        session.transfer(current, SUCCESS);
        FlowFile next = produceFlowFile(session);
        // Yield only when we processed at least one file and the source then
        // ran dry; a first-call miss exits the loop without yielding, exactly
        // as before.
        if (next == null) {
            context.yield();
        }
        current = next;
    }
}
Use of org.apache.nifi.flowfile.FlowFile in project kylo by Teradata:
the onTrigger method of the GetFeedMetadata class.
/**
 * Resolves the category/feed names from the incoming flow file's attributes,
 * looks up the feed's metadata JSON through the cache, attaches it as the
 * {@code feedJson} attribute, and routes the flow file to {@code REL_SUCCESS}.
 *
 * @throws IllegalStateException if the metadata lookup fails or returns no JSON
 */
@Override
public void onTrigger(@Nonnull final ProcessContext context, @Nonnull final ProcessSession session) throws ProcessException {
    final FlowFile inputFF = session.get();
    if (inputFF == null) {
        return;
    }
    final String category = context.getProperty(CATEGORY_NAME).evaluateAttributeExpressions(inputFF).getValue();
    final String feed = context.getProperty(FEED_NAME).evaluateAttributeExpressions(inputFF).getValue();
    getLog().debug("Triggered for {}.{}", new Object[] { category, feed });
    final String feedJson;
    try {
        // Cache lookup; loads from the metadata service on a miss.
        feedJson = cachedFeed.get(new FeedKey(category, feed));
    } catch (Exception e) {
        getLog().error("Failure retrieving metadata for feed: {}.{}", new Object[] { category, feed }, e);
        throw new IllegalStateException("Failed to retrieve feed metadata", e);
    }
    if (feedJson == null) {
        throw new IllegalStateException(String.format("Failed to retrieve feed metadata for feed %s:%s", category, feed));
    }
    // Attach the metadata JSON to the flow file as an attribute.
    final Map<String, String> attributes = Maps.newHashMap();
    attributes.put("feedJson", feedJson);
    final FlowFile outputFF = session.putAllAttributes(inputFF, attributes);
    getLog().trace("Transferring flow file to Success relationship");
    session.transfer(outputFF, REL_SUCCESS);
}
Use of org.apache.nifi.flowfile.FlowFile in project kylo by Teradata:
the onTrigger method of the TriggerCleanup class.
/**
 * Consumes one cleanup-trigger event from the queue (if any), verifies that the
 * feed exists and has cleanup enabled, then emits a new flow file carrying the
 * feed's properties plus {@code category}/{@code feed} attributes to
 * {@code REL_SUCCESS}.
 *
 * @throws IllegalStateException if the feed id or feed properties cannot be retrieved
 */
@Override
public void onTrigger(@Nonnull final ProcessContext context, @Nonnull final ProcessSession session) throws ProcessException {
    getLog().trace("Triggered for feed {}.{}", new Object[] { category, feed });
    // Look for an event to process
    FeedCleanupTriggerEvent event = queue.poll();
    if (event == null) {
        getLog().trace("Triggered, but no message in queue");
        context.yield();
        // nothing to do
        return;
    }
    String feedId;
    try {
        feedId = getMetadataService(context).getProvider().getFeedId(category, feed);
        getLog().debug("Triggered for feed " + feedId);
    } catch (Exception e) {
        getLog().error("Failure retrieving metadata for feed: {}.{}", new Object[] { category, feed }, e);
        throw new IllegalStateException("Failed to retrieve feed metadata", e);
    }
    // Verify feed properties
    Properties properties = (feedId != null) ? getMetadataService(context).getProvider().getFeedProperties(feedId) : null;
    getLog().debug("Feed properties " + properties);
    if (properties == null) {
        throw new IllegalStateException("Failed to fetch properties for feed: " + feedId);
    }
    // getProperty returns null for a missing key and "true".equals(null) is
    // false, so the extra containsKey check the original carried was redundant.
    if (!"true".equals(properties.getProperty(FeedProperties.CLEANUP_ENABLED))) {
        getLog().info("Ignoring cleanup event because deleteEnabled is false for feed: {}", new Object[] { feedId });
        context.yield();
        // ignore events if deleteEnabled is not true
        return;
    }
    // Create attributes for FlowFile; every feed property is copied verbatim.
    Map<String, String> attributes = Maps.newHashMap();
    for (Map.Entry<Object, Object> property : properties.entrySet()) {
        attributes.put((String) property.getKey(), (String) property.getValue());
    }
    attributes.put("category", context.getProperty(CATEGORY_NAME).getValue());
    attributes.put("feed", context.getProperty(FEED_NAME).getValue());
    // Create a FlowFile from the event
    FlowFile flowFile = session.create();
    flowFile = session.putAllAttributes(flowFile, attributes);
    getLog().debug("Transferring flow file to Success relationship");
    session.transfer(flowFile, REL_SUCCESS);
}
Use of org.apache.nifi.flowfile.FlowFile in project kylo by Teradata:
the createFlowFile method of the TriggerFeed class.
/**
 * Builds the flow file to emit for a precondition-trigger event: when the feed
 * has dependent-feed result deltas, a new flow file is created whose content is
 * the delta JSON (reduced to the configured execution-context keys); otherwise
 * falls back to {@code session.get()}, which may return {@code null}.
 *
 * NOTE(review): {@code event} is currently unused here — presumably the delta
 * lookup supersedes it; confirm before removing the parameter.
 */
private FlowFile createFlowFile(ProcessContext context, ProcessSession session, FeedPreconditionTriggerEvent event) {
    final String feedId = getFeedId(context);
    getLog().info("createFlowFile for Feed {}", new Object[] { feedId });
    FlowFile file = null;
    if (feedId != null) {
        FeedDependencyDeltaResults deltas = getProviderService(context).getProvider().getFeedDependentResultDeltas(feedId);
        // Only produce a fresh flow file when there is at least one dependent feed.
        if (deltas != null && deltas.getDependentFeedNames() != null && !deltas.getDependentFeedNames().isEmpty()) {
            file = session.create();
            try {
                // Trim each dependent feed's execution context down to the
                // keys configured on this processor before serializing.
                List<String> keysToMatch = getMatchingExecutionContextKeys(context);
                getLog().info("Reducing the Execution Context to match {} keys ", new Object[] { StringUtils.join((keysToMatch)) });
                deltas.reduceExecutionContextToMatchingKeys(keysToMatch);
                String value = MAPPER.writeValueAsString(deltas);
                // add the json as an attr value?
                // file = session.putAttribute(file, ComponentAttributes.FEED_DEPENDENT_RESULT_DELTAS.key(), value);
                // write the json back to the flow file content
                file = session.write(file, new OutputStreamCallback() {

                    @Override
                    public void process(OutputStream outputStream) throws IOException {
                        outputStream.write(value.getBytes(StandardCharsets.UTF_8));
                    }
                });
            } catch (JsonProcessingException e) {
                // NOTE(review): on serialization failure the already-created
                // flow file is returned with EMPTY content — downstream must
                // tolerate that, per the TODO below.
                getLog().warn("Failed to serialize feed dependency result deltas", e);
                // TODO Swallow the exception and produce the flow file anyway?
            }
        } else {
            getLog().debug("Found no dependent feeds");
        }
    }
    // No deltas (or no feed id): fall back to any queued incoming flow file.
    if (file == null) {
        file = session.get();
    }
    return file;
}
Aggregations