Example 1 with FeedsForDataHistoryReindex

Use of com.thinkbiganalytics.metadata.rest.model.feed.reindex.FeedsForDataHistoryReindex in project kylo by Teradata.

The example below is the onTrigger method of the GetFeedsHistoryReindex class.

@Override
public void onTrigger(ProcessContext context, ProcessSession session) {
    final ComponentLog logger = getLog();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        flowFile = session.create();
    }
    logger.debug("Checking for feeds requiring reindexing historical data");
    try {
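        // Look up the Kylo metadata provider service from the processor context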
        MetadataProviderService metadataProviderService = getMetadataService(context);
        if ((metadataProviderService != null) && (metadataProviderService.getProvider() != null)) {
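            // Record the UTC time of this check for stamping on the outgoing flow files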
            String dateTimeOfCheck = String.valueOf(DateTime.now(DateTimeZone.UTC));
            FeedsForDataHistoryReindex feedsForHistoryReindexing = getMetadataService(context).getProvider().getFeedsForHistoryReindexing();
            if (feedsForHistoryReindexing != null) {
                logger.info("Found {} feeds requiring reindexing historical data", new Object[] { feedsForHistoryReindexing.getFeeds().size() });
                if (feedsForHistoryReindexing.getFeedCount() > 0) {
                    for (Feed feedForHistoryReindexing : feedsForHistoryReindexing.getFeeds()) {
                        Map<String, String> attributes = new HashMap<>();
                        attributes.put(FEED_ID_FOR_HISTORY_REINDEX_KEY, feedForHistoryReindexing.getId());
                        attributes.put(FEED_SYSTEM_NAME_FOR_HISTORY_REINDEX_KEY, feedForHistoryReindexing.getSystemName());
                        attributes.put(FEED_CATEGORY_SYSTEM_NAME_FOR_HISTORY_REINDEX_KEY, feedForHistoryReindexing.getCategory().getSystemName());
                        attributes.put(FEED_STATUS_FOR_HISTORY_REINDEX_KEY, feedForHistoryReindexing.getCurrentHistoryReindexingStatus().getHistoryReindexingState().toString());
                        attributes.put(FEED_LAST_MODIFIED_UTC_FOR_HISTORY_REINDEX_KEY, feedForHistoryReindexing.getCurrentHistoryReindexingStatus().getLastModifiedTimestamp().toString());
                        attributes.put(FEEDS_TOTAL_COUNT_FOR_HISTORY_REINDEX_KEY, String.valueOf(feedsForHistoryReindexing.getFeedCount()));
                        attributes.put(FEEDS_TOTAL_IDS_FOR_HISTORY_REINDEX_KEY, feedsForHistoryReindexing.getFeedIds().toString());
                        attributes.put(FEEDS_CHECK_TIME_UTC_FOR_HISTORY_REINDEX_KEY, dateTimeOfCheck);
                        // all attributes from parent flow file copied except uuid, creates a FORK event
                        FlowFile feedFlowFile = session.create(flowFile);
                        feedFlowFile = session.putAllAttributes(feedFlowFile, attributes);
                        session.transfer(feedFlowFile, REL_FOUND);
                        logger.info("Flow file created for reindexing feed's historical data: feed id {}, category name {}, feed name {}", new Object[] { FEED_ID_FOR_HISTORY_REINDEX_KEY, FEED_CATEGORY_SYSTEM_NAME_FOR_HISTORY_REINDEX_KEY, FEED_SYSTEM_NAME_FOR_HISTORY_REINDEX_KEY });
                    }
                    flowFile = session.putAttribute(flowFile, FEEDS_TOTAL_COUNT_FOR_HISTORY_REINDEX_KEY, String.valueOf(feedsForHistoryReindexing.getFeedCount()));
                    flowFile = session.putAttribute(flowFile, FEEDS_CHECK_TIME_UTC_FOR_HISTORY_REINDEX_KEY, dateTimeOfCheck);
                    // only for found case
                    session.transfer(flowFile, REL_ORIGINAL);
                } else {
                    // this will always be 0 here
                    flowFile = session.putAttribute(flowFile, FEEDS_TOTAL_COUNT_FOR_HISTORY_REINDEX_KEY, String.valueOf(feedsForHistoryReindexing.getFeedCount()));
                    // this will always be empty list here
                    flowFile = session.putAttribute(flowFile, FEEDS_TOTAL_IDS_FOR_HISTORY_REINDEX_KEY, feedsForHistoryReindexing.getFeedIds().toString());
                    flowFile = session.putAttribute(flowFile, FEEDS_CHECK_TIME_UTC_FOR_HISTORY_REINDEX_KEY, dateTimeOfCheck);
                    session.transfer(flowFile, REL_NOT_FOUND);
                }
            }
        } else {
            logger.error("Error checking for feeds requiring reindexing historical data. Check if Kylo services is running, and accessible from NiFi.");
            session.transfer(flowFile, REL_FAILURE);
        }
    } catch (Exception e) {
        logger.error("An exception was thrown during check for feeds requiring reindexing historical data: {}", new Object[] { e });
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) HashMap(java.util.HashMap) FeedsForDataHistoryReindex(com.thinkbiganalytics.metadata.rest.model.feed.reindex.FeedsForDataHistoryReindex) ComponentLog(org.apache.nifi.logging.ComponentLog) MetadataProviderService(com.thinkbiganalytics.nifi.core.api.metadata.MetadataProviderService) Feed(com.thinkbiganalytics.metadata.rest.model.feed.Feed)
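
The onTrigger code above only touches a small surface of the FeedsForDataHistoryReindex model: getFeeds(), getFeedCount() and getFeedIds() on the container, plus getId(), getSystemName(), getCategory() and getCurrentHistoryReindexingStatus() on each Feed. The sketch below is not the Kylo class; it is a minimal, hypothetical stand-in (FeedStub, FeedsForDataHistoryReindexSketch are invented names) that reproduces just the container-level accessors the processor relies on, which can be handy when stubbing this call chain in a test.

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for the per-feed accessors used above (id and system name only).
class FeedStub {
    private final String id;
    private final String systemName;

    FeedStub(String id, String systemName) {
        this.id = id;
        this.systemName = systemName;
    }

    String getId() {
        return id;
    }

    String getSystemName() {
        return systemName;
    }
}

// Minimal sketch of the container surface that onTrigger relies on:
// the list of feeds, a derived feed count, and a derived list of feed ids.
class FeedsForDataHistoryReindexSketch {
    private final List<FeedStub> feeds;

    FeedsForDataHistoryReindexSketch(List<FeedStub> feeds) {
        this.feeds = feeds;
    }

    List<FeedStub> getFeeds() {
        return feeds;
    }

    int getFeedCount() {
        return feeds.size();
    }

    List<String> getFeedIds() {
        List<String> ids = new ArrayList<>(feeds.size());
        for (FeedStub feed : feeds) {
            ids.add(feed.getId());
        }
        return ids;
    }
}

The per-feed category and reindexing-status accessors used when building the attribute map are omitted from the sketch for brevity.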

Aggregations

Feed (com.thinkbiganalytics.metadata.rest.model.feed.Feed) 1
FeedsForDataHistoryReindex (com.thinkbiganalytics.metadata.rest.model.feed.reindex.FeedsForDataHistoryReindex) 1
MetadataProviderService (com.thinkbiganalytics.nifi.core.api.metadata.MetadataProviderService) 1
HashMap (java.util.HashMap) 1
FlowFile (org.apache.nifi.flowfile.FlowFile) 1
ComponentLog (org.apache.nifi.logging.ComponentLog) 1