Use of com.thinkbiganalytics.nifi.core.api.metadata.MetadataProvider in project kylo by Teradata.

The class TerminateHiveTableFeed, method createDestinationDatasource:

@Override
protected Datasource createDestinationDatasource(ProcessContext context, String datasetName, String descr) {
    MetadataProvider provider = getProviderService(context).getProvider();

    // Resolve the target Hive database and table from the processor configuration.
    String databaseName = context.getProperty(HiveTableProperties.DATABASE_NAME).getValue();
    String tableName = context.getProperty(HiveTableProperties.TABLE_NAME).getValue();

    // Get-or-create the Hive table datasource; the description argument is left empty.
    return provider.ensureHiveTableDatasource(datasetName, "", databaseName, tableName);
}
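The ensure* naming suggests a get-or-create call: it returns the Hive table datasource if one is already registered, otherwise it creates one. A minimal sketch of invoking it directly, assuming the same provider wiring as above (the dataset, database, and table names here are hypothetical):

// Sketch only: provider obtained as in the snippet above.
MetadataProvider provider = getProviderService(context).getProvider();

// Hypothetical names; the second argument is the description, which the
// Kylo code above leaves empty.
Datasource ds = provider.ensureHiveTableDatasource(
        "flights-dest",    // dataset name
        "",                // description
        "analytics_db",    // Hive database
        "flights");        // Hive table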
Use of com.thinkbiganalytics.nifi.core.api.metadata.MetadataProvider in project kylo by Teradata.

The class TerminateHiveTableFeed, method completeOperation:

@Override
protected DataOperation completeOperation(ProcessContext context, FlowFile flowFile, Datasource dataset, DataOperation op, DataOperation.State state) {
    MetadataProvider provider = getProviderService(context).getProvider();

    if (state == State.SUCCESS) {
        // On success, build a change set from the Hive table datasource and
        // complete the operation with it.
        HiveTableDatasource hds = (HiveTableDatasource) dataset;
        Dataset changeSet = provider.createDataset(hds, null);
        return provider.completeOperation(op.getId(), "", changeSet);
    } else {
        // Otherwise, complete the operation with the terminal state only.
        return provider.completeOperation(op.getId(), "", state);
    }
}
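Note that the cast to HiveTableDatasource is unchecked and will throw a ClassCastException if the feed was wired to a different datasource type. A defensive variant, as a sketch rather than Kylo's code:

// Sketch, not Kylo's code: guard the cast before building the change set.
if (state == DataOperation.State.SUCCESS && dataset instanceof HiveTableDatasource) {
    Dataset changeSet = provider.createDataset((HiveTableDatasource) dataset, null);
    return provider.completeOperation(op.getId(), "", changeSet);
}
return provider.completeOperation(op.getId(), "", state);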
Use of com.thinkbiganalytics.nifi.core.api.metadata.MetadataProvider in project kylo by Teradata.

The class AbstractTerminateFeed, method ensureFeedDestination:

protected FeedDestination ensureFeedDestination(ProcessContext context, FlowFile flowFile, Datasource destDatasource) {
    MetadataProvider provider = getProviderService(context).getProvider();

    // The feed id must have been stamped onto the FlowFile upstream.
    String feedId = flowFile.getAttribute(FEED_ID_PROP);

    if (feedId != null) {
        // Get-or-create the destination on the feed, then return it.
        Feed feed = provider.ensureFeedDestination(feedId, destDatasource.getId());
        return feed.getDestination(destDatasource.getId());
    } else {
        throw new ProcessException("Feed ID property missing from flow file (" + FEED_ID_PROP + ")");
    }
}
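This only works if an upstream processor has stamped the feed id attribute onto the FlowFile. A sketch of what that upstream step could look like, using the standard NiFi session API (the origin of the feed id is hypothetical here; FEED_ID_PROP is the attribute key used above, and getFeedId appears in PutFeedMetadata below):

// Sketch, not Kylo's code: an upstream processor stamps the feed id so
// that ensureFeedDestination can find it later.
String feedId = metadataProvider.getFeedId(category, feed);
flowFile = session.putAttribute(flowFile, FEED_ID_PROP, feedId);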
Use of com.thinkbiganalytics.nifi.core.api.metadata.MetadataProvider in project kylo by Teradata.

The class PutFeedMetadata, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final ComponentLog logger = getLog();
    FlowFile flowFile = null;
    try {
        if (context.hasIncomingConnection()) {
            flowFile = session.get();
            // This processor should only run when a FlowFile is available.
            if (flowFile == null && context.hasNonLoopConnection()) {
                return;
            }
        }
        final FlowFile incoming = flowFile;

        // Resolve the feed coordinates from the expression-language enabled properties.
        String category = context.getProperty(CATEGORY_NAME).evaluateAttributeExpressions(flowFile).getValue();
        String feed = context.getProperty(FEED_NAME).evaluateAttributeExpressions(flowFile).getValue();
        String namespace = context.getProperty(NAMESPACE).evaluateAttributeExpressions(flowFile).getValue();
        logger.debug("The category is: " + category + " and the feed is: " + feed);

        MetadataProvider metadataProvider = getMetadataService(context).getProvider();

        // Skip the three required properties and forward the remaining (dynamic)
        // properties to the metadata server under a namespaced key.
        Map<PropertyDescriptor, String> properties = context.getProperties();
        Set<PropertyDescriptor> propertyKeys = properties.keySet();
        Properties metadataProperties = new Properties();
        for (PropertyDescriptor property : propertyKeys) {
            String propertyName = property.getName();
            String value = context.getProperty(propertyName).evaluateAttributeExpressions(flowFile).getValue();
            if (!PROPERTY_LIST_TO_IGNORE.contains(propertyName)) {
                metadataProperties.setProperty(METADATA_FIELD_PREFIX + ":" + namespace + ":" + propertyName, value);
            }
        }

        // Look up the feed id by category and feed name, then push the properties.
        String feedId = metadataProvider.getFeedId(category, feed);
        metadataProvider.updateFeedProperties(feedId, metadataProperties);
        session.transfer(flowFile, REL_SUCCESS);
    } catch (Exception e) {
        logger.error("Error processing custom feed metadata", e);
        session.transfer(flowFile, REL_FAILURE);
    }
}
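Every dynamic property therefore lands in the metadata store under a three-part key. A sketch of the resulting shape, with hypothetical property names (METADATA_FIELD_PREFIX, the key format, and updateFeedProperties are taken from the code above):

// Sketch: a dynamic property "owner" under namespace "registration"
// becomes METADATA_FIELD_PREFIX + ":registration:owner".
Properties metadataProperties = new Properties();
metadataProperties.setProperty(METADATA_FIELD_PREFIX + ":registration:owner", "data-eng");
metadataProvider.updateFeedProperties(feedId, metadataProperties);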
Use of com.thinkbiganalytics.nifi.core.api.metadata.MetadataProvider in project kylo by Teradata.

The class TerminateDirectoryFeed, method completeOperation:

@Override
protected DataOperation completeOperation(ProcessContext context, FlowFile flowFile, Datasource datasource, DataOperation op, DataOperation.State state) {
    MetadataProvider provider = getProviderService(context).getProvider();
    DirectoryDatasource dds = (DirectoryDatasource) datasource;

    if (state == State.SUCCESS) {
        ArrayList<Path> paths = new ArrayList<>();
        // TODO Extract file paths from flow file

        // On success, build a change set from the directory datasource and the
        // (currently empty) path list, then complete the operation with it.
        Dataset dataset = provider.createDataset(dds, paths);
        return provider.completeOperation(op.getId(), "", dataset);
    } else {
        return provider.completeOperation(op.getId(), "", state);
    }
}
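The TODO above means the change set is currently created with an empty path list. A hypothetical sketch of what the extraction could look like, reading a comma-separated attribute off the FlowFile (the attribute name and format are invented here, and java.nio.file.Path is assumed for the Path type; only createDataset(dds, paths) comes from the code above):

// Sketch, not Kylo's code: fill the path list from a FlowFile attribute.
ArrayList<Path> paths = new ArrayList<>();
String pathAttr = flowFile.getAttribute("source.file.paths");  // hypothetical attribute
if (pathAttr != null) {
    for (String p : pathAttr.split(",")) {
        paths.add(Paths.get(p.trim()));  // assumes java.nio.file.Paths
    }
}
Dataset dataset = provider.createDataset(dds, paths);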