use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
the class FeedManagerFeedTest method testFeedDatasource.
@Test
public void testFeedDatasource() {
String categorySystemName = "my_category";
String feedName = "my_feed";
String templateName = "my_template";
String description = "my feed description";
setupFeedAndTemplate(categorySystemName, feedName, templateName);
metadata.commit(() -> {
Feed feed = feedTestUtil.findFeed(categorySystemName, feedName);
Set<Datasource.ID> sources = new HashSet<>();
Set<Datasource.ID> destinations = new HashSet<>();
// Add Table Dependencies
String uniqueName = FeedNameUtil.fullName(categorySystemName, feedName);
DerivedDatasource srcDatasource = datasourceProvider.ensureDatasource(uniqueName, feed.getDescription(), DerivedDatasource.class);
sources.add(srcDatasource.getId());
DerivedDatasource destDatasource = datasourceProvider.ensureDatasource("destination", feed.getDescription(), DerivedDatasource.class);
destinations.add(destDatasource.getId());
sources.forEach(sourceId -> feedProvider.ensureFeedSource(feed.getId(), sourceId));
destinations.forEach(destinationId -> feedProvider.ensureFeedDestination(feed.getId(), destinationId));
}, MetadataAccess.SERVICE);
// verify the source and destination were created
metadata.read(() -> {
Feed feed = feedTestUtil.findFeed(categorySystemName, feedName);
Assert.assertNotNull(feed.getSources());
Assert.assertEquals("Feed Sources should be 1", 1, feed.getSources().size());
Assert.assertNotNull(feed.getDestinations());
Assert.assertEquals("Feed Destinations should be 1", 1, feed.getDestinations().size());
FeedDestination feedDestination = feed.getDestinations().get(0);
Datasource ds = feedDestination.getDatasource();
Assert.assertTrue("Datasource was not the expected DerivedDatasource", ds instanceof DerivedDatasource);
}, MetadataAccess.SERVICE);
}
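The ensureDatasource call above is get-or-create: the system name acts as the identity, so repeated calls resolve to the same DerivedDatasource instead of creating duplicates. A minimal sketch of that idempotence, reusing the identifiers from the test above (the wiring of datasourceProvider is assumed):
// Sketch: "ensure" semantics — the second call resolves to the existing datasource.
DerivedDatasource first = datasourceProvider.ensureDatasource(uniqueName, "a derived datasource", DerivedDatasource.class);
DerivedDatasource second = datasourceProvider.ensureDatasource(uniqueName, "a derived datasource", DerivedDatasource.class);
Assert.assertEquals("ensureDatasource should be idempotent for the same system name", first.getId(), second.getId());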
use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
the class InMemoryFeedProvider method ensureFeed.
@Override
public Feed ensureFeed(String categorySystemName, String name, String descr, ID srcId, ID destId) {
Datasource sds = this.datasetProvider.getDatasource(srcId);
Datasource dds = this.datasetProvider.getDatasource(destId);
if (sds == null) {
throw new FeedCreateException("A dataset with the given ID does not exists: " + srcId);
}
if (dds == null) {
throw new FeedCreateException("A dataset with the given ID does not exists: " + destId);
}
BaseFeed feed = (BaseFeed) ensureFeed(categorySystemName, name, descr);
ensureFeedSource(feed, sds, null);
ensureFeedDestination(feed, dds);
return feed;
}
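A hypothetical caller of this overload (the names are illustrative, not from the project) would resolve or create both datasources first, since a missing ID fails fast with FeedCreateException; the ensureDatasource(name, description, type) signature is the one used in the test above:
// Sketch: create source and destination datasources, then wire them into one feed.
Datasource src = datasetProvider.ensureDatasource("source_table", "source description", DerivedDatasource.class);
Datasource dest = datasetProvider.ensureDatasource("destination_table", "destination description", DerivedDatasource.class);
Feed feed = feedProvider.ensureFeed("my_category", "my_feed", "my feed description", src.getId(), dest.getId());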
use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
the class InMemoryFeedProvider method ensureFeedSource.
@Override
public FeedSource ensureFeedSource(Feed.ID feedId, Datasource.ID dsId, ServiceLevelAgreement.ID slaId) {
BaseFeed feed = (BaseFeed) this.feeds.get(feedId);
Datasource ds = this.datasetProvider.getDatasource(dsId);
if (feed == null) {
throw new FeedCreateException("A feed with the given ID does not exists: " + feedId);
}
if (ds == null) {
throw new FeedCreateException("A dataset with the given ID does not exists: " + dsId);
}
return ensureFeedSource(feed, ds, slaId);
}
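Note that ensureFeed above attaches its source with a null SLA (ensureFeedSource(feed, sds, null)), so the agreement is optional in this overload. Both call shapes look like the following sketch (the IDs are assumed to resolve):
// attach a source governed by a service level agreement
FeedSource governed = feedProvider.ensureFeedSource(feedId, datasourceId, slaId);
// attach a plain source — the path ensureFeed takes internally
FeedSource plain = feedProvider.ensureFeedSource(feedId, datasourceId, null);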
use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
the class DefaultFeedManagerFeedService method saveFeed.
private void saveFeed(final FeedMetadata feed) {
metadataAccess.commit(() -> {
Stopwatch stopwatch = Stopwatch.createStarted();
List<? extends HadoopSecurityGroup> previousSavedSecurityGroups = null;
// Store the old security groups before saving because we need to compare afterward
if (!feed.isNew()) {
Feed previousStateBeforeSaving = feedProvider.findById(feedProvider.resolveId(feed.getId()));
previousSavedSecurityGroups = previousStateBeforeSaving.getSecurityGroups();
}
// if this is the first time saving this feed, create a new one
Feed domainFeed = feedModelTransform.feedToDomain(feed);
if (domainFeed.getState() == null) {
domainFeed.setState(Feed.State.ENABLED);
}
stopwatch.stop();
log.debug("Time to transform the feed to a domain object for saving: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
stopwatch.reset();
// initially save the feed
if (feed.isNew()) {
stopwatch.start();
domainFeed = feedProvider.update(domainFeed);
stopwatch.stop();
log.debug("Time to save the New feed: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
stopwatch.reset();
}
final String domainId = domainFeed.getId().toString();
final String feedName = FeedNameUtil.fullName(domainFeed.getCategory().getSystemName(), domainFeed.getName());
// Build preconditions
stopwatch.start();
assignFeedDependencies(feed, domainFeed);
stopwatch.stop();
log.debug("Time to assignFeedDependencies: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
stopwatch.reset();
// Assign the datasources
stopwatch.start();
assignFeedDatasources(feed, domainFeed);
stopwatch.stop();
log.debug("Time to assignFeedDatasources: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
stopwatch.reset();
stopwatch.start();
boolean isStream = feed.getRegisteredTemplate() != null && feed.getRegisteredTemplate().isStream();
Long timeBetweenBatchJobs = feed.getRegisteredTemplate() != null ? feed.getRegisteredTemplate().getTimeBetweenStartingBatchJobs() : 0L;
// sync the feed information to ops manager
metadataAccess.commit(() -> opsManagerFeedProvider.save(opsManagerFeedProvider.resolveId(domainId), feedName, isStream, timeBetweenBatchJobs));
stopwatch.stop();
log.debug("Time to sync feed data with Operations Manager: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
stopwatch.reset();
// Update hadoop security group polices if the groups changed
if (!feed.isNew() && !ListUtils.isEqualList(previousSavedSecurityGroups, domainFeed.getSecurityGroups())) {
stopwatch.start();
List<? extends HadoopSecurityGroup> securityGroups = domainFeed.getSecurityGroups();
List<String> securityGroupNames = securityGroups.stream().map(HadoopSecurityGroup::getName).collect(Collectors.toList());
hadoopAuthorizationService.updateSecurityGroupsForAllPolicies(feed.getSystemCategoryName(), feed.getSystemFeedName(), securityGroupNames, domainFeed.getProperties());
stopwatch.stop();
log.debug("Time to update hadoop security groups: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
stopwatch.reset();
}
// Update Hive metastore
stopwatch.start();
final boolean hasHiveDestination = domainFeed.getDestinations().stream()
    .map(FeedDestination::getDatasource)
    .filter(DerivedDatasource.class::isInstance)
    .map(DerivedDatasource.class::cast)
    .anyMatch(datasource -> "HiveDatasource".equals(datasource.getDatasourceType()));
if (hasHiveDestination) {
try {
feedHiveTableService.updateColumnDescriptions(feed);
} catch (final DataAccessException e) {
log.warn("Failed to update column descriptions for feed: {}", feed.getCategoryAndFeedDisplayName(), e);
}
}
stopwatch.stop();
log.debug("Time to update hive metastore: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
stopwatch.reset();
// Update Kylo metastore
stopwatch.start();
domainFeed = feedProvider.update(domainFeed);
stopwatch.stop();
log.debug("Time to call feedProvider.update: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
stopwatch.reset();
}, (e) -> {
if (feed.isNew() && StringUtils.isNotBlank(feed.getId())) {
// Roll back the ops manager insert if the feed was newly created
metadataAccess.commit(() -> {
opsManagerFeedProvider.delete(opsManagerFeedProvider.resolveId(feed.getId()));
});
}
});
}
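The error handler passed as the second argument to metadataAccess.commit acts as a compensating action: the ops-manager record is written in its own nested commit, so it survives a failure of the outer transaction and must be deleted by hand. Distilled to its shape, reusing only identifiers visible in the method above (a sketch, not the full method):
metadataAccess.commit(() -> {
    // ... transform, save, and update the domain feed ...
    // nested commit: the ops manager copy is persisted independently of the outer transaction
    metadataAccess.commit(() -> opsManagerFeedProvider.save(opsManagerFeedProvider.resolveId(domainId), feedName, isStream, timeBetweenBatchJobs));
}, (e) -> {
    // compensate on failure: remove the orphaned ops manager record for a new feed
    if (feed.isNew() && StringUtils.isNotBlank(feed.getId())) {
        metadataAccess.commit(() -> opsManagerFeedProvider.delete(opsManagerFeedProvider.resolveId(feed.getId())));
    }
});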
use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
the class DerivedDatasourceFactory method ensureDataTransformationSourceDatasources.
/**
* Builds the list of data sources for the specified data transformation feed.
*
* @param feed the feed
* @return the list of data sources
* @throws NullPointerException if the feed has no data transformation
*/
@Nonnull
private Set<Datasource.ID> ensureDataTransformationSourceDatasources(@Nonnull final FeedMetadata feed) {
final Set<Datasource.ID> datasources = new HashSet<>();
// Extract nodes in chart view model
@SuppressWarnings("unchecked") final Stream<Map<String, Object>> nodes = Optional.ofNullable(feed.getDataTransformation().getChartViewModel())
    .map(model -> (List<Map<String, Object>>) model.get("nodes"))
    .map(Collection::stream)
    .orElse(Stream.empty());
// Create a data source for each node
final DatasourceDefinition hiveDefinition = datasourceDefinitionProvider.findByProcessorType(DATA_TRANSFORMATION_HIVE_DEFINITION);
final DatasourceDefinition jdbcDefinition = datasourceDefinitionProvider.findByProcessorType(DATA_TRANSFORMATION_JDBC_DEFINITION);
nodes.forEach(node -> {
// Extract properties from node
final DatasourceDefinition datasourceDefinition;
final Map<String, String> properties = new HashMap<>();
if (node.get("datasourceId") == null || node.get("datasourceId").equals("HIVE")) {
final String name = (String) node.get("name");
datasourceDefinition = hiveDefinition;
properties.put(HIVE_SCHEMA_KEY, StringUtils.trim(StringUtils.substringBefore(name, ".")));
properties.put(HIVE_TABLE_KEY, StringUtils.trim(StringUtils.substringAfterLast(name, ".")));
} else {
final Datasource datasource = datasourceProvider.getDatasource(datasourceProvider.resolve((String) node.get("datasourceId")));
datasourceDefinition = jdbcDefinition;
properties.put(JDBC_CONNECTION_KEY, datasource.getName());
properties.put(JDBC_TABLE_KEY, (String) node.get("name"));
properties.putAll(parseDataTransformControllerServiceProperties(datasourceDefinition, datasource.getName()));
}
if (datasourceDefinition != null) {
// Create the derived data source
final String identityString = propertyExpressionResolver.resolveVariables(datasourceDefinition.getIdentityString(), properties);
final String title = datasourceDefinition.getTitle() != null ? propertyExpressionResolver.resolveVariables(datasourceDefinition.getTitle(), properties) : identityString;
final String desc = propertyExpressionResolver.resolveVariables(datasourceDefinition.getDescription(), properties);
final DerivedDatasource datasource = datasourceProvider.ensureDerivedDatasource(datasourceDefinition.getDatasourceType(), identityString, title, desc, new HashMap<>(properties));
datasources.add(datasource.getId());
}
});
// Build the data sources from the data source ids
final List<String> datasourceIds = Optional.ofNullable(feed.getDataTransformation()).map(FeedDataTransformation::getDatasourceIds).orElse(Collections.emptyList());
datasourceIds.stream().map(datasourceProvider::resolve).forEach(datasources::add);
return datasources;
}
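The nodes consumed above come from the transformation's chart view model; each node is a Map whose "name" and "datasourceId" entries drive the Hive-vs-JDBC branch. A sketch of the two shapes this method expects (the values are illustrative):
// Hive-backed node: no datasourceId (or the literal "HIVE"); name is "schema.table".
Map<String, Object> hiveNode = new HashMap<>();
hiveNode.put("name", "my_schema.my_table");  // split into HIVE_SCHEMA_KEY / HIVE_TABLE_KEY

// JDBC-backed node: datasourceId references a registered Datasource; name is the table.
Map<String, Object> jdbcNode = new HashMap<>();
jdbcNode.put("name", "MY_TABLE");
jdbcNode.put("datasourceId", "a-datasource-id");  // resolved via datasourceProvider.resolve(...)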