Use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
From the class DerivedDatasourceFactory, method parseDataTransformControllerServiceProperties.
/**
 * Parses the datasource definition for {JDBC_CONNECTION_KEY:CS Property Key} tokens and resolves each
 * referenced property key against the controller service with the given name.
 *
 * @param datasourceDefinition the definition whose property keys are inspected; may be {@code null}
 * @param controllerServiceName the name of the controller service to resolve against; ignored when blank
 * @return a map of controller service property key to its resolved value; empty when nothing resolves
 */
private Map<String, String> parseDataTransformControllerServiceProperties(DatasourceDefinition datasourceDefinition, String controllerServiceName) {
    Map<String, String> properties = new HashMap<>();
    if (datasourceDefinition != null && StringUtils.isNotBlank(controllerServiceName)) {
        try {
            // Property keys look like: {Source Database Connection:Database Connection URL}
            // Keep only keys referencing the JDBC connection, then strip the wrapper to get the CS property name.
            List<String> serviceProperties = datasourceDefinition.getDatasourcePropertyKeys().stream()
                .filter(k -> k.matches("\\{" + JDBC_CONNECTION_KEY + ":(.*)\\}"))
                .map(p -> p.substring(StringUtils.indexOf(p, ":") + 1, p.length() - 1))
                .collect(Collectors.toList());
            ControllerServiceDTO csDto = nifiControllerServiceProperties.getControllerServiceByName(controllerServiceName);
            if (csDto != null) {
                // NOTE: the redundant inner null-check on csDto was removed; the guard above suffices.
                serviceProperties.forEach(p -> {
                    String value = csDto.getProperties().get(p);
                    if (value != null) {
                        properties.put(p, value);
                    }
                });
            }
        } catch (Exception e) {
            log.warn("An error occurred trying to parse controller service properties for data transformation when deriving the datasource for {}, {}. {} ", datasourceDefinition.getDatasourceType(), datasourceDefinition.getConnectionType(), e.getMessage(), e);
        }
    }
    return properties;
}
Use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
From the class InMemoryFeedProvider, method ensureFeedDestination.
/**
 * Ensures a destination relationship exists between the given feed and datasource.
 *
 * @param feedId the id of the feed
 * @param dsId the id of the destination datasource
 * @return the existing or newly created feed destination
 * @throws FeedCreateException if no feed or no datasource exists for the given ids
 */
@Override
public FeedDestination ensureFeedDestination(Feed.ID feedId, Datasource.ID dsId) {
    BaseFeed feed = (BaseFeed) this.feeds.get(feedId);
    Datasource ds = this.datasetProvider.getDatasource(dsId);
    if (feed == null) {
        throw new FeedCreateException("A feed with the given ID does not exist: " + feedId);
    }
    if (ds == null) {
        // Message fixed: this lookup is for a datasource, not a "dataset".
        throw new FeedCreateException("A datasource with the given ID does not exist: " + dsId);
    }
    return ensureFeedDestination(feed, ds);
}
Use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
From the class DerivedDatasourceFactory, method parseControllerServiceProperties.
/**
 * Parse the definition metadata for the {propertyKey:CS Property Key} tokens and pick out the values in the controller service.
 *
 * @param datasourceDefinition the definition to use
 * @param feedProperties the feed properties that match this definition
 * @return a Map of the Controller Service Property Key, Value
 */
private Map<String, String> parseControllerServiceProperties(DatasourceDefinition datasourceDefinition, List<NifiProperty> feedProperties) {
    Map<String, String> properties = new HashMap<>();
    try {
        // Property keys look like: {Source Database Connection:Database Connection URL}
        // Group the requested CS property names by the controller service (feed property) name that owns them.
        Map<String, List<String>> serviceProperties = new HashMap<>();
        datasourceDefinition.getDatasourcePropertyKeys().stream()
            .filter(k -> k.matches("\\{(.*):(.*)\\}"))
            .forEach(p -> {
                String service = p.substring(1, StringUtils.indexOf(p, ":"));
                String property = p.substring(StringUtils.indexOf(p, ":") + 1, p.length() - 1);
                serviceProperties.computeIfAbsent(service, s -> new ArrayList<>()).add(property);
            });
        serviceProperties.forEach((service, propertyKeys) -> {
            // Locate the controller service id via the feed property whose descriptor identifies a controller service.
            String controllerServiceId = feedProperties.stream()
                .filter(p -> StringUtils.isNotBlank(p.getValue())
                             && p.getPropertyDescriptor() != null
                             && p.getPropertyDescriptor().getName().equalsIgnoreCase(service)
                             && StringUtils.isNotBlank(p.getPropertyDescriptor().getIdentifiesControllerService()))
                .map(NifiProperty::getValue)
                .findFirst()
                .orElse(null);
            if (controllerServiceId != null) {
                ControllerServiceDTO csDto = nifiControllerServiceProperties.getControllerServiceById(controllerServiceId);
                if (csDto != null) {
                    propertyKeys.forEach(propertyKey -> {
                        String value = csDto.getProperties().get(propertyKey);
                        if (value != null) {
                            properties.put(propertyKey, value);
                        }
                    });
                }
            }
        });
    } catch (Exception e) {
        log.warn("An error occurred trying to parse controller service properties when deriving the datasource for {}, {}. {} ", datasourceDefinition.getDatasourceType(), datasourceDefinition.getConnectionType(), e.getMessage(), e);
    }
    return properties;
}
Use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
From the class DerivedDatasourceFactory, method ensureDataTransformationDestinationDatasources.
/**
 * Builds the set of destination datasources for the specified data transformation feed.
 *
 * <p>The data source type is determined based on the sources used in the transformation. If only one source is used then it is assumed that the source and destination are the same. Otherwise it
 * is assumed that the destination is Hive.</p>
 *
 * @param feed the feed
 * @return a singleton set with the derived datasource id, or an empty set when no matching datasource definition exists
 * @throws NullPointerException if the feed has no data transformation
 */
@Nonnull
private Set<Datasource.ID> ensureDataTransformationDestinationDatasources(@Nonnull final FeedMetadata feed) {
// Set properties based on data source type
final String processorType;
final Map<String, String> properties = new HashMap<>();
if (feed.getDataTransformation().getDatasourceIds() != null && feed.getDataTransformation().getDatasourceIds().size() == 1) {
// Exactly one source: the destination is assumed to be the same JDBC datasource.
final Datasource datasource = datasourceProvider.getDatasource(datasourceProvider.resolve(feed.getDataTransformation().getDatasourceIds().get(0)));
processorType = DATA_TRANSFORMATION_JDBC_DEFINITION;
properties.put(JDBC_CONNECTION_KEY, datasource.getName());
properties.put(JDBC_TABLE_KEY, feed.getSystemCategoryName() + "." + feed.getSystemFeedName());
} else {
// Zero or multiple sources: fall back to a Hive destination keyed by category/feed system names.
processorType = DATA_TRANSFORMATION_HIVE_DEFINITION;
properties.put(HIVE_SCHEMA_KEY, feed.getSystemCategoryName());
properties.put(HIVE_TABLE_KEY, feed.getSystemFeedName());
}
// Create datasource
final DatasourceDefinition datasourceDefinition = datasourceDefinitionProvider.findByProcessorType(processorType);
if (datasourceDefinition != null) {
// Resolve identity/title/description templates against the properties collected above;
// the identity string must be computed before extra CS properties are merged in below.
final String identityString = propertyExpressionResolver.resolveVariables(datasourceDefinition.getIdentityString(), properties);
final String title = datasourceDefinition.getTitle() != null ? propertyExpressionResolver.resolveVariables(datasourceDefinition.getTitle(), properties) : identityString;
final String desc = propertyExpressionResolver.resolveVariables(datasourceDefinition.getDescription(), properties);
if (processorType.equals(DATA_TRANSFORMATION_JDBC_DEFINITION)) {
// For JDBC destinations, enrich with properties read from the named controller service.
properties.putAll(parseDataTransformControllerServiceProperties(datasourceDefinition, properties.get(JDBC_CONNECTION_KEY)));
}
final DerivedDatasource datasource = datasourceProvider.ensureDerivedDatasource(datasourceDefinition.getDatasourceType(), identityString, title, desc, new HashMap<>(properties));
return Collections.singleton(datasource.getId());
} else {
// No definition registered for this processor type: nothing to derive.
return Collections.emptySet();
}
}
Use of com.thinkbiganalytics.metadata.api.datasource.Datasource in project kylo by Teradata.
From the class JcrIndexService, method indexDerivedDatasource.
/**
 * Indexes derived datasource objects.
 *
 * <p>Only Hive-typed derived datasources are considered. Indexing is skipped (and any existing
 * index entry removed) when every owning feed has indexing disabled ({@code tba:allowIndexing = "N"}).</p>
 *
 * @param datasource the derived datasource to index
 * @return {@code true} if the index was updated, or {@code false} otherwise
 */
private boolean indexDerivedDatasource(@Nonnull final DerivedDatasource datasource) {
if (HIVE_DATASOURCE.equals(datasource.getDatasourceType())) {
// Walk the JCR tree from each feed destination up to the feed summary to read the indexing flag.
boolean allowIndexing = metadataAccess.read(() -> {
boolean allowIndexingEvaluation = true;
Session session = JcrMetadataAccess.getActiveSession();
if (session != null) {
Value[] feedDestinationsArray = ((JcrDerivedDatasource) datasource).getNode().getProperty("tba:feedDestinations").getValues();
for (Value feedDestination : feedDestinationsArray) {
Node feedDestinationNode = session.getNodeByIdentifier(feedDestination.getString());
if (feedDestinationNode != null) {
// Node hierarchy: feedDestination -> feedDetails -> feedSummary (holds tba:allowIndexing).
Node feedDetailsNode = feedDestinationNode.getParent();
if (feedDetailsNode != null) {
Node feedSummaryNode = feedDetailsNode.getParent();
if (feedSummaryNode != null) {
String indexingOption = feedSummaryNode.getProperty("tba:allowIndexing").getString();
if ((indexingOption != null) && (indexingOption.equals("N"))) {
// Any feed with indexing disabled suppresses indexing for this datasource.
allowIndexingEvaluation = false;
}
}
}
}
}
}
return allowIndexingEvaluation;
}, MetadataAccess.SERVICE);
if (allowIndexing) {
final Map<String, Object> fields = new HashMap<>();
// Determine database and table names
fields.put("databaseName", datasource.getProperties().get("Target schema"));
final Map<String, Object> properties = datasource.getProperties();
fields.put("databaseName", properties.get("Target schema"));
fields.put("tableName", properties.get("Target table"));
// Generate list of column metadata
final Map<String, Object> genericProperties = datasource.getGenericProperties();
final Object columns = genericProperties.get("columns");
if (columns != null && columns instanceof List) {
final List<Map<String, Object>> hiveColumns = ((List<?>) columns).stream().map(Map.class::cast).map(map -> {
final Map<String, Object> column = new HashMap<>();
column.put("columnComment", map.get("description"));
column.put("columnName", map.get("name"));
@SuppressWarnings("unchecked") final List<Map<String, String>> tags = (List<Map<String, String>>) map.get("tags");
if (tags != null && !tags.isEmpty()) {
column.put("columnTags", tags.stream().map(tag -> tag.get("name")).collect(Collectors.toList()));
}
column.put("columnType", map.get("derivedDataType"));
return column;
}).collect(Collectors.toList());
fields.put("hiveColumns", hiveColumns);
}
// Index the Hive schema
// Only index when both database and table names are present; otherwise report no update.
if (fields.get("databaseName") != null && fields.get("tableName") != null) {
search.index(SearchIndex.DATASOURCES, datasource.getDatasourceType(), datasource.getId().toString(), fields);
return true;
}
} else {
// Drop schema from index if feed's indexing is disabled
try {
return checkAndDeleteSchema(((JcrDerivedDatasource) datasource).getId().getIdValue(), ((JcrDerivedDatasource) datasource).getPath());
} catch (RepositoryException e) {
// Best-effort cleanup: log and fall through to return false.
log.warn("Unable to get id and/or path for datasource: {}", e.getMessage());
}
}
}
return false;
}
Aggregations