Example use of org.apache.commons.lang3.StringUtils.substringBefore in project cas by apereo:
class ConfigurationMetadataPropertyCreator, method createConfigurationProperty.
/**
 * Create a configuration metadata property from a parsed field declaration.
 * <p>
 * The field's camelCase name is converted to its relaxed property-name form,
 * its Javadoc becomes the description (first sentence as the short description),
 * its declared type is mapped to a metadata type (with special handling for
 * primitives, generic collections and locally-declared enums), and any simple
 * literal initializer is recorded as the default value. The property is added
 * to {@code properties} and a group entry for its parent is added to {@code groups}.
 *
 * @param fieldDecl the field declaration to inspect
 * @param propName  the property group/prefix this field belongs to
 * @return the configuration metadata property
 */
public ConfigurationMetadataProperty createConfigurationProperty(final FieldDeclaration fieldDecl, final String propName) {
    val variable = fieldDecl.getVariables().get(0);
    // Translate the camelCase field name into the relaxed property-name form.
    val name = StreamSupport.stream(RelaxedPropertyNames.forCamelCase(variable.getNameAsString()).spliterator(), false)
        .map(Object::toString)
        .findFirst()
        .orElseGet(variable::getNameAsString);
    // Indexed (collection-bound) groups are suffixed with "[]".
    val indexedGroup = propName.concat(indexNameWithBrackets ? "[]" : StringUtils.EMPTY);
    val indexedName = indexedGroup.concat(".").concat(name);

    val prop = new ConfigurationMetadataProperty();
    if (fieldDecl.getJavadoc().isPresent()) {
        val description = fieldDecl.getJavadoc().get().getDescription().toText();
        prop.setDescription(description);
        // Short description is the first sentence of the field's Javadoc.
        prop.setShortDescription(StringUtils.substringBefore(description, "."));
    } else {
        LOGGER.error("No Javadoc found for field [{}]", indexedName);
    }
    prop.setName(indexedName);
    prop.setId(indexedName);

    val elementType = fieldDecl.getElementType();
    val elementTypeStr = elementType.asString();
    // Avoid unchecked Optional.get(): a field in a valid AST always has a parent,
    // but fail with a descriptive error if that invariant is ever violated.
    val parent = fieldDecl.getParentNode()
        .orElseThrow(() -> new IllegalStateException("Field declaration has no parent node: " + indexedName));
    if (PRIMITIVES.containsKey(elementTypeStr)) {
        prop.setType(PRIMITIVES.get(elementTypeStr));
    } else if (elementTypeStr.startsWith("Map<") || elementTypeStr.startsWith("List<") || elementTypeStr.startsWith("Set<")) {
        prop.setType("java.util." + elementTypeStr);
        // If the generic parameter is an enum declared in the same type, append
        // its field-level Javadocs to the property description.
        val typeName = elementTypeStr.substring(elementTypeStr.indexOf('<') + 1, elementTypeStr.indexOf('>'));
        parent.findFirst(EnumDeclaration.class, em -> em.getNameAsString().contains(typeName))
            .ifPresent(em -> prop.setDescription(collectJavadocsEnumFields(prop, em).toString()));
    } else {
        prop.setType(elementTypeStr);
        parent.findFirst(EnumDeclaration.class, em -> em.getNameAsString().contains(elementTypeStr))
            .ifPresent(em -> {
                prop.setDescription(collectJavadocsEnumFields(prop, em).toString());
                // Prefer the fully-qualified enum name as the property type when resolvable.
                em.getFullyQualifiedName().ifPresent(prop::setType);
            });
    }

    // Record simple literal initializers (string/number, boolean, constant reference)
    // as the property's default value.
    variable.getInitializer().ifPresent(exp -> {
        if (exp instanceof LiteralStringValueExpr) {
            prop.setDefaultValue(((LiteralStringValueExpr) exp).getValue());
        } else if (exp instanceof BooleanLiteralExpr) {
            prop.setDefaultValue(((BooleanLiteralExpr) exp).getValue());
        } else if (exp instanceof FieldAccessExpr) {
            prop.setDefaultValue(((FieldAccessExpr) exp).getNameAsString());
        }
    });

    properties.add(prop);
    val grp = new ComparableConfigurationMetadataProperty();
    grp.setId(indexedGroup);
    grp.setName(indexedGroup);
    grp.setType(parentClass);
    groups.add(grp);
    return prop;
}
Example use of org.apache.commons.lang3.StringUtils.substringBefore in project kylo by Teradata:
class RefreshableDataSourceTest, method testCreateDataSourceAndGetProperties.
/**
 * Creates the Hive data source via the private {@code create} method and parses its
 * {@code toString()} output ("key=value" pairs separated by "; ") into a map.
 *
 * @return map of data-source property names to values
 * @throws Exception if the reflective invocation of {@code create} fails
 */
private Map<String, String> testCreateDataSourceAndGetProperties() throws Exception {
    final DataSource ds = Whitebox.invokeMethod(hiveDs, "create", true, principal);
    // Supply a merge function so a duplicate key in the toString() output keeps the
    // last value instead of throwing IllegalStateException from Collectors.toMap().
    return Arrays.stream(ds.toString().split("; "))
        .collect(Collectors.toMap(
            s -> StringUtils.substringBefore(s, "="),
            s -> StringUtils.substringAfter(s, "="),
            (first, second) -> second));
}
Example use of org.apache.commons.lang3.StringUtils.substringBefore in project kylo by Teradata:
class DefaultServiceLevelAgreementService, method saveAndScheduleSla.
/**
 * Saves and schedules an SLA. If the SLA references Feed(s), the current user must have
 * EDIT_DETAILS permission on every referenced feed or the SLA is not saved.
 *
 * @param serviceLevelAgreement the SLA to save
 * @param feed                  an optional Feed to relate to this SLA. If this is not present the related feeds are
 *                              embedded in the SLA policies. The Feed is a pointer to the feed the user is editing
 *                              when creating an SLA from the Feed Details page; from the main SLA page it is null.
 * @return the saved SLA REST model, or {@code null} when no SLA was given or the user lacks permission
 */
private ServiceLevelAgreement saveAndScheduleSla(ServiceLevelAgreementGroup serviceLevelAgreement, FeedMetadata feed) {
    if (serviceLevelAgreement == null) {
        return null;
    }
    ServiceLevelAgreementMetricTransformerHelper transformer = new ServiceLevelAgreementMetricTransformerHelper();
    // Read (as the service account) the feeds referenced by the SLA so we can verify the
    // current user has access to edit them.
    List<String> feedsOnSla = metadataAccess.read(() -> {
        List<String> feedIds = new ArrayList<>();
        // All referencing feeds, as "category.feed" system names.
        List<String> systemCategoryAndFeedNames = transformer.getCategoryFeedNames(serviceLevelAgreement);
        for (String categoryAndFeed : systemCategoryAndFeedNames) {
            // Split "category.feed" into its category and feed system names.
            String categoryName = StringUtils.trim(StringUtils.substringBefore(categoryAndFeed, "."));
            String feedName = StringUtils.trim(StringUtils.substringAfterLast(categoryAndFeed, "."));
            Feed feedEntity = feedProvider.findBySystemName(categoryName, feedName);
            if (feedEntity != null) {
                feedIds.add(feedEntity.getId().toString());
            }
        }
        return feedIds;
    }, MetadataAccess.SERVICE);
    // The user may edit iff they hold EDIT_DETAILS on every referenced feed.
    // (allMatch is vacuously true on an empty list, so no special-case is needed.)
    boolean allowedToEdit = feedsOnSla.stream().allMatch(feedId -> feedManagerFeedService.checkFeedPermission(feedId, FeedAccessControl.EDIT_DETAILS));
    if (!allowedToEdit) {
        return null;
    }
    return metadataAccess.commit(() -> {
        // Re-read the feeds inside this session.
        Set<Feed> slaFeeds = new HashSet<>();
        Set<Feed.ID> slaFeedIds = new HashSet<>();
        feedsOnSla.forEach(feedId -> {
            Feed feedEntity = feedProvider.findById(feedProvider.resolveId(feedId));
            if (feedEntity != null) {
                slaFeeds.add(feedEntity);
                slaFeedIds.add(feedEntity.getId());
            }
        });
        // Merged duplicate consecutive null checks: verify permission on the editing feed
        // and apply its category/feed name to the SLA's current-feed properties.
        if (feed != null) {
            feedManagerFeedService.checkFeedPermission(feed.getId(), FeedAccessControl.EDIT_DETAILS);
            transformer.applyFeedNameToCurrentFeedProperties(serviceLevelAgreement, feed.getCategory().getSystemName(), feed.getSystemFeedName());
        }
        ServiceLevelAgreement sla = transformer.getServiceLevelAgreement(serviceLevelAgreement);
        // Reuse the existing SLA when an id is present, otherwise build a new one.
        ServiceLevelAgreementBuilder slaBuilder;
        com.thinkbiganalytics.metadata.sla.api.ServiceLevelAgreement.ID existingId = null;
        if (StringUtils.isNotBlank(sla.getId())) {
            existingId = slaProvider.resolve(sla.getId());
        }
        if (existingId != null) {
            slaBuilder = slaProvider.builder(existingId);
        } else {
            slaBuilder = slaProvider.builder();
        }
        slaBuilder.name(sla.getName()).description(sla.getDescription());
        for (com.thinkbiganalytics.metadata.rest.model.sla.ObligationGroup group : sla.getGroups()) {
            ObligationGroupBuilder groupBuilder = slaBuilder.obligationGroupBuilder(ObligationGroup.Condition.valueOf(group.getCondition()));
            for (Obligation o : group.getObligations()) {
                groupBuilder.obligationBuilder().metric(o.getMetrics()).description(o.getDescription()).build();
            }
            groupBuilder.build();
        }
        com.thinkbiganalytics.metadata.sla.api.ServiceLevelAgreement savedSla = slaBuilder.build();
        List<ServiceLevelAgreementActionConfiguration> actions = transformer.getActionConfigurations(serviceLevelAgreement);
        // Now assign the SLA checks.
        slaProvider.slaCheckBuilder(savedSla.getId()).removeSlaChecks().actionConfigurations(actions).build();
        // Relate the SLA to its feeds.
        Set<Feed.ID> feedIds = new HashSet<>();
        FeedServiceLevelAgreementRelationship feedServiceLevelAgreementRelationship = feedSlaProvider.relateFeeds(savedSla, slaFeeds);
        if (feedServiceLevelAgreementRelationship != null && feedServiceLevelAgreementRelationship.getFeeds() != null) {
            feedIds = feedServiceLevelAgreementRelationship.getFeeds().stream().map(f -> f.getId()).collect(Collectors.toSet());
        }
        Set<VelocityTemplate.ID> velocityTemplates = findVelocityTemplates(serviceLevelAgreement);
        // Update the JPA mapping in Ops Manager for this SLA and its related feeds.
        serviceLevelAgreementDescriptionProvider.updateServiceLevelAgreement(savedSla.getId(), savedSla.getName(), savedSla.getDescription(), feedIds, velocityTemplates);
        com.thinkbiganalytics.metadata.rest.model.sla.FeedServiceLevelAgreement restModel = serviceLevelAgreementTransform.toModel(savedSla, slaFeeds, true);
        // Schedule it.
        serviceLevelAgreementScheduler.scheduleServiceLevelAgreement(savedSla);
        return restModel;
    });
}
Example use of org.apache.commons.lang3.StringUtils.substringBefore in project kylo by Teradata:
class DerivedDatasourceFactory, method ensureDataTransformationSourceDatasources.
/**
 * Builds the list of data sources for the specified data transformation feed.
 *
 * @param feed the feed
 * @return the list of data sources
 * @throws NullPointerException if the feed has no data transformation
 */
@Nonnull
private Set<Datasource.ID> ensureDataTransformationSourceDatasources(@Nonnull final FeedMetadata feed) {
// IDs of catalog data sets referenced by the feed; anything collected here is excluded from the legacy datasource fallback at the bottom.
final Set<String> dataSetIds = new HashSet<>();
final Set<Datasource.ID> datasources = new HashSet<>();
if (feed.getSourceDataSets() != null) {
List<String> datasetIds = feed.getSourceDataSets().stream().map(ds -> ds.getId()).collect(Collectors.toList());
dataSetIds.addAll(datasetIds);
}
// NOTE(review): getDataTransformation() is dereferenced here without a null check, matching the documented NullPointerException contract.
final List<String> catalogSources = feed.getDataTransformation().getCatalogDataSourceIds() != null ? feed.getDataTransformation().getCatalogDataSourceIds() : new ArrayList<>();
// Extract nodes in chart view model
@SuppressWarnings("unchecked") final Stream<Map<String, Object>> nodes = Optional.ofNullable(feed.getDataTransformation().getChartViewModel()).map(model -> (List<Map<String, Object>>) model.get("nodes")).map(Collection::stream).orElse(Stream.empty());
// Create a data source for each node
final DatasourceDefinition hiveDefinition = datasourceDefinitionProvider.findByProcessorType(DATA_TRANSFORMATION_HIVE_DEFINITION);
final DatasourceDefinition jdbcDefinition = datasourceDefinitionProvider.findByProcessorType(DATA_TRANSFORMATION_JDBC_DEFINITION);
nodes.forEach(node -> {
// Filter data sets
// A node with a non-HIVE datasourceId that carries a "dataset" (and is not flagged as matching a user data source) is tracked only by its id; no derived datasource is created for it.
if (!StringUtils.equalsAnyIgnoreCase((String) node.get("datasourceId"), null, "HIVE") && node.get("dataset") != null && !Objects.equals(node.get("datasetMatchesUserDataSource"), Boolean.TRUE)) {
dataSetIds.add((String) node.get("datasourceId"));
return;
}
// Extract properties from node
DatasourceDefinition datasourceDefinition = null;
final Map<String, String> properties = new HashMap<>();
String userDatasourceId = (String) node.get("datasourceId");
// Treat a node with no datasourceId and no dataset, or an explicit "HIVE" id, as a Hive table node.
if ((userDatasourceId == null && node.get("dataset") == null) || (userDatasourceId != null && userDatasourceId.equalsIgnoreCase("HIVE"))) {
final String name = (String) node.get("name");
datasourceDefinition = hiveDefinition;
// NOTE(review): assumes node "name" has the form "schema.table" — schema before the first '.', table after the last.
properties.put(HIVE_SCHEMA_KEY, StringUtils.trim(StringUtils.substringBefore(name, ".")));
properties.put(HIVE_TABLE_KEY, StringUtils.trim(StringUtils.substringAfterLast(name, ".")));
} else if (userDatasourceId != null) {
// Any other non-null id is resolved as a user-defined JDBC datasource.
final Datasource datasource = datasourceProvider.getDatasource(datasourceProvider.resolve(userDatasourceId));
if (datasource != null) {
datasourceDefinition = jdbcDefinition;
properties.put(JDBC_CONNECTION_KEY, datasource.getName());
properties.put(JDBC_TABLE_KEY, (String) node.get("name"));
properties.putAll(parseDataTransformControllerServiceProperties(datasourceDefinition, datasource.getName()));
}
}
if (datasourceDefinition != null) {
// Create the derived data source
// Identity string, title and description may contain ${...} variables resolved against the node properties.
final String identityString = propertyExpressionResolver.resolveVariables(datasourceDefinition.getIdentityString(), properties);
final String title = datasourceDefinition.getTitle() != null ? propertyExpressionResolver.resolveVariables(datasourceDefinition.getTitle(), properties) : identityString;
final String desc = propertyExpressionResolver.resolveVariables(datasourceDefinition.getDescription(), properties);
final DerivedDatasource datasource = datasourceProvider.ensureDerivedDatasource(datasourceDefinition.getDatasourceType(), identityString, title, desc, new HashMap<>(properties));
datasources.add(datasource.getId());
}
});
// Build the data sources from the data source ids
// Legacy fallback: only when no catalog data sets were seen, resolve the transformation's raw datasource ids (minus catalog sources).
if (dataSetIds.isEmpty()) {
final List<String> datasourceIds = Optional.ofNullable(feed.getDataTransformation()).map(FeedDataTransformation::getDatasourceIds).orElse(Collections.emptyList());
datasourceIds.stream().filter(id -> !dataSetIds.contains(id) && !catalogSources.contains(id)).map(datasourceProvider::resolve).forEach(datasources::add);
}
return datasources;
}
Example use of org.apache.commons.lang3.StringUtils.substringBefore in project kylo by Teradata:
class DebugController, method queryJcr.
/**
 * Executes the given JCR-SQL query, for debugging.
 *
 * @param query the JCR-SQL query to run
 * @return the query result including the explain plan, column names, row values and elapsed time
 */
@GET
@Path("jcr-sql")
@Produces({ MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON })
public JcrQueryResult queryJcr(@QueryParam("query") final String query) {
    return metadata.read(() -> {
        this.accessController.checkPermission(AccessController.SERVICES, MetadataAccessControl.ADMIN_METADATA);
        // Primitive long avoids pointless boxing; re-assigned below to exclude the explain plan from the timing.
        long startTime = System.currentTimeMillis();
        JcrQueryResult jcrQueryResult = new JcrQueryResult();
        try {
            Session session = JcrMetadataAccess.getActiveSession();
            String explainPlain = JcrQueryUtil.explainPlain(session, query);
            // Start the timer now, so only the query itself is measured.
            startTime = System.currentTimeMillis();
            QueryResult result = JcrQueryUtil.query(session, query);
            jcrQueryResult.setExplainPlan(explainPlain);
            RowIterator rowItr = result.getRows();
            // Best-effort parse of the SELECT clause to derive column names;
            // corrected below if the actual row width differs.
            List<JcrQueryResultColumn> columns = new ArrayList<>();
            String colsStr = StringUtils.substringAfter(query.toLowerCase(), "select");
            colsStr = StringUtils.substringBefore(colsStr, "from");
            if (StringUtils.isNotBlank(colsStr)) {
                colsStr = colsStr.trim();
                columns = Arrays.stream(colsStr.split(",")).map(c -> {
                    String columnName = c;
                    // Honor "expr as alias" aliases, else take the token after the first space.
                    if (c.contains("as ")) {
                        columnName = StringUtils.substringAfter(c, "as ");
                    } else if (c.contains(" ")) {
                        columnName = StringUtils.substringAfter(c, " ");
                    }
                    return new JcrQueryResultColumn(columnName);
                }).collect(Collectors.toList());
            }
            jcrQueryResult.setColumns(columns);
            while (rowItr.hasNext()) {
                Row row = rowItr.nextRow();
                Value[] rowValues = row.getValues();
                if (rowValues != null) {
                    // Fall back to generic column names if the parsed count is wrong.
                    if (rowValues.length != columns.size()) {
                        columns = IntStream.range(0, rowValues.length).mapToObj(i -> new JcrQueryResultColumn("Column " + i)).collect(Collectors.toList());
                        jcrQueryResult.setColumns(columns);
                    }
                    JcrQueryResultRow jcrQueryResultRow = new JcrQueryResultRow();
                    jcrQueryResult.addRow(jcrQueryResultRow);
                    List<JcrQueryResultColumnValue> jcrQueryResultColumnValues = Arrays.stream(rowValues).map(v -> {
                        try {
                            String value = v.getString();
                            return new JcrQueryResultColumnValue(value);
                        } catch (Exception e) {
                            // Surface conversion failures in-line rather than aborting the whole result.
                            return new JcrQueryResultColumnValue("ERROR: " + e.getMessage());
                        }
                    }).collect(Collectors.toList());
                    jcrQueryResultRow.setColumnValues(jcrQueryResultColumnValues);
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        long totalTime = System.currentTimeMillis() - startTime;
        jcrQueryResult.setQueryTime(totalTime);
        return jcrQueryResult;
    });
}
Aggregations