Use of org.apache.commons.lang3.StringUtils.substringAfterLast in project cas by apereo.
From the class ConfigurationMetadataGenerator, method removeNestedConfigurationPropertyGroups.
/**
 * Removes from {@code properties} every entry whose owning group class declares the
 * property's field with {@link NestedConfigurationProperty}; such entries are represented
 * by their own nested group and would otherwise appear twice in the generated metadata.
 *
 * @param properties the flattened configuration properties; matching entries are removed in place
 * @param groups     the configuration groups; a property's group is resolved by the name prefix
 *                   before the last {@code '.'} (case-insensitive match)
 * @throws RuntimeException if the group's declaring class cannot be loaded
 */
private static void removeNestedConfigurationPropertyGroups(final Set<ConfigurationMetadataProperty> properties, final Set<ConfigurationMetadataProperty> groups) {
    val it = properties.iterator();
    while (it.hasNext()) {
        val entry = it.next();
        try {
            val propName = StringUtils.substringAfterLast(entry.getName(), ".");
            val groupName = StringUtils.substringBeforeLast(entry.getName(), ".");
            val res = groups.stream().filter(g -> g.getName().equalsIgnoreCase(groupName)).findFirst();
            if (res.isPresent()) {
                val clazz = ClassUtils.getClass(res.get().getType());
                val names = RelaxedPropertyNames.forCamelCase(propName);
                // Decide first, remove once. The original called it.remove() inside a
                // forEach over the relaxed name variants: a second matching variant would
                // invoke Iterator.remove() again without an intervening next(), throwing
                // IllegalStateException. A short-circuiting loop removes that hazard and
                // also makes the Unchecked.consumer wrapper unnecessary (findField throws
                // no checked exception).
                var nested = false;
                for (val name : names.getValues()) {
                    val field = ReflectionUtils.findField(clazz, name);
                    if (field != null && field.isAnnotationPresent(NestedConfigurationProperty.class)) {
                        nested = true;
                        break;
                    }
                }
                if (nested) {
                    it.remove();
                }
            }
        } catch (final Exception e) {
            // Preserve the original cause (e.g. ClassNotFoundException from ClassUtils.getClass).
            throw new RuntimeException(e);
        }
    }
}
Use of org.apache.commons.lang3.StringUtils.substringAfterLast in project kylo by Teradata.
From the class FeedRestController, method profileSummary.
@GET
@Path("/{feedId}/profile-summary")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Gets a summary of the feed profiles.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns the profile summaries.", response = Map.class, responseContainer = "List"), @ApiResponse(code = 500, message = "The profiles are unavailable.", response = RestResponseStatus.class) })
public Response profileSummary(@PathParam("feedId") String feedId, @QueryParam("page") Integer page, @QueryParam("pageSize") Integer pageSize) {
    // Normalize paging parameters: null or non-positive values fall back to page 1, size 10.
    if (page == null || page <= 0) {
        page = 1;
    }
    if (pageSize == null || pageSize <= 0) {
        pageSize = 10;
    }
    FeedMetadata feedMetadata = getMetadataService().getFeedById(feedId);
    String profileTableName = feedMetadata.getProfileTableName();
    QueryResult tablePartitions = hiveService.getTablePartitions(profileTableName);
    List<Map<String, Object>> partitions = tablePartitions.getRows();
    // Partition values look like "processing_dttm=<epoch>"; strip the prefix and sort newest-first.
    Stream<Long> sortedPartitions = partitions.stream()
        .map(row -> Long.parseLong(row.get("partition").toString().substring("processing_dttm=".length())))
        .sorted(Comparator.reverseOrder());
    long totalPartitions = partitions.size();
    // Cast to long BEFORE multiplying: (page - 1) * pageSize in int arithmetic can overflow
    // for large page numbers even though Stream.skip() takes a long.
    List<String> partitionsPage = sortedPartitions
        .skip((long) (page - 1) * pageSize)
        .limit(pageSize)
        .map(partition -> "'" + partition + "'")
        .collect(Collectors.toList());
    final String profileTable = HiveUtils.quoteIdentifier(profileTableName);
    // Partition values are derived from Long.parseLong above, so the IN-list is injection-safe.
    String query = "SELECT * from " + profileTable + " where columnname = '(ALL)' and processing_dttm in (" + StringUtils.join(partitionsPage, ',') + ")";
    List<Map<String, Object>> rows = new ArrayList<>();
    try {
        QueryResult results = hiveService.query(query);
        rows.addAll(results.getRows());
        // Add in the archive date-time fields if applicable.
        final String ARCHIVE_PROCESSOR_TYPE = "com.thinkbiganalytics.nifi.GetTableData";
        // Constant-first comparison avoids an NPE when the feed's input processor type is null.
        if (ARCHIVE_PROCESSOR_TYPE.equalsIgnoreCase(feedMetadata.getInputProcessorType())) {
            NifiProperty property = NifiPropertyUtil.findPropertyByProcessorType(feedMetadata.getProperties(), ARCHIVE_PROCESSOR_TYPE, "Date Field");
            if (property != null && property.getValue() != null) {
                String field = property.getValue();
                // The date field may be qualified (e.g. "table.column"); keep only the column name.
                if (field.contains(".")) {
                    field = StringUtils.substringAfterLast(field, ".");
                }
                query = "SELECT * from " + profileTable + " where metrictype IN('MIN_TIMESTAMP','MAX_TIMESTAMP') AND columnname = " + HiveUtils.quoteString(field);
                QueryResult dateRows = hiveService.query(query);
                if (dateRows != null && !dateRows.isEmpty()) {
                    rows.addAll(dateRows.getRows());
                }
            }
        }
    } catch (DataAccessException e) {
        // Guard against a null cause or null message before inspecting it: the original
        // chained e.getCause().getMessage() calls could themselves throw NPE and mask
        // the real failure being classified here.
        final Throwable cause = e.getCause();
        final String causeMessage = (cause == null || cause.getMessage() == null) ? "" : cause.getMessage();
        if (cause instanceof org.apache.hive.service.cli.HiveSQLException && causeMessage.contains("Table not found")) {
            // This exception is ok to swallow since it just means no profile data exists yet.
        } else if (causeMessage.contains("HiveAccessControlException Permission denied")) {
            throw new AccessControlException("You do not have permission to execute this hive query");
        } else {
            throw e;
        }
    }
    PageImpl<Map<String, Object>> response = new PageImpl<>(rows, null, totalPartitions);
    return Response.ok(response).build();
}
Aggregations