Use of com.epam.pipeline.exception.PipelineException in project cloud-pipeline by epam.
The class MonitoringESDao, method deleteIndices.
/**
 * Delete indices that are older than the retention period (in days).
 * @param retentionPeriodDays retention period (in days)
 */
public void deleteIndices(int retentionPeriodDays) {
    try {
        Response response = lowLevelClient.performRequest(HttpMethod.GET.name(), "/_cat/indices");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(response.getEntity().getContent()))) {
            String indicesToDelete = reader.lines()
                    .flatMap(l -> Arrays.stream(l.split(" ")))
                    .filter(str -> str.startsWith(INDEX_NAME_TOKEN))
                    .map(name -> {
                        String dateString = name.substring(INDEX_NAME_TOKEN.length());
                        try {
                            return new ImmutablePair<>(name, LocalDate.parse(dateString, DATE_FORMATTER)
                                    .atStartOfDay(ZoneOffset.UTC).toLocalDateTime());
                        } catch (DateTimeParseException e) {
                            // LocalDate.parse throws DateTimeParseException, not IllegalArgumentException
                            return new ImmutablePair<String, LocalDateTime>(name, null);
                        }
                    })
                    .filter(pair -> pair.right != null
                            && olderThanRetentionPeriod(retentionPeriodDays, pair.right))
                    .map(pair -> pair.left)
                    .collect(Collectors.joining(","));
            lowLevelClient.performRequest(HttpMethod.DELETE.name(), "/" + indicesToDelete);
        }
    } catch (IOException e) {
        throw new PipelineException(e);
    }
}
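A minimal sketch of how deleteIndices could be wired to a scheduled cleanup job; the class name, the cron expression, and the five-day retention value are illustrative assumptions, not part of cloud-pipeline itself.

import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

// Hypothetical scheduled cleanup that delegates to MonitoringESDao.deleteIndices.
// The cron expression and the retention period below are illustrative assumptions.
@Service
public class MonitoringIndexCleanupJob {

    private final MonitoringESDao monitoringESDao;

    public MonitoringIndexCleanupJob(final MonitoringESDao monitoringESDao) {
        this.monitoringESDao = monitoringESDao;
    }

    // Run once a day at 01:00 server time and drop monitoring indices older than 5 days
    @Scheduled(cron = "0 0 1 * * *")
    public void cleanUpOldIndices() {
        monitoringESDao.deleteIndices(5);
    }
}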
Use of com.epam.pipeline.exception.PipelineException in project cloud-pipeline by epam.
The class MonitoringESDao, method loadCpuUsageRateMetrics.
public Map<String, Double> loadCpuUsageRateMetrics(Collection<String> podIds, LocalDateTime from, LocalDateTime to) {
    if (CollectionUtils.isEmpty(podIds)) {
        return Collections.emptyMap();
    }
    SearchSourceBuilder builder = new SearchSourceBuilder()
            .query(QueryBuilders.boolQuery()
                    .filter(QueryBuilders.termsQuery(path(FIELD_METRICS_TAGS, FIELD_POD_NAME_RAW), podIds))
                    .filter(QueryBuilders.termQuery(path(FIELD_METRICS_TAGS, FIELD_NAMESPACE_NAME), "default"))
                    .filter(QueryBuilders.termQuery(path(FIELD_METRICS_TAGS, FIELD_TYPE), "pod_container"))
                    .filter(QueryBuilders.rangeQuery(FIELD_CPU_METRICS_TIMESTAMP)
                            .from(from.toInstant(ZoneOffset.UTC).toEpochMilli())
                            .to(to.toInstant(ZoneOffset.UTC).toEpochMilli())))
            .size(0)
            .aggregation(AggregationBuilders.terms(AGGREGATION_POD_NAME)
                    .field(path(FIELD_METRICS_TAGS, FIELD_POD_NAME_RAW))
                    .size(podIds.size())
                    .subAggregation(AggregationBuilders.avg(AGGREGATION_CPU_RATE).field("Metrics.cpu/usage_rate.value")));
    SearchRequest request = new SearchRequest(getIndexNames(from, to)).types("cpu").source(builder);
    SearchResponse response;
    try {
        response = client.search(request);
    } catch (IOException e) {
        throw new PipelineException(e);
    }
    Terms terms = response.getAggregations().get(AGGREGATION_POD_NAME);
    return terms.getBuckets().stream()
            .collect(Collectors.toMap(b -> b.getKey().toString(),
                    b -> ((Avg) b.getAggregations().get(AGGREGATION_CPU_RATE)).getValue()));
}
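For illustration, a caller could use loadCpuUsageRateMetrics to report the average CPU usage rate per pod over the last hour; the helper name, the injected monitoringESDao field, and the SLF4J LOGGER are assumptions made for this sketch.

// Illustrative helper, assuming an injected MonitoringESDao field and an SLF4J LOGGER.
public Map<String, Double> reportLastHourCpuUsage(final Collection<String> podIds) {
    final LocalDateTime to = LocalDateTime.now(ZoneOffset.UTC);
    final LocalDateTime from = to.minusHours(1);
    final Map<String, Double> cpuByPod = monitoringESDao.loadCpuUsageRateMetrics(podIds, from, to);
    cpuByPod.forEach((pod, rate) -> LOGGER.debug("Average CPU usage rate for pod {}: {}", pod, rate));
    return cpuByPod;
}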
Use of com.epam.pipeline.exception.PipelineException in project cloud-pipeline by epam.
The class ToolScanScheduler, method forceScheduleScanTool.
/**
 * Schedule a Tool for a security scan. Since a Tool scan is a time-costly operation, there is a queue for that.
 * A Tool is added to the queue and processed in order. Once the Tool is added to the queue, its scanStatus
 * field is set to {@link ToolScanStatus#PENDING}.
 * @param registry a registry path where the Tool is located
 * @param id Tool's id or image
 * @param version Tool's version (Docker tag)
 * @param rescan whether the Tool should be rescanned even if a previous scan result exists
 */
public Future<ToolVersionScanResult> forceScheduleScanTool(final String registry, final String id,
                                                           final String version, final Boolean rescan) {
    if (!preferenceManager.getPreference(SystemPreferences.DOCKER_SECURITY_TOOL_SCAN_ENABLED)) {
        throw new IllegalArgumentException(messageHelper.getMessage(MessageConstants.ERROR_TOOL_SCAN_DISABLED));
    }
    Tool tool = toolManager.loadTool(registry, id);
    Optional<ToolVersionScanResult> toolVersionScanResult = toolManager.loadToolVersionScan(tool.getId(), version);
    ToolScanStatus currentStatus = toolVersionScanResult.map(ToolVersionScanResult::getStatus)
            .orElse(ToolScanStatus.NOT_SCANNED);
    // Schedule a new scan only if the tool is not already in the queue (PENDING means it is already queued)
    if (currentStatus != ToolScanStatus.PENDING) {
        String layerRef = toolVersionScanResult.map(ToolVersionScanResult::getLastLayerRef).orElse(null);
        String digest = toolVersionScanResult.map(ToolVersionScanResult::getDigest).orElse(null);
        toolManager.updateToolVersionScanStatus(tool.getId(), ToolScanStatus.PENDING, null, version, layerRef, digest);
        return forceScanExecutor.submit(new DelegatingSecurityContextCallable<>(() -> {
            LOGGER.info(messageHelper.getMessage(MessageConstants.INFO_TOOL_FORCE_SCAN_STARTED, tool.getImage()));
            try {
                ToolVersionScanResult scanResult = toolScanManager.scanTool(tool, version, rescan);
                toolManager.updateToolVulnerabilities(scanResult.getVulnerabilities(), tool.getId(), version);
                toolManager.updateToolDependencies(scanResult.getDependencies(), tool.getId(), version);
                toolManager.updateToolVersionScanStatus(tool.getId(), ToolScanStatus.COMPLETED,
                        scanResult.getScanDate(), version, scanResult.getLastLayerRef(), scanResult.getDigest());
                return scanResult;
            } catch (Exception e) {
                toolManager.updateToolVersionScanStatus(tool.getId(), ToolScanStatus.FAILED, new Date(),
                        version, null, null);
                LOGGER.error(messageHelper.getMessage(MessageConstants.ERROR_TOOL_SCAN_FAILED, tool.getImage()), e);
                throw new PipelineException(e);
            }
        }, SecurityContextHolder.getContext()));
    }
    return CompletableFuture.completedFuture(
            new ToolVersionScanResult(ToolScanStatus.PENDING, null, Collections.emptyList(), Collections.emptyList()));
}
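As a usage sketch, a caller could trigger a forced scan and wait for the result on the returned Future; the registry address, image name, version, timeout, injected toolScanScheduler field, and LOGGER are assumptions for this example, not values taken from the project.

// Illustrative caller, assuming an injected ToolScanScheduler and an SLF4J LOGGER.
Future<ToolVersionScanResult> scanFuture =
        toolScanScheduler.forceScheduleScanTool("registry:443", "library/centos", "latest", true);
try {
    ToolVersionScanResult result = scanFuture.get(30, TimeUnit.MINUTES);
    LOGGER.info("Forced scan finished with status {}", result.getStatus());
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
} catch (ExecutionException | TimeoutException e) {
    LOGGER.error("Forced tool scan failed or timed out", e);
}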