Search in sources:

Example 31 with Filter

use of com.amazonaws.services.s3.model.Filter in project workflow-service by UKHomeOffice.

the class CasesApplicationService method query.

/**
 * Query for cases that match a key. Each case is a collection of process instance pointers. No internal data
 * is returned.
 *
 * @param query        the Elasticsearch query string to match against
 * @param pageable     paging information (offset and page size)
 * @param platformUser the user performing the search, used for candidate-group filtering
 * @return a page of matching cases.
 */
@AuditableCaseEvent
public Page<Case> query(String query, Pageable pageable, PlatformUser platformUser) {
    log.info("Performing search by {}", platformUser.getEmail());
    final SearchRequest searchRequest = new SearchRequest();
    SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
    sourceBuilder.query(QueryBuilders.queryStringQuery(query));
    sourceBuilder.from(Math.toIntExact(pageable.getOffset()));
    sourceBuilder.size(pageable.getPageSize());
    sourceBuilder.fetchSource(new String[] { "businessKey" }, null);
    searchRequest.source(sourceBuilder);
    try {
        RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder();
        builder.addHeader("Content-Type", "application/json");
        final SearchResponse results = elasticsearchClient.search(searchRequest, builder.build());
        final Set<String> keys = StreamSupport.stream(results.getHits().spliterator(), false)
                .filter(s -> s.getSourceAsMap().containsKey("businessKey"))
                .map(s -> s.getSourceAsMap().get("businessKey").toString())
                .collect(toSet());
        List<HistoricProcessInstance> historicProcessInstances = new ArrayList<>();
        if (!keys.isEmpty()) {
            final List<HistoricProcessInstance> instances = keys.stream()
                    .map(key -> historyService.createHistoricProcessInstanceQuery()
                            .processInstanceBusinessKey(key).list())
                    .flatMap(List::stream)
                    .collect(toList());
            historicProcessInstances.addAll(instances);
        }
        Map<String, List<HistoricProcessInstance>> groupedByBusinessKey = historicProcessInstances.stream()
                .filter(instance -> this.candidateGroupFilter(instance, platformUser))
                .collect(Collectors.groupingBy(HistoricProcessInstance::getBusinessKey));
        List<Case> cases = groupedByBusinessKey.keySet().stream().map(key -> {
            Case caseDto = new Case();
            caseDto.setBusinessKey(key);
            List<HistoricProcessInstance> instances = groupedByBusinessKey.get(key);
            caseDto.setProcessInstances(instances.stream()
                    .map(HistoricProcessInstanceDto::fromHistoricProcessInstance)
                    .collect(toList()));
            return caseDto;
        }).collect(toList());
        final long totalHits = cases.isEmpty() ? 0 : results.getHits().getTotalHits().value;
        log.info("Number of cases returned for '{}' is '{}'", query, totalHits);
        return new PageImpl<>(cases, PageRequest.of(pageable.getPageNumber(), pageable.getPageSize()), totalHits);
    } catch (Exception e) {
        log.error("Failed to perform search", e);
        throw new RuntimeException(e);
    }
}
Also used : HistoricProcessInstanceDto(org.camunda.bpm.engine.rest.dto.history.HistoricProcessInstanceDto) AuthorizationService(org.camunda.bpm.engine.AuthorizationService) URLDecoder(java.net.URLDecoder) RequiredArgsConstructor(lombok.RequiredArgsConstructor) QueryBuilders(org.elasticsearch.index.query.QueryBuilders) ObjectListing(com.amazonaws.services.s3.model.ObjectListing) HistoricProcessInstance(org.camunda.bpm.engine.history.HistoricProcessInstance) HistoricProcessInstanceQuery(org.camunda.bpm.engine.history.HistoricProcessInstanceQuery) JSONObject(org.json.JSONObject) Resources(org.camunda.bpm.engine.authorization.Resources) ObjectMetadata(com.amazonaws.services.s3.model.ObjectMetadata) Duration(java.time.Duration) SearchResponse(org.elasticsearch.action.search.SearchResponse) RequestOptions(org.elasticsearch.client.RequestOptions) Pageable(org.springframework.data.domain.Pageable) ParseException(java.text.ParseException) AwsProperties(io.digital.patterns.workflow.aws.AwsProperties) Collectors.toSet(java.util.stream.Collectors.toSet) S3Uploader(io.digital.patterns.workflow.data.s3.S3Uploader) Form(io.digital.patterns.workflow.common.Form) PageRequest(org.springframework.data.domain.PageRequest) PostAuthorize(org.springframework.security.access.prepost.PostAuthorize) Page(org.springframework.data.domain.Page) Collectors(java.util.stream.Collectors) StandardCharsets(java.nio.charset.StandardCharsets) String.format(java.lang.String.format) IOUtils(org.apache.commons.io.IOUtils) DataUploadFailedException(io.digital.patterns.workflow.data.DataUploadFailedException) Slf4j(lombok.extern.slf4j.Slf4j) RetryCallback(org.springframework.retry.RetryCallback) SpinJsonNode(org.camunda.spin.json.SpinJsonNode) Authorization(org.camunda.bpm.engine.authorization.Authorization) PageImpl(org.springframework.data.domain.PageImpl) HistoryService(org.camunda.bpm.engine.HistoryService) java.util(java.util) SimpleDateFormat(java.text.SimpleDateFormat) SearchRequest(org.elasticsearch.action.search.SearchRequest) S3Object(com.amazonaws.services.s3.model.S3Object) Service(org.springframework.stereotype.Service) SearchSourceBuilder(org.elasticsearch.search.builder.SearchSourceBuilder) AmazonS3(com.amazonaws.services.s3.AmazonS3) StreamSupport(java.util.stream.StreamSupport) ActivityTypes(org.camunda.bpm.engine.ActivityTypes) JsonConverter(io.digital.patterns.workflow.data.elasticsearch.JsonConverter) IOException(java.io.IOException) RestHighLevelClient(org.elasticsearch.client.RestHighLevelClient) CamundaManager(io.digital.patterns.workflow.data.camunda.CamundaManager) Collectors.toList(java.util.stream.Collectors.toList) GetObjectMetadataRequest(com.amazonaws.services.s3.model.GetObjectMetadataRequest) RetryTemplate(org.springframework.retry.support.RetryTemplate) Spin(org.camunda.spin.Spin) JSONArray(org.json.JSONArray)
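
For orientation, a minimal sketch of how a caller might drive this method. The controller class, endpoint path, and the @AuthenticationPrincipal wiring are illustrative assumptions, not code from workflow-service:

// Hypothetical REST endpoint delegating to CasesApplicationService.query (sketch only).
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.security.core.annotation.AuthenticationPrincipal;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class CaseSearchController {

    private final CasesApplicationService casesApplicationService;

    public CaseSearchController(CasesApplicationService casesApplicationService) {
        this.casesApplicationService = casesApplicationService;
    }

    @GetMapping("/cases")
    public Page<Case> search(@RequestParam String query, Pageable pageable,
                             @AuthenticationPrincipal PlatformUser platformUser) {
        // Paging and the authenticated user pass straight through to the service,
        // which performs candidate-group filtering itself.
        return casesApplicationService.query(query, pageable, platformUser);
    }
}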

Example 32 with Filter

use of com.amazonaws.services.s3.model.Filter in project workflow-service by UKHomeOffice.

the class PdfService method sendPDFs.

public void sendPDFs(String senderAddress, List<String> recipients, String body, String subject, List<String> attachmentIds) {
    if (recipients.isEmpty()) {
        log.warn("No recipients defined so not sending email");
        return;
    }
    List<String> filteredRecipients = recipients.stream().filter(StringUtils::isNotBlank).collect(Collectors.toList());
    try {
        Session session = Session.getDefaultInstance(new Properties());
        MimeMessage mimeMessage = new MimeMessage(session);
        mimeMessage.setSubject(subject, "UTF-8");
        mimeMessage.setFrom(senderAddress);
        mimeMessage.setRecipients(Message.RecipientType.TO, filteredRecipients.stream().map(recipient -> {
            Address address = null;
            try {
                address = new InternetAddress(recipient);
            } catch (AddressException e) {
                log.error("Failed to resolve address {} {}", recipient, e.getMessage());
            }
            return address;
        }).filter(Objects::nonNull).toArray(Address[]::new)); // drop addresses that failed to parse
        MimeMultipart mp = new MimeMultipart();
        BodyPart part = new MimeBodyPart();
        part.setContent(body, "text/html");
        mp.addBodyPart(part);
        attachmentIds.forEach(id -> {
            try {
                MimeBodyPart attachment = new MimeBodyPart();
                DataSource dataSource;
                if (!new URI(id).isAbsolute()) {
                    S3Object object = amazonS3.getObject(environment.getProperty("aws.s3.pdfs"), id);
                    dataSource = new ByteArrayDataSource(object.getObjectContent(), "application/pdf");
                    attachment.setFileName(id);
                    // setHeader, not setContent: the actual content comes from the DataHandler below
                    attachment.setHeader("Content-Type", "application/pdf");
                } else {
                    dataSource = restTemplate.execute(id, HttpMethod.GET, null, (ResponseExtractor<DataSource>) response -> {
                        String type = Objects.requireNonNull(response.getHeaders().getContentType()).toString();
                        try {
                            attachment.setFileName(response.getHeaders().getContentDisposition().getFilename());
                            attachment.setHeader("Content-Type", type);
                        } catch (MessagingException e) {
                            log.error("Unable to set file name {}", e.getMessage());
                        }
                        return new ByteArrayDataSource(response.getBody(), type);
                    });
                }
                attachment.setDataHandler(new DataHandler(dataSource));
                attachment.setHeader("Content-ID", "<" + UUID.randomUUID().toString() + ">");
                mp.addBodyPart(attachment);
            } catch (IOException | MessagingException | URISyntaxException e) {
                log.error("Failed to get data from S3 {}", e.getMessage());
            }
        });
        mimeMessage.setContent(mp);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        mimeMessage.writeTo(outputStream);
        RawMessage rawMessage = new RawMessage(ByteBuffer.wrap(outputStream.toByteArray()));
        SendRawEmailRequest sendEmailRequest = new SendRawEmailRequest(rawMessage);
        SendRawEmailResult result = amazonSimpleEmailService.sendRawEmail(sendEmailRequest);
        log.info("SES send result {}", result.getMessageId());
    } catch (Exception e) {
        log.error("Failed to send SES", e);
        throw new BpmnError("FAILED_TO_SEND_SES", e.getMessage(), e);
    }
}
Also used : ResponseExtractor(org.springframework.web.client.ResponseExtractor) DataHandler(javax.activation.DataHandler) URISyntaxException(java.net.URISyntaxException) AwsProperties(io.digital.patterns.workflow.aws.AwsProperties) URI(java.net.URI) S3Object(com.amazonaws.services.s3.model.S3Object) RawMessage(com.amazonaws.services.simpleemail.model.RawMessage) ByteArrayDataSource(javax.mail.util.ByteArrayDataSource) BpmnError(org.camunda.bpm.engine.delegate.BpmnError) SendRawEmailResult(com.amazonaws.services.simpleemail.model.SendRawEmailResult) IOException(java.io.IOException) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataSource(javax.activation.DataSource) SendRawEmailRequest(com.amazonaws.services.simpleemail.model.SendRawEmailRequest)
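
A sketch of how this might be wired into a Camunda service task; the delegate class, sender address, and process-variable names are all hypothetical:

// Hypothetical JavaDelegate that calls PdfService.sendPDFs (illustrative names throughout).
import java.util.List;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.JavaDelegate;

public class SendPdfDelegate implements JavaDelegate {

    private final PdfService pdfService;

    public SendPdfDelegate(PdfService pdfService) {
        this.pdfService = pdfService;
    }

    @Override
    @SuppressWarnings("unchecked")
    public void execute(DelegateExecution execution) {
        List<String> recipients = (List<String>) execution.getVariable("recipients");
        List<String> attachmentIds = (List<String>) execution.getVariable("attachmentIds");
        // Failures inside sendPDFs surface as BpmnError("FAILED_TO_SEND_SES"),
        // which an error boundary event on the service task can catch.
        pdfService.sendPDFs("no-reply@example.com", recipients,
                "<p>Please find your documents attached.</p>", "Your documents", attachmentIds);
    }
}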

Example 33 with Filter

use of com.amazonaws.services.ec2.model.Filter in project druid by druid-io.

the class EC2AutoScaler method terminate.

@Override
public AutoScalingData terminate(List<String> ips) {
    if (ips.isEmpty()) {
        return new AutoScalingData(Lists.<String>newArrayList());
    }
    DescribeInstancesResult result = amazonEC2Client.describeInstances(new DescribeInstancesRequest().withFilters(new Filter("private-ip-address", ips)));
    List<Instance> instances = Lists.newArrayList();
    for (Reservation reservation : result.getReservations()) {
        instances.addAll(reservation.getInstances());
    }
    try {
        return terminateWithIds(Lists.transform(instances, new Function<Instance, String>() {

            @Override
            public String apply(Instance input) {
                return input.getInstanceId();
            }
        }));
    } catch (Exception e) {
        log.error(e, "Unable to terminate any instances.");
    }
    return null;
}
Also used : DescribeInstancesResult(com.amazonaws.services.ec2.model.DescribeInstancesResult) Function(com.google.common.base.Function) Reservation(com.amazonaws.services.ec2.model.Reservation) AutoScalingData(io.druid.indexing.overlord.autoscaling.AutoScalingData) Filter(com.amazonaws.services.ec2.model.Filter) Instance(com.amazonaws.services.ec2.model.Instance) DescribeInstancesRequest(com.amazonaws.services.ec2.model.DescribeInstancesRequest)
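
Since the Guava Function above only extracts the instance id, the same lookup reads more directly with Java 8 streams; a behavior-equivalent sketch using the imports listed above plus java.util.stream.Collectors:

// Java 8 sketch: look up instances by private IP, then collect their ids.
List<String> instanceIds = amazonEC2Client
        .describeInstances(new DescribeInstancesRequest()
                .withFilters(new Filter("private-ip-address", ips)))
        .getReservations().stream()
        .flatMap(reservation -> reservation.getInstances().stream())
        .map(Instance::getInstanceId)
        .collect(Collectors.toList());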

Example 34 with Filter

use of com.amazonaws.services.s3.model.Filter in project stocator by SparkTC.

the class COSAPIClient method list.

@Override
public FileStatus[] list(String hostName, Path path, boolean fullListing, boolean prefixBased, Boolean isDirectory, boolean flatListing, PathFilter filter) throws FileNotFoundException, IOException {
    LOG.debug("Native direct list status for {}", path);
    ArrayList<FileStatus> tmpResult = new ArrayList<FileStatus>();
    String key = pathToKey(path);
    if (isDirectory != null && isDirectory.booleanValue() && !key.endsWith("/") && !path.toString().equals(hostName)) {
        key = key + "/";
        LOG.debug("listNativeDirect modify key to {}", key);
    }
    Map<String, FileStatus> emptyObjects = new HashMap<String, FileStatus>();
    ListObjectsRequest request = new ListObjectsRequest();
    request.setBucketName(mBucket);
    request.setMaxKeys(5000);
    request.setPrefix(key);
    if (!flatListing) {
        request.setDelimiter("/");
    }
    ObjectListing objectList = mClient.listObjects(request);
    List<S3ObjectSummary> objectSummaries = objectList.getObjectSummaries();
    List<String> commonPrefixes = objectList.getCommonPrefixes();
    boolean objectScanContinue = true;
    S3ObjectSummary prevObj = null;
    // start FTA (failed task attempt) filtering logic
    boolean stocatorOrigin = isSparkOrigin(key, path.toString());
    if (stocatorOrigin) {
        LOG.debug("Stocator origin is true for {}", key);
        if (!isJobSuccessful(key)) {
            LOG.debug("{} created by failed Spark job. Skipped", key);
            if (fModeAutomaticDelete) {
                delete(hostName, new Path(key), true);
            }
            return new FileStatus[0];
        }
    }
    while (objectScanContinue) {
        for (S3ObjectSummary obj : objectSummaries) {
            if (prevObj == null) {
                prevObj = obj;
                prevObj.setKey(correctPlusSign(key, prevObj.getKey()));
                continue;
            }
            obj.setKey(correctPlusSign(key, obj.getKey()));
            String objKey = obj.getKey();
            String unifiedObjectName = extractUnifiedObjectName(objKey);
            LOG.trace("list candidate {}, unified name {}", objKey, unifiedObjectName);
            if (stocatorOrigin && !fullListing) {
                LOG.trace("{} created by Spark", unifiedObjectName);
                // make sure there are no failed task attempts among the candidates
                if (nameWithoutTaskID(objKey).equals(nameWithoutTaskID(prevObj.getKey()))) {
                    // found a failed attempt that was not aborted
                    LOG.trace("Collision found between {} and {}", prevObj.getKey(), objKey);
                    if (prevObj.getSize() < obj.getSize()) {
                        LOG.trace("New candidate is {}. Removed {}", obj.getKey(), prevObj.getKey());
                        prevObj = obj;
                    }
                    continue;
                }
            }
            FileStatus fs = createFileStatus(prevObj, hostName, path);
            if (fs.getLen() > 0 || fullListing) {
                LOG.trace("Native direct list. Adding {} size {}", fs.getPath(), fs.getLen());
                if (filter == null || filter.accept(fs.getPath())) {
                    tmpResult.add(fs);
                } else {
                    LOG.trace("{} rejected by path filter during list. Filter {}", fs.getPath(), filter);
                }
            } else {
                emptyObjects.put(fs.getPath().toString(), fs);
            }
            prevObj = obj;
        }
        boolean isTruncated = objectList.isTruncated();
        if (isTruncated) {
            objectList = mClient.listNextBatchOfObjects(objectList);
            objectSummaries = objectList.getObjectSummaries();
        } else {
            objectScanContinue = false;
        }
    }
    if (prevObj != null) {
        FileStatus fs = createFileStatus(prevObj, hostName, path);
        LOG.trace("Adding the last object from the list {}", fs.getPath());
        if (fs.getLen() > 0 || fullListing) {
            LOG.trace("Native direct list. Adding {} size {}", fs.getPath(), fs.getLen());
            if (filter == null || filter.accept(fs.getPath())) {
                memoryCache.putFileStatus(fs.getPath().toString(), fs);
                tmpResult.add(fs);
            } else {
                LOG.trace("{} rejected by path filter during list. Filter {}", fs.getPath(), filter);
            }
        } else if (!fs.getPath().getName().equals(HADOOP_SUCCESS)) {
            emptyObjects.put(fs.getPath().toString(), fs);
        }
    }
    // get common prefixes
    for (String comPrefix : commonPrefixes) {
        LOG.trace("Common prefix is {}", comPrefix);
        if (emptyObjects.containsKey(keyToQualifiedPath(hostName, comPrefix).toString()) || emptyObjects.isEmpty()) {
            FileStatus status = new COSFileStatus(true, false, keyToQualifiedPath(hostName, comPrefix));
            LOG.trace("Match between common prefix and empty object {}. Adding to result", comPrefix);
            if (filter == null || filter.accept(status.getPath())) {
                memoryCache.putFileStatus(status.getPath().toString(), status);
                tmpResult.add(status);
            } else {
                LOG.trace("Common prefix {} rejected by path filter during list. Filter {}", status.getPath(), filter);
            }
        }
    }
    return tmpResult.toArray(new FileStatus[tmpResult.size()]);
}
Also used : StocatorPath(com.ibm.stocator.fs.common.StocatorPath) Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) LocatedFileStatus(org.apache.hadoop.fs.LocatedFileStatus) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ObjectListing(com.amazonaws.services.s3.model.ObjectListing) S3ObjectSummary(com.amazonaws.services.s3.model.S3ObjectSummary) ListObjectsRequest(com.amazonaws.services.s3.model.ListObjectsRequest)
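
The pagination pattern at the heart of this listing can be distilled into a standalone sketch; s3Client is an AmazonS3 client, and the bucket and prefix names are illustrative:

// Minimal sketch of the listObjects pagination loop used above.
ListObjectsRequest request = new ListObjectsRequest()
        .withBucketName("my-bucket")  // illustrative bucket name
        .withPrefix("data/")          // illustrative prefix
        .withMaxKeys(5000);
ObjectListing listing = s3Client.listObjects(request);
while (true) {
    for (S3ObjectSummary summary : listing.getObjectSummaries()) {
        System.out.println(summary.getKey() + " (" + summary.getSize() + " bytes)");
    }
    if (!listing.isTruncated()) {
        break;
    }
    // isTruncated() signals more pages; fetch the next batch from the saved marker.
    listing = s3Client.listNextBatchOfObjects(listing);
}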

Example 35 with Filter

use of com.amazonaws.services.ec2.model.Filter in project chassis by Kixeye.

the class AwsUtils method getInstanceName.

/**
 * Fetches an instance's Name tag, or null if it does not have one.
 * @param instanceId the id of the EC2 instance to look up
 * @param amazonEC2 the EC2 client used for the describeTags call
 * @return the value of the Name tag, or null if absent or blank
 */
public static String getInstanceName(String instanceId, AmazonEC2 amazonEC2) {
    DescribeTagsResult result = amazonEC2.describeTags(new DescribeTagsRequest().withFilters(
            new Filter().withName("resource-id").withValues(instanceId),
            new Filter().withName("resource-type").withValues("instance"),
            new Filter().withName("key").withValues(TAG_KEY_NAME)));
    if (result.getTags().isEmpty()) {
        return null;
    }
    String name = result.getTags().get(0).getValue();
    return name == null || name.trim().isEmpty() ? null : name;
}
Also used : DescribeTagsRequest(com.amazonaws.services.ec2.model.DescribeTagsRequest) DescribeTagsResult(com.amazonaws.services.ec2.model.DescribeTagsResult) Filter(com.amazonaws.services.ec2.model.Filter)
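
A short usage sketch; the instance id is illustrative, and AmazonEC2ClientBuilder assumes AWS SDK for Java 1.11 or later:

// Hypothetical caller: resolve an instance's Name tag, handling the null case.
AmazonEC2 ec2 = AmazonEC2ClientBuilder.defaultClient();
String name = AwsUtils.getInstanceName("i-0abc123def456789a", ec2);
if (name == null) {
    System.out.println("Instance has no Name tag");
} else {
    System.out.println("Instance name: " + name);
}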

Aggregations

Filter (com.amazonaws.services.ec2.model.Filter) 96
ArrayList (java.util.ArrayList) 70
List (java.util.List) 52
Collectors (java.util.stream.Collectors) 46
IOException (java.io.IOException) 41
HashMap (java.util.HashMap) 38
Map (java.util.Map) 35
AmazonS3 (com.amazonaws.services.s3.AmazonS3) 34
Set (java.util.Set) 31
DescribeInstancesRequest (com.amazonaws.services.ec2.model.DescribeInstancesRequest) 30
S3ObjectSummary (com.amazonaws.services.s3.model.S3ObjectSummary) 27
Instance (com.amazonaws.services.ec2.model.Instance) 26
HashSet (java.util.HashSet) 26
Reservation (com.amazonaws.services.ec2.model.Reservation) 24
Collections (java.util.Collections) 23
DescribeInstancesResult (com.amazonaws.services.ec2.model.DescribeInstancesResult) 21
ObjectListing (com.amazonaws.services.s3.model.ObjectListing) 21
DescribeSubnetsRequest (com.amazonaws.services.ec2.model.DescribeSubnetsRequest) 20
Entry (java.util.Map.Entry) 20
Tag (com.amazonaws.services.ec2.model.Tag) 18