Search in sources:

Example 1 with ProvenanceDTO

Use of org.apache.nifi.web.api.dto.provenance.ProvenanceDTO in project nifi by apache.

In the class ProvenanceResource, the method getProvenance:

/**
 * Gets the provenance query with the specified id.
 *
 * @param id                 The id of the provenance query
 * @param clusterNodeId      The id of the node in the cluster to search. This is optional and only relevant when clustered. If clustered and it is not specified, the entire cluster is searched.
 * @param summarize          Whether or not to summarize the provenance events returned (false by default)
 * @param incrementalResults Whether or not to return provenance events before the query has completed (true by default)
 * @return A provenanceEntity
 */
@GET
@Consumes(MediaType.WILDCARD)
@Produces(MediaType.APPLICATION_JSON)
@Path("{id}")
@ApiOperation(value = "Gets a provenance query", response = ProvenanceEntity.class, authorizations = { @Authorization(value = "Read - /provenance"), @Authorization(value = "Read - /data/{component-type}/{uuid}") })
@ApiResponses(value = { @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."), @ApiResponse(code = 401, message = "Client could not be authenticated."), @ApiResponse(code = 403, message = "Client is not authorized to make this request."), @ApiResponse(code = 404, message = "The specified resource could not be found."), @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.") })
public Response getProvenance(@ApiParam(value = "The id of the node where this query exists if clustered.", required = false) @QueryParam("clusterNodeId") final String clusterNodeId, @ApiParam(value = "Whether or not incremental results are returned. If false, provenance events" + " are only returned once the query completes. This property is true by default.", required = false) @QueryParam("summarize") @DefaultValue(value = "false") final Boolean summarize, @ApiParam(value = "Whether or not to summarize provenance events returned. This property is false by default.", required = false) @QueryParam("incrementalResults") @DefaultValue(value = "true") final Boolean incrementalResults, @ApiParam(value = "The id of the provenance query.", required = true) @PathParam("id") final String id) {
    authorizeProvenanceRequest();
    // replicate if cluster manager
    if (isReplicateRequest()) {
        // determine where this request should be sent
        if (clusterNodeId == null) {
            // replicate to all nodes
            return replicate(HttpMethod.GET);
        } else {
            return replicate(HttpMethod.GET, clusterNodeId);
        }
    }
    // get the provenance
    final ProvenanceDTO dto = serviceFacade.getProvenance(id, summarize, incrementalResults);
    dto.getRequest().setClusterNodeId(clusterNodeId);
    populateRemainingProvenanceContent(dto);
    // create the response entity
    final ProvenanceEntity entity = new ProvenanceEntity();
    entity.setProvenance(dto);
    // generate the response
    return generateOkResponse(entity).build();
}
Also used : ProvenanceEntity(org.apache.nifi.web.api.entity.ProvenanceEntity) ProvenanceDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceDTO) Path(javax.ws.rs.Path) Consumes(javax.ws.rs.Consumes) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) ApiOperation(io.swagger.annotations.ApiOperation) ApiResponses(io.swagger.annotations.ApiResponses)
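
For context, here is a minimal client-side sketch (not taken from the NiFi sources) of how this endpoint might be polled with the JDK's built-in HTTP client. The base URL, the query id, and the assumption of an unsecured local NiFi instance are placeholders only.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ProvenanceStatusClient {

    public static void main(final String[] args) throws Exception {
        // Assumptions: NiFi's REST API is reachable at this base URL without authentication,
        // and the query id below was returned by an earlier POST to /provenance (see Example 5).
        final String baseUrl = "http://localhost:8080/nifi-api";
        final String provenanceId = "hypothetical-query-id";

        final HttpRequest request = HttpRequest.newBuilder(
                URI.create(baseUrl + "/provenance/" + provenanceId + "?summarize=false&incrementalResults=true"))
                .header("Accept", "application/json")
                .GET()
                .build();

        // The JSON response body is the ProvenanceEntity that getProvenance builds above.
        final HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}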

Example 2 with ProvenanceDTO

Use of org.apache.nifi.web.api.dto.provenance.ProvenanceDTO in project nifi by apache.

In the class ControllerFacade, the method getProvenanceQuery:

/**
 * Retrieves the results of a provenance query.
 *
 * @param provenanceId       the id of the provenance query
 * @param summarize          whether to return summarized provenance events
 * @param incrementalResults whether to include provenance events before the query has completed
 * @return the results of the provenance query
 */
public ProvenanceDTO getProvenanceQuery(String provenanceId, Boolean summarize, Boolean incrementalResults) {
    try {
        // get the query to the provenance repository
        final ProvenanceRepository provenanceRepository = flowController.getProvenanceRepository();
        final QuerySubmission querySubmission = provenanceRepository.retrieveQuerySubmission(provenanceId, NiFiUserUtils.getNiFiUser());
        // ensure the query results could be found
        if (querySubmission == null) {
            throw new ResourceNotFoundException("Cannot find the results for the specified provenance request. Results may have been purged.");
        }
        // get the original query and the results
        final Query query = querySubmission.getQuery();
        final QueryResult queryResult = querySubmission.getResult();
        // build the response
        final ProvenanceDTO provenanceDto = new ProvenanceDTO();
        final ProvenanceRequestDTO requestDto = new ProvenanceRequestDTO();
        final ProvenanceResultsDTO resultsDto = new ProvenanceResultsDTO();
        // include the original request and results
        provenanceDto.setRequest(requestDto);
        provenanceDto.setResults(resultsDto);
        // convert the original request
        requestDto.setStartDate(query.getStartDate());
        requestDto.setEndDate(query.getEndDate());
        requestDto.setMinimumFileSize(query.getMinFileSize());
        requestDto.setMaximumFileSize(query.getMaxFileSize());
        requestDto.setMaxResults(query.getMaxResults());
        if (query.getSearchTerms() != null) {
            final Map<String, String> searchTerms = new HashMap<>();
            for (final SearchTerm searchTerm : query.getSearchTerms()) {
                searchTerms.put(searchTerm.getSearchableField().getFriendlyName(), searchTerm.getValue());
            }
            requestDto.setSearchTerms(searchTerms);
        }
        // convert the provenance
        provenanceDto.setId(query.getIdentifier());
        provenanceDto.setSubmissionTime(querySubmission.getSubmissionTime());
        provenanceDto.setExpiration(queryResult.getExpiration());
        provenanceDto.setFinished(queryResult.isFinished());
        provenanceDto.setPercentCompleted(queryResult.getPercentComplete());
        // convert each event
        final boolean includeResults = incrementalResults == null || Boolean.TRUE.equals(incrementalResults);
        if (includeResults || queryResult.isFinished()) {
            final List<ProvenanceEventDTO> events = new ArrayList<>();
            for (final ProvenanceEventRecord record : queryResult.getMatchingEvents()) {
                events.add(createProvenanceEventDto(record, Boolean.TRUE.equals(summarize)));
            }
            resultsDto.setProvenanceEvents(events);
        }
        if (requestDto.getMaxResults() != null && queryResult.getTotalHitCount() >= requestDto.getMaxResults()) {
            resultsDto.setTotalCount(requestDto.getMaxResults().longValue());
            resultsDto.setTotal(FormatUtils.formatCount(requestDto.getMaxResults().longValue()) + "+");
        } else {
            resultsDto.setTotalCount(queryResult.getTotalHitCount());
            resultsDto.setTotal(FormatUtils.formatCount(queryResult.getTotalHitCount()));
        }
        // include any errors
        if (queryResult.getError() != null) {
            final Set<String> errors = new HashSet<>();
            errors.add(queryResult.getError());
            resultsDto.setErrors(errors);
        }
        // set the generated timestamp
        final Date now = new Date();
        resultsDto.setGenerated(now);
        resultsDto.setTimeOffset(TimeZone.getDefault().getOffset(now.getTime()));
        // get the oldest available event time
        final List<ProvenanceEventRecord> firstEvent = provenanceRepository.getEvents(0, 1);
        if (!firstEvent.isEmpty()) {
            resultsDto.setOldestEvent(new Date(firstEvent.get(0).getEventTime()));
        }
        provenanceDto.setResults(resultsDto);
        return provenanceDto;
    } catch (final IOException ioe) {
        throw new NiFiCoreException("An error occurred while searching the provenance events.", ioe);
    }
}
Also used : NiFiCoreException(org.apache.nifi.web.NiFiCoreException) QuerySubmission(org.apache.nifi.provenance.search.QuerySubmission) Query(org.apache.nifi.provenance.search.Query) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ProvenanceResultsDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceResultsDTO) IOException(java.io.IOException) SearchTerm(org.apache.nifi.provenance.search.SearchTerm) ProvenanceRequestDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceRequestDTO) Date(java.util.Date) QueryResult(org.apache.nifi.provenance.search.QueryResult) ProvenanceEventDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO) ProvenanceEventRecord(org.apache.nifi.provenance.ProvenanceEventRecord) ProvenanceRepository(org.apache.nifi.provenance.ProvenanceRepository) ProvenanceDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceDTO) ResourceNotFoundException(org.apache.nifi.web.ResourceNotFoundException) HashSet(java.util.HashSet)
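
A hedged sketch of how a caller might consume the DTO this method returns; every getter used here mirrors a setter that appears in the method above, while the facade reference, query id, and polling interval are illustrative placeholders.

// Hypothetical caller, not part of the NiFi sources: poll until the query finishes, then print events.
static void awaitAndPrintEvents(final ControllerFacade controllerFacade, final String provenanceId)
        throws InterruptedException {
    ProvenanceDTO status = controllerFacade.getProvenanceQuery(provenanceId, Boolean.FALSE, Boolean.TRUE);
    while (!Boolean.TRUE.equals(status.isFinished())) {
        // back off briefly before asking again; the interval is illustrative only
        Thread.sleep(500L);
        status = controllerFacade.getProvenanceQuery(provenanceId, Boolean.FALSE, Boolean.TRUE);
    }
    for (final ProvenanceEventDTO event : status.getResults().getProvenanceEvents()) {
        System.out.println(event.getEventTime() + " " + event.getId());
    }
}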

Example 3 with ProvenanceDTO

Use of org.apache.nifi.web.api.dto.provenance.ProvenanceDTO in project nifi by apache.

In the class ProvenanceQueryEndpointMerger, the method merge:

@Override
public NodeResponse merge(URI uri, String method, Set<NodeResponse> successfulResponses, Set<NodeResponse> problematicResponses, NodeResponse clientResponse) {
    if (!canHandle(uri, method)) {
        throw new IllegalArgumentException("Cannot use Endpoint Mapper of type " + getClass().getSimpleName() + " to map responses for URI " + uri + ", HTTP Method " + method);
    }
    final ProvenanceEntity responseEntity = clientResponse.getClientResponse().readEntity(ProvenanceEntity.class);
    final ProvenanceDTO dto = responseEntity.getProvenance();
    final Map<NodeIdentifier, ProvenanceDTO> dtoMap = new HashMap<>();
    for (final NodeResponse nodeResponse : successfulResponses) {
        final ProvenanceEntity nodeResponseEntity = nodeResponse == clientResponse ? responseEntity : nodeResponse.getClientResponse().readEntity(ProvenanceEntity.class);
        final ProvenanceDTO nodeDto = nodeResponseEntity.getProvenance();
        dtoMap.put(nodeResponse.getNodeId(), nodeDto);
    }
    mergeResponses(dto, dtoMap, successfulResponses, problematicResponses);
    return new NodeResponse(clientResponse, responseEntity);
}
Also used : HashMap(java.util.HashMap) NodeIdentifier(org.apache.nifi.cluster.protocol.NodeIdentifier) NodeResponse(org.apache.nifi.cluster.manager.NodeResponse) ProvenanceEntity(org.apache.nifi.web.api.entity.ProvenanceEntity) ProvenanceDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceDTO)

Example 4 with ProvenanceDTO

Use of org.apache.nifi.web.api.dto.provenance.ProvenanceDTO in project nifi by apache.

In the class ProvenanceQueryEndpointMerger, the method mergeResponses:

protected void mergeResponses(ProvenanceDTO clientDto, Map<NodeIdentifier, ProvenanceDTO> dtoMap, Set<NodeResponse> successfulResponses, Set<NodeResponse> problematicResponses) {
    final ProvenanceResultsDTO results = clientDto.getResults();
    final ProvenanceRequestDTO request = clientDto.getRequest();
    final List<ProvenanceEventDTO> allResults = new ArrayList<>(1024);
    final Set<String> errors = new HashSet<>();
    Date oldestEventDate = new Date();
    int percentageComplete = 0;
    boolean finished = true;
    long totalRecords = 0;
    for (final Map.Entry<NodeIdentifier, ProvenanceDTO> entry : dtoMap.entrySet()) {
        final NodeIdentifier nodeIdentifier = entry.getKey();
        final String nodeAddress = nodeIdentifier.getApiAddress() + ":" + nodeIdentifier.getApiPort();
        final ProvenanceDTO nodeDto = entry.getValue();
        final ProvenanceResultsDTO nodeResultDto = nodeDto.getResults();
        if (nodeResultDto != null && nodeResultDto.getProvenanceEvents() != null) {
            // increment the total number of records
            totalRecords += nodeResultDto.getTotalCount();
            // populate the cluster identifier
            for (final ProvenanceEventDTO eventDto : nodeResultDto.getProvenanceEvents()) {
                // only set the node id and address if they have not already been populated; they may
                // have been set by an earlier merge, e.g. when replicated via the Cluster Coordinator.
                if (eventDto.getClusterNodeId() == null || eventDto.getClusterNodeAddress() == null) {
                    eventDto.setClusterNodeId(nodeIdentifier.getId());
                    eventDto.setClusterNodeAddress(nodeAddress);
                    // add node identifier to the event's id so that it is unique across cluster
                    eventDto.setId(nodeIdentifier.getId() + eventDto.getId());
                }
                allResults.add(eventDto);
            }
        }
        if (nodeResultDto.getOldestEvent() != null && nodeResultDto.getOldestEvent().before(oldestEventDate)) {
            oldestEventDate = nodeResultDto.getOldestEvent();
        }
        if (nodeResultDto.getErrors() != null) {
            for (final String error : nodeResultDto.getErrors()) {
                errors.add(nodeAddress + " -- " + error);
            }
        }
        percentageComplete += nodeDto.getPercentCompleted();
        if (!nodeDto.isFinished()) {
            finished = false;
        }
    }
    percentageComplete /= dtoMap.size();
    // consider any problematic responses as errors
    for (final NodeResponse problematicResponse : problematicResponses) {
        final NodeIdentifier problemNode = problematicResponse.getNodeId();
        final String problemNodeAddress = problemNode.getApiAddress() + ":" + problemNode.getApiPort();
        errors.add(String.format("%s -- Request did not complete successfully (Status code: %s)", problemNodeAddress, problematicResponse.getStatus()));
    }
    // Since we get back up to the maximum number of results from each node, we need to sort those values and then
    // grab only the first X number of them. We do a sort based on time, such that the newest are included.
    // If 2 events have the same timestamp, we do a secondary sort based on Cluster Node Identifier. If those are
    // equal, we perform a tertiary sort based on the event id
    Collections.sort(allResults, new Comparator<ProvenanceEventDTO>() {

        @Override
        public int compare(final ProvenanceEventDTO o1, final ProvenanceEventDTO o2) {
            final int eventTimeComparison = o1.getEventTime().compareTo(o2.getEventTime());
            if (eventTimeComparison != 0) {
                return -eventTimeComparison;
            }
            final String nodeId1 = o1.getClusterNodeId();
            final String nodeId2 = o2.getClusterNodeId();
            final int nodeIdComparison;
            if (nodeId1 == null && nodeId2 == null) {
                nodeIdComparison = 0;
            } else if (nodeId1 == null) {
                nodeIdComparison = 1;
            } else if (nodeId2 == null) {
                nodeIdComparison = -1;
            } else {
                nodeIdComparison = -nodeId1.compareTo(nodeId2);
            }
            if (nodeIdComparison != 0) {
                return nodeIdComparison;
            }
            return -Long.compare(o1.getEventId(), o2.getEventId());
        }
    });
    final int maxResults = request.getMaxResults().intValue();
    final List<ProvenanceEventDTO> selectedResults;
    if (allResults.size() < maxResults) {
        selectedResults = allResults;
    } else {
        selectedResults = allResults.subList(0, maxResults);
    }
    // include any errors
    if (errors.size() > 0) {
        results.setErrors(errors);
    }
    if (clientDto.getRequest().getMaxResults() != null && totalRecords >= clientDto.getRequest().getMaxResults()) {
        results.setTotalCount(clientDto.getRequest().getMaxResults().longValue());
        results.setTotal(FormatUtils.formatCount(clientDto.getRequest().getMaxResults().longValue()) + "+");
    } else {
        results.setTotal(FormatUtils.formatCount(totalRecords));
        results.setTotalCount(totalRecords);
    }
    results.setProvenanceEvents(selectedResults);
    results.setOldestEvent(oldestEventDate);
    results.setGenerated(new Date());
    clientDto.setPercentCompleted(percentageComplete);
    clientDto.setFinished(finished);
}
Also used : ArrayList(java.util.ArrayList) NodeResponse(org.apache.nifi.cluster.manager.NodeResponse) ProvenanceResultsDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceResultsDTO) ProvenanceRequestDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceRequestDTO) Date(java.util.Date) ProvenanceEventDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO) NodeIdentifier(org.apache.nifi.cluster.protocol.NodeIdentifier) ProvenanceDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceDTO) HashMap(java.util.HashMap) Map(java.util.Map) HashSet(java.util.HashSet)
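
To make the sort order concrete, a small hypothetical illustration follows; setEventTime and setEventId are assumed setter names (only the corresponding getters appear in the merger above), so treat this strictly as a sketch.

// Hypothetical data, not from the NiFi sources; setter names are assumed from the usual DTO conventions.
static void illustrateSortOrder() {
    final ProvenanceEventDTO newer = new ProvenanceEventDTO();
    newer.setEventTime(new Date(2_000L));
    newer.setClusterNodeId("node-1");
    newer.setEventId(10L);

    final ProvenanceEventDTO older = new ProvenanceEventDTO();
    older.setEventTime(new Date(1_000L));
    older.setClusterNodeId("node-2");
    older.setEventId(20L);

    // Sorted with the comparator above, 'newer' precedes 'older': event time is compared first
    // and ordered newest-first; node id and event id only break ties between equal timestamps.
}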

Example 5 with ProvenanceDTO

Use of org.apache.nifi.web.api.dto.provenance.ProvenanceDTO in project nifi by apache.

In the class ProvenanceResource, the method submitProvenanceRequest:

/**
 * Creates provenance using the specified query criteria.
 *
 * @param httpServletRequest request
 * @param requestProvenanceEntity   A provenanceEntity
 * @return A provenanceEntity
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
// necessary due to bug in swagger
@Path("")
@ApiOperation(value = "Submits a provenance query", notes = "Provenance queries may be long running so this endpoint submits a request. The response will include the " + "current state of the query. If the request is not completed the URI in the response can be used at a " + "later time to get the updated state of the query. Once the query has completed the provenance request " + "should be deleted by the client who originally submitted it.", response = ProvenanceEntity.class, authorizations = { @Authorization(value = "Read - /provenance"), @Authorization(value = "Read - /data/{component-type}/{uuid}") })
@ApiResponses(value = { @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."), @ApiResponse(code = 401, message = "Client could not be authenticated."), @ApiResponse(code = 403, message = "Client is not authorized to make this request."), @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.") })
public Response submitProvenanceRequest(@Context final HttpServletRequest httpServletRequest, @ApiParam(value = "The provenance query details.", required = true) ProvenanceEntity requestProvenanceEntity) {
    // check the request
    if (requestProvenanceEntity == null) {
        requestProvenanceEntity = new ProvenanceEntity();
    }
    // get the provenance
    final ProvenanceDTO requestProvenanceDto;
    if (requestProvenanceEntity.getProvenance() != null) {
        requestProvenanceDto = requestProvenanceEntity.getProvenance();
    } else {
        requestProvenanceDto = new ProvenanceDTO();
        requestProvenanceEntity.setProvenance(requestProvenanceDto);
    }
    // replicate if cluster manager
    if (isReplicateRequest()) {
        // change content type to JSON for serializing entity
        final Map<String, String> headersToOverride = new HashMap<>();
        headersToOverride.put("content-type", MediaType.APPLICATION_JSON);
        // determine where this request should be sent
        if (requestProvenanceDto.getRequest() == null || requestProvenanceDto.getRequest().getClusterNodeId() == null) {
            // replicate to all nodes
            return replicate(HttpMethod.POST, requestProvenanceEntity, headersToOverride);
        } else {
            return replicate(HttpMethod.POST, requestProvenanceEntity, requestProvenanceDto.getRequest().getClusterNodeId(), headersToOverride);
        }
    }
    return withWriteLock(serviceFacade, requestProvenanceEntity, lookup -> authorizeProvenanceRequest(), null, (provenanceEntity) -> {
        final ProvenanceDTO provenanceDTO = provenanceEntity.getProvenance();
        // ensure the id is the same across the cluster
        final String provenanceId = generateUuid();
        // set the provenance id accordingly
        provenanceDTO.setId(provenanceId);
        // submit the provenance request
        final ProvenanceDTO dto = serviceFacade.submitProvenance(provenanceDTO);
        populateRemainingProvenanceContent(dto);
        // set the cluster id if necessary
        if (provenanceDTO.getRequest() != null && provenanceDTO.getRequest().getClusterNodeId() != null) {
            dto.getRequest().setClusterNodeId(provenanceDTO.getRequest().getClusterNodeId());
        }
        // create the response entity
        final ProvenanceEntity entity = new ProvenanceEntity();
        entity.setProvenance(dto);
        // generate the response
        return generateCreatedResponse(URI.create(dto.getUri()), entity).build();
    });
}
Also used : HashMap(java.util.HashMap) ProvenanceEntity(org.apache.nifi.web.api.entity.ProvenanceEntity) ProvenanceDTO(org.apache.nifi.web.api.dto.provenance.ProvenanceDTO) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST) Consumes(javax.ws.rs.Consumes) Produces(javax.ws.rs.Produces) ApiOperation(io.swagger.annotations.ApiOperation) ApiResponses(io.swagger.annotations.ApiResponses)
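
A hedged sketch of the entity a client might build and serialize to JSON as the body of this POST; setMaxResults is assumed from the getMaxResults() calls seen in the merger, and the search-term key and value are purely illustrative.

// Hypothetical client-side construction, not from the NiFi sources.
static ProvenanceEntity buildExampleProvenanceRequest() {
    final ProvenanceRequestDTO provenanceRequest = new ProvenanceRequestDTO();
    provenanceRequest.setMaxResults(100); // assumed setter, mirroring getMaxResults() used elsewhere

    final Map<String, String> searchTerms = new HashMap<>();
    searchTerms.put("ProcessorID", "hypothetical-processor-uuid"); // illustrative search term only
    provenanceRequest.setSearchTerms(searchTerms);

    final ProvenanceDTO provenance = new ProvenanceDTO();
    provenance.setRequest(provenanceRequest);

    final ProvenanceEntity requestEntity = new ProvenanceEntity();
    requestEntity.setProvenance(provenance);

    // Serialized to JSON, this entity becomes the POST body for the endpoint above. Per the
    // ApiOperation notes, the client should poll the returned URI (the GET endpoint of Example 1)
    // and delete the query once it has completed.
    return requestEntity;
}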

Aggregations

ProvenanceDTO (org.apache.nifi.web.api.dto.provenance.ProvenanceDTO) 5
HashMap (java.util.HashMap) 4
ProvenanceEntity (org.apache.nifi.web.api.entity.ProvenanceEntity) 3
ApiOperation (io.swagger.annotations.ApiOperation) 2
ApiResponses (io.swagger.annotations.ApiResponses) 2
ArrayList (java.util.ArrayList) 2
Date (java.util.Date) 2
HashSet (java.util.HashSet) 2
Consumes (javax.ws.rs.Consumes) 2
Path (javax.ws.rs.Path) 2
Produces (javax.ws.rs.Produces) 2
NodeResponse (org.apache.nifi.cluster.manager.NodeResponse) 2
NodeIdentifier (org.apache.nifi.cluster.protocol.NodeIdentifier) 2
ProvenanceEventDTO (org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO) 2
ProvenanceRequestDTO (org.apache.nifi.web.api.dto.provenance.ProvenanceRequestDTO) 2
ProvenanceResultsDTO (org.apache.nifi.web.api.dto.provenance.ProvenanceResultsDTO) 2
IOException (java.io.IOException) 1
Map (java.util.Map) 1
GET (javax.ws.rs.GET) 1
POST (javax.ws.rs.POST) 1