Example usage of org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO in the Apache NiFi project: the submitReplay method of the ProvenanceEventResource class.
/**
 * Creates a new replay request for the content associated with the specified provenance event id.
 *
 * @param httpServletRequest request
 * @param replayRequestEntity The replay request; must carry the event id, and the cluster node id when clustered
 * @return A provenanceEventEntity wrapping the replayed event
 * @throws IllegalArgumentException if the event id is missing, or if clustered and no node id is supplied
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@Path("replays")
@ApiOperation(value = "Replays content from a provenance event", response = ProvenanceEventEntity.class, authorizations = { @Authorization(value = "Read Component Data - /data/{component-type}/{uuid}"), @Authorization(value = "Write Component Data - /data/{component-type}/{uuid}") })
@ApiResponses(value = { @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."), @ApiResponse(code = 401, message = "Client could not be authenticated."), @ApiResponse(code = 403, message = "Client is not authorized to make this request."), @ApiResponse(code = 404, message = "The specified resource could not be found."), @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.") })
public Response submitReplay(@Context final HttpServletRequest httpServletRequest, @ApiParam(value = "The replay request.", required = true) final SubmitReplayRequestEntity replayRequestEntity) {
    // ensure the event id is specified
    if (replayRequestEntity == null || replayRequestEntity.getEventId() == null) {
        throw new IllegalArgumentException("The id of the event must be specified.");
    }

    // replicate if cluster manager
    if (isReplicateRequest()) {
        // determine where this request should be sent
        if (replayRequestEntity.getClusterNodeId() == null) {
            throw new IllegalArgumentException("The id of the node in the cluster is required.");
        } else {
            return replicate(HttpMethod.POST, replayRequestEntity, replayRequestEntity.getClusterNodeId());
        }
    }

    // handle expects request (usually from the cluster manager)
    final String expects = httpServletRequest.getHeader(RequestReplicator.REQUEST_VALIDATION_HTTP_HEADER);
    if (expects != null) {
        return generateContinueResponse().build();
    }

    // submit the provenance replay request
    final ProvenanceEventDTO event = serviceFacade.submitReplay(replayRequestEntity.getEventId());
    event.setClusterNodeId(replayRequestEntity.getClusterNodeId());

    // populate the cluster node address
    final ClusterCoordinator coordinator = getClusterCoordinator();
    if (coordinator != null) {
        // BUG FIX: getNodeIdentifier may return null for a null/unknown node id;
        // the original dereferenced the result unconditionally and could NPE here
        final NodeIdentifier nodeId = coordinator.getNodeIdentifier(replayRequestEntity.getClusterNodeId());
        if (nodeId != null) {
            event.setClusterNodeAddress(nodeId.getApiAddress() + ":" + nodeId.getApiPort());
        }
    }

    // create a response entity
    final ProvenanceEventEntity entity = new ProvenanceEventEntity();
    entity.setProvenanceEvent(event);

    // generate the response
    URI uri = URI.create(generateResourceUri("provenance-events", event.getId()));
    return generateCreatedResponse(uri, entity).build();
}
Example usage of org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO in the Apache NiFi project: the getProvenanceQuery method of the ControllerFacade class.
/**
 * Retrieves the results of a previously submitted provenance query.
 *
 * @param provenanceId the id of the provenance query submission
 * @param summarize whether the returned events should contain only summary fields
 * @param incrementalResults whether to include partial results before the query has finished;
 *                           a null value is treated as true
 * @return the results of the provenance query
 * @throws ResourceNotFoundException if the query submission cannot be found (it may have been purged)
 * @throws NiFiCoreException if an I/O error occurs while reading the provenance repository
 */
public ProvenanceDTO getProvenanceQuery(String provenanceId, Boolean summarize, Boolean incrementalResults) {
    try {
        // get the query to the provenance repository
        final ProvenanceRepository provenanceRepository = flowController.getProvenanceRepository();
        final QuerySubmission querySubmission = provenanceRepository.retrieveQuerySubmission(provenanceId, NiFiUserUtils.getNiFiUser());

        // ensure the query results could be found
        if (querySubmission == null) {
            throw new ResourceNotFoundException("Cannot find the results for the specified provenance requests. Results may have been purged.");
        }

        // get the original query and the results
        final Query query = querySubmission.getQuery();
        final QueryResult queryResult = querySubmission.getResult();

        // build the response
        final ProvenanceDTO provenanceDto = new ProvenanceDTO();
        final ProvenanceRequestDTO requestDto = new ProvenanceRequestDTO();
        final ProvenanceResultsDTO resultsDto = new ProvenanceResultsDTO();

        // include the original request and results
        provenanceDto.setRequest(requestDto);
        provenanceDto.setResults(resultsDto);

        // convert the original request
        requestDto.setStartDate(query.getStartDate());
        requestDto.setEndDate(query.getEndDate());
        requestDto.setMinimumFileSize(query.getMinFileSize());
        requestDto.setMaximumFileSize(query.getMaxFileSize());
        requestDto.setMaxResults(query.getMaxResults());
        if (query.getSearchTerms() != null) {
            final Map<String, String> searchTerms = new HashMap<>();
            for (final SearchTerm searchTerm : query.getSearchTerms()) {
                searchTerms.put(searchTerm.getSearchableField().getFriendlyName(), searchTerm.getValue());
            }
            requestDto.setSearchTerms(searchTerms);
        }

        // convert the provenance
        provenanceDto.setId(query.getIdentifier());
        provenanceDto.setSubmissionTime(querySubmission.getSubmissionTime());
        provenanceDto.setExpiration(queryResult.getExpiration());
        provenanceDto.setFinished(queryResult.isFinished());
        provenanceDto.setPercentCompleted(queryResult.getPercentComplete());

        // convert each event; incremental results default to true when unspecified
        final boolean includeResults = incrementalResults == null || Boolean.TRUE.equals(incrementalResults);
        if (includeResults || queryResult.isFinished()) {
            final List<ProvenanceEventDTO> events = new ArrayList<>();
            for (final ProvenanceEventRecord record : queryResult.getMatchingEvents()) {
                events.add(createProvenanceEventDto(record, Boolean.TRUE.equals(summarize)));
            }
            resultsDto.setProvenanceEvents(events);
        }

        // when the hit count meets or exceeds the requested maximum, report the total as "<max>+"
        if (requestDto.getMaxResults() != null && queryResult.getTotalHitCount() >= requestDto.getMaxResults()) {
            resultsDto.setTotalCount(requestDto.getMaxResults().longValue());
            resultsDto.setTotal(FormatUtils.formatCount(requestDto.getMaxResults().longValue()) + "+");
        } else {
            resultsDto.setTotalCount(queryResult.getTotalHitCount());
            resultsDto.setTotal(FormatUtils.formatCount(queryResult.getTotalHitCount()));
        }

        // include any errors
        if (queryResult.getError() != null) {
            final Set<String> errors = new HashSet<>();
            errors.add(queryResult.getError());
            resultsDto.setErrors(errors);
        }

        // set the generated timestamp
        final Date now = new Date();
        resultsDto.setGenerated(now);
        resultsDto.setTimeOffset(TimeZone.getDefault().getOffset(now.getTime()));

        // get the oldest available event time
        final List<ProvenanceEventRecord> firstEvent = provenanceRepository.getEvents(0, 1);
        if (!firstEvent.isEmpty()) {
            resultsDto.setOldestEvent(new Date(firstEvent.get(0).getEventTime()));
        }

        // NOTE: the redundant second provenanceDto.setResults(resultsDto) call that the
        // original performed here has been removed; resultsDto was already attached above
        return provenanceDto;
    } catch (final IOException ioe) {
        throw new NiFiCoreException("An error occurred while searching the provenance events.", ioe);
    }
}
Example usage of org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO in the Apache NiFi project: the createProvenanceEventDto method of the ControllerFacade class.
/**
 * Creates a ProvenanceEventDTO for the specified ProvenanceEventRecord.
 *
 * @param event the provenance event record to convert
 * @param summarize when true only the summary fields (id, time, type, flowfile uuid, size,
 *                  component id/type) are populated; when false attributes, content claims,
 *                  replay availability, and lineage details are included as well
 * @return the populated event DTO
 */
private ProvenanceEventDTO createProvenanceEventDto(final ProvenanceEventRecord event, final boolean summarize) {
    final ProvenanceEventDTO dto = new ProvenanceEventDTO();

    // summary fields, always populated
    dto.setId(String.valueOf(event.getEventId()));
    dto.setEventId(event.getEventId());
    dto.setEventTime(new Date(event.getEventTime()));
    dto.setEventType(event.getEventType().name());
    dto.setFlowFileUuid(event.getFlowFileUuid());
    dto.setFileSize(FormatUtils.formatDataSize(event.getFileSize()));
    dto.setFileSizeBytes(event.getFileSize());
    dto.setComponentId(event.getComponentId());
    dto.setComponentType(event.getComponentType());

    // sets the component details if it can find the component still in the flow
    setComponentDetails(dto);

    // only include all details if not summarizing
    if (!summarize) {
        // a single Collator reused for attribute and uuid sorting (the original
        // constructed a new instance for every comparison/sort)
        final Collator collator = Collator.getInstance(Locale.US);

        // convert the attributes, sorted by attribute name
        final Comparator<AttributeDTO> attributeComparator = new Comparator<AttributeDTO>() {
            @Override
            public int compare(AttributeDTO a1, AttributeDTO a2) {
                return collator.compare(a1.getName(), a2.getName());
            }
        };
        final SortedSet<AttributeDTO> attributes = new TreeSet<>(attributeComparator);

        final Map<String, String> updatedAttrs = event.getUpdatedAttributes();
        final Map<String, String> previousAttrs = event.getPreviousAttributes();

        // add previous attributes that haven't been modified
        for (final Map.Entry<String, String> entry : previousAttrs.entrySet()) {
            // don't add any attributes that have been updated; we will do that next
            if (updatedAttrs.containsKey(entry.getKey())) {
                continue;
            }

            final AttributeDTO attribute = new AttributeDTO();
            attribute.setName(entry.getKey());
            attribute.setValue(entry.getValue());
            attribute.setPreviousValue(entry.getValue());
            attributes.add(attribute);
        }

        // add all of the updated attributes
        for (final Map.Entry<String, String> entry : updatedAttrs.entrySet()) {
            final AttributeDTO attribute = new AttributeDTO();
            attribute.setName(entry.getKey());
            attribute.setValue(entry.getValue());
            attribute.setPreviousValue(previousAttrs.get(entry.getKey()));
            attributes.add(attribute);
        }

        // additional event details
        dto.setAlternateIdentifierUri(event.getAlternateIdentifierUri());
        dto.setAttributes(attributes);
        dto.setTransitUri(event.getTransitUri());
        dto.setSourceSystemFlowFileId(event.getSourceSystemFlowFileIdentifier());
        dto.setRelationship(event.getRelationship());
        dto.setDetails(event.getDetails());

        final ContentAvailability contentAvailability = flowController.getContentAvailability(event);

        // content claims: "input" maps to the previous claim, "output" to the current claim
        dto.setContentEqual(contentAvailability.isContentSame());
        dto.setInputContentAvailable(contentAvailability.isInputAvailable());
        dto.setInputContentClaimSection(event.getPreviousContentClaimSection());
        dto.setInputContentClaimContainer(event.getPreviousContentClaimContainer());
        dto.setInputContentClaimIdentifier(event.getPreviousContentClaimIdentifier());
        dto.setInputContentClaimOffset(event.getPreviousContentClaimOffset());
        dto.setInputContentClaimFileSizeBytes(event.getPreviousFileSize());
        dto.setOutputContentAvailable(contentAvailability.isOutputAvailable());
        dto.setOutputContentClaimSection(event.getContentClaimSection());
        dto.setOutputContentClaimContainer(event.getContentClaimContainer());
        dto.setOutputContentClaimIdentifier(event.getContentClaimIdentifier());
        dto.setOutputContentClaimOffset(event.getContentClaimOffset());
        dto.setOutputContentClaimFileSize(FormatUtils.formatDataSize(event.getFileSize()));
        dto.setOutputContentClaimFileSizeBytes(event.getFileSize());

        // format the previous file sizes if possible
        if (event.getPreviousFileSize() != null) {
            dto.setInputContentClaimFileSize(FormatUtils.formatDataSize(event.getPreviousFileSize()));
        }

        // determine if authorized for event replay; replay requires both replayable
        // content and an approved authorization result
        final AuthorizationResult replayAuthorized = checkAuthorizationForReplay(event);
        dto.setReplayAvailable(contentAvailability.isReplayable() && Result.Approved.equals(replayAuthorized.getResult()));
        dto.setReplayExplanation(contentAvailability.isReplayable() && !Result.Approved.equals(replayAuthorized.getResult()) ? replayAuthorized.getExplanation() : contentAvailability.getReasonNotReplayable());
        dto.setSourceConnectionIdentifier(event.getSourceQueueIdentifier());

        // event duration
        if (event.getEventDuration() >= 0) {
            dto.setEventDuration(event.getEventDuration());
        }

        // lineage duration
        if (event.getLineageStartDate() > 0) {
            final long lineageDuration = event.getEventTime() - event.getLineageStartDate();
            dto.setLineageDuration(lineageDuration);
        }

        // parent uuids, sorted; guard against a null list from the record to avoid an NPE
        final List<String> parentUuids = event.getParentUuids() == null ? new ArrayList<String>() : new ArrayList<>(event.getParentUuids());
        Collections.sort(parentUuids, collator);
        dto.setParentUuids(parentUuids);

        // child uuids, sorted; same null guard as above
        final List<String> childUuids = event.getChildUuids() == null ? new ArrayList<String>() : new ArrayList<>(event.getChildUuids());
        Collections.sort(childUuids, collator);
        dto.setChildUuids(childUuids);
    }

    return dto;
}
Example usage of org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO in the Apache NiFi project: the mergeResponses method of the ProvenanceQueryEndpointMerger class.
/**
 * Merges per-node provenance query results into the client DTO: combines events, errors,
 * total counts, oldest-event timestamp, and completion state, then sorts the merged events
 * (newest first) and trims them to the requested maximum.
 *
 * @param clientDto the DTO returned to the client; updated in place
 * @param dtoMap the per-node DTOs keyed by node identifier
 * @param successfulResponses responses that completed successfully (unused here)
 * @param problematicResponses responses that failed; reported as errors in the results
 */
protected void mergeResponses(ProvenanceDTO clientDto, Map<NodeIdentifier, ProvenanceDTO> dtoMap, Set<NodeResponse> successfulResponses, Set<NodeResponse> problematicResponses) {
    final ProvenanceResultsDTO results = clientDto.getResults();
    final ProvenanceRequestDTO request = clientDto.getRequest();
    final List<ProvenanceEventDTO> allResults = new ArrayList<>(1024);
    final Set<String> errors = new HashSet<>();
    Date oldestEventDate = new Date();
    int percentageComplete = 0;
    boolean finished = true;
    long totalRecords = 0;

    for (final Map.Entry<NodeIdentifier, ProvenanceDTO> entry : dtoMap.entrySet()) {
        final NodeIdentifier nodeIdentifier = entry.getKey();
        final String nodeAddress = nodeIdentifier.getApiAddress() + ":" + nodeIdentifier.getApiPort();

        final ProvenanceDTO nodeDto = entry.getValue();
        final ProvenanceResultsDTO nodeResultDto = nodeDto.getResults();

        // BUG FIX: the original null-checked nodeResultDto only around the event loop but
        // dereferenced it unconditionally for the oldest event and errors, risking an NPE;
        // all accesses are now inside the guard
        if (nodeResultDto != null) {
            if (nodeResultDto.getProvenanceEvents() != null) {
                // increment the total number of records
                totalRecords += nodeResultDto.getTotalCount();

                // populate the cluster identifier
                for (final ProvenanceEventDTO eventDto : nodeResultDto.getProvenanceEvents()) {
                    // from the Cluster Coordinator.
                    if (eventDto.getClusterNodeId() == null || eventDto.getClusterNodeAddress() == null) {
                        eventDto.setClusterNodeId(nodeIdentifier.getId());
                        eventDto.setClusterNodeAddress(nodeAddress);
                        // add node identifier to the event's id so that it is unique across cluster
                        eventDto.setId(nodeIdentifier.getId() + eventDto.getId());
                    }
                    allResults.add(eventDto);
                }
            }

            // track the oldest event across all nodes
            if (nodeResultDto.getOldestEvent() != null && nodeResultDto.getOldestEvent().before(oldestEventDate)) {
                oldestEventDate = nodeResultDto.getOldestEvent();
            }

            // prefix node errors with the node's address so the origin is clear
            if (nodeResultDto.getErrors() != null) {
                for (final String error : nodeResultDto.getErrors()) {
                    errors.add(nodeAddress + " -- " + error);
                }
            }
        }

        percentageComplete += nodeDto.getPercentCompleted();
        if (!nodeDto.isFinished()) {
            finished = false;
        }
    }

    // average the completion percentage; guard against an empty dtoMap (division by zero)
    if (!dtoMap.isEmpty()) {
        percentageComplete /= dtoMap.size();
    }

    // consider any problematic responses as errors
    for (final NodeResponse problematicResponse : problematicResponses) {
        final NodeIdentifier problemNode = problematicResponse.getNodeId();
        final String problemNodeAddress = problemNode.getApiAddress() + ":" + problemNode.getApiPort();
        errors.add(String.format("%s -- Request did not complete successfully (Status code: %s)", problemNodeAddress, problematicResponse.getStatus()));
    }

    // Since we get back up to the maximum number of results from each node, we need to sort those values and then
    // grab only the first X number of them. We do a sort based on time, such that the newest are included.
    // If 2 events have the same timestamp, we do a secondary sort based on Cluster Node Identifier. If those are
    // equal, we perform a tertiary sort based on the event id
    Collections.sort(allResults, new Comparator<ProvenanceEventDTO>() {
        @Override
        public int compare(final ProvenanceEventDTO o1, final ProvenanceEventDTO o2) {
            final int eventTimeComparison = o1.getEventTime().compareTo(o2.getEventTime());
            if (eventTimeComparison != 0) {
                return -eventTimeComparison;
            }

            final String nodeId1 = o1.getClusterNodeId();
            final String nodeId2 = o2.getClusterNodeId();
            final int nodeIdComparison;
            if (nodeId1 == null && nodeId2 == null) {
                nodeIdComparison = 0;
            } else if (nodeId1 == null) {
                nodeIdComparison = 1;
            } else if (nodeId2 == null) {
                nodeIdComparison = -1;
            } else {
                nodeIdComparison = -nodeId1.compareTo(nodeId2);
            }

            if (nodeIdComparison != 0) {
                return nodeIdComparison;
            }

            return -Long.compare(o1.getEventId(), o2.getEventId());
        }
    });

    // BUG FIX: the original called request.getMaxResults().intValue() unconditionally even
    // though the code below treats getMaxResults() as nullable; default to keeping all results
    final int maxResults = request.getMaxResults() == null ? allResults.size() : request.getMaxResults().intValue();
    final List<ProvenanceEventDTO> selectedResults;
    if (allResults.size() < maxResults) {
        selectedResults = allResults;
    } else {
        selectedResults = allResults.subList(0, maxResults);
    }

    // include any errors
    if (errors.size() > 0) {
        results.setErrors(errors);
    }

    // when the merged total meets or exceeds the requested maximum, report the total as "<max>+"
    if (clientDto.getRequest().getMaxResults() != null && totalRecords >= clientDto.getRequest().getMaxResults()) {
        results.setTotalCount(clientDto.getRequest().getMaxResults().longValue());
        results.setTotal(FormatUtils.formatCount(clientDto.getRequest().getMaxResults().longValue()) + "+");
    } else {
        results.setTotal(FormatUtils.formatCount(totalRecords));
        results.setTotalCount(totalRecords);
    }

    results.setProvenanceEvents(selectedResults);
    results.setOldestEvent(oldestEventDate);
    results.setGenerated(new Date());
    clientDto.setPercentCompleted(percentageComplete);
    clientDto.setFinished(finished);
}
Example usage of org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO in the Apache NiFi project: the getProvenanceEvent method of the ProvenanceEventResource class.
/**
 * Gets the details for a provenance event.
 *
 * @param id The id of the event
 * @param clusterNodeId The id of node in the cluster that the event/flowfile originated from. This is only required when clustered.
 * @return A provenanceEventEntity
 * @throws IllegalArgumentException if the event id is missing, or if clustered and no cluster node id is supplied
 */
@GET
@Consumes(MediaType.WILDCARD)
@Produces(MediaType.APPLICATION_JSON)
@Path("{id}")
@ApiOperation(value = "Gets a provenance event", response = ProvenanceEventEntity.class, authorizations = { @Authorization(value = "Read Component Data - /data/{component-type}/{uuid}") })
@ApiResponses(value = { @ApiResponse(code = 400, message = "NiFi was unable to complete the request because it was invalid. The request should not be retried without modification."), @ApiResponse(code = 401, message = "Client could not be authenticated."), @ApiResponse(code = 403, message = "Client is not authorized to make this request."), @ApiResponse(code = 404, message = "The specified resource could not be found."), @ApiResponse(code = 409, message = "The request was valid but NiFi was not in the appropriate state to process it. Retrying the same request later may be successful.") })
public Response getProvenanceEvent(@ApiParam(value = "The id of the node where this event exists if clustered.", required = false) @QueryParam("clusterNodeId") final String clusterNodeId, @ApiParam(value = "The provenance event id.", required = true) @PathParam("id") final LongParameter id) {
    // ensure the id is specified
    if (id == null) {
        throw new IllegalArgumentException("Provenance event id must be specified.");
    }

    // replicate if cluster manager
    if (isReplicateRequest()) {
        // since we're cluster we must specify the cluster node identifier
        if (clusterNodeId == null) {
            throw new IllegalArgumentException("The cluster node identifier must be specified.");
        }
        return replicate(HttpMethod.GET, clusterNodeId);
    }

    // get the provenance event
    final ProvenanceEventDTO event = serviceFacade.getProvenanceEvent(id.getLong());
    event.setClusterNodeId(clusterNodeId);

    // populate the cluster node address
    final ClusterCoordinator coordinator = getClusterCoordinator();
    if (coordinator != null) {
        // BUG FIX: getNodeIdentifier may return null for a null/unknown node id;
        // the original dereferenced the result unconditionally and could NPE here
        final NodeIdentifier nodeId = coordinator.getNodeIdentifier(clusterNodeId);
        if (nodeId != null) {
            event.setClusterNodeAddress(nodeId.getApiAddress() + ":" + nodeId.getApiPort());
        }
    }

    // create a response entity
    final ProvenanceEventEntity entity = new ProvenanceEventEntity();
    entity.setProvenanceEvent(event);

    // generate the response
    return generateOkResponse(entity).build();
}
Aggregations