Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
The class StandardProvenanceReporter, method expire.

void expire(final FlowFile flowFile, final String details) {
    try {
        final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.EXPIRE).setDetails(details).build();
        events.add(record);
    } catch (final Exception e) {
        logger.error("Failed to generate Provenance Event due to " + e);
        if (logger.isDebugEnabled()) {
            logger.error("", e);
        }
    }
}
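
The pattern worth noting here is that provenance generation is best-effort: any exception thrown while building the event is logged and swallowed so that it never fails the surrounding session work. Below is a minimal, self-contained sketch of that guard pattern in plain Java; the class name and the Supplier-based API are illustrative, not NiFi's.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hedged sketch: collect events produced by a supplier, but never let a failure in
// event construction propagate to the caller. All names here are illustrative.
public class BestEffortEventCollector<T> {

    private static final Logger logger = LoggerFactory.getLogger(BestEffortEventCollector.class);

    private final List<T> events = new ArrayList<>();

    public void addIfPossible(final Supplier<T> eventSupplier) {
        try {
            events.add(eventSupplier.get());
        } catch (final Exception e) {
            // Mirror the reporter above: ERROR with the exception summary, full stack trace only at DEBUG.
            logger.error("Failed to generate event due to {}", e.toString());
            if (logger.isDebugEnabled()) {
                logger.error("", e);
            }
        }
    }

    public List<T> getEvents() {
        return events;
    }
}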
Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
The class StandardProvenanceReporter, method receive.

@Override
public void receive(final FlowFile flowFile, final String transitUri, final String sourceSystemFlowFileIdentifier, final String details, final long transmissionMillis) {
    verifyFlowFileKnown(flowFile);
    try {
        final ProvenanceEventRecord record = build(flowFile, ProvenanceEventType.RECEIVE)
                .setTransitUri(transitUri)
                .setSourceSystemFlowFileIdentifier(sourceSystemFlowFileIdentifier)
                .setEventDuration(transmissionMillis)
                .setDetails(details)
                .build();
        events.add(record);
    } catch (final Exception e) {
        logger.error("Failed to generate Provenance Event due to " + e);
        if (logger.isDebugEnabled()) {
            logger.error("", e);
        }
    }
}
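
Processor code does not construct StandardProvenanceReporter itself; it reports a RECEIVE through the ProvenanceReporter obtained from the ProcessSession, which delegates to the overridden method above. A hedged onTrigger sketch follows; the processor name, transit URI, source-system identifier, and details string are illustrative values.

import java.util.Collections;
import java.util.Set;

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;

// Hedged sketch of a processor emitting a RECEIVE provenance event through the session's reporter.
public class ExampleReceiveProcessor extends AbstractProcessor {

    static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("FlowFiles received from the remote system")
            .build();

    @Override
    public Set<Relationship> getRelationships() {
        return Collections.singleton(REL_SUCCESS);
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        final long startNanos = System.nanoTime();
        FlowFile flowFile = session.create();
        // ... fetch content from the remote system and write it into the FlowFile ...
        final long transmissionMillis = (System.nanoTime() - startNanos) / 1_000_000;
        session.getProvenanceReporter().receive(flowFile, "https://example.com/data",
                "remote-id-1234", "fetched by ExampleReceiveProcessor", transmissionMillis);
        session.transfer(flowFile, REL_SUCCESS);
    }
}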
Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
The class StandardProvenanceReporter, method migrate.

void migrate(final StandardProvenanceReporter newOwner, final Collection<String> flowFileIds) {
    final Set<ProvenanceEventRecord> toMove = new LinkedHashSet<>();
    for (final ProvenanceEventRecord event : events) {
        if (flowFileIds.contains(event.getFlowFileUuid())) {
            toMove.add(event);
        }
    }
    events.removeAll(toMove);
    newOwner.events.addAll(toMove);
}
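
This migrate is driven by session migration: when a processor hands FlowFiles from one ProcessSession to another (for example to bin them across onTrigger invocations), the pending provenance events for exactly those FlowFile UUIDs must move as well so they are committed by the new owner. A hedged sketch of the calling side, assuming both sessions belong to the same processor:

import java.util.Collection;

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;

// Hedged sketch: hand a batch of FlowFiles, together with their pending provenance
// events, from the session that created them to a longer-lived session.
public class SessionMigrationExample {

    public void handOff(final ProcessSession source, final ProcessSession destination, final Collection<FlowFile> flowFiles) {
        // Moves the FlowFiles and their pending provenance events to 'destination';
        // this is the call path on which the reporter's migrate is expected to run.
        source.migrate(destination, flowFiles);
        // Committing the destination session now persists the migrated events.
        destination.commit();
    }
}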
Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
The class ProvenanceEventConsumer, method filterEvents.

private List<ProvenanceEventRecord> filterEvents(ComponentMapHolder componentMapHolder, List<ProvenanceEventRecord> provenanceEvents) {
    if (isFilteringEnabled()) {
        List<ProvenanceEventRecord> filteredEvents = new ArrayList<>();
        for (ProvenanceEventRecord provenanceEventRecord : provenanceEvents) {
            if (!eventTypesExclude.isEmpty() && eventTypesExclude.contains(provenanceEventRecord.getEventType())) {
                continue;
            }
            if (!eventTypes.isEmpty() && !eventTypes.contains(provenanceEventRecord.getEventType())) {
                continue;
            }
            final String componentId = provenanceEventRecord.getComponentId();
            if (!componentIdsExclude.isEmpty()) {
                if (componentIdsExclude.contains(componentId)) {
                    continue;
                }
                // The component is not excluded by its own ID; check whether it belongs to a process group
                // that is being excluded.
                if (componentMapHolder == null) {
                    continue;
                }
                final String processGroupId = componentMapHolder.getProcessGroupId(componentId, provenanceEventRecord.getComponentType());
                if (!StringUtils.isEmpty(processGroupId)) {
                    // Check if the process group or any parent process group is specified as a target component ID.
                    if (componentIdsExclude.contains(processGroupId)) {
                        continue;
                    }
                    ParentProcessGroupSearchNode parentProcessGroup = componentMapHolder.getProcessGroupParent(processGroupId);
                    while (parentProcessGroup != null && !componentIdsExclude.contains(parentProcessGroup.getId())) {
                        parentProcessGroup = parentProcessGroup.getParent();
                    }
                    if (parentProcessGroup != null) {
                        continue;
                    }
                }
            }
            if (!componentIds.isEmpty() && !componentIds.contains(componentId)) {
                // The component is not listed by its own ID; check whether it belongs to a process group
                // that is being filtered on.
                if (componentMapHolder == null) {
                    continue;
                }
                final String processGroupId = componentMapHolder.getProcessGroupId(componentId, provenanceEventRecord.getComponentType());
                if (StringUtils.isEmpty(processGroupId)) {
                    continue;
                }
                if (!componentIds.contains(processGroupId)) {
                    ParentProcessGroupSearchNode parentProcessGroup = componentMapHolder.getProcessGroupParent(processGroupId);
                    while (parentProcessGroup != null && !componentIds.contains(parentProcessGroup.getId())) {
                        parentProcessGroup = parentProcessGroup.getParent();
                    }
                    if (parentProcessGroup == null) {
                        continue;
                    }
                }
            }
            if (componentTypeRegexExclude != null && componentTypeRegexExclude.matcher(provenanceEventRecord.getComponentType()).matches()) {
                continue;
            }
            if (componentTypeRegex != null && !componentTypeRegex.matcher(provenanceEventRecord.getComponentType()).matches()) {
                continue;
            }
            filteredEvents.add(provenanceEventRecord);
        }
        return filteredEvents;
    } else {
        return provenanceEvents;
    }
}
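
Two details in this filter are easy to miss: exclusion rules are always evaluated before inclusion rules, and when a component ID filter does not match the component directly, the consumer walks up the process-group ancestry until it finds a listed group or runs out of parents. Below is a self-contained sketch of that ancestry walk, using a hypothetical ProcessGroupNode as a stand-in for ParentProcessGroupSearchNode.

import java.util.Set;

// Hedged sketch of the ancestry walk used by filterEvents; ProcessGroupNode is an
// illustrative stand-in for ParentProcessGroupSearchNode.
public class AncestryMatch {

    public static class ProcessGroupNode {
        private final String id;
        private final ProcessGroupNode parent;

        public ProcessGroupNode(final String id, final ProcessGroupNode parent) {
            this.id = id;
            this.parent = parent;
        }

        public String getId() {
            return id;
        }

        public ProcessGroupNode getParent() {
            return parent;
        }
    }

    // Returns true if the given group, or any of its ancestors, appears in targetIds.
    public static boolean groupOrAncestorListed(final ProcessGroupNode group, final Set<String> targetIds) {
        ProcessGroupNode current = group;
        while (current != null && !targetIds.contains(current.getId())) {
            current = current.getParent();
        }
        return current != null;
    }
}

In the exclude branch, a non-null match (group or ancestor listed) means the event is dropped; in the include branch, the event is dropped when no match is found anywhere in the ancestry.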
Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
The class FlowController, method replayFlowFile.

public ProvenanceEventRecord replayFlowFile(final ProvenanceEventRecord event, final NiFiUser user) throws IOException {
    if (event == null) {
        throw new NullPointerException();
    }
    // Check that the event is a valid type.
    final ProvenanceEventType type = event.getEventType();
    if (type == ProvenanceEventType.JOIN) {
        throw new IllegalArgumentException("Cannot replay events that are created from multiple parents");
    }
    // Make sure event has the Content Claim info
    final Long contentSize = event.getPreviousFileSize();
    final String contentClaimId = event.getPreviousContentClaimIdentifier();
    final String contentClaimSection = event.getPreviousContentClaimSection();
    final String contentClaimContainer = event.getPreviousContentClaimContainer();
    if (contentSize == null || contentClaimId == null || contentClaimSection == null || contentClaimContainer == null) {
        throw new IllegalArgumentException("Cannot replay data from Provenance Event because the event does not contain the required Content Claim");
    }
    // Make sure that the source queue exists
    if (event.getSourceQueueIdentifier() == null) {
        throw new IllegalArgumentException("Cannot replay data from Provenance Event because the event does not specify the Source FlowFile Queue");
    }
    final List<Connection> connections = getGroup(getRootGroupId()).findAllConnections();
    FlowFileQueue queue = null;
    for (final Connection connection : connections) {
        if (event.getSourceQueueIdentifier().equals(connection.getIdentifier())) {
            queue = connection.getFlowFileQueue();
            break;
        }
    }
    if (queue == null) {
        throw new IllegalStateException("Cannot replay data from Provenance Event because the Source FlowFile Queue with ID " + event.getSourceQueueIdentifier() + " no longer exists");
    }
    // Create the ContentClaim. To do so, we first need the appropriate Resource Claim. Because we don't know whether or
    // not the Resource Claim is still active, we first call ResourceClaimManager.getResourceClaim. If this returns
    // null, then we know that the Resource Claim is no longer active and can just create a new one that is not writable.
    // It's critical though that we first call getResourceClaim because otherwise, if the Resource Claim is active and we
    // create a new one that is not writable, we could end up archiving or destroying the Resource Claim while it's still
    // being written to by the Content Repository. This is important only because we are creating a FlowFile with this Resource
    // Claim. If, for instance, we are simply creating the claim to request its content, as in #getContentAvailability, etc.
    // then this is not necessary.
    ResourceClaim resourceClaim = resourceClaimManager.getResourceClaim(event.getPreviousContentClaimContainer(),
            event.getPreviousContentClaimSection(), event.getPreviousContentClaimIdentifier());
    if (resourceClaim == null) {
        resourceClaim = resourceClaimManager.newResourceClaim(event.getPreviousContentClaimContainer(),
                event.getPreviousContentClaimSection(), event.getPreviousContentClaimIdentifier(), false, false);
    }
    // Increment Claimant Count, since we will now be referencing the Content Claim
    resourceClaimManager.incrementClaimantCount(resourceClaim);
    final long claimOffset = event.getPreviousContentClaimOffset() == null ? 0L : event.getPreviousContentClaimOffset().longValue();
    final StandardContentClaim contentClaim = new StandardContentClaim(resourceClaim, claimOffset);
    contentClaim.setLength(event.getPreviousFileSize() == null ? -1L : event.getPreviousFileSize());
    if (!contentRepository.isAccessible(contentClaim)) {
        resourceClaimManager.decrementClaimantCount(resourceClaim);
        throw new IllegalStateException("Cannot replay data from Provenance Event because the data is no longer available in the Content Repository");
    }
    final String parentUUID = event.getFlowFileUuid();
    final String newFlowFileUUID = UUID.randomUUID().toString();
    // We need to create a new FlowFile by populating it with information from the
    // Provenance Event. Particularly of note here is that we are setting the FlowFile's
    // contentClaimOffset to 0. This is done for backward compatibility reasons. ContentClaim
    // used to not have a concept of an offset, and the offset was tied only to the FlowFile. This
    // was later refactored, so that the offset was part of the ContentClaim. If we set the offset
    // in both places, we'll end up skipping over that many bytes twice instead of once (once to get
    // to the beginning of the Content Claim and again to get to the offset within that Content Claim).
    // To avoid this, we just always set the offset in the Content Claim itself and set the
    // FlowFileRecord's contentClaimOffset to 0.
    final FlowFileRecord flowFileRecord = new StandardFlowFileRecord.Builder()
            .addAttributes(event.getPreviousAttributes())
            .contentClaim(contentClaim)
            // use 0 because we used the content claim offset in the Content Claim itself
            .contentClaimOffset(0L)
            .entryDate(System.currentTimeMillis())
            .id(flowFileRepository.getNextFlowFileSequence())
            .lineageStart(event.getLineageStartDate(), 0L)
            .size(contentSize.longValue())
            .addAttribute("flowfile.replay", "true")
            .addAttribute("flowfile.replay.timestamp", String.valueOf(new Date()))
            .addAttribute(CoreAttributes.UUID.key(), newFlowFileUUID)
            .removeAttributes(CoreAttributes.DISCARD_REASON.key(), CoreAttributes.ALTERNATE_IDENTIFIER.key())
            .build();
    // Register a Provenance Event to indicate that we replayed the data.
    final ProvenanceEventRecord replayEvent = new StandardProvenanceEventRecord.Builder()
            .setEventType(ProvenanceEventType.REPLAY)
            .addChildUuid(newFlowFileUUID)
            .addParentUuid(parentUUID)
            .setFlowFileUUID(parentUUID)
            .setAttributes(Collections.emptyMap(), flowFileRecord.getAttributes())
            .setCurrentContentClaim(event.getContentClaimContainer(), event.getContentClaimSection(),
                    event.getContentClaimIdentifier(), event.getContentClaimOffset(), event.getFileSize())
            .setDetails("Replay requested by " + user.getIdentity())
            .setEventTime(System.currentTimeMillis())
            .setFlowFileEntryDate(System.currentTimeMillis())
            .setLineageStartDate(event.getLineageStartDate())
            .setComponentType(event.getComponentType())
            .setComponentId(event.getComponentId())
            .build();
    provenanceRepository.registerEvent(replayEvent);
    // Update the FlowFile Repository to indicate that we have added the FlowFile to the flow
    final StandardRepositoryRecord record = new StandardRepositoryRecord(queue);
    record.setWorking(flowFileRecord);
    record.setDestination(queue);
    flowFileRepository.updateRepository(Collections.singleton(record));
    // Enqueue the data
    queue.put(flowFileRecord);
    return replayEvent;
}
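
Before any repository work, replayFlowFile validates that the event is replayable at all: it must not be a JOIN, it must carry the previous Content Claim fields, and it must name its source queue. A hedged sketch of those checks factored into a standalone predicate, using only getters that appear in the method above (the class and method names are illustrative):

import org.apache.nifi.provenance.ProvenanceEventRecord;
import org.apache.nifi.provenance.ProvenanceEventType;

// Hedged sketch: mirrors the precondition checks performed by FlowController.replayFlowFile.
public final class ReplayPreconditions {

    private ReplayPreconditions() {
    }

    public static boolean isReplayable(final ProvenanceEventRecord event) {
        // JOIN events have multiple parents and cannot be replayed.
        if (event == null || event.getEventType() == ProvenanceEventType.JOIN) {
            return false;
        }
        // The previous Content Claim must be fully specified so the content can be located.
        if (event.getPreviousFileSize() == null
                || event.getPreviousContentClaimIdentifier() == null
                || event.getPreviousContentClaimSection() == null
                || event.getPreviousContentClaimContainer() == null) {
            return false;
        }
        // The source queue is required so the replayed FlowFile can be re-enqueued where it was consumed.
        return event.getSourceQueueIdentifier() != null;
    }
}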