Use of org.apache.flink.runtime.jobgraph.IntermediateDataSet in the project flink by apache.
Example: the checkDataFlow method of the class TestJobDataFlowValidator.
public static void checkDataFlow(TestJobWithDescription testJob, boolean withDrain) {
    // Index OperatorFinishedEvents by operator ID and subtask index.
    Map<String, Map<Integer, OperatorFinishedEvent>> finishEvents = new HashMap<>();
    for (TestEvent ev : testJob.eventQueue.getAll()) {
        if (ev instanceof OperatorFinishedEvent) {
            finishEvents
                    .computeIfAbsent(ev.operatorId, ign -> new HashMap<>())
                    .put(ev.subtaskIndex, ((OperatorFinishedEvent) ev));
        }
    }
    // Walk each produced data set's edge from the upstream (producing) vertex
    // to the downstream (consuming) vertex and validate the tracked operators.
    for (JobVertex upstream : testJob.jobGraph.getVertices()) {
        for (IntermediateDataSet produced : upstream.getProducedDataSets()) {
            JobEdge edge = produced.getConsumer();
            Optional<String> upstreamIDOptional = getTrackedOperatorID(upstream, true, testJob);
            Optional<String> downstreamIDOptional = getTrackedOperatorID(edge.getTarget(), false, testJob);
            if (upstreamIDOptional.isPresent() && downstreamIDOptional.isPresent()) {
                final String upstreamID = upstreamIDOptional.get();
                final String downstreamID = downstreamIDOptional.get();
                if (testJob.sources.contains(upstreamID)) {
                    // TODO: if we add tests for FLIP-27 sources we might need to adjust
                    // this condition
                    LOG.debug("Legacy sources do not have the finish() method and thus do not emit FinishEvent");
                } else {
                    checkDataFlow(upstreamID, downstreamID, edge, finishEvents, withDrain);
                }
            } else {
                LOG.debug("Ignoring edge (untracked operator): {}", edge);
            }
        }
    }
}
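
For context, the traversal above relies only on the JobGraph topology API: each JobVertex exposes the IntermediateDataSets it produces, and each data set points to the JobEdge leading to its consuming vertex. The sketch below is a hypothetical helper (not part of the Flink sources) that lists every producer-to-consumer edge of a JobGraph this way. It assumes the single-consumer accessor used in the snippet above; note that the accessor differs across Flink versions (some releases expose getConsumers() returning a list instead).

import org.apache.flink.runtime.jobgraph.IntermediateDataSet;
import org.apache.flink.runtime.jobgraph.JobEdge;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;

public class JobGraphEdgePrinter {

    // Hypothetical helper mirroring the traversal in checkDataFlow above:
    // prints every producer -> consumer edge of the given JobGraph.
    public static void printEdges(JobGraph jobGraph) {
        for (JobVertex upstream : jobGraph.getVertices()) {
            for (IntermediateDataSet produced : upstream.getProducedDataSets()) {
                // Accessor name varies between Flink versions
                // (getConsumer() vs. getConsumers()); this follows the snippet above.
                JobEdge edge = produced.getConsumer();
                System.out.printf(
                        "%s -> %s via data set %s%n",
                        upstream.getName(), edge.getTarget().getName(), produced.getId());
            }
        }
    }
}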