Example usage of org.apache.beam.sdk.runners.TransformHierarchy.Node in the Apache Beam project.
From the class ViewOverrideFactoryTest, method replacementGetViewReturnsOriginal:
/**
 * Verifies that the replacement transform produced by the factory still writes to the
 * original {@link PCollectionView}: exactly one {@link WriteView} primitive must appear in
 * the expanded graph, it must reference the same view instance, and it must have one input.
 */
@Test
public void replacementGetViewReturnsOriginal() {
  final PCollection<Integer> ints = p.apply("CreateContents", Create.of(1, 2, 3));
  final PCollectionView<List<Integer>> view =
      PCollectionViews.listView(ints, WindowingStrategy.globalDefault(), ints.getCoder());
  PTransformReplacement<PCollection<Integer>, PCollectionView<List<Integer>>> replacement =
      factory.getReplacementTransform(
          AppliedPTransform
              .<PCollection<Integer>, PCollectionView<List<Integer>>,
                  CreatePCollectionView<Integer, List<Integer>>>
                  of(
                      "foo",
                      ints.expand(),
                      view.expand(),
                      CreatePCollectionView.<Integer, List<Integer>>of(view),
                      p));
  ints.apply(replacement.getTransform());
  final AtomicBoolean writeViewVisited = new AtomicBoolean();
  p.traverseTopologically(
      new PipelineVisitor.Defaults() {
        @Override
        public void visitPrimitiveTransform(Node node) {
          if (node.getTransform() instanceof WriteView) {
            assertThat(
                "There should only be one WriteView primitive in the graph",
                writeViewVisited.getAndSet(true),
                is(false));
            // Use a wildcard rather than the raw PCollectionView type.
            PCollectionView<?> replacementView = ((WriteView) node.getTransform()).getView();
            assertThat(replacementView, Matchers.<PCollectionView<?>>theInstance(view));
            assertThat(node.getInputs().entrySet(), hasSize(1));
          }
        }
      });
  assertThat(writeViewVisited.get(), is(true));
}
Example usage of org.apache.beam.sdk.runners.TransformHierarchy.Node in the Apache Beam project.
From the class Pipeline, method replace:
private void replace(final PTransformOverride override) {
  // Composites/primitives whose AppliedPTransform satisfies the override's matcher.
  final Set<Node> matchedNodes = new HashSet<>();
  // Nodes whose full names become available again once the replacement is applied:
  // every matched node plus everything nested inside a matched composite.
  final Set<Node> obsoleteNodes = new HashSet<>();
  traverseTopologically(
      new PipelineVisitor.Defaults() {
        @Override
        public CompositeBehavior enterCompositeTransform(Node node) {
          if (!node.isRootNode()) {
            if (obsoleteNodes.contains(node.getEnclosingNode())) {
              // The enclosing composite is being replaced, so this child is freed too;
              // it is not itself a replacement candidate.
              obsoleteNodes.add(node);
            } else if (override.getMatcher().matches(node.toAppliedPTransform(getPipeline()))) {
              matchedNodes.add(node);
              // Replacing this composite frees it and, as we descend, its children.
              obsoleteNodes.add(node);
            }
          }
          return CompositeBehavior.ENTER_TRANSFORM;
        }

        @Override
        public void visitPrimitiveTransform(Node node) {
          if (obsoleteNodes.contains(node.getEnclosingNode())) {
            obsoleteNodes.add(node);
          } else if (override.getMatcher().matches(node.toAppliedPTransform(getPipeline()))) {
            matchedNodes.add(node);
            obsoleteNodes.add(node);
          }
        }
      });
  // Release the names of every freed node so replacements may reuse them.
  for (Node obsolete : obsoleteNodes) {
    usedFullNames.remove(obsolete.getFullName());
  }
  for (Node matched : matchedNodes) {
    applyReplacement(matched, override.getOverrideFactory());
  }
}
Example usage of org.apache.beam.sdk.runners.TransformHierarchy.Node in the Apache Beam project.
From the class WindowIntoTranslationTest, method testToFromProto:
/** Round-trips a {@code WindowFn} through its proto payload and checks it is unchanged. */
@Test
public void testToFromProto() throws InvalidProtocolBufferException {
  pipeline.apply(GenerateSequence.from(0)).apply(Window.<Long>into((WindowFn) windowFn));

  // Locate the single Window.Assign primitive created by the application above.
  final AtomicReference<AppliedPTransform<?, ?, Assign<?>>> assignTransform =
      new AtomicReference<>(null);
  pipeline.traverseTopologically(
      new PipelineVisitor.Defaults() {
        @Override
        public void visitPrimitiveTransform(Node node) {
          if (node.getTransform() instanceof Window.Assign) {
            // There must be exactly one Assign in the graph.
            checkState(assignTransform.get() == null);
            assignTransform.set(
                (AppliedPTransform<?, ?, Assign<?>>) node.toAppliedPTransform(getPipeline()));
          }
        }
      });
  checkState(assignTransform.get() != null);

  // Serialize to proto, then deserialize and compare against the original WindowFn.
  SdkComponents components = SdkComponents.create();
  WindowIntoPayload payload =
      WindowIntoTranslation.toProto(assignTransform.get().getTransform(), components);
  assertEquals(windowFn, WindowIntoTranslation.getWindowFn(payload));
}
Example usage of org.apache.beam.sdk.runners.TransformHierarchy.Node in the Apache Beam project.
From the class CombineTranslationTest, method testToFromProto:
/** Round-trips a {@code CombineFn} through its proto payload and checks it is unchanged. */
@Test
public void testToFromProto() throws Exception {
  PCollection<Integer> input = pipeline.apply(Create.of(1, 2, 3));
  input.apply(Combine.globally(combineFn));

  // Capture the single Combine.PerKey composite produced by the global combine.
  final AtomicReference<AppliedPTransform<?, ?, Combine.PerKey<?, ?, ?>>> perKeyTransform =
      new AtomicReference<>();
  pipeline.traverseTopologically(
      new PipelineVisitor.Defaults() {
        @Override
        public void leaveCompositeTransform(Node node) {
          if (node.getTransform() instanceof Combine.PerKey) {
            // There must be exactly one Combine.PerKey in the graph.
            checkState(perKeyTransform.get() == null);
            perKeyTransform.set((AppliedPTransform) node.toAppliedPTransform(getPipeline()));
          }
        }
      });
  checkState(perKeyTransform.get() != null);

  // Serialize to proto, then verify both the accumulator coder and the CombineFn survive.
  SdkComponents sdkComponents = SdkComponents.create();
  CombinePayload combineProto = CombineTranslation.toProto(perKeyTransform.get(), sdkComponents);
  RunnerApi.Components componentsProto = sdkComponents.toComponents();
  assertEquals(
      combineFn.getAccumulatorCoder(pipeline.getCoderRegistry(), input.getCoder()),
      CombineTranslation.getAccumulatorCoder(combineProto, componentsProto));
  assertEquals(combineFn, CombineTranslation.getCombineFn(combineProto));
}
Example usage of org.apache.beam.sdk.runners.TransformHierarchy.Node in the Apache Beam project.
From the class DataflowRunnerTest, method testUnconsumedReads:
/**
 * Tests that all reads are consumed by at least one {@link PTransform}: after transform
 * replacement, the otherwise-unconsumed read must appear as an input to some primitive.
 */
@Test
public void testUnconsumedReads() throws IOException {
  DataflowPipelineOptions dataflowOptions = buildPipelineOptions();
  RuntimeTestOptions options = dataflowOptions.as(RuntimeTestOptions.class);
  Pipeline p = buildDataflowPipeline(dataflowOptions);
  final PCollection<String> unconsumed = p.apply(TextIO.read().from(options.getInput()));
  DataflowRunner.fromOptions(dataflowOptions).replaceTransforms(p);
  final AtomicBoolean unconsumedSeenAsInput = new AtomicBoolean();
  p.traverseTopologically(
      new PipelineVisitor.Defaults() {
        @Override
        public void visitPrimitiveTransform(Node node) {
          // Only record success when the read's output actually feeds this primitive.
          // Setting the flag unconditionally would make the assertion below vacuous —
          // any primitive in the graph would pass the test.
          if (node.getInputs().containsValue(unconsumed)) {
            unconsumedSeenAsInput.set(true);
          }
        }
      });
  assertThat(unconsumedSeenAsInput.get(), is(true));
}
End of aggregated usage examples.