use of org.apache.beam.runners.core.construction.graph.PipelineNode.PCollectionNode in project beam by apache.
the class OutputDeduplicatorTest method multipleDuplicatesInStages.
@Test
public void multipleDuplicatesInStages() {
/* A stage that produces multiple duplicates should have them all synthesized.
 *
 * Original Pipeline:
 * red -> .out ---> one -> .out -----\
 *             \                      -> shared.out
 *              \--> two -> .out ----|
 *               \                    -> otherShared -> .out
 *                \-> three --> .out /
 *
 * Fused Pipeline:
 *      -> .out [-> one -> .out -> shared -> .out]         \
 *     /                                                     -> blue -> .out
 *     |                           -> shared -> .out]       /
 * red -> .out [-> two -> .out    |
 *     |                           -> otherShared -> .out]
 *     \
 *      -> .out [-> three -> .out -> otherShared -> .out]
 *
 * Deduplicated Pipeline:
 *             [-> one -> .out -> shared -> .out:0] --\
 *                                                     | -> shared -> .out -> blue -> .out
 *                                | -> shared -> .out:1]  /
 * red -> .out [-> two -> .out   |
 *                                | -> otherShared -> .out:0] --\
 *                                                               | -> otherShared -> .out
 *             [-> three -> .out -> otherShared -> .out:1]   ---/
 */
PCollection redOut = PCollection.newBuilder().setUniqueName("red.out").build();
PTransform red = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putOutputs("out", redOut.getUniqueName())
    .build();
PCollection threeOut = PCollection.newBuilder().setUniqueName("three.out").build();
PTransform three = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putInputs("in", redOut.getUniqueName())
    .putOutputs("out", threeOut.getUniqueName())
    .build();
PCollection oneOut = PCollection.newBuilder().setUniqueName("one.out").build();
PTransform one = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putInputs("in", redOut.getUniqueName())
    .putOutputs("out", oneOut.getUniqueName())
    .build();
PCollection twoOut = PCollection.newBuilder().setUniqueName("two.out").build();
PTransform two = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putInputs("in", redOut.getUniqueName())
    .putOutputs("out", twoOut.getUniqueName())
    .build();
PCollection sharedOut = PCollection.newBuilder().setUniqueName("shared.out").build();
PTransform shared = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putInputs("one", oneOut.getUniqueName())
    .putInputs("two", twoOut.getUniqueName())
    .putOutputs("shared", sharedOut.getUniqueName())
    .build();
PCollection otherSharedOut = PCollection.newBuilder().setUniqueName("shared.out2").build();
PTransform otherShared = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putInputs("multi", threeOut.getUniqueName())
    .putInputs("two", twoOut.getUniqueName())
    .putOutputs("out", otherSharedOut.getUniqueName())
    .build();
PCollection blueOut = PCollection.newBuilder().setUniqueName("blue.out").build();
PTransform blue = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putInputs("in", sharedOut.getUniqueName())
    .putOutputs("out", blueOut.getUniqueName())
    .build();
RunnerApi.Components components = Components.newBuilder()
    .putTransforms("one", one).putPcollections(oneOut.getUniqueName(), oneOut)
    .putTransforms("two", two).putPcollections(twoOut.getUniqueName(), twoOut)
    .putTransforms("multi", three).putPcollections(threeOut.getUniqueName(), threeOut)
    .putTransforms("shared", shared).putPcollections(sharedOut.getUniqueName(), sharedOut)
    .putTransforms("otherShared", otherShared).putPcollections(otherSharedOut.getUniqueName(), otherSharedOut)
    .putTransforms("red", red).putPcollections(redOut.getUniqueName(), redOut)
    .putTransforms("blue", blue).putPcollections(blueOut.getUniqueName(), blueOut)
    .build();
ExecutableStage multiStage = ImmutableExecutableStage.of(
    components,
    Environment.getDefaultInstance(),
    PipelineNode.pCollection(redOut.getUniqueName(), redOut),
    ImmutableList.of(),
    ImmutableList.of(),
    ImmutableList.of(),
    ImmutableList.of(
        PipelineNode.pTransform("multi", three),
        PipelineNode.pTransform("shared", shared),
        PipelineNode.pTransform("otherShared", otherShared)),
    ImmutableList.of(
        PipelineNode.pCollection(sharedOut.getUniqueName(), sharedOut),
        PipelineNode.pCollection(otherSharedOut.getUniqueName(), otherSharedOut)),
    DEFAULT_WIRE_CODER_SETTINGS);
ExecutableStage oneStage = ImmutableExecutableStage.of(
    components,
    Environment.getDefaultInstance(),
    PipelineNode.pCollection(redOut.getUniqueName(), redOut),
    ImmutableList.of(),
    ImmutableList.of(),
    ImmutableList.of(),
    ImmutableList.of(PipelineNode.pTransform("one", one), PipelineNode.pTransform("shared", shared)),
    ImmutableList.of(PipelineNode.pCollection(sharedOut.getUniqueName(), sharedOut)),
    DEFAULT_WIRE_CODER_SETTINGS);
ExecutableStage twoStage = ImmutableExecutableStage.of(
    components,
    Environment.getDefaultInstance(),
    PipelineNode.pCollection(redOut.getUniqueName(), redOut),
    ImmutableList.of(),
    ImmutableList.of(),
    ImmutableList.of(),
    ImmutableList.of(PipelineNode.pTransform("two", two), PipelineNode.pTransform("otherShared", otherShared)),
    ImmutableList.of(PipelineNode.pCollection(otherSharedOut.getUniqueName(), otherSharedOut)),
    DEFAULT_WIRE_CODER_SETTINGS);
PTransformNode redTransform = PipelineNode.pTransform("red", red);
PTransformNode blueTransform = PipelineNode.pTransform("blue", blue);
QueryablePipeline pipeline = QueryablePipeline.forPrimitivesIn(components);
DeduplicationResult result = OutputDeduplicator.ensureSingleProducer(
    pipeline,
    ImmutableList.of(oneStage, twoStage, multiStage),
    ImmutableList.of(redTransform, blueTransform));
assertThat(result.getIntroducedTransforms(), hasSize(2));
assertThat(result.getDeduplicatedStages().keySet(), containsInAnyOrder(multiStage, oneStage, twoStage));
assertThat(result.getDeduplicatedTransforms().keySet(), empty());
Collection<String> introducedIds = result.getIntroducedTransforms().stream()
    .flatMap(pt -> pt.getTransform().getInputsMap().values().stream())
    .collect(Collectors.toList());
String[] stageOutputs = result.getDeduplicatedStages().values().stream()
    .flatMap(s -> s.getOutputPCollections().stream().map(PCollectionNode::getId))
    .toArray(String[]::new);
assertThat(introducedIds, containsInAnyOrder(stageOutputs));
assertThat(
    result.getDeduplicatedComponents().getPcollectionsMap().keySet(),
    hasItems(introducedIds.toArray(new String[0])));
assertThat(
    result.getDeduplicatedComponents().getTransformsMap().entrySet(),
    hasItems(
        result.getIntroducedTransforms().stream()
            .collect(Collectors.toMap(PTransformNode::getId, PTransformNode::getTransform))
            .entrySet()
            .toArray(new Map.Entry[0])));
}
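The test above only checks the introduced transforms indirectly, through their input and output ids. As a rough, hedged illustration of what one such synthesized merge could look like as a RunnerApi proto, the sketch below builds a flatten-style transform by hand; the unique name and the partial-output ids "shared.out:0"/"shared.out:1" are hypothetical placeholders, and the use of PTransformTranslation.FLATTEN_TRANSFORM_URN is an assumption rather than something the test asserts.
// Hedged sketch (not from the test): a hand-built stand-in for a transform the
// deduplicator could introduce to merge partial outputs back into "shared.out".
PTransform syntheticMerge = PTransform.newBuilder()
    .setUniqueName("synthetic/flatten/shared.out") // hypothetical name, not the deduplicator's
    .putInputs("0", "shared.out:0")  // partial output written by one producing stage (illustrative id)
    .putInputs("1", "shared.out:1")  // partial output written by another producing stage (illustrative id)
    .putOutputs("out", "shared.out") // the original PCollection, now with a single producer
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.FLATTEN_TRANSFORM_URN).build())
    .build();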
use of org.apache.beam.runners.core.construction.graph.PipelineNode.PCollectionNode in project beam by apache.
the class OutputDeduplicatorTest method duplicateOverStagesAndTransforms.
@Test
public void duplicateOverStagesAndTransforms() {
/* When both a stage and a runner-executed transform produce a PCollection, all should be
 * replaced with synthetic flattens.
 * original graph:
 *             --> one -> .out \
 * red -> .out                  | -> shared -> .out
 *             ---------------> /
 *
 * fused graph:
 *             --> [one -> .out -> shared ->] .out
 * red -> .out                                     |
 *             ------------------> shared --> .out
 *
 * deduplicated graph:
 *             --> [one -> .out -> shared ->] .out:0 \
 * red -> .out                                         | -> shared -> .out
 *             -----------------> shared:0 -> .out:1  /
 */
PCollection redOut = PCollection.newBuilder().setUniqueName("red.out").build();
PTransform red = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putOutputs("out", redOut.getUniqueName())
    .build();
PCollection oneOut = PCollection.newBuilder().setUniqueName("one.out").build();
PTransform one = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putInputs("in", redOut.getUniqueName())
    .putOutputs("out", oneOut.getUniqueName())
    .build();
PCollection sharedOut = PCollection.newBuilder().setUniqueName("shared.out").build();
PTransform shared = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putInputs("one", oneOut.getUniqueName())
    .putInputs("red", redOut.getUniqueName())
    .putOutputs("shared", sharedOut.getUniqueName())
    .build();
PCollection blueOut = PCollection.newBuilder().setUniqueName("blue.out").build();
PTransform blue = PTransform.newBuilder()
    .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN).build())
    .putInputs("in", sharedOut.getUniqueName())
    .putOutputs("out", blueOut.getUniqueName())
    .build();
RunnerApi.Components components = Components.newBuilder()
    .putTransforms("one", one).putPcollections(oneOut.getUniqueName(), oneOut)
    .putTransforms("red", red).putPcollections(redOut.getUniqueName(), redOut)
    .putTransforms("shared", shared).putPcollections(sharedOut.getUniqueName(), sharedOut)
    .putTransforms("blue", blue).putPcollections(blueOut.getUniqueName(), blueOut)
    .build();
PTransformNode sharedTransform = PipelineNode.pTransform("shared", shared);
ExecutableStage oneStage = ImmutableExecutableStage.of(
    components,
    Environment.getDefaultInstance(),
    PipelineNode.pCollection(redOut.getUniqueName(), redOut),
    ImmutableList.of(),
    ImmutableList.of(),
    ImmutableList.of(),
    ImmutableList.of(PipelineNode.pTransform("one", one), sharedTransform),
    ImmutableList.of(PipelineNode.pCollection(sharedOut.getUniqueName(), sharedOut)),
    DEFAULT_WIRE_CODER_SETTINGS);
PTransformNode redTransform = PipelineNode.pTransform("red", red);
PTransformNode blueTransform = PipelineNode.pTransform("blue", blue);
QueryablePipeline pipeline = QueryablePipeline.forPrimitivesIn(components);
DeduplicationResult result = OutputDeduplicator.ensureSingleProducer(
    pipeline,
    ImmutableList.of(oneStage),
    ImmutableList.of(redTransform, blueTransform, sharedTransform));
assertThat(result.getIntroducedTransforms(), hasSize(1));
PTransformNode introduced = getOnlyElement(result.getIntroducedTransforms());
assertThat(introduced.getTransform().getOutputsMap().size(), equalTo(1));
assertThat(
    getOnlyElement(introduced.getTransform().getOutputsMap().values()),
    equalTo(sharedOut.getUniqueName()));
assertThat(
    result.getDeduplicatedComponents().getPcollectionsMap().keySet(),
    hasItems(introduced.getTransform().getInputsMap().values().toArray(new String[0])));
assertThat(result.getDeduplicatedStages().keySet(), hasSize(1));
assertThat(result.getDeduplicatedTransforms().keySet(), containsInAnyOrder("shared"));
List<String> introducedOutputs = new ArrayList<>();
introducedOutputs.addAll(
    result.getDeduplicatedTransforms().get("shared").getTransform().getOutputsMap().values());
introducedOutputs.addAll(
    result.getDeduplicatedStages().get(oneStage).getOutputPCollections().stream()
        .map(PCollectionNode::getId)
        .collect(Collectors.toList()));
assertThat(
    introduced.getTransform().getInputsMap().values(),
    containsInAnyOrder(introducedOutputs.toArray(new String[0])));
assertThat(
    result.getDeduplicatedComponents().getPcollectionsMap().keySet(),
    hasItems(introducedOutputs.toArray(new String[0])));
assertThat(
    result.getDeduplicatedComponents().getTransformsMap(),
    hasEntry(introduced.getId(), introduced.getTransform()));
}
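For context, a hedged sketch of how a caller might apply a DeduplicationResult once it has been computed. The collections fusedStages and runnerExecutedTransforms are hypothetical stand-ins for the caller's fused stages and runner-executed transforms; only the accessors exercised in these tests are used, and the wiring is illustrative rather than the actual fuser code.
// Hedged sketch (illustrative): swapping in deduplicated stages/transforms and appending
// the introduced merge transforms. `fusedStages` and `runnerExecutedTransforms` are hypothetical.
List<ExecutableStage> finalStages = fusedStages.stream()
    .map(stage -> result.getDeduplicatedStages().getOrDefault(stage, stage))
    .collect(Collectors.toList());
List<PTransformNode> finalTransforms = new ArrayList<>();
for (PTransformNode transform : runnerExecutedTransforms) {
  // Replace any transform that was rewritten; keep the rest as-is.
  finalTransforms.add(result.getDeduplicatedTransforms().getOrDefault(transform.getId(), transform));
}
// The synthesized merge transforms become additional runner-executed transforms.
finalTransforms.addAll(result.getIntroducedTransforms());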
use of org.apache.beam.runners.core.construction.graph.PipelineNode.PCollectionNode in project beam by apache.
the class QueryablePipelineTest method transformWithSameSideAndMainInput.
/**
* Tests that inputs that are both side inputs and main inputs are returned from {@link
* QueryablePipeline#getPerElementConsumers(PCollectionNode)} and {@link
* QueryablePipeline#getSideInputs(PTransformNode)}.
*/
@Test
public void transformWithSameSideAndMainInput() {
Components components = Components.newBuilder()
    .putPcollections("read_pc", RunnerApi.PCollection.getDefaultInstance())
    .putPcollections("pardo_out", RunnerApi.PCollection.getDefaultInstance())
    .putTransforms(
        "root",
        PTransform.newBuilder()
            .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.IMPULSE_TRANSFORM_URN).build())
            .putOutputs("out", "read_pc")
            .build())
    .putTransforms(
        "multiConsumer",
        PTransform.newBuilder()
            .putInputs("main_in", "read_pc")
            .putInputs("side_in", "read_pc")
            .putOutputs("out", "pardo_out")
            .setSpec(
                FunctionSpec.newBuilder()
                    .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
                    .setPayload(
                        ParDoPayload.newBuilder()
                            .putSideInputs("side_in", SideInput.getDefaultInstance())
                            .build()
                            .toByteString())
                    .build())
            .build())
    .build();
QueryablePipeline qp = QueryablePipeline.forPrimitivesIn(components);
PCollectionNode multiInputPc = PipelineNode.pCollection("read_pc", components.getPcollectionsOrThrow("read_pc"));
PTransformNode multiConsumerPT = PipelineNode.pTransform("multiConsumer", components.getTransformsOrThrow("multiConsumer"));
SideInputReference sideInputRef = SideInputReference.of(multiConsumerPT, "side_in", multiInputPc);
assertThat(qp.getPerElementConsumers(multiInputPc), contains(multiConsumerPT));
assertThat(qp.getSideInputs(multiConsumerPT), contains(sideInputRef));
}
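A hedged sketch of how a runner could combine the two queries exercised above to detect a PCollection that is consumed both per element and as a side input. The decision logic is illustrative; only the QueryablePipeline calls from the test and the SideInputReference accessors are assumed.
// Hedged sketch (illustrative): read_pc is consumed per element and also referenced as a side input.
boolean consumedPerElement = !qp.getPerElementConsumers(multiInputPc).isEmpty();
boolean consumedAsSideInput = qp.getSideInputs(multiConsumerPT).stream()
    .anyMatch(ref -> ref.collection().getId().equals(multiInputPc.getId()));
// When both are true, the same PCollection must be streamed to the main input and
// also materialized so it can be read as a side input view.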
use of org.apache.beam.runners.core.construction.graph.PipelineNode.PCollectionNode in project beam by apache.
the class GreedyStageFuserTest method sideInputIncludedInStage.
@Test
public void sideInputIncludedInStage() {
Environment env = Environments.createDockerEnvironment("common");
PTransform readTransform = PTransform.newBuilder()
    .setUniqueName("read")
    .putInputs("input", "impulse.out")
    .putOutputs("output", "read.out")
    .setSpec(
        FunctionSpec.newBuilder()
            .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
            .setPayload(ParDoPayload.newBuilder().setDoFn(FunctionSpec.newBuilder()).build().toByteString()))
    .setEnvironmentId("common")
    .build();
PTransform parDoTransform = PTransform.newBuilder()
    .setUniqueName("parDo")
    .putInputs("input", "read.out")
    .putInputs("side_input", "side_read.out")
    .putOutputs("output", "parDo.out")
    .setSpec(
        FunctionSpec.newBuilder()
            .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
            .setPayload(
                ParDoPayload.newBuilder()
                    .setDoFn(FunctionSpec.newBuilder())
                    .putSideInputs("side_input", SideInput.getDefaultInstance())
                    .build()
                    .toByteString()))
    .setEnvironmentId("common")
    .build();
PCollection sideInputPCollection = PCollection.newBuilder().setUniqueName("side_read.out").build();
QueryablePipeline p = QueryablePipeline.forPrimitivesIn(
    partialComponents.toBuilder()
        .putTransforms("read", readTransform)
        .putPcollections("read.out", PCollection.newBuilder().setUniqueName("read.out").build())
        .putTransforms(
            "side_read",
            PTransform.newBuilder()
                .setUniqueName("side_read")
                .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN))
                .putInputs("input", "impulse.out")
                .putOutputs("output", "side_read.out")
                .build())
        .putPcollections("side_read.out", sideInputPCollection)
        .putTransforms("parDo", parDoTransform)
        .putPcollections("parDo.out", PCollection.newBuilder().setUniqueName("parDo.out").build())
        .putEnvironments("common", env)
        .build());
PCollectionNode readOutput = getOnlyElement(p.getOutputPCollections(PipelineNode.pTransform("read", readTransform)));
ExecutableStage subgraph = GreedyStageFuser.forGrpcPortRead(
    p, readOutput, ImmutableSet.of(PipelineNode.pTransform("parDo", parDoTransform)));
PTransformNode parDoNode = PipelineNode.pTransform("parDo", parDoTransform);
SideInputReference sideInputRef = SideInputReference.of(
    parDoNode, "side_input", PipelineNode.pCollection("side_read.out", sideInputPCollection));
assertThat(subgraph.getSideInputs(), contains(sideInputRef));
assertThat(subgraph.getOutputPCollections(), emptyIterable());
}
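A hedged sketch of the hand-off this enables: a portable runner typically walks the SideInputReferences of a fused stage and materializes each referenced PCollection before the stage executes. The materialization step itself is elided, and only accessors already used in the tests above are assumed.
// Hedged sketch (illustrative): consuming the side inputs reported by the fused stage.
for (SideInputReference side : subgraph.getSideInputs()) {
  String sideInputPCollectionId = side.collection().getId(); // e.g. "side_read.out"
  String consumingTransformId = side.transform().getId();    // e.g. "parDo"
  String localName = side.localName();                       // e.g. "side_input"
  // ... materialize sideInputPCollectionId and make it readable by consumingTransformId
  //     under localName before running the stage ...
}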
use of org.apache.beam.runners.core.construction.graph.PipelineNode.PCollectionNode in project beam by apache.
the class GreedyStageFuserTest method materializesWithSideInputConsumer.
@Test
public void materializesWithSideInputConsumer() {
// (impulse.out) -> read -> read.out -----------> parDo -> parDo.out -> window -> window.out
// (impulse.out) -> side_read -> side_read.out /
// Where parDo takes side_read as a side input, fuses into
// (impulse.out) -> read -> (read.out)
// (impulse.out) -> side_read -> (side_read.out)
// (read.out) -> parDo -> parDo.out -> window -> window.out
// parDo doesn't have a per-element consumer from side_read.out, so it can't root a stage
// which consumes from that materialized collection. Nodes with side inputs must root a stage,
// but do not restrict fusion of consumers.
Environment env = Environments.createDockerEnvironment("common");
PTransform readTransform = PTransform.newBuilder()
    .putInputs("input", "impulse.out")
    .putOutputs("output", "read.out")
    .setSpec(
        FunctionSpec.newBuilder()
            .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
            .setPayload(ParDoPayload.newBuilder().setDoFn(FunctionSpec.newBuilder()).build().toByteString()))
    .setEnvironmentId("common")
    .build();
QueryablePipeline p = QueryablePipeline.forPrimitivesIn(
    partialComponents.toBuilder()
        .putTransforms("read", readTransform)
        .putPcollections("read.out", PCollection.newBuilder().setUniqueName("read.out").build())
        .putTransforms(
            "side_read",
            PTransform.newBuilder()
                .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN))
                .putInputs("input", "impulse.out")
                .putOutputs("output", "side_read.out")
                .build())
        .putPcollections("side_read.out", PCollection.newBuilder().setUniqueName("side_read.out").build())
        .putTransforms(
            "parDo",
            PTransform.newBuilder()
                .putInputs("input", "read.out")
                .putInputs("side_input", "side_read.out")
                .putOutputs("output", "parDo.out")
                .setSpec(
                    FunctionSpec.newBuilder()
                        .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
                        .setPayload(
                            ParDoPayload.newBuilder()
                                .setDoFn(FunctionSpec.newBuilder())
                                .putSideInputs("side_input", SideInput.getDefaultInstance())
                                .build()
                                .toByteString()))
                .setEnvironmentId("common")
                .build())
        .putPcollections("parDo.out", PCollection.newBuilder().setUniqueName("parDo.out").build())
        .putTransforms(
            "window",
            PTransform.newBuilder()
                .putInputs("input", "read.out")
                .putOutputs("output", "window.out")
                .setSpec(
                    FunctionSpec.newBuilder()
                        .setUrn(PTransformTranslation.ASSIGN_WINDOWS_TRANSFORM_URN)
                        .setPayload(
                            WindowIntoPayload.newBuilder()
                                .setWindowFn(FunctionSpec.newBuilder())
                                .build()
                                .toByteString()))
                .setEnvironmentId("common")
                .build())
        .putPcollections("window.out", PCollection.newBuilder().setUniqueName("window.out").build())
        .putEnvironments("common", env)
        .build());
PTransformNode readNode = PipelineNode.pTransform("read", readTransform);
PCollectionNode readOutput = getOnlyElement(p.getOutputPCollections(readNode));
ExecutableStage subgraph = GreedyStageFuser.forGrpcPortRead(p, impulseOutputNode, ImmutableSet.of(readNode));
assertThat(subgraph.getOutputPCollections(), contains(readOutput));
assertThat(subgraph, hasSubtransforms(readNode.getId()));
}
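A hedged sketch of what the stage's output PCollections are used for: each one marks a materialization point at which downstream consumers (here, parDo and window, which both read read.out) would be rooted in a later fusion step. The wiring is left abstract, and only accessors used in these tests are assumed.
// Hedged sketch (illustrative): output PCollections of a fused stage become materialization points.
for (PCollectionNode materialized : subgraph.getOutputPCollections()) {
  String portId = materialized.getId(); // "read.out" in this test
  // ... add a remote write of portId at the end of this stage, and root the downstream
  //     stage that will contain parDo and window at a remote read of the same id ...
}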