Use of org.apache.beam.model.pipeline.v1.RunnerApi.Components in project beam by apache.
From the class QueryablePipelineTest, method rootTransforms.
@Test
public void rootTransforms() {
  Pipeline p = Pipeline.create();
  p.apply("UnboundedRead", Read.from(CountingSource.unbounded()))
      .apply(Window.into(FixedWindows.of(Duration.millis(5L))))
      .apply(Count.perElement());
  p.apply("BoundedRead", Read.from(CountingSource.upTo(100L)));
  Components components = PipelineTranslation.toProto(p).getComponents();
  QueryablePipeline qp = QueryablePipeline.forPrimitivesIn(components);
  assertThat(qp.getRootTransforms(), hasSize(2));
  for (PTransformNode rootTransform : qp.getRootTransforms()) {
    assertThat(
        "Root transforms should have no inputs",
        rootTransform.getTransform().getInputsCount(),
        equalTo(0));
    assertThat(
        "Only added impulse transforms to the pipeline",
        rootTransform.getTransform().getSpec().getUrn(),
        equalTo(PTransformTranslation.IMPULSE_TRANSFORM_URN));
  }
}
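As a hedged follow-up sketch (not part of the original test), one can walk from each root transform to the PCollections it produces using only the generated proto accessors on Components and PTransform:

// Illustrative only: list the PCollections produced by each root transform.
for (PTransformNode root : qp.getRootTransforms()) {
  for (String pcollectionId : root.getTransform().getOutputsMap().values()) {
    RunnerApi.PCollection produced = components.getPcollectionsOrThrow(pcollectionId);
    System.out.println(root.getId() + " produces " + produced.getUniqueName());
  }
}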
Use of org.apache.beam.model.pipeline.v1.RunnerApi.Components in project beam by apache.
From the class QueryablePipelineTest, method forTransformsWithMalformedGraph.
@Test
public void forTransformsWithMalformedGraph() {
  Components components =
      Components.newBuilder()
          .putTransforms(
              "root", PTransform.newBuilder().putOutputs("output", "output.out").build())
          .putPcollections(
              "output.out",
              RunnerApi.PCollection.newBuilder().setUniqueName("output.out").build())
          .putTransforms(
              "consumer", PTransform.newBuilder().putInputs("input", "output.out").build())
          .build();
  thrown.expect(IllegalArgumentException.class);
  // Consumer consumes a PCollection which isn't produced.
  QueryablePipeline.forTransforms(ImmutableSet.of("consumer"), components);
}
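The `thrown` field used above is not shown in this snippet; it is presumably the standard JUnit 4 ExpectedException rule declared on the test class, roughly:

// Assumed test-class fixture (not shown in the snippet above).
@Rule public ExpectedException thrown = ExpectedException.none();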
Use of org.apache.beam.model.pipeline.v1.RunnerApi.Components in project beam by apache.
From the class SdkComponentsTest, method registerTransformAfterChildren.
@Test
public void registerTransformAfterChildren() throws IOException {
  Create.Values<Long> create = Create.of(1L, 2L, 3L);
  GenerateSequence createChild = GenerateSequence.from(0);
  PCollection<Long> pt = pipeline.apply(create);
  String userName = "my_transform";
  String childUserName = "my_transform/my_nesting";
  AppliedPTransform<?, ?, ?> transform =
      AppliedPTransform.of(
          userName,
          PValues.expandInput(pipeline.begin()),
          PValues.expandOutput(pt),
          create,
          ResourceHints.create(),
          pipeline);
  AppliedPTransform<?, ?, ?> childTransform =
      AppliedPTransform.of(
          childUserName,
          PValues.expandInput(pipeline.begin()),
          PValues.expandOutput(pt),
          createChild,
          ResourceHints.create(),
          pipeline);
  String childId = components.registerPTransform(childTransform, Collections.emptyList());
  String parentId =
      components.registerPTransform(transform, Collections.singletonList(childTransform));
  Components components = this.components.toComponents();
  assertThat(components.getTransformsOrThrow(parentId).getSubtransforms(0), equalTo(childId));
  assertThat(components.getTransformsOrThrow(childId).getSubtransformsCount(), equalTo(0));
}
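The `pipeline` and `components` fields come from the test class's setup, which is not shown here. A minimal sketch, assuming a JUnit TestPipeline rule and an SdkComponents factory that takes pipeline options (the exact overload may differ between Beam versions):

// Assumed fixtures (illustrative; verify against your Beam version).
@Rule
public TestPipeline pipeline = TestPipeline.create().enableAbandonedNodeEnforcement(false);

private SdkComponents components = SdkComponents.create(PipelineOptionsFactory.create());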
Use of org.apache.beam.model.pipeline.v1.RunnerApi.Components in project beam by apache.
From the class GreedyPipelineFuserTest, method singleEnvironmentAcrossGroupByKeyMultipleStages.
/*
* impulse -> .out -> read -> .out -> groupByKey -> .out -> parDo -> .out
* becomes
* (impulse.out) -> read -> (read.out)
* (groupByKey.out) -> parDo
*/
@Test
public void singleEnvironmentAcrossGroupByKeyMultipleStages() {
  Components components =
      partialComponents
          .toBuilder()
          .putTransforms(
              "read",
              PTransform.newBuilder()
                  .setUniqueName("Read")
                  .putInputs("input", "impulse.out")
                  .putOutputs("output", "read.out")
                  .setSpec(
                      FunctionSpec.newBuilder()
                          .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
                          .setPayload(
                              ParDoPayload.newBuilder()
                                  .setDoFn(FunctionSpec.newBuilder())
                                  .build()
                                  .toByteString()))
                  .setEnvironmentId("py")
                  .build())
          .putPcollections("read.out", pc("read.out"))
          .putTransforms(
              "groupByKey",
              PTransform.newBuilder()
                  .setUniqueName("GroupByKey")
                  .putInputs("input", "read.out")
                  .putOutputs("output", "groupByKey.out")
                  .setSpec(
                      FunctionSpec.newBuilder()
                          .setUrn(PTransformTranslation.GROUP_BY_KEY_TRANSFORM_URN))
                  .build())
          .putPcollections("groupByKey.out", pc("groupByKey.out"))
          .putTransforms(
              "parDo",
              PTransform.newBuilder()
                  .setUniqueName("ParDo")
                  .putInputs("input", "groupByKey.out")
                  .putOutputs("output", "parDo.out")
                  .setSpec(
                      FunctionSpec.newBuilder()
                          .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
                          .setPayload(
                              ParDoPayload.newBuilder()
                                  .setDoFn(FunctionSpec.newBuilder())
                                  .build()
                                  .toByteString()))
                  .setEnvironmentId("py")
                  .build())
          .putPcollections("parDo.out", pc("parDo.out"))
          .build();
  FusedPipeline fused =
      GreedyPipelineFuser.fuse(Pipeline.newBuilder().setComponents(components).build());
  assertThat(
      fused.getRunnerExecutedTransforms(),
      containsInAnyOrder(
          PipelineNode.pTransform("impulse", components.getTransformsOrThrow("impulse")),
          PipelineNode.pTransform("groupByKey", components.getTransformsOrThrow("groupByKey"))));
  assertThat(
      fused.getFusedStages(),
      containsInAnyOrder(
          ExecutableStageMatcher.withInput("impulse.out")
              .withOutputs("read.out")
              .withTransforms("read"),
          ExecutableStageMatcher.withInput("groupByKey.out")
              .withNoOutputs()
              .withTransforms("parDo")));
}
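Both GreedyPipelineFuserTest snippets on this page rely on a `partialComponents` fixture and a `pc(...)` helper defined elsewhere in the test class. A minimal sketch of what they might look like (illustrative; the coder and windowing-strategy ids are placeholders, and the real fixture may differ):

// Assumed fixture: a Components proto holding only an Impulse root and its output.
private final Components partialComponents =
    Components.newBuilder()
        .putTransforms(
            "impulse",
            PTransform.newBuilder()
                .setUniqueName("Impulse")
                .putOutputs("output", "impulse.out")
                .setSpec(
                    FunctionSpec.newBuilder().setUrn(PTransformTranslation.IMPULSE_TRANSFORM_URN))
                .build())
        .putPcollections("impulse.out", pc("impulse.out"))
        .build();

// Assumed helper: builds a PCollection proto with the given unique name.
private static RunnerApi.PCollection pc(String name) {
  return RunnerApi.PCollection.newBuilder()
      .setUniqueName(name)
      .setCoderId("coder")
      .setWindowingStrategyId("ws")
      .build();
}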
Use of org.apache.beam.model.pipeline.v1.RunnerApi.Components in project beam by apache.
From the class GreedyPipelineFuserTest, method parDoWithStateAndTimerRootsStage.
/*
* Tests that parDo with state and timers is fused correctly and can be queried
* impulse -> .out -> timer -> .out
* becomes
* (impulse.out) -> timer
*/
@Test
public void parDoWithStateAndTimerRootsStage() {
  PTransform timerTransform =
      PTransform.newBuilder()
          .setUniqueName("TimerParDo")
          .putInputs("input", "impulse.out")
          .putInputs("timer", "timer.out")
          .putOutputs("timer", "timer.out")
          .putOutputs("output", "output.out")
          .setSpec(
              FunctionSpec.newBuilder()
                  .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
                  .setPayload(
                      ParDoPayload.newBuilder()
                          .setDoFn(FunctionSpec.newBuilder())
                          .putStateSpecs("state", StateSpec.getDefaultInstance())
                          .putTimerFamilySpecs("timer", TimerFamilySpec.getDefaultInstance())
                          .build()
                          .toByteString()))
          .setEnvironmentId("common")
          .build();
  Components components =
      partialComponents
          .toBuilder()
          .putTransforms("timer", timerTransform)
          .putPcollections("timer.out", pc("timer.out"))
          .putPcollections("output.out", pc("output.out"))
          .putEnvironments("common", Environments.createDockerEnvironment("common"))
          .build();
  FusedPipeline fused =
      GreedyPipelineFuser.fuse(
          Pipeline.newBuilder()
              .setComponents(components)
              .addRequirements(ParDoTranslation.REQUIRES_STATEFUL_PROCESSING_URN)
              .build());
  assertThat(
      fused.getRunnerExecutedTransforms(),
      containsInAnyOrder(
          PipelineNode.pTransform("impulse", components.getTransformsOrThrow("impulse"))));
  assertThat(
      fused.getFusedStages(),
      contains(
          ExecutableStageMatcher.withInput("impulse.out")
              .withNoOutputs()
              .withTransforms("timer")));
}
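As a hedged illustration of what a portable runner does with the fusion result (not part of the original test), each fused ExecutableStage carries the SDK environment its user code must run in; the accessor names below are believed to match the runners-core-construction API but should be checked against your Beam version:

// Illustrative only: inspect the input and environment of each fused stage.
for (ExecutableStage stage : fused.getFusedStages()) {
  RunnerApi.Environment env = stage.getEnvironment();
  System.out.println(
      "Stage reading " + stage.getInputPCollection().getId() + " runs in " + env.getUrn());
}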