Search in sources:

Example 1 with ProcessBundleSplitResponse

Use of org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse in the Apache Beam project.

From class BeamFnDataReadRunnerTest, method executeSplit.

private static ProcessBundleSplitResponse executeSplit(BeamFnDataReadRunner<String> readRunner, String pTransformId, String bundleId, long index, double fractionOfRemainder, long inputElements, List<Long> allowedSplitPoints) throws Exception {
    // Forward elements so the runner's current element index equals `index` when the split request arrives.
    for (long i = -1; i < index; i++) {
        readRunner.forwardElementToConsumer(valueInGlobalWindow(Long.valueOf(i).toString()));
    }
    ProcessBundleSplitRequest request =
        ProcessBundleSplitRequest.newBuilder()
            .setInstructionId(bundleId)
            .putDesiredSplits(
                pTransformId,
                DesiredSplit.newBuilder()
                    .setEstimatedInputElements(inputElements)
                    .setFractionOfRemainder(fractionOfRemainder)
                    .addAllAllowedSplitPoints(allowedSplitPoints)
                    .build())
            .build();
    ProcessBundleSplitResponse.Builder responseBuilder = ProcessBundleSplitResponse.newBuilder();
    readRunner.trySplit(request, responseBuilder);
    return responseBuilder.build();
}
Also used: ProcessBundleSplitResponse (org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse), ProcessBundleSplitRequest (org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitRequest)
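
Both messages above are ordinary protobuf types from BeamFnApi, so they can be constructed and inspected without a harness or runner. Below is a minimal, hedged sketch (not taken from the Beam tests) that builds a split request and a matching response by hand; the instruction id "bundle-1", the transform id "read", and the element numbers are made-up illustration values.

import java.util.Arrays;

import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitRequest;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitRequest.DesiredSplit;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse.ChannelSplit;

public class SplitProtoSketch {
    public static void main(String[] args) {
        // A request asking to split bundle "bundle-1" at half of the remaining work on the
        // (hypothetical) transform "read", allowing splits only at element indices 2, 3 and 4.
        ProcessBundleSplitRequest request =
            ProcessBundleSplitRequest.newBuilder()
                .setInstructionId("bundle-1")
                .putDesiredSplits(
                    "read",
                    DesiredSplit.newBuilder()
                        .setEstimatedInputElements(5)
                        .setFractionOfRemainder(0.5)
                        .addAllAllowedSplitPoints(Arrays.asList(2L, 3L, 4L))
                        .build())
                .build();

        // A response reporting that elements up to index 2 remain in the primary bundle and
        // elements from index 3 onward are returned to the runner as the residual.
        ProcessBundleSplitResponse response =
            ProcessBundleSplitResponse.newBuilder()
                .addChannelSplits(
                    ChannelSplit.newBuilder()
                        .setTransformId("read")
                        .setLastPrimaryElement(2)
                        .setFirstResidualElement(3))
                .build();

        System.out.println(request);
        System.out.println(response);
    }
}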

Example 2 with ProcessBundleSplitResponse

Use of org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse in the Apache Beam project.

From class RemoteExecutionTest, method testSplit.

@Test(timeout = 60000L)
public void testSplit() throws Exception {
    launchSdkHarness(PipelineOptionsFactory.create());
    Pipeline p = Pipeline.create();
    p.apply("impulse", Impulse.create()).apply("create", ParDo.of(new DoFn<byte[], String>() {

        @ProcessElement
        public void process(ProcessContext ctxt) {
            ctxt.output("zero");
            ctxt.output(WaitingTillSplitRestrictionTracker.WAIT_TILL_SPLIT);
            ctxt.output("two");
        }
    })).apply("forceSplit", ParDo.of(new DoFn<String, String>() {

        @GetInitialRestriction
        public String getInitialRestriction(@Element String element) {
            return element;
        }

        @NewTracker
        public WaitingTillSplitRestrictionTracker newTracker(@Restriction String restriction) {
            return new WaitingTillSplitRestrictionTracker(restriction);
        }

        @ProcessElement
        public void process(RestrictionTracker<String, Void> tracker, ProcessContext context) {
            while (tracker.tryClaim(null)) {
            }
            context.output(tracker.currentRestriction());
        }
    })).apply("addKeys", WithKeys.of("foo"))
        .setCoder(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()))
        .apply("gbk", GroupByKey.create());
    RunnerApi.Pipeline pipeline = PipelineTranslation.toProto(p);
    // Expand any splittable DoFns within the graph to enable sizing and splitting of bundles.
    RunnerApi.Pipeline pipelineWithSdfExpanded = ProtoOverrides.updateTransform(PTransformTranslation.PAR_DO_TRANSFORM_URN, pipeline, SplittableParDoExpander.createSizedReplacement());
    FusedPipeline fused = GreedyPipelineFuser.fuse(pipelineWithSdfExpanded);
    // Find the fused stage with the SDF ProcessSizedElementAndRestriction transform
    Optional<ExecutableStage> optionalStage =
        Iterables.tryFind(
            fused.getFusedStages(),
            (ExecutableStage stage) ->
                Iterables.filter(
                        stage.getTransforms(),
                        (PTransformNode node) ->
                            PTransformTranslation.SPLITTABLE_PROCESS_SIZED_ELEMENTS_AND_RESTRICTIONS_URN
                                .equals(node.getTransform().getSpec().getUrn()))
                    .iterator()
                    .hasNext());
    checkState(optionalStage.isPresent(), "Expected a stage with SDF ProcessSizedElementAndRestriction.");
    ExecutableStage stage = optionalStage.get();
    ExecutableProcessBundleDescriptor descriptor = ProcessBundleDescriptors.fromExecutableStage("my_stage", stage, dataServer.getApiServiceDescriptor());
    BundleProcessor processor = controlClient.getProcessor(descriptor.getProcessBundleDescriptor(), descriptor.getRemoteInputDestinations());
    Map<String, ? super Coder<WindowedValue<?>>> remoteOutputCoders = descriptor.getRemoteOutputCoders();
    Map<String, Collection<? super WindowedValue<?>>> outputValues = new HashMap<>();
    Map<String, RemoteOutputReceiver<?>> outputReceivers = new HashMap<>();
    for (Entry<String, ? super Coder<WindowedValue<?>>> remoteOutputCoder : remoteOutputCoders.entrySet()) {
        List<? super WindowedValue<?>> outputContents = Collections.synchronizedList(new ArrayList<>());
        outputValues.put(remoteOutputCoder.getKey(), outputContents);
        outputReceivers.put(remoteOutputCoder.getKey(), RemoteOutputReceiver.of((Coder) remoteOutputCoder.getValue(), (FnDataReceiver<? super WindowedValue<?>>) outputContents::add));
    }
    List<ProcessBundleSplitResponse> splitResponses = new ArrayList<>();
    List<ProcessBundleResponse> checkpointResponses = new ArrayList<>();
    List<String> requestsFinalization = new ArrayList<>();
    ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
    ScheduledFuture<Object> future;
    // Execute the remote bundle.
    try (RemoteBundle bundle =
            processor.newBundle(
                outputReceivers,
                Collections.emptyMap(),
                StateRequestHandler.unsupported(),
                BundleProgressHandler.ignored(),
                splitResponses::add,
                checkpointResponses::add,
                requestsFinalization::add)) {
        Iterables.getOnlyElement(bundle.getInputReceivers().values())
            .accept(
                valueInGlobalWindow(
                    sdfSizedElementAndRestrictionForTest(
                        WaitingTillSplitRestrictionTracker.WAIT_TILL_SPLIT)));
        // Keep sending splits until the bundle terminates.
        future =
            (ScheduledFuture)
                executor.scheduleWithFixedDelay(
                    () -> bundle.split(0.5), 0L, 100L, TimeUnit.MILLISECONDS);
    }
    future.cancel(false);
    executor.shutdown();
    assertTrue(requestsFinalization.isEmpty());
    assertTrue(checkpointResponses.isEmpty());
    // We only validate the last split response since it is the only one that could possibly
    // contain the SDF split; all earlier responses merely narrow the ChannelSplit range.
    assertFalse(splitResponses.isEmpty());
    ProcessBundleSplitResponse splitResponse = splitResponses.get(splitResponses.size() - 1);
    ChannelSplit channelSplit = Iterables.getOnlyElement(splitResponse.getChannelSplitsList());
    // Only one outcome is possible for the final split, since the SDF blocks the bundle
    // from completing and therefore had to be split.
    assertEquals(-1L, channelSplit.getLastPrimaryElement());
    assertEquals(1L, channelSplit.getFirstResidualElement());
    assertEquals(1, splitResponse.getPrimaryRootsCount());
    assertEquals(1, splitResponse.getResidualRootsCount());
    assertThat(
        Iterables.getOnlyElement(outputValues.values()),
        containsInAnyOrder(
            valueInGlobalWindow(KV.of("foo", WaitingTillSplitRestrictionTracker.PRIMARY))));
}
Also used: ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), HashMap (java.util.HashMap), PTransformNode (org.apache.beam.runners.core.construction.graph.PipelineNode.PTransformNode), ArrayList (java.util.ArrayList), ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString), RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi), BundleProcessor (org.apache.beam.runners.fnexecution.control.SdkHarnessClient.BundleProcessor), WindowedValue (org.apache.beam.sdk.util.WindowedValue), ExecutableStage (org.apache.beam.runners.core.construction.graph.ExecutableStage), ExecutableProcessBundleDescriptor (org.apache.beam.runners.fnexecution.control.ProcessBundleDescriptors.ExecutableProcessBundleDescriptor), ProcessBundleResponse (org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleResponse), KvCoder (org.apache.beam.sdk.coders.KvCoder), Coder (org.apache.beam.sdk.coders.Coder), StringUtf8Coder (org.apache.beam.sdk.coders.StringUtf8Coder), BigEndianLongCoder (org.apache.beam.sdk.coders.BigEndianLongCoder), ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService), FnDataReceiver (org.apache.beam.sdk.fn.data.FnDataReceiver), FusedPipeline (org.apache.beam.runners.core.construction.graph.FusedPipeline), Pipeline (org.apache.beam.sdk.Pipeline), ProcessBundleSplitResponse (org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse), DoFn (org.apache.beam.sdk.transforms.DoFn), Collection (java.util.Collection), PCollection (org.apache.beam.sdk.values.PCollection), ChannelSplit (org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse.ChannelSplit), Test (org.junit.Test)
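
To make the assertions above easier to read: lastPrimaryElement == -1 together with firstResidualElement == 1 means that no input element stays unconditionally in the primary, the single in-flight element at index 0 is the one covered by the returned primary and residual roots, and every element from index 1 onward is handed back to the runner as channel residual. The sketch below only illustrates that reading; the class and the method describeChannelSplit are made-up names, not part of the Beam API.

import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse.ChannelSplit;

public class ChannelSplitReadingSketch {

    // Hypothetical helper: spells out the element ranges implied by a single ChannelSplit.
    static String describeChannelSplit(ChannelSplit split) {
        long lastPrimary = split.getLastPrimaryElement();
        long firstResidual = split.getFirstResidualElement();
        return String.format(
            "transform %s: elements [0..%d] stay primary, elements in (%d..%d) are covered by the "
                + "returned primary/residual roots, elements from %d onward are channel residual",
            split.getTransformId(), lastPrimary, lastPrimary, firstResidual, firstResidual);
    }

    public static void main(String[] args) {
        // The shape asserted in the test above: primary range is empty (last primary element -1),
        // the residual starts at element 1, so element 0 is the element split by the SDF.
        ChannelSplit split =
            ChannelSplit.newBuilder()
                .setTransformId("read")
                .setLastPrimaryElement(-1)
                .setFirstResidualElement(1)
                .build();
        System.out.println(describeChannelSplit(split));
    }
}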

Aggregations

ProcessBundleSplitResponse (org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse): 2
ArrayList (java.util.ArrayList): 1
Collection (java.util.Collection): 1
HashMap (java.util.HashMap): 1
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 1
ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService): 1
ProcessBundleResponse (org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleResponse): 1
ProcessBundleSplitRequest (org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitRequest): 1
ChannelSplit (org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse.ChannelSplit): 1
RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi): 1
ExecutableStage (org.apache.beam.runners.core.construction.graph.ExecutableStage): 1
FusedPipeline (org.apache.beam.runners.core.construction.graph.FusedPipeline): 1
PTransformNode (org.apache.beam.runners.core.construction.graph.PipelineNode.PTransformNode): 1
ExecutableProcessBundleDescriptor (org.apache.beam.runners.fnexecution.control.ProcessBundleDescriptors.ExecutableProcessBundleDescriptor): 1
BundleProcessor (org.apache.beam.runners.fnexecution.control.SdkHarnessClient.BundleProcessor): 1
Pipeline (org.apache.beam.sdk.Pipeline): 1
BigEndianLongCoder (org.apache.beam.sdk.coders.BigEndianLongCoder): 1
Coder (org.apache.beam.sdk.coders.Coder): 1
KvCoder (org.apache.beam.sdk.coders.KvCoder): 1
StringUtf8Coder (org.apache.beam.sdk.coders.StringUtf8Coder): 1