Example 16 with WindowingStrategy

Use of org.apache.beam.model.pipeline.v1.RunnerApi.WindowingStrategy in project beam by apache.

From the class GroupAlsoByWindowParDoFnFactory, method create:

@Override
public ParDoFn create(PipelineOptions options, CloudObject cloudUserFn, @Nullable List<SideInputInfo> sideInputInfos, TupleTag<?> mainOutputTag, Map<TupleTag<?>, Integer> outputTupleTagsToReceiverIndices, final DataflowExecutionContext<?> executionContext, DataflowOperationContext operationContext) throws Exception {
    Map.Entry<TupleTag<?>, Integer> entry = Iterables.getOnlyElement(outputTupleTagsToReceiverIndices.entrySet());
    checkArgument(entry.getKey().equals(mainOutputTag), "Output tags should reference only the main output tag: %s vs %s", entry.getKey(), mainOutputTag);
    checkArgument(entry.getValue() == 0, "There should be a single receiver, but using receiver index %s", entry.getValue());
    byte[] encodedWindowingStrategy = getBytes(cloudUserFn, PropertyNames.SERIALIZED_FN);
    WindowingStrategy windowingStrategy;
    try {
        windowingStrategy = deserializeWindowingStrategy(encodedWindowingStrategy);
    } catch (Exception e) {
        // TODO: Catch block disappears, becoming an error once Python SDK is compliant.
        if (DataflowRunner.hasExperiment(options.as(DataflowPipelineDebugOptions.class), "beam_fn_api")) {
            LOG.info("FnAPI: Unable to deserialize windowing strategy, assuming default", e);
            windowingStrategy = WindowingStrategy.globalDefault();
        } else {
            throw e;
        }
    }
    byte[] serializedCombineFn = getBytes(cloudUserFn, WorkerPropertyNames.COMBINE_FN, null);
    AppliedCombineFn<?, ?, ?, ?> combineFn = null;
    if (serializedCombineFn != null) {
        Object combineFnObj = SerializableUtils.deserializeFromByteArray(serializedCombineFn, "serialized combine fn");
        checkArgument(combineFnObj instanceof AppliedCombineFn, "unexpected kind of AppliedCombineFn: " + combineFnObj.getClass().getName());
        combineFn = (AppliedCombineFn<?, ?, ?, ?>) combineFnObj;
    }
    Map<String, Object> inputCoderObject = getObject(cloudUserFn, WorkerPropertyNames.INPUT_CODER);
    Coder<?> inputCoder = CloudObjects.coderFromCloudObject(CloudObject.fromSpec(inputCoderObject));
    checkArgument(inputCoder instanceof WindowedValueCoder, "Expected WindowedValueCoder for inputCoder, got: " + inputCoder.getClass().getName());
    @SuppressWarnings("unchecked") WindowedValueCoder<?> windowedValueCoder = (WindowedValueCoder<?>) inputCoder;
    Coder<?> elemCoder = windowedValueCoder.getValueCoder();
    checkArgument(elemCoder instanceof KvCoder, "Expected KvCoder for inputCoder, got: " + elemCoder.getClass().getName());
    @SuppressWarnings("unchecked") KvCoder<?, ?> kvCoder = (KvCoder<?, ?>) elemCoder;
    boolean isStreamingPipeline = options.as(StreamingOptions.class).isStreaming();
    SideInputReader sideInputReader = NullSideInputReader.empty();
    @Nullable AppliedCombineFn<?, ?, ?, ?> maybeMergingCombineFn = null;
    if (combineFn != null) {
        sideInputReader = executionContext.getSideInputReader(sideInputInfos, combineFn.getSideInputViews(), operationContext);
        String phase = getString(cloudUserFn, WorkerPropertyNames.PHASE, CombinePhase.ALL);
        checkArgument(phase.equals(CombinePhase.ALL) || phase.equals(CombinePhase.MERGE), "Unexpected phase: %s", phase);
        if (phase.equals(CombinePhase.MERGE)) {
            maybeMergingCombineFn = makeAppliedMergingFunction(combineFn);
        } else {
            maybeMergingCombineFn = combineFn;
        }
    }
    StateInternalsFactory<?> stateInternalsFactory = key -> executionContext.getStepContext(operationContext).stateInternals();
    // This will be a GABW Fn for either batch or streaming, with or without a combiner
    GroupAlsoByWindowFn<?, ?> fn;
    // This will be a FakeKeyedWorkItemCoder for streaming or null for batch
    Coder<?> gabwInputCoder;
    // TODO: avoid this mess of "if" branches
    if (isStreamingPipeline) {
        if (maybeMergingCombineFn == null) {
            fn = StreamingGroupAlsoByWindowsDoFns.createForIterable(windowingStrategy, stateInternalsFactory, ((KvCoder) kvCoder).getValueCoder());
            gabwInputCoder = WindmillKeyedWorkItem.FakeKeyedWorkItemCoder.of(kvCoder);
        } else {
            fn = StreamingGroupAlsoByWindowsDoFns.create(windowingStrategy, stateInternalsFactory, (AppliedCombineFn) maybeMergingCombineFn, ((KvCoder) kvCoder).getKeyCoder());
            gabwInputCoder = WindmillKeyedWorkItem.FakeKeyedWorkItemCoder.of(((AppliedCombineFn) maybeMergingCombineFn).getKvCoder());
        }
    } else {
        if (maybeMergingCombineFn == null) {
            fn = BatchGroupAlsoByWindowsDoFns.createForIterable(windowingStrategy, stateInternalsFactory, ((KvCoder) kvCoder).getValueCoder());
            gabwInputCoder = null;
        } else {
            fn = BatchGroupAlsoByWindowsDoFns.create(windowingStrategy, (AppliedCombineFn) maybeMergingCombineFn);
            gabwInputCoder = null;
        }
    }
    // TODO: related to the above, avoid this mess of "if" branches
    if (maybeMergingCombineFn != null) {
        return new GroupAlsoByWindowsParDoFn(options, fn, windowingStrategy, ((AppliedCombineFn) maybeMergingCombineFn).getSideInputViews(), gabwInputCoder, sideInputReader, mainOutputTag, executionContext.getStepContext(operationContext));
    } else {
        return new GroupAlsoByWindowsParDoFn(options, fn, windowingStrategy, null, gabwInputCoder, sideInputReader, mainOutputTag, executionContext.getStepContext(operationContext));
    }
}
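
The deserializeWindowingStrategy helper called above is not shown on this page. A minimal sketch of what it plausibly does, inferred from the RootCase, RehydratedComponents, and WindowingStrategyTranslation imports listed below (the actual helper in GroupAlsoByWindowParDoFnFactory may differ):

static WindowingStrategy<?, ?> deserializeWindowingStrategy(byte[] encodedWindowingStrategy) throws InvalidProtocolBufferException {
    // The encoded bytes are a RunnerApi.MessageWithComponents whose root must be a windowing strategy.
    RunnerApi.MessageWithComponents strategyProto = RunnerApi.MessageWithComponents.parseFrom(encodedWindowingStrategy);
    checkArgument(strategyProto.getRootCase() == RootCase.WINDOWING_STRATEGY, "Invalid windowing strategy: %s", strategyProto);
    // Rehydrate the SDK-side WindowingStrategy from its portable proto plus its components.
    return WindowingStrategyTranslation.fromProto(strategyProto.getWindowingStrategy(), RehydratedComponents.forComponents(strategyProto.getComponents()));
}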
Also used : CombineFn(org.apache.beam.sdk.transforms.Combine.CombineFn) StateInternalsFactory(org.apache.beam.runners.core.StateInternalsFactory) CoderRegistry(org.apache.beam.sdk.coders.CoderRegistry) CombineFnWithContext(org.apache.beam.sdk.transforms.CombineWithContext.CombineFnWithContext) WindowedValueCoder(org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder) LoggerFactory(org.slf4j.LoggerFactory) CloudObjects(org.apache.beam.runners.dataflow.util.CloudObjects) BatchGroupAlsoByWindowsDoFns(org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowsDoFns) Coder(org.apache.beam.sdk.coders.Coder) ListCoder(org.apache.beam.sdk.coders.ListCoder) RehydratedComponents(org.apache.beam.runners.core.construction.RehydratedComponents) ArrayList(java.util.ArrayList) GlobalCombineFn(org.apache.beam.sdk.transforms.CombineFnBase.GlobalCombineFn) DataflowPipelineDebugOptions(org.apache.beam.runners.dataflow.options.DataflowPipelineDebugOptions) TupleTag(org.apache.beam.sdk.values.TupleTag) Map(java.util.Map) Iterables(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables) Preconditions.checkArgument(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument) CloudObject(org.apache.beam.runners.dataflow.util.CloudObject) PipelineOptions(org.apache.beam.sdk.options.PipelineOptions) Nullable(org.checkerframework.checker.nullness.qual.Nullable) Structs.getBytes(org.apache.beam.runners.dataflow.util.Structs.getBytes) SideInputInfo(com.google.api.services.dataflow.model.SideInputInfo) SideInputReader(org.apache.beam.runners.core.SideInputReader) InvalidProtocolBufferException(org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) CannotProvideCoderException(org.apache.beam.sdk.coders.CannotProvideCoderException) KvCoder(org.apache.beam.sdk.coders.KvCoder) AppliedCombineFn(org.apache.beam.sdk.util.AppliedCombineFn) Logger(org.slf4j.Logger) StreamingOptions(org.apache.beam.sdk.options.StreamingOptions) NullSideInputReader(org.apache.beam.runners.core.NullSideInputReader) DataflowRunner(org.apache.beam.runners.dataflow.DataflowRunner) WorkerPropertyNames(org.apache.beam.runners.dataflow.worker.util.WorkerPropertyNames) RootCase(org.apache.beam.model.pipeline.v1.RunnerApi.MessageWithComponents.RootCase) Structs.getString(org.apache.beam.runners.dataflow.util.Structs.getString) ParDoFn(org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoFn) List(java.util.List) WindowingStrategyTranslation(org.apache.beam.runners.core.construction.WindowingStrategyTranslation) Structs.getObject(org.apache.beam.runners.dataflow.util.Structs.getObject) SerializableUtils(org.apache.beam.sdk.util.SerializableUtils) Context(org.apache.beam.sdk.transforms.CombineWithContext.Context) WindowingStrategy(org.apache.beam.sdk.values.WindowingStrategy) PropertyNames(org.apache.beam.runners.dataflow.util.PropertyNames)

Example 17 with WindowingStrategy

Use of org.apache.beam.model.pipeline.v1.RunnerApi.WindowingStrategy in project beam by apache.

From the class ParDoBoundMultiTranslator, method doTranslatePortable:

// static for serializing anonymous functions
private static <InT, OutT> void doTranslatePortable(PipelineNode.PTransformNode transform, QueryablePipeline pipeline, PortableTranslationContext ctx) {
    Map<String, String> outputs = transform.getTransform().getOutputsMap();
    final RunnerApi.ExecutableStagePayload stagePayload;
    try {
        stagePayload = RunnerApi.ExecutableStagePayload.parseFrom(transform.getTransform().getSpec().getPayload());
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    String inputId = stagePayload.getInput();
    final MessageStream<OpMessage<InT>> inputStream = ctx.getMessageStreamById(inputId);
    // Analyze side inputs
    final List<MessageStream<OpMessage<Iterable<?>>>> sideInputStreams = new ArrayList<>();
    final Map<SideInputId, PCollectionView<?>> sideInputMapping = new HashMap<>();
    final Map<String, PCollectionView<?>> idToViewMapping = new HashMap<>();
    final RunnerApi.Components components = stagePayload.getComponents();
    for (SideInputId sideInputId : stagePayload.getSideInputsList()) {
        final String sideInputCollectionId = components.getTransformsOrThrow(sideInputId.getTransformId()).getInputsOrThrow(sideInputId.getLocalName());
        final WindowingStrategy<?, BoundedWindow> windowingStrategy = WindowUtils.getWindowStrategy(sideInputCollectionId, components);
        final WindowedValue.WindowedValueCoder<?> coder = (WindowedValue.WindowedValueCoder) instantiateCoder(sideInputCollectionId, components);
        // Create a runner-side view
        final PCollectionView<?> view = createPCollectionView(sideInputId, coder, windowingStrategy);
        // Use GBK to aggregate the side inputs and then broadcast it out
        final MessageStream<OpMessage<Iterable<?>>> broadcastSideInput = groupAndBroadcastSideInput(sideInputId, sideInputCollectionId, components.getPcollectionsOrThrow(sideInputCollectionId), (WindowingStrategy) windowingStrategy, coder, ctx);
        sideInputStreams.add(broadcastSideInput);
        sideInputMapping.put(sideInputId, view);
        idToViewMapping.put(getSideInputUniqueId(sideInputId), view);
    }
    final Map<TupleTag<?>, Integer> tagToIndexMap = new HashMap<>();
    final Map<Integer, String> indexToIdMap = new HashMap<>();
    final Map<String, TupleTag<?>> idToTupleTagMap = new HashMap<>();
    // first output as the main output
    final TupleTag<OutT> mainOutputTag = outputs.isEmpty() ? null : new TupleTag(outputs.keySet().iterator().next());
    AtomicInteger index = new AtomicInteger(0);
    outputs.keySet().iterator().forEachRemaining(outputName -> {
        TupleTag<?> tupleTag = new TupleTag<>(outputName);
        tagToIndexMap.put(tupleTag, index.get());
        String collectionId = outputs.get(outputName);
        indexToIdMap.put(index.get(), collectionId);
        idToTupleTagMap.put(collectionId, tupleTag);
        index.incrementAndGet();
    });
    WindowedValue.WindowedValueCoder<InT> windowedInputCoder = WindowUtils.instantiateWindowedCoder(inputId, pipeline.getComponents());
    // TODO: support schema and side inputs for portable runner
    // Note: transform.getTransform() is an ExecutableStage, not a ParDo, so we need to extract
    // this information from its components.
    final DoFnSchemaInformation doFnSchemaInformation = null;
    final RunnerApi.PCollection input = pipeline.getComponents().getPcollectionsOrThrow(inputId);
    final PCollection.IsBounded isBounded = SamzaPipelineTranslatorUtils.isBounded(input);
    final Coder<?> keyCoder = StateUtils.isStateful(stagePayload) ? ((KvCoder) ((WindowedValue.FullWindowedValueCoder) windowedInputCoder).getValueCoder()).getKeyCoder() : null;
    final DoFnOp<InT, OutT, RawUnionValue> op = new DoFnOp<>(
            mainOutputTag,
            new NoOpDoFn<>(),
            keyCoder,
            // input coder not in use
            windowedInputCoder.getValueCoder(),
            windowedInputCoder,
            // output coders not in use
            Collections.emptyMap(),
            new ArrayList<>(sideInputMapping.values()),
            // used by java runner only
            new ArrayList<>(idToTupleTagMap.values()),
            WindowUtils.getWindowStrategy(inputId, stagePayload.getComponents()),
            idToViewMapping,
            new DoFnOp.MultiOutputManagerFactory(tagToIndexMap),
            ctx.getTransformFullName(),
            ctx.getTransformId(),
            isBounded,
            true,
            stagePayload,
            ctx.getJobInfo(),
            idToTupleTagMap,
            doFnSchemaInformation,
            sideInputMapping);
    final MessageStream<OpMessage<InT>> mergedStreams;
    if (sideInputStreams.isEmpty()) {
        mergedStreams = inputStream;
    } else {
        MessageStream<OpMessage<InT>> mergedSideInputStreams = MessageStream.mergeAll(sideInputStreams).flatMap(new SideInputWatermarkFn());
        mergedStreams = inputStream.merge(Collections.singletonList(mergedSideInputStreams));
    }
    final MessageStream<OpMessage<RawUnionValue>> taggedOutputStream = mergedStreams.flatMapAsync(OpAdapter.adapt(op));
    for (int outputIndex : tagToIndexMap.values()) {
        @SuppressWarnings("unchecked") final MessageStream<OpMessage<OutT>> outputStream = taggedOutputStream.filter(message -> message.getType() != OpMessage.Type.ELEMENT || message.getElement().getValue().getUnionTag() == outputIndex).flatMapAsync(OpAdapter.adapt(new RawUnionValueToValue()));
        ctx.registerMessageStream(indexToIdMap.get(outputIndex), outputStream);
    }
}
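
WindowUtils.getWindowStrategy used above resolves a PCollection's windowing strategy from the portable components. A minimal sketch of one way to do that lookup via the RehydratedComponents API (the helper name windowStrategyFor is ours, hypothetical, not Beam's, and the Samza helper's actual implementation may differ):

static WindowingStrategy<?, ?> windowStrategyFor(String pCollectionId, RunnerApi.Components components) throws IOException {
    // Each PCollection proto references its windowing strategy by id.
    String strategyId = components.getPcollectionsOrThrow(pCollectionId).getWindowingStrategyId();
    // RehydratedComponents converts the portable proto back into an SDK WindowingStrategy.
    return RehydratedComponents.forComponents(components).getWindowingStrategy(strategyId);
}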
Also used : WindowedValue(org.apache.beam.sdk.util.WindowedValue) PCollectionViews(org.apache.beam.sdk.values.PCollectionViews) OpMessage(org.apache.beam.runners.samza.runtime.OpMessage) DoFnSchemaInformation(org.apache.beam.sdk.transforms.DoFnSchemaInformation) RunnerPCollectionView(org.apache.beam.runners.core.construction.RunnerPCollectionView) WatermarkFunction(org.apache.samza.operators.functions.WatermarkFunction) DoFnSignatures(org.apache.beam.sdk.transforms.reflect.DoFnSignatures) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Map(java.util.Map) Iterators(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterators) StateUtils(org.apache.beam.runners.samza.util.StateUtils) KvCoder(org.apache.beam.sdk.coders.KvCoder) Collection(java.util.Collection) ServiceLoader(java.util.ServiceLoader) Collectors(java.util.stream.Collectors) TransformHierarchy(org.apache.beam.sdk.runners.TransformHierarchy) FlatMapFunction(org.apache.samza.operators.functions.FlatMapFunction) List(java.util.List) ParDo(org.apache.beam.sdk.transforms.ParDo) TypeDescriptors(org.apache.beam.sdk.values.TypeDescriptors) OpEmitter(org.apache.beam.runners.samza.runtime.OpEmitter) PipelineTranslatorUtils.instantiateCoder(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.instantiateCoder) WindowingStrategy(org.apache.beam.sdk.values.WindowingStrategy) SamzaPipelineTranslatorUtils(org.apache.beam.runners.samza.util.SamzaPipelineTranslatorUtils) KV(org.apache.beam.sdk.values.KV) TypeDescriptor(org.apache.beam.sdk.values.TypeDescriptor) OpAdapter(org.apache.beam.runners.samza.runtime.OpAdapter) DoFnOp(org.apache.beam.runners.samza.runtime.DoFnOp) Coder(org.apache.beam.sdk.coders.Coder) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) RawUnionValue(org.apache.beam.sdk.transforms.join.RawUnionValue) ViewFn(org.apache.beam.sdk.transforms.ViewFn) TupleTag(org.apache.beam.sdk.values.TupleTag) SamzaDoFnInvokerRegistrar(org.apache.beam.runners.samza.runtime.SamzaDoFnInvokerRegistrar) WindowUtils(org.apache.beam.runners.samza.util.WindowUtils) SideInputId(org.apache.beam.model.pipeline.v1.RunnerApi.ExecutableStagePayload.SideInputId) ParDoTranslation(org.apache.beam.runners.core.construction.ParDoTranslation) MessageStream(org.apache.samza.operators.MessageStream) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) DoFn(org.apache.beam.sdk.transforms.DoFn) QueryablePipeline(org.apache.beam.runners.core.construction.graph.QueryablePipeline) Op(org.apache.beam.runners.samza.runtime.Op) DoFnSignature(org.apache.beam.sdk.transforms.reflect.DoFnSignature) Iterator(java.util.Iterator) IterableCoder(org.apache.beam.sdk.coders.IterableCoder) IOException(java.io.IOException) PCollection(org.apache.beam.sdk.values.PCollection) SamzaPipelineOptions(org.apache.beam.runners.samza.SamzaPipelineOptions) PCollectionView(org.apache.beam.sdk.values.PCollectionView) BoundedWindow(org.apache.beam.sdk.transforms.windowing.BoundedWindow) Instant(org.joda.time.Instant) PipelineNode(org.apache.beam.runners.core.construction.graph.PipelineNode) VoidCoder(org.apache.beam.sdk.coders.VoidCoder) Collections(java.util.Collections)

Example 18 with WindowingStrategy

Use of org.apache.beam.model.pipeline.v1.RunnerApi.WindowingStrategy in project beam by apache.

From the class SparkStreamingPortablePipelineTranslator, method translateGroupByKey:

private static <K, V> void translateGroupByKey(PTransformNode transformNode, RunnerApi.Pipeline pipeline, SparkStreamingTranslationContext context) {
    RunnerApi.Components components = pipeline.getComponents();
    String inputId = getInputId(transformNode);
    UnboundedDataset<KV<K, V>> inputDataset = (UnboundedDataset<KV<K, V>>) context.popDataset(inputId);
    List<Integer> streamSources = inputDataset.getStreamSources();
    WindowedValue.WindowedValueCoder<KV<K, V>> inputCoder = getWindowedValueCoder(inputId, components);
    KvCoder<K, V> inputKvCoder = (KvCoder<K, V>) inputCoder.getValueCoder();
    WindowingStrategy windowingStrategy = getWindowingStrategy(inputId, components);
    WindowFn<Object, BoundedWindow> windowFn = windowingStrategy.getWindowFn();
    WindowedValue.WindowedValueCoder<V> wvCoder = WindowedValue.FullWindowedValueCoder.of(inputKvCoder.getValueCoder(), windowFn.windowCoder());
    JavaDStream<WindowedValue<KV<K, Iterable<V>>>> outStream = SparkGroupAlsoByWindowViaWindowSet.groupByKeyAndWindow(inputDataset.getDStream(), inputKvCoder.getKeyCoder(), wvCoder, windowingStrategy, context.getSerializableOptions(), streamSources, transformNode.getId());
    context.pushDataset(getOutputId(transformNode), new UnboundedDataset<>(outStream, streamSources));
}
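
The wvCoder built above pairs the KV value coder with the window coder taken from the strategy's WindowFn. A standalone sketch with concrete stand-in coders (StringUtf8Coder, VarLongCoder, and the global window coder are illustrative assumptions, not taken from this translator):

// Stand-ins for the rehydrated coders in the snippet above.
KvCoder<String, Long> exampleKvCoder = KvCoder.of(StringUtf8Coder.of(), VarLongCoder.of());
// Pair the value coder with a window coder so grouped values keep their window metadata.
WindowedValue.FullWindowedValueCoder<Long> exampleWvCoder =
        WindowedValue.FullWindowedValueCoder.of(exampleKvCoder.getValueCoder(), GlobalWindow.Coder.INSTANCE);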
Also used : KvCoder(org.apache.beam.sdk.coders.KvCoder) KV(org.apache.beam.sdk.values.KV) PipelineTranslatorUtils.getWindowingStrategy(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.getWindowingStrategy) WindowingStrategy(org.apache.beam.sdk.values.WindowingStrategy) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) UnboundedDataset(org.apache.beam.runners.spark.translation.streaming.UnboundedDataset) WindowedValue(org.apache.beam.sdk.util.WindowedValue) BoundedWindow(org.apache.beam.sdk.transforms.windowing.BoundedWindow)

Example 19 with WindowingStrategy

Use of org.apache.beam.model.pipeline.v1.RunnerApi.WindowingStrategy in project beam by apache.

From the class SparkBatchPortablePipelineTranslator, method translateExecutableStage:

private static <InputT, OutputT, SideInputT> void translateExecutableStage(PTransformNode transformNode, RunnerApi.Pipeline pipeline, SparkTranslationContext context) {
    RunnerApi.ExecutableStagePayload stagePayload;
    try {
        stagePayload = RunnerApi.ExecutableStagePayload.parseFrom(transformNode.getTransform().getSpec().getPayload());
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    String inputPCollectionId = stagePayload.getInput();
    Dataset inputDataset = context.popDataset(inputPCollectionId);
    Map<String, String> outputs = transformNode.getTransform().getOutputsMap();
    BiMap<String, Integer> outputExtractionMap = createOutputMap(outputs.values());
    Components components = pipeline.getComponents();
    Coder windowCoder = getWindowingStrategy(inputPCollectionId, components).getWindowFn().windowCoder();
    ImmutableMap<String, Tuple2<Broadcast<List<byte[]>>, WindowedValueCoder<SideInputT>>> broadcastVariables = broadcastSideInputs(stagePayload, context);
    JavaRDD<RawUnionValue> staged;
    if (stagePayload.getUserStatesCount() > 0 || stagePayload.getTimersCount() > 0) {
        Coder<WindowedValue<InputT>> windowedInputCoder = instantiateCoder(inputPCollectionId, components);
        Coder valueCoder = ((WindowedValue.FullWindowedValueCoder) windowedInputCoder).getValueCoder();
        // Stateful stages only allow KV inputs so that elements can be grouped on the key
        if (!(valueCoder instanceof KvCoder)) {
            throw new IllegalStateException(String.format(Locale.ENGLISH, "The element coder for stateful DoFn '%s' must be KvCoder but is: %s", inputPCollectionId, valueCoder.getClass().getSimpleName()));
        }
        Coder keyCoder = ((KvCoder) valueCoder).getKeyCoder();
        Coder innerValueCoder = ((KvCoder) valueCoder).getValueCoder();
        WindowingStrategy windowingStrategy = getWindowingStrategy(inputPCollectionId, components);
        WindowFn<Object, BoundedWindow> windowFn = windowingStrategy.getWindowFn();
        WindowedValue.WindowedValueCoder wvCoder = WindowedValue.FullWindowedValueCoder.of(innerValueCoder, windowFn.windowCoder());
        JavaPairRDD<ByteArray, Iterable<WindowedValue<KV>>> groupedByKey = groupByKeyPair(inputDataset, keyCoder, wvCoder);
        SparkExecutableStageFunction<KV, SideInputT> function = new SparkExecutableStageFunction<>(context.getSerializableOptions(), stagePayload, context.jobInfo, outputExtractionMap, SparkExecutableStageContextFactory.getInstance(), broadcastVariables, MetricsAccumulator.getInstance(), windowCoder);
        staged = groupedByKey.flatMap(function.forPair());
    } else {
        JavaRDD<WindowedValue<InputT>> inputRdd2 = ((BoundedDataset<InputT>) inputDataset).getRDD();
        SparkExecutableStageFunction<InputT, SideInputT> function2 = new SparkExecutableStageFunction<>(context.getSerializableOptions(), stagePayload, context.jobInfo, outputExtractionMap, SparkExecutableStageContextFactory.getInstance(), broadcastVariables, MetricsAccumulator.getInstance(), windowCoder);
        staged = inputRdd2.mapPartitions(function2);
    }
    String intermediateId = getExecutableStageIntermediateId(transformNode);
    context.pushDataset(intermediateId, new Dataset() {

        @Override
        public void cache(String storageLevel, Coder<?> coder) {
            StorageLevel level = StorageLevel.fromString(storageLevel);
            staged.persist(level);
        }

        @Override
        public void action() {
            // Empty function to force computation of RDD.
            staged.foreach(TranslationUtils.emptyVoidFunction());
        }

        @Override
        public void setName(String name) {
            staged.setName(name);
        }
    });
    // pop dataset to mark RDD as used
    context.popDataset(intermediateId);
    for (String outputId : outputs.values()) {
        JavaRDD<WindowedValue<OutputT>> outputRdd = staged.flatMap(new SparkExecutableStageExtractionFunction<>(outputExtractionMap.get(outputId)));
        context.pushDataset(outputId, new BoundedDataset<>(outputRdd));
    }
    if (outputs.isEmpty()) {
        // After pipeline translation, we traverse the set of unconsumed PCollections and add a
        // no-op sink to each to make sure they are materialized by Spark. However, some SDK-executed
        // stages have no runner-visible output after fusion. We handle this case by adding a sink
        // here.
        JavaRDD<WindowedValue<OutputT>> outputRdd = staged.flatMap((rawUnionValue) -> Collections.emptyIterator());
        context.pushDataset(String.format("EmptyOutputSink_%d", context.nextSinkId()), new BoundedDataset<>(outputRdd));
    }
}
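
createOutputMap above is not shown; all it needs to produce is a deterministic bijection from output PCollection ids to union tags. A minimal sketch under that assumption (guava's BiMap and java.util.TreeSet assumed; this is not the translator's actual implementation):

static BiMap<String, Integer> createOutputMap(Collection<String> outputIds) {
    // Sort the ids so every worker assigns the same union tag to the same output.
    ImmutableBiMap.Builder<String, Integer> builder = ImmutableBiMap.builder();
    int unionTag = 0;
    for (String id : new TreeSet<>(outputIds)) {
        builder.put(id, unionTag++);
    }
    return builder.build();
}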
Also used : PipelineTranslatorUtils.getWindowingStrategy(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.getWindowingStrategy) WindowingStrategy(org.apache.beam.sdk.values.WindowingStrategy) Components(org.apache.beam.model.pipeline.v1.RunnerApi.Components) WindowedValueCoder(org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) WindowedValue(org.apache.beam.sdk.util.WindowedValue) BoundedWindow(org.apache.beam.sdk.transforms.windowing.BoundedWindow) ByteArray(org.apache.beam.runners.spark.util.ByteArray) List(java.util.List) StorageLevel(org.apache.spark.storage.StorageLevel) KvCoder(org.apache.beam.sdk.coders.KvCoder) PipelineTranslatorUtils.instantiateCoder(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.instantiateCoder) PipelineTranslatorUtils.getWindowedValueCoder(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.getWindowedValueCoder) Coder(org.apache.beam.sdk.coders.Coder) ByteArrayCoder(org.apache.beam.sdk.coders.ByteArrayCoder) RawUnionValue(org.apache.beam.sdk.transforms.join.RawUnionValue) IOException(java.io.IOException) KV(org.apache.beam.sdk.values.KV) Tuple2(scala.Tuple2)

Example 20 with WindowingStrategy

Use of org.apache.beam.model.pipeline.v1.RunnerApi.WindowingStrategy in project beam by apache.

From the class SparkBatchPortablePipelineTranslator, method translateGroupByKey:

private static <K, V> void translateGroupByKey(PTransformNode transformNode, RunnerApi.Pipeline pipeline, SparkTranslationContext context) {
    RunnerApi.Components components = pipeline.getComponents();
    String inputId = getInputId(transformNode);
    Dataset inputDataset = context.popDataset(inputId);
    JavaRDD<WindowedValue<KV<K, V>>> inputRdd = ((BoundedDataset<KV<K, V>>) inputDataset).getRDD();
    WindowedValueCoder<KV<K, V>> inputCoder = getWindowedValueCoder(inputId, components);
    KvCoder<K, V> inputKvCoder = (KvCoder<K, V>) inputCoder.getValueCoder();
    Coder<K> inputKeyCoder = inputKvCoder.getKeyCoder();
    Coder<V> inputValueCoder = inputKvCoder.getValueCoder();
    WindowingStrategy windowingStrategy = getWindowingStrategy(inputId, components);
    WindowFn<Object, BoundedWindow> windowFn = windowingStrategy.getWindowFn();
    WindowedValue.WindowedValueCoder<V> wvCoder = WindowedValue.FullWindowedValueCoder.of(inputValueCoder, windowFn.windowCoder());
    JavaRDD<WindowedValue<KV<K, Iterable<V>>>> groupedByKeyAndWindow;
    Partitioner partitioner = getPartitioner(context);
    // As this is batch, we can ignore triggering and allowed lateness parameters.
    if (windowingStrategy.getWindowFn().equals(new GlobalWindows()) && windowingStrategy.getTimestampCombiner().equals(TimestampCombiner.END_OF_WINDOW)) {
        // we can drop the windows and recover them later
        groupedByKeyAndWindow = GroupNonMergingWindowsFunctions.groupByKeyInGlobalWindow(inputRdd, inputKeyCoder, inputValueCoder, partitioner);
    } else if (GroupNonMergingWindowsFunctions.isEligibleForGroupByWindow(windowingStrategy)) {
        // we can have a memory sensitive translation for non-merging windows
        groupedByKeyAndWindow = GroupNonMergingWindowsFunctions.groupByKeyAndWindow(inputRdd, inputKeyCoder, inputValueCoder, windowingStrategy, partitioner);
    } else {
        JavaRDD<KV<K, Iterable<WindowedValue<V>>>> groupedByKeyOnly = GroupCombineFunctions.groupByKeyOnly(inputRdd, inputKeyCoder, wvCoder, partitioner);
        // for batch, GroupAlsoByWindow uses an in-memory StateInternals.
        groupedByKeyAndWindow = groupedByKeyOnly.flatMap(new SparkGroupAlsoByWindowViaOutputBufferFn<>(windowingStrategy, new TranslationUtils.InMemoryStateInternalsFactory<>(), SystemReduceFn.buffering(inputValueCoder), context.serializablePipelineOptions));
    }
    context.pushDataset(getOutputId(transformNode), new BoundedDataset<>(groupedByKeyAndWindow));
}
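
The first branch's eligibility test reads as a standalone predicate; a sketch that mirrors the condition above (TimestampCombiner from org.apache.beam.sdk.transforms.windowing assumed):

// Windows can be dropped before grouping and recovered afterwards only when
// grouping in the global window with END_OF_WINDOW output timestamps.
static boolean canGroupInGlobalWindow(WindowingStrategy<?, ?> windowingStrategy) {
    return windowingStrategy.getWindowFn().equals(new GlobalWindows())
            && windowingStrategy.getTimestampCombiner().equals(TimestampCombiner.END_OF_WINDOW);
}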
Also used : PipelineTranslatorUtils.getWindowingStrategy(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.getWindowingStrategy) WindowingStrategy(org.apache.beam.sdk.values.WindowingStrategy) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) WindowedValue(org.apache.beam.sdk.util.WindowedValue) KV(org.apache.beam.sdk.values.KV) Components(org.apache.beam.model.pipeline.v1.RunnerApi.Components) BoundedWindow(org.apache.beam.sdk.transforms.windowing.BoundedWindow) Partitioner(org.apache.spark.Partitioner) HashPartitioner(org.apache.spark.HashPartitioner) GlobalWindows(org.apache.beam.sdk.transforms.windowing.GlobalWindows) KvCoder(org.apache.beam.sdk.coders.KvCoder) JavaRDD(org.apache.spark.api.java.JavaRDD)

Aggregations

Types used across these examples, with usage counts:

RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi): 15
WindowingStrategy (org.apache.beam.sdk.values.WindowingStrategy): 9
WindowedValue (org.apache.beam.sdk.util.WindowedValue): 7
RehydratedComponents (org.apache.beam.runners.core.construction.RehydratedComponents): 6
KvCoder (org.apache.beam.sdk.coders.KvCoder): 6
BoundedWindow (org.apache.beam.sdk.transforms.windowing.BoundedWindow): 6
KV (org.apache.beam.sdk.values.KV): 6
InvalidProtocolBufferException (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException): 6
IOException (java.io.IOException): 5
PCollectionView (org.apache.beam.sdk.values.PCollectionView): 5
ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString): 5
List (java.util.List): 4
Map (java.util.Map): 4
Structs.getString (org.apache.beam.runners.dataflow.util.Structs.getString): 4
WindowedValueCoder (org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder): 4
Components (org.apache.beam.model.pipeline.v1.RunnerApi.Components): 3
CloudObject (org.apache.beam.runners.dataflow.util.CloudObject): 3
PipelineTranslatorUtils.getWindowingStrategy (org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.getWindowingStrategy): 3
Coder (org.apache.beam.sdk.coders.Coder): 3
TupleTag (org.apache.beam.sdk.values.TupleTag): 3