Example 46 with PCollectionView

Use of org.apache.beam.sdk.values.PCollectionView in project beam by apache.

The class ParDoEvaluator, method create.

public static <InputT, OutputT> ParDoEvaluator<InputT> create(
        EvaluationContext evaluationContext,
        PipelineOptions options,
        DirectStepContext stepContext,
        AppliedPTransform<?, ?, ?> application,
        Coder<InputT> inputCoder,
        WindowingStrategy<?, ? extends BoundedWindow> windowingStrategy,
        DoFn<InputT, OutputT> fn,
        StructuralKey<?> key,
        List<PCollectionView<?>> sideInputs,
        TupleTag<OutputT> mainOutputTag,
        List<TupleTag<?>> additionalOutputTags,
        Map<TupleTag<?>, PCollection<?>> outputs,
        DoFnSchemaInformation doFnSchemaInformation,
        Map<String, PCollectionView<?>> sideInputMapping,
        DoFnRunnerFactory<InputT, OutputT> runnerFactory) {
    BundleOutputManager outputManager = createOutputManager(evaluationContext, key, outputs);
    ReadyCheckingSideInputReader sideInputReader =
        evaluationContext.createSideInputReader(sideInputs);
    Map<TupleTag<?>, Coder<?>> outputCoders =
        outputs.entrySet().stream()
            .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().getCoder()));
    PushbackSideInputDoFnRunner<InputT, OutputT> runner =
        runnerFactory.createRunner(
            options, fn, sideInputs, sideInputReader, outputManager, mainOutputTag,
            additionalOutputTags, stepContext, inputCoder, outputCoders, windowingStrategy,
            doFnSchemaInformation, sideInputMapping);
    return create(runner, stepContext, application, outputManager);
}
Also used : StatefulDoFnRunner(org.apache.beam.runners.core.StatefulDoFnRunner) UserCodeException(org.apache.beam.sdk.util.UserCodeException) PushbackSideInputDoFnRunner(org.apache.beam.runners.core.PushbackSideInputDoFnRunner) WindowedValue(org.apache.beam.sdk.util.WindowedValue) KeyedWorkItemCoder(org.apache.beam.runners.core.KeyedWorkItemCoder) DoFnRunner(org.apache.beam.runners.core.DoFnRunner) Coder(org.apache.beam.sdk.coders.Coder) HashMap(java.util.HashMap) DoFnRunners(org.apache.beam.runners.core.DoFnRunners) DoFnSchemaInformation(org.apache.beam.sdk.transforms.DoFnSchemaInformation) DoFnSignatures(org.apache.beam.sdk.transforms.reflect.DoFnSignatures) TupleTag(org.apache.beam.sdk.values.TupleTag) Map(java.util.Map) Preconditions.checkArgument(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument) TimerData(org.apache.beam.runners.core.TimerInternals.TimerData) PipelineOptions(org.apache.beam.sdk.options.PipelineOptions) AppliedPTransform(org.apache.beam.sdk.runners.AppliedPTransform) StructuralKey(org.apache.beam.runners.local.StructuralKey) DoFn(org.apache.beam.sdk.transforms.DoFn) ReadyCheckingSideInputReader(org.apache.beam.runners.core.ReadyCheckingSideInputReader) PCollection(org.apache.beam.sdk.values.PCollection) Collectors(java.util.stream.Collectors) List(java.util.List) DirectStepContext(org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext) SimplePushbackSideInputDoFnRunner(org.apache.beam.runners.core.SimplePushbackSideInputDoFnRunner) PCollectionView(org.apache.beam.sdk.values.PCollectionView) BoundedWindow(org.apache.beam.sdk.transforms.windowing.BoundedWindow) OutputManager(org.apache.beam.runners.core.DoFnRunners.OutputManager) ImmutableList(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList) WindowingStrategy(org.apache.beam.sdk.values.WindowingStrategy)
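
For context, this is how a PCollectionView is created and consumed at the pipeline level; a runner-side evaluator like the one above then resolves the view through a SideInputReader. A minimal sketch against the public SDK API (the class name SideInputExample and the threshold view are illustrative, not taken from the Beam source):

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;

public class SideInputExample {
    public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        // Materialize a singleton side input from a one-element PCollection.
        PCollectionView<Integer> threshold =
            p.apply("Threshold", Create.of(10)).apply(View.asSingleton());

        PCollection<Integer> filtered =
            p.apply("Values", Create.of(3, 12, 25))
                .apply(ParDo.of(new DoFn<Integer, Integer>() {
                    @ProcessElement
                    public void processElement(ProcessContext c) {
                        // c.sideInput(view) reads the materialized value for the
                        // window of the current element.
                        if (c.element() > c.sideInput(threshold)) {
                            c.output(c.element());
                        }
                    }
                }).withSideInputs(threshold));

        p.run().waitUntilFinish();
    }
}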

Example 47 with PCollectionView

Use of org.apache.beam.sdk.values.PCollectionView in project beam by apache.

The class FlinkStreamingPortablePipelineTranslator, method getSideInputIdToPCollectionViewMap.

private static LinkedHashMap<RunnerApi.ExecutableStagePayload.SideInputId, PCollectionView<?>>
        getSideInputIdToPCollectionViewMap(
            RunnerApi.ExecutableStagePayload stagePayload, RunnerApi.Components components) {
    RehydratedComponents rehydratedComponents = RehydratedComponents.forComponents(components);
    LinkedHashMap<RunnerApi.ExecutableStagePayload.SideInputId, PCollectionView<?>> sideInputs = new LinkedHashMap<>();
    // for PCollectionView compatibility, not used to transform materialization
    ViewFn<Iterable<WindowedValue<?>>, ?> viewFn =
        (ViewFn)
            new PCollectionViews.MultimapViewFn<>(
                (PCollectionViews.TypeDescriptorSupplier<Iterable<WindowedValue<Void>>>)
                    () -> TypeDescriptors.iterables(new TypeDescriptor<WindowedValue<Void>>() {}),
                (PCollectionViews.TypeDescriptorSupplier<Void>) TypeDescriptors::voids);
    for (RunnerApi.ExecutableStagePayload.SideInputId sideInputId : stagePayload.getSideInputsList()) {
        // TODO: local name is unique as long as only one transform with side input can be within a
        // stage
        String sideInputTag = sideInputId.getLocalName();
        String collectionId = components.getTransformsOrThrow(sideInputId.getTransformId()).getInputsOrThrow(sideInputId.getLocalName());
        RunnerApi.WindowingStrategy windowingStrategyProto = components.getWindowingStrategiesOrThrow(components.getPcollectionsOrThrow(collectionId).getWindowingStrategyId());
        final WindowingStrategy<?, ?> windowingStrategy;
        try {
            windowingStrategy = WindowingStrategyTranslation.fromProto(windowingStrategyProto, rehydratedComponents);
        } catch (InvalidProtocolBufferException e) {
            throw new IllegalStateException(String.format("Unable to hydrate side input windowing strategy %s.", windowingStrategyProto), e);
        }
        Coder<WindowedValue<Object>> coder = instantiateCoder(collectionId, components);
        // side input materialization via GBK (T -> Iterable<T>)
        WindowedValueCoder wvCoder = (WindowedValueCoder) coder;
        coder = wvCoder.withValueCoder(IterableCoder.of(wvCoder.getValueCoder()));
        sideInputs.put(
            sideInputId,
            new RunnerPCollectionView<>(
                null,
                new TupleTag<>(sideInputTag),
                viewFn,
                // TODO: support custom mapping fn
                windowingStrategy.getWindowFn().getDefaultWindowMappingFn(),
                windowingStrategy,
                coder));
    }
    return sideInputs;
}
Also used : TupleTag(org.apache.beam.sdk.values.TupleTag) LinkedHashMap(java.util.LinkedHashMap) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) ViewFn(org.apache.beam.sdk.transforms.ViewFn) WindowedValue(org.apache.beam.sdk.util.WindowedValue) PCollectionViews(org.apache.beam.sdk.values.PCollectionViews) InvalidProtocolBufferException(org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException) TypeDescriptors(org.apache.beam.sdk.values.TypeDescriptors) RunnerPCollectionView(org.apache.beam.runners.core.construction.RunnerPCollectionView) PCollectionView(org.apache.beam.sdk.values.PCollectionView) WindowedValueCoder(org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder) TypeDescriptor(org.apache.beam.sdk.values.TypeDescriptor) RehydratedComponents(org.apache.beam.runners.core.construction.RehydratedComponents)
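
The pivotal step above is rewrapping the element coder so the value type becomes Iterable<T> after the GroupByKey that materializes the view. A standalone sketch of that coder surgery, assuming a String element type in the global window for illustration:

import org.apache.beam.sdk.coders.IterableCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder;

public class CoderRewrapSketch {
    public static void main(String[] args) {
        // Coder for a hypothetical side-input PCollection<String>.
        WindowedValueCoder<String> elementCoder =
            WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE);

        // After the materializing GBK each window carries Iterable<String>, so the
        // value coder is rewrapped exactly as in the translator above.
        WindowedValueCoder<Iterable<String>> materializedCoder =
            elementCoder.withValueCoder(IterableCoder.of(elementCoder.getValueCoder()));

        System.out.println(materializedCoder);
    }
}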

Example 48 with PCollectionView

Use of org.apache.beam.sdk.values.PCollectionView in project beam by apache.

The class FlinkStreamingPortablePipelineTranslator, method transformSideInputs.

private TransformedSideInputs transformSideInputs(
        RunnerApi.ExecutableStagePayload stagePayload,
        RunnerApi.Components components,
        StreamingTranslationContext context) {
    LinkedHashMap<RunnerApi.ExecutableStagePayload.SideInputId, PCollectionView<?>> sideInputs = getSideInputIdToPCollectionViewMap(stagePayload, components);
    Map<TupleTag<?>, Integer> tagToIntMapping = new HashMap<>();
    Map<Integer, PCollectionView<?>> intToViewMapping = new HashMap<>();
    List<WindowedValueCoder<KV<Void, Object>>> kvCoders = new ArrayList<>();
    List<Coder<?>> viewCoders = new ArrayList<>();
    int count = 0;
    for (Map.Entry<RunnerApi.ExecutableStagePayload.SideInputId, PCollectionView<?>> sideInput : sideInputs.entrySet()) {
        TupleTag<?> tag = sideInput.getValue().getTagInternal();
        intToViewMapping.put(count, sideInput.getValue());
        tagToIntMapping.put(tag, count);
        count++;
        String collectionId = components.getTransformsOrThrow(sideInput.getKey().getTransformId()).getInputsOrThrow(sideInput.getKey().getLocalName());
        DataStream<Object> sideInputStream = context.getDataStreamOrThrow(collectionId);
        TypeInformation<Object> tpe = sideInputStream.getType();
        if (!(tpe instanceof CoderTypeInformation)) {
            throw new IllegalStateException("Input Stream TypeInformation is no CoderTypeInformation.");
        }
        WindowedValueCoder<Object> coder = (WindowedValueCoder) ((CoderTypeInformation) tpe).getCoder();
        Coder<KV<Void, Object>> kvCoder = KvCoder.of(VoidCoder.of(), coder.getValueCoder());
        kvCoders.add(coder.withValueCoder(kvCoder));
        // coder for materialized view matching GBK below
        WindowedValueCoder<KV<Void, Iterable<Object>>> viewCoder = coder.withValueCoder(KvCoder.of(VoidCoder.of(), IterableCoder.of(coder.getValueCoder())));
        viewCoders.add(viewCoder);
    }
    // second pass, now that we gathered the input coders
    UnionCoder unionCoder = UnionCoder.of(viewCoders);
    CoderTypeInformation<RawUnionValue> unionTypeInformation = new CoderTypeInformation<>(unionCoder, context.getPipelineOptions());
    // transform each side input to RawUnionValue and union them
    DataStream<RawUnionValue> sideInputUnion = null;
    for (Map.Entry<RunnerApi.ExecutableStagePayload.SideInputId, PCollectionView<?>> sideInput : sideInputs.entrySet()) {
        TupleTag<?> tag = sideInput.getValue().getTagInternal();
        final int intTag = tagToIntMapping.get(tag);
        RunnerApi.PTransform pTransform = components.getTransformsOrThrow(sideInput.getKey().getTransformId());
        String collectionId = pTransform.getInputsOrThrow(sideInput.getKey().getLocalName());
        DataStream<WindowedValue<?>> sideInputStream = context.getDataStreamOrThrow(collectionId);
        // insert GBK to materialize side input view
        String viewName = sideInput.getKey().getTransformId() + "-" + sideInput.getKey().getLocalName();
        WindowedValueCoder<KV<Void, Object>> kvCoder = kvCoders.get(intTag);
        DataStream<WindowedValue<KV<Void, Object>>> keyedSideInputStream = sideInputStream.map(new ToVoidKeyValue(context.getPipelineOptions()));
        SingleOutputStreamOperator<WindowedValue<KV<Void, Iterable<Object>>>> viewStream = addGBK(keyedSideInputStream, sideInput.getValue().getWindowingStrategyInternal(), kvCoder, viewName, context);
        // Assign a unique but consistent id to re-map operator state
        viewStream.uid(pTransform.getUniqueName() + "-" + sideInput.getKey().getLocalName());
        DataStream<RawUnionValue> unionValueStream = viewStream.map(new FlinkStreamingTransformTranslators.ToRawUnion<>(intTag, context.getPipelineOptions())).returns(unionTypeInformation);
        if (sideInputUnion == null) {
            sideInputUnion = unionValueStream;
        } else {
            sideInputUnion = sideInputUnion.union(unionValueStream);
        }
    }
    return new TransformedSideInputs(intToViewMapping, sideInputUnion);
}
Also used : LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) TupleTag(org.apache.beam.sdk.values.TupleTag) RunnerApi(org.apache.beam.model.pipeline.v1.RunnerApi) WindowedValue(org.apache.beam.sdk.util.WindowedValue) CoderTypeInformation(org.apache.beam.runners.flink.translation.types.CoderTypeInformation) SingletonKeyedWorkItemCoder(org.apache.beam.runners.flink.translation.wrappers.streaming.SingletonKeyedWorkItemCoder) WindowedValueCoder(org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder) KvCoder(org.apache.beam.sdk.coders.KvCoder) PipelineTranslatorUtils.instantiateCoder(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.instantiateCoder) IterableCoder(org.apache.beam.sdk.coders.IterableCoder) VoidCoder(org.apache.beam.sdk.coders.VoidCoder) UnionCoder(org.apache.beam.sdk.transforms.join.UnionCoder) Coder(org.apache.beam.sdk.coders.Coder) ByteArrayCoder(org.apache.beam.sdk.coders.ByteArrayCoder) RawUnionValue(org.apache.beam.sdk.transforms.join.RawUnionValue) KV(org.apache.beam.sdk.values.KV) RunnerPCollectionView(org.apache.beam.runners.core.construction.RunnerPCollectionView) PCollectionView(org.apache.beam.sdk.values.PCollectionView) ImmutableMap(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap) Map(java.util.Map) BiMap(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.BiMap) TreeMap(java.util.TreeMap) PipelineTranslatorUtils.createOutputMap(org.apache.beam.runners.fnexecution.translation.PipelineTranslatorUtils.createOutputMap)
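
The union stage leans on UnionCoder and RawUnionValue from the SDK's join library: each side input is assigned an integer tag, and the tagged values of all branches flow through one coder. A small round-trip sketch (the two branch coders and the tag value 1 are illustrative):

import java.util.Arrays;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.coders.VarIntCoder;
import org.apache.beam.sdk.transforms.join.RawUnionValue;
import org.apache.beam.sdk.transforms.join.UnionCoder;
import org.apache.beam.sdk.util.CoderUtils;

public class UnionTaggingSketch {
    public static void main(String[] args) throws Exception {
        // One coder per union branch; the integer tag indexes this list, mirroring
        // tagToIntMapping in the translator above.
        UnionCoder unionCoder =
            UnionCoder.of(Arrays.asList(StringUtf8Coder.of(), VarIntCoder.of()));

        // Tag a value with branch 1 (the VarIntCoder branch) and round-trip it.
        RawUnionValue tagged = new RawUnionValue(1, 42);
        byte[] bytes = CoderUtils.encodeToByteArray(unionCoder, tagged);
        RawUnionValue decoded = CoderUtils.decodeFromByteArray(unionCoder, bytes);

        System.out.println(decoded.getUnionTag() + " -> " + decoded.getValue());
    }
}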

Example 49 with PCollectionView

Use of org.apache.beam.sdk.values.PCollectionView in project beam by apache.

The class BeamFnMapTaskExecutorFactory, method createOperationTransformForRegisterFnNodes.

private Function<Node, Node> createOperationTransformForRegisterFnNodes(
        final IdGenerator idGenerator,
        final InstructionRequestHandler instructionRequestHandler,
        final StateDelegator beamFnStateDelegator,
        final String stageName,
        final DataflowExecutionContext<?> executionContext) {
    return new TypeSafeNodeFunction<RegisterRequestNode>(RegisterRequestNode.class) {

        @Override
        public Node typedApply(RegisterRequestNode input) {
            ImmutableMap.Builder<String, DataflowOperationContext> ptransformIdToOperationContextBuilder = ImmutableMap.builder();
            ImmutableMap.Builder<String, DataflowStepContext> ptransformIdToStepContext = ImmutableMap.builder();
            for (Map.Entry<String, NameContext> entry : input.getPTransformIdToPartialNameContextMap().entrySet()) {
                NameContext fullNameContext = NameContext.create(stageName, entry.getValue().originalName(), entry.getValue().systemName(), entry.getValue().userName());
                DataflowOperationContext operationContext = executionContext.createOperationContext(fullNameContext);
                ptransformIdToOperationContextBuilder.put(entry.getKey(), operationContext);
                ptransformIdToStepContext.put(entry.getKey(), executionContext.getStepContext(operationContext));
            }
            ImmutableMap.Builder<String, NameContext> pcollectionIdToNameContext = ImmutableMap.builder();
            for (Map.Entry<String, NameContext> entry : input.getPCollectionToPartialNameContextMap().entrySet()) {
                pcollectionIdToNameContext.put(entry.getKey(), NameContext.create(stageName, entry.getValue().originalName(), entry.getValue().systemName(), entry.getValue().userName()));
            }
            ImmutableMap<String, DataflowOperationContext> ptransformIdToOperationContexts = ptransformIdToOperationContextBuilder.build();
            ImmutableMap<String, SideInputReader> ptransformIdToSideInputReaders = buildPTransformIdToSideInputReadersMap(executionContext, input, ptransformIdToOperationContexts);
            ImmutableTable<String, String, PCollectionView<?>> ptransformIdToSideInputIdToPCollectionView = buildPTransformIdToSideInputIdToPCollectionView(input);
            return OperationNode.create(
                new RegisterAndProcessBundleOperation(
                    idGenerator,
                    instructionRequestHandler,
                    beamFnStateDelegator,
                    input.getRegisterRequest(),
                    ptransformIdToOperationContexts,
                    ptransformIdToStepContext.build(),
                    ptransformIdToSideInputReaders,
                    ptransformIdToSideInputIdToPCollectionView,
                    pcollectionIdToNameContext.build(),
                    // TODO: Set NameContext properly for these operations.
                    executionContext.createOperationContext(
                        NameContext.create(stageName, stageName, stageName, stageName))));
        }
    };
}
Also used : NameContext(org.apache.beam.runners.dataflow.worker.counters.NameContext) SideInputReader(org.apache.beam.runners.core.SideInputReader) DataflowStepContext(org.apache.beam.runners.dataflow.worker.DataflowExecutionContext.DataflowStepContext) RegisterAndProcessBundleOperation(org.apache.beam.runners.dataflow.worker.fn.control.RegisterAndProcessBundleOperation) ImmutableMap(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap) RegisterRequestNode(org.apache.beam.runners.dataflow.worker.graph.Nodes.RegisterRequestNode) PCollectionView(org.apache.beam.sdk.values.PCollectionView) TypeSafeNodeFunction(org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction) Map(java.util.Map) HashMap(java.util.HashMap)
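
buildPTransformIdToSideInputIdToPCollectionView produces a Guava ImmutableTable keyed by (transform id, side-input id). A sketch of that two-level lookup, using plain Guava rather than Beam's vendored copy, with String placeholders standing in for real PCollectionView<?> instances:

import com.google.common.collect.ImmutableTable;

public class SideInputTableSketch {
    public static void main(String[] args) {
        // (row, column, value) = (transform id, side-input local name, view).
        ImmutableTable<String, String, String> views =
            ImmutableTable.<String, String, String>builder()
                .put("ptransform-1", "side0", "view-a")
                .put("ptransform-1", "side1", "view-b")
                .put("ptransform-2", "side0", "view-c")
                .build();

        System.out.println(views.get("ptransform-1", "side1")); // view-b
        System.out.println(views.row("ptransform-1")); // {side0=view-a, side1=view-b}
    }
}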

Example 50 with PCollectionView

Use of org.apache.beam.sdk.values.PCollectionView in project beam by apache.

The class CombineValuesFnFactory, method create.

@Override
public ParDoFn create(
        PipelineOptions options,
        CloudObject cloudUserFn,
        @Nullable List<SideInputInfo> sideInputInfos,
        TupleTag<?> mainOutputTag,
        Map<TupleTag<?>, Integer> outputTupleTagsToReceiverIndices,
        DataflowExecutionContext<?> executionContext,
        DataflowOperationContext operationContext)
        throws Exception {
    Preconditions.checkArgument(outputTupleTagsToReceiverIndices.size() == 1, "expected exactly one output for CombineValuesFn");
    Object deserializedFn = SerializableUtils.deserializeFromByteArray(getBytes(cloudUserFn, PropertyNames.SERIALIZED_FN), "serialized user fn");
    Preconditions.checkArgument(deserializedFn instanceof AppliedCombineFn);
    AppliedCombineFn<?, ?, ?, ?> combineFn = (AppliedCombineFn<?, ?, ?, ?>) deserializedFn;
    Iterable<PCollectionView<?>> sideInputViews = combineFn.getSideInputViews();
    SideInputReader sideInputReader = executionContext.getSideInputReader(sideInputInfos, sideInputViews, operationContext);
    // Get the combine phase, default to ALL. (The implementation
    // doesn't have to split the combiner).
    String phase = getString(cloudUserFn, WorkerPropertyNames.PHASE, CombinePhase.ALL);
    DoFnInfo<?, ?> doFnInfo = getDoFnInfo(combineFn, sideInputReader, phase);
    return new SimpleParDoFn(
        options,
        DoFnInstanceManagers.singleInstance(doFnInfo),
        sideInputReader,
        mainOutputTag,
        outputTupleTagsToReceiverIndices,
        executionContext.getStepContext(operationContext),
        operationContext,
        doFnInfo.getDoFnSchemaInformation(),
        doFnInfo.getSideInputMapping(),
        SimpleDoFnRunnerFactory.INSTANCE);
}
Also used : PCollectionView(org.apache.beam.sdk.values.PCollectionView) CloudObject(org.apache.beam.runners.dataflow.util.CloudObject) SideInputReader(org.apache.beam.runners.core.SideInputReader) Structs.getString(org.apache.beam.runners.dataflow.util.Structs.getString) AppliedCombineFn(org.apache.beam.sdk.util.AppliedCombineFn)
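
The factory round-trips the serialized user fn with SerializableUtils and then checks the runtime type before casting. A sketch of the same pattern, with a hypothetical serializable fn standing in for the AppliedCombineFn payload:

import java.io.Serializable;
import org.apache.beam.sdk.util.SerializableUtils;

public class SerializedFnRoundTrip {
    // Hypothetical stand-in for the user fn carried in the CloudObject.
    static class MyFn implements Serializable {
        final int shift;
        MyFn(int shift) {
            this.shift = shift;
        }
    }

    public static void main(String[] args) {
        byte[] bytes = SerializableUtils.serializeToByteArray(new MyFn(3));

        // The description is used in the error message if deserialization fails.
        Object deserialized =
            SerializableUtils.deserializeFromByteArray(bytes, "serialized user fn");
        if (!(deserialized instanceof MyFn)) {
            throw new IllegalArgumentException("expected MyFn");
        }
        MyFn fn = (MyFn) deserialized;
        System.out.println(fn.shift); // 3
    }
}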

Aggregations

PCollectionView (org.apache.beam.sdk.values.PCollectionView): 67
Map (java.util.Map): 29
HashMap (java.util.HashMap): 28
Test (org.junit.Test): 28
TupleTag (org.apache.beam.sdk.values.TupleTag): 27
BoundedWindow (org.apache.beam.sdk.transforms.windowing.BoundedWindow): 22
Coder (org.apache.beam.sdk.coders.Coder): 21
KV (org.apache.beam.sdk.values.KV): 20
Instant (org.joda.time.Instant): 20
KvCoder (org.apache.beam.sdk.coders.KvCoder): 18
WindowedValue (org.apache.beam.sdk.util.WindowedValue): 18
PCollection (org.apache.beam.sdk.values.PCollection): 18
DoFn (org.apache.beam.sdk.transforms.DoFn): 16
ArrayList (java.util.ArrayList): 15
IntervalWindow (org.apache.beam.sdk.transforms.windowing.IntervalWindow): 14
List (java.util.List): 13
ImmutableMap (org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap): 13
IOException (java.io.IOException): 12
RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi): 12
ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString): 10