Use of org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction in project beam by apache.
From class BeamFnMapTaskExecutorFactory, method createOperationTransformForFetchAndFilterStreamingSideInputNodes.
private Function<Node, Node> createOperationTransformForFetchAndFilterStreamingSideInputNodes(
    MutableNetwork<Node, Edge> network,
    IdGenerator idGenerator,
    InstructionRequestHandler instructionRequestHandler,
    FnDataService beamFnDataService,
    Endpoints.ApiServiceDescriptor dataApiServiceDescriptor,
    DataflowExecutionContext executionContext,
    String stageName) {
  return new TypeSafeNodeFunction<FetchAndFilterStreamingSideInputsNode>(
      FetchAndFilterStreamingSideInputsNode.class) {
    @Override
    public Node typedApply(FetchAndFilterStreamingSideInputsNode input) {
      // The node has exactly one successor, which receives the filtered output.
      OutputReceiverNode output =
          (OutputReceiverNode) Iterables.getOnlyElement(network.successors(input));
      DataflowOperationContext operationContext =
          executionContext.createOperationContext(
              NameContext.create(
                  stageName,
                  input.getNameContext().originalName(),
                  input.getNameContext().systemName(),
                  input.getNameContext().userName()));
      return OperationNode.create(
          new FetchAndFilterStreamingSideInputsOperation<>(
              new OutputReceiver[] {output.getOutputReceiver()},
              operationContext,
              instructionRequestHandler,
              beamFnDataService,
              dataApiServiceDescriptor,
              idGenerator,
              (Coder<WindowedValue<Object>>) output.getCoder(),
              (WindowingStrategy<?, BoundedWindow>) input.getWindowingStrategy(),
              executionContext.getStepContext(operationContext),
              input.getPCollectionViewsToWindowMappingFns()));
    }
  };
}
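The wrapper class does the type dispatch so that typedApply only ever sees the node type named in the constructor; every other node passes through untouched. A minimal, self-contained re-creation of that idea (hypothetical Node types, not the Beam source) might look like:

import java.util.function.Function;

public class TypeSafeDispatchSketch {
  // Hypothetical node hierarchy standing in for Beam's graph Node types.
  interface Node {}
  static class SideInputNode implements Node {}
  static class OtherNode implements Node {}

  // A minimal re-creation of the TypeSafeNodeFunction idea (not the Beam
  // source): run typedApply only on nodes of the requested type and pass
  // every other node through unchanged.
  abstract static class TypeSafeNodeFunction<T extends Node>
      implements Function<Node, Node> {
    private final Class<T> type;

    TypeSafeNodeFunction(Class<T> type) {
      this.type = type;
    }

    @Override
    public final Node apply(Node input) {
      return type.isInstance(input) ? typedApply(type.cast(input)) : input;
    }

    abstract Node typedApply(T input);
  }

  public static void main(String[] args) {
    Function<Node, Node> fn =
        new TypeSafeNodeFunction<SideInputNode>(SideInputNode.class) {
          @Override
          Node typedApply(SideInputNode input) {
            System.out.println("replacing SideInputNode");
            return new OtherNode(); // stand-in for OperationNode.create(...)
          }
        };
    fn.apply(new SideInputNode()); // matched: typedApply runs
    fn.apply(new OtherNode());     // unmatched: passed through unchanged
  }
}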
Use of org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction in project beam by apache.
From class BeamFnMapTaskExecutorFactory, method createOperationTransformForExecutableStageNode.
private Function<Node, Node> createOperationTransformForExecutableStageNode(
    final Network<Node, Edge> network,
    final String stageName,
    final DataflowExecutionContext<?> executionContext,
    final JobBundleFactory jobBundleFactory) {
  return new TypeSafeNodeFunction<ExecutableStageNode>(ExecutableStageNode.class) {
    @Override
    public Node typedApply(ExecutableStageNode input) {
      StageBundleFactory stageBundleFactory =
          jobBundleFactory.forStage(input.getExecutableStage());
      // Collect the stage's output receivers, keyed by PCollection id.
      Iterable<OutputReceiverNode> outputReceiverNodes =
          Iterables.filter(network.successors(input), OutputReceiverNode.class);
      Map<String, OutputReceiver> outputReceiverMap = new HashMap<>();
      for (OutputReceiverNode outputReceiverNode : outputReceiverNodes) {
        outputReceiverMap.put(
            outputReceiverNode.getPcollectionId(), outputReceiverNode.getOutputReceiver());
      }
      // Complete each PTransform's partial NameContext with the stage name and
      // create an operation context for it.
      ImmutableMap.Builder<String, DataflowOperationContext>
          ptransformIdToOperationContextBuilder = ImmutableMap.builder();
      for (Map.Entry<String, NameContext> entry :
          input.getPTransformIdToPartialNameContextMap().entrySet()) {
        NameContext fullNameContext =
            NameContext.create(
                stageName,
                entry.getValue().originalName(),
                entry.getValue().systemName(),
                entry.getValue().userName());
        DataflowOperationContext operationContext =
            executionContext.createOperationContext(fullNameContext);
        ptransformIdToOperationContextBuilder.put(entry.getKey(), operationContext);
      }
      ImmutableMap<String, DataflowOperationContext> ptransformIdToOperationContexts =
          ptransformIdToOperationContextBuilder.build();
      ImmutableMap<String, SideInputReader> ptransformIdToSideInputReaders =
          buildPTransformIdToSideInputReadersMap(
              executionContext, input, ptransformIdToOperationContexts);
      Map<RunnerApi.ExecutableStagePayload.SideInputId, PCollectionView<?>>
          ptransformIdToSideInputIdToPCollectionView = buildSideInputIdToPCollectionView(input);
      return OperationNode.create(
          new ProcessRemoteBundleOperation(
              input.getExecutableStage(),
              executionContext.createOperationContext(
                  NameContext.create(stageName, stageName, stageName, stageName)),
              stageBundleFactory,
              outputReceiverMap,
              ptransformIdToSideInputReaders,
              ptransformIdToSideInputIdToPCollectionView));
    }
  };
}
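The receiver map built above is a plain id-to-receiver index; the same shape can be collected in a single stream pass. A stand-alone sketch with a hypothetical ReceiverNode stand-in (not Beam's OutputReceiverNode):

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ReceiverMapSketch {
  // Hypothetical stand-in for Beam's OutputReceiverNode.
  static class ReceiverNode {
    final String pcollectionId;
    final Object receiver;

    ReceiverNode(String pcollectionId, Object receiver) {
      this.pcollectionId = pcollectionId;
      this.receiver = receiver;
    }
  }

  public static void main(String[] args) {
    List<ReceiverNode> nodes = Arrays.asList(
        new ReceiverNode("pc1", new Object()),
        new ReceiverNode("pc2", new Object()));
    // Same shape as outputReceiverMap above: PCollection id -> receiver.
    Map<String, Object> byId = nodes.stream()
        .collect(Collectors.toMap(n -> n.pcollectionId, n -> n.receiver));
    System.out.println(byId.keySet()); // [pc1, pc2] (order unspecified)
  }
}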
Use of org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction in project beam by apache.
From class BeamFnMapTaskExecutorFactory, method createOperationTransformForGrpcPortNodes.
private Function<Node, Node> createOperationTransformForGrpcPortNodes(
    final Network<Node, Edge> network,
    final FnDataService beamFnDataService,
    final OperationContext context) {
  return new TypeSafeNodeFunction<RemoteGrpcPortNode>(RemoteGrpcPortNode.class) {
    @Override
    public Node typedApply(RemoteGrpcPortNode input) {
      RegisterAndProcessBundleOperation registerFnOperation =
          (RegisterAndProcessBundleOperation)
              Iterables.getOnlyElement(
                      Iterables.filter(network.adjacentNodes(input), OperationNode.class))
                  .getOperation();
      // The coder comes from the one and only adjacent output node.
      Coder<?> coder =
          Iterables.getOnlyElement(
                  Iterables.filter(network.adjacentNodes(input), OutputReceiverNode.class))
              .getCoder();
      // A successor output node means this port produces output (a read);
      // otherwise the port is written to.
      Iterable<OutputReceiverNode> outputReceiverNodes =
          Iterables.filter(network.successors(input), OutputReceiverNode.class);
      Operation operation;
      if (outputReceiverNodes.iterator().hasNext()) {
        OutputReceiver[] outputReceivers =
            new OutputReceiver[] {
              Iterables.getOnlyElement(outputReceiverNodes).getOutputReceiver()
            };
        operation =
            new RemoteGrpcPortReadOperation<>(
                beamFnDataService,
                input.getPrimitiveTransformId(),
                registerFnOperation::getProcessBundleInstructionId,
                (Coder) coder,
                outputReceivers,
                context);
      } else {
        operation =
            new RemoteGrpcPortWriteOperation<>(
                beamFnDataService,
                input.getPrimitiveTransformId(),
                registerFnOperation::getProcessBundleInstructionId,
                (Coder) coder,
                context);
      }
      return OperationNode.create(operation);
    }
  };
}
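The read-versus-write decision above rests purely on graph direction: if the port node has an OutputReceiverNode among its successors, data flows out of it. A tiny runnable illustration of that successor check using Guava's graph API (hypothetical string nodes; Guava on the classpath assumed):

import com.google.common.graph.MutableNetwork;
import com.google.common.graph.NetworkBuilder;

public class DirectionCheckSketch {
  public static void main(String[] args) {
    MutableNetwork<String, String> network = NetworkBuilder.directed().build();
    // A port that feeds a downstream receiver: the receiver is a successor.
    network.addEdge("readPort", "receiver", "out");
    // A port that is fed by an upstream operation: no successor at all.
    network.addEdge("operation", "writePort", "in");

    System.out.println(isRead(network, "readPort"));  // true  -> read operation
    System.out.println(isRead(network, "writePort")); // false -> write operation
  }

  // Mirrors the factory's check: any successor means the port produces output.
  static boolean isRead(MutableNetwork<String, String> network, String port) {
    return !network.successors(port).isEmpty();
  }
}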
Use of org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction in project beam by apache.
From class BeamFnMapTaskExecutorFactory, method createOperationTransformForParallelInstructionNodes.
/**
* Creates an {@link Operation} from the given {@link ParallelInstruction} definition using the
* provided {@link ReaderFactory}.
*/
Function<Node, Node> createOperationTransformForParallelInstructionNodes(
    final String stageName,
    final Network<Node, Edge> network,
    final PipelineOptions options,
    final ReaderFactory readerFactory,
    final SinkFactory sinkFactory,
    final DataflowExecutionContext<?> executionContext) {
  return new TypeSafeNodeFunction<ParallelInstructionNode>(ParallelInstructionNode.class) {
    @Override
    public Node typedApply(ParallelInstructionNode node) {
      ParallelInstruction instruction = node.getParallelInstruction();
      NameContext nameContext =
          NameContext.create(
              stageName,
              instruction.getOriginalName(),
              instruction.getSystemName(),
              instruction.getName());
      try {
        DataflowOperationContext context =
            executionContext.createOperationContext(nameContext);
        if (instruction.getRead() != null) {
          return createReadOperation(
              network, node, options, readerFactory, executionContext, context);
        } else if (instruction.getWrite() != null) {
          return createWriteOperation(node, options, sinkFactory, executionContext, context);
        } else if (instruction.getParDo() != null) {
          return createParDoOperation(network, node, options, executionContext, context);
        } else if (instruction.getPartialGroupByKey() != null) {
          return createPartialGroupByKeyOperation(
              network, node, options, executionContext, context);
        } else if (instruction.getFlatten() != null) {
          return createFlattenOperation(network, node, context);
        } else {
          throw new IllegalArgumentException(
              String.format("Unexpected instruction: %s", instruction));
        }
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  };
}
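A transform like this is applied across the whole MapTask graph, swapping each matched node for its OperationNode while rewiring its edges; Beam keeps a helper for that in the same Networks class as TypeSafeNodeFunction. A self-contained miniature of that rewrite loop (hypothetical string nodes and edges, not the Beam helper):

import com.google.common.graph.MutableNetwork;
import com.google.common.graph.NetworkBuilder;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

public class GraphRewriteSketch {
  public static void main(String[] args) {
    MutableNetwork<String, String> network = NetworkBuilder.directed().build();
    network.addEdge("read", "parDo", "e1");
    network.addEdge("parDo", "write", "e2");

    // Stand-in for the factory's function: instructions become operations,
    // everything else passes through (the TypeSafeNodeFunction default).
    Function<String, String> transform =
        node -> node.equals("parDo") ? "parDoOperation" : node;

    for (String node : new ArrayList<>(network.nodes())) {
      String replacement = transform.apply(node);
      if (replacement.equals(node)) {
        continue;
      }
      List<String> predecessors = new ArrayList<>(network.predecessors(node));
      List<String> successors = new ArrayList<>(network.successors(node));
      network.removeNode(node); // also drops the node's incident edges
      for (String p : predecessors) {
        network.addEdge(p, replacement, p + "->" + replacement);
      }
      for (String s : successors) {
        network.addEdge(replacement, s, replacement + "->" + s);
      }
    }
    // Nodes now: read, parDoOperation, write (order unspecified).
    System.out.println(network.nodes());
  }
}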
Use of org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction in project beam by apache.
From class IntrinsicMapTaskExecutorFactory, method createOperationTransformForParallelInstructionNodes (identical to the BeamFnMapTaskExecutorFactory variant above).
/**
* Creates an {@link Operation} from the given {@link ParallelInstruction} definition using the
* provided {@link ReaderFactory}.
*/
Function<Node, Node> createOperationTransformForParallelInstructionNodes(
    final String stageName,
    final Network<Node, Edge> network,
    final PipelineOptions options,
    final ReaderFactory readerFactory,
    final SinkFactory sinkFactory,
    final DataflowExecutionContext<?> executionContext) {
  return new TypeSafeNodeFunction<ParallelInstructionNode>(ParallelInstructionNode.class) {
    @Override
    public Node typedApply(ParallelInstructionNode node) {
      ParallelInstruction instruction = node.getParallelInstruction();
      NameContext nameContext =
          NameContext.create(
              stageName,
              instruction.getOriginalName(),
              instruction.getSystemName(),
              instruction.getName());
      try {
        DataflowOperationContext context =
            executionContext.createOperationContext(nameContext);
        if (instruction.getRead() != null) {
          return createReadOperation(
              network, node, options, readerFactory, executionContext, context);
        } else if (instruction.getWrite() != null) {
          return createWriteOperation(node, options, sinkFactory, executionContext, context);
        } else if (instruction.getParDo() != null) {
          return createParDoOperation(network, node, options, executionContext, context);
        } else if (instruction.getPartialGroupByKey() != null) {
          return createPartialGroupByKeyOperation(
              network, node, options, executionContext, context);
        } else if (instruction.getFlatten() != null) {
          return createFlattenOperation(network, node, context);
        } else {
          throw new IllegalArgumentException(
              String.format("Unexpected instruction: %s", instruction));
        }
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  };
}