Use of org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction in project beam by apache.
From the class IntrinsicMapTaskExecutorFactory, method createOutputReceiversTransform.
/**
 * Returns a function which can convert {@link InstructionOutput}s into {@link OutputReceiver}s.
 */
static Function<Node, Node> createOutputReceiversTransform(
    final String stageName, final CounterFactory counterFactory) {
  return new TypeSafeNodeFunction<InstructionOutputNode>(InstructionOutputNode.class) {

    @Override
    public Node typedApply(InstructionOutputNode input) {
      InstructionOutput cloudOutput = input.getInstructionOutput();
      OutputReceiver outputReceiver = new OutputReceiver();
      Coder<?> coder =
          CloudObjects.coderFromCloudObject(CloudObject.fromSpec(cloudOutput.getCodec()));
      @SuppressWarnings("unchecked")
      ElementCounter outputCounter =
          new DataflowOutputCounter(
              cloudOutput.getName(),
              new ElementByteSizeObservableCoder<>(coder),
              counterFactory,
              NameContext.create(
                  stageName,
                  cloudOutput.getOriginalName(),
                  cloudOutput.getSystemName(),
                  cloudOutput.getName()));
      outputReceiver.addOutputCounter(outputCounter);
      return OutputReceiverNode.create(outputReceiver, coder, input.getPcollectionId());
    }
  };
}
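For context, TypeSafeNodeFunction (defined in Networks) is a small dispatch helper: it invokes typedApply only on nodes of the requested class and returns every other node unchanged, which is what lets rewrites like the one above run safely over a heterogeneous instruction graph. Below is a minimal, self-contained sketch of that pattern; the node classes here are stand-ins, and the real helper implements Guava's Function rather than java.util.function.Function.

import java.util.function.Function;

// Stand-ins for the worker graph's Node hierarchy (illustrative only).
interface Node {}

class InstructionOutputNode implements Node {}

class OperationNode implements Node {}

// Sketch of the TypeSafeNodeFunction pattern: dispatch typedApply only to
// nodes of the requested type and pass every other node through unchanged.
abstract class TypeSafeNodeFunction<T extends Node> implements Function<Node, Node> {
  private final Class<T> type;

  protected TypeSafeNodeFunction(Class<T> type) {
    this.type = type;
  }

  @Override
  public final Node apply(Node input) {
    return type.isInstance(input) ? typedApply(type.cast(input)) : input;
  }

  public abstract Node typedApply(T input);
}

public class TypeSafeNodeFunctionDemo {
  public static void main(String[] args) {
    Function<Node, Node> fn =
        new TypeSafeNodeFunction<InstructionOutputNode>(InstructionOutputNode.class) {
          @Override
          public Node typedApply(InstructionOutputNode input) {
            System.out.println("rewriting InstructionOutputNode");
            return input; // a real transform would return a replacement node
          }
        };
    fn.apply(new InstructionOutputNode()); // matching type: typedApply runs
    fn.apply(new OperationNode());         // any other type: returned as-is
  }
}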
Use of org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction in project beam by apache.
From the class LengthPrefixUnknownCoders, method andReplaceForParallelInstructionNode.
/**
 * Replaces unknown coders on the given {@link ParallelInstructionNode} with a {@link
 * org.apache.beam.sdk.coders.LengthPrefixCoder LengthPrefixCoder<T>} where {@code T} is a
 * {@link org.apache.beam.sdk.coders.ByteArrayCoder}.
 */
private static Function<Node, Node> andReplaceForParallelInstructionNode() {
  return new TypeSafeNodeFunction<ParallelInstructionNode>(ParallelInstructionNode.class) {

    @Override
    public Node typedApply(ParallelInstructionNode input) {
      ParallelInstruction instruction = input.getParallelInstruction();
      Nodes.ExecutionLocation location = input.getExecutionLocation();
      try {
        instruction = forParallelInstruction(instruction, true);
      } catch (Exception e) {
        throw new RuntimeException(
            String.format(
                "Failed to replace unknown coder with LengthPrefixCoder for: {%s}",
                input.getParallelInstruction()),
            e);
      }
      return ParallelInstructionNode.create(instruction, location);
    }
  };
}
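The function above is only the per-node rule; something still has to walk the graph with it. Here is a hedged sketch of that driver step, assuming the Networks.replaceDirectedNetworkNodes helper from the same package (the method above is private, so in the real code this wiring happens inside LengthPrefixUnknownCoders itself):

// Hypothetical driver (sketch only; names and call site are assumptions):
static void lengthPrefixUnknownCoders(MutableNetwork<Node, Edge> network) {
  // Visits every node in the graph: each ParallelInstructionNode is replaced
  // by a copy whose unknown coders are wrapped in LengthPrefixCoder, while
  // all other node types pass through unchanged.
  Networks.replaceDirectedNetworkNodes(network, andReplaceForParallelInstructionNode());
}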
Use of org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction in project beam by apache.
From the class BeamFnMapTaskExecutorFactory, method createOutputReceiversTransform.
/**
 * Returns a function which can convert {@link InstructionOutput}s into {@link OutputReceiver}s.
 */
static Function<Node, Node> createOutputReceiversTransform(
    final String stageName, final CounterFactory counterFactory) {
  return new TypeSafeNodeFunction<InstructionOutputNode>(InstructionOutputNode.class) {

    @Override
    public Node typedApply(InstructionOutputNode input) {
      InstructionOutput cloudOutput = input.getInstructionOutput();
      OutputReceiver outputReceiver = new OutputReceiver();
      Coder<?> coder =
          CloudObjects.coderFromCloudObject(CloudObject.fromSpec(cloudOutput.getCodec()));
      @SuppressWarnings("unchecked")
      ElementCounter outputCounter =
          new DataflowOutputCounter(
              cloudOutput.getName(),
              new ElementByteSizeObservableCoder<>(coder),
              counterFactory,
              NameContext.create(
                  stageName,
                  cloudOutput.getOriginalName(),
                  cloudOutput.getSystemName(),
                  cloudOutput.getName()));
      outputReceiver.addOutputCounter(outputCounter);
      return OutputReceiverNode.create(outputReceiver, coder, input.getPcollectionId());
    }
  };
}
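Since the returned value is just a Function<Node, Node>, it can also be exercised one node at a time, which makes the pass-through behavior easy to see. A small hypothetical illustration (the counter factory and the two node arguments are assumed to be supplied by the caller):

static void demo(CounterFactory counterFactory, Node instructionOutputNode, Node operationNode) {
  Function<Node, Node> fn = createOutputReceiversTransform("stageA", counterFactory);
  // An InstructionOutputNode is rewritten into an OutputReceiverNode...
  Node rewritten = fn.apply(instructionOutputNode);
  // ...while any other node type comes back unchanged (TypeSafeNodeFunction's pass-through).
  Node untouched = fn.apply(operationNode);
}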
Use of org.apache.beam.runners.dataflow.worker.graph.Networks.TypeSafeNodeFunction in project beam by apache.
From the class BeamFnMapTaskExecutorFactory, method createOperationTransformForRegisterFnNodes.
private Function<Node, Node> createOperationTransformForRegisterFnNodes(
    final IdGenerator idGenerator,
    final InstructionRequestHandler instructionRequestHandler,
    final StateDelegator beamFnStateDelegator,
    final String stageName,
    final DataflowExecutionContext<?> executionContext) {
  return new TypeSafeNodeFunction<RegisterRequestNode>(RegisterRequestNode.class) {

    @Override
    public Node typedApply(RegisterRequestNode input) {
      ImmutableMap.Builder<String, DataflowOperationContext> ptransformIdToOperationContextBuilder =
          ImmutableMap.builder();
      ImmutableMap.Builder<String, DataflowStepContext> ptransformIdToStepContext =
          ImmutableMap.builder();
      for (Map.Entry<String, NameContext> entry :
          input.getPTransformIdToPartialNameContextMap().entrySet()) {
        NameContext fullNameContext = NameContext.create(
            stageName,
            entry.getValue().originalName(),
            entry.getValue().systemName(),
            entry.getValue().userName());
        DataflowOperationContext operationContext =
            executionContext.createOperationContext(fullNameContext);
        ptransformIdToOperationContextBuilder.put(entry.getKey(), operationContext);
        ptransformIdToStepContext.put(entry.getKey(), executionContext.getStepContext(operationContext));
      }
      ImmutableMap.Builder<String, NameContext> pcollectionIdToNameContext = ImmutableMap.builder();
      for (Map.Entry<String, NameContext> entry :
          input.getPCollectionToPartialNameContextMap().entrySet()) {
        pcollectionIdToNameContext.put(entry.getKey(), NameContext.create(
            stageName,
            entry.getValue().originalName(),
            entry.getValue().systemName(),
            entry.getValue().userName()));
      }
      ImmutableMap<String, DataflowOperationContext> ptransformIdToOperationContexts =
          ptransformIdToOperationContextBuilder.build();
      ImmutableMap<String, SideInputReader> ptransformIdToSideInputReaders =
          buildPTransformIdToSideInputReadersMap(executionContext, input, ptransformIdToOperationContexts);
      ImmutableTable<String, String, PCollectionView<?>> ptransformIdToSideInputIdToPCollectionView =
          buildPTransformIdToSideInputIdToPCollectionView(input);
      return OperationNode.create(
          new RegisterAndProcessBundleOperation(
              idGenerator,
              instructionRequestHandler,
              beamFnStateDelegator,
              input.getRegisterRequest(),
              ptransformIdToOperationContexts,
              ptransformIdToStepContext.build(),
              ptransformIdToSideInputReaders,
              ptransformIdToSideInputIdToPCollectionView,
              pcollectionIdToNameContext.build(),
              // TODO: Set NameContext properly for these operations.
              executionContext.createOperationContext(
                  NameContext.create(stageName, stageName, stageName, stageName))));
    }
  };
}
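Taken together, these TypeSafeNodeFunctions act as successive rewrite passes over the same stage graph. A hedged sketch of that composition, assuming Networks.replaceDirectedNetworkNodes and treating the two factory methods above as callable from here (in the real code they are members of BeamFnMapTaskExecutorFactory and are invoked while it builds the executor):

// Hypothetical composition of the rewrite passes (sketch only):
void rewriteStageGraph(
    MutableNetwork<Node, Edge> network,
    IdGenerator idGenerator,
    InstructionRequestHandler requestHandler,
    StateDelegator stateDelegator,
    String stageName,
    DataflowExecutionContext<?> executionContext,
    CounterFactory counterFactory) {
  // Pass 1: turn RegisterRequestNodes into executable OperationNodes.
  Networks.replaceDirectedNetworkNodes(
      network,
      createOperationTransformForRegisterFnNodes(
          idGenerator, requestHandler, stateDelegator, stageName, executionContext));
  // Pass 2: turn InstructionOutputNodes into OutputReceiverNodes with counters attached.
  Networks.replaceDirectedNetworkNodes(
      network, createOutputReceiversTransform(stageName, counterFactory));
}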