Use of com.google.api.services.dataflow.model.ReadInstruction in project beam by apache.
The class IntrinsicMapTaskExecutorFactoryTest, method createReadInstruction.
static ParallelInstruction createReadInstruction(
    String name, Class<? extends ReaderFactory> readerFactoryClass) {
  CloudObject spec = CloudObject.forClass(readerFactoryClass);
  Source cloudSource = new Source();
  cloudSource.setSpec(spec);
  cloudSource.setCodec(windowedStringCoder);

  ReadInstruction readInstruction = new ReadInstruction();
  readInstruction.setSource(cloudSource);

  InstructionOutput output = new InstructionOutput();
  output.setName("read_output_name");
  output.setCodec(windowedStringCoder);
  output.setOriginalName("originalName");
  output.setSystemName("systemName");

  ParallelInstruction instruction = new ParallelInstruction();
  instruction.setSystemName(name);
  instruction.setOriginalName(name + "OriginalName");
  instruction.setRead(readInstruction);
  instruction.setOutputs(Arrays.asList(output));
  return instruction;
}
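The windowedStringCoder field is declared elsewhere in the test class. A minimal sketch of a plausible definition, assuming a string element coder wrapped in the same WindowedValue full-coder pattern used in the other snippets on this page (the exact coder choices are an assumption, not taken from the source):

// Hypothetical definition of the coder field referenced above; the real test
// may wrap a different element coder or window coder.
private static final CloudObject windowedStringCoder =
    CloudObjects.asCloudObject(
        WindowedValue.getFullCoder(StringUtf8Coder.of(), GlobalWindow.Coder.INSTANCE),
        /* sdkComponents= */ null);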
Use of com.google.api.services.dataflow.model.ReadInstruction in project beam by apache.
The class StreamingDataflowWorkerTest, method makeUnboundedSourcePipeline.
private List<ParallelInstruction> makeUnboundedSourcePipeline(
    // Total number of messages in each split of the unbounded source.
    int numMessagesPerShard,
    DoFn<ValueWithRecordId<KV<Integer, Integer>>, String> doFn) throws Exception {
  DataflowPipelineOptions options = PipelineOptionsFactory.create().as(DataflowPipelineOptions.class);
  options.setNumWorkers(1);
  CloudObject codec =
      CloudObjects.asCloudObject(
          WindowedValue.getFullCoder(
              ValueWithRecordId.ValueWithRecordIdCoder.of(
                  KvCoder.of(VarIntCoder.of(), VarIntCoder.of())),
              GlobalWindow.Coder.INSTANCE),
          /* sdkComponents= */ null);
  return Arrays.asList(
      new ParallelInstruction()
          .setSystemName("Read")
          .setOriginalName("OriginalReadName")
          .setRead(
              new ReadInstruction()
                  .setSource(
                      CustomSources.serializeToCloudSource(
                              new TestCountingSource(numMessagesPerShard), options)
                          .setCodec(codec)))
          .setOutputs(
              Arrays.asList(
                  new InstructionOutput()
                      .setName("read_output")
                      .setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
                      .setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)
                      .setCodec(codec))),
      makeDoFnInstruction(doFn, 0, StringUtf8Coder.of(), WindowingStrategy.globalDefault()),
      makeSinkInstruction(StringUtf8Coder.of(), 1, GlobalWindow.Coder.INSTANCE));
}
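A hypothetical call site for this helper, wrapping the instruction list in a MapTask the way the MapTaskToNetworkFunctionTest snippets below do. The PrintFn name is an assumption, standing in for whatever DoFn the test actually passes:

// Hypothetical usage: build the three-instruction pipeline and wrap it in a MapTask.
List<ParallelInstruction> instructions =
    makeUnboundedSourcePipeline(10, new PrintFn()); // PrintFn: an assumed DoFn<ValueWithRecordId<KV<Integer, Integer>>, String>
MapTask mapTask = new MapTask();
mapTask.setInstructions(instructions);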
Use of com.google.api.services.dataflow.model.ReadInstruction in project beam by apache.
The class StreamingDataflowWorkerTest, method makeWindowingSourceInstruction.
private ParallelInstruction makeWindowingSourceInstruction(Coder<?> coder) {
  CloudObject timerCloudObject =
      CloudObject.forClassName(
          "com.google.cloud.dataflow.sdk.util.TimerOrElement$TimerOrElementCoder");
  List<CloudObject> component =
      Collections.singletonList(CloudObjects.asCloudObject(coder, /* sdkComponents= */ null));
  Structs.addList(timerCloudObject, PropertyNames.COMPONENT_ENCODINGS, component);

  CloudObject encodedCoder = CloudObject.forClassName("kind:windowed_value");
  Structs.addBoolean(encodedCoder, PropertyNames.IS_WRAPPER, true);
  Structs.addList(
      encodedCoder,
      PropertyNames.COMPONENT_ENCODINGS,
      ImmutableList.of(
          timerCloudObject,
          CloudObjects.asCloudObject(IntervalWindowCoder.of(), /* sdkComponents= */ null)));

  return new ParallelInstruction()
      .setSystemName(DEFAULT_SOURCE_SYSTEM_NAME)
      .setOriginalName(DEFAULT_SOURCE_ORIGINAL_NAME)
      .setRead(
          new ReadInstruction()
              .setSource(
                  new Source()
                      .setSpec(CloudObject.forClass(WindowingWindmillReader.class))
                      .setCodec(encodedCoder)))
      .setOutputs(
          Arrays.asList(
              new InstructionOutput()
                  .setName(Long.toString(idGenerator.get()))
                  .setCodec(encodedCoder)
                  .setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
                  .setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)));
}
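A hypothetical call site, pairing the windowing source with the makeSinkInstruction helper that appears in the previous snippet; the pairing and the element coder are assumptions, not taken from the source:

// Hypothetical usage: a two-instruction map task with a windowing source and a sink.
List<ParallelInstruction> instructions =
    Arrays.asList(
        makeWindowingSourceInstruction(StringUtf8Coder.of()), // assumed element coder
        makeSinkInstruction(StringUtf8Coder.of(), 1, GlobalWindow.Coder.INSTANCE));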
Use of com.google.api.services.dataflow.model.ReadInstruction in project beam by apache.
The class MapTaskToNetworkFunctionTest, method testParDo.
@Test
public void testParDo() {
  InstructionOutput readOutput = createInstructionOutput("Read.out");
  ParallelInstruction read = createParallelInstruction("Read", readOutput);
  read.setRead(new ReadInstruction());

  MultiOutputInfo parDoMultiOutput = createMultiOutputInfo("output");
  ParDoInstruction parDoInstruction = new ParDoInstruction();
  // Read.out
  parDoInstruction.setInput(createInstructionInput(0, 0));
  parDoInstruction.setMultiOutputInfos(ImmutableList.of(parDoMultiOutput));
  InstructionOutput parDoOutput = createInstructionOutput("ParDo.out");
  ParallelInstruction parDo = createParallelInstruction("ParDo", parDoOutput);
  parDo.setParDo(parDoInstruction);

  MapTask mapTask = new MapTask();
  mapTask.setInstructions(ImmutableList.of(read, parDo));
  mapTask.setFactory(Transport.getJsonFactory());

  Network<Node, Edge> network =
      new MapTaskToNetworkFunction(IdGenerators.decrementingLongs()).apply(mapTask);
  assertNetworkProperties(network);
  assertEquals(4, network.nodes().size());
  assertEquals(3, network.edges().size());

  ParallelInstructionNode readNode = get(network, read);
  InstructionOutputNode readOutputNode = getOnlySuccessor(network, readNode);
  assertEquals(readOutput, readOutputNode.getInstructionOutput());
  ParallelInstructionNode parDoNode = getOnlySuccessor(network, readOutputNode);
  InstructionOutputNode parDoOutputNode = getOnlySuccessor(network, parDoNode);
  assertEquals(parDoOutput, parDoOutputNode.getInstructionOutput());
  assertEquals(
      parDoMultiOutput,
      ((MultiOutputInfoEdge)
              Iterables.getOnlyElement(network.edgesConnecting(parDoNode, parDoOutputNode)))
          .getMultiOutputInfo());
}
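The createInstructionInput helper is defined elsewhere in the test class. A minimal sketch of what it plausibly looks like; the field choices are assumptions:

// Plausible sketch: an InstructionInput that points at output `outputNum`
// of the instruction at index `producerIndex` in the MapTask's instruction list.
private static InstructionInput createInstructionInput(int producerIndex, int outputNum) {
  InstructionInput input = new InstructionInput();
  input.setProducerInstructionIndex(producerIndex);
  input.setOutputNum(outputNum);
  return input;
}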
Use of com.google.api.services.dataflow.model.ReadInstruction in project beam by apache.
The class MapTaskToNetworkFunctionTest, method testFlatten.
@Test
public void testFlatten() {
  // ReadA --\
  //          |--> Flatten
  // ReadB --/
  InstructionOutput readOutputA = createInstructionOutput("ReadA.out");
  ParallelInstruction readA = createParallelInstruction("ReadA", readOutputA);
  readA.setRead(new ReadInstruction());

  InstructionOutput readOutputB = createInstructionOutput("ReadB.out");
  ParallelInstruction readB = createParallelInstruction("ReadB", readOutputB);
  readB.setRead(new ReadInstruction());

  FlattenInstruction flattenInstruction = new FlattenInstruction();
  flattenInstruction.setInputs(
      ImmutableList.of(
          // ReadA.out
          createInstructionInput(0, 0),
          // ReadB.out
          createInstructionInput(1, 0)));
  InstructionOutput flattenOutput = createInstructionOutput("Flatten.out");
  ParallelInstruction flatten = createParallelInstruction("Flatten", flattenOutput);
  flatten.setFlatten(flattenInstruction);

  MapTask mapTask = new MapTask();
  mapTask.setInstructions(ImmutableList.of(readA, readB, flatten));
  mapTask.setFactory(Transport.getJsonFactory());

  Network<Node, Edge> network =
      new MapTaskToNetworkFunction(IdGenerators.decrementingLongs()).apply(mapTask);
  assertNetworkProperties(network);
  assertEquals(6, network.nodes().size());
  assertEquals(5, network.edges().size());

  ParallelInstructionNode readANode = get(network, readA);
  InstructionOutputNode readOutputANode = getOnlySuccessor(network, readANode);
  assertEquals(readOutputA, readOutputANode.getInstructionOutput());
  ParallelInstructionNode readBNode = get(network, readB);
  InstructionOutputNode readOutputBNode = getOnlySuccessor(network, readBNode);
  assertEquals(readOutputB, readOutputBNode.getInstructionOutput());
  // Make sure the successors for both ReadA and ReadB output PCollections are the same
  assertEquals(network.successors(readOutputANode), network.successors(readOutputBNode));
  ParallelInstructionNode flattenNode = getOnlySuccessor(network, readOutputANode);
  InstructionOutputNode flattenOutputNode = getOnlySuccessor(network, flattenNode);
  assertEquals(flattenOutput, flattenOutputNode.getInstructionOutput());
}
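The remaining helpers used by these two tests are likewise defined elsewhere in the test class. Plausible minimal sketches, with all field choices being assumptions; the real helpers may populate additional fields:

// Plausible sketches of the construction helpers referenced above.
private static InstructionOutput createInstructionOutput(String name) {
  InstructionOutput output = new InstructionOutput();
  output.setName(name);
  return output;
}

private static ParallelInstruction createParallelInstruction(
    String name, InstructionOutput... outputs) {
  ParallelInstruction instruction = new ParallelInstruction();
  instruction.setName(name);
  instruction.setOutputs(Arrays.asList(outputs));
  return instruction;
}

private static MultiOutputInfo createMultiOutputInfo(String tag) {
  MultiOutputInfo info = new MultiOutputInfo();
  info.setTag(tag);
  return info;
}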