Use of com.google.api.services.dataflow.model.Source in project beam by apache.
In class AvroByteReaderFactoryTest, method runTestCreateAvroReader:
NativeReader<?> runTestCreateAvroReader(
    String filename, @Nullable Long start, @Nullable Long end, CloudObject encoding)
    throws Exception {
  CloudObject spec = CloudObject.forClassName("AvroSource");
  addString(spec, "filename", filename);
  if (start != null) {
    addLong(spec, "start_offset", start);
  }
  if (end != null) {
    addLong(spec, "end_offset", end);
  }
  Source cloudSource = new Source();
  cloudSource.setSpec(spec);
  cloudSource.setCodec(encoding);
  NativeReader<?> reader =
      ReaderRegistry.defaultRegistry()
          .create(
              cloudSource,
              PipelineOptionsFactory.create(),
              /* executionContext= */ null,
              /* operationContext= */ null);
  return reader;
}
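A test might drive this helper as sketched below; the file path and element coder are assumptions for illustration, not values taken from the Beam test itself.

// Minimal sketch (assumed path and coder): build the codec CloudObject for
// the element type, then ask for a reader over the whole file (no offsets).
CloudObject encoding =
    CloudObjects.asCloudObject(
        WindowedValue.getValueOnlyCoder(StringUtf8Coder.of()), /* sdkComponents= */ null);
NativeReader<?> reader = runTestCreateAvroReader("/tmp/test.avro", null, null, encoding);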
Use of com.google.api.services.dataflow.model.Source in project beam by apache.
In class ConcatReaderTest, method createSourceForTestReader:
private Source createSourceForTestReader(TestReader<String> testReader) {
  Source source = new Source();
  CloudObject specObj = CloudObject.forClass(TestReader.class);
  specObj.put(READER_OBJECT, testReader);
  source.setSpec(specObj);
  return source;
}
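A possible call site, sketched under the assumption that TestReader has a no-argument constructor (the real inner test class may differ). The point is that the live reader object rides inside the spec under the READER_OBJECT key, so a ConcatReader built over such sub-sources can recover it directly.

// TestReader construction is an assumption for this sketch.
TestReader<String> testReader = new TestReader<>();
Source subSource = createSourceForTestReader(testReader);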
Use of com.google.api.services.dataflow.model.Source in project beam by apache.
In class CloudSourceUtils, method flattenBaseSpecs:
/**
* Returns a copy of the source with {@code baseSpecs} flattened into {@code spec}. On conflict
* for a parameter name, values in {@code spec} override values in {@code baseSpecs}, and later
* values in {@code baseSpecs} override earlier ones.
*/
public static Source flattenBaseSpecs(Source source) {
  if (source.getBaseSpecs() == null) {
    return source;
  }
  Map<String, Object> params = new HashMap<>();
  for (Map<String, Object> baseSpec : source.getBaseSpecs()) {
    params.putAll(baseSpec);
  }
  params.putAll(source.getSpec());
  Source result = source.clone();
  result.setSpec(params);
  result.setBaseSpecs(null);
  return result;
}
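A worked example of the override order may help. This is a minimal sketch with hypothetical keys and values, not a real Dataflow spec:

Source source = new Source();
source.setBaseSpecs(
    Arrays.<Map<String, Object>>asList(
        ImmutableMap.<String, Object>of("host", "a", "port", "1"), // earliest base spec
        ImmutableMap.<String, Object>of("port", "2"))); // later base spec overrides "port"
source.setSpec(ImmutableMap.<String, Object>of("host", "c")); // spec overrides "host"
Source flat = CloudSourceUtils.flattenBaseSpecs(source);
// flat.getSpec() is now {host=c, port=2} and flat.getBaseSpecs() is null.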
Use of com.google.api.services.dataflow.model.Source in project beam by apache.
In class BeamFnMapTaskExecutorFactory, method createReadOperation:
OperationNode createReadOperation(
    Network<Node, Edge> network,
    ParallelInstructionNode node,
    PipelineOptions options,
    ReaderFactory readerFactory,
    DataflowExecutionContext<?> executionContext,
    DataflowOperationContext operationContext)
    throws Exception {
  ParallelInstruction instruction = node.getParallelInstruction();
  ReadInstruction read = instruction.getRead();
  Source cloudSource = CloudSourceUtils.flattenBaseSpecs(read.getSource());
  CloudObject sourceSpec = CloudObject.fromSpec(cloudSource.getSpec());
  Coder<?> coder = CloudObjects.coderFromCloudObject(CloudObject.fromSpec(cloudSource.getCodec()));
  NativeReader<?> reader =
      readerFactory.create(sourceSpec, coder, options, executionContext, operationContext);
  OutputReceiver[] receivers = getOutputReceivers(network, node);
  return OperationNode.create(ReadOperation.create(reader, receivers, operationContext));
}
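Note the ordering here: flattenBaseSpecs runs before the spec is converted to a CloudObject, so the reader factory only ever sees a single, fully merged parameter map; the element coder is decoded from the codec CloudObject independently of the source spec.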
Use of com.google.api.services.dataflow.model.Source in project beam by apache.
In class LengthPrefixUnknownCodersTest, method createReadNode:
private static ParallelInstructionNode createReadNode(
    String name, String readClassName, Coder<?> coder) {
  ParallelInstruction parallelInstruction =
      new ParallelInstruction()
          .setName(name)
          .setRead(
              new ReadInstruction()
                  .setSource(
                      new Source()
                          .setCodec(CloudObjects.asCloudObject(coder, /* sdkComponents= */ null))
                          .setSpec(CloudObject.forClassName(readClassName))));
  parallelInstruction.setFactory(new JacksonFactory());
  return ParallelInstructionNode.create(parallelInstruction, Nodes.ExecutionLocation.UNKNOWN);
}
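A sketch of a call site; the node name, reader class name, and coder below are assumptions for illustration:

// Hypothetical arguments: any class name string works, since the spec is
// only inspected by the length-prefixing logic under test.
ParallelInstructionNode readNode =
    createReadNode("Read", "UnknownReader", StringUtf8Coder.of());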