Example use of com.google.api.services.dataflow.model.Source in project beam by apache:
class ConcatReaderFactoryTest, method testCreateConcatReaderWithManySubSources.
@Test
public void testCreateConcatReaderWithManySubSources() throws Exception {
  // 15 in-memory sub-sources of 10 elements each, wrapped in one concat Source.
  List<List<String>> subSourceData = createInMemorySourceData(15, 10);
  Source concatSource = createSourcesWithInMemorySources(subSourceData);
  @SuppressWarnings("unchecked")
  NativeReader<String> reader =
      (NativeReader<String>)
          ReaderRegistry.defaultRegistry().create(concatSource, null, null, null);
  assertNotNull(reader);
  // The concatenated reader must yield every element of every sub-source.
  List<String> expected = new ArrayList<>();
  for (int i = 0; i < subSourceData.size(); i++) {
    expected.addAll(subSourceData.get(i));
  }
  assertThat(readAllFromReader(reader), containsInAnyOrder(expected.toArray()));
}
Example use of com.google.api.services.dataflow.model.Source in project beam by apache:
class ConcatReaderFactoryTest, method testCreateConcatReaderWithOneSubSource.
@Test
public void testCreateConcatReaderWithOneSubSource() throws Exception {
  // Degenerate concat: a single in-memory sub-source of 10 elements.
  List<List<String>> subSourceData = createInMemorySourceData(1, 10);
  Source concatSource = createSourcesWithInMemorySources(subSourceData);
  @SuppressWarnings("unchecked")
  NativeReader<String> reader =
      (NativeReader<String>)
          ReaderRegistry.defaultRegistry().create(concatSource, null, null, null);
  assertNotNull(reader);
  // Even with one sub-source, every element must come back out of the reader.
  List<String> expected = new ArrayList<>();
  for (int i = 0; i < subSourceData.size(); i++) {
    expected.addAll(subSourceData.get(i));
  }
  assertThat(readAllFromReader(reader), containsInAnyOrder(expected.toArray()));
}
Example use of com.google.api.services.dataflow.model.Source in project beam by apache:
class StreamingDataflowWorkerTest, method makeWindowingSourceInstruction.
// Builds a ParallelInstruction describing a streaming windowing source that reads
// timer-or-element records via WindowingWindmillReader, with the given element coder.
private ParallelInstruction makeWindowingSourceInstruction(Coder<?> coder) {
// Coder spec for TimerOrElement: the given coder is its single component encoding.
CloudObject timerCloudObject = CloudObject.forClassName("com.google.cloud.dataflow.sdk.util.TimerOrElement$TimerOrElementCoder");
List<CloudObject> component = Collections.singletonList(CloudObjects.asCloudObject(coder, /*sdkComponents=*/
null));
Structs.addList(timerCloudObject, PropertyNames.COMPONENT_ENCODINGS, component);
// Wrap in a windowed-value coder (marked IS_WRAPPER) whose components are the
// TimerOrElement coder plus an IntervalWindow coder for the window itself.
CloudObject encodedCoder = CloudObject.forClassName("kind:windowed_value");
Structs.addBoolean(encodedCoder, PropertyNames.IS_WRAPPER, true);
Structs.addList(encodedCoder, PropertyNames.COMPONENT_ENCODINGS, ImmutableList.of(timerCloudObject, CloudObjects.asCloudObject(IntervalWindowCoder.of(), /*sdkComponents=*/
null)));
// Read instruction whose Source spec is WindowingWindmillReader; the single output
// uses the same codec as the source. Names come from the test's DEFAULT_* constants.
return new ParallelInstruction().setSystemName(DEFAULT_SOURCE_SYSTEM_NAME).setOriginalName(DEFAULT_SOURCE_ORIGINAL_NAME).setRead(new ReadInstruction().setSource(new Source().setSpec(CloudObject.forClass(WindowingWindmillReader.class)).setCodec(encodedCoder))).setOutputs(Arrays.asList(new InstructionOutput().setName(Long.toString(idGenerator.get())).setCodec(encodedCoder).setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME).setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)));
}
Example use of com.google.api.services.dataflow.model.Source in project beam by apache:
class ShuffleReaderFactoryTest, method runTestCreateShuffleReader.
/**
 * Builds a shuffle {@link Source} cloud description and asks the default
 * {@link ReaderRegistry} to create a reader for it.
 *
 * @param shuffleReaderConfig opaque shuffle configuration, base64-encoded into the spec
 * @param start optional start shuffle position; omitted from the spec when null
 * @param end optional end shuffle position; omitted from the spec when null
 * @param encoding codec to attach to the cloud Source
 * @param context execution context passed to the registry
 * @param shuffleReaderClass expected concrete reader type
 * @param shuffleSourceAlias registry alias used as the spec's class name
 * @return the created reader, after asserting it is an instance of {@code shuffleReaderClass}
 * @throws Exception if the registry fails to create the reader
 */
<T extends NativeReader> T runTestCreateShuffleReader(byte[] shuffleReaderConfig, @Nullable String start, @Nullable String end, CloudObject encoding, BatchModeExecutionContext context, Class<T> shuffleReaderClass, String shuffleSourceAlias) throws Exception {
CloudObject spec = CloudObject.forClassName(shuffleSourceAlias);
addString(spec, "shuffle_reader_config", encodeBase64String(shuffleReaderConfig));
if (start != null) {
addString(spec, "start_shuffle_position", start);
}
if (end != null) {
addString(spec, "end_shuffle_position", end);
}
Source cloudSource = new Source();
cloudSource.setSpec(spec);
cloudSource.setCodec(encoding);
NativeReader<?> reader = ReaderRegistry.defaultRegistry().create(cloudSource, PipelineOptionsFactory.create(), context, null);
assertThat(reader, new IsInstanceOf(shuffleReaderClass));
// Safe: the assertion above guarantees reader is an instance of shuffleReaderClass.
@SuppressWarnings("unchecked")
T typedReader = (T) reader;
return typedReader;
}
Example use of com.google.api.services.dataflow.model.Source in project beam by apache:
class ReaderFactoryTest, method testCreateUnknownReader.
@Test
public void testCreateUnknownReader() throws Exception {
  // A Source whose spec names a reader class the registry has never heard of.
  Source cloudSource = new Source();
  cloudSource.setSpec(CloudObject.forClassName("UnknownSource"));
  cloudSource.setCodec(CloudObjects.asCloudObject(StringUtf8Coder.of(), /*sdkComponents=*/
  null));
  PipelineOptions options = PipelineOptionsFactory.create();
  try {
    ReaderRegistry.defaultRegistry().create(cloudSource, options, BatchModeExecutionContext.forTesting(options, "testStage"), null);
    Assert.fail("should have thrown an exception");
  } catch (Exception e) {
    // The registry is expected to reject the unknown spec with a descriptive message.
    assertThat(e.toString(), CoreMatchers.containsString("Unable to create a Reader"));
  }
}
Aggregations