Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in project beam by apache.
From the class DataflowPipelineTranslatorTest, method testSettingOfPipelineOptionsWithCustomUserType.
@Test
public void testSettingOfPipelineOptionsWithCustomUserType() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setRunner(DataflowRunner.class);
  options
      .as(JacksonIncompatibleOptions.class)
      .setJacksonIncompatible(new JacksonIncompatible("userCustomTypeTest"));
  Pipeline p = Pipeline.create(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(p, DataflowRunner.fromOptions(options), Collections.<DataflowPackage>emptyList())
          .getJob();
  Map<String, Object> sdkPipelineOptions = job.getEnvironment().getSdkPipelineOptions();
  assertThat(sdkPipelineOptions, hasKey("options"));
  Map<String, Object> optionsMap = (Map<String, Object>) sdkPipelineOptions.get("options");
  // The custom user type should serialize into the submitted options as the string it wraps.
  assertThat(optionsMap, hasEntry("jacksonIncompatible", (Object) "userCustomTypeTest"));
}
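The JacksonIncompatible user type and the JacksonIncompatibleOptions interface referenced above are defined elsewhere in the test class and are not shown on this page. A minimal sketch of what such a pair typically looks like, assuming Jackson module discovery via @AutoService; the class names match the test, but the bodies below are reconstructions, not the Beam source (in the real test they are nested static classes):

import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.auto.service.AutoService;
import org.apache.beam.sdk.options.PipelineOptions;

/** A type Jackson cannot serialize on its own: no default constructor, no bean accessors. */
class JacksonIncompatible {
  final String value;
  JacksonIncompatible(String value) {
    this.value = value;
  }
}

/** Exposes the Jackson-incompatible type as a pipeline option. */
interface JacksonIncompatibleOptions extends PipelineOptions {
  JacksonIncompatible getJacksonIncompatible();
  void setJacksonIncompatible(JacksonIncompatible value);
}

/**
 * A Jackson module that teaches the options serializer how to handle the type.
 * AutoService publishes it via ServiceLoader, which is how PipelineOptions
 * serialization discovers extra Jackson modules on the classpath.
 */
@AutoService(Module.class)
class RegisteredTestModule extends SimpleModule {
  RegisteredTestModule() {
    super("RegisteredTestModule");
    // Register a serializer/deserializer (or mix-in) for JacksonIncompatible here;
    // the assertion in the test implies the type serializes to its wrapped string.
  }
}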
Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in project beam by apache.
From the class DataflowPipelineTranslatorTest, method testPredefinedAddStep.
@Test
public void testPredefinedAddStep() throws Exception {
  DataflowPipelineOptions options = buildPipelineOptions();
  DataflowPipelineTranslator translator = DataflowPipelineTranslator.fromOptions(options);
  DataflowPipelineTranslator.registerTransformTranslator(
      EmbeddedTransform.class, new EmbeddedTranslator());
  // Create a predefined step using another pipeline
  Step predefinedStep = createPredefinedStep();
  // Create a pipeline that the predefined step will be embedded into
  Pipeline pipeline = Pipeline.create(options);
  pipeline
      .apply("ReadMyFile", TextIO.read().from("gs://bucket/in"))
      .apply(ParDo.of(new NoOpFn()))
      .apply(new EmbeddedTransform(predefinedStep.clone()))
      .apply(ParDo.of(new NoOpFn()));
  DataflowRunner runner = DataflowRunner.fromOptions(options);
  runner.replaceTransforms(pipeline);
  Job job =
      translator.translate(pipeline, runner, Collections.<DataflowPackage>emptyList()).getJob();
  assertAllStepOutputsHaveUniqueIds(job);
  List<Step> steps = job.getSteps();
  assertEquals(4, steps.size());
  // The input to the embedded step should match the output of the step before
  Map<String, Object> step1Out = getOutputPortReference(steps.get(1));
  Map<String, Object> step2In =
      getDictionary(steps.get(2).getProperties(), PropertyNames.PARALLEL_INPUT);
  assertEquals(step1Out, step2In);
  // The output from the embedded step should match the input of the step after
  Map<String, Object> step2Out = getOutputPortReference(steps.get(2));
  Map<String, Object> step3In =
      getDictionary(steps.get(3).getProperties(), PropertyNames.PARALLEL_INPUT);
  assertEquals(step2Out, step3In);
  // The step should not have been modified other than remapping the input
  Step predefinedStepClone = predefinedStep.clone();
  Step embeddedStepClone = steps.get(2).clone();
  predefinedStepClone.getProperties().remove(PropertyNames.PARALLEL_INPUT);
  embeddedStepClone.getProperties().remove(PropertyNames.PARALLEL_INPUT);
  assertEquals(predefinedStepClone, embeddedStepClone);
}
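EmbeddedTransform and EmbeddedTranslator are helpers defined inside the test class. A plausible sketch of the translator half, reconstructed from how the assertions behave; the step field on EmbeddedTransform, TranslationContext.getFullName, and an addStep overload accepting a prebuilt Step are all assumptions about this Beam version's internal translator API, not documented surface:

/** Hypothetical reconstruction: splices a canned Step into the job during translation. */
class EmbeddedTranslator implements TransformTranslator<EmbeddedTransform> {
  @Override
  public void translate(EmbeddedTransform transform, TranslationContext context) {
    // Rename the canned step after this transform's full name in the pipeline...
    transform.step.getProperties().put(PropertyNames.USER_NAME, context.getFullName(transform));
    // ...then emit it unchanged; the translator only remaps its PARALLEL_INPUT,
    // which is exactly what the equality assertions in the test verify.
    context.addStep(transform, transform.step);
  }
}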
Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in project beam by apache.
From the class DataflowPipelineTranslatorTest, method testSettingOfSdkPipelineOptions.
@Test
public void testSettingOfSdkPipelineOptions() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setRunner(DataflowRunner.class);
  Pipeline p = Pipeline.create(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(p, DataflowRunner.fromOptions(options), Collections.<DataflowPackage>emptyList())
          .getJob();
  Map<String, Object> sdkPipelineOptions = job.getEnvironment().getSdkPipelineOptions();
  assertThat(sdkPipelineOptions, hasKey("options"));
  Map<String, Object> optionsMap = (Map<String, Object>) sdkPipelineOptions.get("options");
  // Options configured by buildPipelineOptions() should round-trip into the job's environment.
  assertThat(optionsMap, hasEntry("appName", (Object) "DataflowPipelineTranslatorTest"));
  assertThat(optionsMap, hasEntry("project", (Object) "some-project"));
  assertThat(optionsMap, hasEntry("pathValidatorClass", (Object) GcsPathValidator.class.getName()));
  assertThat(optionsMap, hasEntry("runner", (Object) DataflowRunner.class.getName()));
  assertThat(optionsMap, hasEntry("jobName", (Object) "some-job-name"));
  assertThat(optionsMap, hasEntry("tempLocation", (Object) "gs://somebucket/some/path"));
  assertThat(optionsMap, hasEntry("stagingLocation", (Object) "gs://somebucket/some/path/staging/"));
  assertThat(optionsMap, hasEntry("stableUniqueNames", (Object) "WARNING"));
  assertThat(optionsMap, hasEntry("streaming", (Object) false));
  assertThat(optionsMap, hasEntry("numberOfWorkerHarnessThreads", (Object) 0));
}
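Every test on this page calls buildPipelineOptions(), whose body is not shown. A plausible reconstruction whose values simply mirror the assertions above; the real helper in Beam's test suite also stubs GCS access so staging and path validation never touch the network, and that wiring is omitted here:

private static DataflowPipelineOptions buildPipelineOptions() throws IOException {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setAppName("DataflowPipelineTranslatorTest");
  options.setProject("some-project");
  options.setJobName("some-job-name");
  options.setTempLocation("gs://somebucket/some/path");
  options.setStagingLocation("gs://somebucket/some/path/staging/");
  options.setRunner(DataflowRunner.class);
  // Assumption: the real helper additionally injects a mock GcsUtil at this point.
  return options;
}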
Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in project beam by apache.
From the class DataflowPipelineTranslatorTest, method testNetworkConfig.
@Test
public void testNetworkConfig() throws IOException {
  final String testNetwork = "test-network";
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setNetwork(testNetwork);
  Pipeline p = buildPipeline(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(p, DataflowRunner.fromOptions(options), Collections.<DataflowPackage>emptyList())
          .getJob();
  // The configured network should be propagated to the job's single worker pool.
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertEquals(testNetwork, job.getEnvironment().getWorkerPools().get(0).getNetwork());
}
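Outside of tests, the network is usually configured from command-line flags rather than a setter; PipelineOptionsFactory derives flag names from the option's getter/setter pair, so setNetwork corresponds to --network. A minimal sketch (the project and tempLocation values are placeholders):

String[] args = {
    "--project=some-project",
    "--network=test-network",
    "--tempLocation=gs://somebucket/some/path"
};
DataflowPipelineOptions options =
    PipelineOptionsFactory.fromArgs(args).as(DataflowPipelineOptions.class);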
Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in project beam by apache.
From the class DataflowPipelineTranslatorTest, method testToIterableTranslationWithIsmSideInput.
@Test
public void testToIterableTranslationWithIsmSideInput() throws Exception {
  // A "change detector" test that makes sure the translation
  // of getting a PCollectionView<Iterable<T>> does not change
  // in bad ways during refactor
  DataflowPipelineOptions options = buildPipelineOptions();
  DataflowPipelineTranslator translator = DataflowPipelineTranslator.fromOptions(options);
  Pipeline pipeline = Pipeline.create(options);
  pipeline.apply(Create.of(1, 2, 3)).apply(View.<Integer>asIterable());
  DataflowRunner runner = DataflowRunner.fromOptions(options);
  runner.replaceTransforms(pipeline);
  Job job =
      translator.translate(pipeline, runner, Collections.<DataflowPackage>emptyList()).getJob();
  assertAllStepOutputsHaveUniqueIds(job);
  List<Step> steps = job.getSteps();
  assertEquals(3, steps.size());
  @SuppressWarnings("unchecked")
  List<Map<String, Object>> toIsmRecordOutputs =
      (List<Map<String, Object>>) steps.get(1).getProperties().get(PropertyNames.OUTPUT_INFO);
  assertTrue(Structs.getBoolean(Iterables.getOnlyElement(toIsmRecordOutputs), "use_indexed_format"));
  Step collectionToSingletonStep = steps.get(2);
  assertEquals("CollectionToSingleton", collectionToSingletonStep.getKind());
}
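For context, the PCollectionView produced by View.asIterable is what a downstream ParDo consumes as a side input. A minimal usage sketch; mainInput is a hypothetical PCollection<String> standing in for any main input:

PCollectionView<Iterable<Integer>> view =
    pipeline.apply(Create.of(1, 2, 3)).apply(View.<Integer>asIterable());
mainInput.apply(
    ParDo.of(
            new DoFn<String, Integer>() {
              @ProcessElement
              public void processElement(ProcessContext c) {
                int sum = 0;
                // The iterable side input is read per window at process time.
                for (int i : c.sideInput(view)) {
                  sum += i;
                }
                c.output(sum);
              }
            })
        .withSideInputs(view));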