Use of org.apache.beam.runners.core.construction.SdkComponents in project beam by apache.
From the class DataflowPipelineTranslatorTest, the method testSubnetworkConfig verifies that a subnetwork set on the pipeline options is propagated to the single worker pool of the translated Dataflow job.
@Test
public void testSubnetworkConfig() throws IOException {
  final String testSubnetwork = "regions/REGION/subnetworks/SUBNETWORK";
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setSubnetwork(testSubnetwork);

  Pipeline p = buildPipeline(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  SdkComponents sdkComponents = createSdkComponents(options);
  RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(p, sdkComponents, true);
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(
              p,
              pipelineProto,
              sdkComponents,
              DataflowRunner.fromOptions(options),
              Collections.emptyList())
          .getJob();

  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertEquals(testSubnetwork, job.getEnvironment().getWorkerPools().get(0).getSubnetwork());
}
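All of the snippets on this page call a few private helpers from DataflowPipelineTranslatorTest that the aggregator does not show: buildPipelineOptions(), buildPipeline(options), createSdkComponents(options), and RecordingPipelineVisitor. The sketch below is a plausible reconstruction for readability only; the placeholder project, region, and bucket values, and the exact option defaults, are assumptions rather than the actual Beam test code.

// Hypothetical reconstruction of the shared test helpers (not the real Beam code).
private static DataflowPipelineOptions buildPipelineOptions() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setProject("some-project");  // placeholder values
  options.setRegion("some-region");
  options.setTempLocation("gs://somebucket/some/path");
  options.setGcpCredential(new TestCredential());
  return options;
}

private static Pipeline buildPipeline(DataflowPipelineOptions options) {
  // A trivial pipeline suffices: these tests only inspect the job environment.
  Pipeline p = Pipeline.create(options);
  p.apply(Create.of("hello", "world"));
  return p;
}

private static SdkComponents createSdkComponents(PipelineOptions options) {
  // Register a default Docker environment matching the job's container image.
  SdkComponents sdkComponents = SdkComponents.create();
  sdkComponents.registerEnvironment(
      Environments.createDockerEnvironment(
          DataflowRunner.getContainerImageForJob(options.as(DataflowPipelineOptions.class))));
  return sdkComponents;
}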
Use of org.apache.beam.runners.core.construction.SdkComponents in project beam by apache.
From the class DataflowPipelineTranslatorTest, the method testDataflowServiceOptionsSet verifies that Dataflow service options set on the pipeline options appear unchanged in the translated job's environment.
@Test
public void testDataflowServiceOptionsSet() throws IOException {
  final List<String> dataflowServiceOptions =
      Stream.of("whizz=bang", "foo=bar").collect(Collectors.toList());
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setDataflowServiceOptions(dataflowServiceOptions);

  Pipeline p = buildPipeline(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  SdkComponents sdkComponents = createSdkComponents(options);
  RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(p, sdkComponents, true);
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(
              p,
              pipelineProto,
              sdkComponents,
              DataflowRunner.fromOptions(options),
              Collections.emptyList())
          .getJob();

  assertEquals(dataflowServiceOptions, job.getEnvironment().getServiceOptions());
}
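Outside a test, the same list would usually come from command-line flags rather than a setter. A minimal sketch, assuming PipelineOptionsFactory's usual comma-separated parsing of list-valued options:

// Sketch only: list-valued options are typically passed as one comma-separated flag.
String[] args = {"--dataflowServiceOptions=whizz=bang,foo=bar"};
DataflowPipelineOptions fromArgs =
    PipelineOptionsFactory.fromArgs(args).as(DataflowPipelineOptions.class);
// fromArgs.getDataflowServiceOptions() should yield ["whizz=bang", "foo=bar"].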
Use of org.apache.beam.runners.core.construction.SdkComponents in project beam by apache.
From the class DataflowPipelineTranslatorTest, the method testPortablePipelineContainsExpectedDependenciesAndCapabilities verifies that dependencies (staged artifacts) and SDK capabilities registered on the environment are preserved in the translated portable pipeline proto.
@Test
public void testPortablePipelineContainsExpectedDependenciesAndCapabilities() throws Exception {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setExperiments(Arrays.asList("beam_fn_api"));
  DataflowRunner runner = DataflowRunner.fromOptions(options);
  DataflowPipelineTranslator translator = DataflowPipelineTranslator.fromOptions(options);

  Pipeline pipeline = Pipeline.create(options);
  pipeline
      .apply(Impulse.create())
      .apply(
          MapElements.via(
              new SimpleFunction<byte[], String>() {
                @Override
                public String apply(byte[] input) {
                  return "";
                }
              }))
      .apply(Window.into(FixedWindows.of(Duration.standardMinutes(1))));
  runner.replaceV1Transforms(pipeline);

  File file1 = File.createTempFile("file1-", ".txt");
  file1.deleteOnExit();
  File file2 = File.createTempFile("file2-", ".txt");
  file2.deleteOnExit();

  SdkComponents sdkComponents = SdkComponents.create();
  sdkComponents.registerEnvironment(
      Environments.createDockerEnvironment(DataflowRunner.getContainerImageForJob(options))
          .toBuilder()
          .addAllDependencies(
              Environments.getArtifacts(
                  ImmutableList.of("file1.txt=" + file1, "file2.txt=" + file2)))
          .addAllCapabilities(Environments.getJavaCapabilities())
          .build());
  RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(pipeline, sdkComponents, true);

  JobSpecification result =
      translator.translate(
          pipeline, pipelineProto, sdkComponents, runner, Collections.emptyList());
  Components componentsProto = result.getPipelineProto().getComponents();

  assertThat(
      Iterables.getOnlyElement(componentsProto.getEnvironmentsMap().values()).getCapabilitiesList(),
      containsInAnyOrder(Environments.getJavaCapabilities().toArray(new String[0])));
  assertThat(
      Iterables.getOnlyElement(componentsProto.getEnvironmentsMap().values()).getDependenciesList(),
      containsInAnyOrder(
          Environments.getArtifacts(ImmutableList.of("file1.txt=" + file1, "file2.txt=" + file2))
              .toArray(new ArtifactInformation[0])));
}
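The "file1.txt=" + file1 strings use the stagingName=localPath form that Environments.getArtifacts accepts. A minimal standalone sketch of building such an environment; the image name and jar path below are placeholders, not values from the test:

// Sketch: attach staged dependencies and Java SDK capabilities to a Docker
// environment before translation. Image and path are placeholders.
SdkComponents components = SdkComponents.create();
components.registerEnvironment(
    Environments.createDockerEnvironment("apache/beam_java8_sdk:latest")  // placeholder image
        .toBuilder()
        .addAllDependencies(Environments.getArtifacts(ImmutableList.of("extra.jar=/tmp/extra.jar")))
        .addAllCapabilities(Environments.getJavaCapabilities())
        .build());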
Use of org.apache.beam.runners.core.construction.SdkComponents in project beam by apache.
From the class DataflowPipelineTranslatorTest, the method testSubnetworkConfigMissing verifies that when no subnetwork is configured, the translated worker pool's subnetwork remains unset.
@Test
public void testSubnetworkConfigMissing() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();

  Pipeline p = buildPipeline(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  SdkComponents sdkComponents = createSdkComponents(options);
  RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(p, sdkComponents, true);
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(
              p,
              pipelineProto,
              sdkComponents,
              DataflowRunner.fromOptions(options),
              Collections.emptyList())
          .getJob();

  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertNull(job.getEnvironment().getWorkerPools().get(0).getSubnetwork());
}
Use of org.apache.beam.runners.core.construction.SdkComponents in project beam by apache.
From the class DataflowPipelineTranslatorTest, the method testNetworkConfig verifies that a network set on the pipeline options is propagated to the single worker pool of the translated Dataflow job.
@Test
public void testNetworkConfig() throws IOException {
  final String testNetwork = "test-network";
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setNetwork(testNetwork);

  Pipeline p = buildPipeline(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  SdkComponents sdkComponents = createSdkComponents(options);
  RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(p, sdkComponents, true);
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(
              p,
              pipelineProto,
              sdkComponents,
              DataflowRunner.fromOptions(options),
              Collections.emptyList())
          .getJob();

  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertEquals(testNetwork, job.getEnvironment().getWorkerPools().get(0).getNetwork());
}
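Note the asymmetry with testSubnetworkConfig above: a network is referenced by its short resource name, while a subnetwork uses the regional resource-path form. A small sketch with placeholder names:

// Sketch: network takes a short name; subnetwork takes a regional resource path.
DataflowPipelineOptions opts = buildPipelineOptions();
opts.setNetwork("my-network");  // placeholder
opts.setSubnetwork("regions/us-central1/subnetworks/my-subnetwork");  // placeholder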