Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in the Apache Beam project.
The class DataflowRunnerTest, method testGcsUploadBufferSizeIsSetForStreamingWhenDefault.
@Test
public void testGcsUploadBufferSizeIsSetForStreamingWhenDefault() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setStreaming(true);
  options.setRunner(DataflowRunner.class);
  Pipeline pipeline = Pipeline.create(options);

  // run() mutates the options as a side effect of runner instantiation. Checking via
  // DataflowRunner.fromOptions(options) would also work, but would not prove that the
  // pipeline itself carries the expected option value.
  pipeline.run();

  assertEquals(
      DataflowRunner.GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT,
      options.getGcsUploadBufferSizeBytes().intValue());
}
Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in the Apache Beam project.
The class DataflowRunnerTest, method testNoProjectFails.
@Test
public void testNoProjectFails() {
  DataflowPipelineOptions dataflowOptions = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  // Null out the project explicitly; otherwise the default instance factory may pick up
  // credentials from the local environment and the test would not fail as intended.
  dataflowOptions.setProject(null);
  dataflowOptions.setRunner(DataflowRunner.class);

  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("Project id");
  thrown.expectMessage("when running a Dataflow in the cloud");

  DataflowRunner.fromOptions(dataflowOptions);
}
Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in the Apache Beam project.
The class DataflowRunnerTest, method testNoStagingLocationAndNoTempLocationFails.
@Test
public void testNoStagingLocationAndNoTempLocationFails() {
  DataflowPipelineOptions dataflowOptions = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  dataflowOptions.setProject("foo-project");
  dataflowOptions.setRunner(DataflowRunner.class);

  // With neither a staging nor a temp location set, runner construction must reject the options.
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(
      "DataflowRunner requires gcpTempLocation, but failed to retrieve a value from PipelineOption");

  DataflowRunner.fromOptions(dataflowOptions);
}
Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in the Apache Beam project.
The class DataflowPipelineTranslatorTest, method testWorkerMachineTypeConfig.
@Test
public void testWorkerMachineTypeConfig() throws IOException {
  final String machineType = "test-machine-type";
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setWorkerMachineType(machineType);

  Pipeline pipeline = buildPipeline(options);
  pipeline.traverseTopologically(new RecordingPipelineVisitor());

  // Translate the pipeline and verify the configured machine type reached the worker pool.
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(
              pipeline,
              DataflowRunner.fromOptions(options),
              Collections.<DataflowPackage>emptyList())
          .getJob();

  List<WorkerPool> workerPools = job.getEnvironment().getWorkerPools();
  assertEquals(1, workerPools.size());
  assertEquals(machineType, workerPools.get(0).getMachineType());
}
Use of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions in the Apache Beam project.
The class DataflowPipelineTranslatorTest, method testDiskSizeGbConfig.
@Test
public void testDiskSizeGbConfig() throws IOException {
  final Integer diskSizeGb = 1234;
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setDiskSizeGb(diskSizeGb);

  Pipeline pipeline = buildPipeline(options);
  pipeline.traverseTopologically(new RecordingPipelineVisitor());

  // Translate the pipeline and verify the configured disk size reached the worker pool.
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(
              pipeline,
              DataflowRunner.fromOptions(options),
              Collections.<DataflowPackage>emptyList())
          .getJob();

  List<WorkerPool> workerPools = job.getEnvironment().getWorkerPools();
  assertEquals(1, workerPools.size());
  assertEquals(diskSizeGb, workerPools.get(0).getDiskSizeGb());
}
Aggregations