Example use of org.apache.beam.sdk.Pipeline in the Apache Beam project.
From class DataflowRunnerTest, method testUpdateNonExistentPipeline.
@Test
public void testUpdateNonExistentPipeline() throws IOException {
  // Updating requires an already-running job whose name matches; none exists,
  // so run() must fail. The expected message uses the lowercased form of the
  // job name ("badjobname"), which is how the runner reports it.
  // NOTE: expectations are registered before the throwing call, as the
  // ExpectedException rule requires.
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("Could not find running job named badjobname");

  DataflowPipelineOptions updateOptions = buildPipelineOptions();
  updateOptions.setUpdate(true);
  updateOptions.setJobName("badJobName");

  Pipeline pipeline = buildDataflowPipeline(updateOptions);
  pipeline.run();
}
Example use of org.apache.beam.sdk.Pipeline in the Apache Beam project.
From class DataflowRunnerTest, method testGcsUploadBufferSizeIsSetForStreamingWhenDefault.
@Test
public void testGcsUploadBufferSizeIsSetForStreamingWhenDefault() throws IOException {
  // Verifies that a streaming pipeline ends up with the default GCS upload
  // buffer size when the user has not set one explicitly.
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setStreaming(true);
  options.setRunner(DataflowRunner.class);

  Pipeline pipeline = Pipeline.create(options);
  // Instantiating the runner during run() mutates the options as a side
  // effect. Checking after run() (rather than via
  // DataflowRunner.fromOptions(options)) ensures the pipeline itself was
  // built with the adjusted options.
  pipeline.run();

  assertEquals(
      DataflowRunner.GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT,
      options.getGcsUploadBufferSizeBytes().intValue());
}
Example use of org.apache.beam.sdk.Pipeline in the Apache Beam project.
From class DataflowPipelineTranslatorTest, method testWorkerMachineTypeConfig.
@Test
public void testWorkerMachineTypeConfig() throws IOException {
  // Verifies that a user-specified worker machine type is propagated into
  // the translated job's worker pool configuration.
  final String machineType = "test-machine-type";

  DataflowPipelineOptions options = buildPipelineOptions();
  options.setWorkerMachineType(machineType);

  Pipeline pipeline = buildPipeline(options);
  pipeline.traverseTopologically(new RecordingPipelineVisitor());

  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(
              pipeline,
              DataflowRunner.fromOptions(options),
              Collections.<DataflowPackage>emptyList())
          .getJob();

  // Exactly one worker pool is expected, carrying the configured machine type.
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  WorkerPool pool = job.getEnvironment().getWorkerPools().get(0);
  assertEquals(machineType, pool.getMachineType());
}
Example use of org.apache.beam.sdk.Pipeline in the Apache Beam project.
From class DataflowPipelineTranslatorTest, method testDiskSizeGbConfig.
@Test
public void testDiskSizeGbConfig() throws IOException {
  // Verifies that a user-specified disk size is propagated into the
  // translated job's worker pool configuration.
  final Integer diskSizeGb = 1234;

  DataflowPipelineOptions options = buildPipelineOptions();
  options.setDiskSizeGb(diskSizeGb);

  Pipeline pipeline = buildPipeline(options);
  pipeline.traverseTopologically(new RecordingPipelineVisitor());

  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(
              pipeline,
              DataflowRunner.fromOptions(options),
              Collections.<DataflowPackage>emptyList())
          .getJob();

  // Exactly one worker pool is expected, carrying the configured disk size.
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertEquals(diskSizeGb, job.getEnvironment().getWorkerPools().get(0).getDiskSizeGb());
}
Example use of org.apache.beam.sdk.Pipeline in the Apache Beam project.
From class CrashingRunnerTest, method applySucceeds.
@Test
public void applySucceeds() {
  // The CrashingRunner is only expected to fail at run() time; applying a
  // transform to the pipeline should succeed without error.
  PipelineOptions options = PipelineOptionsFactory.create();
  options.setRunner(CrashingRunner.class);

  Pipeline pipeline = Pipeline.create(options);
  pipeline.apply(Create.of(1, 2, 3));
}
Aggregations