Example usage of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions from the Apache Beam project, taken from the class DataflowRunnerTest, method testApplyIsScopedToExactClass.
/**
 * Verifies that a pipeline traversal reports the exact composite-transform
 * instance that was applied, and additionally reports a nested Create.Values
 * composite (matched by class, since that inner instance is not obtainable).
 */
@Test
public void testApplyIsScopedToExactClass() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  Pipeline pipeline = Pipeline.create(options);

  Create.TimestampedValues<String> createTimestamped =
      Create.timestamped(Arrays.asList(TimestampedValue.of("TestString", Instant.now())));
  pipeline.apply(createTimestamped);

  CompositeTransformRecorder recorder = new CompositeTransformRecorder();
  pipeline.traverseTopologically(recorder);

  // The traversal also visits a nested Create.Values composite, but that
  // instance cannot be obtained directly, so it is matched by its class.
  assertThat(
      "Expected to have seen CreateTimestamped composite transform.",
      recorder.getCompositeTransforms(),
      hasItem(createTimestamped));
  assertThat(
      "Expected to have two composites, CreateTimestamped and Create.Values",
      recorder.getCompositeTransforms(),
      hasItem(Matchers.<PTransform<?, ?>>isA((Class) Create.Values.class)));
}
Example usage of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions from the Apache Beam project, taken from the class DataflowRunnerTest, method testNonExistentTempLocation.
/**
 * Verifies that fromOptions eagerly rejects a GCP temp location that points at
 * a non-existent bucket with a descriptive IllegalArgumentException.
 *
 * <p>Rewritten with the try/catch + fail pattern used by the other negative
 * tests in this class. The original version used {@code thrown.expect} and then
 * placed Mockito verification after the throwing call — those lines were
 * unreachable dead code, because the ExpectedException rule ends the test as
 * soon as the expected exception is thrown. Since validation fails before any
 * job submission, there is no job-creation call to verify.
 */
@Test
public void testNonExistentTempLocation() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setGcpTempLocation(NON_EXISTENT_BUCKET);
  try {
    DataflowRunner.fromOptions(options);
    fail("fromOptions should have failed");
  } catch (IllegalArgumentException e) {
    assertThat(
        e.getMessage(),
        containsString("Output path does not exist or is not writeable: " + NON_EXISTENT_BUCKET));
  }
}
Example usage of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions from the Apache Beam project, taken from the class DataflowRunnerTest, method testInvalidStagingLocation.
/**
 * Verifies that fromOptions rejects staging locations that are not valid
 * {@code gs://} paths: a {@code file://} URI and a bare relative path.
 *
 * <p>The two cases previously duplicated an identical try/catch body; they are
 * now driven by a loop, matching the data-driven style of testInvalidJobName.
 */
@Test
public void testInvalidStagingLocation() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  for (String invalidStagingLocation :
      Arrays.asList("file://my/staging/location", "my/staging/location")) {
    options.setStagingLocation(invalidStagingLocation);
    try {
      DataflowRunner.fromOptions(options);
      fail("fromOptions should have failed");
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage(), containsString("Expected a valid 'gs://' path but was given"));
    }
  }
}
Example usage of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions from the Apache Beam project, taken from the class DataflowRunnerTest, method testInvalidJobName.
/**
 * Verifies that job names violating the Dataflow naming rules — underscores,
 * a leading digit, a trailing hyphen — are rejected by fromOptions.
 *
 * <p>The original kept a parallel list of expected reasons that contained the
 * same string three times; a single constant replaces the index-coupled list.
 */
@Test
public void testInvalidJobName() throws IOException {
  // Every invalid name is reported with the same reason.
  final String expectedReason = "JobName invalid";
  for (String invalidName : Arrays.asList("invalid_name", "0invalid", "invalid-")) {
    DataflowPipelineOptions options = buildPipelineOptions();
    options.setJobName(invalidName);
    try {
      DataflowRunner.fromOptions(options);
      fail("Expected IllegalArgumentException for jobName " + options.getJobName());
    } catch (IllegalArgumentException e) {
      assertThat(e.getMessage(), containsString(expectedReason));
    }
  }
}
Example usage of org.apache.beam.runners.dataflow.options.DataflowPipelineOptions from the Apache Beam project, taken from the class DataflowPipelineTranslatorTest, method testMaxNumWorkersIsPassedWhenNoAlgorithmIsSet.
/**
 * Verifies that when no autoscaling algorithm is configured, translation still
 * propagates maxNumWorkers into the worker pool's autoscaling settings while
 * leaving the algorithm unset.
 */
@Test
public void testMaxNumWorkersIsPassedWhenNoAlgorithmIsSet() throws IOException {
  final DataflowPipelineWorkerPoolOptions.AutoscalingAlgorithmType noScaling = null;

  DataflowPipelineOptions options = buildPipelineOptions();
  options.setAutoscalingAlgorithm(noScaling);
  options.setMaxNumWorkers(42);

  Pipeline pipeline = buildPipeline(options);
  pipeline.traverseTopologically(new RecordingPipelineVisitor());

  DataflowRunner runner = DataflowRunner.fromOptions(options);
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(pipeline, runner, Collections.<DataflowPackage>emptyList())
          .getJob();

  // Exactly one worker pool is expected; its autoscaling settings carry the
  // max worker count but no algorithm.
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertNull(job.getEnvironment().getWorkerPools().get(0).getAutoscalingSettings().getAlgorithm());
  assertEquals(
      42,
      job.getEnvironment()
          .getWorkerPools()
          .get(0)
          .getAutoscalingSettings()
          .getMaxNumWorkers()
          .intValue());
}
Aggregations