Use of org.apache.beam.sdk.Pipeline in the project beam by apache.
In the class TestDataflowRunnerTest, the method testBatchOnSuccessMatcherWhenPipelineSucceeds.
/**
 * Verifies that the on-success matcher registered through {@link TestPipelineOptions} is
 * exercised when a batch pipeline run completes in the {@code DONE} state.
 */
@Test
public void testBatchOnSuccessMatcherWhenPipelineSucceeds() throws Exception {
  Pipeline pipeline = TestPipeline.create(options);
  PCollection<Integer> elements = pipeline.apply(Create.of(1, 2, 3));
  PAssert.that(elements).containsInAnyOrder(1, 2, 3);

  // Stub a job that reports successful completion.
  final DataflowPipelineJob mockJob = Mockito.mock(DataflowPipelineJob.class);
  when(mockJob.getState()).thenReturn(State.DONE);
  when(mockJob.getProjectId()).thenReturn("test-project");
  when(mockJob.getJobId()).thenReturn("test-job");

  DataflowRunner mockRunner = Mockito.mock(DataflowRunner.class);
  when(mockRunner.run(any(Pipeline.class))).thenReturn(mockJob);

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  options.as(TestPipelineOptions.class).setOnSuccessMatcher(new TestSuccessMatcher(mockJob, 1));
  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(generateMockMetricResponse(true /* success */, true));
  runner.run(pipeline, mockRunner);
}
Use of org.apache.beam.sdk.Pipeline in the project beam by apache.
In the class TestDataflowRunnerTest, the method testCheckingForSuccessWhenPAssertFails.
/**
 * Tests that a tentative {@link PAssert} failure is already treated as conclusive:
 * {@code checkForPAssertSuccess} yields {@code Optional.of(false)}.
 */
@Test
public void testCheckingForSuccessWhenPAssertFails() throws Exception {
  DataflowPipelineJob job = spy(new DataflowPipelineJob(mockClient, "test-job", options, null));
  Pipeline pipeline = TestPipeline.create(options);
  PCollection<Integer> elements = pipeline.apply(Create.of(1, 2, 3));
  PAssert.that(elements).containsInAnyOrder(1, 2, 3);

  // The returned metrics indicate the assertion did not succeed.
  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(buildJobMetrics(generateMockMetrics(false /* success */, true)));

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  doReturn(State.DONE).when(job).getState();
  assertThat(runner.checkForPAssertSuccess(job), equalTo(Optional.of(false)));
}
Use of org.apache.beam.sdk.Pipeline in the project beam by apache.
In the class TestDataflowRunnerTest, the method testBatchOnCreateMatcher.
/**
 * Verifies that the on-create matcher registered through {@link TestPipelineOptions} is
 * exercised for a batch pipeline run.
 */
@Test
public void testBatchOnCreateMatcher() throws Exception {
  Pipeline pipeline = TestPipeline.create(options);
  PCollection<Integer> elements = pipeline.apply(Create.of(1, 2, 3));
  PAssert.that(elements).containsInAnyOrder(1, 2, 3);

  // Stub a job that reports successful completion.
  final DataflowPipelineJob mockJob = Mockito.mock(DataflowPipelineJob.class);
  when(mockJob.getState()).thenReturn(State.DONE);
  when(mockJob.getProjectId()).thenReturn("test-project");
  when(mockJob.getJobId()).thenReturn("test-job");

  DataflowRunner mockRunner = Mockito.mock(DataflowRunner.class);
  when(mockRunner.run(any(Pipeline.class))).thenReturn(mockJob);

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  options.as(TestPipelineOptions.class).setOnCreateMatcher(new TestSuccessMatcher(mockJob, 0));
  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(generateMockMetricResponse(true /* success */, true));
  runner.run(pipeline, mockRunner);
}
Use of org.apache.beam.sdk.Pipeline in the project beam by apache.
In the class TestDataflowRunnerTest, the method testGetJobMetricsThatSucceeds.
/**
 * Verifies that {@code getJobMetrics} returns exactly the metrics supplied by the
 * Dataflow client.
 */
@Test
public void testGetJobMetricsThatSucceeds() throws Exception {
  DataflowPipelineJob job = spy(new DataflowPipelineJob(mockClient, "test-job", options, null));
  Pipeline pipeline = TestPipeline.create(options);
  pipeline.apply(Create.of(1, 2, 3));

  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(generateMockMetricResponse(true /* success */, true));
  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);

  JobMetrics metrics = runner.getJobMetrics(job);
  assertEquals(1, metrics.getMetrics().size());
  assertEquals(generateMockMetrics(true /* success */, true), metrics.getMetrics());
}
Use of org.apache.beam.sdk.Pipeline in the project beam by apache.
In the class DataflowRunnerTest, the method testTemplateRunnerFullCompletion.
/**
 * Tests that the {@link DataflowRunner} with {@code --templateLocation} returns normally
 * when the runner is successfully run.
 */
@Test
public void testTemplateRunnerFullCompletion() throws Exception {
  File existingFile = tmpFolder.newFile();

  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setJobName("TestJobName");
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setProject("test-project");
  options.setRunner(DataflowRunner.class);
  options.setTemplateLocation(existingFile.getPath());
  options.setTempLocation(tmpFolder.getRoot().getPath());

  Pipeline.create(options).run();
  expectedLogs.verifyInfo("Template successfully created");
}
Aggregations