use of com.google.api.services.dataflow.model.Job in project beam by apache.
the class DataflowMetricsTest method testCachingMetricUpdates.
@Test
public void testCachingMetricUpdates() throws IOException {
  Job modelJob = new Job();
  modelJob.setCurrentState(State.RUNNING.toString());
  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  when(job.getState()).thenReturn(State.DONE);
  job.jobId = JOB_ID;
  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(ImmutableList.<MetricUpdate>of());
  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);
  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  verify(dataflowClient, times(0)).getJobMetrics(JOB_ID);
  // First query hits the service; because the job is in a terminal state
  // (DONE), the result is cached.
  dataflowMetrics.queryMetrics(null);
  verify(dataflowClient, times(1)).getJobMetrics(JOB_ID);
  // Second query is served from the cache: still exactly one service call.
  dataflowMetrics.queryMetrics(null);
  verify(dataflowClient, times(1)).getJobMetrics(JOB_ID);
}
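The behavior exercised here is that metrics for a job already in a terminal state are fetched from the service at most once and later queries are served from a cache. A minimal sketch of that pattern, with hypothetical class and field names (not Beam's actual DataflowMetrics implementation):

// Hypothetical sketch -- illustrative names, not Beam's real DataflowMetrics.
class CachingMetricsFetcher {

  private final DataflowPipelineJob job;
  private final DataflowClient client;
  private JobMetrics terminalMetrics; // cached once the job is terminal

  CachingMetricsFetcher(DataflowPipelineJob job, DataflowClient client) {
    this.job = job;
    this.client = client;
  }

  JobMetrics getJobMetrics() throws IOException {
    if (terminalMetrics != null) {
      return terminalMetrics; // terminal job: metrics can no longer change
    }
    JobMetrics metrics = client.getJobMetrics(job.getJobId());
    if (job.getState().isTerminal()) {
      terminalMetrics = metrics; // cache so later queries skip the RPC
    }
    return metrics;
  }
}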
use of com.google.api.services.dataflow.model.Job in project beam by apache.
the class DataflowPipelineJobTest method testCancelTerminatedJobWithStaleState.
/**
* Test that {@link DataflowPipelineJob#cancel} doesn't throw if the Dataflow service returns
* non-terminal state even though the cancel API call failed, which can happen in practice.
*
* <p>TODO: delete this code if the API calls become consistent.
*/
@Test
public void testCancelTerminatedJobWithStaleState() throws IOException {
  Dataflow.Projects.Locations.Jobs.Get statusRequest =
      mock(Dataflow.Projects.Locations.Jobs.Get.class);
  Job statusResponse = new Job();
  statusResponse.setCurrentState("JOB_STATE_RUNNING");
  when(mockJobs.get(PROJECT_ID, REGION_ID, JOB_ID)).thenReturn(statusRequest);
  when(statusRequest.execute()).thenReturn(statusResponse);
  // The cancel (update) call fails even though the job still reports RUNNING.
  Dataflow.Projects.Locations.Jobs.Update update =
      mock(Dataflow.Projects.Locations.Jobs.Update.class);
  when(mockJobs.update(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID), any(Job.class)))
      .thenReturn(update);
  when(update.execute()).thenThrow(new IOException("Job has terminated in state SUCCESS"));
  DataflowPipelineJob job =
      new DataflowPipelineJob(DataflowClient.create(options), JOB_ID, options, null);
  State returned = job.cancel();
  assertThat(returned, equalTo(State.RUNNING));
  expectedLogs.verifyWarn("Cancel failed because job is already terminated.");
}
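The production logic this covers is roughly: attempt the cancel RPC, and on failure fall back to the job's reported state, treating a "job has terminated" failure as a benign race even when that reported state is stale. A sketch of that shape (illustrative, not verbatim Beam code; makeCancelRequest is a hypothetical helper):

// Sketch of the fallback path (illustrative, not verbatim Beam code).
State cancelWithFallback() throws IOException {
  try {
    Job cancelled = dataflowClient.updateJob(jobId, makeCancelRequest()); // hypothetical helper
    return MonitoringUtil.toState(cancelled.getCurrentState());
  } catch (IOException e) {
    State state = getState();
    // The service can report a stale non-terminal state even though the
    // cancel was rejected because the job already finished.
    if (state.isTerminal() || e.getMessage().contains("terminated")) {
      LOG.warn("Cancel failed because job is already terminated.");
      return state;
    }
    throw e;
  }
}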
use of com.google.api.services.dataflow.model.Job in project beam by apache.
the class DataflowPipelineJobTest method testWaitToFinishMessagesFail.
@Test
public void testWaitToFinishMessagesFail() throws Exception {
  Dataflow.Projects.Locations.Jobs.Get statusRequest =
      mock(Dataflow.Projects.Locations.Jobs.Get.class);
  Job statusResponse = new Job();
  statusResponse.setCurrentState("JOB_STATE_" + State.DONE.name());
  when(mockJobs.get(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID))).thenReturn(statusRequest);
  when(statusRequest.execute()).thenReturn(statusResponse);
  MonitoringUtil.JobMessagesHandler jobHandler = mock(MonitoringUtil.JobMessagesHandler.class);
  Dataflow.Projects.Locations.Jobs.Messages mockMessages =
      mock(Dataflow.Projects.Locations.Jobs.Messages.class);
  Messages.List listRequest = mock(Dataflow.Projects.Locations.Jobs.Messages.List.class);
  when(mockJobs.messages()).thenReturn(mockMessages);
  when(mockMessages.list(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID))).thenReturn(listRequest);
  when(listRequest.setPageToken(eq((String) null))).thenReturn(listRequest);
  // Listing messages always times out, so no polling iteration completes cleanly.
  when(listRequest.execute()).thenThrow(SocketTimeoutException.class);
  DataflowPipelineJob job =
      new DataflowPipelineJob(
          DataflowClient.create(options),
          JOB_ID,
          options,
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of());
  State state = job.waitUntilFinish(Duration.standardMinutes(5), jobHandler, fastClock, fastClock);
  // Even though the service reports DONE, the failed message fetches keep
  // waitUntilFinish from returning a state before the deadline.
  assertEquals(null, state);
}
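Why null is the expected result: each polling iteration that fails to fetch messages is treated as errored, so the terminal state is never returned, and once the deadline passes waitUntilFinish gives up. A rough sketch of that loop shape (hypothetical helper names and simplified timing, not Beam's exact control flow):

// Rough sketch of the polling loop (hypothetical names, simplified timing).
State waitSketch(Duration timeout) throws InterruptedException {
  Instant deadline = Instant.now().plus(timeout);
  while (Instant.now().isBefore(deadline)) {
    State state = getState();
    boolean hasError = false;
    try {
      processNewMessages(); // hypothetical; throws SocketTimeoutException in this test
    } catch (IOException e) {
      hasError = true; // transient failure: try again next iteration
    }
    if (!hasError && state.isTerminal()) {
      return state;
    }
    Thread.sleep(500); // the real code uses backoff with an injectable sleeper
  }
  return null; // no clean iteration observed a terminal state in time
}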
use of com.google.api.services.dataflow.model.Job in project beam by apache.
the class DataflowPipelineJobTest method testWaitUntilFinishNoRepeatedLogs.
/**
* Tests that a {@link DataflowPipelineJob} does not duplicate messages.
*/
@Test
public void testWaitUntilFinishNoRepeatedLogs() throws Exception {
  DataflowPipelineJob job = new DataflowPipelineJob(mockDataflowClient, JOB_ID, options, null);
  Sleeper sleeper = new ZeroSleeper();
  NanoClock nanoClock = mock(NanoClock.class);
  Instant separatingTimestamp = new Instant(42L);
  JobMessage theMessage = infoMessage(separatingTimestamp, "nothing");
  MonitoringUtil mockMonitor = mock(MonitoringUtil.class);
  when(mockMonitor.getJobMessages(anyString(), anyLong()))
      .thenReturn(ImmutableList.of(theMessage));
  // The Job always reports RUNNING across all calls.
  Job fakeJob = new Job();
  fakeJob.setCurrentState("JOB_STATE_RUNNING");
  when(mockDataflowClient.getJob(anyString())).thenReturn(fakeJob);
  // After waitUntilFinish, the DataflowPipelineJob should have recorded the
  // latest message timestamp it saw.
  when(nanoClock.nanoTime()).thenReturn(0L).thenReturn(2000000000L);
  job.waitUntilFinish(Duration.standardSeconds(1), mockHandler, sleeper, nanoClock, mockMonitor);
  verify(mockHandler).process(ImmutableList.of(theMessage));
  // The second waitUntilFinish should request messages starting at
  // separatingTimestamp, so the monitor returns only new messages.
  when(nanoClock.nanoTime()).thenReturn(3000000000L).thenReturn(6000000000L);
  job.waitUntilFinish(Duration.standardSeconds(1), mockHandler, sleeper, nanoClock, mockMonitor);
  verify(mockMonitor).getJobMessages(anyString(), eq(separatingTimestamp.getMillis()));
}
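The deduplication being verified reduces to: remember the timestamp of the newest message processed, and pass it as the start time on the next poll. A minimal sketch, with illustrative field and helper names (timestampMillisOf is hypothetical):

// Minimal dedup sketch (illustrative names, not Beam's actual fields).
private long lastTimestampMillis = 0L;

void pollOnce(MonitoringUtil monitor, MonitoringUtil.JobMessagesHandler handler)
    throws IOException {
  List<JobMessage> messages = monitor.getJobMessages(jobId, lastTimestampMillis);
  if (!messages.isEmpty()) {
    handler.process(messages);
    // Record the newest timestamp so the next poll skips messages already seen;
    // timestampMillisOf is a hypothetical helper reading the message's time field.
    lastTimestampMillis = timestampMillisOf(messages.get(messages.size() - 1));
  }
}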
use of com.google.api.services.dataflow.model.Job in project beam by apache.
the class DataflowPipelineJobTest method mockWaitToFinishInState.
public State mockWaitToFinishInState(State state) throws Exception {
  Dataflow.Projects.Locations.Jobs.Get statusRequest =
      mock(Dataflow.Projects.Locations.Jobs.Get.class);
  Job statusResponse = new Job();
  statusResponse.setCurrentState("JOB_STATE_" + state.name());
  if (state == State.UPDATED) {
    // An updated job points at its replacement.
    statusResponse.setReplacedByJobId(REPLACEMENT_JOB_ID);
  }
  when(mockJobs.get(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID))).thenReturn(statusRequest);
  when(statusRequest.execute()).thenReturn(statusResponse);
  DataflowPipelineJob job =
      new DataflowPipelineJob(
          DataflowClient.create(options),
          JOB_ID,
          options,
          ImmutableMap.<AppliedPTransform<?, ?, ?>, String>of());
  return job.waitUntilFinish(Duration.standardMinutes(1), null, fastClock, fastClock);
}
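Callers exercise this helper once per terminal state; for example, a test for the DONE case might look like this (illustrative usage):

@Test
public void testWaitToFinishDone() throws Exception {
  // The mocked service reports JOB_STATE_DONE, so the wait returns DONE.
  assertEquals(State.DONE, mockWaitToFinishInState(State.DONE));
}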