Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
From the class TestDataflowRunnerTest, method testGetJobMetricsThatSucceeds.
@Test
public void testGetJobMetricsThatSucceeds() throws Exception {
  // Spy on a real job so the runner fetches metrics through the mocked client.
  DataflowPipelineJob job = spy(new DataflowPipelineJob(mockClient, "test-job", options, null));
  Pipeline pipeline = TestPipeline.create(options);
  pipeline.apply(Create.of(1, 2, 3));

  // The client reports one matching (successful) metric set.
  when(mockClient.getJobMetrics(anyString()))
      .thenReturn(generateMockMetricResponse(true /* success */, true));

  TestDataflowRunner runner = TestDataflowRunner.fromOptionsAndClient(options, mockClient);
  JobMetrics fetched = runner.getJobMetrics(job);

  // Exactly the mocked metrics come back, unmodified.
  assertEquals(1, fetched.getMetrics().size());
  assertEquals(generateMockMetrics(true /* success */, true), fetched.getMetrics());
}
Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
From the class DataflowMetricsTest, method testEmptyMetricUpdates.
@Test
public void testEmptyMetricUpdates() throws IOException {
  // A running batch job whose metrics response carries a null metrics list
  // should yield empty query results rather than throwing.
  // (Removed an unused local `modelJob`: it was constructed and had its state
  // set but was never referenced — the mocked `job` below is what the test uses.)
  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
  when(options.isStreaming()).thenReturn(false);
  when(job.getDataflowOptions()).thenReturn(options);
  when(job.getState()).thenReturn(State.RUNNING);
  job.jobId = JOB_ID;

  // The service may legitimately return a JobMetrics with no metrics at all.
  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(null);
  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  MetricQueryResults result = dataflowMetrics.allMetrics();
  assertThat(ImmutableList.copyOf(result.getCounters()), is(empty()));
  assertThat(ImmutableList.copyOf(result.getDistributions()), is(empty()));
}
Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
From the class DataflowMetricsTest, method testDistributionUpdatesStreaming.
@Test
public void testDistributionUpdatesStreaming() throws IOException {
  // Mock a streaming job in RUNNING state.
  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
  when(options.isStreaming()).thenReturn(true);
  when(job.getDataflowOptions()).thenReturn(options);
  when(job.getState()).thenReturn(State.RUNNING);
  job.jobId = JOB_ID;

  // Map the internal step id "s2" back to its user-visible name.
  AppliedPTransform<?, ?, ?> step = mock(AppliedPTransform.class);
  when(step.getFullName()).thenReturn("myStepName");
  job.transformStepNames = HashBiMap.create();
  job.transformStepNames.put(step, "s2");

  // The parser relies on the fact that one tentative and one committed metric
  // update exist in the job metrics results.
  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(
      ImmutableList.of(
          makeDistributionMetricUpdate(
              "distributionName", "distributionNamespace", "s2", 18L, 2L, 2L, 16L, false),
          makeDistributionMetricUpdate(
              "distributionName", "distributionNamespace", "s2", 18L, 2L, 2L, 16L, true)));
  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  MetricQueryResults result = dataflowMetrics.allMetrics();

  // Streaming exposes only attempted values: asking for committed must throw.
  try {
    result.getDistributions().iterator().next().getCommitted();
    fail("Expected UnsupportedOperationException");
  } catch (UnsupportedOperationException expected) {
    assertThat(
        expected.getMessage(),
        containsString(
            "This runner does not currently support committed"
                + " metrics results. Please use 'attempted' instead."));
  }
  assertThat(
      result.getDistributions(),
      contains(
          attemptedMetricsResult(
              "distributionNamespace",
              "distributionName",
              "myStepName",
              DistributionResult.create(18, 2, 2, 16))));
}
Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
From the class TestDataflowRunnerTest, method buildJobMetrics.
/**
 * Wraps the given metric updates in a {@link JobMetrics} response object.
 *
 * @param metricList the metric updates to embed
 * @return a JobMetrics carrying {@code metricList} with a JSON factory attached
 */
private JobMetrics buildJobMetrics(List<MetricUpdate> metricList) {
  JobMetrics result = new JobMetrics();
  // N.B. Setting the factory is necessary in order to get valid JSON.
  result.setFactory(Transport.getJsonFactory());
  result.setMetrics(metricList);
  return result;
}
Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
From the class DataflowMetricsTest, method testSingleCounterUpdates.
@Test
public void testSingleCounterUpdates() throws IOException {
  // A batch job with one counter (tentative + committed pair) should report
  // the committed value for both attempted and committed query results.
  // (Removed dead code: a `MetricUpdate update` had setScalar(1234) called on
  // it but was never added to jobMetrics or asserted on, along with its
  // `stepValue` local.)
  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
  when(options.isStreaming()).thenReturn(false);
  when(job.getDataflowOptions()).thenReturn(options);
  when(job.getState()).thenReturn(State.RUNNING);
  job.jobId = JOB_ID;

  // Map internal step id "s2" to its user-visible name.
  AppliedPTransform<?, ?, ?> myStep = mock(AppliedPTransform.class);
  when(myStep.getFullName()).thenReturn("myStepName");
  job.transformStepNames = HashBiMap.create();
  job.transformStepNames.put(myStep, "s2");

  // The parser relies on the fact that one tentative and one committed metric
  // update exist in the job metrics results.
  MetricUpdate committedUpdate =
      makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1234L, false);
  MetricUpdate tentativeUpdate =
      makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1233L, true);
  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(ImmutableList.of(committedUpdate, tentativeUpdate));
  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  MetricQueryResults result = dataflowMetrics.allMetrics();
  assertThat(
      result.getCounters(),
      containsInAnyOrder(
          attemptedMetricsResult("counterNamespace", "counterName", "myStepName", 1234L)));
  assertThat(
      result.getCounters(),
      containsInAnyOrder(
          committedMetricsResult("counterNamespace", "counterName", "myStepName", 1234L)));
}
Aggregations