Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
Source: the DataflowMetrics class, method queryServiceForMetrics.
/**
 * Queries the Dataflow service for the job's current metrics and translates them into
 * {@link MetricQueryResults} filtered by the supplied {@link MetricsFilter}.
 *
 * @param filter restricts which metric updates are included in the result
 * @return the populated query results, or empty results if the service call fails
 */
private MetricQueryResults queryServiceForMetrics(MetricsFilter filter) {
  // Empty defaults returned when the service is unreachable.
  ImmutableList<MetricResult<Long>> counters = ImmutableList.of();
  ImmutableList<MetricResult<DistributionResult>> distributions = ImmutableList.of();
  ImmutableList<MetricResult<GaugeResult>> gauges = ImmutableList.of();
  JobMetrics jobMetrics;
  try {
    jobMetrics = dataflowClient.getJobMetrics(dataflowPipelineJob.jobId);
  } catch (IOException e) {
    // Log the cause instead of swallowing it, and let the logger handle line termination
    // (the previous message embedded a stray "\n" and dropped the exception entirely).
    LOG.warn("Unable to query job metrics.", e);
    return DataflowMetricQueryResults.create(counters, distributions, gauges);
  }
  // The service may return a null metrics list; normalize it to an empty list.
  List<com.google.api.services.dataflow.model.MetricUpdate> metricUpdates =
      firstNonNull(
          jobMetrics.getMetrics(),
          Collections.<com.google.api.services.dataflow.model.MetricUpdate>emptyList());
  return populateMetricQueryResults(metricUpdates, filter);
}
Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
Source: the DataflowMetricsTest class, method testMultipleCounterUpdates.
@Test
public void testMultipleCounterUpdates() throws IOException {
  // Configure a batch pipeline job in the RUNNING state.
  DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
  when(options.isStreaming()).thenReturn(false);
  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  when(job.getDataflowOptions()).thenReturn(options);
  when(job.getState()).thenReturn(State.RUNNING);
  job.jobId = JOB_ID;
  job.transformStepNames = HashBiMap.create();

  // Map three user transforms to the internal step names s2, s3 and s4.
  AppliedPTransform<?, ?, ?> step2 = mock(AppliedPTransform.class);
  when(step2.getFullName()).thenReturn("myStepName");
  job.transformStepNames.put(step2, "s2");
  AppliedPTransform<?, ?, ?> step3 = mock(AppliedPTransform.class);
  when(step3.getFullName()).thenReturn("myStepName3");
  job.transformStepNames.put(step3, "s3");
  AppliedPTransform<?, ?, ?> step4 = mock(AppliedPTransform.class);
  when(step4.getFullName()).thenReturn("myStepName4");
  job.transformStepNames.put(step4, "s4");

  // The parser relies on the fact that one tentative and one committed metric update exist in
  // the job metrics results.
  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(
      ImmutableList.of(
          makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1233L, false),
          makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1234L, true),
          makeCounterMetricUpdate("otherCounter", "otherNamespace", "s3", 12L, false),
          makeCounterMetricUpdate("otherCounter", "otherNamespace", "s3", 12L, true),
          makeCounterMetricUpdate("counterName", "otherNamespace", "s4", 1200L, false),
          makeCounterMetricUpdate("counterName", "otherNamespace", "s4", 1233L, true),
          // The following counter can not have its name translated thus it won't appear.
          makeCounterMetricUpdate("lostName", "otherNamespace", "s5", 1200L, false),
          makeCounterMetricUpdate("lostName", "otherNamespace", "s5", 1200L, true)));

  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  MetricQueryResults result = dataflowMetrics.allMetrics();

  // Only the three translatable counters show up, under their user-visible step names.
  assertThat(
      result.getCounters(),
      containsInAnyOrder(
          attemptedMetricsResult("counterNamespace", "counterName", "myStepName", 1233L),
          attemptedMetricsResult("otherNamespace", "otherCounter", "myStepName3", 12L),
          attemptedMetricsResult("otherNamespace", "counterName", "myStepName4", 1200L)));
  assertThat(
      result.getCounters(),
      containsInAnyOrder(
          committedMetricsResult("counterNamespace", "counterName", "myStepName", 1233L),
          committedMetricsResult("otherNamespace", "otherCounter", "myStepName3", 12L),
          committedMetricsResult("otherNamespace", "counterName", "myStepName4", 1233L)));
}
Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
Source: the DataflowMetricsTest class, method testDistributionUpdates.
@Test
public void testDistributionUpdates() throws IOException {
  // Batch job in the RUNNING state with a single mapped transform.
  DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
  when(options.isStreaming()).thenReturn(false);
  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  when(job.getDataflowOptions()).thenReturn(options);
  when(job.getState()).thenReturn(State.RUNNING);
  job.jobId = JOB_ID;

  AppliedPTransform<?, ?, ?> step = mock(AppliedPTransform.class);
  when(step.getFullName()).thenReturn("myStepName");
  job.transformStepNames = HashBiMap.create();
  job.transformStepNames.put(step, "s2");

  // The parser relies on the fact that one tentative and one committed metric update exist in
  // the job metrics results.
  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(
      ImmutableList.of(
          makeDistributionMetricUpdate(
              "distributionName", "distributionNamespace", "s2", 18L, 2L, 2L, 16L, false),
          makeDistributionMetricUpdate(
              "distributionName", "distributionNamespace", "s2", 18L, 2L, 2L, 16L, true)));

  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  MetricQueryResults result = dataflowMetrics.allMetrics();

  // Both the attempted and committed views expose the same distribution.
  assertThat(
      result.getDistributions(),
      contains(
          attemptedMetricsResult(
              "distributionNamespace",
              "distributionName",
              "myStepName",
              DistributionResult.create(18, 2, 2, 16))));
  assertThat(
      result.getDistributions(),
      contains(
          committedMetricsResult(
              "distributionNamespace",
              "distributionName",
              "myStepName",
              DistributionResult.create(18, 2, 2, 16))));
}
Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
Source: the DataflowMetricsTest class, method testIgnoreDistributionButGetCounterUpdates.
@Test
public void testIgnoreDistributionButGetCounterUpdates() throws IOException {
  // Batch job in the RUNNING state with a single mapped transform.
  DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
  when(options.isStreaming()).thenReturn(false);
  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  when(job.getDataflowOptions()).thenReturn(options);
  when(job.getState()).thenReturn(State.RUNNING);
  job.jobId = JOB_ID;

  AppliedPTransform<?, ?, ?> step = mock(AppliedPTransform.class);
  when(step.getFullName()).thenReturn("myStepName");
  job.transformStepNames = HashBiMap.create();
  job.transformStepNames.put(step, "s2");

  // The parser relies on the fact that one tentative and one committed metric update exist in
  // the job metrics results.
  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(
      ImmutableList.of(
          makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1233L, false),
          makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1233L, true),
          makeCounterMetricUpdate("otherCounter[MIN]", "otherNamespace", "s2", 0L, false),
          makeCounterMetricUpdate("otherCounter[MIN]", "otherNamespace", "s2", 0L, true)));

  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  MetricQueryResults result = dataflowMetrics.allMetrics();

  // The structured-name counter ("otherCounter[MIN]") is ignored; only the plain counter remains.
  assertThat(
      result.getCounters(),
      containsInAnyOrder(
          attemptedMetricsResult("counterNamespace", "counterName", "myStepName", 1233L)));
  assertThat(
      result.getCounters(),
      containsInAnyOrder(
          committedMetricsResult("counterNamespace", "counterName", "myStepName", 1233L)));
}
Use of com.google.api.services.dataflow.model.JobMetrics in the Apache Beam project.
Source: the DataflowMetricsTest class, method testCachingMetricUpdates.
@Test
public void testCachingMetricUpdates() throws IOException {
  // NOTE: removed an unused local (a Job instance whose current state was set but which was
  // never passed to anything) — it had no effect on the test.
  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
  when(options.isStreaming()).thenReturn(false);
  when(job.getDataflowOptions()).thenReturn(options);
  // A terminal state (DONE) is what makes the metrics cacheable.
  when(job.getState()).thenReturn(State.DONE);
  job.jobId = JOB_ID;

  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(ImmutableList.of());
  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  // Construction alone must not hit the service.
  verify(dataflowClient, times(0)).getJobMetrics(JOB_ID);
  dataflowMetrics.allMetrics();
  verify(dataflowClient, times(1)).getJobMetrics(JOB_ID);
  // The second query is served from the cache, so the call count stays at one.
  dataflowMetrics.allMetrics();
  verify(dataflowClient, times(1)).getJobMetrics(JOB_ID);
}
Aggregations