Use of com.google.api.services.dataflow.model.MetricUpdate in project beam by apache.
The class DataflowMetricsTest, method makeCounterMetricUpdate.
private MetricUpdate makeCounterMetricUpdate(String name, String namespace, String step, long scalar, boolean tentative) {
  MetricUpdate update = new MetricUpdate();
  update.setScalar(new BigDecimal(scalar));
  MetricStructuredName structuredName = new MetricStructuredName();
  structuredName.setName(name);
  structuredName.setOrigin("user");
  // Tentative updates differ from committed ones only by an extra "tentative" -> "true" context entry.
  ImmutableMap.Builder<String, String> contextBuilder = new ImmutableMap.Builder<>();
  contextBuilder.put("step", step).put("namespace", namespace);
  if (tentative) {
    contextBuilder.put("tentative", "true");
  }
  structuredName.setContext(contextBuilder.build());
  update.setName(structuredName);
  return update;
}
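A minimal usage sketch: the counter name, namespace, step, and values below are illustrative, but the call shape is the same one used by testSingleCounterUpdates later in this listing, which builds a committed/tentative pair for a single counter.

// Hypothetical values: a committed update and its slightly older tentative counterpart.
MetricUpdate committed = makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1234L, false);
MetricUpdate tentative = makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1233L, true);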
Use of com.google.api.services.dataflow.model.MetricUpdate in project beam by apache.
The class TestDataflowRunnerTest, method generateMockMetrics.
private List<MetricUpdate> generateMockMetrics(boolean success, boolean tentative) {
  MetricStructuredName name = new MetricStructuredName();
  name.setName(success ? "PAssertSuccess" : "PAssertFailure");
  name.setContext(
      tentative ? ImmutableMap.of("tentative", "") : ImmutableMap.<String, String>of());
  MetricUpdate metric = new MetricUpdate();
  metric.setName(name);
  metric.setScalar(BigDecimal.ONE);
  return Lists.newArrayList(metric);
}
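A hedged sketch of how such mock updates could be handed to the runner's metric check; the wiring below is illustrative rather than the test class's exact code, though JobMetrics.setMetrics is the same call used by testSingleCounterUpdates later in this listing.

// Pretend the job reported a single tentative PAssert success.
JobMetrics jobMetrics = new JobMetrics();
jobMetrics.setMetrics(generateMockMetrics(true /* success */, true /* tentative */));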
Use of com.google.api.services.dataflow.model.MetricUpdate in project beam by apache.
The class TestDataflowRunnerTest, method generateMockStreamingMetrics.
private List<MetricUpdate> generateMockStreamingMetrics(Map<String, BigDecimal> metricMap) {
  List<MetricUpdate> metrics = Lists.newArrayList();
  for (Map.Entry<String, BigDecimal> entry : metricMap.entrySet()) {
    MetricStructuredName name = new MetricStructuredName();
    name.setName(entry.getKey());
    MetricUpdate metric = new MetricUpdate();
    metric.setName(name);
    metric.setScalar(entry.getValue());
    metrics.add(metric);
  }
  return metrics;
}
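For illustration only: the metric names below are hypothetical, chosen to resemble PAssert success and failure counters in a streaming job.

List<MetricUpdate> metrics = generateMockStreamingMetrics(ImmutableMap.of(
    "PAssertSuccess", BigDecimal.ONE,
    "PAssertFailure", BigDecimal.ZERO));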
Use of com.google.api.services.dataflow.model.MetricUpdate in project beam by apache.
The class TestDataflowRunner, method checkForPAssertSuccess.
/**
* Check that PAssert expectations were met.
*
* <p>If the pipeline is not in a failed/cancelled state and no PAsserts were used within the
* pipeline, then this method will state that all PAsserts succeeded.
*
* @return Optional.of(false) if we are certain a PAssert failed. Optional.of(true) if we are
* certain all PAsserts passed. Optional.absent() if the evidence is inconclusive, including
* when the pipeline may have failed for other reasons.
*/
@VisibleForTesting
Optional<Boolean> checkForPAssertSuccess(DataflowPipelineJob job) {
  JobMetrics metrics = getJobMetrics(job);
  if (metrics == null || metrics.getMetrics() == null) {
    LOG.warn("Metrics not present for Dataflow job {}.", job.getJobId());
    return Optional.absent();
  }
  int successes = 0;
  int failures = 0;
  for (MetricUpdate metric : metrics.getMetrics()) {
    if (metric.getName() == null
        || metric.getName().getContext() == null
        || !metric.getName().getContext().containsKey(TENTATIVE_COUNTER)) {
      // Don't double count using the non-tentative version of the metric.
      continue;
    }
    if (PAssert.SUCCESS_COUNTER.equals(metric.getName().getName())) {
      successes += ((BigDecimal) metric.getScalar()).intValue();
    } else if (PAssert.FAILURE_COUNTER.equals(metric.getName().getName())) {
      failures += ((BigDecimal) metric.getScalar()).intValue();
    }
  }
  if (failures > 0) {
    LOG.info("Failure result for Dataflow job {}. Found {} success, {} failures out of "
        + "{} expected assertions.",
        job.getJobId(), successes, failures, expectedNumberOfAssertions);
    return Optional.of(false);
  } else if (successes >= expectedNumberOfAssertions) {
    LOG.info("Success result for Dataflow job {}."
        + " Found {} success, {} failures out of {} expected assertions.",
        job.getJobId(), successes, failures, expectedNumberOfAssertions);
    return Optional.of(true);
  }
  // If the job failed, this is a definite failure. We only cancel jobs when they fail.
  State state = job.getState();
  if (state == State.FAILED || state == State.CANCELLED) {
    LOG.info("Dataflow job {} terminated in failure state {} without reporting a failed assertion",
        job.getJobId(), state);
    return Optional.absent();
  }
  LOG.info("Inconclusive results for Dataflow job {}."
      + " Found {} success, {} failures out of {} expected assertions.",
      job.getJobId(), successes, failures, expectedNumberOfAssertions);
  return Optional.absent();
}
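A caller could collapse the tri-state result into a pass/fail decision roughly as sketched below. This is an illustration, not the runner's verbatim handling; it assumes the inconclusive case is resolved elsewhere from the job's terminal state.

Optional<Boolean> success = checkForPAssertSuccess(job);
if (success.isPresent() && !success.get()) {
  // A failed assertion was definitely observed.
  throw new AssertionError("PAssert failed in Dataflow job " + job.getJobId());
}
// Optional.absent() remains inconclusive and must be resolved from the job state.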
Use of com.google.api.services.dataflow.model.MetricUpdate in project beam by apache.
The class DataflowMetricsTest, method testSingleCounterUpdates.
@Test
public void testSingleCounterUpdates() throws IOException {
  JobMetrics jobMetrics = new JobMetrics();
  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  when(job.getState()).thenReturn(State.RUNNING);
  job.jobId = JOB_ID;
  MetricUpdate update = new MetricUpdate();
  long stepValue = 1234L;
  update.setScalar(new BigDecimal(stepValue));
  // The parser relies on the fact that one tentative and one committed metric update exist in
  // the job metrics results.
  MetricUpdate mu1 =
      makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1234L, false);
  MetricUpdate mu1Tentative =
      makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1233L, true);
  jobMetrics.setMetrics(ImmutableList.of(mu1, mu1Tentative));
  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);
  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  MetricQueryResults result = dataflowMetrics.queryMetrics(null);
  assertThat(result.counters(), containsInAnyOrder(
      attemptedMetricsResult("counterNamespace", "counterName", "s2", 1233L)));
  assertThat(result.counters(), containsInAnyOrder(
      committedMetricsResult("counterNamespace", "counterName", "s2", 1234L)));
}
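Note the asymmetry the assertions encode: the attempted value (1233L) is taken from the tentative update, while the committed value (1234L) comes from the non-tentative one, which is why makeCounterMetricUpdate is invoked once with tentative set to true and once with it set to false.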