Use of org.apache.beam.sdk.testutils.NamedTestResult in project beam by apache.
From the class KafkaIOIT, method readMetrics.
/**
 * Builds the set of timing results for the Kafka IO test: write time, read time,
 * and their sum as the overall run time.
 */
private Set<NamedTestResult> readMetrics(PipelineResult writeResult, PipelineResult readResult) {
    // Turns a start/end time metric pair into a NamedTestResult holding the duration in seconds.
    BiFunction<MetricsReader, String, NamedTestResult> toNamedResult = (metricsReader, name) -> {
        long startMillis = metricsReader.getStartTimeMetric(name);
        long endMillis = metricsReader.getEndTimeMetric(name);
        return NamedTestResult.create(TEST_ID, TIMESTAMP, name, (endMillis - startMillis) / 1e3);
    };
    NamedTestResult writeTime = toNamedResult.apply(new MetricsReader(writeResult, NAMESPACE), WRITE_TIME_METRIC_NAME);
    NamedTestResult readTime = toNamedResult.apply(new MetricsReader(readResult, NAMESPACE), READ_TIME_METRIC_NAME);
    // Overall run time is reported as the sum of the write and read phases.
    NamedTestResult runTime = NamedTestResult.create(TEST_ID, TIMESTAMP, RUN_TIME_METRIC_NAME, writeTime.getValue() + readTime.getValue());
    return ImmutableSet.of(readTime, writeTime, runTime);
}
Use of org.apache.beam.sdk.testutils.NamedTestResult in project beam by apache.
From the class KafkaIOIT, method testKafkaIOReadsAndWritesCorrectlyInStreaming.
@Test
public void testKafkaIOReadsAndWritesCorrectlyInStreaming() throws IOException {
// Use batch pipeline to write records.
writePipeline.apply("Generate records", Read.from(new SyntheticBoundedSource(sourceOptions))).apply("Measure write time", ParDo.of(new TimeMonitor<>(NAMESPACE, WRITE_TIME_METRIC_NAME))).apply("Write to Kafka", writeToKafka());
// Use streaming pipeline to read Kafka records.
readPipeline.getOptions().as(Options.class).setStreaming(true);
readPipeline.apply("Read from unbounded Kafka", readFromKafka()).apply("Measure read time", ParDo.of(new TimeMonitor<>(NAMESPACE, READ_TIME_METRIC_NAME))).apply("Map records to strings", MapElements.via(new MapKafkaRecordsToStrings())).apply("Counting element", ParDo.of(new CountingFn(NAMESPACE, READ_ELEMENT_METRIC_NAME)));
// Write must complete before the read pipeline starts so all records are in Kafka.
PipelineResult writeResult = writePipeline.run();
writeResult.waitUntilFinish();
PipelineResult readResult = readPipeline.run();
// The read pipeline is unbounded, so bound the wait with the configured timeout.
PipelineResult.State readState = readResult.waitUntilFinish(Duration.standardSeconds(options.getReadTimeout()));
// If the wait timed out the pipeline is still running and needs an explicit cancel.
cancelIfTimeouted(readResult, readState);
// Every generated record should have been read back from Kafka exactly once.
assertEquals(sourceOptions.numRecords, readElementMetric(readResult, NAMESPACE, READ_ELEMENT_METRIC_NAME));
// Metrics are only published to InfluxDB for real-cluster runs, not Testcontainers.
if (!options.isWithTestcontainers()) {
Set<NamedTestResult> metrics = readMetrics(writeResult, readResult);
IOITMetrics.publishToInflux(TEST_ID, TIMESTAMP, metrics, settings);
}
}
Use of org.apache.beam.sdk.testutils.NamedTestResult in project beam by apache.
From the class BigQueryIOIT, method extractAndPublishTime.
/** Reads the named time metric from the pipeline result and publishes it to InfluxDB. */
private void extractAndPublishTime(PipelineResult pipelineResult, String writeTimeMetricName) {
    final MetricsReader metricsReader = new MetricsReader(pipelineResult, NAMESPACE);
    final NamedTestResult timeResult = getMetricSupplier(writeTimeMetricName).apply(metricsReader);
    IOITMetrics.publishToInflux(TEST_ID, TEST_TIMESTAMP, Collections.singletonList(timeResult), settings);
}
Use of org.apache.beam.sdk.testutils.NamedTestResult in project beam by apache.
From the class LoadTest, method readMetrics.
/**
 * Collects the load-test metrics: total runtime in seconds and the total processed
 * byte count, both read from the pipeline's metric namespace.
 */
private List<NamedTestResult> readMetrics(Timestamp timestamp, PipelineResult result, String testId) {
    final MetricsReader metricsReader = new MetricsReader(result, metricsNamespace);
    final String timestampText = timestamp.toString();
    // Start/end time metrics are in milliseconds; report the difference in seconds.
    final double runtimeSeconds = (metricsReader.getEndTimeMetric("runtime") - metricsReader.getStartTimeMetric("runtime")) / 1000D;
    final NamedTestResult runtime = NamedTestResult.create(testId, timestampText, buildMetric("runtime_sec"), runtimeSeconds);
    final NamedTestResult totalBytes = NamedTestResult.create(testId, timestampText, buildMetric("total_bytes_count"), metricsReader.getCounterMetric("totalBytes.count"));
    return Arrays.asList(runtime, totalBytes);
}
Use of org.apache.beam.sdk.testutils.NamedTestResult in project beam by apache.
From the class LoadTest, method run.
/**
 * Runs the load test, collects and publishes test results to various data store and/or console.
 */
public PipelineResult run() throws IOException {
    // Capture the start timestamp before building/running the pipeline.
    final Timestamp startTimestamp = Timestamp.now();
    // Builds the test's transforms onto the pipeline; must happen before run().
    loadTest();
    final PipelineResult result = pipeline.run();
    // Bound the wait with the configured load-test timeout (minutes).
    result.waitUntilFinish(Duration.standardMinutes(options.getLoadTestTimeout()));
    final String testId = UUID.randomUUID().toString();
    final List<NamedTestResult> collectedMetrics = readMetrics(startTimestamp, result, testId);
    // Always echo results to the console, then fail fast if the pipeline did not succeed.
    ConsoleResultPublisher.publish(collectedMetrics, testId, startTimestamp.toString());
    handleFailure(result, collectedMetrics);
    // InfluxDB publishing is opt-in via pipeline options.
    if (options.getPublishToInfluxDB()) {
        InfluxDBPublisher.publishWithSettings(collectedMetrics, settings);
    }
    return result;
}
Aggregations