Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class MetricsReaderTest, the method testStartTimeIsTheMinimumFromAllCollectedDistributions:
@Test
public void testStartTimeIsTheMinimumFromAllCollectedDistributions() {
  List<Integer> sampleInputData = Arrays.asList(1, 2, 3, 4, 5);
  createTestPipelineWithBranches(sampleInputData);
  PipelineResult result = testPipeline.run();
  MetricsReader reader = new MetricsReader(result, NAMESPACE, 0);
  assertEquals(1, reader.getStartTimeMetric("timeDist"));
}
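For context, reader.getStartTimeMetric boils down to a distribution query against result.metrics(), taking the minimum across all branches that reported the metric. A minimal sketch of that query, assuming the NAMESPACE constant and "timeDist" metric name from the test above (minStartTime is a hypothetical helper, not part of MetricsReader):

import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.metrics.DistributionResult;
import org.apache.beam.sdk.metrics.MetricNameFilter;
import org.apache.beam.sdk.metrics.MetricQueryResults;
import org.apache.beam.sdk.metrics.MetricResult;
import org.apache.beam.sdk.metrics.MetricsFilter;

/** Hypothetical helper: minimum recorded value across all "timeDist" distributions, or -1 if none. */
static long minStartTime(PipelineResult result, String namespace) {
  MetricQueryResults metrics =
      result.metrics().queryMetrics(
          MetricsFilter.builder()
              .addNameFilter(MetricNameFilter.named(namespace, "timeDist"))
              .build());
  long min = Long.MAX_VALUE;
  for (MetricResult<DistributionResult> dist : metrics.getDistributions()) {
    min = Math.min(min, dist.getAttempted().getMin());
  }
  return min == Long.MAX_VALUE ? -1 : min;
}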
Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class BoundedSideInputJoinTest, the method queryMatchesModel:
/**
* Test {@code query} matches {@code model}.
*/
private <T extends KnownSize> void queryMatchesModel(
    String name,
    NexmarkConfiguration config,
    NexmarkQueryTransform<T> query,
    NexmarkQueryModel<T> model,
    boolean streamingMode)
    throws Exception {
  ResourceId sideInputResourceId =
      FileSystems.matchNewResource(
          String.format(
              "%s/BoundedSideInputJoin-%s",
              p.getOptions().getTempLocation(), new Random().nextInt()),
          false);
  config.sideInputUrl = sideInputResourceId.toString();
  try {
    PCollection<KV<Long, String>> sideInput = NexmarkUtils.prepareSideInput(p, config);
    query.setSideInput(sideInput);
    PCollection<Event> events =
        p.apply(
            name + ".Read",
            streamingMode
                ? NexmarkUtils.streamEventsSource(config)
                : NexmarkUtils.batchEventsSource(config));
    PCollection<TimestampedValue<T>> results =
        (PCollection<TimestampedValue<T>>) events.apply(new NexmarkQuery<>(config, query));
    PAssert.that(results).satisfies(model.assertionFor());
    PipelineResult result = p.run();
    result.waitUntilFinish();
  } finally {
    NexmarkUtils.cleanUpSideInput(config);
  }
}
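Note that result.waitUntilFinish() with no argument blocks until the pipeline reaches a terminal state. When a test should not hang indefinitely, the overload taking a timeout can be used instead; a sketch, assuming the runner supports cancellation (runWithTimeout is a hypothetical helper):

import java.io.IOException;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.PipelineResult;
import org.joda.time.Duration;

/** Hypothetical helper: wait up to the given timeout, then cancel if the job is still running. */
static void runWithTimeout(Pipeline p, Duration timeout) throws IOException {
  PipelineResult result = p.run();
  PipelineResult.State state = result.waitUntilFinish(timeout);
  // Some runners return null when the wait times out before a terminal state is reached.
  if (state == null || !state.isTerminal()) {
    result.cancel();
  }
}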
Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class SessionSideInputJoinTest, the method queryMatchesModel:
/**
* Test {@code query} matches {@code model}.
*/
private <T extends KnownSize> void queryMatchesModel(
    String name,
    NexmarkConfiguration config,
    NexmarkQueryTransform<T> query,
    NexmarkQueryModel<T> model,
    boolean streamingMode)
    throws Exception {
  ResourceId sideInputResourceId =
      FileSystems.matchNewResource(
          String.format(
              "%s/SessionSideInputJoin-%s",
              p.getOptions().getTempLocation(), new Random().nextInt()),
          false);
  config.sideInputUrl = sideInputResourceId.toString();
  try {
    PCollection<KV<Long, String>> sideInput = NexmarkUtils.prepareSideInput(p, config);
    query.setSideInput(sideInput);
    PCollection<Event> events =
        p.apply(
            name + ".Read",
            streamingMode
                ? NexmarkUtils.streamEventsSource(config)
                : NexmarkUtils.batchEventsSource(config));
    PCollection<TimestampedValue<T>> results =
        (PCollection<TimestampedValue<T>>) events.apply(new NexmarkQuery<>(config, query));
    PAssert.that(results).satisfies(model.assertionFor());
    PipelineResult result = p.run();
    result.waitUntilFinish();
  } finally {
    NexmarkUtils.cleanUpSideInput(config);
  }
}
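As in the bounded variant above, the assertion is attached before p.run(): model.assertionFor() supplies a SerializableFunction<Iterable<T>, Void> that PAssert evaluates against the materialized results. An inline equivalent for a hypothetical PCollection<Long>, to make that contract concrete (assertAllPositive is an illustrative name, not part of the Nexmark tests):

import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.values.PCollection;

/** Hypothetical example: fail the pipeline if any element is non-positive. */
static void assertAllPositive(PCollection<Long> output) {
  PAssert.that(output)
      .satisfies(
          (SerializableFunction<Iterable<Long>, Void>)
              values -> {
                for (Long v : values) {
                  if (v <= 0) {
                    throw new AssertionError("expected positive value, got " + v);
                  }
                }
                return null; // satisfies requires a Void return
              });
}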
Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class QueryTest, the method queryMatchesModel:
/**
* Test {@code query} matches {@code model}.
*/
private <T extends KnownSize> void queryMatchesModel(
    String name, NexmarkQueryTransform<T> query, NexmarkQueryModel<T> model, boolean streamingMode) {
  NexmarkUtils.setupPipeline(NexmarkUtils.CoderStrategy.HAND, p);
  PCollection<Event> events =
      p.apply(
          name + ".Read",
          streamingMode
              ? NexmarkUtils.streamEventsSource(CONFIG)
              : NexmarkUtils.batchEventsSource(CONFIG));
  PCollection<TimestampedValue<T>> results =
      (PCollection<TimestampedValue<T>>) events.apply(new NexmarkQuery<>(CONFIG, query));
  PAssert.that(results).satisfies(model.assertionFor());
  PipelineResult result = p.run();
  result.waitUntilFinish();
}
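In these tests, streamingMode only selects between a streaming and a batch event source; the pipeline itself comes from the test fixture. If a runner needs to be switched into streaming execution explicitly, the usual knob is StreamingOptions; a sketch under that assumption (newPipeline is a hypothetical factory, not part of QueryTest):

import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.StreamingOptions;
import org.apache.beam.sdk.testing.TestPipeline;

/** Hypothetical factory: build a TestPipeline with streaming execution toggled on or off. */
static TestPipeline newPipeline(boolean streamingMode) {
  PipelineOptions options = PipelineOptionsFactory.create();
  options.as(StreamingOptions.class).setStreaming(streamingMode);
  return TestPipeline.fromOptions(options);
}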
Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class BigQueryIOPushDownIT, the method readUsingDefaultMethod:
@Test
public void readUsingDefaultMethod() {
  sqlEnv.executeDdl(String.format(CREATE_TABLE_STATEMENT, Method.DEFAULT.toString()));
  BeamRelNode beamRelNode = sqlEnv.parseQuery(SELECT_STATEMENT);
  BeamSqlRelUtils.toPCollection(pipeline, beamRelNode)
      .apply(ParDo.of(new TimeMonitor<>(NAMESPACE, READ_TIME_METRIC)));
  PipelineResult result = pipeline.run();
  result.waitUntilFinish();
  collectAndPublishMetrics(result, "_default");
}
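The TimeMonitor transform used here records a wall-clock timestamp per element into a Beam Distribution metric, which is what a MetricsReader later reads back from the PipelineResult (minimum as start time, maximum as end time, as in the first example). A pass-through sketch in that spirit (TimeMonitorSketch is hypothetical; the real TimeMonitor lives in org.apache.beam.sdk.testutils.metrics):

import org.apache.beam.sdk.metrics.Distribution;
import org.apache.beam.sdk.metrics.Metrics;
import org.apache.beam.sdk.transforms.DoFn;

/** Hypothetical pass-through DoFn: records processing time for each element into a distribution. */
class TimeMonitorSketch<T> extends DoFn<T, T> {
  private final Distribution timeDistribution;

  TimeMonitorSketch(String namespace, String name) {
    this.timeDistribution = Metrics.distribution(namespace, name);
  }

  @ProcessElement
  public void processElement(ProcessContext c) {
    timeDistribution.update(System.currentTimeMillis());
    c.output(c.element());
  }
}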