Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class DirectRunnerTest, method testWaitUntilFinishTimeout.
@Test
public void testWaitUntilFinishTimeout() throws Exception {
  DirectOptions options = PipelineOptionsFactory.as(DirectOptions.class);
  options.setBlockOnRun(false);
  options.setRunner(DirectRunner.class);
  Pipeline p = Pipeline.create(options);
  p.apply(Create.of(1L))
      .apply(
          ParDo.of(
              new DoFn<Long, Long>() {
                @ProcessElement
                public void hang(ProcessContext context) throws InterruptedException {
                  // Hangs "forever"
                  Thread.sleep(Long.MAX_VALUE);
                }
              }));
  PipelineResult result = p.run();
  // The pipeline should never complete.
  assertThat(result.getState(), is(State.RUNNING));
  // Must time out, otherwise this test will never complete.
  assertEquals(null, result.waitUntilFinish(Duration.millis(1L)));
  // Ensure multiple calls complete.
  assertEquals(null, result.waitUntilFinish(Duration.millis(1L)));
}
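A detail worth noting: waitUntilFinish(Duration) returns null when the timeout elapses before the pipeline reaches a terminal state, and the pipeline keeps running afterwards. A minimal follow-up sketch, reusing the setup above (not part of the original test), that cancels the leaked work:

  PipelineResult result = p.run();
  if (result.waitUntilFinish(Duration.millis(1L)) == null) {
    // null means the timeout elapsed before a terminal state was reached.
    result.cancel(); // PipelineResult.cancel() may throw IOException
  }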
Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class TestSamzaRunner, method run.
@Override
public PipelineResult run(Pipeline pipeline) {
  try {
    final PipelineResult result = delegate.run(pipeline);
    result.waitUntilFinish();
    return result;
  } catch (Throwable t) {
    // Search for an AssertionError. If present, use it as the cause of the pipeline failure.
    Throwable current = t;
    while (current != null) {
      if (current instanceof AssertionError) {
        throw (AssertionError) current;
      }
      current = current.getCause();
    }
    throw t;
  }
}
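This cause-chain walk is a common pattern in Beam test runners: PAssert failures surface inside the runner wrapped in other exceptions, and unwrapping them lets JUnit report the original assertion failure directly. A hypothetical standalone helper sketching the same logic (findAssertionError is not part of Beam):

  /** Returns the first AssertionError in the cause chain of t, or null if none exists. */
  static AssertionError findAssertionError(Throwable t) {
    for (Throwable current = t; current != null; current = current.getCause()) {
      if (current instanceof AssertionError) {
        return (AssertionError) current;
      }
    }
    return null;
  }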
Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class TestPortableRunner, method run.
@Override
public PipelineResult run(Pipeline pipeline) {
  TestPortablePipelineOptions testPortablePipelineOptions =
      options.as(TestPortablePipelineOptions.class);
  String jobServerHostPort;
  JobServerDriver jobServerDriver;
  Class<JobServerDriver> jobServerDriverClass = testPortablePipelineOptions.getJobServerDriver();
  String[] parameters = testPortablePipelineOptions.getJobServerConfig();
  // Spin up the configured job server for the duration of the test run.
  try {
    jobServerDriver =
        InstanceBuilder.ofType(jobServerDriverClass)
            .fromFactoryMethod("fromParams")
            .withArg(String[].class, parameters)
            .build();
    jobServerHostPort = jobServerDriver.start();
  } catch (IOException e) {
    throw new RuntimeException("Failed to start job server", e);
  }
  try {
    PortablePipelineOptions portableOptions = options.as(PortablePipelineOptions.class);
    portableOptions.setRunner(PortableRunner.class);
    portableOptions.setJobEndpoint(jobServerHostPort);
    PortableRunner runner = PortableRunner.fromOptions(portableOptions);
    PipelineResult result = runner.run(pipeline);
    assertThat("Pipeline did not succeed.", result.waitUntilFinish(), Matchers.is(State.DONE));
    return result;
  } finally {
    // Shut the job server down even if the pipeline fails.
    jobServerDriver.stop();
  }
}
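InstanceBuilder here acts as a reflective factory: it locates a static fromParams(String[]) method on the configured driver class and invokes it with the job server parameters. A plain-reflection sketch of roughly the same call, assuming the driver class does expose such a static factory (uses java.lang.reflect.Method):

  Method factory = jobServerDriverClass.getMethod("fromParams", String[].class);
  // A static method, hence the null receiver; invoke() throws ReflectiveOperationException.
  JobServerDriver driver = (JobServerDriver) factory.invoke(null, (Object) parameters);
  String hostPort = driver.start();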
Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class PortableRunnerTest, method extractsMetrics.
@Test
public void extractsMetrics() throws Exception {
  JobApi.MetricResults metricResults = generateMetricResults();
  createJobServer(JobState.Enum.DONE, metricResults);
  PortableRunner runner = PortableRunner.create(options, ManagedChannelFactory.createInProcess());
  PipelineResult result = runner.run(p);
  result.waitUntilFinish();
  MetricQueryResults metricQueryResults = result.metrics().allMetrics();
  assertThat(metricQueryResults.getCounters().iterator().next().getAttempted(), is(COUNTER_VALUE));
  DistributionResult distribution =
      metricQueryResults.getDistributions().iterator().next().getAttempted();
  assertThat(distribution.getCount(), is(DIST_COUNT));
  assertThat(distribution.getMax(), is(DIST_MAX));
  assertThat(distribution.getMin(), is(DIST_MIN));
  assertThat(distribution.getSum(), is(DIST_SUM));
  assertThat(metricQueryResults.getGauges().iterator().next().getAttempted().getValue(), is(GAUGE_VALUE));
}
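allMetrics() returns everything the runner reported. When a test only cares about a specific metric, the query can be narrowed with a MetricsFilter instead; a short sketch (the namespace and name below are placeholders):

  MetricQueryResults filtered =
      result.metrics()
          .queryMetrics(
              MetricsFilter.builder()
                  .addNameFilter(MetricNameFilter.named("myNamespace", "myCounter"))
                  .build());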
Use of org.apache.beam.sdk.PipelineResult in project beam by apache.
From the class ProvidedSparkContextTest, method testWithValidProvidedContext.
private void testWithValidProvidedContext(JavaSparkContext jsc) throws Exception {
  SparkContextOptions options = getSparkContextOptions(jsc);
  Pipeline p = Pipeline.create(options);
  PCollection<String> inputWords = p.apply(Create.of(WORDS).withCoder(StringUtf8Coder.of()));
  PCollection<String> output =
      inputWords
          .apply(new WordCount.CountWords())
          .apply(MapElements.via(new WordCount.FormatAsTextFn()));
  PAssert.that(output).containsInAnyOrder(EXPECTED_COUNT_SET);
  // Run the pipeline and verify that all PAsserts succeeded.
  PipelineResult result = p.run();
  TestPipeline.verifyPAssertsSucceeded(p, result);
}
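getSparkContextOptions is a test-local helper not shown here. A hedged sketch of what such a helper presumably configures (the setters shown do exist on SparkContextOptions; the exact helper body is an assumption):

  SparkContextOptions options = PipelineOptionsFactory.as(SparkContextOptions.class);
  options.setRunner(SparkRunner.class);
  options.setUsesProvidedSparkContext(true); // reuse the caller's JavaSparkContext
  options.setProvidedSparkContext(jsc);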