Use of org.apache.beam.sdk.testing.TestPipeline in project components by Talend, from the class TCompBoundedSourceSinkAdapterTest, method testPipeline:
@Test
public void testPipeline() {
    Pipeline pipeline = TestPipeline.create();

    // Source-side component properties: emit "a", "b", "c".
    FixedFlowProperties fixedFlowProperties = new FixedFlowProperties("fixedFlowProperties");
    fixedFlowProperties.init();
    fixedFlowProperties.data.setValue("a;b;c");
    fixedFlowProperties.rowDelimited.setValue(";");

    // Sink-side component properties: assert the same elements arrive (order-independent).
    AssertResultProperties assertResultProperties = new AssertResultProperties("assertResultProperties");
    assertResultProperties.init();
    assertResultProperties.data.setValue("b;c;a");
    assertResultProperties.rowDelimited.setValue(";");

    FixedFlowSource fixedFlowSource = new FixedFlowSource();
    fixedFlowSource.initialize(null, fixedFlowProperties);

    AssertResultSink assertResultSink = new AssertResultSink();
    assertResultSink.initialize(null, assertResultProperties);

    // Adapt the Talend component source and sink so Beam's Read/Write can drive them.
    TCompBoundedSourceAdapter source = new TCompBoundedSourceAdapter(fixedFlowSource);
    TCompSinkAdapter sink = new TCompSinkAdapter(assertResultSink);

    pipeline.apply(Read.from(source)).apply(Write.to(sink));
    pipeline.run();
}
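The adapters bridge Talend's component model to Beam's Read and Write transforms, and the test is essentially an order-independent round-trip check. The same shape of check can be written with Beam's own test utilities; here is a minimal sketch using Create and PAssert, where the element values are illustrative rather than taken from the Talend components:

    import org.apache.beam.sdk.testing.PAssert;
    import org.apache.beam.sdk.testing.TestPipeline;
    import org.apache.beam.sdk.transforms.Create;
    import org.apache.beam.sdk.values.PCollection;

    public class RoundTripSketch {
        public static void main(String[] args) {
            // Enforcement is relaxed because this TestPipeline is not used as a JUnit @Rule.
            TestPipeline pipeline = TestPipeline.create().enableAbandonedNodeEnforcement(false);
            PCollection<String> rows = pipeline.apply(Create.of("a", "b", "c"));
            // Like the AssertResultSink above, PAssert checks contents regardless of order.
            PAssert.that(rows).containsInAnyOrder("b", "c", "a");
            pipeline.run().waitUntilFinish();
        }
    }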
Use of org.apache.beam.sdk.testing.TestPipeline in project beam by apache, from the class PubsubIOTest, method testRuntimeValueProviderTopic:
@Test
public void testRuntimeValueProviderTopic() {
    TestPipeline pipeline = TestPipeline.create();
    // newProvider creates a ValueProvider whose value is only available at pipeline runtime.
    ValueProvider<String> topic = pipeline.newProvider("projects/project/topics/topic");
    Read<String> pubsubRead = PubsubIO.readStrings().fromTopic(topic);
    pipeline.apply(pubsubRead);
    // The topic provider is set at construction time, but its value is not yet accessible.
    assertThat(pubsubRead.getTopicProvider(), not(nullValue()));
    assertThat(pubsubRead.getTopicProvider().isAccessible(), is(false));
}
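Outside of tests, a runtime topic usually arrives through a ValueProvider-typed pipeline option rather than TestPipeline.newProvider. A minimal sketch, where the MyOptions interface and the option name are illustrative assumptions:

    import org.apache.beam.sdk.options.Description;
    import org.apache.beam.sdk.options.PipelineOptions;
    import org.apache.beam.sdk.options.ValueProvider;

    // Hypothetical options interface; the ValueProvider getter/setter pattern is the point.
    public interface MyOptions extends PipelineOptions {
        @Description("Pub/Sub topic in the form projects/<project>/topics/<topic>")
        ValueProvider<String> getTopic();
        void setTopic(ValueProvider<String> value);
    }

Passing options.getTopic() to PubsubIO.readStrings().fromTopic(...) defers resolution of the topic until execution, which is exactly what the isAccessible() assertion above verifies at construction time.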
Use of org.apache.beam.sdk.testing.TestPipeline in project beam by apache, from the class BeamSortRelTest, method testOrderBy_exception:
@Test
public void testOrderBy_exception() {
    thrown.expect(UnsupportedOperationException.class);
    thrown.expectMessage("`ORDER BY` is only supported for GlobalWindows");
    String sql =
        "INSERT INTO SUB_ORDER_RAM(order_id, site_id) "
            + "SELECT order_id, COUNT(*) "
            + "FROM ORDER_DETAILS "
            + "GROUP BY order_id, TUMBLE(order_time, INTERVAL '1' HOUR) "
            + "ORDER BY order_id asc limit 11";
    TestPipeline pipeline = TestPipeline.create();
    compilePipeline(sql, pipeline);
}
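The expected failure comes from combining TUMBLE-windowed grouping with ORDER BY: per the exception message, Beam SQL only supports `ORDER BY` under the global window. A minimal sketch of a query shape that should compile with the same test helper, assuming the illustrative ORDER_DETAILS table is queried without event-time windowing:

    TestPipeline pipeline = TestPipeline.create();
    // ORDER BY with LIMIT is accepted when the input remains in the global window.
    String sql =
        "SELECT order_id, site_id "
            + "FROM ORDER_DETAILS "
            + "ORDER BY order_id ASC LIMIT 11";
    compilePipeline(sql, pipeline);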
Use of org.apache.beam.sdk.testing.TestPipeline in project beam by apache, from the class FlinkTransformOverridesTest, method testRunnerDeterminedSharding:
@Test
public void testRunnerDeterminedSharding() {
    FlinkPipelineOptions options = FlinkPipelineOptions.defaults();
    options.setRunner(TestFlinkRunner.class);
    options.setFlinkMaster("[auto]");
    options.setParallelism(5);

    TestPipeline p = TestPipeline.fromOptions(options);

    StreamingShardedWriteFactory<Object, Void, Object> factory =
        new StreamingShardedWriteFactory<>(p.getOptions());
    // The original write leaves the shard count unset, so the factory must choose one.
    WriteFiles<Object, Void, Object> original = WriteFiles.to(new TestSink(tmpFolder.toString()));

    @SuppressWarnings("unchecked")
    PCollection<Object> objs = (PCollection) p.apply(Create.empty(VoidCoder.of()));

    AppliedPTransform<PCollection<Object>, WriteFilesResult<Void>, WriteFiles<Object, Void, Object>>
        originalApplication =
            AppliedPTransform.of(
                "writefiles", PValues.expandInput(objs), Collections.emptyMap(),
                original, ResourceHints.create(), p);

    WriteFiles<Object, Void, Object> replacement =
        (WriteFiles<Object, Void, Object>)
            factory.getReplacementTransform(originalApplication).getTransform();

    assertThat(replacement, not(equalTo((Object) original)));
    // With parallelism 5, the replacement transform requests 10 shards.
    assertThat(replacement.getNumShardsProvider().get(), is(10));
}
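Runner-determined sharding only applies when the write leaves the shard count unset, as the factory override demonstrates here. A pipeline author can opt out by fixing the count explicitly; a minimal sketch with TextIO, which is an assumed stand-in for whatever file-based IO the pipeline uses:

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.TextIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;

    public class FixedShardsSketch {
        public static void main(String[] args) {
            Pipeline p = Pipeline.create(PipelineOptionsFactory.create());
            p.apply(Create.of("a", "b", "c"))
                // An explicit shard count opts out of runner-determined sharding.
                .apply(TextIO.write().to("/tmp/out").withNumShards(3));
            p.run().waitUntilFinish();
        }
    }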
Use of org.apache.beam.sdk.testing.TestPipeline in project beam by apache, from the class DataflowRunnerTest, method testStreamingWriteOverride:
private void testStreamingWriteOverride(PipelineOptions options, int expectedNumShards) {
    TestPipeline p = TestPipeline.fromOptions(options);

    StreamingShardedWriteFactory<Object, Void, Object> factory =
        new StreamingShardedWriteFactory<>(p.getOptions());
    WriteFiles<Object, Void, Object> original = WriteFiles.to(new TestSink(tmpFolder.toString()));

    @SuppressWarnings("unchecked")
    PCollection<Object> objs = (PCollection) p.apply(Create.empty(VoidCoder.of()));

    AppliedPTransform<PCollection<Object>, WriteFilesResult<Void>, WriteFiles<Object, Void, Object>>
        originalApplication =
            AppliedPTransform.of(
                "writefiles", PValues.expandInput(objs), Collections.emptyMap(),
                original, ResourceHints.create(), p);

    WriteFiles<Object, Void, Object> replacement =
        (WriteFiles<Object, Void, Object>)
            factory.getReplacementTransform(originalApplication).getTransform();

    assertThat(replacement, not(equalTo((Object) original)));
    assertThat(replacement.getNumShardsProvider().get(), equalTo(expectedNumShards));

    // Verify the factory maps the replacement's output back onto the original output.
    WriteFilesResult<Void> originalResult = objs.apply(original);
    WriteFilesResult<Void> replacementResult = objs.apply(replacement);
    Map<PCollection<?>, ReplacementOutput> res =
        factory.mapOutputs(PValues.expandOutput(originalResult), replacementResult);

    assertEquals(1, res.size());
    assertEquals(
        originalResult.getPerDestinationOutputFilenames(),
        res.get(replacementResult.getPerDestinationOutputFilenames()).getOriginal().getValue());
}
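Unlike the other snippets, this is a parameterized helper rather than a test: callers supply Dataflow options plus the shard count they expect the factory to choose. A sketch of a possible caller, where the maxNumWorkers value and the expected count of 20 are illustrative, on the assumption that the factory derives its shard count from the configured maximum worker count:

    // Hypothetical caller of the helper above; values are illustrative.
    @Test
    public void testStreamingWriteOverrideWithWorkerCount() {
        PipelineOptions options = TestPipeline.testingPipelineOptions();
        options.as(DataflowPipelineWorkerPoolOptions.class).setMaxNumWorkers(10);
        // Assumes the factory picks 2 * maxNumWorkers when no sharding is specified.
        testStreamingWriteOverride(options, 20);
    }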