Use of org.apache.beam.sdk.values.POutput in project component-runtime by Talend.
In the class BeamProcessorChainImpl, the method extractDoFn:
    private static Collection<DoFn<?, ?>> extractDoFn(final CapturingPipeline.TransformWithCoder step,
            final CoderRegistry coderRegistry) {
        final CapturingPipeline capturingPipeline = new CapturingPipeline(PipelineOptionsFactory.create());
        if (coderRegistry != null) {
            capturingPipeline.setCoderRegistry(coderRegistry);
        }
        final POutput apply = capturingPipeline.apply(new PTransform<PBegin, PCollection<Object>>() {

            @Override
            public PCollection<Object> expand(final PBegin input) {
                return PCollection.createPrimitiveOutputInternal(capturingPipeline,
                        WindowingStrategy.globalDefault(), PCollection.IsBounded.BOUNDED, TypingCoder.INSTANCE);
            }

            @Override
            protected Coder<?> getDefaultOutputCoder() {
                return TypingCoder.INSTANCE;
            }
        }).apply(step.getTransform());
        if (PCollectionTuple.class.isInstance(apply) && step.getCoders() != null) {
            final Map<TupleTag<?>, PCollection<?>> all = PCollectionTuple.class.cast(apply).getAll();
            step.getCoders().forEach((k, v) -> {
                final PCollection<?> collection = all.get(k);
                if (collection != null) {
                    collection.setCoder(Coder.class.cast(v));
                }
            });
        } else if (PCollection.class.isInstance(apply) && step.getCoders() != null && !step.getCoders().isEmpty()) {
            PCollection.class.cast(apply).setCoder(Coder.class.cast(step.getCoders().values().iterator().next()));
        }
        final CapturingPipeline.SinkExtractor sinkExtractor = new CapturingPipeline.SinkExtractor();
        capturingPipeline.traverseTopologically(sinkExtractor);
        return sinkExtractor.getOutputs();
    }
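The snippet above applies the captured transform to a throwaway pipeline and then inspects the resulting POutput, downcasting to PCollectionTuple or PCollection before assigning coders. The same runtime type check can be sketched in isolation with stock Beam classes; this is a minimal, standalone illustration, not part of the component-runtime code.

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.coders.StringUtf8Coder;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.POutput;

    public class POutputTypeCheckSketch {

        public static void main(final String[] args) {
            final Pipeline pipeline = Pipeline.create(PipelineOptionsFactory.create());
            // apply(...) returns the transform's concrete output, but it can be held as the
            // POutput supertype, which is what extractDoFn does above.
            final POutput output = pipeline.apply(Create.of("a", "b", "c"));
            // Downcast only after checking the runtime type, mirroring the isInstance checks above.
            if (output instanceof PCollection) {
                ((PCollection<String>) output).setCoder(StringUtf8Coder.of());
            }
        }
    }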
Use of org.apache.beam.sdk.values.POutput in project beam by apache.
In the class ReplacementOutputsTest, the method taggedSucceeds:
    @Test
    public void taggedSucceeds() {
        PCollectionTuple original = PCollectionTuple.of(intsTag, ints).and(strsTag, strs).and(moreIntsTag, moreInts);
        Map<PCollection<?>, ReplacementOutput> replacements = ReplacementOutputs.tagged(
                PValues.expandOutput((POutput) original),
                PCollectionTuple.of(strsTag, replacementStrs).and(moreIntsTag, moreReplacementInts).and(intsTag, replacementInts));
        assertThat(replacements.keySet(), Matchers.containsInAnyOrder(replacementStrs, replacementInts, moreReplacementInts));
        ReplacementOutput intsReplacement = replacements.get(replacementInts);
        ReplacementOutput strsReplacement = replacements.get(replacementStrs);
        ReplacementOutput moreIntsReplacement = replacements.get(moreReplacementInts);
        assertThat(intsReplacement, equalTo(ReplacementOutput.of(
                TaggedPValue.of(intsTag, ints), TaggedPValue.of(intsTag, replacementInts))));
        assertThat(strsReplacement, equalTo(ReplacementOutput.of(
                TaggedPValue.of(strsTag, strs), TaggedPValue.of(strsTag, replacementStrs))));
        assertThat(moreIntsReplacement, equalTo(ReplacementOutput.of(
                TaggedPValue.of(moreIntsTag, moreInts), TaggedPValue.of(moreIntsTag, moreReplacementInts))));
    }
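The test relies on PCollectionTuple being one concrete POutput whose expansion maps each TupleTag to its PCollection, which is what PValues.expandOutput produces for ReplacementOutputs.tagged. A minimal sketch of that expansion, using illustrative tag and collection names rather than the test's fixtures:

    import java.util.Map;
    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.PCollectionTuple;
    import org.apache.beam.sdk.values.PValues;
    import org.apache.beam.sdk.values.TupleTag;

    public class ExpandOutputSketch {

        public static void main(final String[] args) {
            final Pipeline p = Pipeline.create(PipelineOptionsFactory.create());
            final TupleTag<Integer> intsTag = new TupleTag<>("ints");
            final TupleTag<String> strsTag = new TupleTag<>("strs");
            final PCollection<Integer> ints = p.apply("Ints", Create.of(1, 2, 3));
            final PCollection<String> strs = p.apply("Strs", Create.of("a", "b"));
            // A PCollectionTuple is a POutput built from tagged PCollections; expandOutput
            // flattens it into the tag-to-collection map consumed above.
            final PCollectionTuple tuple = PCollectionTuple.of(intsTag, ints).and(strsTag, strs);
            final Map<TupleTag<?>, PCollection<?>> expanded = PValues.expandOutput(tuple);
            System.out.println(expanded.keySet());
        }
    }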
Use of org.apache.beam.sdk.values.POutput in project beam by apache.
In the class PipelineRunnerTest, the method testRunPTransform:
    @Test
    @Category({ NeedsRunner.class, UsesCommittedMetrics.class, UsesCounterMetrics.class })
    public void testRunPTransform() {
        final String namespace = PipelineRunnerTest.class.getName();
        final Counter counter = Metrics.counter(namespace, "count");
        final PipelineResult result = PipelineRunner.fromOptions(p.getOptions()).run(new PTransform<PBegin, POutput>() {

            @Override
            public POutput expand(PBegin input) {
                PCollection<Double> output = input
                        .apply(Create.of(1, 2, 3, 4))
                        .apply("ScaleByTwo", MapElements.via(new ScaleFn<>(2.0, counter)));
                PAssert.that(output).containsInAnyOrder(2.0, 4.0, 6.0, 8.0);
                return output;
            }
        });
        // Checking counters to verify the pipeline actually ran.
        assertThat(
                result.metrics().queryMetrics(MetricsFilter.builder()
                        .addNameFilter(MetricNameFilter.inNamespace(namespace)).build()).getCounters(),
                hasItem(metricsResult(namespace, "count", "ScaleByTwo", 4L, true)));
    }
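Declaring POutput as the transform's output type, as the test does, leaves the transform free to return any output shape; the concrete value here is a PCollection, which implements POutput. Below is a stripped-down sketch of the same pattern without the metrics machinery; the class and step names are illustrative, and running it assumes a runner such as the direct runner is on the classpath.

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;
    import org.apache.beam.sdk.transforms.MapElements;
    import org.apache.beam.sdk.transforms.PTransform;
    import org.apache.beam.sdk.transforms.SimpleFunction;
    import org.apache.beam.sdk.values.PBegin;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.POutput;

    public class RootTransformSketch {

        public static void main(final String[] args) {
            final Pipeline pipeline = Pipeline.create(PipelineOptionsFactory.create());
            pipeline.apply(new PTransform<PBegin, POutput>() {

                @Override
                public POutput expand(final PBegin input) {
                    // The returned PCollection<Integer> satisfies the POutput contract.
                    return input
                            .apply(Create.of(1, 2, 3, 4))
                            .apply("DoubleIt", MapElements.via(new SimpleFunction<Integer, Integer>() {

                                @Override
                                public Integer apply(final Integer value) {
                                    return value * 2;
                                }
                            }));
                }
            });
            pipeline.run().waitUntilFinish();
        }
    }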
Use of org.apache.beam.sdk.values.POutput in project beam by apache.
In the class JavaClassLookupTransformProvider, the method getMethod:
    private Method getMethod(PTransform<PInput, POutput> transform, BuilderMethod builderMethod,
            AllowedClass allowListClass) {
        Row builderMethodRow = decodeRow(builderMethod.getSchema(), builderMethod.getPayload());
        List<Method> matchingMethods = Arrays.stream(transform.getClass().getMethods())
                .filter(m -> isBuilderMethodForName(m, builderMethod.getName(), allowListClass))
                .filter(m -> parametersCompatible(m.getParameters(), builderMethodRow))
                .filter(m -> PTransform.class.isAssignableFrom(m.getReturnType()))
                .collect(Collectors.toList());
        if (matchingMethods.size() == 0) {
            throw new RuntimeException("Could not find a matching method in transform " + transform
                    + " for BuilderMethod" + builderMethod
                    + ". When using field names, make sure they are available in the compiled Java class.");
        } else if (matchingMethods.size() > 1) {
            throw new RuntimeException("Expected to find exactly one matching method in transform " + transform
                    + " for BuilderMethod" + builderMethod + " but found " + matchingMethods.size());
        }
        return matchingMethods.get(0);
    }
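The lookup above is a standard reflection-and-streams filter: enumerate the public methods, keep those matching by name, parameters, and return type, and require exactly one survivor. A self-contained sketch of that filtering against a plain JDK class; the chosen class and method name are arbitrary examples, not part of the Beam provider.

    import java.lang.reflect.Method;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class MethodLookupSketch {

        public static void main(final String[] args) {
            // Keep only public methods with the wanted name and a compatible return type,
            // mirroring the filter chain in getMethod.
            final List<Method> matches = Arrays.stream(String.class.getMethods())
                    .filter(m -> m.getName().equals("concat"))
                    .filter(m -> String.class.isAssignableFrom(m.getReturnType()))
                    .collect(Collectors.toList());
            if (matches.size() != 1) {
                throw new RuntimeException("Expected exactly one matching method but found " + matches.size());
            }
            System.out.println(matches.get(0));
        }
    }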
Use of org.apache.beam.sdk.values.POutput in project beam by apache.
In the class JavaClassLookupTransformProvider, the method applyBuilderMethods:
    private PTransform<PInput, POutput> applyBuilderMethods(PTransform<PInput, POutput> transform,
            JavaClassLookupPayload payload, AllowedClass allowListClass) {
        for (BuilderMethod builderMethod : payload.getBuilderMethodsList()) {
            Method method = getMethod(transform, builderMethod, allowListClass);
            try {
                Row builderMethodRow = decodeRow(builderMethod.getSchema(), builderMethod.getPayload());
                transform = (PTransform<PInput, POutput>) method.invoke(transform,
                        getParameterValues(method.getParameters(), builderMethodRow, method.getGenericParameterTypes()));
            } catch (IllegalAccessException | InvocationTargetException e) {
                throw new IllegalArgumentException("Could not invoke the builder method " + builderMethod
                        + " on transform " + transform + " with parameter schema " + builderMethod.getSchema(), e);
            }
        }
        return transform;
    }
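applyBuilderMethods then chains the reflected builder calls, reassigning the transform to each returned value. The pattern itself is independent of Beam; here is a minimal sketch with a hypothetical Config builder whose class and method names are invented for illustration.

    import java.lang.reflect.Method;

    public class ReflectiveBuilderSketch {

        // A toy builder with the shape applyBuilderMethods expects: each builder
        // method returns an instance that the next call can be invoked on.
        public static class Config {

            private final String name;

            public Config() {
                this("");
            }

            private Config(final String name) {
                this.name = name;
            }

            public Config withName(final String name) {
                return new Config(name);
            }

            @Override
            public String toString() {
                return "Config(" + name + ")";
            }
        }

        public static void main(final String[] args) throws Exception {
            Object config = new Config();
            // Mirror of the loop above: resolve the builder method reflectively, invoke it,
            // and keep building on the returned value.
            final Method withName = config.getClass().getMethod("withName", String.class);
            config = withName.invoke(config, "example");
            System.out.println(config);
        }
    }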