Example usage of org.apache.hop.pipeline.transforms.filterrows.FilterRowsMeta in the Apache Hop project:
the BeamPipelineMetaUtil class, method generateFilterRowsPipelineMeta.
/**
 * Builds a test pipeline that exercises the Filter Rows transform between a Beam input
 * and a Beam output:
 *
 * <pre>Beam Input -> Filter -> (A | B) -> Flatten -> Beam Output</pre>
 *
 * Customers with {@code name < "k"} are routed to constant transform "A" (labelled
 * {@code "< 'k'"}); the rest go to "B" (labelled {@code ">= 'k'"}). Both branches are
 * merged again by a Dummy ("Flatten") transform before being written out as CSV.
 *
 * @param transname name to assign to the generated pipeline
 * @param inputTransformName name of the Beam input transform
 * @param outputTransformName name of the Beam output transform
 * @param metadataProvider metadata provider used to persist the customers file definition
 * @return the fully wired {@link PipelineMeta}
 * @throws Exception if the file definition cannot be serialized or the pipeline cannot be built
 */
public static final PipelineMeta generateFilterRowsPipelineMeta(String transname, String inputTransformName, String outputTransformName, IHopMetadataProvider metadataProvider) throws Exception {
// Persist the customers file layout so the Beam input/output transforms can resolve it by name.
IHopMetadataSerializer<FileDefinition> serializer = metadataProvider.getSerializer(FileDefinition.class);
FileDefinition customerFileDefinition = createCustomersInputFileDefinition();
serializer.save(customerFileDefinition);
PipelineMeta pipelineMeta = new PipelineMeta();
pipelineMeta.setName(transname);
pipelineMeta.setMetadataProvider(metadataProvider);
// Add the io transform
//
BeamInputMeta beamInputMeta = new BeamInputMeta();
beamInputMeta.setInputLocation(PipelineTestBase.INPUT_CUSTOMERS_FILE);
beamInputMeta.setFileDefinitionName(customerFileDefinition.getName());
TransformMeta beamInputTransformMeta = new TransformMeta(inputTransformName, beamInputMeta);
beamInputTransformMeta.setTransformPluginId(BeamConst.STRING_BEAM_INPUT_PLUGIN_ID);
pipelineMeta.addTransform(beamInputTransformMeta);
// Add 2 add-constants transforms, A and B, one per filter branch.
// Each tags its rows with a "label" field describing the branch it came from.
//
ConstantMeta constantA = new ConstantMeta();
ConstantField cf1 = new ConstantField("label", "String", "< 'k'");
constantA.getFields().add(cf1);
TransformMeta constantAMeta = new TransformMeta("A", constantA);
pipelineMeta.addTransform(constantAMeta);
ConstantMeta constantB = new ConstantMeta();
ConstantField cf2 = new ConstantField("label", "String", ">= 'k'");
constantB.getFields().add(cf2);
TransformMeta constantBMeta = new TransformMeta("B", constantB);
pipelineMeta.addTransform(constantBMeta);
// Add a Filter Rows transform testing name < "k".
// Send matching rows to A (true) and the rest to B (false).
//
FilterRowsMeta filter = new FilterRowsMeta();
filter.getCondition().setLeftValuename("name");
filter.getCondition().setFunction(Condition.FUNC_SMALLER);
filter.getCondition().setRightExact(new ValueMetaAndData("value", "k"));
filter.setTrueTransformName("A");
filter.setFalseTransformName("B");
TransformMeta filterMeta = new TransformMeta("Filter", filter);
pipelineMeta.addTransform(filterMeta);
pipelineMeta.addPipelineHop(new PipelineHopMeta(beamInputTransformMeta, filterMeta));
pipelineMeta.addPipelineHop(new PipelineHopMeta(filterMeta, constantAMeta));
pipelineMeta.addPipelineHop(new PipelineHopMeta(filterMeta, constantBMeta));
// Add a dummy behind it all to flatten/merge the data again...
//
DummyMeta dummyPipelineMeta = new DummyMeta();
TransformMeta dummyTransformMeta = new TransformMeta("Flatten", dummyPipelineMeta);
pipelineMeta.addTransform(dummyTransformMeta);
pipelineMeta.addPipelineHop(new PipelineHopMeta(constantAMeta, dummyTransformMeta));
pipelineMeta.addPipelineHop(new PipelineHopMeta(constantBMeta, dummyTransformMeta));
// Add the output transform
//
BeamOutputMeta beamOutputMeta = new BeamOutputMeta();
beamOutputMeta.setOutputLocation("/tmp/customers/output/");
beamOutputMeta.setFileDefinitionName(null);
beamOutputMeta.setFilePrefix("filter-test");
beamOutputMeta.setFileSuffix(".csv");
// Not yet supported
beamOutputMeta.setWindowed(false);
TransformMeta beamOutputTransformMeta = new TransformMeta(outputTransformName, beamOutputMeta);
// Use the shared constant rather than the magic string "BeamOutput",
// consistent with the input transform's plugin id above.
beamOutputTransformMeta.setTransformPluginId(BeamConst.STRING_BEAM_OUTPUT_PLUGIN_ID);
pipelineMeta.addTransform(beamOutputTransformMeta);
pipelineMeta.addPipelineHop(new PipelineHopMeta(dummyTransformMeta, beamOutputTransformMeta));
return pipelineMeta;
}
Aggregations