Use of org.apache.hop.beam.transforms.io.BeamOutputMeta in project hop by apache.
The class BeamPipelineMetaUtil, method generateStreamLookupPipelineMeta.
public static final PipelineMeta generateStreamLookupPipelineMeta(String transname, String inputTransformName, String outputTransformName, IHopMetadataProvider metadataProvider) throws Exception {
    IHopMetadataSerializer<FileDefinition> serializer = metadataProvider.getSerializer(FileDefinition.class);
    FileDefinition customerFileDefinition = createCustomersInputFileDefinition();
    serializer.save(customerFileDefinition);

    PipelineMeta pipelineMeta = new PipelineMeta();
    pipelineMeta.setName(transname);
    pipelineMeta.setMetadataProvider(metadataProvider);

    // Add the main input transform
    //
    BeamInputMeta beamInputMeta = new BeamInputMeta();
    beamInputMeta.setInputLocation(PipelineTestBase.INPUT_CUSTOMERS_FILE);
    beamInputMeta.setFileDefinitionName(customerFileDefinition.getName());
    TransformMeta beamInputTransformMeta = new TransformMeta(inputTransformName, beamInputMeta);
    beamInputTransformMeta.setTransformPluginId(BeamConst.STRING_BEAM_INPUT_PLUGIN_ID);
    pipelineMeta.addTransform(beamInputTransformMeta);
    TransformMeta lookupBeamInputTransformMeta = beamInputTransformMeta;

    // Add a Memory Group By transform which will count the number of rows per state
    //
    MemoryGroupByMeta memoryGroupByMeta = new MemoryGroupByMeta();
    memoryGroupByMeta.allocate(1, 1);
    memoryGroupByMeta.getGroupField()[0] = "stateCode";
    memoryGroupByMeta.getAggregateType()[0] = MemoryGroupByMeta.TYPE_GROUP_COUNT_ALL;
    memoryGroupByMeta.getAggregateField()[0] = "rowsPerState";
    memoryGroupByMeta.getSubjectField()[0] = "id";
    TransformMeta memoryGroupByTransformMeta = new TransformMeta("rowsPerState", memoryGroupByMeta);
    pipelineMeta.addTransform(memoryGroupByTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(lookupBeamInputTransformMeta, memoryGroupByTransformMeta));

    // Add a Stream Lookup transform ...
    //
    StreamLookupMeta streamLookupMeta = new StreamLookupMeta();
    streamLookupMeta.allocate(1, 1);
    streamLookupMeta.getKeystream()[0] = "stateCode";
    streamLookupMeta.getKeylookup()[0] = "stateCode";
    streamLookupMeta.getValue()[0] = "rowsPerState";
    streamLookupMeta.getValueName()[0] = "nrPerState";
    streamLookupMeta.getValueDefault()[0] = null;
    streamLookupMeta.getValueDefaultType()[0] = IValueMeta.TYPE_INTEGER;
    streamLookupMeta.setMemoryPreservationActive(false);
    // Read the lookup data (info stream) from the Memory Group By transform
    streamLookupMeta.getTransformIOMeta().getInfoStreams().get(0).setTransformMeta(memoryGroupByTransformMeta);
    TransformMeta streamLookupTransformMeta = new TransformMeta("Stream Lookup", streamLookupMeta);
    pipelineMeta.addTransform(streamLookupTransformMeta);
    // Main input stream
    pipelineMeta.addPipelineHop(new PipelineHopMeta(beamInputTransformMeta, streamLookupTransformMeta));
    // Info stream
    pipelineMeta.addPipelineHop(new PipelineHopMeta(memoryGroupByTransformMeta, streamLookupTransformMeta));

    // Add the output transform to write results
    //
    BeamOutputMeta beamOutputMeta = new BeamOutputMeta();
    beamOutputMeta.setOutputLocation("/tmp/customers/output/");
    beamOutputMeta.setFileDefinitionName(null);
    beamOutputMeta.setFilePrefix("stream-lookup");
    beamOutputMeta.setFileSuffix(".csv");
    // Windowed output is not yet supported
    beamOutputMeta.setWindowed(false);
    TransformMeta beamOutputTransformMeta = new TransformMeta(outputTransformName, beamOutputMeta);
    beamOutputTransformMeta.setTransformPluginId("BeamOutput");
    pipelineMeta.addTransform(beamOutputTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(streamLookupTransformMeta, beamOutputTransformMeta));

    return pipelineMeta;
}
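A minimal usage sketch (hypothetical test code, not part of BeamPipelineMetaUtil; it assumes an in-memory IHopMetadataProvider implementation such as MemoryMetadataProvider and JUnit-style assertions):

@Test
public void testGenerateStreamLookupPipelineMeta() throws Exception {
    // Assumption: MemoryMetadataProvider serves as a throwaway metadata store.
    IHopMetadataProvider metadataProvider = new MemoryMetadataProvider();
    PipelineMeta pipelineMeta = BeamPipelineMetaUtil.generateStreamLookupPipelineMeta(
        "stream-lookup-test", "Customers", "Output", metadataProvider);
    // The generator wires four transforms: input, group-by, stream lookup, output.
    assertEquals(4, pipelineMeta.getTransforms().size());
    assertNotNull(pipelineMeta.findTransform("Stream Lookup"));
    assertNotNull(pipelineMeta.findTransform("rowsPerState"));
}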
Use of org.apache.hop.beam.transforms.io.BeamOutputMeta in project hop by apache.
The class BeamPipelineMetaUtil, method generateFilterRowsPipelineMeta.
public static final PipelineMeta generateFilterRowsPipelineMeta(String transname, String inputTransformName, String outputTransformName, IHopMetadataProvider metadataProvider) throws Exception {
    IHopMetadataSerializer<FileDefinition> serializer = metadataProvider.getSerializer(FileDefinition.class);
    FileDefinition customerFileDefinition = createCustomersInputFileDefinition();
    serializer.save(customerFileDefinition);

    PipelineMeta pipelineMeta = new PipelineMeta();
    pipelineMeta.setName(transname);
    pipelineMeta.setMetadataProvider(metadataProvider);

    // Add the input transform
    //
    BeamInputMeta beamInputMeta = new BeamInputMeta();
    beamInputMeta.setInputLocation(PipelineTestBase.INPUT_CUSTOMERS_FILE);
    beamInputMeta.setFileDefinitionName(customerFileDefinition.getName());
    TransformMeta beamInputTransformMeta = new TransformMeta(inputTransformName, beamInputMeta);
    beamInputTransformMeta.setTransformPluginId(BeamConst.STRING_BEAM_INPUT_PLUGIN_ID);
    pipelineMeta.addTransform(beamInputTransformMeta);

    // Add 2 Add Constants transforms, A and B
    //
    ConstantMeta constantA = new ConstantMeta();
    ConstantField cf1 = new ConstantField("label", "String", "< 'k'");
    constantA.getFields().add(cf1);
    TransformMeta constantAMeta = new TransformMeta("A", constantA);
    pipelineMeta.addTransform(constantAMeta);

    ConstantMeta constantB = new ConstantMeta();
    ConstantField cf2 = new ConstantField("label", "String", ">= 'k'");
    constantB.getFields().add(cf2);
    TransformMeta constantBMeta = new TransformMeta("B", constantB);
    pipelineMeta.addTransform(constantBMeta);

    // Add a Filter Rows transform checking for customer names smaller than "k".
    // Matching rows go to A (true), the rest to B (false).
    //
    FilterRowsMeta filter = new FilterRowsMeta();
    filter.getCondition().setLeftValuename("name");
    filter.getCondition().setFunction(Condition.FUNC_SMALLER);
    filter.getCondition().setRightExact(new ValueMetaAndData("value", "k"));
    filter.setTrueTransformName("A");
    filter.setFalseTransformName("B");
    TransformMeta filterMeta = new TransformMeta("Filter", filter);
    pipelineMeta.addTransform(filterMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(beamInputTransformMeta, filterMeta));
    pipelineMeta.addPipelineHop(new PipelineHopMeta(filterMeta, constantAMeta));
    pipelineMeta.addPipelineHop(new PipelineHopMeta(filterMeta, constantBMeta));

    // Add a dummy behind it all to flatten/merge the data again...
    //
    DummyMeta dummyPipelineMeta = new DummyMeta();
    TransformMeta dummyTransformMeta = new TransformMeta("Flatten", dummyPipelineMeta);
    pipelineMeta.addTransform(dummyTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(constantAMeta, dummyTransformMeta));
    pipelineMeta.addPipelineHop(new PipelineHopMeta(constantBMeta, dummyTransformMeta));

    // Add the output transform
    //
    BeamOutputMeta beamOutputMeta = new BeamOutputMeta();
    beamOutputMeta.setOutputLocation("/tmp/customers/output/");
    beamOutputMeta.setFileDefinitionName(null);
    beamOutputMeta.setFilePrefix("filter-test");
    beamOutputMeta.setFileSuffix(".csv");
    // Windowed output is not yet supported
    beamOutputMeta.setWindowed(false);
    TransformMeta beamOutputTransformMeta = new TransformMeta(outputTransformName, beamOutputMeta);
    beamOutputTransformMeta.setTransformPluginId("BeamOutput");
    pipelineMeta.addTransform(beamOutputTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(dummyTransformMeta, beamOutputTransformMeta));

    return pipelineMeta;
}
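Note the routing the condition implies: because the filter uses Condition.FUNC_SMALLER against the value "k", a customer named "alice" would take the true path to A and be labeled "< 'k'", while "zoe" would take the false path to B and be labeled ">= 'k'"; the Dummy transform "Flatten" then merges both labeled streams before the output.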
Use of org.apache.hop.beam.transforms.io.BeamOutputMeta in project hop by apache.
The class BeamPipelineMetaUtil, method generateBeamGroupByPipelineMeta.
public static final PipelineMeta generateBeamGroupByPipelineMeta(String transname, String inputTransformName, String outputTransformName, IHopMetadataProvider metadataProvider) throws Exception {
    IHopMetadataSerializer<FileDefinition> serializer = metadataProvider.getSerializer(FileDefinition.class);
    FileDefinition customerFileDefinition = createCustomersInputFileDefinition();
    serializer.save(customerFileDefinition);

    PipelineMeta pipelineMeta = new PipelineMeta();
    pipelineMeta.setName(transname);
    pipelineMeta.setMetadataProvider(metadataProvider);

    // Add the input transform
    //
    BeamInputMeta beamInputMeta = new BeamInputMeta();
    beamInputMeta.setInputLocation(PipelineTestBase.INPUT_CUSTOMERS_FILE);
    beamInputMeta.setFileDefinitionName(customerFileDefinition.getName());
    TransformMeta beamInputTransformMeta = new TransformMeta(inputTransformName, beamInputMeta);
    beamInputTransformMeta.setTransformPluginId(BeamConst.STRING_BEAM_INPUT_PLUGIN_ID);
    pipelineMeta.addTransform(beamInputTransformMeta);

    // Add a Memory Group By transform: count and sum the id field per state
    //
    MemoryGroupByMeta memoryGroupByMeta = new MemoryGroupByMeta();
    memoryGroupByMeta.allocate(1, 2);
    memoryGroupByMeta.getGroupField()[0] = "state";
    // count(id)
    memoryGroupByMeta.getAggregateField()[0] = "nrIds";
    memoryGroupByMeta.getSubjectField()[0] = "id";
    memoryGroupByMeta.getAggregateType()[0] = MemoryGroupByMeta.TYPE_GROUP_COUNT_ALL;
    // sum(id)
    memoryGroupByMeta.getAggregateField()[1] = "sumIds";
    memoryGroupByMeta.getSubjectField()[1] = "id";
    memoryGroupByMeta.getAggregateType()[1] = MemoryGroupByMeta.TYPE_GROUP_SUM;
    TransformMeta memoryGroupByTransformMeta = new TransformMeta("Group By", memoryGroupByMeta);
    pipelineMeta.addTransform(memoryGroupByTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(beamInputTransformMeta, memoryGroupByTransformMeta));

    // Add the output transform
    //
    BeamOutputMeta beamOutputMeta = new BeamOutputMeta();
    beamOutputMeta.setOutputLocation("/tmp/customers/output/");
    beamOutputMeta.setFileDefinitionName(null);
    beamOutputMeta.setFilePrefix("grouped");
    beamOutputMeta.setFileSuffix(".csv");
    // Windowed output is not yet supported
    beamOutputMeta.setWindowed(false);
    TransformMeta beamOutputTransformMeta = new TransformMeta(outputTransformName, beamOutputMeta);
    beamOutputTransformMeta.setTransformPluginId("BeamOutput");
    pipelineMeta.addTransform(beamOutputTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(memoryGroupByTransformMeta, beamOutputTransformMeta));

    return pipelineMeta;
}
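Under this configuration the pipeline emits one row per state, carrying the group field state and the aggregates nrIds (count of id) and sumIds (sum of id), written as CSV files with the "grouped" prefix under /tmp/customers/output/.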
Use of org.apache.hop.beam.transforms.io.BeamOutputMeta in project hop by apache.
The class BeamPipelineMetaUtil, method generateBeamInputOutputPipelineMeta.
public static final PipelineMeta generateBeamInputOutputPipelineMeta(String pipelineName, String inputTransformName, String outputTransformName, IHopMetadataProvider metadataProvider) throws Exception {
    IHopMetadataSerializer<FileDefinition> serializer = metadataProvider.getSerializer(FileDefinition.class);
    FileDefinition customerFileDefinition = createCustomersInputFileDefinition();
    serializer.save(customerFileDefinition);

    PipelineMeta pipelineMeta = new PipelineMeta();
    pipelineMeta.setName(pipelineName);
    pipelineMeta.setMetadataProvider(metadataProvider);

    // Add the input transform
    //
    BeamInputMeta beamInputMeta = new BeamInputMeta();
    beamInputMeta.setInputLocation(PipelineTestBase.INPUT_CUSTOMERS_FILE);
    beamInputMeta.setFileDefinitionName(customerFileDefinition.getName());
    TransformMeta beamInputTransformMeta = new TransformMeta(inputTransformName, beamInputMeta);
    beamInputTransformMeta.setTransformPluginId("BeamInput");
    pipelineMeta.addTransform(beamInputTransformMeta);

    // Add a dummy in between to get started...
    //
    DummyMeta dummyPipelineMeta = new DummyMeta();
    TransformMeta dummyTransformMeta = new TransformMeta("Dummy", dummyPipelineMeta);
    pipelineMeta.addTransform(dummyTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(beamInputTransformMeta, dummyTransformMeta));

    // Add the output transform
    //
    BeamOutputMeta beamOutputMeta = new BeamOutputMeta();
    beamOutputMeta.setOutputLocation("/tmp/customers/output/");
    beamOutputMeta.setFileDefinitionName(null);
    beamOutputMeta.setFilePrefix("customers");
    beamOutputMeta.setFileSuffix(".csv");
    // Windowed output is not yet supported
    beamOutputMeta.setWindowed(false);
    TransformMeta beamOutputTransformMeta = new TransformMeta(outputTransformName, beamOutputMeta);
    beamOutputTransformMeta.setTransformPluginId("BeamOutput");
    pipelineMeta.addTransform(beamOutputTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(dummyTransformMeta, beamOutputTransformMeta));

    return pipelineMeta;
}
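This input, dummy, output chain is the minimal skeleton all of the generators on this page build on: a Beam input reading PipelineTestBase.INPUT_CUSTOMERS_FILE with the saved customers file definition, one or more intermediate transforms, and a Beam output writing CSV files to /tmp/customers/output/.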
Use of org.apache.hop.beam.transforms.io.BeamOutputMeta in project hop by apache.
The class BeamPipelineMetaUtil, method generateSwitchCasePipelineMeta.
public static final PipelineMeta generateSwitchCasePipelineMeta(String transname, String inputTransformName, String outputTransformName, IHopMetadataProvider metadataProvider) throws Exception {
    IHopMetadataSerializer<FileDefinition> serializer = metadataProvider.getSerializer(FileDefinition.class);
    FileDefinition customerFileDefinition = createCustomersInputFileDefinition();
    serializer.save(customerFileDefinition);

    PipelineMeta pipelineMeta = new PipelineMeta();
    pipelineMeta.setName(transname);
    pipelineMeta.setMetadataProvider(metadataProvider);

    // Add the input transform
    //
    BeamInputMeta beamInputMeta = new BeamInputMeta();
    beamInputMeta.setInputLocation(PipelineTestBase.INPUT_CUSTOMERS_FILE);
    beamInputMeta.setFileDefinitionName(customerFileDefinition.getName());
    TransformMeta beamInputTransformMeta = new TransformMeta(inputTransformName, beamInputMeta);
    beamInputTransformMeta.setTransformPluginId(BeamConst.STRING_BEAM_INPUT_PLUGIN_ID);
    pipelineMeta.addTransform(beamInputTransformMeta);

    // Add 5 Add Constants transforms: CA, FL, NY, AR and Default
    //
    String[] stateCodes = new String[] { "CA", "FL", "NY", "AR", "Default" };
    for (String stateCode : stateCodes) {
        ConstantMeta constant = new ConstantMeta();
        ConstantField cf = new ConstantField("Comment", "String", stateCode + " : some comment");
        constant.getFields().add(cf);
        TransformMeta constantMeta = new TransformMeta(stateCode, constant);
        pipelineMeta.addTransform(constantMeta);
    }

    // Add a Switch/Case transform switching on the stateCode field.
    // Each state code is routed to the identically-named target transform;
    // everything else goes to Default.
    //
    SwitchCaseMeta switchCaseMeta = new SwitchCaseMeta();
    switchCaseMeta.setFieldName("stateCode");
    switchCaseMeta.setCaseValueType("String");

    List<SwitchCaseTarget> caseTargets = switchCaseMeta.getCaseTargets();
    for (int i = 0; i < stateCodes.length - 1; i++) {
        String stateCode = stateCodes[i];
        SwitchCaseTarget target = new SwitchCaseTarget();
        target.setCaseValue(stateCode);
        target.setCaseTargetTransformName(stateCode);
        caseTargets.add(target);
    }
    switchCaseMeta.setDefaultTargetTransformName(stateCodes[stateCodes.length - 1]);
    switchCaseMeta.searchInfoAndTargetTransforms(pipelineMeta.getTransforms());
    TransformMeta switchCaseTransformMeta = new TransformMeta("Switch/Case", switchCaseMeta);
    pipelineMeta.addTransform(switchCaseTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(beamInputTransformMeta, switchCaseTransformMeta));
    for (String stateCode : stateCodes) {
        pipelineMeta.addPipelineHop(new PipelineHopMeta(switchCaseTransformMeta, pipelineMeta.findTransform(stateCode)));
    }

    // Add a dummy behind it all to flatten/merge the data again...
    //
    DummyMeta dummyPipelineMeta = new DummyMeta();
    TransformMeta dummyTransformMeta = new TransformMeta("Flatten", dummyPipelineMeta);
    pipelineMeta.addTransform(dummyTransformMeta);
    for (String stateCode : stateCodes) {
        pipelineMeta.addPipelineHop(new PipelineHopMeta(pipelineMeta.findTransform(stateCode), dummyTransformMeta));
    }

    // Add the output transform
    //
    BeamOutputMeta beamOutputMeta = new BeamOutputMeta();
    beamOutputMeta.setOutputLocation("/tmp/customers/output/");
    beamOutputMeta.setFileDefinitionName(null);
    beamOutputMeta.setFilePrefix("switch-case-test");
    beamOutputMeta.setFileSuffix(".csv");
    // Windowed output is not yet supported
    beamOutputMeta.setWindowed(false);
    TransformMeta beamOutputTransformMeta = new TransformMeta(outputTransformName, beamOutputMeta);
    beamOutputTransformMeta.setTransformPluginId("BeamOutput");
    pipelineMeta.addTransform(beamOutputTransformMeta);
    pipelineMeta.addPipelineHop(new PipelineHopMeta(dummyTransformMeta, beamOutputTransformMeta));

    return pipelineMeta;
}
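Every generator above calls createCustomersInputFileDefinition(), which is not shown on this page. Below is a minimal sketch of what such a helper could look like; the separator, field lengths and the FieldDefinition constructor signature are assumptions, while the field names (id, name, stateCode, state) come from the fields the pipelines above reference:

public static FileDefinition createCustomersInputFileDefinition() {
    FileDefinition fileDefinition = new FileDefinition();
    fileDefinition.setName("customers");
    // Assumed CSV layout of the customers test file.
    fileDefinition.setSeparator(";");
    List<FieldDefinition> fields = fileDefinition.getFieldDefinitions();
    // Assumed constructor: FieldDefinition(name, hopType, length, precision).
    fields.add(new FieldDefinition("id", "Integer", 7, 0));
    fields.add(new FieldDefinition("name", "String", 25, 0));
    fields.add(new FieldDefinition("stateCode", "String", 4, 0));
    fields.add(new FieldDefinition("state", "String", 30, 0));
    return fileDefinition;
}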