Use of org.pentaho.di.trans.steps.dummytrans.DummyTransMeta in project pentaho-kettle by pentaho.
The class JsonOutputTest, method createDummyStep.
/**
 * Create a dummy step for this class.
 *
 * @param name the requested step name (not used here; a fixed name is applied)
 * @param registry the plugin registry used to resolve the step plugin id
 * @return the dummy step metadata
 */
private StepMeta createDummyStep(String name, PluginRegistry registry) {
  // Create a dummy step; the caller adds it to the transMeta
  String dummyStepName = "dummy step";
  DummyTransMeta dm1 = new DummyTransMeta();
  String dummyPid1 = registry.getPluginId(StepPluginType.class, dm1);
  StepMeta dummyStep = new StepMeta(dummyPid1, dummyStepName, dm1);
  return dummyStep;
}
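For context, a minimal sketch of how the returned dummy step might then be wired into a transformation. The helper name wireDummyStep, the empty TransMeta, and the commented hop to a hypothetical jsonOutputStep are illustrative assumptions, not part of the original test.

private TransMeta wireDummyStep() {
  // Assumes org.pentaho.di.trans.TransMeta, org.pentaho.di.trans.TransHopMeta
  // and org.pentaho.di.core.plugins.PluginRegistry are available on the classpath.
  TransMeta transMeta = new TransMeta();
  StepMeta dummyStep = createDummyStep("dummy step", PluginRegistry.getInstance());
  transMeta.addStep(dummyStep);
  // In a real test a hop would connect the step under test to the dummy step, e.g.:
  // transMeta.addTransHop(new TransHopMeta(jsonOutputStep, dummyStep));
  return transMeta;
}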
Use of org.pentaho.di.trans.steps.dummytrans.DummyTransMeta in project pentaho-kettle by pentaho.
The class SwitchCaseTest, method testCreateOutputValueMapping.
/**
 * PDI-6900: check that the SwitchCase step correctly maps input values to output rowsets.
 *
 * @throws KettleException
 * @throws URISyntaxException
 * @throws ParserConfigurationException
 * @throws SAXException
 * @throws IOException
 */
@Test
public void testCreateOutputValueMapping() throws KettleException, URISyntaxException, ParserConfigurationException, SAXException, IOException {
  SwitchCaseCustom krasavez = new SwitchCaseCustom(mockHelper);
  // load the step's value-case mapping from xml
  List<DatabaseMeta> emptyList = new ArrayList<DatabaseMeta>();
  krasavez.meta.loadXML(loadStepXmlMetadata("SwitchCaseTest.xml"), emptyList, mock(IMetaStore.class));
  KeyToRowSetMap expectedNN = new KeyToRowSetMap();
  Set<RowSet> nulls = new HashSet<RowSet>();
  // create real steps for all targets
  List<SwitchCaseTarget> list = krasavez.meta.getCaseTargets();
  for (SwitchCaseTarget item : list) {
    StepMetaInterface smInt = new DummyTransMeta();
    StepMeta stepMeta = new StepMeta(item.caseTargetStepname, smInt);
    item.caseTargetStep = stepMeta;
    // create and register a row set for this target
    RowSet rw = new QueueRowSet();
    krasavez.map.put(item.caseTargetStepname, rw);
    // null values go to the null rowset
    if (item.caseValue != null) {
      expectedNN.put(item.caseValue, rw);
    } else {
      nulls.add(rw);
    }
  }
  // create the default step
  StepMetaInterface smInt = new DummyTransMeta();
  StepMeta stepMeta = new StepMeta(krasavez.meta.getDefaultTargetStepname(), smInt);
  krasavez.meta.setDefaultTargetStep(stepMeta);
  RowSet rw = new QueueRowSet();
  krasavez.map.put(krasavez.meta.getDefaultTargetStepname(), rw);
  krasavez.createOutputValueMapping();
  // inspect the step output data:
  Set<RowSet> ones = krasavez.data.outputMap.get("1");
  assertEquals("Output map for '1' values contains 2 row sets", 2, ones.size());
  Set<RowSet> twos = krasavez.data.outputMap.get("2");
  assertEquals("Output map for '2' values contains 1 row set", 1, twos.size());
  assertEquals("Null row set contains 2 items", 2, krasavez.data.nullRowSetSet.size());
  assertEquals("We have exactly one default rowset", 1, krasavez.data.defaultRowSetSet.size());
  // check that the rowset contents are correct:
  Set<RowSet> rowsets = expectedNN.get("1");
  for (RowSet rowset : rowsets) {
    assertTrue("Output map for '1' values contains expected row set", ones.contains(rowset));
  }
  rowsets = expectedNN.get("2");
  for (RowSet rowset : rowsets) {
    assertTrue("Output map for '2' values contains expected row set", twos.contains(rowset));
  }
  for (RowSet rowset : krasavez.data.nullRowSetSet) {
    assertTrue("Output map for null values contains expected row set", nulls.contains(rowset));
  }
  // we have already checked that there is only one item
  for (RowSet rowset : krasavez.data.defaultRowSetSet) {
    assertTrue("Output map for the default case contains expected row set", rowset.equals(rw));
  }
}
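To make the mapping concrete, here is a minimal routing sketch. It is an assumption about SwitchCase's runtime behaviour, inferred only from the fields this test exercises (data.outputMap, data.nullRowSetSet, data.defaultRowSetSet); it is not code from the step itself, and the helper name routeFor is hypothetical.

private Set<RowSet> routeFor(SwitchCaseCustom step, String caseValue) {
  if (caseValue == null) {
    return step.data.nullRowSetSet;      // null case values go to the null row sets
  }
  Set<RowSet> targets = step.data.outputMap.get(caseValue);
  if (targets == null || targets.isEmpty()) {
    return step.data.defaultRowSetSet;   // unmatched values fall back to the default target
  }
  return targets;                        // matched values go to every mapped row set
}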
Use of org.pentaho.di.trans.steps.dummytrans.DummyTransMeta in project pentaho-kettle by pentaho.
The class TransPartitioningTest, method prepareStepMetas_1_x2.
/**
 * One 'regular' step connected to a step running in 2 copies.
 */
private void prepareStepMetas_1_x2() {
  StepMeta dummy1 = new StepMeta(ONE, null);
  StepMeta dummy2 = new StepMeta(TWO, null);
  dummy2.setCopies(2);
  chain.add(dummy1);
  chain.add(dummy2);
  for (StepMeta item : chain) {
    item.setStepMetaInterface(new DummyTransMeta());
  }
}
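A sketch of how such a prepared chain would typically be assembled into a transformation; the hop wiring below is an assumption about what TransPartitioningTest does with chain, not an excerpt from it.

TransMeta transMeta = new TransMeta();
StepMeta previous = null;
for (StepMeta stepMeta : chain) {
  transMeta.addStep(stepMeta);
  if (previous != null) {
    // connect the steps in the order they appear in the chain
    transMeta.addTransHop(new TransHopMeta(previous, stepMeta));
  }
  previous = stepMeta;
}
// at run time the engine starts one step thread per copy,
// so TWO would execute as 2 parallel copies fed by ONE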
Use of org.pentaho.di.trans.steps.dummytrans.DummyTransMeta in project pentaho-kettle by pentaho.
The class TransPartitioningTest, method prepareStepMetas_x2_cl1.
/**
 * This is the case where the first step runs in 2 copies and the next one is partitioned.
 *
 * @throws KettlePluginException
 */
private void prepareStepMetas_x2_cl1() throws KettlePluginException {
  StepMeta dummy1 = new StepMeta(ONE, null);
  StepMeta dummy2 = new StepMeta(TWO, null);
  PartitionSchema schema1 = new PartitionSchema("p1", Arrays.asList(new String[] { PID1, PID2 }));
  StepPartitioningMeta partMeta1 = new StepPartitioningMeta("Mirror to all partitions", schema1);
  dummy2.setStepPartitioningMeta(partMeta1);
  dummy1.setCopies(2);
  chain.add(dummy1);
  chain.add(dummy2);
  for (StepMeta item : chain) {
    item.setStepMetaInterface(new DummyTransMeta());
  }
}
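As a rough illustration of the row-set fan-out this layout implies (an assumption about how the engine allocates row sets between a multi-copy step and a partitioned step; the snippet itself asserts nothing about this):

int copiesOfFirstStep = dummy1.getCopies();                       // 2
int partitionsOfSecondStep = schema1.getPartitionIDs().size();    // 2: PID1 and PID2
// each copy of ONE would get one row set per partition of TWO
int expectedRowSets = copiesOfFirstStep * partitionsOfSecondStep; // 4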
Use of org.pentaho.di.trans.steps.dummytrans.DummyTransMeta in project pentaho-kettle by pentaho.
The class TransPartitioningTest, method prepareStepMetas_1_cl1.
/**
 * This is the case of a 1 step to 1 clustered step distribution.
 *
 * @throws KettlePluginException
 */
private void prepareStepMetas_1_cl1() throws KettlePluginException {
  StepMeta dummy1 = new StepMeta(ONE, null);
  StepMeta dummy2 = new StepMeta(TWO, null);
  PartitionSchema schema = new PartitionSchema("p1", Arrays.asList(new String[] { PID1, PID2 }));
  StepPartitioningMeta partMeta = new StepPartitioningMeta("Mirror to all partitions", schema);
  dummy2.setStepPartitioningMeta(partMeta);
  chain.add(dummy1);
  chain.add(dummy2);
  for (StepMeta item : chain) {
    item.setStepMetaInterface(new DummyTransMeta());
  }
}
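For comparison with the previous layout, the fan-out here would be smaller, since ONE runs as a single copy (again an assumption about the engine's row-set allocation, not something this snippet asserts):

// a single copy of ONE feeds one row set per partition of TWO
int expectedRowSets = dummy1.getCopies() * schema.getPartitionIDs().size(); // 1 * 2 = 2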