Use of org.pentaho.di.trans.step.RowAdapter in the pentaho-kettle project (by Pentaho): class CalculatorUnitTest, method testReturnDigitsOnly.
/**
 * Verifies that the CALC_GET_ONLY_DIGITS calculation strips every non-digit
 * character from the "Value" field: "qwe123asd456zxc" must yield "123456".
 */
@Test
public void testReturnDigitsOnly() throws KettleException {
  // Input layout: two String fields, "Name" and "Value".
  RowMeta inputMeta = new RowMeta();
  inputMeta.addValueMeta( new ValueMetaString( "Name" ) );
  inputMeta.addValueMeta( new ValueMetaString( "Value" ) );

  // One row with mixed text and one row with a null value.
  RowSet input = smh.getMockInputRowSet( new Object[][] { { "name1", "qwe123asd456zxc" }, { "name2", null } } );
  input.setRowMeta( inputMeta );

  Calculator step = new Calculator( smh.stepMeta, smh.stepDataInterface, 0, smh.transMeta, smh.trans );
  step.addRowSetToInputRowSets( input );
  step.setInputRowMeta( inputMeta );
  step.init( smh.initStepMetaInterface, smh.initStepDataInterface );

  // Configure a single "get digits only" calculation over the "Value" field,
  // producing a new String field named "digits".
  CalculatorMeta meta = new CalculatorMeta();
  meta.setCalculation( new CalculatorMetaFunction[] { new CalculatorMetaFunction( "digits",
      CalculatorMetaFunction.CALC_GET_ONLY_DIGITS, "Value", null, null, ValueMetaInterface.TYPE_STRING,
      0, 0, false, "", "", "", "" ) } );

  try {
    // The calculated field is appended after the two input fields, at index 2.
    step.addRowListener( new RowAdapter() {
      @Override
      public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
        assertEquals( "123456", row[2] );
      }
    } );
    step.processRow( meta, new CalculatorData() );
  } catch ( KettleException ke ) {
    ke.printStackTrace();
    fail();
  }
}
Use of org.pentaho.di.trans.step.RowAdapter in the pentaho-kettle project (by Pentaho): class CsvInputDoubleLineEndTest, method doTest.
/**
 * Writes {@code testData} to a temporary file using {@code fileEncoding}, reads it
 * back through a CsvInput step configured with {@code stepEncoding}, and asserts
 * that every field of every written row equals "Value" and that exactly two lines
 * were written.
 *
 * @param fileEncoding encoding used when creating the test file
 * @param stepEncoding encoding configured on the CSV input step
 * @param testData     file content to parse
 */
private void doTest( final String fileEncoding, final String stepEncoding, final String testData ) throws Exception {
  String path = createTestFile( fileEncoding, testData ).getAbsolutePath();
  CsvInputMeta inputMeta = createStepMeta( path, stepEncoding );
  CsvInputData inputData = new CsvInputData();

  CsvInput step = new CsvInput( stepMockHelper.stepMeta, stepMockHelper.stepDataInterface, 0,
      stepMockHelper.transMeta, stepMockHelper.trans );
  step.init( inputMeta, inputData );
  step.addRowListener( new RowAdapter() {
    @Override
    public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
      // Every parsed field is expected to contain the literal "Value".
      for ( int idx = 0; idx < rowMeta.size(); idx++ ) {
        assertEquals( "Value", row[idx] );
      }
    }
  } );

  // processRow() returns false once there is nothing left to read.
  while ( step.processRow( inputMeta, inputData ) ) {
    // keep consuming rows
  }
  step.dispose( inputMeta, inputData );

  assertEquals( 2, step.getLinesWritten() );
}
Use of org.pentaho.di.trans.step.RowAdapter in the pentaho-kettle project (by Pentaho): class CsvInputUnicodeTest, method doTest.
/**
 * Writes {@code testData} to a temporary file using {@code fileEncoding}, reads it
 * back through a CsvInput step configured with {@code stepEncoding} and the given
 * delimiter, and asserts that every field of every written row equals "Value" and
 * that exactly two lines were written.
 *
 * @param fileEncoding encoding used when creating the test file
 * @param stepEncoding encoding configured on the CSV input step
 * @param testData     file content to parse
 * @param delimiter    field delimiter configured on the step
 */
private void doTest( final String fileEncoding, final String stepEncoding, final String testData,
    final String delimiter ) throws Exception {
  String path = createTestFile( fileEncoding, testData ).getAbsolutePath();
  CsvInputMeta inputMeta = createStepMeta( path, stepEncoding, delimiter );
  CsvInputData inputData = new CsvInputData();

  CsvInput step = new CsvInput( stepMockHelper.stepMeta, stepMockHelper.stepDataInterface, 0,
      stepMockHelper.transMeta, stepMockHelper.trans );
  step.init( inputMeta, inputData );
  step.addRowListener( new RowAdapter() {
    @Override
    public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
      // Every parsed field is expected to contain the literal "Value".
      for ( int idx = 0; idx < rowMeta.size(); idx++ ) {
        Assert.assertEquals( "Value", row[idx] );
      }
    }
  } );

  // processRow() returns false once there is nothing left to read.
  while ( step.processRow( inputMeta, inputData ) ) {
    // keep consuming rows
  }
  step.dispose( inputMeta, inputData );

  Assert.assertEquals( 2, step.getLinesWritten() );
}
Use of org.pentaho.di.trans.step.RowAdapter in the pentaho-kettle project (by Pentaho): class CsvProcessRowInParallelTest, method processRows.
/**
 * To avoid a long parameter list we pass the whole StepMetaDataCombi; its
 * contents are expected to be CsvInput-related instances (step, data, meta).
 */
private int processRows( StepMetaDataCombi combi ) throws Exception {
  CsvInput step = (CsvInput) combi.step;
  CsvInputData stepData = (CsvInputData) combi.data;
  CsvInputMeta stepMeta = (CsvInputMeta) combi.meta;

  // Single-element array so the anonymous listener can mutate the counter.
  final int[] counter = { 0 };
  step.addRowListener( new RowAdapter() {
    @Override
    public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
      counter[0]++;
    }
  } );

  // Drain the step: processRow() returns false once the input is exhausted.
  while ( step.processRow( stepMeta, stepData ) ) {
    // keep reading
  }
  step.dispose( stepMeta, stepData );

  return counter[0];
}
Use of org.pentaho.di.trans.step.RowAdapter in the pentaho-kettle project (by Pentaho): class SingleThreader, method prepareMappingExecution.
/**
 * Prepares the single-threaded execution of the mapping (sub-)transformation:
 * creates the sub-transformation from its metadata, passes parameters and flags
 * down from the parent transformation, wires up a row producer on the inject
 * step and a row listener on the retrieve step, and initializes the
 * single-threaded executor.
 *
 * @throws KettleException if the single-threaded transformation cannot be
 *                         initialized or prepared for execution
 */
public void prepareMappingExecution() throws KettleException {
  SingleThreaderData singleThreaderData = getData();

  // Set the type to single threaded in case the user forgot...
  singleThreaderData.mappingTransMeta.setTransformationType( TransformationType.SingleThreaded );

  // Create the transformation from meta-data...
  singleThreaderData.mappingTrans = new Trans( singleThreaderData.mappingTransMeta, getTrans() );

  // Pass the parameters down to the sub-transformation.
  // (Use the captured singleThreaderData consistently instead of repeated getData() calls.)
  StepWithMappingMeta.activateParams( singleThreaderData.mappingTrans, singleThreaderData.mappingTrans, this,
      singleThreaderData.mappingTrans.listParameters(), meta.getParameters(), meta.getParameterValues() );
  singleThreaderData.mappingTrans.activateParameters();

  // Disable thread priority management as it will slow things down needlessly.
  // The single threaded engine doesn't use threads and doesn't need row locking.
  // (Note: "Managment" is the spelling used by the Kettle API itself.)
  singleThreaderData.mappingTrans.getTransMeta().setUsingThreadPriorityManagment( false );

  // Leave a path up so that we can set variables in sub-transformations...
  singleThreaderData.mappingTrans.setParentTrans( getTrans() );

  // Pass down the safe mode flag to the mapping...
  singleThreaderData.mappingTrans.setSafeModeEnabled( getTrans().isSafeModeEnabled() );

  // Pass down the metrics gathering flag to the mapping...
  singleThreaderData.mappingTrans.setGatheringMetrics( getTrans().isGatheringMetrics() );

  // Also set the name of this step in the mapping transformation for logging purposes
  singleThreaderData.mappingTrans.setMappingStepName( getStepname() );
  initServletConfig();

  // Prepare the execution...
  singleThreaderData.mappingTrans.prepareExecution( null );

  // Reset the MappingInput wiring: no upstream source steps and no value renames.
  // Rows are supplied through the row producer added below instead.
  if ( singleThreaderData.injectStepMeta.isMappingInput() ) {
    MappingInputData mappingInputData = (MappingInputData) singleThreaderData.mappingTrans
        .findDataInterface( singleThreaderData.injectStepMeta.getName() );
    mappingInputData.sourceSteps = new StepInterface[0];
    mappingInputData.valueRenames = new ArrayList<MappingValueRename>();
  }

  // Add row producer & row listener
  singleThreaderData.rowProducer = singleThreaderData.mappingTrans.addRowProducer( meta.getInjectStep(), 0 );
  StepInterface retrieveStep = singleThreaderData.mappingTrans.getStepInterface( meta.getRetrieveStep(), 0 );
  retrieveStep.addRowListener( new RowAdapter() {
    @Override
    public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
      // Simply pass it along to the next steps after the SingleThreader
      SingleThreader.this.putRow( rowMeta, row );
    }
  } );

  singleThreaderData.mappingTrans.startThreads();

  // Create the executor...
  singleThreaderData.executor = new SingleThreadedTransExecutor( singleThreaderData.mappingTrans );

  try {
    boolean ok = singleThreaderData.executor.init();
    if ( !ok ) {
      throw new KettleException( BaseMessages.getString( PKG,
          "SingleThreader.Exception.UnableToInitSingleThreadedTransformation" ) );
    }
  } catch ( KettleException e ) {
    throw new KettleException( BaseMessages.getString( PKG,
        "SingleThreader.Exception.UnableToPrepareExecutionOfMapping" ), e );
  }

  // Add the mapping transformation to the active sub-transformations map in the parent transformation
  getTrans().addActiveSubTransformation( getStepname(), singleThreaderData.mappingTrans );
}
Aggregations