Example usage of org.pentaho.di.trans.RowProducer in the pentaho-kettle project (by Pentaho), taken from class ValueMapperIT, method testValueMapper3:
/**
 * Integration test for the ValueMapper step, wired as
 * injector -> dummy 1 -> valuemapper -> dummy 2.
 *
 * Rows are pushed in through a RowProducer and the mapped output of the final
 * dummy step is compared against a golden image. This variant explicitly
 * exercises empty-field processing: no target field is configured (mapping
 * happens in place) and a null source value is part of the mapping table.
 */
public void testValueMapper3() throws Exception {
  KettleEnvironment.init();

  // Assemble the transformation.
  TransMeta meta = new TransMeta();
  meta.setName( "valuemappertest3" );
  PluginRegistry pluginRegistry = PluginRegistry.getInstance();

  // Injector step: entry point used by the RowProducer below.
  String injectorName = "injector step";
  InjectorMeta injectorMeta = new InjectorMeta();
  StepMeta injectorStepMeta =
      new StepMeta( pluginRegistry.getPluginId( StepPluginType.class, injectorMeta ), injectorName, injectorMeta );
  meta.addStep( injectorStepMeta );

  // First dummy step: observation point for the unmapped rows.
  String firstDummyName = "dummy step 1";
  DummyTransMeta firstDummyMeta = new DummyTransMeta();
  StepMeta firstDummyStepMeta =
      new StepMeta( pluginRegistry.getPluginId( StepPluginType.class, firstDummyMeta ), firstDummyName, firstDummyMeta );
  meta.addStep( firstDummyStepMeta );
  meta.addTransHop( new TransHopMeta( injectorStepMeta, firstDummyStepMeta ) );

  // ValueMapper under test: empty target field means in-place mapping;
  // the null source value maps to "emptyField".
  String mapperName = "valuemapper step";
  ValueMapperMeta mapperMeta = new ValueMapperMeta();
  mapperMeta.setFieldToUse( "field1" );
  mapperMeta.setTargetField( "" );
  mapperMeta.setSourceValue( new String[] { "abc", "ABC", "def", "ghij", null } );
  mapperMeta.setTargetValue( new String[] { "begin", "BEG_LONGER", "test", "", "emptyField" } );
  StepMeta mapperStepMeta =
      new StepMeta( pluginRegistry.getPluginId( StepPluginType.class, mapperMeta ), mapperName, mapperMeta );
  meta.addStep( mapperStepMeta );
  meta.addTransHop( new TransHopMeta( firstDummyStepMeta, mapperStepMeta ) );

  // Second dummy step: collects the mapped output.
  String secondDummyName = "dummy step 2";
  DummyTransMeta secondDummyMeta = new DummyTransMeta();
  StepMeta secondDummyStepMeta =
      new StepMeta( pluginRegistry.getPluginId( StepPluginType.class, secondDummyMeta ), secondDummyName, secondDummyMeta );
  meta.addStep( secondDummyStepMeta );
  meta.addTransHop( new TransHopMeta( mapperStepMeta, secondDummyStepMeta ) );

  // Prepare execution and attach a row collector to each observable step.
  Trans trans = new Trans( meta );
  trans.prepareExecution( null );

  RowStepCollector firstDummyCollector = new RowStepCollector();
  trans.getStepInterface( firstDummyName, 0 ).addRowListener( firstDummyCollector );
  RowStepCollector mapperCollector = new RowStepCollector();
  trans.getStepInterface( mapperName, 0 ).addRowListener( mapperCollector );
  RowStepCollector secondDummyCollector = new RowStepCollector();
  trans.getStepInterface( secondDummyName, 0 ).addRowListener( secondDummyCollector );

  RowProducer producer = trans.addRowProducer( injectorName, 0 );
  trans.startThreads();

  // Feed the test rows and wait for the transformation to drain.
  for ( RowMetaAndData row : createData() ) {
    producer.putRow( row.getRowMeta(), row.getData() );
  }
  producer.finished();
  trans.waitUntilFinished();

  // The rows written by the final dummy step must match the golden image.
  checkRows( createResultData3(), secondDummyCollector.getRowsWritten() );
}
Example usage of org.pentaho.di.trans.RowProducer in the pentaho-kettle project (by Pentaho), taken from class ValueMapperIT, method testValueMapper1:
/**
 * Integration test for the ValueMapper step, wired as
 * injector -> dummy 1 -> valuemapper -> dummy 2.
 *
 * Rows are pushed in through a RowProducer and the mapped output of the final
 * dummy step is compared against a golden image. This variant writes the
 * mapped values into a new field ("new_field") instead of mapping in place.
 */
public void testValueMapper1() throws Exception {
  KettleEnvironment.init();

  // Assemble the transformation.
  TransMeta meta = new TransMeta();
  meta.setName( "valuemappertest1" );
  PluginRegistry pluginRegistry = PluginRegistry.getInstance();

  // Injector step: entry point used by the RowProducer below.
  String injectorName = "injector step";
  InjectorMeta injectorMeta = new InjectorMeta();
  StepMeta injectorStepMeta =
      new StepMeta( pluginRegistry.getPluginId( StepPluginType.class, injectorMeta ), injectorName, injectorMeta );
  meta.addStep( injectorStepMeta );

  // First dummy step: observation point for the unmapped rows.
  String firstDummyName = "dummy step 1";
  DummyTransMeta firstDummyMeta = new DummyTransMeta();
  StepMeta firstDummyStepMeta =
      new StepMeta( pluginRegistry.getPluginId( StepPluginType.class, firstDummyMeta ), firstDummyName, firstDummyMeta );
  meta.addStep( firstDummyStepMeta );
  meta.addTransHop( new TransHopMeta( injectorStepMeta, firstDummyStepMeta ) );

  // ValueMapper under test: results go to a separate target field.
  String mapperName = "valuemapper step";
  ValueMapperMeta mapperMeta = new ValueMapperMeta();
  mapperMeta.setFieldToUse( "field1" );
  mapperMeta.setTargetField( "new_field" );
  mapperMeta.setSourceValue( new String[] { "abc", "ABC", "def", "ghij" } );
  mapperMeta.setTargetValue( new String[] { "begin", "BEG_LONGER", "test", "" } );
  StepMeta mapperStepMeta =
      new StepMeta( pluginRegistry.getPluginId( StepPluginType.class, mapperMeta ), mapperName, mapperMeta );
  meta.addStep( mapperStepMeta );
  meta.addTransHop( new TransHopMeta( firstDummyStepMeta, mapperStepMeta ) );

  // Second dummy step: collects the mapped output.
  String secondDummyName = "dummy step 2";
  DummyTransMeta secondDummyMeta = new DummyTransMeta();
  StepMeta secondDummyStepMeta =
      new StepMeta( pluginRegistry.getPluginId( StepPluginType.class, secondDummyMeta ), secondDummyName, secondDummyMeta );
  meta.addStep( secondDummyStepMeta );
  meta.addTransHop( new TransHopMeta( mapperStepMeta, secondDummyStepMeta ) );

  // Prepare execution and attach a row collector to each observable step.
  Trans trans = new Trans( meta );
  trans.prepareExecution( null );

  RowStepCollector firstDummyCollector = new RowStepCollector();
  trans.getStepInterface( firstDummyName, 0 ).addRowListener( firstDummyCollector );
  RowStepCollector mapperCollector = new RowStepCollector();
  trans.getStepInterface( mapperName, 0 ).addRowListener( mapperCollector );
  RowStepCollector secondDummyCollector = new RowStepCollector();
  trans.getStepInterface( secondDummyName, 0 ).addRowListener( secondDummyCollector );

  RowProducer producer = trans.addRowProducer( injectorName, 0 );
  trans.startThreads();

  // Feed the test rows and wait for the transformation to drain.
  for ( RowMetaAndData row : createData() ) {
    producer.putRow( row.getRowMeta(), row.getData() );
  }
  producer.finished();
  trans.waitUntilFinished();

  // The rows written by the final dummy step must match the golden image.
  checkRows( createResultData1(), secondDummyCollector.getRowsWritten() );
}
Example usage of org.pentaho.di.trans.RowProducer in the pdi-dataservice-server-plugin project (by Pentaho), taken from class DataServiceExecutorTest, method testExecuteStreamQuery:
/**
 * Verifies streaming query execution: the builder tags both transformations,
 * a header is written as soon as the query starts, and on completion only the
 * generated transformation is waited on (the service transformation is not).
 */
@Test
public void testExecuteStreamQuery() throws Exception {
  when( genTrans.isFinishedOrStopped() ).thenReturn( true );
  SQL query = new SQL( "SELECT * FROM " + DATA_SERVICE_NAME );
  when( serviceTrans.getTransMeta().listParameters() ).thenReturn( new String[0] );
  when( sqlTransGenerator.getSql() ).thenReturn( query );

  // Enable a push-down optimization and switch the data service to streaming mode.
  PushDownOptimizationMeta pushDown = mock( PushDownOptimizationMeta.class );
  when( pushDown.isEnabled() ).thenReturn( true );
  dataService.getPushDownOptimizationMeta().add( pushDown );
  dataService.setStreaming( true );

  IMetaStore metaStore = mock( IMetaStore.class );
  DataServiceExecutor executor = new DataServiceExecutor.Builder( query, dataService, context )
      .serviceTrans( serviceTrans )
      .sqlTransGenerator( sqlTransGenerator )
      .genTrans( genTrans )
      .metastore( metaStore )
      .windowMode( IDataServiceClientService.StreamingMode.ROW_BASED )
      .windowSize( 1 )
      .windowEvery( 0 )
      .windowLimit( 0 )
      .build();

  // The builder must tag both transformations with container object ids and the metastore.
  ArgumentCaptor<String> containerIds = ArgumentCaptor.forClass( String.class );
  verify( serviceTrans ).setContainerObjectId( containerIds.capture() );
  when( serviceTrans.getContainerObjectId() ).thenReturn( containerIds.getValue() );
  verify( genTrans ).setContainerObjectId( containerIds.capture() );
  when( genTrans.getContainerObjectId() ).thenReturn( containerIds.getValue() );
  verify( serviceTrans ).setMetaStore( metaStore );
  verify( genTrans ).setMetaStore( metaStore );

  // Stub the injector-step row producer the generated transformation will request.
  RowProducer injectorProducer = mock( RowProducer.class );
  when( genTrans.addRowProducer( INJECTOR_STEP_NAME, 0 ) ).thenReturn( injectorProducer );

  ByteArrayOutputStream output = new ByteArrayOutputStream();
  // Start execution; a result-set header must be written immediately.
  executor.executeQuery( new DataOutputStream( output ) );
  assertThat( output.size(), greaterThan( 0 ) );
  output.reset();

  executor.waitUntilFinished();
  // In streaming mode only the generated transformation is waited on.
  verify( serviceTrans, times( 0 ) ).waitUntilFinished();
  verify( genTrans ).waitUntilFinished();
}
Example usage of org.pentaho.di.trans.RowProducer in the pdi-dataservice-server-plugin project (by Pentaho), taken from class StreamingGeneratedTransExecution, method runGenTrans:
/**
 * Runs the generated transformation, injecting the given rows into the injector
 * step and delivering output rows to {@code resultRowListener}.
 *
 * @param rowIterator the input rows to inject (a {@link List}, despite the
 *                    iterator-sounding name — presumably historical; confirm
 *                    before renaming)
 * @throws KettleStepException wrapping any {@link KettleException} raised while
 *                             preparing or running the transformation
 */
private void runGenTrans(final List<RowMetaAndData> rowIterator) throws KettleStepException {
// Re-entry guard: the flag is atomically flipped false -> true and is never
// reset anywhere in this method, so concurrent or repeat calls are silently
// dropped. NOTE(review): if this is meant to be re-runnable, the flag should
// be cleared in a finally block; if prepareExecution throws, the flag also
// stays set — confirm intended lifecycle against the rest of the class.
if (isRunning.compareAndSet(false, true)) {
try {
LogChannelInterface log = genTrans.getLogChannel();
RowProducer rowProducer;
StepInterface resultStep;
// Reset any state from a previous execution, then prepare a fresh run.
genTrans.cleanup();
genTrans.prepareExecution(null);
// Register the producer on the injector step before the step threads start.
rowProducer = genTrans.addRowProducer(injectorStepName, 0);
genTrans.startThreads();
// Locate the running result step and attach our listener; cleanup() first
// to clear stale state left on the step thread from an earlier run.
resultStep = genTrans.findRunThread(resultStepName);
resultStep.cleanup();
resultStep.addRowListener(resultRowListener);
for (RowMetaAndData injectRows : rowIterator) {
// Offer each row in 1-second slices; give up only if the transformation
// has stopped running (otherwise a full row buffer would deadlock us).
while (!rowProducer.putRowWait(injectRows.getRowMeta(), injectRows.getData(), 1, TimeUnit.SECONDS) && genTrans.isRunning()) {
// Row queue was full, try again
log.logRowlevel(DataServiceConstants.ROW_BUFFER_IS_FULL_TRYING_AGAIN);
}
}
// Signal end-of-input, let the transformation drain, then stop all threads.
rowProducer.finished();
genTrans.waitUntilFinished();
genTrans.stopAll();
log.logDetailed(DataServiceConstants.STREAMING_GENERATED_TRANSFORMATION_STOPPED);
} catch (KettleException e) {
// Re-wrap in the step-level exception type declared by this method,
// preserving the cause.
throw new KettleStepException(e);
}
}
}
Example usage of org.pentaho.di.trans.RowProducer in the pdi-dataservice-server-plugin project (by Pentaho), taken from class CachedServiceTest, method testReplayPartialCache:
/**
 * Verifies partial cache replay: for a LIMIT 20 query the cached service loader
 * must replay exactly 20 rows into the generated transformation, never start
 * the service transformation, and inject defensive copies of the cached data.
 */
@Test
public void testReplayPartialCache() throws Exception {
  DataServiceExecutor executor = dataServiceExecutor( BASE_QUERY + " LIMIT 20" );
  CachedService cachedService = CachedService.complete( testData );
  RowProducer rowProducer = genTrans.addRowProducer( INJECTOR_STEP, 0 );

  // Activate the loader with a mock executor so the replay runnable can be captured.
  Executor mockExecutor = mock( Executor.class );
  final CachedServiceLoader cachedServiceLoader = new CachedServiceLoader( cachedService, mockExecutor );
  ListenableFuture<Integer> replay = cachedServiceLoader.replay( executor );
  ArgumentCaptor<Runnable> replayTask = ArgumentCaptor.forClass( Runnable.class );
  verify( mockExecutor ).execute( replayTask.capture() );

  // Expose a single input-step combi on the service transformation.
  stepMetaDataCombi.step = inputStep;
  stepMetaDataCombi.meta = inputStepMetaInterface;
  stepMetaDataCombi.data = inputStepDataInterface;
  List<StepMetaDataCombi> combis = new ArrayList<>();
  combis.add( stepMetaDataCombi );
  when( serviceTrans.getSteps() ).thenReturn( combis );

  // Simulate executing the data service.
  executor.executeListeners( DataServiceExecutor.ExecutionPoint.READY );
  executor.executeListeners( DataServiceExecutor.ExecutionPoint.START );

  // The service trans never starts and its input step is shut down,
  // while the generated trans starts and accepts rows.
  verify( serviceTrans ).stopAll();
  verify( inputStep ).setOutputDone();
  verify( inputStep ).dispose( inputStepMetaInterface, inputStepDataInterface );
  verify( inputStep ).markStop();
  verify( serviceTrans, never() ).startThreads();
  verify( genTrans ).startThreads();

  // Count injected rows and report the gen trans as running until 20 are in.
  final AtomicInteger rowsProduced = new AtomicInteger( 0 );
  when( rowProducer.putRowWait( any( RowMetaInterface.class ), any( Object[].class ), anyInt(), any( TimeUnit.class ) ) )
      .then( invocation -> {
        rowsProduced.getAndIncrement();
        return true;
      } );
  when( genTrans.isRunning() ).then( invocation -> rowsProduced.get() < 20 );

  // Run the cache loader (would be asynchronous in production).
  replayTask.getValue().run();

  verify( rowProducer ).finished();
  assertThat( replay.get( 1, TimeUnit.SECONDS ), equalTo( 20 ) );
  assertThat( rowsProduced.get(), equalTo( 20 ) );

  // Each replayed row must carry an equal but distinct copy of the cached data array.
  for ( RowMetaAndData cached : Iterables.limit( testData, 20 ) ) {
    Object[] data = cached.getData();
    verify( rowProducer ).putRowWait( eq( cached.getRowMeta() ), and( eq( data ), AdditionalMatchers.not( same( data ) ) ), anyInt(), any( TimeUnit.class ) );
  }
}
Aggregations