Usage of org.pentaho.di.core.RowSet in the pentaho-kettle project (by Pentaho).
Below: class BaseStep, method findInputRowSet.
/**
 * Locates the input {@link RowSet} connecting a specific source step copy to a specific
 * destination step copy. If no direct row set exists and the source step is a Mapping,
 * the mapping's inner transformation is searched as well, since its row sets are handed
 * over to a Mapping Input step inside the sub-transformation.
 *
 * @param from the name of the originating step
 * @param fromcopy the copy number of the originating step
 * @param to the name of the destination step
 * @param tocopy the copy number of the destination step
 * @return the matching row set, or {@code null} when none can be found
 */
public RowSet findInputRowSet(String from, int fromcopy, String to, int tocopy) {
  // First, scan this step's own input row sets under the lock.
  synchronized (inputRowSetsLock) {
    for (RowSet candidate : inputRowSets) {
      if (!candidate.getOriginStepName().equalsIgnoreCase(from)) {
        continue;
      }
      if (!candidate.getDestinationStepName().equalsIgnoreCase(to)) {
        continue;
      }
      if (candidate.getOriginStepCopy() != fromcopy) {
        continue;
      }
      if (candidate.getDestinationStepCopy() != tocopy) {
        continue;
      }
      return candidate;
    }
  }
  // Not wired directly: the row set may be part of the output of a mapping source step.
  StepMeta sourceStepMeta = transMeta.findStep(from);
  if (sourceStepMeta == null || !sourceStepMeta.isMapping()) {
    return null;
  }
  List<StepInterface> stepThreads = trans.findBaseSteps(from);
  if (stepThreads.size() != 1) {
    return null;
  }
  // A mapping step's thread can safely be cast to Mapping.
  Mapping mappingThread = (Mapping) stepThreads.get(0);
  // Look through the inner transformation's Mapping Output steps for a row set
  // headed to the requested destination step.
  MappingOutput[] mappingOutputs = mappingThread.getMappingTrans().findMappingOutput();
  for (MappingOutput mappingOutput : mappingOutputs) {
    for (RowSet candidate : mappingOutput.getOutputRowSets()) {
      if (candidate.getDestinationStepName().equalsIgnoreCase(to)) {
        return candidate;
      }
    }
  }
  return null;
}
Usage of org.pentaho.di.core.RowSet in the pentaho-kettle project (by Pentaho).
Below: class BaseStep, method dispatch.
/**
 * Resolves the surrounding steps and row sets for this base step. The step keeps its own
 * lists of row sets so it does not have to search the transformation every time.
 * <p>
 * Note that all input and output row sets have already been created by the transformation
 * itself; this method only selects which of them will be used by this particular step copy.
 * <p>
 * Both input and output row sets are collected so the step will be able to read its input
 * data and write to its output.
 * <p>
 * Steps can run as multiple copies, possibly in partitioned fashion, so the wiring must
 * account for one-to-one, one-to-many, many-to-one, many-to-many and repartitioning hops.
 */
public void dispatch() {
if (transMeta == null) {
// for preview reasons, no dispatching is done!
return;
}
StepMeta stepMeta = transMeta.findStep(stepname);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "BaseStep.Log.StartingBuffersAllocation"));
}
// How many next steps are there? 0, 1 or more??
// How many steps do we send output to?
List<StepMeta> previousSteps = transMeta.findPreviousSteps(stepMeta, true);
List<StepMeta> succeedingSteps = transMeta.findNextSteps(stepMeta);
int nrInput = previousSteps.size();
int nrOutput = succeedingSteps.size();
// Both row set lists are rebuilt from scratch, so hold both locks for the whole rewiring.
synchronized (inputRowSetsLock) {
synchronized (outputRowSetsLock) {
inputRowSets = new ArrayList<>();
outputRowSets = new ArrayList<>();
errorRowSet = null;
prevSteps = new StepMeta[nrInput];
nextSteps = new StepMeta[nrOutput];
// we start with input[0];
currentInputRowSetNr = 0;
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "BaseStep.Log.StepInfo", String.valueOf(nrInput), String.valueOf(nrOutput)));
}
// populate input rowsets.
for (int i = 0; i < previousSteps.size(); i++) {
prevSteps[i] = previousSteps.get(i);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "BaseStep.Log.GotPreviousStep", stepname, String.valueOf(i), prevSteps[i].getName()));
}
// Looking at the previous step, you can have either 1 rowset to look at or more then one.
int prevCopies = prevSteps[i].getCopies();
int nextCopies = stepMeta.getCopies();
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "BaseStep.Log.InputRowInfo", String.valueOf(prevCopies), String.valueOf(nextCopies)));
}
int nrCopies;
int dispatchType;
boolean repartitioning;
// Repartitioning is needed when the partitioning scheme differs between the two sides
// of the hop (or only one side is partitioned).
if (prevSteps[i].isPartitioned()) {
repartitioning = !prevSteps[i].getStepPartitioningMeta().equals(stepMeta.getStepPartitioningMeta());
} else {
repartitioning = stepMeta.isPartitioned();
}
// Derive the dispatch pattern and the number of row sets this copy reads from,
// based on the copy counts on both sides of the hop.
if (prevCopies == 1 && nextCopies == 1) {
// normal hop
dispatchType = Trans.TYPE_DISP_1_1;
nrCopies = 1;
} else if (prevCopies == 1 && nextCopies > 1) {
// one to many hop
dispatchType = Trans.TYPE_DISP_1_N;
nrCopies = 1;
} else if (prevCopies > 1 && nextCopies == 1) {
// from many to one hop
dispatchType = Trans.TYPE_DISP_N_1;
nrCopies = prevCopies;
} else if (prevCopies == nextCopies && !repartitioning) {
// this may be many-to-many or swim-lanes hop
dispatchType = Trans.TYPE_DISP_N_N;
nrCopies = 1;
} else {
// > 1!
dispatchType = Trans.TYPE_DISP_N_M;
nrCopies = prevCopies;
}
// Pick up one row set per source copy we read from; the copy indices passed to
// findRowSet follow directly from the dispatch pattern above.
for (int c = 0; c < nrCopies; c++) {
RowSet rowSet = null;
switch(dispatchType) {
case Trans.TYPE_DISP_1_1:
rowSet = trans.findRowSet(prevSteps[i].getName(), 0, stepname, 0);
break;
case Trans.TYPE_DISP_1_N:
rowSet = trans.findRowSet(prevSteps[i].getName(), 0, stepname, getCopy());
break;
case Trans.TYPE_DISP_N_1:
rowSet = trans.findRowSet(prevSteps[i].getName(), c, stepname, 0);
break;
case Trans.TYPE_DISP_N_N:
rowSet = trans.findRowSet(prevSteps[i].getName(), getCopy(), stepname, getCopy());
break;
case Trans.TYPE_DISP_N_M:
rowSet = trans.findRowSet(prevSteps[i].getName(), c, stepname, getCopy());
break;
default:
break;
}
if (rowSet != null) {
inputRowSets.add(rowSet);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "BaseStep.Log.FoundInputRowset", rowSet.getName()));
}
} else {
// A missing row set is fatal unless one side is a mapping step — mapping steps
// presumably wire their own row sets (see findInputRowSet); TODO confirm.
if (!prevSteps[i].isMapping() && !stepMeta.isMapping()) {
logError(BaseMessages.getString(PKG, "BaseStep.Log.UnableToFindInputRowset"));
setErrors(1);
stopAll();
return;
}
}
}
}
// And now the output part!
// Mirror image of the input wiring: same dispatch-type derivation, but nrCopies now
// counts destination copies and the copy indices are swapped accordingly.
for (int i = 0; i < nrOutput; i++) {
nextSteps[i] = succeedingSteps.get(i);
int prevCopies = stepMeta.getCopies();
int nextCopies = nextSteps[i].getCopies();
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "BaseStep.Log.OutputRowInfo", String.valueOf(prevCopies), String.valueOf(nextCopies)));
}
int nrCopies;
int dispatchType;
boolean repartitioning;
if (stepMeta.isPartitioned()) {
repartitioning = !stepMeta.getStepPartitioningMeta().equals(nextSteps[i].getStepPartitioningMeta());
} else {
repartitioning = nextSteps[i].isPartitioned();
}
if (prevCopies == 1 && nextCopies == 1) {
dispatchType = Trans.TYPE_DISP_1_1;
nrCopies = 1;
} else if (prevCopies == 1 && nextCopies > 1) {
dispatchType = Trans.TYPE_DISP_1_N;
nrCopies = nextCopies;
} else if (prevCopies > 1 && nextCopies == 1) {
dispatchType = Trans.TYPE_DISP_N_1;
nrCopies = 1;
} else if (prevCopies == nextCopies && !repartitioning) {
dispatchType = Trans.TYPE_DISP_N_N;
nrCopies = 1;
} else {
// > 1!
dispatchType = Trans.TYPE_DISP_N_M;
nrCopies = nextCopies;
}
for (int c = 0; c < nrCopies; c++) {
RowSet rowSet = null;
switch(dispatchType) {
case Trans.TYPE_DISP_1_1:
rowSet = trans.findRowSet(stepname, 0, nextSteps[i].getName(), 0);
break;
case Trans.TYPE_DISP_1_N:
rowSet = trans.findRowSet(stepname, 0, nextSteps[i].getName(), c);
break;
case Trans.TYPE_DISP_N_1:
rowSet = trans.findRowSet(stepname, getCopy(), nextSteps[i].getName(), 0);
break;
case Trans.TYPE_DISP_N_N:
rowSet = trans.findRowSet(stepname, getCopy(), nextSteps[i].getName(), getCopy());
break;
case Trans.TYPE_DISP_N_M:
rowSet = trans.findRowSet(stepname, getCopy(), nextSteps[i].getName(), c);
break;
default:
break;
}
if (rowSet != null) {
outputRowSets.add(rowSet);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "BaseStep.Log.FoundOutputRowset", rowSet.getName()));
}
} else {
// Same tolerance as on the input side: only mappings may legitimately lack a row set.
if (!stepMeta.isMapping() && !nextSteps[i].isMapping()) {
logError(BaseMessages.getString(PKG, "BaseStep.Log.UnableToFindOutputRowset"));
setErrors(1);
stopAll();
return;
}
}
}
}
}
}
// Remember the target partitioning scheme, if one was configured for this step.
if (stepMeta.getTargetStepPartitioningMeta() != null) {
nextStepPartitioningMeta = stepMeta.getTargetStepPartitioningMeta();
}
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "BaseStep.Log.FinishedDispatching"));
}
}
}
Usage of org.pentaho.di.core.RowSet in the pentaho-kettle project (by Pentaho).
Below: class BaseStep, method identifyErrorOutput.
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.di.trans.step.StepInterface#identifyErrorOutput()
 */
@Override
public void identifyErrorOutput() {
  // Nothing to do unless error handling is configured on this step.
  if (!stepMeta.isDoingErrorHandling()) {
    return;
  }
  StepErrorMeta stepErrorMeta = stepMeta.getStepErrorMeta();
  synchronized (outputRowSetsLock) {
    String errorTargetName = stepErrorMeta.getTargetStep().getName();
    // Find the first row set headed to the error-handling target step and move it
    // out of the regular output list into errorRowSet.
    int position = 0;
    while (position < outputRowSets.size()) {
      RowSet candidate = outputRowSets.get(position);
      if (candidate.getDestinationStepName().equalsIgnoreCase(errorTargetName)) {
        errorRowSet = candidate;
        outputRowSets.remove(position);
        return;
      }
      position++;
    }
  }
}
Usage of org.pentaho.di.core.RowSet in the pentaho-kettle project (by Pentaho).
Below: class MetaInjectTest, method injectMetaFromMultipleInputSteps.
@Test
public void injectMetaFromMultipleInputSteps() throws KettleException {
  // Map two injectable attributes to fields arriving from two different input steps.
  Map<TargetStepAttribute, SourceStepField> attributeMapping = new LinkedHashMap<>();
  attributeMapping.put(new TargetStepAttribute(INJECTOR_STEP_NAME, "DATA_TYPE", true), new SourceStepField("TYPE_INPUT", "col_type"));
  attributeMapping.put(new TargetStepAttribute(INJECTOR_STEP_NAME, "NAME", true), new SourceStepField("NAME_INPUT", "col_name"));
  meta.setTargetSourceMapping(attributeMapping);
  doReturn(new String[] { "NAME_INPUT", "TYPE_INPUT" }).when(transMeta).getPrevStepNames(any(StepMeta.class));
  // Stub a row set delivering the "col_name" field from NAME_INPUT.
  RowSet nameInputRowSet = mock(RowSet.class);
  RowMeta nameRowMeta = new RowMeta();
  nameRowMeta.addValueMeta(new ValueMetaString("col_name"));
  doReturn(nameRowMeta).when(nameInputRowSet).getRowMeta();
  doReturn(nameInputRowSet).when(metaInject).findInputRowSet("NAME_INPUT");
  // Stub a row set delivering the "col_type" field from TYPE_INPUT.
  RowSet typeInputRowSet = mock(RowSet.class);
  RowMeta typeRowMeta = new RowMeta();
  typeRowMeta.addValueMeta(new ValueMetaString("col_type"));
  doReturn(typeRowMeta).when(typeInputRowSet).getRowMeta();
  doReturn(typeInputRowSet).when(metaInject).findInputRowSet("TYPE_INPUT");
  // Each input delivers exactly one row, then signals end-of-data with null.
  doReturn(new Object[] { "FIRST_NAME" }).doReturn(null).when(metaInject).getRowFrom(nameInputRowSet);
  doReturn(new Object[] { "String" }).doReturn(null).when(metaInject).getRowFrom(typeInputRowSet);
  // Injection metadata tree: FIELDS -> FIELD -> { NAME, DATA_TYPE }.
  List<StepInjectionMetaEntry> injectionEntries = new ArrayList<>();
  StepInjectionMetaEntry fieldsEntry = new StepInjectionMetaEntry("FIELDS", ValueMetaInterface.TYPE_NONE, "");
  StepInjectionMetaEntry fieldEntry = new StepInjectionMetaEntry("FIELD", ValueMetaInterface.TYPE_NONE, "");
  fieldsEntry.getDetails().add(fieldEntry);
  StepInjectionMetaEntry nameEntry = new StepInjectionMetaEntry("NAME", ValueMetaInterface.TYPE_STRING, "");
  fieldEntry.getDetails().add(nameEntry);
  StepInjectionMetaEntry dataTypeEntry = new StepInjectionMetaEntry("DATA_TYPE", ValueMetaInterface.TYPE_STRING, "");
  fieldEntry.getDetails().add(dataTypeEntry);
  injectionEntries.add(fieldsEntry);
  doReturn(injectionEntries).when(metaInjectionInterface).getStepInjectionMetadataEntries();
  meta.setNoExecution(true);
  assertTrue(metaInject.init(meta, data));
  metaInject.processRow(meta, data);
  // Both fields must have been injected with the values read from their source steps.
  StepInjectionMetaEntry expectedNameEntry = new StepInjectionMetaEntry("NAME", "FIRST_NAME", ValueMetaInterface.TYPE_STRING, "");
  StepInjectionMetaEntry expectedDataEntry = new StepInjectionMetaEntry("DATA_TYPE", "String", ValueMetaInterface.TYPE_STRING, "");
  verify(metaInject, atLeastOnce()).setEntryValueIfFieldExists(refEq(expectedNameEntry), any(RowMetaAndData.class), any(SourceStepField.class));
  verify(metaInject, atLeastOnce()).setEntryValueIfFieldExists(refEq(expectedDataEntry), any(RowMetaAndData.class), any(SourceStepField.class));
}
Usage of org.pentaho.di.core.RowSet in the pentaho-kettle project (by Pentaho).
Below: class StreamLookupTest, method mockDataRowSet.
/**
 * Builds a mocked data row set with two rows of (Name, Id) string pairs, optionally
 * converted to binary-string storage.
 *
 * @param binary when {@code true}, the data is converted to binary-string storage
 * @return a mock row set whose getRowMeta() describes the two string columns
 */
private RowSet mockDataRowSet(boolean binary) {
  // The same storage type applies to both columns.
  final int storage = binary ? ValueMetaInterface.STORAGE_TYPE_BINARY_STRING : ValueMetaInterface.STORAGE_TYPE_NORMAL;
  Object[][] rows = { { "Name1", "1" }, { "Name2", "2" } };
  if (binary) {
    convertDataToBinary(rows);
  }
  // Describe the two columns identically apart from their names.
  RowMeta rowMeta = new RowMeta();
  for (String columnName : new String[] { "Name", "Id" }) {
    ValueMetaString columnMeta = new ValueMetaString(columnName);
    columnMeta.setStorageType(storage);
    columnMeta.setStorageMetadata(new ValueMetaString());
    rowMeta.addValueMeta(columnMeta);
  }
  RowSet dataRowSet = smh.getMockInputRowSet(rows);
  doReturn(rowMeta).when(dataRowSet).getRowMeta();
  return dataRowSet;
}
Aggregations (end of the collected RowSet usage examples).