Use of org.pentaho.di.trans.steps.mapping.MappingValueRename in project pentaho-kettle by pentaho.
The class MappingOutputMeta, method getFields:
public void getFields(RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore) throws KettleStepException {
  // Rename the input-side fields back from their target names to their original (source) names...
  //
  if (inputValueRenames != null) {
    for (MappingValueRename valueRename : inputValueRenames) {
      ValueMetaInterface valueMeta = r.searchValueMeta(valueRename.getTargetValueName());
      if (valueMeta != null) {
        valueMeta.setName(valueRename.getSourceValueName());
      }
    }
  }
  // Apply the output-side renames (source name to target name)...
  //
  if (outputValueRenames != null) {
    for (MappingValueRename valueRename : outputValueRenames) {
      int valueMetaRenameIndex = r.indexOfValue(valueRename.getSourceValueName());
      if (valueMetaRenameIndex >= 0) {
        ValueMetaInterface valueMetaRename = r.getValueMeta(valueMetaRenameIndex).clone();
        valueMetaRename.setName(valueRename.getTargetValueName());
        // Must maintain the same column order. Noticed when implementing the Mapping step in AEL (BACKLOG-23372)
        r.removeValueMeta(valueMetaRenameIndex);
        r.addValueMeta(valueMetaRenameIndex, valueMetaRename);
      }
    }
  }
}
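To make the rename semantics concrete, here is a rough standalone sketch (not from the project: the field names are invented, the nulls assume that only the row metadata is touched, which matches the body above, and the setters are assumed to accept a List<MappingValueRename> as in the SimpleMappingMeta snippet further down):

import java.util.Arrays;
import java.util.Collections;

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.trans.steps.mapping.MappingValueRename;
import org.pentaho.di.trans.steps.mappingoutput.MappingOutputMeta;

public class MappingOutputMetaRenameSketch {
  public static void main(String[] args) throws Exception {
    // Row coming out of the mapping: "id" was renamed from "customer_id" on the way in.
    RowMetaInterface row = new RowMeta();
    row.addValueMeta(new ValueMetaString("id"));
    row.addValueMeta(new ValueMetaString("total"));

    MappingOutputMeta meta = new MappingOutputMeta();
    // Input-side rename: change "id" back to "customer_id" for the parent transformation.
    meta.setInputValueRenames(Collections.singletonList(new MappingValueRename("customer_id", "id")));
    // Output-side rename: expose "total" as "order_total".
    meta.setOutputValueRenames(Collections.singletonList(new MappingValueRename("total", "order_total")));

    // The method body above only reads and writes the row metadata, so the other parameters can be null here.
    meta.getFields(row, "Mapping output", null, null, null, null, null);

    // Prints [customer_id, order_total]
    System.out.println(Arrays.toString(row.getFieldNames()));
  }
}

The input-side rename searches by the target name and restores the source name, while the output-side rename does the opposite, which is exactly the two loops in the method above.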
Use of org.pentaho.di.trans.steps.mapping.MappingValueRename in project pentaho-kettle by pentaho.
The class MappingInput, method processRow:
// processRow() is not doing anything special.
// It's a placeholder for accepting rows from the parent transformation...
// So, basically, this is a glorified Dummy step with a little bit of metadata.
//
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
  meta = (MappingInputMeta) smi;
  data = (MappingInputData) sdi;
  if (!data.linked) {
    //
    // Wait until we know where to read from in the parent transformation...
    // However, don't wait forever: if we don't have a connection after 60 seconds, bail out!
    //
    int totalsleep = 0;
    while (!isStopped() && data.sourceSteps == null) {
      try {
        totalsleep += 10;
        Thread.sleep(10);
      } catch (InterruptedException e) {
        stopAll();
      }
      if (totalsleep > timeOut) {
        throw new KettleException(BaseMessages.getString(PKG, "MappingInput.Exception.UnableToConnectWithParentMapping", "" + (totalsleep / 1000)));
      }
    }
    // OK, now we're ready to read from the parent source steps.
    data.linked = true;
  }
  Object[] row = getRow();
  if (row == null) {
    setOutputDone();
    return false;
  }
  if (first) {
    first = false;
    // The input row metadata is not the same as the output row metadata.
    // The difference is described in the data interface:
    //
    // String[] data.sourceFieldname
    // String[] data.targetFieldname
    //
    // --> getInputRowMeta() does not correspond to what we're outputting.
    // In essence, we need to rename a couple of fields...
    //
    data.outputRowMeta = getInputRowMeta().clone();
    //
    for (MappingValueRename valueRename : data.valueRenames) {
      ValueMetaInterface valueMeta = data.outputRowMeta.searchValueMeta(valueRename.getSourceValueName());
      if (valueMeta == null) {
        throw new KettleStepException(BaseMessages.getString(PKG, "MappingInput.Exception.UnableToFindMappedValue", valueRename.getSourceValueName()));
      }
      valueMeta.setName(valueRename.getTargetValueName());
      // Rename the field in the input row metadata as well.
      valueMeta = getInputRowMeta().searchValueMeta(valueRename.getSourceValueName());
      if (valueMeta == null) {
        throw new KettleStepException(BaseMessages.getString(PKG, "MappingInput.Exception.UnableToFindMappedValue", valueRename.getSourceValueName()));
      }
      valueMeta.setName(valueRename.getTargetValueName());
    }
    // This is a typical side effect of ESR-4178
    data.outputRowMeta.setValueMetaList(data.outputRowMeta.getValueMetaList());
    this.getInputRowMeta().setValueMetaList(this.getInputRowMeta().getValueMetaList());
    // The input row meta has now been renamed as needed for the call to meta.getFields() below.
    meta.setInputRowMeta(getInputRowMeta());
    if (meta.isSelectingAndSortingUnspecifiedFields()) {
      // Start from a blank output row meta; getFields() will fill it in the selected, sorted order.
      data.outputRowMeta = new RowMeta();
    } else {
      meta.setInputRowMeta(new RowMeta());
    }
    // Fill the output row meta with the processed fields.
    meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
    if (meta.isSelectingAndSortingUnspecifiedFields()) {
      //
      // Create a list of the indexes to get the right order of fields on the output.
      //
      data.fieldNrs = new int[data.outputRowMeta.size()];
      for (int i = 0; i < data.outputRowMeta.size(); i++) {
        data.fieldNrs[i] = getInputRowMeta().indexOfValue(data.outputRowMeta.getValueMeta(i).getName());
      }
    }
  }
  // Fill and send the output row.
  if (meta.isSelectingAndSortingUnspecifiedFields()) {
    Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
    for (int i = 0; i < data.fieldNrs.length; i++) {
      outputRowData[i] = row[data.fieldNrs[i]];
    }
    putRow(data.outputRowMeta, outputRowData);
  } else {
    putRow(data.outputRowMeta, row);
  }
  return true;
}
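The index remapping in the isSelectingAndSortingUnspecifiedFields() branch is easy to miss. Below is a small sketch (not from the project; field names and values are invented) that isolates the same fieldNrs lookup and copy loop to show how a row is reordered to match the output row metadata:

import java.util.Arrays;

import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class FieldReorderSketch {
  public static void main(String[] args) {
    // Incoming field order from the parent transformation...
    RowMetaInterface inputRowMeta = new RowMeta();
    inputRowMeta.addValueMeta(new ValueMetaString("c"));
    inputRowMeta.addValueMeta(new ValueMetaString("a"));
    inputRowMeta.addValueMeta(new ValueMetaString("b"));
    Object[] row = new Object[] { "3", "1", "2" };

    // ...and the sorted order the step wants to output
    // (what getFields() would produce with "select & sort unspecified fields" enabled).
    RowMetaInterface outputRowMeta = new RowMeta();
    outputRowMeta.addValueMeta(new ValueMetaString("a"));
    outputRowMeta.addValueMeta(new ValueMetaString("b"));
    outputRowMeta.addValueMeta(new ValueMetaString("c"));

    // Same index lookup as data.fieldNrs in processRow() above.
    int[] fieldNrs = new int[outputRowMeta.size()];
    for (int i = 0; i < outputRowMeta.size(); i++) {
      fieldNrs[i] = inputRowMeta.indexOfValue(outputRowMeta.getValueMeta(i).getName());
    }

    // Same copy loop as the putRow() branch above.
    Object[] outputRowData = RowDataUtil.allocateRowData(outputRowMeta.size());
    for (int i = 0; i < fieldNrs.length; i++) {
      outputRowData[i] = row[fieldNrs[i]];
    }

    // Prints [1, 2, 3]; allocateRowData() may over-allocate, so only the first size() slots are shown.
    System.out.println(Arrays.toString(Arrays.copyOf(outputRowData, outputRowMeta.size())));
  }
}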
Use of org.pentaho.di.trans.steps.mapping.MappingValueRename in project pentaho-kettle by pentaho.
The class SimpleMappingMeta, method getFields:
public void getFields(RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore) throws KettleStepException {
  // First load some interesting data...
  // Then see which fields get added to the row.
  //
  TransMeta mappingTransMeta = null;
  try {
    mappingTransMeta = loadMappingMeta(this, repository, metaStore, space, mappingParameters.isInheritingAllVariables());
  } catch (KettleException e) {
    throw new KettleStepException(BaseMessages.getString(PKG, "SimpleMappingMeta.Exception.UnableToLoadMappingTransformation"), e);
  }
  // The output field structure may also depend on the input parameters (parameter substitution, for instance).
  if (mappingParameters != null && mappingTransMeta != null) {
    // Just set the variables in the transformation statically.
    // This just means: set a number of variables or parameter values:
    //
    StepWithMappingMeta.activateParams(mappingTransMeta, mappingTransMeta, space, mappingTransMeta.listParameters(), mappingParameters.getVariable(), mappingParameters.getInputField(), mappingParameters.isInheritingAllVariables());
  }
  // Keep track of all the fields that need renaming...
  //
  List<MappingValueRename> inputRenameList = new ArrayList<MappingValueRename>();
  //
  // Before we ask the mapping outputs anything, we should teach the mapping
  // input steps in the sub-transformation about the data coming in...
  //
  RowMetaInterface inputRowMeta;
  // The row metadata that we pass to the mapping input step
  // (definition.getOutputStep()) is "row".
  // However, we do need to re-map some fields...
  //
  inputRowMeta = row.clone();
  if (!inputRowMeta.isEmpty()) {
    for (MappingValueRename valueRename : inputMapping.getValueRenames()) {
      ValueMetaInterface valueMeta = inputRowMeta.searchValueMeta(valueRename.getSourceValueName());
      if (valueMeta == null) {
        throw new KettleStepException(BaseMessages.getString(PKG, "SimpleMappingMeta.Exception.UnableToFindField", valueRename.getSourceValueName()));
      }
      valueMeta.setName(valueRename.getTargetValueName());
    }
  }
  // What is this mapping input step?
  //
  StepMeta mappingInputStep = mappingTransMeta.findMappingInputStep(null);
  // We're certain it's a MappingInput step...
  //
  MappingInputMeta mappingInputMeta = (MappingInputMeta) mappingInputStep.getStepMetaInterface();
  // Inform the mapping input step about what it's going to receive...
  //
  mappingInputMeta.setInputRowMeta(inputRowMeta);
  // What values are we changing names for: already done!
  //
  mappingInputMeta.setValueRenames(null);
  // Keep a list of the input renames that have to be changed back on the output side.
  //
  if (inputMapping.isRenamingOnOutput()) {
    SimpleMapping.addInputRenames(inputRenameList, inputMapping.getValueRenames());
  }
  StepMeta mappingOutputStep = mappingTransMeta.findMappingOutputStep(null);
  // We know it's a mapping output step...
  MappingOutputMeta mappingOutputMeta = (MappingOutputMeta) mappingOutputStep.getStepMetaInterface();
  // Change a few columns.
  mappingOutputMeta.setOutputValueRenames(outputMapping.getValueRenames());
  // Perhaps we need to change a few input columns back to the original?
  //
  mappingOutputMeta.setInputValueRenames(inputRenameList);
  // Now we know what's going to come out of there...
  // This is going to be the full row, including all the remapping, etc.
  //
  RowMetaInterface mappingOutputRowMeta = mappingTransMeta.getStepFields(mappingOutputStep);
  row.clear();
  row.addRowMeta(mappingOutputRowMeta);
}
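Here is a minimal sketch of the input-side rename this method performs before handing the row metadata to the MappingInput step (not from the project; the field names are invented and a plain-string exception message stands in for the BaseMessages lookup):

import java.util.Arrays;
import java.util.List;

import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.trans.steps.mapping.MappingValueRename;

public class InputRenameSketch {

  // Mirrors the inputRowMeta preparation above: clone the parent row,
  // rename the mapped fields, and fail if a source field is missing.
  static RowMetaInterface prepareInputRowMeta(RowMetaInterface row, List<MappingValueRename> renames) throws KettleStepException {
    RowMetaInterface inputRowMeta = row.clone();
    for (MappingValueRename valueRename : renames) {
      ValueMetaInterface valueMeta = inputRowMeta.searchValueMeta(valueRename.getSourceValueName());
      if (valueMeta == null) {
        throw new KettleStepException("Unable to find field " + valueRename.getSourceValueName());
      }
      valueMeta.setName(valueRename.getTargetValueName());
    }
    return inputRowMeta;
  }

  public static void main(String[] args) throws Exception {
    RowMetaInterface parentRow = new RowMeta();
    parentRow.addValueMeta(new ValueMetaString("customer_id"));
    parentRow.addValueMeta(new ValueMetaString("total"));

    List<MappingValueRename> renames = Arrays.asList(new MappingValueRename("customer_id", "id"));

    // Prints [id, total]: this is what the MappingInput step of the sub-transformation sees.
    System.out.println(Arrays.toString(prepareInputRowMeta(parentRow, renames).getFieldNames()));
  }
}

When isRenamingOnOutput() is set, these same renames end up in inputRenameList and are reversed again by MappingOutputMeta.getFields(), as shown in the first snippet above.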
Use of org.pentaho.di.trans.steps.mapping.MappingValueRename in project pentaho-kettle by pentaho.
The class RowDataInputMapper, method putRow:
/**
 * Attempts to put the <code>row</code> onto the underlying <code>rowProducer</code> during its timeout period.
 * Returns <code>true</code> if the operation completed successfully and <code>false</code> otherwise.
 *
 * @param rowMeta the input row's metadata
 * @param row     the input row
 * @return <code>true</code> if the <code>row</code> was put successfully
 */
public boolean putRow(RowMetaInterface rowMeta, Object[] row) {
  if (first) {
    first = false;
    // On the first row, build the renamed row metadata once and cache it.
    renamedRowMeta = rowMeta.clone();
    for (MappingValueRename valueRename : inputDefinition.getValueRenames()) {
      ValueMetaInterface valueMeta = renamedRowMeta.searchValueMeta(valueRename.getSourceValueName());
      if (valueMeta != null) {
        valueMeta.setName(valueRename.getTargetValueName());
      }
    }
  }
  return rowProducer.putRow(renamedRowMeta, row, false);
}
Use of org.pentaho.di.trans.steps.mapping.MappingValueRename in project pentaho-kettle by pentaho.
The class MappingInputMeta_GetFields_Test, method simpleRename:
private static Object[] simpleRename() {
  RowMeta inputRowMeta = createRowMeta("field1", "field2");
  List<MappingValueRename> renames = Collections.singletonList(new MappingValueRename("field2", "renamed"));
  String[] fields = new String[] { "field1", "renamed" };
  String[] expected = new String[] { "field1", "renamed" };
  return createCaseData(inputRowMeta, renames, fields, expected);
}
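The helpers createRowMeta(...) and createCaseData(...) are not part of this excerpt. A plausible reconstruction of the first one (purely hypothetical, shown only to make the snippet easier to read on its own) could look like this:

import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.value.ValueMetaString;

// Hypothetical helper (the real implementation is not shown above):
// build a RowMeta of string fields with the given names.
private static RowMeta createRowMeta(String... fieldNames) {
  RowMeta rowMeta = new RowMeta();
  for (String fieldName : fieldNames) {
    rowMeta.addValueMeta(new ValueMetaString(fieldName));
  }
  return rowMeta;
}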