use of org.pentaho.di.core.exception.KettleStepException in project pentaho-kettle by pentaho.
The class SetVariable, method setValue.
private void setValue(Object[] rowData, int i, boolean usedefault) throws KettleException {
  // Set the appropriate environment variable
  //
  String value = null;
  if (usedefault) {
    value = environmentSubstitute(meta.getDefaultValue()[i]);
  } else {
    int index = data.outputMeta.indexOfValue(meta.getFieldName()[i]);
    if (index < 0) {
      throw new KettleException("Unable to find field [" + meta.getFieldName()[i] + "] in input row");
    }
    ValueMetaInterface valueMeta = data.outputMeta.getValueMeta(index);
    Object valueData = rowData[index];

    // Convert the field value to a String, with or without the field's formatting.
    //
    if (meta.isUsingFormatting()) {
      value = valueMeta.getString(valueData);
    } else {
      value = valueMeta.getCompatibleString(valueData);
    }
  }
  if (value == null) {
    value = "";
  }

  // Get the variable name
  String varname = meta.getVariableName()[i];
  if (Utils.isEmpty(varname)) {
    if (Utils.isEmpty(value)) {
      throw new KettleException("Neither a variable name nor a value was specified on line #" + (i + 1));
    } else {
      throw new KettleException("There was no variable name specified for value [" + value + "]");
    }
  }

  Job parentJob = null;

  // We always set the variable in this step and in the parent transformation...
  //
  setVariable(varname, value);

  // Set the variable in the transformation and in all parent transformations.
  //
  Trans trans = getTrans();
  trans.setVariable(varname, value);
  while (trans.getParentTrans() != null) {
    trans = trans.getParentTrans();
    trans.setVariable(varname, value);
  }

  // Depending on the configured scope, push the variable further up the job hierarchy.
  //
  switch (meta.getVariableType()[i]) {
    case SetVariableMeta.VARIABLE_TYPE_JVM:
      System.setProperty(varname, value);
      // Propagate to all parent jobs as well.
      parentJob = trans.getParentJob();
      while (parentJob != null) {
        parentJob.setVariable(varname, value);
        parentJob = parentJob.getParentJob();
      }
      break;
    case SetVariableMeta.VARIABLE_TYPE_ROOT_JOB:
      // Walk up to the root job, setting the variable on every job along the way.
      parentJob = trans.getParentJob();
      while (parentJob != null) {
        parentJob.setVariable(varname, value);
        parentJob = parentJob.getParentJob();
      }
      break;
    case SetVariableMeta.VARIABLE_TYPE_GRAND_PARENT_JOB:
      // Set the variable in the parent job
      //
      parentJob = trans.getParentJob();
      if (parentJob != null) {
        parentJob.setVariable(varname, value);
      } else {
        throw new KettleStepException("Can't set variable [" + varname + "] on parent job: the parent job is not available");
      }

      // Set the variable on the grand-parent job
      //
      VariableSpace gpJob = trans.getParentJob().getParentJob();
      if (gpJob != null) {
        gpJob.setVariable(varname, value);
      } else {
        throw new KettleStepException("Can't set variable [" + varname + "] on grand parent job: the grand parent job is not available");
      }
      break;
    case SetVariableMeta.VARIABLE_TYPE_PARENT_JOB:
      // Set the variable in the parent job
      //
      parentJob = trans.getParentJob();
      if (parentJob != null) {
        parentJob.setVariable(varname, value);
      } else {
        throw new KettleStepException("Can't set variable [" + varname + "] on parent job: the parent job is not available");
      }
      break;
    default:
      break;
  }
  logBasic(BaseMessages.getString(PKG, "SetVariable.Log.SetVariableToValue", meta.getVariableName()[i], value));
}
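The switch above differs between cases only in how far up the parent chain the variable is pushed. Below is a minimal, self-contained sketch of that scope-walking idea; the Scope class and its method names are hypothetical stand-ins, not Kettle API.

import java.util.HashMap;
import java.util.Map;

public class Scope {

    private final Map<String, String> vars = new HashMap<>();

    private final Scope parent; // null for the root scope

    public Scope(Scope parent) {
        this.parent = parent;
    }

    /** Sets the variable here and in every enclosing scope, like VARIABLE_TYPE_ROOT_JOB. */
    public void setVarUpToRoot(String name, String value) {
        for (Scope s = this; s != null; s = s.parent) {
            s.vars.put(name, value);
        }
    }

    /** Resolves a variable locally first, then falls back to the parent chain. */
    public String getVar(String name) {
        for (Scope s = this; s != null; s = s.parent) {
            String v = s.vars.get(name);
            if (v != null) {
                return v;
            }
        }
        return null;
    }

    public static void main(String[] args) {
        Scope root = new Scope(null);
        Scope job = new Scope(root);
        Scope trans = new Scope(job);
        trans.setVarUpToRoot("OUTPUT_DIR", "/tmp/out");
        System.out.println(root.getVar("OUTPUT_DIR")); // /tmp/out
    }
}

Pushing the value onto every ancestor is what makes it visible to other children that resolve variables bottom-up through their parents, which is why setValue() walks both the transformation chain and the job hierarchy.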
use of org.pentaho.di.core.exception.KettleStepException in project pentaho-kettle by pentaho.
The class SingleThreader, method prepareMappingExecution.
public void prepareMappingExecution() throws KettleException {
  SingleThreaderData singleThreaderData = getData();

  // Set the type to single threaded in case the user forgot...
  //
  singleThreaderData.mappingTransMeta.setTransformationType(TransformationType.SingleThreaded);

  // Create the transformation from meta-data...
  singleThreaderData.mappingTrans = new Trans(singleThreaderData.mappingTransMeta, getTrans());

  // Pass the parameters down to the sub-transformation.
  //
  StepWithMappingMeta.activateParams(singleThreaderData.mappingTrans, singleThreaderData.mappingTrans, this, singleThreaderData.mappingTrans.listParameters(), meta.getParameters(), meta.getParameterValues());
  singleThreaderData.mappingTrans.activateParameters();

  // Disable thread priority management as it will slow things down needlessly.
  // The single threaded engine doesn't use threads and doesn't need row locking.
  // Note: the misspelling in setUsingThreadPriorityManagment() is the actual Kettle method name.
  //
  singleThreaderData.mappingTrans.getTransMeta().setUsingThreadPriorityManagment(false);

  // Leave a path up so that we can set variables in sub-transformations...
  //
  singleThreaderData.mappingTrans.setParentTrans(getTrans());

  // Pass down the safe mode flag to the mapping...
  //
  singleThreaderData.mappingTrans.setSafeModeEnabled(getTrans().isSafeModeEnabled());

  // Pass down the metrics gathering flag to the mapping...
  //
  singleThreaderData.mappingTrans.setGatheringMetrics(getTrans().isGatheringMetrics());

  // Also set the name of this step in the mapping transformation, for logging purposes.
  //
  singleThreaderData.mappingTrans.setMappingStepName(getStepname());
  initServletConfig();

  // Prepare the execution.
  //
  singleThreaderData.mappingTrans.prepareExecution(null);

  // If the mapping starts with a Mapping Input step, reset its source information.
  //
  if (singleThreaderData.injectStepMeta.isMappingInput()) {
    MappingInputData mappingInputData = (MappingInputData) singleThreaderData.mappingTrans.findDataInterface(singleThreaderData.injectStepMeta.getName());
    mappingInputData.sourceSteps = new StepInterface[0];
    mappingInputData.valueRenames = new ArrayList<MappingValueRename>();
  }

  // Add a row producer on the inject step and a row listener on the retrieve step.
  singleThreaderData.rowProducer = singleThreaderData.mappingTrans.addRowProducer(meta.getInjectStep(), 0);
  StepInterface retrieveStep = singleThreaderData.mappingTrans.getStepInterface(meta.getRetrieveStep(), 0);
  retrieveStep.addRowListener(new RowAdapter() {

    @Override
    public void rowWrittenEvent(RowMetaInterface rowMeta, Object[] row) throws KettleStepException {
      // Simply pass it along to the next steps after the SingleThreader.
      //
      SingleThreader.this.putRow(rowMeta, row);
    }
  });
  singleThreaderData.mappingTrans.startThreads();

  // Create the executor...
  singleThreaderData.executor = new SingleThreadedTransExecutor(singleThreaderData.mappingTrans);

  // Initialize the executor; fail fast if initialization does not succeed.
  try {
    boolean ok = singleThreaderData.executor.init();
    if (!ok) {
      throw new KettleException(BaseMessages.getString(PKG, "SingleThreader.Exception.UnableToInitSingleThreadedTransformation"));
    }
  } catch (KettleException e) {
    throw new KettleException(BaseMessages.getString(PKG, "SingleThreader.Exception.UnableToPrepareExecutionOfMapping"), e);
  }

  // Add the mapping transformation to the active sub-transformations map in the parent transformation.
  //
  getTrans().addActiveSubTransformation(getStepname(), singleThreaderData.mappingTrans);
}
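prepareMappingExecution() only wires the mapping up; nothing runs until the executor is driven iteration by iteration (in the real step this happens from processRow()). The toy analog below, which uses illustrative names rather than the Kettle API, shows the single-threaded execution model: each call to oneIteration() pushes every pending row through all steps exactly once, on the caller's thread.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.function.Function;

public class ToySingleThreadedExecutor {

    private final List<Function<String, String>> steps;

    private final Queue<String> input = new ArrayDeque<>();

    private final List<String> output = new ArrayList<>();

    public ToySingleThreadedExecutor(List<Function<String, String>> steps) {
        this.steps = steps;
    }

    /** Loosely analogous to RowProducer.putRow(): queue a row for the next iteration. */
    public void inject(String row) {
        input.add(row);
    }

    /** Processes every pending row through all steps once; returns true if any work was done. */
    public boolean oneIteration() {
        boolean didWork = false;
        String row;
        while ((row = input.poll()) != null) {
            for (Function<String, String> step : steps) {
                row = step.apply(row);
            }
            output.add(row);
            didWork = true;
        }
        return didWork;
    }

    public List<String> results() {
        return output;
    }

    public static void main(String[] args) {
        List<Function<String, String>> steps = List.of(String::trim, String::toUpperCase);
        ToySingleThreadedExecutor executor = new ToySingleThreadedExecutor(steps);
        executor.inject("  hello ");
        executor.inject(" world  ");
        while (executor.oneIteration()) {
            // Keep iterating until no step has pending work.
        }
        System.out.println(executor.results()); // [HELLO, WORLD]
    }
}

Because everything happens on one thread, no row locking or thread scheduling is needed, which is exactly why the code above disables thread priority management for the mapping.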
use of org.pentaho.di.core.exception.KettleStepException in project pentaho-kettle by pentaho.
The class SingleThreaderMeta, method getFields.
public void getFields(RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore) throws KettleStepException {
  // First load the mapping transformation, then see which fields it adds to the row.
  //
  TransMeta mappingTransMeta = null;
  try {
    mappingTransMeta = loadSingleThreadedTransMeta(this, repository, space);
  } catch (KettleException e) {
    throw new KettleStepException(BaseMessages.getString(PKG, "SingleThreaderMeta.Exception.UnableToLoadMappingTransformation"), e);
  }

  // The output of this step is exactly the output of the retrieve step in the mapping.
  //
  row.clear();
  if (!Utils.isEmpty(space.environmentSubstitute(retrieveStep))) {
    RowMetaInterface stepFields = mappingTransMeta.getStepFields(retrieveStep);
    row.addRowMeta(stepFields);
  }
}
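The getFields() contract shown here is worth calling out: rather than appending to the incoming row layout, the step clears it and adopts the retrieve step's fields wholesale, so downstream steps see exactly what the mapping emits. A self-contained toy version of that contract follows; all names are illustrative, not Kettle API.

import java.util.ArrayList;
import java.util.List;

public class GetFieldsDemo {

    /** Stand-in for RowMetaInterface: just an ordered list of field names. */
    static class RowLayout {
        final List<String> fields = new ArrayList<>();

        void clear() {
            fields.clear();
        }

        void addAll(RowLayout other) {
            fields.addAll(other.fields);
        }
    }

    /** Stand-in for the mapping: maps a step name to the fields it produces. */
    static RowLayout stepFieldsOf(String stepName) {
        RowLayout layout = new RowLayout();
        if ("retrieve".equals(stepName)) {
            layout.fields.add("id");
            layout.fields.add("total");
        }
        return layout;
    }

    /** Mirrors getFields(): wipe the incoming layout, adopt the retrieve step's. */
    static void getFields(RowLayout row, String retrieveStep) {
        row.clear();
        if (retrieveStep != null && !retrieveStep.isEmpty()) {
            row.addAll(stepFieldsOf(retrieveStep));
        }
    }

    public static void main(String[] args) {
        RowLayout row = new RowLayout();
        row.fields.add("incoming_field"); // discarded by row.clear(), as in the real step
        getFields(row, "retrieve");
        System.out.println(row.fields); // [id, total]
    }
}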
use of org.pentaho.di.core.exception.KettleStepException in project pentaho-kettle by pentaho.
The class SortedMerge, method getRowSorted.
/**
 * We read from all streams in partition merge mode. For that we need at least one row on all input rowsets...
 * If we don't have a row, we wait for one.
 *
 * TODO: keep the inputRowSets() list sorted and go from there. That should dramatically improve speed as you
 * only need half as many comparisons.
 *
 * @return the next row
 */
private synchronized Object[] getRowSorted() throws KettleException {
  if (first) {
    first = false;

    // Verify that socket connections to all the remote input steps are opened
    // before we start to read/write...
    //
    openRemoteInputStepSocketsOnce();

    // Read one row from all rowsets...
    //
    data.sortedBuffer = new ArrayList<RowSetRow>();
    data.rowMeta = null;

    // PDI-1212:
    // If one of the inputRowSets holds a null row (the input yields 0 rows), then the
    // null rowSet is removed from the InputRowSet buffer (BaseStep.getRowFrom()),
    // which throws this loop off by one (the next set never gets processed).
    // Instead of modifying BaseStep, I figure reversing the loop here would
    // effect change in fewer areas. If the reverse loop causes a problem, please
    // re-open http://jira.pentaho.com/browse/PDI-1212.
    List<RowSet> inputRowSets = getInputRowSets();
    for (int i = inputRowSets.size() - 1; i >= 0 && !isStopped(); i--) {
      RowSet rowSet = inputRowSets.get(i);
      Object[] row = getRowFrom(rowSet);
      if (row != null) {
        // Add this row to the sortedBuffer...
        // Which is not yet sorted, we'll get to that later.
        //
        data.sortedBuffer.add(new RowSetRow(rowSet, rowSet.getRowMeta(), row));
        if (data.rowMeta == null) {
          data.rowMeta = rowSet.getRowMeta().clone();
        }

        if (data.fieldIndices == null) {
          // Get the indexes of the specified sort fields...
          data.fieldIndices = new int[meta.getFieldName().length];
          for (int f = 0; f < data.fieldIndices.length; f++) {
            data.fieldIndices[f] = data.rowMeta.indexOfValue(meta.getFieldName()[f]);
            if (data.fieldIndices[f] < 0) {
              throw new KettleStepException("Unable to find fieldname [" + meta.getFieldName()[f] + "] in row : " + data.rowMeta);
            }
            data.rowMeta.getValueMeta(data.fieldIndices[f]).setSortedDescending(!meta.getAscending()[f]);
          }
        }
      }
      data.comparator = new Comparator<RowSetRow>() {

        public int compare(RowSetRow o1, RowSetRow o2) {
          try {
            return o1.getRowMeta().compare(o1.getRowData(), o2.getRowData(), data.fieldIndices);
          } catch (KettleValueException e) {
            // TODO see if we should fire off alarms over here... Perhaps throw a RuntimeException.
            return 0;
          }
        }
      };

      // Now sort the sortedBuffer for the first time.
      //
      Collections.sort(data.sortedBuffer, data.comparator);
    }
  }

  // If the buffer is empty, all input row sets are exhausted and we're done.
  if (data.sortedBuffer.isEmpty()) {
    return null;
  }

  // Now that we have all rows sorted, all we need to do is find the smallest row.
  // The smallest row is the first in our case...
  //
  RowSetRow smallestRow = data.sortedBuffer.get(0);
  data.sortedBuffer.remove(0);
  Object[] outputRowData = smallestRow.getRowData();

  // We read another row from the row set where the smallest row came from.
  // That way we exhaust all row sets.
  //
  Object[] extraRow = getRowFrom(smallestRow.getRowSet());

  // If we got a row, insert it into the sorted buffer at the right position.
  if (extraRow != null) {
    // Add this one to the sortedBuffer
    //
    RowSetRow add = new RowSetRow(smallestRow.getRowSet(), smallestRow.getRowSet().getRowMeta(), extraRow);
    int index = Collections.binarySearch(data.sortedBuffer, add, data.comparator);
    if (index < 0) {
      data.sortedBuffer.add(-index - 1, add);
    } else {
      data.sortedBuffer.add(index, add);
    }
  }

  // In safe mode, verify that the metadata of this row matches what we saw before.
  if (getTrans().isSafeModeEnabled()) {
    // For checking we need to get data and meta.
    //
    safeModeChecking(smallestRow.getRowMeta());
  }
  return outputRowData;
}
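The incremental insert above relies on the java.util.Collections.binarySearch contract: when the key is absent it returns -(insertionPoint) - 1, so -index - 1 recovers the position that keeps the list sorted; when equal elements exist, inserting at the returned index is also order-preserving. A minimal runnable demonstration:

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class SortedInsertDemo {

    public static void main(String[] args) {
        Comparator<Integer> comparator = Comparator.naturalOrder();
        List<Integer> sorted = new ArrayList<>(List.of(1, 3, 5, 7));

        int key = 4;
        int index = Collections.binarySearch(sorted, key, comparator);
        if (index < 0) {
            // Not found: binarySearch returned -(insertionPoint) - 1.
            sorted.add(-index - 1, key);
        } else {
            // Found an equal element: inserting at that index keeps the order.
            sorted.add(index, key);
        }
        System.out.println(sorted); // [1, 3, 4, 5, 7]
    }
}

This keeps each insertion at O(log n) comparisons (plus the array shift), which is why getRowSorted() sorts the buffer only once and then maintains it incrementally.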
use of org.pentaho.di.core.exception.KettleStepException in project pentaho-kettle by pentaho.
The class ExecSQL, method processRow.
@Override
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
  meta = (ExecSQLMeta) smi;
  data = (ExecSQLData) sdi;
  if (!meta.isExecutedEachInputRow()) {
    // The statement was already executed once; emit the single result row.
    RowMetaAndData resultRow = getResultRow(data.result, meta.getUpdateField(), meta.getInsertField(), meta.getDeleteField(), meta.getReadField());
    putRow(resultRow.getRowMeta(), resultRow.getData());

    // Stop processing, this is all we do!
    setOutputDone();
    return false;
  }
  Object[] row = getRow();
  if (row == null) {
    // No more input to be expected...
    setOutputDone();
    return false;
  }
  if (first) {
    // We just got started.
    first = false;
    data.outputRowMeta = getInputRowMeta().clone();
    meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);

    // Find the indexes of the arguments.
    data.argumentIndexes = new int[meta.getArguments().length];
    for (int i = 0; i < meta.getArguments().length; i++) {
      data.argumentIndexes[i] = this.getInputRowMeta().indexOfValue(meta.getArguments()[i]);
      if (data.argumentIndexes[i] < 0) {
        logError(BaseMessages.getString(PKG, "ExecSQL.Log.ErrorFindingField") + meta.getArguments()[i] + "]");
        throw new KettleStepException(BaseMessages.getString(PKG, "ExecSQL.Exception.CouldNotFindField", meta.getArguments()[i]));
      }
      if (meta.isParams()) {
        if (i == 0) {
          // Define the parameters meta-data.
          data.paramsMeta = new RowMeta();
        }
        data.paramsMeta.addValueMeta(getInputRowMeta().getValueMeta(data.argumentIndexes[i]));
      }
    }
    if (!meta.isParams()) {
      // We need to replace question marks by string values.
      // Find the locations of the question marks in the String...
      // We replace the question marks with the values...
      // We ignore quotes etc. to make inserts easier...
      data.markerPositions = new ArrayList<Integer>();
      int len = data.sql.length();
      int pos = len - 1;
      while (pos >= 0) {
        if (data.sql.charAt(pos) == '?') {
          // Save the marker position.
          data.markerPositions.add(Integer.valueOf(pos));
        }
        pos--;
      }
    }
  }
  String sql;
  Object[] paramsData = null;
  if (meta.isParams()) {
    // Get the parameters data: the SQL stays as-is, values are bound separately.
    paramsData = new Object[data.argumentIndexes.length];
    sql = data.sql;
    for (int i = 0; i < data.argumentIndexes.length; i++) {
      paramsData[i] = row[data.argumentIndexes[i]];
    }
  } else {
    int numMarkers = data.markerPositions.size();
    if (numMarkers > 0) {
      StringBuilder buf = new StringBuilder(data.sql);

      // Replace the '?' markers with values, from right to left so that
      // the stored positions remain valid.
      for (int i = 0; i < numMarkers; i++) {
        // Get the appropriate value from the input row...
        //
        int index = data.argumentIndexes[data.markerPositions.size() - i - 1];
        ValueMetaInterface valueMeta = getInputRowMeta().getValueMeta(index);
        Object valueData = row[index];

        // Replace the '?' with the String in the row.
        //
        int pos = data.markerPositions.get(i);
        String replaceValue = valueMeta.getString(valueData);
        replaceValue = Const.NVL(replaceValue, "");
        if (meta.isQuoteString() && (valueMeta.getType() == ValueMetaInterface.TYPE_STRING)) {
          // Have the database dialect do the quoting.
          // This also adds the quotes around the string.
          replaceValue = meta.getDatabaseMeta().quoteSQLString(replaceValue);
        }
        buf.replace(pos, pos + 1, replaceValue);
      }
      sql = buf.toString();
    } else {
      sql = data.sql;
    }
  }
  if (log.isRowLevel()) {
    logRowlevel(BaseMessages.getString(PKG, "ExecSQL.Log.ExecutingSQLScript") + Const.CR + sql);
  }
  boolean sendToErrorRow = false;
  String errorMessage = null;
  try {
    if (meta.isSingleStatement()) {
      data.result = data.db.execStatement(sql, data.paramsMeta, paramsData);
    } else {
      data.result = data.db.execStatements(sql, data.paramsMeta, paramsData);
    }
    RowMetaAndData add = getResultRow(data.result, meta.getUpdateField(), meta.getInsertField(), meta.getDeleteField(), meta.getReadField());
    row = RowDataUtil.addRowData(row, getInputRowMeta().size(), add.getData());
    if (!data.db.isAutoCommit()) {
      data.db.commit();
    }

    // Send it out!
    putRow(data.outputRowMeta, row);
    if (checkFeedback(getLinesWritten())) {
      if (log.isBasic()) {
        logBasic(BaseMessages.getString(PKG, "ExecSQL.Log.LineNumber") + getLinesWritten());
      }
    }
  } catch (KettleException e) {
    if (getStepMeta().isDoingErrorHandling()) {
      sendToErrorRow = true;
      errorMessage = e.toString();
    } else {
      throw new KettleStepException(BaseMessages.getString(PKG, "ExecSQL.Log.ErrorInStep"), e);
    }
    if (sendToErrorRow) {
      // Simply add this row to the error rows.
      putError(getInputRowMeta(), row, 1, errorMessage, null, "ExecSQL001");
    }
  }
  return true;
}
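When parameters are disabled, the step splices values into the SQL text itself. Note that the markers were collected by scanning the SQL from the end, so data.markerPositions is in descending order and replacements run right to left; that way a replacement of a different length never invalidates the positions still to be processed. A self-contained demonstration of the same idiom (the SQL string and values below are made up for illustration):

import java.util.ArrayList;
import java.util.List;

public class MarkerReplaceDemo {

    public static void main(String[] args) {
        String sql = "UPDATE t SET a = ? WHERE b = ?";
        List<String> values = List.of("'one'", "'two'");

        // Scan from the end so positions are collected right-to-left (descending).
        List<Integer> markerPositions = new ArrayList<>();
        for (int pos = sql.length() - 1; pos >= 0; pos--) {
            if (sql.charAt(pos) == '?') {
                markerPositions.add(pos);
            }
        }

        // Replace right-to-left: earlier marker positions stay valid even
        // though the replacement strings have a different length.
        StringBuilder buf = new StringBuilder(sql);
        for (int i = 0; i < markerPositions.size(); i++) {
            int pos = markerPositions.get(i);
            // The i-th marker from the right corresponds to the last-but-i value.
            String value = values.get(markerPositions.size() - i - 1);
            buf.replace(pos, pos + 1, value);
        }
        System.out.println(buf); // UPDATE t SET a = 'one' WHERE b = 'two'
    }
}

Left-to-right replacement would require re-computing every remaining position after each splice; going right to left sidesteps that entirely, at the cost of the slightly awkward reversed index arithmetic seen in processRow().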
Aggregations