Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
The class JobEntrySetVariables, method execute:
public Result execute(Result result, int nr) throws KettleException {
  result.setResult(true);
  result.setNrErrors(0);
  try {
    List<String> variables = new ArrayList<String>();
    List<String> variableValues = new ArrayList<String>();
    List<Integer> variableTypes = new ArrayList<Integer>();
    String realFilename = environmentSubstitute(filename);
    if (!Utils.isEmpty(realFilename)) {
      // for UTF8 properties files
      try (InputStream is = KettleVFS.getInputStream(realFilename);
          InputStreamReader isr = new InputStreamReader(is, "UTF-8");
          BufferedReader reader = new BufferedReader(isr)) {
        Properties properties = new Properties();
        properties.load(reader);
        for (Object key : properties.keySet()) {
          variables.add((String) key);
          variableValues.add((String) properties.get(key));
          variableTypes.add(fileVariableType);
        }
      } catch (Exception e) {
        throw new KettleException(BaseMessages.getString(PKG, "JobEntrySetVariables.Error.UnableReadPropertiesFile", realFilename));
      }
    }
    for (int i = 0; i < variableName.length; i++) {
      variables.add(variableName[i]);
      variableValues.add(variableValue[i]);
      variableTypes.add(variableType[i]);
    }
    for (int i = 0; i < variables.size(); i++) {
      String varname = variables.get(i);
      String value = variableValues.get(i);
      int type = variableTypes.get(i);
      if (replaceVars) {
        varname = environmentSubstitute(varname);
        value = environmentSubstitute(value);
      }
      // OK, where do we set this value...
      switch (type) {
        case VARIABLE_TYPE_JVM:
          System.setProperty(varname, value);
          setVariable(varname, value);
          Job parentJobTraverse = parentJob;
          while (parentJobTraverse != null) {
            parentJobTraverse.setVariable(varname, value);
            parentJobTraverse = parentJobTraverse.getParentJob();
          }
          break;
        case VARIABLE_TYPE_ROOT_JOB:
          // set variable in this job entry
          setVariable(varname, value);
          Job rootJob = parentJob;
          while (rootJob != null) {
            rootJob.setVariable(varname, value);
            rootJob = rootJob.getParentJob();
          }
          break;
        case VARIABLE_TYPE_CURRENT_JOB:
          changedInitialVariables.put(varname, getVariable(varname));
          setVariable(varname, value);
          if (parentJob != null) {
            parentJob.setVariable(varname, value);
          } else {
            throw new KettleJobException(BaseMessages.getString(PKG, "JobEntrySetVariables.Error.UnableSetVariableCurrentJob", varname));
          }
          break;
        case VARIABLE_TYPE_PARENT_JOB:
          setVariable(varname, value);
          if (parentJob != null) {
            parentJob.setVariable(varname, value);
            Job gpJob = parentJob.getParentJob();
            if (gpJob != null) {
              gpJob.setVariable(varname, value);
            } else {
              throw new KettleJobException(BaseMessages.getString(PKG, "JobEntrySetVariables.Error.UnableSetVariableParentJob", varname));
            }
          } else {
            throw new KettleJobException(BaseMessages.getString(PKG, "JobEntrySetVariables.Error.UnableSetVariableCurrentJob", varname));
          }
          break;
        default:
          break;
      }
      // ok we can process this line
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobEntrySetVariables.Log.SetVariableToValue", varname, value));
      }
    }
  } catch (Exception e) {
    result.setResult(false);
    result.setNrErrors(1);
    logError(BaseMessages.getString(PKG, "JobEntrySetVariables.UnExcpectedError", e.getMessage()));
  }
  return result;
}
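Note that the KettleJobException thrown for the CURRENT_JOB and PARENT_JOB cases never reaches the caller directly: the surrounding catch block folds it into the Result flags. A minimal sketch of exercising the entry, assuming a no-argument constructor and setVariableName/setVariableValue/setVariableType setters that are not shown in this listing:

// Sketch: configure and run the entry, then inspect the Result.
// The no-arg constructor and the array setters are assumptions, not shown above.
private void runSetVariablesSketch() throws KettleException {
  JobEntrySetVariables entry = new JobEntrySetVariables();
  entry.setVariableName(new String[] { "MY_VAR" });
  entry.setVariableValue(new String[] { "some value" });
  entry.setVariableType(new int[] { JobEntrySetVariables.VARIABLE_TYPE_CURRENT_JOB });
  Result result = entry.execute(new Result(), 0);
  if (!result.getResult() || result.getNrErrors() > 0) {
    // With no parent job, the CURRENT_JOB branch throws the KettleJobException above,
    // which the outer catch converts into these error flags.
    System.err.println("Setting variables failed");
  }
}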
Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
The class SFTPClient, method get:
public void get(FileObject localFile, String remoteFile) throws KettleJobException {
  OutputStream localStream = null;
  try {
    localStream = KettleVFS.getOutputStream(localFile, false);
    c.get(remoteFile, localStream);
  } catch (SftpException e) {
    throw new KettleJobException(e);
  } catch (IOException e) {
    throw new KettleJobException(e);
  } finally {
    if (localStream != null) {
      try {
        localStream.close();
      } catch (IOException ignore) {
        // Ignore any IOException, as we're trying to close the stream anyways
      }
    }
  }
}
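Callers of get() see any transfer problem as a single checked KettleJobException wrapping the underlying SftpException or IOException. A minimal download sketch, assuming an already-connected SFTPClient and a hypothetical local path:

// Sketch: download one remote file, assuming 'client' is already connected and logged in.
private void downloadReport(SFTPClient client) {
  try {
    FileObject localFile = KettleVFS.getFileObject("/tmp/report.csv");
    client.get(localFile, "/outgoing/report.csv");
  } catch (KettleException e) {
    // KettleJobException (a KettleException subclass) carries the wrapped
    // SftpException or IOException from the listing above.
    System.err.println("SFTP download failed: " + e.getMessage());
  }
}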
Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
The class SFTPClient, method folderExists:
public boolean folderExists(String foldername) {
  boolean retval = false;
  try {
    SftpATTRS attrs = c.stat(foldername);
    if (attrs == null) {
      return false;
    }
    if ((attrs.getFlags() & SftpATTRS.SSH_FILEXFER_ATTR_PERMISSIONS) == 0) {
      throw new KettleJobException("Unknown permissions error");
    }
    retval = attrs.isDir();
  } catch (Exception e) {
    // Folder can not be found!
  }
  return retval;
}
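Because folderExists() swallows the stat() failure and simply reports false, callers typically use it as a guard before creating the directory. A short sketch, assuming the client also exposes a createFolder(String) method that is not shown in this listing:

// Sketch: ensure the target directory exists before uploading;
// createFolder(String) is assumed here, not shown above.
String targetDir = "/outgoing/reports";
if (!client.folderExists(targetDir)) {
  try {
    client.createFolder(targetDir);
  } catch (KettleJobException e) {
    System.err.println("Could not create " + targetDir + ": " + e.getMessage());
  }
}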
Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
The class SFTPClient, method login:
public void login(String password) throws KettleJobException {
  this.password = password;
  s.setPassword(this.getPassword());
  try {
    java.util.Properties config = new java.util.Properties();
    config.put("StrictHostKeyChecking", "no");
    // set compression property
    // zlib, none
    String compress = getCompression();
    if (compress != null) {
      config.put(COMPRESSION_S2C, compress);
      config.put(COMPRESSION_C2S, compress);
    }
    s.setConfig(config);
    s.connect();
    Channel channel = s.openChannel("sftp");
    channel.connect();
    c = (ChannelSftp) channel;
  } catch (JSchException e) {
    throw new KettleJobException(e);
  }
}
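Any JSchException raised while opening the session or the sftp channel reaches the caller wrapped in a KettleJobException. A minimal connection sketch, assuming an SFTPClient(InetAddress, int, String) constructor that is not part of this listing:

// Sketch: open an SFTP session with password authentication; the
// SFTPClient(InetAddress, int, String) constructor is assumed, not shown above.
private SFTPClient connect(String host, int port, String user, String password) throws KettleJobException {
  try {
    SFTPClient client = new SFTPClient(InetAddress.getByName(host), port, user);
    // login() wraps any JSchException from session/channel setup in a KettleJobException.
    client.login(password);
    return client;
  } catch (UnknownHostException e) {
    // Host resolution happens outside login(); wrap it the same way for the caller.
    throw new KettleJobException(e);
  }
}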
Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
The class Job, method execute:
/**
* Execute a job without previous results. This is a job entry point (not recursive)<br>
* <br>
*
* @return the result of the execution
*
* @throws KettleException
*/
private Result execute() throws KettleException {
  try {
    log.snap(Metrics.METRIC_JOB_START);
    finished.set(false);
    stopped.set(false);
    KettleEnvironment.setExecutionInformation(this, rep);
    log.logMinimal(BaseMessages.getString(PKG, "Job.Comment.JobStarted"));
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobStart.id, this);
    // Start the tracking...
    JobEntryResult jerStart = new JobEntryResult(null, null, BaseMessages.getString(PKG, "Job.Comment.JobStarted"), BaseMessages.getString(PKG, "Job.Reason.Started"), null, 0, null);
    jobTracker.addJobTracker(new JobTracker(jobMeta, jerStart));
    active.set(true);
    // Where do we start?
    JobEntryCopy startpoint;
    // synchronize this to a parent job if needed.
    //
    Object syncObject = this;
    if (parentJob != null) {
      // parallel execution in a job
      syncObject = parentJob;
    }
    synchronized (syncObject) {
      beginProcessing();
    }
    Result res = null;
    if (startJobEntryCopy == null) {
      startpoint = jobMeta.findJobEntry(JobMeta.STRING_SPECIAL_START, 0, false);
    } else {
      startpoint = startJobEntryCopy;
      res = startJobEntryResult;
    }
    if (startpoint == null) {
      throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.CounldNotFindStartingPoint"));
    }
    JobEntryResult jerEnd = null;
    if (startpoint.isStart()) {
      // Perform optional looping in the special Start job entry...
      //
      // long iteration = 0;
      boolean isFirst = true;
      JobEntrySpecial jes = (JobEntrySpecial) startpoint.getEntry();
      while ((jes.isRepeat() || isFirst) && !isStopped()) {
        isFirst = false;
        res = execute(0, null, startpoint, null, BaseMessages.getString(PKG, "Job.Reason.Started"));
        //
        // if (iteration > 0 && (iteration % 500) == 0) {
        //   System.out.println("other 500 iterations: " + iteration);
        // }
        // iteration++;
        //
      }
      jerEnd = new JobEntryResult(res, jes.getLogChannelId(), BaseMessages.getString(PKG, "Job.Comment.JobFinished"), BaseMessages.getString(PKG, "Job.Reason.Finished"), null, 0, null);
    } else {
      res = execute(0, res, startpoint, null, BaseMessages.getString(PKG, "Job.Reason.Started"));
      jerEnd = new JobEntryResult(res, startpoint.getEntry().getLogChannel().getLogChannelId(), BaseMessages.getString(PKG, "Job.Comment.JobFinished"), BaseMessages.getString(PKG, "Job.Reason.Finished"), null, 0, null);
    }
    // Save this result...
    jobTracker.addJobTracker(new JobTracker(jobMeta, jerEnd));
    log.logMinimal(BaseMessages.getString(PKG, "Job.Comment.JobFinished"));
    active.set(false);
    finished.set(true);
    return res;
  } finally {
    log.snap(Metrics.METRIC_JOB_STOP);
  }
}
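This private execute() is not called directly; it is reached through the public Job lifecycle. A minimal sketch of running a job from code, assuming the common Job(Repository, JobMeta) constructor and the thread-based start()/waitUntilFinished()/getResult() API rather than anything shown in the listing:

// Sketch: run a job and return its outcome; the constructor and lifecycle
// methods used here are the usual public Job API, assumed rather than shown above.
private Result runJobSketch(JobMeta jobMeta) {
  Job job = new Job(null, jobMeta); // null: no repository
  job.start();                      // eventually reaches the private execute() above
  job.waitUntilFinished();
  // A job without a START entry makes execute() throw the KettleJobException shown above.
  return job.getResult();
}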