Example use of org.pentaho.di.core.exception.KettleJobException in the pentaho-kettle project by Pentaho:
the SFTPClient class, method dir().
/**
 * Lists the names of the regular files (non-directories) in the current remote
 * working directory.
 *
 * @return an array of file names, or {@code null} when the directory contains
 *         no regular files (null-return preserved for existing callers)
 * @throws KettleJobException
 *           if the SFTP listing fails
 */
public String[] dir() throws KettleJobException {
    String[] fileList = null;
    try {
        java.util.Vector<?> v = c.ls(".");
        // Collect only plain-file entries; directories are skipped.
        java.util.List<String> names = new java.util.ArrayList<String>();
        if (v != null) {
            for (int i = 0; i < v.size(); i++) {
                Object obj = v.elementAt(i);
                // instanceof is null-safe, so no separate null check is needed.
                if (obj instanceof com.jcraft.jsch.ChannelSftp.LsEntry) {
                    LsEntry lse = (com.jcraft.jsch.ChannelSftp.LsEntry) obj;
                    if (!lse.getAttrs().isDir()) {
                        names.add(lse.getFilename());
                    }
                }
            }
        }
        if (!names.isEmpty()) {
            fileList = names.toArray(new String[names.size()]);
        }
    } catch (SftpException e) {
        throw new KettleJobException(e);
    }
    return fileList;
}
Example use of org.pentaho.di.core.exception.KettleJobException in the pentaho-kettle project by Pentaho:
the JobEntrySpecial class, method execute().
/**
 * Executes this special job entry. For a START entry, optionally sleeps until
 * the next scheduled execution time (in 1-second slices so the parent job can
 * stop it), then marks the result successful. For a DUMMY entry, passes the
 * previous result through unchanged.
 *
 * @param previousResult
 *          the result of the previous execution, updated in place for START
 * @param nr
 *          the job entry number
 * @return the (possibly updated) result
 * @throws KettleJobException
 *           if the sleep is interrupted
 */
public Result execute(Result previousResult, int nr) throws KettleJobException {
    Result result = previousResult;
    if (isStart()) {
        try {
            long sleepTime = getNextExecutionTime();
            if (sleepTime > 0) {
                parentJob.getLogChannel().logBasic(parentJob.getJobname(), "Sleeping: " + (sleepTime / 1000 / 60) + " minutes (sleep time=" + sleepTime + ")");
                // Sleep in 1-second increments so a stop request is noticed promptly.
                long totalSleep = 0L;
                while (totalSleep < sleepTime && !parentJob.isStopped()) {
                    Thread.sleep(1000L);
                    totalSleep += 1000L;
                }
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status so code further up the stack can
            // still observe that this thread was interrupted.
            Thread.currentThread().interrupt();
            throw new KettleJobException(e);
        }
        result.setResult(true);
    } else if (isDummy()) {
        result = previousResult;
    }
    return result;
}
Example use of org.pentaho.di.core.exception.KettleJobException in the pentaho-kettle project by Pentaho:
the Job class, method endProcessing().
//
// Handle logging at end
/**
 * Finishes processing for this job: determines the final log status
 * (RUNNING / STOP / END), bumps the error count when the job ended without a
 * positive result, stamps the log date, and writes the job log table record
 * when one is configured.
 *
 * @return true, if successful
 * @throws KettleJobException
 *           the kettle job exception
 */
private boolean endProcessing() throws KettleJobException {
    // Still active -> RUNNING; otherwise STOP when stopped, END when done.
    final LogStatus status = isActive() ? LogStatus.RUNNING : (isStopped() ? LogStatus.STOP : LogStatus.END);
    try {
        // A job that completed without a positive result counts as one error.
        if (errors.get() == 0 && result != null && !result.getResult()) {
            errors.incrementAndGet();
        }
        logDate = new Date();
        // Sums errors, read, written, etc.
        final JobLogTable jobLogTable = jobMeta.getJobLogTable();
        if (jobLogTable.isDefined()) {
            writeLogTableInformation(jobLogTable, status);
        }
        return true;
    } catch (Exception e) {
        // In case something else goes wrong.
        throw new KettleJobException(e);
    }
}
Example use of org.pentaho.di.core.exception.KettleJobException in the pentaho-kettle project by Pentaho:
the Job class, method writeLogTableInformation().
/**
 * Writes information to the Job Log table and, when the job has finished
 * (status END), cleans old log records.
 *
 * @param jobLogTable
 *          the configured job log table
 * @param status
 *          the log status to record
 * @throws KettleJobException
 *           if the log record cannot be written
 * @throws KettleDatabaseException
 *           on other database failures
 */
protected void writeLogTableInformation(JobLogTable jobLogTable, LogStatus status) throws KettleJobException, KettleDatabaseException {
    boolean cleanLogRecords = status.equals(LogStatus.END);
    String tableName = jobLogTable.getActualTableName();
    DatabaseMeta logcon = jobLogTable.getDatabaseMeta();
    Database ldb = createDataBase(logcon);
    ldb.shareVariablesWith(this);
    try {
        ldb.connect();
        ldb.setCommit(logCommitSize);
        ldb.writeLogRecord(jobLogTable, status, this, null);
        if (cleanLogRecords) {
            ldb.cleanupLogRecords(jobLogTable);
        }
    } catch (KettleDatabaseException dbe) {
        addErrors(1);
        throw new KettleJobException("Unable to end processing by writing log record to table " + tableName, dbe);
    } finally {
        // Nested try/finally: if commitLog throws, the connection must still
        // be released, otherwise it leaks.
        try {
            if (!ldb.isAutoCommit()) {
                ldb.commitLog(true, jobLogTable);
            }
        } finally {
            ldb.disconnect();
        }
    }
}
Example use of org.pentaho.di.core.exception.KettleJobException in the pentaho-kettle project by Pentaho:
the Job class, method execute().
/**
 * Execute a job with previous results passed in.<br>
 * <br>
 * Execute called by JobEntryJob: don't clear the jobEntryResults.
 *
 * @param nr
 *          The job entry number
 * @param result
 *          the result of the previous execution
 * @return Result of the job execution
 * @throws KettleJobException
 */
public Result execute(int nr, Result result) throws KettleException {
    finished.set(false);
    active.set(true);
    initialized.set(true);
    KettleEnvironment.setExecutionInformation(this, rep);
    // Carry over any input rows produced by a previous execution.
    if (getSourceRows() != null) {
        result.setRows(getSourceRows());
    }
    // A job can only run from its START entry; fail fast when it is missing.
    JobEntryCopy startEntry = jobMeta.findJobEntry(JobMeta.STRING_SPECIAL_START, 0, false);
    if (startEntry == null) {
        throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.CounldNotFindStartingPoint"));
    }
    Result executionResult = execute(nr, result, startEntry, null, BaseMessages.getString(PKG, "Job.Reason.StartOfJobentry"));
    active.set(false);
    return executionResult;
}
Aggregations