Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
Class Job, method beginProcessing:
/**
 * Handle logging at start
 *
 * @return true if it went OK.
 *
 * @throws KettleException
 */
public boolean beginProcessing() throws KettleException {
  currentDate = new Date();
  logDate = new Date();
  startDate = Const.MIN_DATE;
  endDate = currentDate;

  resetErrors();

  final JobLogTable jobLogTable = jobMeta.getJobLogTable();
  int intervalInSeconds = Const.toInt(environmentSubstitute(jobLogTable.getLogInterval()), -1);

  if (jobLogTable.isDefined()) {
    DatabaseMeta logcon = jobMeta.getJobLogTable().getDatabaseMeta();
    String schemaName = environmentSubstitute(jobMeta.getJobLogTable().getActualSchemaName());
    String tableName = environmentSubstitute(jobMeta.getJobLogTable().getActualTableName());
    String schemaAndTable = jobMeta.getJobLogTable().getDatabaseMeta().getQuotedSchemaTableCombination(schemaName, tableName);
    Database ldb = new Database(this, logcon);
    ldb.shareVariablesWith(this);
    ldb.connect();
    ldb.setCommit(logCommitSize);

    try {
      // See if we have to add a batch id...
      Long id_batch = new Long(1);
      if (jobMeta.getJobLogTable().isBatchIdUsed()) {
        id_batch = logcon.getNextBatchId(ldb, schemaName, tableName, jobLogTable.getKeyField().getFieldName());
        setBatchId(id_batch.longValue());
        if (getPassedBatchId() <= 0) {
          setPassedBatchId(id_batch.longValue());
        }
      }

      Object[] lastr = ldb.getLastLogDate(schemaAndTable, jobMeta.getName(), true, LogStatus.END);
      if (!Utils.isEmpty(lastr)) {
        Date last;
        try {
          last = ldb.getReturnRowMeta().getDate(lastr, 0);
        } catch (KettleValueException e) {
          throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.ConversionError", "" + tableName), e);
        }
        if (last != null) {
          startDate = last;
        }
      }

      depDate = currentDate;

      ldb.writeLogRecord(jobMeta.getJobLogTable(), LogStatus.START, this, null);
      if (!ldb.isAutoCommit()) {
        ldb.commitLog(true, jobMeta.getJobLogTable());
      }
      ldb.disconnect();

      // If we need to do periodic logging, make sure to install a timer for this...
      //
      if (intervalInSeconds > 0) {
        final Timer timer = new Timer(getName() + " - interval logging timer");
        TimerTask timerTask = new TimerTask() {
          public void run() {
            try {
              endProcessing();
            } catch (Exception e) {
              log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformIntervalLogging"), e);
              // Also stop the show...
              //
              errors.incrementAndGet();
              stopAll();
            }
          }
        };
        timer.schedule(timerTask, intervalInSeconds * 1000, intervalInSeconds * 1000);

        addJobListener(new JobAdapter() {
          public void jobFinished(Job job) {
            timer.cancel();
          }
        });
      }

      // Add a listener at the end of the job to take care of writing the final job
      // log record...
      //
      addJobListener(new JobAdapter() {
        public void jobFinished(Job job) throws KettleException {
          try {
            endProcessing();
          } catch (KettleJobException e) {
            log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToWriteToLoggingTable", jobLogTable.toString()), e);
            // The job is failed in case writing the log database record fails!
            throw new KettleException(e);
          }
        }
      });
    } catch (KettleDatabaseException dbe) {
      // This happens even before actual execution.
      addErrors(1);
      throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.UnableToProcessLoggingStart", "" + tableName), dbe);
    } finally {
      ldb.disconnect();
    }
  }

  // If we need to write out the job entry logging information, do so at the end of the job:
  //
  JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
  if (jobEntryLogTable.isDefined()) {
    addJobListener(new JobAdapter() {
      public void jobFinished(Job job) throws KettleException {
        try {
          writeJobEntryLogInformation();
        } catch (KettleException e) {
          throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformJobEntryLoggingAtJobEnd"), e);
        }
      }
    });
  }

  // If we need to write the log channel hierarchy and lineage information,
  // add a listener for that too...
  //
  ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
  if (channelLogTable.isDefined()) {
    addJobListener(new JobAdapter() {
      public void jobFinished(Job job) throws KettleException {
        try {
          writeLogChannelInformation();
        } catch (KettleException e) {
          throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformLoggingAtTransEnd"), e);
        }
      }
    });
  }

  JobExecutionExtension extension = new JobExecutionExtension(this, result, null, false);
  ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobBeginProcessing.id, extension);

  return true;
}
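For orientation, a minimal caller sketch, not taken from the source above: the repository and jobMeta variables and the exception handling are assumptions for illustration. It shows where the KettleJobException raised during logging setup would surface.

// Hypothetical fragment, assumed to live in a method with `repository` and `jobMeta` in scope.
Job job = new Job(repository, jobMeta);
try {
  job.beginProcessing(); // writes the START record to the job log table, if one is defined
} catch (KettleJobException e) {
  // The logging-table failure path above: the last log date could not be read
  // or the START record could not be written; an error was already counted.
  job.getLogChannel().logError("Unable to initialize job logging", e);
} catch (KettleException e) {
  // Any other initialization failure.
  job.getLogChannel().logError("Unexpected error starting the job", e);
}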
Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
Class RepositoryBrowserController, method rename:
public ObjectId rename(String id, String path, String newName, String type, String oldName) throws KettleException {
  RepositoryDirectoryInterface repositoryDirectoryInterface = findDirectory(path);
  ObjectId objectId = null;
  switch (type) {
    case JOB:
      if (getRepository().exists(newName, repositoryDirectoryInterface, RepositoryObjectType.JOB)) {
        throw new KettleObjectExistsException();
      }
      if (isJobOpened(id, path, oldName)) {
        throw new KettleJobException();
      }
      renameRecent(id, type, newName);
      objectId = getRepository().renameJob(() -> id, repositoryDirectoryInterface, newName);
      break;
    case TRANSFORMATION:
      if (getRepository().exists(newName, repositoryDirectoryInterface, RepositoryObjectType.TRANSFORMATION)) {
        throw new KettleObjectExistsException();
      }
      if (isTransOpened(id, path, oldName)) {
        throw new KettleTransException();
      }
      renameRecent(id, type, newName);
      objectId = getRepository().renameTransformation(() -> id, repositoryDirectoryInterface, newName);
      break;
    case FOLDER:
      isFileOpenedInFolder(path);
      RepositoryDirectoryInterface parent = findDirectory(path).getParent();
      if (parent == null) {
        parent = findDirectory(path);
      }
      RepositoryDirectoryInterface child = parent.findChild(newName);
      if (child != null) {
        throw new KettleObjectExistsException();
      }
      if (getRepository() instanceof RepositoryExtended) {
        objectId = ((RepositoryExtended) getRepository()).renameRepositoryDirectory(() -> id, null, newName, true);
      } else {
        objectId = getRepository().renameRepositoryDirectory(() -> id, null, newName);
      }
      break;
  }
  return objectId;
}
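A hedged sketch of driving rename from client code and mapping its three failure modes; the controller instance, the literal type token "job", and the paths are illustrative assumptions.

// Hypothetical fragment; assumes `controller` is a configured RepositoryBrowserController
// and that "job" is the value of the JOB type token used in the switch above.
try {
  ObjectId renamed = controller.rename("1", "/home/admin", "loadOrders", "job", "load_orders");
} catch (KettleObjectExistsException e) {
  // A job named "loadOrders" already exists in the target directory.
} catch (KettleJobException e) {
  // The job is currently open in the UI, so the rename is refused.
} catch (KettleException e) {
  // Any other repository failure.
}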
Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
Class RepositoryBrowserController, method remove:
public boolean remove(String id, String name, String path, String type) throws KettleException {
  try {
    switch (type) {
      case JOB:
        if (isJobOpened(id, path, name)) {
          throw new KettleJobException();
        }
        getRepository().deleteJob(() -> id);
        break;
      case TRANSFORMATION:
        if (isTransOpened(id, path, name)) {
          throw new KettleTransException();
        }
        getRepository().deleteTransformation(() -> id);
        break;
      case FOLDER:
        isFileOpenedInFolder(path);
        removeRecentsUsingPath(path);
        RepositoryDirectoryInterface repositoryDirectoryInterface = findDirectory(path);
        if (getRepository() instanceof RepositoryExtended) {
          ((RepositoryExtended) getRepository()).deleteRepositoryDirectory(repositoryDirectoryInterface, true);
        } else {
          getRepository().deleteRepositoryDirectory(repositoryDirectoryInterface);
        }
        break;
    }
    return true;
  } catch (KettleTransException | KettleJobException ke) {
    throw ke;
  } catch (Exception e) {
    return false;
  }
}
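Note that the catch clause rethrows KettleTransException and KettleJobException so that "file is open" conditions reach the caller, while every other failure is swallowed into a false return. A caller sketch under the same assumptions as above:

// Hypothetical fragment; identifiers are illustrative.
try {
  if (!controller.remove("1", "loadOrders", "/home/admin", "job")) {
    // Some other repository error occurred and was converted to false by remove().
  }
} catch (KettleJobException e) {
  // The job is open in the UI; ask the user to close it before deleting.
} catch (KettleException e) {
  // KettleTransException or another repository failure rethrown by remove().
}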
Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
Class SFTPClient, method setProxy:
public void setProxy(String host, String port, String user, String pass, String proxyType) throws KettleJobException {
  if (Utils.isEmpty(host) || Const.toInt(port, 0) == 0) {
    throw new KettleJobException("Proxy server name must be set and server port must be greater than zero.");
  }
  Proxy proxy = null;
  String proxyhost = host + ":" + port;
  if (proxyType.equals(PROXY_TYPE_HTTP)) {
    proxy = new ProxyHTTP(proxyhost);
    if (!Utils.isEmpty(user)) {
      ((ProxyHTTP) proxy).setUserPasswd(user, pass);
    }
  } else if (proxyType.equals(PROXY_TYPE_SOCKS5)) {
    proxy = new ProxySOCKS5(proxyhost);
    if (!Utils.isEmpty(user)) {
      ((ProxySOCKS5) proxy).setUserPasswd(user, pass);
    }
  }
  s.setProxy(proxy);
}
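A minimal connection sketch with an HTTP proxy, where s above is the client's JSch Session. The host names, port, and credentials are placeholder assumptions, the three-argument constructor shown is assumed from the class's public API, and checked exceptions are left to the enclosing method.

// Hypothetical values for illustration only; the proxy must be set before login
// establishes the session.
SFTPClient client = new SFTPClient(InetAddress.getByName("sftp.example.com"), 22, "kettleUser");
client.setProxy("proxy.example.com", "3128", "proxyUser", "proxyPass", SFTPClient.PROXY_TYPE_HTTP);
client.login("secret");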
Use of org.pentaho.di.core.exception.KettleJobException in project pentaho-kettle by pentaho.
Class SFTPClient, method put:
public void put(FileObject fileObject, String remoteFile) throws KettleJobException {
  int mode = ChannelSftp.OVERWRITE;
  InputStream inputStream = null;
  try {
    inputStream = KettleVFS.getInputStream(fileObject);
    c.put(inputStream, remoteFile, null, mode);
  } catch (Exception e) {
    throw new KettleJobException(e);
  } finally {
    if (inputStream != null) {
      try {
        inputStream.close();
      } catch (IOException e) {
        throw new KettleJobException(e);
      }
    }
  }
}
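And a hedged upload sketch on the same assumed client, resolving the local file through KettleVFS; both paths are placeholders. Here c above is the client's ChannelSftp, and the OVERWRITE mode means an existing remote file is silently replaced.

// Hypothetical paths; a single catch suffices because KettleFileException (from VFS
// resolution) and KettleJobException (from the transfer) both extend KettleException.
try {
  FileObject localFile = KettleVFS.getFileObject("/tmp/report.csv");
  client.put(localFile, "/upload/report.csv");
} catch (KettleException e) {
  // Log or rethrow as appropriate for the surrounding job entry.
}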