Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
Class JobEntryPGPVerify, method saveRep:
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "gpglocation", gpglocation);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "filename", filename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "detachedfilename", detachedfilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "useDetachedSignature", useDetachedSignature);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryPGPVerify.ERROR_0003_Cannot_Save_Job_Entry", id_job), dbe);
  }
}
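The read side follows the same pattern: the matching loadRep reads back the same attribute keys and again wraps any KettleDatabaseException in a KettleException. A minimal sketch, assuming the Repository getter overloads shown later on this page; the localized message key used on load is hypothetical:

public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    // Read back the attribute keys written by saveRep above.
    gpglocation = rep.getJobEntryAttributeString(id_jobentry, "gpglocation");
    filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
    detachedfilename = rep.getJobEntryAttributeString(id_jobentry, "detachedfilename");
    useDetachedSignature = rep.getJobEntryAttributeBoolean(id_jobentry, "useDetachedSignature");
  } catch (KettleDatabaseException dbe) {
    // "JobEntryPGPVerify.ERROR_0004_Cannot_Load_Job_Entry" is an assumed message key, not taken from this page.
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryPGPVerify.ERROR_0004_Cannot_Load_Job_Entry", "" + id_jobentry), dbe);
  }
}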
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
Class JobEntryPing, method saveRep:
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "hostname", hostname);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "nbr_packets", nbrPackets);
    // TODO: The following line may be removed 3 versions after 2.5.0
    rep.saveJobEntryAttribute(id_job, getObjectId(), "nbrpaquets", nbrPackets);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "timeout", timeout);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "pingtype", pingtype);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to save job entry of type 'ping' to the repository for id_job=" + id_job, dbe);
  }
}
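The duplicate write under the legacy key "nbrpaquets" is kept for backward compatibility, as the TODO notes. On load, the entry can fall back to the old key when the newer "nbr_packets" attribute is missing. A hedged sketch of that fallback (the real loadRep may structure it differently):

// Inside the matching loadRep(...) body (sketch only):
nbrPackets = rep.getJobEntryAttributeString(id_jobentry, "nbr_packets");
if (nbrPackets == null || nbrPackets.length() == 0) {
  // Fall back to the pre-2.5.0 attribute name that saveRep still writes.
  nbrPackets = rep.getJobEntryAttributeString(id_jobentry, "nbrpaquets");
}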
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
Class JobEntrySFTP, method saveRep:
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "servername", serverName);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "serverport", serverPort);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "username", userName);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "password", Encr.encryptPasswordIfNotUsingVariables(password));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "sftpdirectory", sftpDirectory);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "targetdirectory", targetDirectory);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "wildcard", wildcard);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "remove", remove);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "isaddresult", isaddresult);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "createtargetfolder", createtargetfolder);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "copyprevious", copyprevious);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "usekeyfilename", usekeyfilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "keyfilename", keyfilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "keyfilepass", Encr.encryptPasswordIfNotUsingVariables(keyfilepass));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "compression", compression);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyType", proxyType);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyHost", proxyHost);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyPort", proxyPort);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyUsername", proxyUsername);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyPassword", Encr.encryptPasswordIfNotUsingVariables(proxyPassword));
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to save job entry of type 'SFTP' to the repository for id_job=" + id_job, dbe);
  }
}
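Password attributes are passed through Encr.encryptPasswordIfNotUsingVariables before being stored. On load they are decrypted again; a minimal sketch assuming Encr.decryptPasswordOptionallyEncrypted as the decryption counterpart (check the Encr class in your Kettle version for the exact method name):

// Inside the matching loadRep(...) body (sketch):
password = Encr.decryptPasswordOptionallyEncrypted(rep.getJobEntryAttributeString(id_jobentry, "password"));
keyfilepass = Encr.decryptPasswordOptionallyEncrypted(rep.getJobEntryAttributeString(id_jobentry, "keyfilepass"));
proxyPassword = Encr.decryptPasswordOptionallyEncrypted(rep.getJobEntryAttributeString(id_jobentry, "proxyPassword"));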
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
Class JobEntrySFTPPUT, method saveRep:
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "servername", serverName);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "serverport", serverPort);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "username", userName);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "password", Encr.encryptPasswordIfNotUsingVariables(password));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "sftpdirectory", sftpDirectory);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "localdirectory", localDirectory);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "wildcard", wildcard);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "copyprevious", copyprevious);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "copypreviousfiles", copypreviousfiles);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "addFilenameResut", addFilenameResut);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "usekeyfilename", usekeyfilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "keyfilename", keyfilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "keyfilepass", Encr.encryptPasswordIfNotUsingVariables(keyfilepass));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "compression", compression);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyType", proxyType);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyHost", proxyHost);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyPort", proxyPort);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyUsername", proxyUsername);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "proxyPassword", Encr.encryptPasswordIfNotUsingVariables(proxyPassword));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "aftersftpput", getAfterSFTPPutCode(getAfterFTPS()));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "createRemoteFolder", createRemoteFolder);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "destinationfolder", destinationfolder);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "createdestinationfolder", createDestinationFolder);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "successWhenNoFile", successWhenNoFile);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to save job entry of type 'SFTPPUT' to the repository for id_job=" + id_job, dbe);
  }
}
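The "aftersftpput" attribute stores a string code produced by getAfterSFTPPutCode(getAfterFTPS()). On load, that code has to be mapped back to the internal value; the lookup and setter names below (getAfterSFTPPutByCode, setAfterFTPS) are assumed counterparts of the save-side accessors, not confirmed by this page:

// Inside the matching loadRep(...) body (sketch; method names assumed):
setAfterFTPS(getAfterSFTPPutByCode(rep.getJobEntryAttributeString(id_jobentry, "aftersftpput")));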
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
Class JobEntryShell, method loadRep:
// Load the jobentry from repository
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    setFileName(rep.getJobEntryAttributeString(id_jobentry, "file_name"));
    setWorkDirectory(rep.getJobEntryAttributeString(id_jobentry, "work_directory"));
    argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
    execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
    setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
    setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile");
    addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
    addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");
    logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
    logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
    logFileLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));
    insertScript = rep.getJobEntryAttributeBoolean(id_jobentry, "insertScript");
    script = rep.getJobEntryAttributeString(id_jobentry, "script");
    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument");
    allocate(argnr);
    // Read them all...
    for (int a = 0; a < argnr; a++) {
      arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "argument");
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to load job entry of type 'shell' from the repository with id_jobentry=" + id_jobentry, dbe);
  }
}
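The write side mirrors this: each shell argument is stored with an index-aware saveJobEntryAttribute overload so that countNrJobEntryAttributes can recover the count on load. A minimal sketch of the relevant part of the corresponding saveRep, using only fields and keys visible in the load code above and assuming the indexed overload mirrors the indexed getter:

// Inside saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) (sketch):
try {
  rep.saveJobEntryAttribute(id_job, getObjectId(), "arg_from_previous", argFromPrevious);
  rep.saveJobEntryAttribute(id_job, getObjectId(), "exec_per_row", execPerRow);
  // One repository row per argument; the index is what countNrJobEntryAttributes counts back on load.
  for (int i = 0; i < arguments.length; i++) {
    rep.saveJobEntryAttribute(id_job, getObjectId(), i, "argument", arguments[i]);
  }
} catch (KettleDatabaseException dbe) {
  throw new KettleException("Unable to save job entry of type 'shell' to the repository for id_job=" + id_job, dbe);
}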