Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho: class JobEntryWebServiceAvailable, method saveRep.
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "url", url);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "connectTimeOut", connectTimeOut);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "readTimeOut", readTimeOut);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryWebServiceAvailable.ERROR_0003_Cannot_Save_Job_Entry", "" + id_job), dbe);
  }
}
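For comparison, the attributes written above would be read back in a loadRep counterpart. The following is only a minimal sketch, not the project's actual implementation; it assumes the standard Repository.getJobEntryAttributeString accessor and that url, connectTimeOut and readTimeOut are String fields.

public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    // Read back the attributes written by saveRep, keyed by the same attribute codes.
    url = rep.getJobEntryAttributeString(id_jobentry, "url");
    connectTimeOut = rep.getJobEntryAttributeString(id_jobentry, "connectTimeOut");
    readTimeOut = rep.getJobEntryAttributeString(id_jobentry, "readTimeOut");
  } catch (KettleDatabaseException dbe) {
    // Wrap the low-level database exception, mirroring the pattern used in saveRep.
    throw new KettleException("Unable to load job entry from repository: " + id_jobentry, dbe);
  }
}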
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho: class JobEntryWriteToLog, method loadRep.
@Override
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    logmessage = rep.getJobEntryAttributeString(id_jobentry, "logmessage");
    entryLogLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));
    logsubject = rep.getJobEntryAttributeString(id_jobentry, "logsubject");
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "WriteToLog.Error.UnableToLoadFromRepository.Label") + id_jobentry, dbe);
  }
}
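A matching saveRep would persist the same three attributes. This is only a sketch, not the project's actual method; it assumes entryLogLevel exposes its repository code via LogLevel.getCode() and that the error message need not come from the message bundle.

public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "logmessage", logmessage);
    // Store the log level by its string code so loadRep can map it back with getLogLevelForCode().
    rep.saveJobEntryAttribute(id_job, getObjectId(), "loglevel", entryLogLevel != null ? entryLogLevel.getCode() : null);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "logsubject", logsubject);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to save job entry to repository: " + id_job, dbe);
  }
}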
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho: class JobEntryZipFile, method saveRep.
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "zipfilename", zipFilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "compressionrate", compressionRate);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "ifzipfileexists", ifZipFileExists);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "afterzip", afterZip);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "wildcard", wildCard);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "wildcardexclude", excludeWildCard);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "sourcedirectory", sourceDirectory);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "movetodirectory", movetoDirectory);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "addfiletoresult", addFileToResult);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "isfromprevious", isFromPrevious);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "createparentfolder", createParentFolder);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "addtime", addTime);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "adddate", addDate);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "SpecifyFormat", specifyFormat);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "date_time_format", dateTimeFormat);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "createMoveToDirectory", createMoveToDirectory);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "include_subfolders", includingSubFolders);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "stored_source_path_depth", storedSourcePathDepth);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryZipFile.UnableSaveJobEntryRep", "" + id_job), dbe);
  }
}
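Reading these attributes back in loadRep would use the type-specific accessors on Repository: strings via getJobEntryAttributeString, boolean flags via getJobEntryAttributeBoolean. The sketch below is illustrative only, covers just a handful of the attributes, and assumes the field types shown.

public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    // String-valued attributes come back via getJobEntryAttributeString ...
    zipFilename = rep.getJobEntryAttributeString(id_jobentry, "zipfilename");
    wildCard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
    // ... and boolean flags via getJobEntryAttributeBoolean (remaining attributes omitted for brevity).
    addFileToResult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
    createParentFolder = rep.getJobEntryAttributeBoolean(id_jobentry, "createparentfolder");
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to load job entry from repository: " + id_jobentry, dbe);
  }
}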
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho: class JobMeta, method getSQLStatements.
/**
 * Builds a list of all the SQL statements that this job needs in order to work properly.
 *
 * @return An ArrayList of SQLStatement objects.
 */
public List<SQLStatement> getSQLStatements(Repository repository, IMetaStore metaStore, ProgressMonitorListener monitor) throws KettleException {
  if (monitor != null) {
    monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.GettingSQLNeededForThisJob"), nrJobEntries() + 1);
  }
  List<SQLStatement> stats = new ArrayList<SQLStatement>();
  for (int i = 0; i < nrJobEntries(); i++) {
    JobEntryCopy copy = getJobEntry(i);
    if (monitor != null) {
      monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.GettingSQLForJobEntryCopy") + copy + "]");
    }
    stats.addAll(copy.getEntry().getSQLStatements(repository, metaStore, this));
    stats.addAll(compatibleGetEntrySQLStatements(copy.getEntry(), repository));
    stats.addAll(compatibleGetEntrySQLStatements(copy.getEntry(), repository, this));
    if (monitor != null) {
      monitor.worked(1);
    }
  }
  // Also check the SQL for the log table...
  if (monitor != null) {
    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.GettingSQLStatementsForJobLogTables"));
  }
  if (jobLogTable.getDatabaseMeta() != null && !Utils.isEmpty(jobLogTable.getTableName())) {
    Database db = new Database(this, jobLogTable.getDatabaseMeta());
    try {
      db.connect();
      RowMetaInterface fields = jobLogTable.getLogRecord(LogStatus.START, null, null).getRowMeta();
      String sql = db.getDDL(jobLogTable.getTableName(), fields);
      if (sql != null && sql.length() > 0) {
        SQLStatement stat = new SQLStatement(BaseMessages.getString(PKG, "JobMeta.SQLFeedback.ThisJob"), jobLogTable.getDatabaseMeta(), sql);
        stats.add(stat);
      }
    } catch (KettleDatabaseException dbe) {
      SQLStatement stat = new SQLStatement(BaseMessages.getString(PKG, "JobMeta.SQLFeedback.ThisJob"), jobLogTable.getDatabaseMeta(), null);
      stat.setError(BaseMessages.getString(PKG, "JobMeta.SQLFeedback.ErrorObtainingJobLogTableInfo") + dbe.getMessage());
      stats.add(stat);
    } finally {
      db.disconnect();
    }
  }
  if (monitor != null) {
    monitor.worked(1);
  }
  if (monitor != null) {
    monitor.done();
  }
  return stats;
}
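As a usage sketch, a caller could collect the statements this method returns and report any per-statement errors. The helper below is illustrative and not part of the project; it assumes a JobMeta, Repository and IMetaStore have already been set up, and passes null for the optional progress monitor.

// Illustrative helper (not from the project): collect the SQL a job needs and print it,
// logging the error text for any statement that could not be generated.
static void printRequiredSql(JobMeta jobMeta, Repository repository, IMetaStore metaStore) throws KettleException {
  List<SQLStatement> statements = jobMeta.getSQLStatements(repository, metaStore, null);
  for (SQLStatement statement : statements) {
    if (statement.hasError()) {
      System.err.println("SQL error: " + statement.getError());
    } else if (statement.hasSQL()) {
      System.out.println(statement.getSQL());
    }
  }
}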
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho: class JobEntryColumnsExist, method loadRep.
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes(id_jobentry, "name");
    arguments = new String[argnr];
    // Read them all...
    for (int a = 0; a < argnr; a++) {
      arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "name");
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryColumnsExist.Meta.UnableLoadRep", "" + id_jobentry), dbe);
  }
}
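The mirror-image saveRep would write the connection reference and the indexed column names back out. The sketch below is not the project's actual code; it assumes Repository offers saveDatabaseMetaJobEntryAttribute and an indexed saveJobEntryAttribute overload matching the accessors used above.

public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "tablename", tablename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "schemaname", schemaname);
    // Persist the database connection reference under the same "connection"/"id_database" codes.
    rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
    // Write each column name as an indexed "name" attribute so loadRep can count and read them back.
    for (int a = 0; a < arguments.length; a++) {
      rep.saveJobEntryAttribute(id_job, getObjectId(), a, "name", arguments[a]);
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to save job entry to repository: " + id_job, dbe);
  }
}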