Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class JobEntryShell, method saveRep.
// Save the attributes of this job entry
//
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "file_name", filename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "work_directory", workDirectory);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "arg_from_previous", argFromPrevious);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "exec_per_row", execPerRow);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "set_logfile", setLogfile);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "set_append_logfile", setAppendLogfile);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "add_date", addDate);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "add_time", addTime);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "logfile", logfile);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "logext", logext);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "loglevel", logFileLevel == null ? LogLevel.NOTHING.getCode() : logFileLevel.getCode());
    rep.saveJobEntryAttribute(id_job, getObjectId(), "insertScript", insertScript);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "script", script);
    // save the arguments...
    if (arguments != null) {
      for (int i = 0; i < arguments.length; i++) {
        rep.saveJobEntryAttribute(id_job, getObjectId(), i, "argument", arguments[i]);
      }
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to save job entry of type 'shell' to the repository", dbe);
  }
}
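For reference, the attributes written here are read back by the entry's loadRep counterpart. Below is a minimal sketch of that read-back, not the exact pentaho-kettle implementation; it assumes the usual Repository getters (getJobEntryAttributeString, getJobEntryAttributeBoolean, countNrJobEntryAttributes) and only covers a few of the attributes saved above.

public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    // Scalar attributes come back under the same names used by saveRep
    filename = rep.getJobEntryAttributeString(id_jobentry, "file_name");
    workDirectory = rep.getJobEntryAttributeString(id_jobentry, "work_directory");
    argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
    execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
    // Indexed attributes ("argument") are counted first, then read one by one
    int nrArguments = rep.countNrJobEntryAttributes(id_jobentry, "argument");
    arguments = new String[nrArguments];
    for (int i = 0; i < nrArguments; i++) {
      arguments[i] = rep.getJobEntryAttributeString(id_jobentry, i, "argument");
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to load job entry of type 'shell' from the repository", dbe);
  }
}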
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class JobEntrySimpleEval, method saveRep.
@Override
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "valuetype", getValueTypeCode(valuetype));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "fieldname", fieldname);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "variablename", variablename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "fieldtype", getFieldTypeCode(fieldtype));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "mask", mask);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "comparevalue", comparevalue);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "minvalue", minvalue);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "maxvalue", maxvalue);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "successcondition", getSuccessConditionCode(successcondition));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "successnumbercondition", getSuccessNumberConditionCode(successnumbercondition));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "successbooleancondition", getSuccessBooleanConditionCode(successbooleancondition));
    rep.saveJobEntryAttribute(id_job, getObjectId(), "successwhenvarset", successwhenvarset);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntrySimple.Error.Exception.UnableSaveRep") + id_job, dbe);
  }
}
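The save side persists each enum as its code string (getValueTypeCode and friends); loading has to reverse that mapping. A rough sketch of the counterpart, assuming hypothetical code-to-constant helpers (getValueTypeByCode, getFieldTypeByCode, and so on, named here only for illustration):

public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    // Codes written by saveRep are mapped back to their constants (helper names are illustrative)
    valuetype = getValueTypeByCode(rep.getJobEntryAttributeString(id_jobentry, "valuetype"));
    fieldtype = getFieldTypeByCode(rep.getJobEntryAttributeString(id_jobentry, "fieldtype"));
    fieldname = rep.getJobEntryAttributeString(id_jobentry, "fieldname");
    variablename = rep.getJobEntryAttributeString(id_jobentry, "variablename");
    mask = rep.getJobEntryAttributeString(id_jobentry, "mask");
    comparevalue = rep.getJobEntryAttributeString(id_jobentry, "comparevalue");
    successwhenvarset = rep.getJobEntryAttributeBoolean(id_jobentry, "successwhenvarset");
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to load simple-eval job entry from the repository, id_jobentry=" + id_jobentry, dbe);
  }
}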
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class JobEntrySQL, method saveRep.
// Save the attributes of this job entry
//
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "sql", sql);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "useVariableSubstitution", useVariableSubstitution ? "T" : "F");
    rep.saveJobEntryAttribute(id_job, getObjectId(), "sqlfromfile", sqlfromfile ? "T" : "F");
    rep.saveJobEntryAttribute(id_job, getObjectId(), "sqlfilename", sqlfilename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "sendOneStatement", sendOneStatement ? "T" : "F");
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to save job entry of type 'sql' to the repository for id_job=" + id_job, dbe);
  }
}
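Because the boolean options are stored as "T"/"F" strings, the load side has to parse them back. A minimal sketch of that round trip, not the exact pentaho-kettle code, reusing loadDatabaseMetaFromJobEntryAttribute as shown in the JobEntryTableExists example further below:

public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
    sql = rep.getJobEntryAttributeString(id_jobentry, "sql");
    sqlfilename = rep.getJobEntryAttributeString(id_jobentry, "sqlfilename");
    // "T"/"F" flags written by saveRep are turned back into booleans
    useVariableSubstitution = "T".equalsIgnoreCase(rep.getJobEntryAttributeString(id_jobentry, "useVariableSubstitution"));
    sqlfromfile = "T".equalsIgnoreCase(rep.getJobEntryAttributeString(id_jobentry, "sqlfromfile"));
    sendOneStatement = "T".equalsIgnoreCase(rep.getJobEntryAttributeString(id_jobentry, "sendOneStatement"));
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to load job entry of type 'sql' from the repository for id_jobentry=" + id_jobentry, dbe);
  }
}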
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class JobEntrySQL, method execute.
public Result execute(Result previousResult, int nr) {
  Result result = previousResult;
  if (connection != null) {
    Database db = new Database(this, connection);
    FileObject SQLfile = null;
    db.shareVariablesWith(this);
    try {
      String theSQL = null;
      db.connect(parentJob.getTransactionId(), null);
      if (sqlfromfile) {
        if (sqlfilename == null) {
          throw new KettleDatabaseException(BaseMessages.getString(PKG, "JobSQL.NoSQLFileSpecified"));
        }
        try {
          String realfilename = environmentSubstitute(sqlfilename);
          SQLfile = KettleVFS.getFileObject(realfilename, this);
          if (!SQLfile.exists()) {
            logError(BaseMessages.getString(PKG, "JobSQL.SQLFileNotExist", realfilename));
            throw new KettleDatabaseException(BaseMessages.getString(PKG, "JobSQL.SQLFileNotExist", realfilename));
          }
          if (isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobSQL.SQLFileExists", realfilename));
          }
          InputStream IS = KettleVFS.getInputStream(SQLfile);
          try {
            // Read the SQL file line by line, re-inserting line breaks as we go
            InputStreamReader BIS = new InputStreamReader(new BufferedInputStream(IS, 500));
            StringBuilder lineSB = new StringBuilder(256);
            lineSB.setLength(0);
            BufferedReader buff = new BufferedReader(BIS);
            String sLine = null;
            theSQL = Const.CR;
            while ((sLine = buff.readLine()) != null) {
              if (Utils.isEmpty(sLine)) {
                theSQL = theSQL + Const.CR;
              } else {
                theSQL = theSQL + Const.CR + sLine;
              }
            }
          } finally {
            IS.close();
          }
        } catch (Exception e) {
          throw new KettleDatabaseException(BaseMessages.getString(PKG, "JobSQL.ErrorRunningSQLfromFile"), e);
        }
      } else {
        theSQL = sql;
      }
      if (!Utils.isEmpty(theSQL)) {
        // let it run
        if (useVariableSubstitution) {
          theSQL = environmentSubstitute(theSQL);
        }
        if (isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "JobSQL.Log.SQlStatement", theSQL));
        }
        // Either send the SQL as a single statement or split it into separate statements
        if (sendOneStatement) {
          db.execStatement(theSQL);
        } else {
          db.execStatements(theSQL);
        }
      }
    } catch (KettleDatabaseException je) {
      result.setNrErrors(1);
      logError(BaseMessages.getString(PKG, "JobSQL.ErrorRunJobEntry", je.getMessage()));
    } finally {
      db.disconnect();
      if (SQLfile != null) {
        try {
          SQLfile.close();
        } catch (Exception e) {
          // Ignore errors
        }
      }
    }
  } else {
    result.setNrErrors(1);
    logError(BaseMessages.getString(PKG, "JobSQL.NoDatabaseConnection"));
  }
  if (result.getNrErrors() == 0) {
    result.setResult(true);
  } else {
    result.setResult(false);
  }
  return result;
}
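Note that execute never rethrows the KettleDatabaseException: it is logged, counted in the Result, and the entry reports failure through setResult(false). A caller therefore branches on the Result rather than catching; a hedged sketch of that caller side (the constructor and configuration steps are illustrative, not the exact API):

JobEntrySQL sqlEntry = new JobEntrySQL("Run cleanup SQL"); // constructor form assumed for illustration
// ... configure the database connection and SQL on the entry ...
Result result = sqlEntry.execute(new Result(), 0);
if (!result.getResult()) {
  // Any KettleDatabaseException was already logged inside execute();
  // here it only shows up as a non-zero error count.
  System.err.println("SQL job entry failed, errors=" + result.getNrErrors());
}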
Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
The class JobEntryTableExists, method loadRep.
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "TableExists.Meta.UnableLoadRep", "" + id_jobentry), dbe);
  }
}
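The matching saveRep for this entry follows the same pattern as the examples above; a minimal sketch, with the attribute names taken from loadRep and the error message wording assumed:

public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "tablename", tablename);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "schemaname", schemaname);
    // The connection is stored as a reference, mirroring loadDatabaseMetaFromJobEntryAttribute above
    rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException("Unable to save job entry of type 'table exists' to the repository for id_job=" + id_job, dbe);
  }
}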