Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project: class JobEntryTableExists, method execute.
/**
 * Executes this job entry: checks whether the configured table exists on the
 * configured database connection.
 *
 * @param previousResult the result of the previous job entry; updated in place
 * @param nr the job entry number (unused in this implementation)
 * @return the result object: the result flag is true when the table exists,
 *         false otherwise; the error count is set to 1 on a database error or
 *         when no connection is defined
 */
public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    result.setResult(false);
    if (connection != null) {
        Database db = new Database(this, connection);
        db.shareVariablesWith(this);
        try {
            db.connect(parentJob.getTransactionId(), null);
            // Resolve any variables in the table/schema names before querying.
            String realTablename = environmentSubstitute(tablename);
            String realSchemaname = environmentSubstitute(schemaname);
            if (!Utils.isEmpty(realSchemaname)) {
                // Build a properly quoted schema.table combination for this database dialect.
                realTablename = db.getDatabaseMeta().getQuotedSchemaTableCombination(realSchemaname, realTablename);
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "TableExists.Log.SchemaTable", realTablename));
                }
            } else {
                realTablename = db.getDatabaseMeta().quoteField(realTablename);
            }
            if (db.checkTableExists(realTablename)) {
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "TableExists.Log.TableExists", realTablename));
                }
                result.setResult(true);
            } else {
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "TableExists.Log.TableNotExists", realTablename));
                }
            }
        } catch (KettleDatabaseException dbe) {
            result.setNrErrors(1);
            logError(BaseMessages.getString(PKG, "TableExists.Error.RunningJobEntry", dbe.getMessage()));
        } finally {
            // db is assigned unconditionally above and can never be null here,
            // so the former null check was dead code. Disconnect always, and
            // ignore cleanup failures: the result has already been determined.
            try {
                db.disconnect();
            } catch (Exception e) {
                /* Ignore */
            }
        }
    } else {
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "TableExists.Error.NoConnectionDefined"));
    }
    return result;
}
Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project: class JobEntryTableExists, method saveRep.
/**
 * Saves this job entry's attributes (table name, schema name, and database
 * connection reference) into the repository under the given job id.
 *
 * @param rep the repository to write into
 * @param metaStore the metastore (not used by this entry)
 * @param id_job the id of the job that owns this entry
 * @throws KettleException when one of the repository writes fails
 */
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "tablename", tablename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "schemaname", schemaname);
        // The connection is stored as a reference to the shared database metadata.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
    } catch (KettleDatabaseException e) {
        // Wrap the low-level database failure with a localized, job-scoped message.
        throw new KettleException(BaseMessages.getString(PKG, "TableExists.Meta.UnableSaveRep", "" + id_job), e);
    }
}
Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project: class JobEntryTalendJobExec, method saveRep.
/**
 * Saves this job entry's attributes (the Talend job file name and the class
 * to execute) into the repository under the given job id.
 *
 * @param rep the repository to write into
 * @param metaStore the metastore (not used by this entry)
 * @param id_job the id of the job that owns this entry
 * @throws KettleException when one of the repository writes fails
 */
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "filename", filename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "class_name", className);
    } catch (KettleDatabaseException e) {
        // Wrap the low-level database failure with a localized, job-scoped message.
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryTalendJobExec.ERROR_0003_Cannot_Save_Job_Entry", id_job), e);
    }
}
Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project: class JobEntryTrans, method loadRep.
// Load the jobentry from repository
//
@Override
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
try {
// How the target transformation is located: by name/directory, by file name,
// or by repository object id (decoded from its stored code).
String method = rep.getJobEntryAttributeString(id_jobentry, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
// An empty stored id means "no repository object reference".
String transId = rep.getJobEntryAttributeString(id_jobentry, "trans_object_id");
transObjectId = Utils.isEmpty(transId) ? null : new StringObjectId(transId);
transname = rep.getJobEntryAttributeString(id_jobentry, "name");
directory = rep.getJobEntryAttributeString(id_jobentry, "dir_path");
filename = rep.getJobEntryAttributeString(id_jobentry, "file_name");
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
paramsFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "params_from_previous");
execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
// clear_rows / clear_files / wait_until_finished default to true when the
// attribute is missing (older repository contents).
clearResultRows = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_rows", true);
clearResultFiles = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_files", true);
setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");
logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
logFileLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));
clustering = rep.getJobEntryAttributeBoolean(id_jobentry, "cluster");
createParentFolder = rep.getJobEntryAttributeBoolean(id_jobentry, "create_parent_folder");
remoteSlaveServerName = rep.getJobEntryAttributeString(id_jobentry, "slave_server_name");
setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile");
waitingToFinish = rep.getJobEntryAttributeBoolean(id_jobentry, "wait_until_finished", true);
followingAbortRemotely = rep.getJobEntryAttributeBoolean(id_jobentry, "follow_abort_remote");
loggingRemoteWork = rep.getJobEntryAttributeBoolean(id_jobentry, "logging_remote_work");
runConfiguration = rep.getJobEntryAttributeString(id_jobentry, "run_configuration");
// How many arguments?
int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument");
allocateArgs(argnr);
// Read all arguments...
for (int a = 0; a < argnr; a++) {
arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "argument");
}
// How many parameters? (counted via the parameter_name attribute)
int parameternr = rep.countNrJobEntryAttributes(id_jobentry, "parameter_name");
allocateParams(parameternr);
// Read all parameters: name, source stream field, and static value per slot.
for (int a = 0; a < parameternr; a++) {
parameters[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_name");
parameterFieldNames[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_stream_name");
parameterValues[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_value");
}
passingAllParameters = rep.getJobEntryAttributeBoolean(id_jobentry, "pass_all_parameters", true);
} catch (KettleDatabaseException dbe) {
// Preserve the cause so the original repository error remains diagnosable.
throw new KettleException("Unable to load job entry of type 'trans' from the repository for id_jobentry=" + id_jobentry, dbe);
}
}
Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project: class JobEntryTrans, method saveRep.
// Save the attributes of this job entry
//
@Override
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
try {
// Persist how the target transformation is located; null-safe because either
// field may be unset depending on the specification method in use.
rep.saveJobEntryAttribute(id_job, getObjectId(), "specification_method", specificationMethod == null ? null : specificationMethod.getCode());
rep.saveJobEntryAttribute(id_job, getObjectId(), "trans_object_id", transObjectId == null ? null : transObjectId.toString());
rep.saveJobEntryAttribute(id_job, getObjectId(), "name", getTransname());
rep.saveJobEntryAttribute(id_job, getObjectId(), "dir_path", getDirectory() != null ? getDirectory() : "");
rep.saveJobEntryAttribute(id_job, getObjectId(), "file_name", filename);
rep.saveJobEntryAttribute(id_job, getObjectId(), "arg_from_previous", argFromPrevious);
rep.saveJobEntryAttribute(id_job, getObjectId(), "params_from_previous", paramsFromPrevious);
rep.saveJobEntryAttribute(id_job, getObjectId(), "exec_per_row", execPerRow);
rep.saveJobEntryAttribute(id_job, getObjectId(), "clear_rows", clearResultRows);
rep.saveJobEntryAttribute(id_job, getObjectId(), "clear_files", clearResultFiles);
rep.saveJobEntryAttribute(id_job, getObjectId(), "set_logfile", setLogfile);
rep.saveJobEntryAttribute(id_job, getObjectId(), "add_date", addDate);
rep.saveJobEntryAttribute(id_job, getObjectId(), "add_time", addTime);
rep.saveJobEntryAttribute(id_job, getObjectId(), "logfile", logfile);
rep.saveJobEntryAttribute(id_job, getObjectId(), "logext", logext);
rep.saveJobEntryAttribute(id_job, getObjectId(), "loglevel", logFileLevel != null ? logFileLevel.getCode() : null);
rep.saveJobEntryAttribute(id_job, getObjectId(), "cluster", clustering);
rep.saveJobEntryAttribute(id_job, getObjectId(), "slave_server_name", remoteSlaveServerName);
rep.saveJobEntryAttribute(id_job, getObjectId(), "set_append_logfile", setAppendLogfile);
rep.saveJobEntryAttribute(id_job, getObjectId(), "wait_until_finished", waitingToFinish);
rep.saveJobEntryAttribute(id_job, getObjectId(), "follow_abort_remote", followingAbortRemotely);
rep.saveJobEntryAttribute(id_job, getObjectId(), "create_parent_folder", createParentFolder);
rep.saveJobEntryAttribute(id_job, getObjectId(), "logging_remote_work", loggingRemoteWork);
rep.saveJobEntryAttribute(id_job, getObjectId(), "run_configuration", runConfiguration);
// Save the arguments...
if (arguments != null) {
for (int i = 0; i < arguments.length; i++) {
rep.saveJobEntryAttribute(id_job, getObjectId(), i, "argument", arguments[i]);
}
}
// Save the parameters: one indexed triple (name, stream field, value) per slot.
// Const.NVL maps null field/value entries to "" so the row is always written.
if (parameters != null) {
for (int i = 0; i < parameters.length; i++) {
rep.saveJobEntryAttribute(id_job, getObjectId(), i, "parameter_name", parameters[i]);
rep.saveJobEntryAttribute(id_job, getObjectId(), i, "parameter_stream_name", Const.NVL(parameterFieldNames[i], ""));
rep.saveJobEntryAttribute(id_job, getObjectId(), i, "parameter_value", Const.NVL(parameterValues[i], ""));
}
}
rep.saveJobEntryAttribute(id_job, getObjectId(), "pass_all_parameters", passingAllParameters);
} catch (KettleDatabaseException dbe) {
// Preserve the cause so the original repository error remains diagnosable.
throw new KettleException("Unable to save job entry of type 'trans' to the repository for id_job=" + id_job, dbe);
}
}
Aggregations