Use of org.pentaho.di.core.RowMetaAndData in project pentaho-kettle by pentaho:
class KettleDatabaseRepositoryJobDelegate, method loadJobMeta.
/**
 * Load a job from a repository directory.
 *
 * @param jobname
 *          the name of the job
 * @param repdir
 *          the directory in which the job resides
 * @param monitor
 *          the progress monitor (may be null)
 * @return the loaded JobMeta
 * @throws KettleException
 *           if the job cannot be found or read
 */
public JobMeta loadJobMeta(String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor) throws KettleException {
  JobMeta jobMeta = new JobMeta();
  synchronized (repository) {
    try {
      // Clear everything...
      jobMeta.clear();
      jobMeta.setRepositoryDirectory(repdir);

      // Get the job id
      jobMeta.setObjectId(getJobID(jobname, repdir.getObjectId()));

      // If no valid id is available in the database, then give an error...
      if (jobMeta.getObjectId() != null) {
        // Load the note, job entry copy and hop IDs...
        ObjectId[] noteids = repository.getJobNoteIDs(jobMeta.getObjectId());
        ObjectId[] jecids = repository.getJobEntryCopyIDs(jobMeta.getObjectId());
        ObjectId[] hopid = repository.getJobHopIDs(jobMeta.getObjectId());

        int nrWork = 2 + noteids.length + jecids.length + hopid.length;
        if (monitor != null) {
          monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork);
        }

        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobInformation"));
        }
        RowMetaAndData jobRow = getJob(jobMeta.getObjectId());
        jobMeta.setName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_NAME, null));
        jobMeta.setDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null));
        jobMeta.setExtendedDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null));
        jobMeta.setJobversion(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null));
        jobMeta.setJobstatus(Const.toInt(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null), -1));
        jobMeta.setCreatedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null));
        jobMeta.setCreatedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date()));
        jobMeta.setModifiedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, null));
        jobMeta.setModifiedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, new Date()));

        long id_logdb = jobRow.getInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, 0);
        if (id_logdb > 0) {
          // Get the log connection
          //
          DatabaseMeta logDb = repository.loadDatabaseMeta(new LongObjectId(id_logdb), null);
          jobMeta.getJobLogTable().setConnectionName(logDb.getName());
          // jobMeta.getJobLogTable().getDatabaseMeta().shareVariablesWith(jobMeta);
        }
        jobMeta.getJobLogTable().setTableName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, null));
        jobMeta.getJobLogTable().setBatchIdUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, false));
        jobMeta.getJobLogTable().setLogFieldUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, false));
        jobMeta.getJobLogTable().setLogSizeLimit(getJobAttributeString(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT));
        jobMeta.setBatchIdPassed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, false));

        // Load all the log tables for the job...
        //
        RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
        for (LogTableInterface logTable : jobMeta.getLogTables()) {
          logTable.loadFromRepository(attributeInterface);
        }

        if (monitor != null) {
          monitor.worked(1);
        }

        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingAvailableDatabasesFromRepository"));
        }
        // Read objects from the shared XML file & the repository
        try {
          jobMeta.setSharedObjectsFile(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, null));
          jobMeta.setSharedObjects(repository != null ? repository.readJobMetaSharedObjects(jobMeta) : jobMeta.readSharedObjects());
        } catch (Exception e) {
          log.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
          log.logError(Const.getStackTracker(e));
        }
        if (monitor != null) {
          monitor.worked(1);
        }

        if (log.isDetailed()) {
          log.logDetailed("Loading " + noteids.length + " notes");
        }
        for (int i = 0; i < noteids.length; i++) {
          if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length);
          }
          NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta(noteids[i]);
          if (jobMeta.indexOfNote(ni) < 0) {
            jobMeta.addNote(ni);
          }
          if (monitor != null) {
            monitor.worked(1);
          }
        }

        // Load the group attributes map
        //
        jobMeta.setAttributesMap(loadJobAttributesMap(jobMeta.getObjectId()));

        // Load the job entries...
        //
        // Keep a unique list of job entries to facilitate the loading.
        //
        List<JobEntryInterface> jobentries = new ArrayList<JobEntryInterface>();
        if (log.isDetailed()) {
          log.logDetailed("Loading " + jecids.length + " job entries");
        }
        for (int i = 0; i < jecids.length; i++) {
          if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + jecids.length);
          }
          JobEntryCopy jec = repository.jobEntryDelegate.loadJobEntryCopy(jobMeta.getObjectId(), jecids[i], jobentries, jobMeta.getDatabases(), jobMeta.getSlaveServers(), jobname);
          if (jec.isMissing()) {
            jobMeta.addMissingEntry((MissingEntry) jec.getEntry());
          }

          // Also set the copy number: count the number of job entry copies
          // that already use this job entry.
          //
          int copyNr = 0;
          for (JobEntryCopy copy : jobMeta.getJobCopies()) {
            if (jec.getEntry() == copy.getEntry()) {
              copyNr++;
            }
          }
          jec.setNr(copyNr);

          int idx = jobMeta.indexOfJobEntry(jec);
          if (idx < 0) {
            if (jec.getName() != null && jec.getName().length() > 0) {
              jobMeta.addJobEntry(jec);
            }
          } else {
            // Replace it!
            jobMeta.setJobEntry(idx, jec);
          }
          if (monitor != null) {
            monitor.worked(1);
          }
        }

        // Load the hops...
        if (log.isDetailed()) {
          log.logDetailed("Loading " + hopid.length + " job hops");
        }
        for (int i = 0; i < hopid.length; i++) {
          if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + hopid.length);
          }
          JobHopMeta hi = loadJobHopMeta(hopid[i], jobMeta.getJobCopies());
          jobMeta.getJobhops().add(hi);
          if (monitor != null) {
            monitor.worked(1);
          }
        }

        loadRepParameters(jobMeta);

        // Finally, clear the changed flags...
        jobMeta.clearChanged();
        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.FinishedLoadOfJob"));
        }
        if (monitor != null) {
          monitor.done();
        }

        // Close prepared statements, minimize locking, etc.
        //
        repository.connectionDelegate.closeAttributeLookupPreparedStatements();

        return jobMeta;
      } else {
        throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.CanNotFindJob") + jobname);
      }
    } catch (KettleException dbe) {
      throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
    } finally {
      jobMeta.initializeVariablesFrom(jobMeta.getParentVariableSpace());
      jobMeta.setInternalKettleVariables();
    }
  }
}
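This method is typically reached through the public Repository API rather than invoked directly. A minimal usage sketch, assuming an already-connected KettleDatabaseRepository exposing a jobDelegate field; the directory path and job name below are illustrative:

// Hypothetical usage: load a job named "daily_load" from the /etl directory.
// Assumes repository is an already-connected KettleDatabaseRepository.
RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();
RepositoryDirectoryInterface etlDir = tree.findDirectory("/etl");

// Passing null as the monitor is safe: every monitor call in loadJobMeta()
// is guarded by a null check.
JobMeta jobMeta = repository.jobDelegate.loadJobMeta("daily_load", etlDir, null);

System.out.println("Loaded job '" + jobMeta.getName() + "' with "
    + jobMeta.nrJobEntries() + " entries and " + jobMeta.nrJobHops() + " hops");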
Use of org.pentaho.di.core.RowMetaAndData in project pentaho-kettle by pentaho:
class KettleDatabaseRepositoryJobDelegate, method loadJobHopMeta.
public JobHopMeta loadJobHopMeta(ObjectId id_job_hop, List<JobEntryCopy> jobcopies) throws KettleException {
  JobHopMeta jobHopMeta = new JobHopMeta();
  try {
    RowMetaAndData r = getJobHop(id_job_hop);
    if (r != null) {
      long id_jobentry_copy_from = r.getInteger("ID_JOBENTRY_COPY_FROM", -1L);
      long id_jobentry_copy_to = r.getInteger("ID_JOBENTRY_COPY_TO", -1L);

      jobHopMeta.setEnabled(r.getBoolean("ENABLED", true));
      jobHopMeta.setEvaluation(r.getBoolean("EVALUATION", true));
      jobHopMeta.setConditional();
      if (r.getBoolean("UNCONDITIONAL", !jobHopMeta.getEvaluation())) {
        jobHopMeta.setUnconditional();
      }

      jobHopMeta.setFromEntry(JobMeta.findJobEntryCopy(jobcopies, new LongObjectId(id_jobentry_copy_from)));
      jobHopMeta.setToEntry(JobMeta.findJobEntryCopy(jobcopies, new LongObjectId(id_jobentry_copy_to)));

      return jobHopMeta;
    } else {
      throw new KettleException("Unable to find job hop with ID: " + id_job_hop);
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "JobHopMeta.Exception.UnableToLoadHopInfoRep", "" + id_job_hop), dbe);
  }
}
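The three booleans persisted for a hop encode its type. As a hedged illustration (this helper is not part of the delegate; the method name and color terminology are ours), the mapping can be summarized like this:

// Illustrative helper, not from the Kettle codebase:
// ENABLED       -> whether the hop is followed at all
// EVALUATION    -> follow on success (green hop) vs. on failure (red hop)
// UNCONDITIONAL -> always follow, regardless of the previous result (black hop)
static String describeHop(JobHopMeta hop) {
  if (!hop.isEnabled()) {
    return "disabled";
  }
  if (hop.isUnconditional()) {
    return "unconditional";
  }
  return hop.getEvaluation() ? "on success" : "on failure";
}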
Use of org.pentaho.di.core.RowMetaAndData in project pentaho-kettle by pentaho:
class KettleDatabaseRepositoryJobDelegate, method getJobsWithIDList.
public String[] getJobsWithIDList(ObjectId[] ids) throws KettleException {
  String[] jobsList = new String[ids.length];
  for (int i = 0; i < ids.length; i++) {
    ObjectId id_job = ids[i];
    if (id_job != null) {
      RowMetaAndData jobRow = getJob(id_job);
      if (jobRow != null) {
        String jobName = jobRow.getString(KettleDatabaseRepository.FIELD_JOB_NAME, "<name not found>");
        long id_directory = jobRow.getInteger(KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY, -1L);
        RepositoryDirectoryInterface dir = repository.loadRepositoryDirectoryTree().findDirectory(new LongObjectId(id_directory));
        jobsList[i] = dir.getPathObjectCombination(jobName);
      }
    }
  }
  return jobsList;
}
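A short, hypothetical call sequence for this method; the IDs and the delegate variable are illustrative:

// Hypothetical sketch: resolve job IDs to "directory + job name" strings.
// Assumes delegate is a KettleDatabaseRepositoryJobDelegate instance.
ObjectId[] ids = new ObjectId[] { new LongObjectId(101L), new LongObjectId(102L) };
String[] paths = delegate.getJobsWithIDList(ids);
for (String path : paths) {
  // An entry is null when its ID was null or no matching job row was found.
  System.out.println(path);
}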
Use of org.pentaho.di.core.RowMetaAndData in project pentaho-kettle by pentaho:
class KettleDatabaseRepositoryJobDelegate, method getNrJobHops.
public synchronized int getNrJobHops(ObjectId id_job) throws KettleException {
  int retval = 0;

  RowMetaAndData par = repository.connectionDelegate.getParameterMetaData(id_job);
  String sql = "SELECT COUNT(*) FROM " + quoteTable(KettleDatabaseRepository.TABLE_R_JOB_HOP) + " WHERE " + quote(KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB) + " = ? ";
  RowMetaAndData r = repository.connectionDelegate.getOneRow(sql, par.getRowMeta(), par.getData());
  if (r != null) {
    retval = (int) r.getInteger(0, 0L);
  }

  return retval;
}
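The query follows the delegate's parameterized-lookup pattern: getParameterMetaData(id_job) appears to wrap the job ID in a one-value RowMetaAndData whose metadata and data bind to the "?" placeholder via getOneRow(). A hedged sketch of building such a parameter row by hand; the field name "ID_JOB" here is an assumption:

// Illustrative only: a one-value parameter row for a prepared statement.
RowMetaAndData par = new RowMetaAndData();
par.addValue(new ValueMetaInteger("ID_JOB"), Long.valueOf(123L));
// par.getRowMeta() describes the parameter type; par.getData() holds the
// value that binds to the "?" in the SQL above.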
Use of org.pentaho.di.core.RowMetaAndData in project pentaho-kettle by pentaho:
class KettleDatabaseRepositoryJobDelegate, method insertJobHop.
public synchronized ObjectId insertJobHop(ObjectId id_job, ObjectId id_jobentry_copy_from, ObjectId id_jobentry_copy_to, boolean enabled, boolean evaluation, boolean unconditional) throws KettleException {
  ObjectId id = repository.connectionDelegate.getNextJobHopID();

  RowMetaAndData table = new RowMetaAndData();
  table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB_HOP), id);
  table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOB), id_job);
  table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_FROM), id_jobentry_copy_from);
  table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_HOP_ID_JOBENTRY_COPY_TO), id_jobentry_copy_to);
  table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_JOB_HOP_ENABLED), Boolean.valueOf(enabled));
  table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_JOB_HOP_EVALUATION), Boolean.valueOf(evaluation));
  table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_JOB_HOP_UNCONDITIONAL), Boolean.valueOf(unconditional));

  repository.connectionDelegate.getDatabase().prepareInsert(table.getRowMeta(), KettleDatabaseRepository.TABLE_R_JOB_HOP);
  repository.connectionDelegate.getDatabase().setValuesInsert(table);
  repository.connectionDelegate.getDatabase().insertRow();
  repository.connectionDelegate.getDatabase().closeInsert();

  return id;
}
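A hedged usage sketch, assuming delegate is the KettleDatabaseRepositoryJobDelegate and the three ObjectIds refer to existing rows; all identifiers below are illustrative:

// Hypothetical sketch: persist a new enabled, unconditional hop between two
// existing job entry copies.
ObjectId hopId = delegate.insertJobHop(
  jobId,       // ID of the owning job
  fromCopyId,  // ID of the source job entry copy
  toCopyId,    // ID of the target job entry copy
  true,        // enabled
  false,       // evaluation (ignored for unconditional hops)
  true);       // unconditional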