Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
The class JobTrackerConcurrencyTest, method mockJobEntryCopy.
static JobEntryCopy mockJobEntryCopy(String name, int number) {
  JobEntryCopy copy = mock(JobEntryCopy.class);
  when(copy.getName()).thenReturn(name);
  when(copy.getNr()).thenReturn(number);
  return copy;
}
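For orientation, a helper like this is typically called from tests that need many distinct JobEntryCopy instances without building real job entries. A minimal, self-contained sketch of such a caller; the test class and assertions below are illustrative and not part of JobTrackerConcurrencyTest:

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.junit.Test;
import org.pentaho.di.job.entry.JobEntryCopy;

public class JobEntryCopyMockSketch {

  static JobEntryCopy mockJobEntryCopy(String name, int number) {
    JobEntryCopy copy = mock(JobEntryCopy.class);
    when(copy.getName()).thenReturn(name);
    when(copy.getNr()).thenReturn(number);
    return copy;
  }

  @Test
  public void mockedCopyReportsStubbedNameAndNr() {
    // Only the stubbed getters answer; all other methods return Mockito defaults.
    JobEntryCopy copy = mockJobEntryCopy("write-to-log", 3);
    assertEquals("write-to-log", copy.getName());
    assertEquals(3, copy.getNr());
  }
}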
Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
The class RepositoryExporter, method convertFromFileRepository.
private void convertFromFileRepository(JobMeta jobMeta) {
  if (repository instanceof KettleFileRepository) {
    KettleFileRepository fileRep = (KettleFileRepository) repository;

    // The id of the job is the filename.
    // Setting the filename also sets internal variables needed to load the trans/job referenced.
    //
    String jobMetaFilename = fileRep.calcFilename(jobMeta.getObjectId());
    jobMeta.setFilename(jobMetaFilename);

    for (JobEntryCopy copy : jobMeta.getJobCopies()) {
      JobEntryInterface entry = copy.getEntry();
      if (entry instanceof JobEntryTrans) {
        // Convert the transformation reference to a name-based reference.
        //
        JobEntryTrans trans = (JobEntryTrans) entry;
        if (trans.getSpecificationMethod() == ObjectLocationSpecificationMethod.FILENAME) {
          try {
            TransMeta meta = trans.getTransMeta(repository, repository.getMetaStore(), jobMeta);
            FileObject fileObject = KettleVFS.getFileObject(meta.getFilename());
            trans.setSpecificationMethod(ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
            trans.setFileName(null);
            trans.setTransname(meta.getName());
            trans.setDirectory(Const.NVL(calcRepositoryDirectory(fileRep, fileObject), "/"));
          } catch (Exception e) {
            log.logError(BaseMessages.getString(PKG, "Repository.Exporter.Log.UnableToLoadJobTrans", trans.getName()), e);
          }
        }
      }
      if (entry instanceof JobEntryJob) {
        // Convert the job reference to a name-based reference.
        //
        JobEntryJob jobEntryJob = (JobEntryJob) entry;
        if (jobEntryJob.getSpecificationMethod() == ObjectLocationSpecificationMethod.FILENAME) {
          try {
            JobMeta meta = jobEntryJob.getJobMeta(repository, repository.getMetaStore(), jobMeta);
            FileObject fileObject = KettleVFS.getFileObject(meta.getFilename());
            jobEntryJob.setSpecificationMethod(ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
            jobEntryJob.setFileName(null);
            jobEntryJob.setJobName(meta.getName());
            jobEntryJob.setDirectory(Const.NVL(calcRepositoryDirectory(fileRep, fileObject), "/"));
          } catch (Exception e) {
            log.logError(BaseMessages.getString(PKG, "Repository.Exporter.Log.UnableToLoadJobJob", jobEntryJob.getName()), e);
          }
        }
      }
    }
  }
}
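The directory conversion above hinges on calcRepositoryDirectory(fileRep, fileObject), whose body is not shown on this page. Conceptually it maps the referenced file's parent folder onto a repository path relative to the file repository's base directory. A rough, hypothetical sketch of that mapping; the helper name and the URI-prefix approach are assumptions, and the real method may normalize paths differently:

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;

// Hypothetical sketch only: derive a repository directory by stripping the
// file repository's base-directory prefix from the file's parent folder.
static String sketchRepositoryDirectory(String baseDirectoryUri, FileObject fileObject) throws FileSystemException {
  String folderUri = fileObject.getParent().getName().getURI();
  if (folderUri.startsWith(baseDirectoryUri)) {
    String relative = folderUri.substring(baseDirectoryUri.length());
    // An empty remainder means the file sits in the repository root.
    return relative.isEmpty() ? "/" : relative;
  }
  // Outside the base directory: the caller falls back to "/" via Const.NVL.
  return null;
}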
Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
The class KettleDatabaseRepositoryJobDelegate, method saveJob.
/**
 * Stores a job in the repository.
 *
 * @param jobMeta
 *          The job to store
 * @param versionComment
 *          The version comment, if any
 * @param monitor
 *          the (optional) UI progress monitor
 * @param overwrite
 *          Overwrite existing object(s)?
 * @throws KettleException
 *           in case some IO error occurs.
 */
public void saveJob(JobMeta jobMeta, String versionComment, ProgressMonitorListener monitor, boolean overwrite) throws KettleException {
  try {
    // Before saving the job, see if it's not locked by someone else...
    //
    int nrWorks = 2 + jobMeta.nrDatabases() + jobMeta.nrNotes() + jobMeta.nrJobEntries() + jobMeta.nrJobHops();
    if (monitor != null) {
      monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingTransformation") + jobMeta.getRepositoryDirectory() + Const.FILE_SEPARATOR + jobMeta.getName(), nrWorks);
    }
    repository.insertLogEntry("save job '" + jobMeta.getName() + "'");

    // Handle any previous version of this job...
    //
    if (monitor != null) {
      monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.HandlingPreviousVersionOfJob"));
    }
    jobMeta.setObjectId(getJobID(jobMeta.getName(), jobMeta.getRepositoryDirectory().getObjectId()));

    // If no valid id is available in the database, assign one...
    if (jobMeta.getObjectId() == null) {
      jobMeta.setObjectId(repository.connectionDelegate.getNextJobID());
    } else {
      // If we have a valid ID, we need to make sure everything is cleared out
      // of the database for this id_job, before we put it back in...
      repository.deleteJob(jobMeta.getObjectId());
    }
    if (monitor != null) {
      monitor.worked(1);
    }

    // Save the database connections...
    //
    if (log.isDebug()) {
      log.logDebug(BaseMessages.getString(PKG, "JobMeta.Log.SavingDatabaseConnections"));
    }
    for (int i = 0; i < jobMeta.nrDatabases(); i++) {
      if (monitor != null) {
        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingDatabaseTask.Title") + (i + 1) + "/" + jobMeta.nrDatabases());
      }
      DatabaseMeta databaseMeta = jobMeta.getDatabase(i);
      // Only save the connection if we're overwriting, it changed, or it was
      // never saved before (nothing was saved in the repository yet).
      if (overwrite || databaseMeta.hasChanged() || databaseMeta.getObjectId() == null) {
        repository.save(databaseMeta, versionComment, monitor, overwrite);
      }
      if (monitor != null) {
        monitor.worked(1);
      }
    }

    // Save the job record itself first, so that we can later attach the job
    // entries to the saved job.
    if (monitor != null) {
      monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingJobDetails"));
    }
    if (log.isDetailed()) {
      log.logDetailed("Saving job info to repository...");
    }
    insertJob(jobMeta);
    if (monitor != null) {
      monitor.worked(1);
    }

    // Save the group attributes map
    //
    saveJobAttributesMap(jobMeta.getObjectId(), jobMeta.getAttributesMap());

    // Save the slave servers...
    //
    for (int i = 0; i < jobMeta.getSlaveServers().size(); i++) {
      SlaveServer slaveServer = jobMeta.getSlaveServers().get(i);
      repository.save(slaveServer, versionComment, null, jobMeta.getObjectId(), false, overwrite);
    }

    // Save the notes...
    //
    if (log.isDetailed()) {
      log.logDetailed("Saving notes to repository...");
    }
    for (int i = 0; i < jobMeta.nrNotes(); i++) {
      if (monitor != null) {
        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingNoteNr") + (i + 1) + "/" + jobMeta.nrNotes());
      }
      NotePadMeta ni = jobMeta.getNote(i);
      repository.saveNotePadMeta(ni, jobMeta.getObjectId());
      if (ni.getObjectId() != null) {
        repository.insertJobNote(jobMeta.getObjectId(), ni.getObjectId());
      }
      if (monitor != null) {
        monitor.worked(1);
      }
    }

    // Save the job entry copies...
    //
    if (log.isDetailed()) {
      log.logDetailed("Saving " + jobMeta.nrJobEntries() + " job entry copies to repository...");
    }
    repository.updateJobEntryTypes();
    for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
      if (monitor != null) {
        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingJobEntryNr") + (i + 1) + "/" + jobMeta.nrJobEntries());
      }
      JobEntryCopy cge = jobMeta.getJobEntry(i);
      repository.jobEntryDelegate.saveJobEntryCopy(cge, jobMeta.getObjectId(), repository.metaStore);
      if (monitor != null) {
        monitor.worked(1);
      }
    }

    // Save the job hops...
    if (log.isDetailed()) {
      log.logDetailed("Saving job hops to repository...");
    }
    for (int i = 0; i < jobMeta.nrJobHops(); i++) {
      if (monitor != null) {
        monitor.subTask("Saving job hop #" + (i + 1) + "/" + jobMeta.nrJobHops());
      }
      JobHopMeta hi = jobMeta.getJobHop(i);
      saveJobHopMeta(hi, jobMeta.getObjectId());
      if (monitor != null) {
        monitor.worked(1);
      }
    }

    saveJobParameters(jobMeta);

    // Commit this transaction!!
    repository.commit();
    jobMeta.clearChanged();
    if (monitor != null) {
      monitor.done();
    }
  } catch (KettleDatabaseException dbe) {
    repository.rollback();
    throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.UnableToSaveJobInRepositoryRollbackPerformed"), dbe);
  }
}
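Callers normally reach saveJob(...) through the delegate the repository exposes rather than constructing the delegate themselves. A hypothetical one-line invocation, assuming a connected KettleDatabaseRepository field named repository that exposes this delegate as jobDelegate (the sibling jobEntryDelegate is used the same way in the loop above), a loaded jobMeta, no progress monitor, and overwrite enabled:

// Hypothetical usage sketch; the repository and delegate field names are assumptions.
repository.jobDelegate.saveJob(jobMeta, "Nightly check-in of the load job", null, true);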
Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
The class Job, method writeJobEntryLogInformation.
/**
 * Writes job entry log information to the job entry log table.
 *
 * @throws KettleException
 *           the kettle exception
 */
protected void writeJobEntryLogInformation() throws KettleException {
  Database db = null;
  JobEntryLogTable jobEntryLogTable = getJobMeta().getJobEntryLogTable();
  try {
    db = createDataBase(jobEntryLogTable.getDatabaseMeta());
    db.shareVariablesWith(this);
    db.connect();
    db.setCommit(logCommitSize);
    for (JobEntryCopy copy : getJobMeta().getJobCopies()) {
      db.writeLogRecord(jobEntryLogTable, LogStatus.START, copy, this);
    }
    db.cleanupLogRecords(jobEntryLogTable);
  } catch (Exception e) {
    throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToJobEntryInformationToLogTable"), e);
  } finally {
    // Guard against createDataBase(...) having failed before db was assigned.
    if (db != null) {
      if (!db.isAutoCommit()) {
        db.commitLog(true, jobEntryLogTable);
      }
      db.disconnect();
    }
  }
}
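The shape of this method is a reusable log-table pattern: connect, write one START record per entry, clean up old rows per the table's retention settings, then commit and disconnect in finally regardless of outcome. A stripped-down sketch of that control flow using hypothetical stand-in types rather than the Kettle Database and JobEntryLogTable APIs:

import java.util.List;

// Hypothetical stand-ins for Database and JobEntryLogTable, used only to
// isolate the connect -> write -> cleanup -> commit/disconnect control flow.
interface EntryLogStore {
  void connect() throws Exception;
  void writeStartRecord(String entryName) throws Exception;
  void cleanupOldRecords() throws Exception;
  boolean isAutoCommit();
  void commitLog() throws Exception;
  void disconnect();
}

class EntryLogSketch {
  static void writeEntryLog(EntryLogStore store, List<String> entryNames) throws Exception {
    try {
      store.connect();
      for (String name : entryNames) {
        store.writeStartRecord(name); // one START record per job entry copy
      }
      store.cleanupOldRecords(); // apply the log table's retention settings
    } finally {
      // Flush and release the connection even when a write failed.
      if (!store.isAutoCommit()) {
        store.commitLog();
      }
      store.disconnect();
    }
  }
}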
Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by pentaho.
The class Job, method execute.
/**
 * Execute a job entry recursively and move to the next job entry automatically.<br>
 * Uses a back-tracking algorithm.<br>
 *
 * @param nr
 *          the job entry number; incremented by one for each recursive call
 * @param prev_result
 *          the result of the previous execution, or null for the first entry
 * @param jobEntryCopy
 *          the job entry copy to execute
 * @param previous
 *          the job entry copy that was executed before this one, if any
 * @param reason
 *          the reason this entry is being executed (which hop was followed)
 * @return the result of executing this job entry and all entries that follow it
 * @throws KettleException
 *           if an unexpected error occurs during execution
 */
private Result execute(final int nr, Result prev_result, final JobEntryCopy jobEntryCopy, JobEntryCopy previous, String reason) throws KettleException {
  Result res = null;
  if (stopped.get()) {
    res = new Result(nr);
    res.stopped = true;
    return res;
  }

  // If we didn't have a previous result, create one; otherwise, copy the content...
  //
  final Result newResult;
  Result prevResult = null;
  if (prev_result != null) {
    prevResult = prev_result.clone();
  } else {
    prevResult = new Result();
  }

  JobExecutionExtension extension = new JobExecutionExtension(this, prevResult, jobEntryCopy, true);
  ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobBeforeJobEntryExecution.id, extension);

  jobMeta.disposeEmbeddedMetastoreProvider();
  if (jobMeta.getMetastoreLocatorOsgi() != null) {
    jobMeta.setEmbeddedMetastoreProviderKey(jobMeta.getMetastoreLocatorOsgi().setEmbeddedMetastore(jobMeta.getEmbeddedMetaStore()));
  }

  if (extension.result != null) {
    prevResult = extension.result;
  }

  if (!extension.executeEntry) {
    newResult = prevResult;
  } else {
    if (log.isDetailed()) {
      log.logDetailed("exec(" + nr + ", " + (prev_result != null ? prev_result.getNrErrors() : 0) + ", " + (jobEntryCopy != null ? jobEntryCopy.toString() : "null") + ")");
    }

    // Which entry is next?
    JobEntryInterface jobEntryInterface = jobEntryCopy.getEntry();
    jobEntryInterface.getLogChannel().setLogLevel(logLevel);

    // Track the fact that we are going to launch the next job entry...
    JobEntryResult jerBefore = new JobEntryResult(null, null, BaseMessages.getString(PKG, "Job.Comment.JobStarted"), reason, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename()));
    jobTracker.addJobTracker(new JobTracker(jobMeta, jerBefore));

    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(jobEntryInterface.getClass().getClassLoader());

    // Execute this entry...
    JobEntryInterface cloneJei = (JobEntryInterface) jobEntryInterface.clone();
    ((VariableSpace) cloneJei).copyVariablesFrom(this);
    cloneJei.setRepository(rep);
    if (rep != null) {
      cloneJei.setMetaStore(rep.getMetaStore());
    }
    cloneJei.setParentJob(this);
    cloneJei.setParentJobMeta(this.getJobMeta());
    final long start = System.currentTimeMillis();

    cloneJei.getLogChannel().logDetailed("Starting job entry");
    for (JobEntryListener jobEntryListener : jobEntryListeners) {
      jobEntryListener.beforeExecution(this, jobEntryCopy, cloneJei);
    }
    if (interactive) {
      if (jobEntryCopy.isTransformation()) {
        getActiveJobEntryTransformations().put(jobEntryCopy, (JobEntryTrans) cloneJei);
      }
      if (jobEntryCopy.isJob()) {
        getActiveJobEntryJobs().put(jobEntryCopy, (JobEntryJob) cloneJei);
      }
    }
    log.snap(Metrics.METRIC_JOBENTRY_START, cloneJei.toString());
    newResult = cloneJei.execute(prevResult, nr);
    log.snap(Metrics.METRIC_JOBENTRY_STOP, cloneJei.toString());
    final long end = System.currentTimeMillis();
    if (interactive) {
      if (jobEntryCopy.isTransformation()) {
        getActiveJobEntryTransformations().remove(jobEntryCopy);
      }
      if (jobEntryCopy.isJob()) {
        getActiveJobEntryJobs().remove(jobEntryCopy);
      }
    }

    if (cloneJei instanceof JobEntryTrans) {
      String throughput = newResult.getReadWriteThroughput((int) ((end - start) / 1000));
      if (throughput != null) {
        log.logMinimal(throughput);
      }
    }
    for (JobEntryListener jobEntryListener : jobEntryListeners) {
      jobEntryListener.afterExecution(this, jobEntryCopy, cloneJei, newResult);
    }

    Thread.currentThread().setContextClassLoader(cl);
    addErrors((int) newResult.getNrErrors());

    // Also capture the logging text after the execution...
    //
    LoggingBuffer loggingBuffer = KettleLogStore.getAppender();
    StringBuffer logTextBuffer = loggingBuffer.getBuffer(cloneJei.getLogChannel().getLogChannelId(), false);
    newResult.setLogText(logTextBuffer.toString() + newResult.getLogText());

    // Save this result as well...
    //
    JobEntryResult jerAfter = new JobEntryResult(newResult, cloneJei.getLogChannel().getLogChannelId(), BaseMessages.getString(PKG, "Job.Comment.JobFinished"), null, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename()));
    jobTracker.addJobTracker(new JobTracker(jobMeta, jerAfter));
    synchronized (jobEntryResults) {
      jobEntryResults.add(jerAfter);

      // Only keep the most recent results if a limit was configured...
      //
      if (maxJobEntriesLogged > 0) {
        while (jobEntryResults.size() > maxJobEntriesLogged) {
          // Remove the oldest.
          jobEntryResults.removeFirst();
        }
      }
    }
  }

  extension = new JobExecutionExtension(this, prevResult, jobEntryCopy, extension.executeEntry);
  ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobAfterJobEntryExecution.id, extension);

  // Try all next job entries.
  //
  // Keep track of all the threads we fired in case of parallel execution...
  // Keep track of the results of these executions too.
  //
  final List<Thread> threads = new ArrayList<Thread>();
  // The next two queues are modified concurrently, so thread-safe collections are used.
  final Queue<Result> threadResults = new ConcurrentLinkedQueue<Result>();
  final Queue<KettleException> threadExceptions = new ConcurrentLinkedQueue<KettleException>();
  final List<JobEntryCopy> threadEntries = new ArrayList<JobEntryCopy>();

  // Launch only those where the hop indicates true or false
  //
  int nrNext = jobMeta.findNrNextJobEntries(jobEntryCopy);
  for (int i = 0; i < nrNext && !isStopped(); i++) {
    // The next entry is...
    final JobEntryCopy nextEntry = jobMeta.findNextJobEntry(jobEntryCopy, i);

    // See if we need to execute this...
    final JobHopMeta hi = jobMeta.findJobHop(jobEntryCopy, nextEntry);

    // The next comment...
    final String nextComment;
    if (hi.isUnconditional()) {
      nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedUnconditional");
    } else {
      if (newResult.getResult()) {
        nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedSuccess");
      } else {
        nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedFailure");
      }
    }

    // Follow the hop if it is unconditional, or if the entry's outcome matches the
    // hop's condition: the XOR is false exactly when evaluation and result agree.
    //
    if (hi.isUnconditional() || (jobEntryCopy.evaluates() && (!(hi.getEvaluation() ^ newResult.getResult())))) {
      // Start this next step!
      if (log.isBasic()) {
        log.logBasic(BaseMessages.getString(PKG, "Job.Log.StartingEntry", nextEntry.getName()));
      }

      // Reset the error count before execution, except for entries (e.g. evaluations)
      // that need to see the previous errors.
      if (nextEntry.resetErrorsBeforeExecution()) {
        newResult.setNrErrors(0);
      }

      // Run the next entry in parallel or in-line, depending on the entry's settings...
      //
      if (jobEntryCopy.isLaunchingInParallel()) {
        threadEntries.add(nextEntry);

        Runnable runnable = new Runnable() {
          public void run() {
            try {
              Result threadResult = execute(nr + 1, newResult, nextEntry, jobEntryCopy, nextComment);
              threadResults.add(threadResult);
            } catch (Throwable e) {
              log.logError(Const.getStackTracker(e));
              threadExceptions.add(new KettleException(BaseMessages.getString(PKG, "Job.Log.UnexpectedError", nextEntry.toString()), e));
              Result threadResult = new Result();
              threadResult.setResult(false);
              threadResult.setNrErrors(1L);
              threadResults.add(threadResult);
            }
          }
        };
        Thread thread = new Thread(runnable);
        threads.add(thread);
        thread.start();
        if (log.isBasic()) {
          log.logBasic(BaseMessages.getString(PKG, "Job.Log.LaunchedJobEntryInParallel", nextEntry.getName()));
        }
      } else {
        try {
          // Same as before: blocks until it's done
          //
          res = execute(nr + 1, newResult, nextEntry, jobEntryCopy, nextComment);
        } catch (Throwable e) {
          log.logError(Const.getStackTracker(e));
          throw new KettleException(BaseMessages.getString(PKG, "Job.Log.UnexpectedError", nextEntry.toString()), e);
        }
        if (log.isBasic()) {
          log.logBasic(BaseMessages.getString(PKG, "Job.Log.FinishedJobEntry", nextEntry.getName(), res.getResult() + ""));
        }
      }
    }
  }

  // Wait for all parallel threads to finish...
  //
  if (jobEntryCopy.isLaunchingInParallel()) {
    for (int i = 0; i < threads.size(); i++) {
      Thread thread = threads.get(i);
      JobEntryCopy nextEntry = threadEntries.get(i);
      try {
        thread.join();
      } catch (InterruptedException e) {
        log.logError(jobMeta.toString(), BaseMessages.getString(PKG, "Job.Log.UnexpectedErrorWhileWaitingForJobEntry", nextEntry.getName()));
        threadExceptions.add(new KettleException(BaseMessages.getString(PKG, "Job.Log.UnexpectedErrorWhileWaitingForJobEntry", nextEntry.getName()), e));
      }
    }
    // if(log.isBasic()) log.logBasic(BaseMessages.getString(PKG,
    // "Job.Log.FinishedJobEntry",startpoint.getName(),res.getResult()+""));
  }

  // If no next entry was executed, return the previous result.
  if (res == null) {
    res = prevResult;
  }

  // If any parallel branch failed, mark the result as failed, log each
  // exception, and rethrow the first one...
  //
  if (threadExceptions.size() > 0) {
    res.setResult(false);
    res.setNrErrors(threadExceptions.size());
    for (KettleException e : threadExceptions) {
      log.logError(jobMeta.toString(), e.getMessage(), e);
    }
    throw threadExceptions.poll();
  }

  // Aggregate the results of the parallel branches...
  //
  for (Result threadResult : threadResults) {
    res.add(threadResult);
  }

  // Any errors mean the overall result is a failure...
  //
  if (res.getNrErrors() > 0) {
    res.setResult(false);
  }

  return res;
}
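To see the control flow without the Kettle plumbing, here is a deliberately simplified, hypothetical sketch of the same back-tracking idea: run a node, then follow each outgoing hop either in-line (blocking) or on a freshly started thread, collecting results and exceptions in concurrent queues and joining the threads before returning. All names below are invented; only the structure mirrors execute(...), which additionally threads Result objects, listeners, and extension points through each call.

import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

// Hypothetical minimal graph node: a task plus its successors and a flag
// that mirrors JobEntryCopy.isLaunchingInParallel().
class Node {
  final String name;
  final boolean parallel;
  final List<Node> next = new ArrayList<>();

  Node(String name, boolean parallel) {
    this.name = name;
    this.parallel = parallel;
  }

  boolean run() {
    System.out.println("running " + name);
    return true;
  }
}

class BacktrackingSketch {
  // Mirrors the shape of Job.execute(...): run this entry, then recurse into
  // the successors, spawning threads when the entry launches them in parallel.
  boolean execute(Node node) throws Exception {
    boolean result = node.run();
    List<Thread> threads = new ArrayList<>();
    Queue<Boolean> threadResults = new ConcurrentLinkedQueue<>();
    Queue<Exception> threadExceptions = new ConcurrentLinkedQueue<>();
    for (Node successor : node.next) {
      if (node.parallel) {
        Thread thread = new Thread(() -> {
          try {
            threadResults.add(execute(successor));
          } catch (Exception e) {
            threadExceptions.add(e);
            threadResults.add(false);
          }
        });
        threads.add(thread);
        thread.start();
      } else {
        result = execute(successor); // blocks until the whole branch is done
      }
    }
    for (Thread thread : threads) {
      thread.join(); // wait for all parallel branches
    }
    if (!threadExceptions.isEmpty()) {
      throw threadExceptions.poll(); // surface the first failure
    }
    for (boolean threadResult : threadResults) {
      result = result && threadResult; // aggregate parallel outcomes
    }
    return result;
  }
}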