Search in sources :

Example 1 with JobHopMeta

use of org.pentaho.di.job.JobHopMeta in project pentaho-metaverse by pentaho.

In class JobAnalyzer, the method analyze:

@Override
public synchronized IMetaverseNode analyze(final IComponentDescriptor documentDescriptor, final AbstractMeta meta, final IMetaverseNode node, final String documentPath) throws MetaverseAnalyzerException {
    final JobMeta jobMeta = (JobMeta) meta;
    // A parent Job is needed so entry analyzers can resolve internal kettle variables.
    Job j = new Job(null, jobMeta);
    j.setInternalKettleVariables(jobMeta);
    // Pull out the standard fields; each is optional, so only set non-null values.
    String description = jobMeta.getDescription();
    if (description != null) {
        node.setProperty(DictionaryConst.PROPERTY_DESCRIPTION, description);
    }
    String extendedDescription = jobMeta.getExtendedDescription();
    if (extendedDescription != null) {
        node.setProperty("extendedDescription", extendedDescription);
    }
    Date createdDate = jobMeta.getCreatedDate();
    if (createdDate != null) {
        node.setProperty(DictionaryConst.PROPERTY_CREATED, Long.toString(createdDate.getTime()));
    }
    String createdUser = jobMeta.getCreatedUser();
    if (createdUser != null) {
        node.setProperty(DictionaryConst.PROPERTY_CREATED_BY, createdUser);
    }
    Date lastModifiedDate = jobMeta.getModifiedDate();
    if (lastModifiedDate != null) {
        node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED, Long.toString(lastModifiedDate.getTime()));
    }
    String lastModifiedUser = jobMeta.getModifiedUser();
    if (lastModifiedUser != null) {
        node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED_BY, lastModifiedUser);
    }
    String version = jobMeta.getJobversion();
    if (version != null) {
        node.setProperty(DictionaryConst.PROPERTY_ARTIFACT_VERSION, version);
    }
    // Messages.getString returns a "!key!"-style marker for unknown keys, hence the startsWith check.
    String status = Messages.getString("INFO.JobOrTrans.Status_" + Integer.toString(jobMeta.getJobstatus()));
    if (status != null && !status.startsWith("!")) {
        node.setProperty(DictionaryConst.PROPERTY_STATUS, status);
    }
    node.setProperty(DictionaryConst.PROPERTY_PATH, documentPath);
    // Process job parameters: each parameter becomes a "parameter_<name>" property holding
    // its default value, current value, and description.
    String[] parameters = jobMeta.listParameters();
    if (parameters != null) {
        for (String parameter : parameters) {
            try {
                // Determine parameter properties and add them to a map, then the map to the list
                String defaultParameterValue = jobMeta.getParameterDefault(parameter);
                String parameterValue = jobMeta.getParameterValue(parameter);
                String parameterDescription = jobMeta.getParameterDescription(parameter);
                PropertiesHolder paramProperties = new PropertiesHolder();
                paramProperties.setProperty("defaultValue", defaultParameterValue);
                paramProperties.setProperty("value", parameterValue);
                paramProperties.setProperty("description", parameterDescription);
                node.setProperty("parameter_" + parameter, paramProperties.toString());
            } catch (UnknownParamException upe) {
                // This shouldn't happen as we're using the list provided by the meta
                throw new MetaverseAnalyzerException(upe);
            }
        }
    }
    // Handle the job entries: analyze each one with a registered analyzer, an
    // annotation-driven analyzer, or the generic fallback (in that order of preference).
    for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
        JobEntryCopy entry = jobMeta.getJobEntry(i);
        try {
            if (entry != null) {
                entry.getEntry().setParentJob(j);
                IMetaverseNode jobEntryNode = null;
                JobEntryInterface jobEntryInterface = entry.getEntry();
                IComponentDescriptor entryDescriptor = new MetaverseComponentDescriptor(entry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY, node, documentDescriptor.getContext());
                Set<IJobEntryAnalyzer> jobEntryAnalyzers = getJobEntryAnalyzers(jobEntryInterface);
                if (jobEntryAnalyzers != null && !jobEntryAnalyzers.isEmpty()) {
                    for (IJobEntryAnalyzer jobEntryAnalyzer : jobEntryAnalyzers) {
                        // Clone the analyzer where possible so that shared analyzer state cannot
                        // change while the job is being analyzed.
                        if (jobEntryAnalyzer instanceof IClonableJobEntryAnalyzer) {
                            final IClonableJobEntryAnalyzer clonedAnalyzer = ((IClonableJobEntryAnalyzer) jobEntryAnalyzer).cloneAnalyzer();
                            clonedAnalyzer.setDocumentAnalyzer(this);
                            clonedAnalyzer.setDocumentDescriptor(documentDescriptor);
                            clonedAnalyzer.setDocumentPath(documentPath);
                            jobEntryAnalyzer = clonedAnalyzer;
                        } else {
                            log.debug(Messages.getString("WARNING.CannotCloneAnalyzer"), jobEntryAnalyzer);
                        }
                        jobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
                        jobEntryNode = (IMetaverseNode) jobEntryAnalyzer.analyze(entryDescriptor, entry.getEntry());
                    }
                } else if (new AnnotatedClassFields(jobEntryInterface, jobEntryInterface.getParentJobMeta()).hasMetaverseAnnotations()) {
                    AnnotationDrivenJobAnalyzer annotationDrivenJobAnalyzer = new AnnotationDrivenJobAnalyzer(jobEntryInterface);
                    annotationDrivenJobAnalyzer.setMetaverseBuilder(metaverseBuilder);
                    annotationDrivenJobAnalyzer.setDocumentAnalyzer(this);
                    annotationDrivenJobAnalyzer.setDocumentDescriptor(documentDescriptor);
                    annotationDrivenJobAnalyzer.setDocumentPath(documentPath);
                    jobEntryNode = annotationDrivenJobAnalyzer.analyze(entryDescriptor, jobEntryInterface);
                } else {
                    GenericJobEntryMetaAnalyzer defaultJobEntryAnalyzer = new GenericJobEntryMetaAnalyzer();
                    defaultJobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
                    jobEntryNode = defaultJobEntryAnalyzer.analyze(entryDescriptor, jobEntryInterface);
                }
                if (jobEntryNode != null) {
                    metaverseBuilder.addLink(node, DictionaryConst.LINK_CONTAINS, jobEntryNode);
                }
            }
        } catch (Exception mae) {
            // Don't throw an exception, just log and carry on so one bad entry
            // doesn't abort analysis of the whole job.
            log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", entry.getName(), Const.NVL(mae.getLocalizedMessage(), "Unspecified")));
            log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), mae);
        }
    }
    // Model the hops between entries. The child namespace is derived solely from the
    // job node's logical id, so it is loop-invariant and created once up front.
    final INamespace childNs = new Namespace(node.getLogicalId());
    int numHops = jobMeta.nrJobHops();
    for (int i = 0; i < numHops; i++) {
        JobHopMeta hop = jobMeta.getJobHop(i);
        JobEntryCopy fromEntry = hop.getFromEntry();
        JobEntryCopy toEntry = hop.getToEntry();
        // Process legitimate hops only (both endpoints must exist).
        if (fromEntry != null && toEntry != null) {
            IMetaverseNode fromEntryNode = metaverseObjectFactory.createNodeObject(childNs, fromEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
            IMetaverseNode toEntryNode = metaverseObjectFactory.createNodeObject(childNs, toEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
            metaverseBuilder.addLink(fromEntryNode, DictionaryConst.LINK_HOPSTO, toEntryNode);
        }
    }
    metaverseBuilder.addNode(node);
    addParentLink(documentDescriptor, node);
    return node;
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) PropertiesHolder(org.pentaho.metaverse.api.PropertiesHolder) JobHopMeta(org.pentaho.di.job.JobHopMeta) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) IMetaverseNode(org.pentaho.metaverse.api.IMetaverseNode) IJobEntryAnalyzer(org.pentaho.metaverse.api.analyzer.kettle.jobentry.IJobEntryAnalyzer) Date(java.util.Date) AnnotatedClassFields(org.pentaho.metaverse.api.analyzer.kettle.annotations.AnnotatedClassFields) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) INamespace(org.pentaho.metaverse.api.INamespace) Namespace(org.pentaho.metaverse.api.Namespace) MetaverseComponentDescriptor(org.pentaho.metaverse.api.MetaverseComponentDescriptor) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) IComponentDescriptor(org.pentaho.metaverse.api.IComponentDescriptor) INamespace(org.pentaho.metaverse.api.INamespace) IClonableJobEntryAnalyzer(org.pentaho.metaverse.api.analyzer.kettle.jobentry.IClonableJobEntryAnalyzer) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) GenericJobEntryMetaAnalyzer(org.pentaho.metaverse.analyzer.kettle.jobentry.GenericJobEntryMetaAnalyzer) Job(org.pentaho.di.job.Job) AnnotationDrivenJobAnalyzer(org.pentaho.metaverse.api.analyzer.kettle.annotations.AnnotationDrivenJobAnalyzer)

Example 2 with JobHopMeta

use of org.pentaho.di.job.JobHopMeta in project pentaho-metaverse by pentaho.

In class JobAnalyzerTest, the method testAnalyzerJobWithEntriesAndHop:

@Test
public void testAnalyzerJobWithEntriesAndHop() throws MetaverseAnalyzerException {
    // Build a second entry copy for the hop to point at.
    final JobEntryCopy toEntryCopy = mock(JobEntryCopy.class);
    when(toEntryCopy.getEntry()).thenReturn(mockJobEntryInterface);
    when(toEntryCopy.getParentJobMeta()).thenReturn(mockContent);
    // The job exposes two entries connected by a single hop.
    when(mockContent.nrJobEntries()).thenReturn(2);
    when(mockContent.getJobEntry(0)).thenReturn(mockJobEntry);
    when(mockContent.getJobEntry(1)).thenReturn(toEntryCopy);
    final JobHopMeta jobHop = new JobHopMeta(mockJobEntry, toEntryCopy);
    when(mockContent.nrJobHops()).thenReturn(1);
    when(mockContent.getJobHop(0)).thenReturn(jobHop);
    // Analysis of a job with entries and a hop must still yield a root node.
    final IMetaverseNode result = analyzer.analyze(descriptor, mockJobDoc);
    assertNotNull(result);
}
Also used : JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobHopMeta(org.pentaho.di.job.JobHopMeta) IMetaverseNode(org.pentaho.metaverse.api.IMetaverseNode) Test(org.junit.Test)

Example 3 with JobHopMeta

use of org.pentaho.di.job.JobHopMeta in project pentaho-metaverse by pentaho.

In class JobMetaJsonSerializer, the method serializeHops:

@Override
protected void serializeHops(JobMeta meta, JsonGenerator json) throws IOException {
    // Emit every job hop as a HopInfo object inside a JSON array field.
    json.writeArrayFieldStart(JSON_PROPERTY_HOPS);
    final int hopCount = meta.nrJobHops();
    for (int hopIndex = 0; hopIndex < hopCount; hopIndex++) {
        json.writeObject(new HopInfo(meta.getJobHop(hopIndex)));
    }
    json.writeEndArray();
}
Also used : HopInfo(org.pentaho.metaverse.api.model.kettle.HopInfo) JobHopMeta(org.pentaho.di.job.JobHopMeta)

Example 4 with JobHopMeta

use of org.pentaho.di.job.JobHopMeta in project pentaho-metaverse by pentaho.

In class JobMetaJsonSerializerTest, the method testSerializeHops:

@Test
public void testSerializeHops() throws Exception {
    // Endpoints of the hop.
    final JobEntryCopy fromEntry = mock(JobEntryCopy.class);
    final JobEntryCopy toEntry = mock(JobEntryCopy.class);
    when(fromEntry.getName()).thenReturn("from");
    when(toEntry.getName()).thenReturn("to");
    // A single enabled hop mock returned for every index.
    final JobHopMeta hop = mock(JobHopMeta.class);
    when(hop.getFromEntry()).thenReturn(fromEntry);
    when(hop.getToEntry()).thenReturn(toEntry);
    when(hop.isEnabled()).thenReturn(true);
    when(meta.nrJobHops()).thenReturn(2);
    when(meta.getJobHop(anyInt())).thenReturn(hop);
    serializer.serializeHops(meta, json);
    // One HopInfo object must be written per hop.
    verify(json, times(2)).writeObject(any(HopInfo.class));
}
Also used : HopInfo(org.pentaho.metaverse.api.model.kettle.HopInfo) JobHopMeta(org.pentaho.di.job.JobHopMeta) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) Test(org.junit.Test)

Example 5 with JobHopMeta

use of org.pentaho.di.job.JobHopMeta in project pentaho-kettle by pentaho.

In class KettleDatabaseRepositoryJobDelegate, the method loadJobMeta:

/**
 * Load a job from a repository directory.
 *
 * @param jobname
 *          The name of the job
 * @param repdir
 *          The directory in which the job resides
 * @param monitor
 *          Optional progress monitor (may be null); receives subtask and progress updates
 * @return the fully-populated JobMeta
 * @throws KettleException
 *           if the job cannot be found or an error occurs while reading it
 */
public JobMeta loadJobMeta(String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor) throws KettleException {
    JobMeta jobMeta = new JobMeta();
    synchronized (repository) {
        try {
            // Clear everything...
            jobMeta.clear();
            jobMeta.setRepositoryDirectory(repdir);
            // Get the transformation id
            jobMeta.setObjectId(getJobID(jobname, repdir.getObjectId()));
            // If no valid id is available in the database, then give error...
            if (jobMeta.getObjectId() != null) {
                // Load the notes...
                ObjectId[] noteids = repository.getJobNoteIDs(jobMeta.getObjectId());
                ObjectId[] jecids = repository.getJobEntryCopyIDs(jobMeta.getObjectId());
                ObjectId[] hopid = repository.getJobHopIDs(jobMeta.getObjectId());
                // 2 extra units: job header read + shared objects read.
                int nrWork = 2 + noteids.length + jecids.length + hopid.length;
                if (monitor != null) {
                    monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork);
                }
                // 
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobInformation"));
                }
                // Read the job header row and copy the standard fields into the meta.
                RowMetaAndData jobRow = getJob(jobMeta.getObjectId());
                jobMeta.setName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_NAME, null));
                jobMeta.setDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null));
                jobMeta.setExtendedDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null));
                jobMeta.setJobversion(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null));
                jobMeta.setJobstatus(Const.toInt(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null), -1));
                jobMeta.setCreatedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null));
                jobMeta.setCreatedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date()));
                jobMeta.setModifiedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, null));
                jobMeta.setModifiedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, new Date()));
                long id_logdb = jobRow.getInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, 0);
                if (id_logdb > 0) {
                    // Get the logconnection
                    // 
                    DatabaseMeta logDb = repository.loadDatabaseMeta(new LongObjectId(id_logdb), null);
                    jobMeta.getJobLogTable().setConnectionName(logDb.getName());
                // jobMeta.getJobLogTable().getDatabaseMeta().shareVariablesWith(jobMeta);
                }
                jobMeta.getJobLogTable().setTableName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, null));
                jobMeta.getJobLogTable().setBatchIdUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, false));
                jobMeta.getJobLogTable().setLogFieldUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, false));
                jobMeta.getJobLogTable().setLogSizeLimit(getJobAttributeString(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT));
                jobMeta.setBatchIdPassed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, false));
                // Load all the log tables for the job...
                // 
                RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
                for (LogTableInterface logTable : jobMeta.getLogTables()) {
                    logTable.loadFromRepository(attributeInterface);
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                // 
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingAvailableDatabasesFromRepository"));
                }
                // Read objects from the shared XML file & the repository
                try {
                    jobMeta.setSharedObjectsFile(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, null));
                    jobMeta.setSharedObjects(repository != null ? repository.readJobMetaSharedObjects(jobMeta) : jobMeta.readSharedObjects());
                } catch (Exception e) {
                    // Shared-object failures are logged but do not abort the load.
                    log.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
                    // 
                    log.logError(Const.getStackTracker(e));
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + noteids.length + " notes");
                }
                for (int i = 0; i < noteids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length);
                    }
                    NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta(noteids[i]);
                    if (jobMeta.indexOfNote(ni) < 0) {
                        jobMeta.addNote(ni);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the group attributes map
                // 
                jobMeta.setAttributesMap(loadJobAttributesMap(jobMeta.getObjectId()));
                // Load the job entries...
                // 
                // Keep a unique list of job entries to facilitate in the loading.
                // 
                List<JobEntryInterface> jobentries = new ArrayList<JobEntryInterface>();
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + jecids.length + " job entries");
                }
                for (int i = 0; i < jecids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + (jecids.length));
                    }
                    JobEntryCopy jec = repository.jobEntryDelegate.loadJobEntryCopy(jobMeta.getObjectId(), jecids[i], jobentries, jobMeta.getDatabases(), jobMeta.getSlaveServers(), jobname);
                    if (jec.isMissing()) {
                        jobMeta.addMissingEntry((MissingEntry) jec.getEntry());
                    }
                    // Also set the copy number...
                    // We count the number of job entry copies that use the job
                    // entry
                    // 
                    int copyNr = 0;
                    for (JobEntryCopy copy : jobMeta.getJobCopies()) {
                        if (jec.getEntry() == copy.getEntry()) {
                            copyNr++;
                        }
                    }
                    jec.setNr(copyNr);
                    int idx = jobMeta.indexOfJobEntry(jec);
                    if (idx < 0) {
                        if (jec.getName() != null && jec.getName().length() > 0) {
                            jobMeta.addJobEntry(jec);
                        }
                    } else {
                        // replace it!
                        jobMeta.setJobEntry(idx, jec);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the hops...
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + hopid.length + " job hops");
                }
                for (int i = 0; i < hopid.length; i++) {
                    if (monitor != null) {
                        // BUGFIX: progress denominator was jecids.length (copy-paste from the
                        // entry loop); it must be the number of hops.
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + (hopid.length));
                    }
                    JobHopMeta hi = loadJobHopMeta(hopid[i], jobMeta.getJobCopies());
                    jobMeta.getJobhops().add(hi);
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                loadRepParameters(jobMeta);
                // Finally, clear the changed flags...
                jobMeta.clearChanged();
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.FinishedLoadOfJob"));
                }
                if (monitor != null) {
                    monitor.done();
                }
                // close prepared statements, minimize locking etc.
                // 
                repository.connectionDelegate.closeAttributeLookupPreparedStatements();
                return jobMeta;
            } else {
                throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.CanNotFindJob") + jobname);
            }
        } catch (KettleException dbe) {
            throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
        } finally {
            // Always (re)initialize variables, even on failure, so the meta is consistent.
            jobMeta.initializeVariablesFrom(jobMeta.getParentVariableSpace());
            jobMeta.setInternalKettleVariables();
        }
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) JobHopMeta(org.pentaho.di.job.JobHopMeta) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) LongObjectId(org.pentaho.di.repository.LongObjectId) ObjectId(org.pentaho.di.repository.ObjectId) ArrayList(java.util.ArrayList) LongObjectId(org.pentaho.di.repository.LongObjectId) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Date(java.util.Date) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) NotePadMeta(org.pentaho.di.core.NotePadMeta)

Aggregations

JobHopMeta (org.pentaho.di.job.JobHopMeta)31 JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy)24 Point (org.pentaho.di.core.gui.Point)17 KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint)16 NotePadMeta (org.pentaho.di.core.NotePadMeta)12 KettleException (org.pentaho.di.core.exception.KettleException)10 JobMeta (org.pentaho.di.job.JobMeta)10 ArrayList (java.util.ArrayList)8 MessageBox (org.eclipse.swt.widgets.MessageBox)6 JobEntryInterface (org.pentaho.di.job.entry.JobEntryInterface)6 RowMetaAndData (org.pentaho.di.core.RowMetaAndData)4 DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta)4 AreaOwner (org.pentaho.di.core.gui.AreaOwner)4 Job (org.pentaho.di.job.Job)4 Date (java.util.Date)3 Test (org.junit.Test)3 KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)3 JobEntrySpecial (org.pentaho.di.job.entries.special.JobEntrySpecial)3 KettleRepositoryLostException (org.pentaho.di.repository.KettleRepositoryLostException)3 XulException (org.pentaho.ui.xul.XulException)3