
Example 36 with JobEntryCopy

Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by Pentaho.

From the class JobMeta, the method getJobEntryCopy:

/**
 * Finds the job entry copy whose icon contains the given canvas coordinates.
 *
 * @param x        the x-coordinate to test
 * @param y        the y-coordinate to test
 * @param iconsize the icon size, in pixels
 * @return the topmost matching job entry copy, or null if no icon contains the point
 */
public JobEntryCopy getJobEntryCopy(int x, int y, int iconsize) {
    int i, s;
    s = nrJobEntries();
    for (i = s - 1; i >= 0; i--) {
        // Back to front because drawing goes from start to end
        JobEntryCopy je = getJobEntry(i);
        Point p = je.getLocation();
        if (p != null) {
            if (x >= p.x && x <= p.x + iconsize && y >= p.y && y <= p.y + iconsize) {
                return je;
            }
        }
    }
    return null;
}
Also used : JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) Point(org.pentaho.di.core.gui.Point) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint)
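
For context, here is a minimal hit-test sketch (not from the project) showing how getJobEntryCopy could resolve the icon under a mouse click; the jobMeta instance, the mouse coordinates, and ICON_SIZE are assumptions supplied by the caller.

// Hypothetical caller: find the job entry icon under a mouse click on the canvas.
// jobMeta, mouseX, mouseY and ICON_SIZE are placeholders provided by the surrounding code.
final int ICON_SIZE = 32;
JobEntryCopy clicked = jobMeta.getJobEntryCopy(mouseX, mouseY, ICON_SIZE);
if (clicked != null) {
    // The topmost copy whose icon bounds contain the point is returned.
    System.out.println("Clicked on job entry: " + clicked.getName());
}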

Example 37 with JobEntryCopy

Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by Pentaho.

From the class JobPainter, the method drawJobElements:

private void drawJobElements() {
    if (!shadow && gridSize > 1) {
        drawGrid();
    }
    try {
        ExtensionPointHandler.callExtensionPoint(LogChannel.GENERAL, KettleExtensionPoint.JobPainterStart.id, this);
    } catch (KettleException e) {
        LogChannel.GENERAL.logError("Error in JobPainterStart extension point", e);
    }
    // First draw the notes...
    gc.setFont(EFont.NOTE);
    for (int i = 0; i < jobMeta.nrNotes(); i++) {
        NotePadMeta ni = jobMeta.getNote(i);
        drawNote(ni);
    }
    gc.setFont(EFont.GRAPH);
    // ... and then the rest on top of it...
    for (int i = 0; i < jobMeta.nrJobHops(); i++) {
        JobHopMeta hi = jobMeta.getJobHop(i);
        drawJobHop(hi, false);
    }
    EImage arrow;
    if (candidate != null) {
        drawJobHop(candidate, true);
    } else {
        if (startHopEntry != null && endHopLocation != null) {
            Point fr = startHopEntry.getLocation();
            Point to = endHopLocation;
            if (endHopEntry == null) {
                gc.setForeground(EColor.GRAY);
                arrow = EImage.ARROW_DISABLED;
            } else {
                gc.setForeground(EColor.BLUE);
                arrow = EImage.ARROW_DEFAULT;
            }
            Point start = real2screen(fr.x + iconsize / 2, fr.y + iconsize / 2);
            Point end = real2screen(to.x, to.y);
            drawArrow(arrow, start.x, start.y, end.x, end.y, theta, calcArrowLength(), 1.2, null, startHopEntry, endHopEntry == null ? endHopLocation : endHopEntry);
        } else if (endHopEntry != null && endHopLocation != null) {
            Point fr = endHopLocation;
            Point to = endHopEntry.getLocation();
            if (startHopEntry == null) {
                gc.setForeground(EColor.GRAY);
                arrow = EImage.ARROW_DISABLED;
            } else {
                gc.setForeground(EColor.BLUE);
                arrow = EImage.ARROW_DEFAULT;
            }
            Point start = real2screen(fr.x, fr.y);
            Point end = real2screen(to.x + iconsize / 2, to.y + iconsize / 2);
            drawArrow(arrow, start.x, start.y, end.x, end.y + iconsize / 2, theta, calcArrowLength(), 1.2, null, startHopEntry == null ? endHopLocation : startHopEntry, endHopEntry);
        }
    }
    for (int j = 0; j < jobMeta.nrJobEntries(); j++) {
        JobEntryCopy je = jobMeta.getJobEntry(j);
        drawJobEntryCopy(je);
    }
    // If an entry is flagged as not accepting input (noInputEntry), cross out its icon in red.
    if (noInputEntry != null) {
        gc.setLineWidth(2);
        gc.setForeground(EColor.RED);
        Point n = noInputEntry.getLocation();
        gc.drawLine(offset.x + n.x - 5, offset.y + n.y - 5, offset.x + n.x + iconsize + 5, offset.y + n.y + iconsize + 5);
        gc.drawLine(offset.x + n.x - 5, offset.y + n.y + iconsize + 5, offset.x + n.x + iconsize + 5, offset.y + n.y - 5);
    }
    if (drop_candidate != null) {
        gc.setLineStyle(ELineStyle.SOLID);
        gc.setForeground(EColor.BLACK);
        Point screen = real2screen(drop_candidate.x, drop_candidate.y);
        gc.drawRectangle(screen.x, screen.y, iconsize, iconsize);
    }
    try {
        ExtensionPointHandler.callExtensionPoint(LogChannel.GENERAL, KettleExtensionPoint.JobPainterEnd.id, this);
    } catch (KettleException e) {
        LogChannel.GENERAL.logError("Error in JobPainterEnd extension point", e);
    }
    if (!shadow) {
        drawRect(selrect);
    }
}
Also used : EImage(org.pentaho.di.core.gui.PrimitiveGCInterface.EImage) KettleException(org.pentaho.di.core.exception.KettleException) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) NotePadMeta(org.pentaho.di.core.NotePadMeta) Point(org.pentaho.di.core.gui.Point) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint)
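
As a companion to the callExtensionPoint calls above, here is a hedged sketch of an extension point plugin that would run at JobPainterStart. The class name, annotation id, and description are illustrative, and the extensionPointId is assumed to match the enum constant's id; the only contract taken from the example is that drawJobElements passes the painter itself as the object.

import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.extension.ExtensionPoint;
import org.pentaho.di.core.extension.ExtensionPointInterface;
import org.pentaho.di.core.logging.LogChannelInterface;

// Illustrative extension point: logs the start of every job canvas repaint.
@ExtensionPoint(id = "SampleJobPainterStartListener", extensionPointId = "JobPainterStart",
        description = "Logs the start of each job canvas repaint")
public class SampleJobPainterStartListener implements ExtensionPointInterface {

    @Override
    public void callExtensionPoint(LogChannelInterface log, Object object) throws KettleException {
        // drawJobElements() passes the JobPainter instance as the extension point object.
        log.logBasic("JobPainterStart fired for " + object.getClass().getSimpleName());
    }
}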

Example 38 with JobEntryCopy

Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by Pentaho.

From the class JobEntrySetVariables, the method setParentJob:

@Override
public void setParentJob(Job parentJob) {
    super.setParentJob(parentJob);
    // PDI-16387 Add a listener for recovering changed values of variables
    JobEntryListener jobListener = new JobEntryListener() {

        @Override
        public void beforeExecution(Job job, JobEntryCopy jobEntryCopy, JobEntryInterface jobEntryInterface) {
            for (String key : changedInitialVariables.keySet()) {
                setVariable(key, changedInitialVariables.get(key));
                parentJob.setVariable(key, changedInitialVariables.get(key));
            }
            changedInitialVariables.clear();
        }

        @Override
        public void afterExecution(Job job, JobEntryCopy jobEntryCopy, JobEntryInterface jobEntryInterface, Result result) {
        }
    };
    parentJob.addJobEntryListener(jobListener);
}
Also used : JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) JobEntryListener(org.pentaho.di.job.JobEntryListener) Job(org.pentaho.di.job.Job) Result(org.pentaho.di.core.Result)
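
For illustration, a hedged sketch of the same listener pattern used for simple monitoring rather than variable recovery; the job instance and the log messages are assumptions, while the JobEntryListener shape follows the example above.

// Illustrative monitoring listener, registered on an existing Job instance.
job.addJobEntryListener(new JobEntryListener() {

    @Override
    public void beforeExecution(Job job, JobEntryCopy jobEntryCopy, JobEntryInterface jobEntryInterface) {
        LogChannel.GENERAL.logBasic("Starting job entry: " + jobEntryCopy.getName());
    }

    @Override
    public void afterExecution(Job job, JobEntryCopy jobEntryCopy, JobEntryInterface jobEntryInterface, Result result) {
        LogChannel.GENERAL.logBasic("Finished job entry: " + jobEntryCopy.getName()
                + ", success=" + result.getResult());
    }
});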

Example 39 with JobEntryCopy

Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by Pentaho.

From the class KettleDatabaseRepositoryJobDelegate, the method loadJobMeta:

/**
 * Loads a job from a repository directory.
 *
 * @param jobname
 *          the name of the job
 * @param repdir
 *          the directory in which the job resides
 * @param monitor
 *          an optional progress monitor (may be null)
 * @return the loaded job metadata
 * @throws KettleException
 *           if the job cannot be found or an error occurs while reading it
 */
public JobMeta loadJobMeta(String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor) throws KettleException {
    JobMeta jobMeta = new JobMeta();
    synchronized (repository) {
        try {
            // Clear everything...
            jobMeta.clear();
            jobMeta.setRepositoryDirectory(repdir);
            // Get the transformation id
            jobMeta.setObjectId(getJobID(jobname, repdir.getObjectId()));
            // If no valid id is available in the database, then give error...
            if (jobMeta.getObjectId() != null) {
                // Load the notes...
                ObjectId[] noteids = repository.getJobNoteIDs(jobMeta.getObjectId());
                ObjectId[] jecids = repository.getJobEntryCopyIDs(jobMeta.getObjectId());
                ObjectId[] hopid = repository.getJobHopIDs(jobMeta.getObjectId());
                int nrWork = 2 + noteids.length + jecids.length + hopid.length;
                if (monitor != null) {
                    monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork);
                }
                // 
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobInformation"));
                }
                RowMetaAndData jobRow = getJob(jobMeta.getObjectId());
                jobMeta.setName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_NAME, null));
                jobMeta.setDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null));
                jobMeta.setExtendedDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null));
                jobMeta.setJobversion(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null));
                jobMeta.setJobstatus(Const.toInt(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null), -1));
                jobMeta.setCreatedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null));
                jobMeta.setCreatedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date()));
                jobMeta.setModifiedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, null));
                jobMeta.setModifiedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, new Date()));
                long id_logdb = jobRow.getInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, 0);
                if (id_logdb > 0) {
                    // Get the logconnection
                    // 
                    DatabaseMeta logDb = repository.loadDatabaseMeta(new LongObjectId(id_logdb), null);
                    jobMeta.getJobLogTable().setConnectionName(logDb.getName());
                // jobMeta.getJobLogTable().getDatabaseMeta().shareVariablesWith(jobMeta);
                }
                jobMeta.getJobLogTable().setTableName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, null));
                jobMeta.getJobLogTable().setBatchIdUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, false));
                jobMeta.getJobLogTable().setLogFieldUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, false));
                jobMeta.getJobLogTable().setLogSizeLimit(getJobAttributeString(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT));
                jobMeta.setBatchIdPassed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, false));
                // Load all the log tables for the job...
                // 
                RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
                for (LogTableInterface logTable : jobMeta.getLogTables()) {
                    logTable.loadFromRepository(attributeInterface);
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                // 
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingAvailableDatabasesFromRepository"));
                }
                // Read objects from the shared XML file & the repository
                try {
                    jobMeta.setSharedObjectsFile(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, null));
                    jobMeta.setSharedObjects(repository != null ? repository.readJobMetaSharedObjects(jobMeta) : jobMeta.readSharedObjects());
                } catch (Exception e) {
                    log.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
                    // 
                    log.logError(Const.getStackTracker(e));
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + noteids.length + " notes");
                }
                for (int i = 0; i < noteids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length);
                    }
                    NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta(noteids[i]);
                    if (jobMeta.indexOfNote(ni) < 0) {
                        jobMeta.addNote(ni);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the group attributes map
                // 
                jobMeta.setAttributesMap(loadJobAttributesMap(jobMeta.getObjectId()));
                // Load the job entries...
                // 
                // Keep a unique list of job entries to facilitate in the loading.
                // 
                List<JobEntryInterface> jobentries = new ArrayList<JobEntryInterface>();
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + jecids.length + " job entries");
                }
                for (int i = 0; i < jecids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + (jecids.length));
                    }
                    JobEntryCopy jec = repository.jobEntryDelegate.loadJobEntryCopy(jobMeta.getObjectId(), jecids[i], jobentries, jobMeta.getDatabases(), jobMeta.getSlaveServers(), jobname);
                    if (jec.isMissing()) {
                        jobMeta.addMissingEntry((MissingEntry) jec.getEntry());
                    }
                    // Also set the copy number...
                    // We count the number of job entry copies that use the job
                    // entry
                    // 
                    int copyNr = 0;
                    for (JobEntryCopy copy : jobMeta.getJobCopies()) {
                        if (jec.getEntry() == copy.getEntry()) {
                            copyNr++;
                        }
                    }
                    jec.setNr(copyNr);
                    int idx = jobMeta.indexOfJobEntry(jec);
                    if (idx < 0) {
                        if (jec.getName() != null && jec.getName().length() > 0) {
                            jobMeta.addJobEntry(jec);
                        }
                    } else {
                        // replace it!
                        jobMeta.setJobEntry(idx, jec);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the hops...
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + hopid.length + " job hops");
                }
                for (int i = 0; i < hopid.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + (hopid.length));
                    }
                    JobHopMeta hi = loadJobHopMeta(hopid[i], jobMeta.getJobCopies());
                    jobMeta.getJobhops().add(hi);
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                loadRepParameters(jobMeta);
                // Finally, clear the changed flags...
                jobMeta.clearChanged();
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.FinishedLoadOfJob"));
                }
                if (monitor != null) {
                    monitor.done();
                }
                // close prepared statements, minimize locking etc.
                // 
                repository.connectionDelegate.closeAttributeLookupPreparedStatements();
                return jobMeta;
            } else {
                throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.CanNotFindJob") + jobname);
            }
        } catch (KettleException dbe) {
            throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
        } finally {
            jobMeta.initializeVariablesFrom(jobMeta.getParentVariableSpace());
            jobMeta.setInternalKettleVariables();
        }
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) JobHopMeta(org.pentaho.di.job.JobHopMeta) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) LongObjectId(org.pentaho.di.repository.LongObjectId) ObjectId(org.pentaho.di.repository.ObjectId) ArrayList(java.util.ArrayList) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Date(java.util.Date) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) NotePadMeta(org.pentaho.di.core.NotePadMeta)
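
As a usage illustration, a hedged sketch of loading a job through this delegate; the directory path, job name, and the repository.jobDelegate field access are assumptions about the caller's environment, and the progress monitor is simply omitted.

// Illustrative caller: load a job by name from a repository directory (no progress monitor).
RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();
RepositoryDirectoryInterface dir = tree.findDirectory("/home/etl");   // placeholder path
JobMeta loaded = repository.jobDelegate.loadJobMeta("daily_load", dir, null);
System.out.println("Loaded " + loaded.nrJobEntries() + " entries and " + loaded.nrJobHops() + " hops");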

Example 40 with JobEntryCopy

Use of org.pentaho.di.job.entry.JobEntryCopy in project pentaho-kettle by Pentaho.

From the class KettleDatabaseRepositoryJobEntryDelegate, the method loadJobEntryCopy:

/**
 * Loads a graphical job entry copy from the repository. If the referenced job entry is not
 * already present in the supplied list, it is loaded as well (falling back to a MissingEntry
 * placeholder when its plugin is not installed) and added to that list.
 *
 * @param jobId
 *          the job ID
 * @param jobEntryCopyId
 *          the job entry copy ID
 * @param jobentries
 *          a list of all job entries loaded so far
 * @param databases
 *          a list of all defined databases
 * @param slaveServers
 *          a list of all defined slave servers
 * @param jobname
 *          the name of the job, used when a MissingEntry placeholder has to be created
 * @return the loaded job entry copy
 * @throws KettleException
 *           if the copy or its job entry cannot be loaded
 */
public JobEntryCopy loadJobEntryCopy(ObjectId jobId, ObjectId jobEntryCopyId, List<JobEntryInterface> jobentries, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, String jobname) throws KettleException {
    JobEntryCopy jobEntryCopy = new JobEntryCopy();
    try {
        jobEntryCopy.setObjectId(jobEntryCopyId);
        // Handle GUI information: nr, location, ...
        RowMetaAndData r = getJobEntryCopy(jobEntryCopyId);
        if (r != null) {
            // These are the jobentry_copy fields...
            // 
            ObjectId jobEntryId = new LongObjectId(r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, 0));
            ObjectId jobEntryTypeId = new LongObjectId(r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, 0));
            jobEntryCopy.setNr((int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 0));
            int locx = (int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 0);
            int locy = (int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 0);
            boolean isdrawn = r.getBoolean(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, false);
            boolean isparallel = r.getBoolean(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, false);
            // Do we have the jobentry already?
            // 
            jobEntryCopy.setEntry(JobMeta.findJobEntry(jobentries, jobEntryId));
            if (jobEntryCopy.getEntry() == null) {
                // What type of jobentry do we load now?
                // Get the jobentry type code
                // 
                RowMetaAndData rt = getJobEntryType(new LongObjectId(jobEntryTypeId));
                if (rt != null) {
                    String jet_code = rt.getString(KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, null);
                    JobEntryInterface jobEntry = null;
                    PluginRegistry registry = PluginRegistry.getInstance();
                    PluginInterface jobPlugin = registry.findPluginWithId(JobEntryPluginType.class, jet_code);
                    if (jobPlugin == null) {
                        jobEntry = new MissingEntry(jobname, jet_code);
                    } else {
                        jobEntry = (JobEntryInterface) registry.loadClass(jobPlugin);
                    }
                    if (jobEntry != null) {
                        jobEntryCopy.setEntry(jobEntry);
                        // 
                        if (jobEntry instanceof JobEntryBase) {
                            loadJobEntryBase((JobEntryBase) jobEntry, jobEntryId, databases, slaveServers);
                            ((JobEntryBase) jobEntry).setAttributesMap(loadJobEntryAttributesMap(jobId, jobEntryId));
                        }
                        compatibleJobEntryLoadRep(jobEntry, repository, jobEntryTypeId, databases, slaveServers);
                        jobEntry.loadRep(repository, repository.metaStore, jobEntryId, databases, slaveServers);
                        jobEntryCopy.getEntry().setObjectId(jobEntryId);
                        jobentries.add(jobEntryCopy.getEntry());
                    } else {
                        throw new KettleException("JobEntryLoader was unable to find Job Entry Plugin with description [" + jet_code + "].");
                    }
                } else {
                    throw new KettleException("Unable to find Job Entry Type with id=" + jobEntryTypeId + " in the repository");
                }
            }
            jobEntryCopy.setLocation(locx, locy);
            jobEntryCopy.setDrawn(isdrawn);
            jobEntryCopy.setLaunchingInParallel(isparallel);
            return jobEntryCopy;
        } else {
            throw new KettleException("Unable to find job entry copy in repository with id_jobentry_copy=" + jobEntryCopyId);
        }
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to load job entry copy from repository with id_jobentry_copy=" + jobEntryCopyId, dbe);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) LongObjectId(org.pentaho.di.repository.LongObjectId) ObjectId(org.pentaho.di.repository.ObjectId) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) PluginInterface(org.pentaho.di.core.plugins.PluginInterface) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) MissingEntry(org.pentaho.di.job.entries.missing.MissingEntry) JobEntryBase(org.pentaho.di.job.entry.JobEntryBase) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry)
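
To highlight the fallback used above, a hedged sketch of the plugin lookup: resolve a job entry plugin by its type code and substitute a MissingEntry placeholder when it is not installed. The "DUMMY" type code and the job name are placeholders, not values from the example.

// Illustrative plugin lookup with the MissingEntry fallback from loadJobEntryCopy.
PluginRegistry registry = PluginRegistry.getInstance();
PluginInterface jobPlugin = registry.findPluginWithId(JobEntryPluginType.class, "DUMMY");  // placeholder code
JobEntryInterface jobEntry;
if (jobPlugin == null) {
    // Plugin not installed: keep a placeholder so the job can still be opened and saved.
    jobEntry = new MissingEntry("sample_job", "DUMMY");
} else {
    // loadClass can throw KettlePluginException (a KettleException); let it propagate or handle it.
    jobEntry = (JobEntryInterface) registry.loadClass(jobPlugin);
}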

Aggregations

JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 149 usages
Point (org.pentaho.di.core.gui.Point): 54
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 51
JobMeta (org.pentaho.di.job.JobMeta): 47
KettleException (org.pentaho.di.core.exception.KettleException): 30
Test (org.junit.Test): 28
JobEntryInterface (org.pentaho.di.job.entry.JobEntryInterface): 28
NotePadMeta (org.pentaho.di.core.NotePadMeta): 24
JobHopMeta (org.pentaho.di.job.JobHopMeta): 24
ArrayList (java.util.ArrayList): 18
Job (org.pentaho.di.job.Job): 18
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 16
JobEntryTrans (org.pentaho.di.job.entries.trans.JobEntryTrans): 15
MessageBox (org.eclipse.swt.widgets.MessageBox): 13
PluginInterface (org.pentaho.di.core.plugins.PluginInterface): 10
Result (org.pentaho.di.core.Result): 8
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 8
AreaOwner (org.pentaho.di.core.gui.AreaOwner): 8
JobEntrySpecial (org.pentaho.di.job.entries.special.JobEntrySpecial): 8
Before (org.junit.Before): 7