
Example 16 with JobHopMeta

Use of org.pentaho.di.job.JobHopMeta in project pentaho-metaverse by pentaho.

The class JobAnalyzer, method analyze:

@Override
public synchronized IMetaverseNode analyze(IComponentDescriptor descriptor, IDocument document) throws MetaverseAnalyzerException {
    validateState(document);
    Object repoObject = document.getContent();
    JobMeta jobMeta = null;
    if (repoObject instanceof String) {
        // hydrate the job
        try {
            String content = (String) repoObject;
            ByteArrayInputStream xmlStream = new ByteArrayInputStream(content.getBytes());
            jobMeta = new JobMeta(xmlStream, null, null);
        } catch (KettleXMLException e) {
            throw new MetaverseAnalyzerException(e);
        }
    } else if (repoObject instanceof JobMeta) {
        jobMeta = (JobMeta) repoObject;
    }
    // Guard against content that was neither XML nor a JobMeta
    if (jobMeta == null) {
        throw new MetaverseAnalyzerException("Document content could not be converted to a JobMeta");
    }
    // construct a dummy job based on our JobMeta so we get our VariableSpace set properly
    jobMeta.setFilename(document.getStringID());
    Job j = new Job(null, jobMeta);
    j.setInternalKettleVariables(jobMeta);
    IComponentDescriptor documentDescriptor = new MetaverseComponentDescriptor(document.getStringID(), DictionaryConst.NODE_TYPE_JOB, new Namespace(descriptor.getLogicalId()), descriptor.getContext());
    // Create a metaverse node and start filling in details
    IMetaverseNode node = metaverseObjectFactory.createNodeObject(document.getNamespace(), jobMeta.getName(), DictionaryConst.NODE_TYPE_JOB);
    node.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);
    // pull out the standard fields
    String description = jobMeta.getDescription();
    if (description != null) {
        node.setProperty(DictionaryConst.PROPERTY_DESCRIPTION, description);
    }
    String extendedDescription = jobMeta.getExtendedDescription();
    if (extendedDescription != null) {
        node.setProperty("extendedDescription", extendedDescription);
    }
    Date createdDate = jobMeta.getCreatedDate();
    if (createdDate != null) {
        node.setProperty(DictionaryConst.PROPERTY_CREATED, Long.toString(createdDate.getTime()));
    }
    String createdUser = jobMeta.getCreatedUser();
    if (createdUser != null) {
        node.setProperty(DictionaryConst.PROPERTY_CREATED_BY, createdUser);
    }
    Date lastModifiedDate = jobMeta.getModifiedDate();
    if (lastModifiedDate != null) {
        node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED, Long.toString(lastModifiedDate.getTime()));
    }
    String lastModifiedUser = jobMeta.getModifiedUser();
    if (lastModifiedUser != null) {
        node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED_BY, lastModifiedUser);
    }
    String version = jobMeta.getJobversion();
    if (version != null) {
        node.setProperty(DictionaryConst.PROPERTY_ARTIFACT_VERSION, version);
    }
    String status = Messages.getString("INFO.JobOrTrans.Status_" + Integer.toString(jobMeta.getJobstatus()));
    if (status != null && !status.startsWith("!")) {
        node.setProperty(DictionaryConst.PROPERTY_STATUS, status);
    }
    node.setProperty(DictionaryConst.PROPERTY_PATH, document.getProperty(DictionaryConst.PROPERTY_PATH));
    // Process job parameters
    String[] parameters = jobMeta.listParameters();
    if (parameters != null) {
        for (String parameter : parameters) {
            try {
                // Determine parameter properties and add them to a map, then the map to the list
                String defaultParameterValue = jobMeta.getParameterDefault(parameter);
                String parameterValue = jobMeta.getParameterValue(parameter);
                String parameterDescription = jobMeta.getParameterDescription(parameter);
                PropertiesHolder paramProperties = new PropertiesHolder();
                paramProperties.setProperty("defaultValue", defaultParameterValue);
                paramProperties.setProperty("value", parameterValue);
                paramProperties.setProperty("description", parameterDescription);
                node.setProperty("parameter_" + parameter, paramProperties.toString());
            } catch (UnknownParamException upe) {
                // This shouldn't happen as we're using the list provided by the meta
                throw new MetaverseAnalyzerException(upe);
            }
        }
    }
    // handle the entries
    for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
        JobEntryCopy entry = jobMeta.getJobEntry(i);
        try {
            if (entry != null) {
                entry.getEntry().setParentJob(j);
                IMetaverseNode jobEntryNode = null;
                JobEntryInterface jobEntryInterface = entry.getEntry();
                IComponentDescriptor entryDescriptor = new MetaverseComponentDescriptor(entry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY, node, descriptor.getContext());
                Set<IJobEntryAnalyzer> jobEntryAnalyzers = getJobEntryAnalyzers(jobEntryInterface);
                if (jobEntryAnalyzers != null && !jobEntryAnalyzers.isEmpty()) {
                    for (IJobEntryAnalyzer jobEntryAnalyzer : jobEntryAnalyzers) {
                        jobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
                        jobEntryNode = (IMetaverseNode) jobEntryAnalyzer.analyze(entryDescriptor, entry.getEntry());
                    }
                } else {
                    GenericJobEntryMetaAnalyzer defaultJobEntryAnalyzer = new GenericJobEntryMetaAnalyzer();
                    defaultJobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
                    jobEntryNode = defaultJobEntryAnalyzer.analyze(entryDescriptor, jobEntryInterface);
                }
                if (jobEntryNode != null) {
                    metaverseBuilder.addLink(node, DictionaryConst.LINK_CONTAINS, jobEntryNode);
                }
            }
        } catch (Throwable mae) {
            // Don't throw an exception, just log and carry on
            log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", entry.getName(), Const.NVL(mae.getLocalizedMessage(), "Unspecified")));
            log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), mae);
        }
    }
    // Model the hops between steps
    int numHops = jobMeta.nrJobHops();
    for (int i = 0; i < numHops; i++) {
        JobHopMeta hop = jobMeta.getJobHop(i);
        JobEntryCopy fromEntry = hop.getFromEntry();
        JobEntryCopy toEntry = hop.getToEntry();
        INamespace childNs = new Namespace(node.getLogicalId());
        // process legitimate hops
        if (fromEntry != null && toEntry != null) {
            IMetaverseNode fromEntryNode = metaverseObjectFactory.createNodeObject(childNs, fromEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
            IMetaverseNode toEntryNode = metaverseObjectFactory.createNodeObject(childNs, toEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
            metaverseBuilder.addLink(fromEntryNode, DictionaryConst.LINK_HOPSTO, toEntryNode);
        }
    }
    metaverseBuilder.addNode(node);
    addParentLink(documentDescriptor, node);
    return node;
}
Also used: JobMeta (org.pentaho.di.job.JobMeta), PropertiesHolder (org.pentaho.metaverse.api.PropertiesHolder), JobHopMeta (org.pentaho.di.job.JobHopMeta), MetaverseAnalyzerException (org.pentaho.metaverse.api.MetaverseAnalyzerException), JobEntryInterface (org.pentaho.di.job.entry.JobEntryInterface), IMetaverseNode (org.pentaho.metaverse.api.IMetaverseNode), IJobEntryAnalyzer (org.pentaho.metaverse.api.analyzer.kettle.jobentry.IJobEntryAnalyzer), INamespace (org.pentaho.metaverse.api.INamespace), Namespace (org.pentaho.metaverse.api.Namespace), Date (java.util.Date), MetaverseComponentDescriptor (org.pentaho.metaverse.api.MetaverseComponentDescriptor), IComponentDescriptor (org.pentaho.metaverse.api.IComponentDescriptor), JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy), ByteArrayInputStream (java.io.ByteArrayInputStream), KettleXMLException (org.pentaho.di.core.exception.KettleXMLException), UnknownParamException (org.pentaho.di.core.parameters.UnknownParamException), GenericJobEntryMetaAnalyzer (org.pentaho.metaverse.analyzer.kettle.jobentry.GenericJobEntryMetaAnalyzer), Job (org.pentaho.di.job.Job)
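
A minimal standalone sketch (not part of the indexed source) showing the same String-to-JobMeta hydration path the analyzer takes; the file path is hypothetical and imports are omitted, as in the listings above:

// assumes a surrounding method that declares KettleException
KettleEnvironment.init();
// Read the job XML the way the analyzer receives it as String content
String xml = new String(Files.readAllBytes(Paths.get("/tmp/sample.kjb")), StandardCharsets.UTF_8);
JobMeta jobMeta = new JobMeta(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)), null, null);
System.out.println("Loaded job '" + jobMeta.getName() + "' with " + jobMeta.nrJobEntries() + " entries");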

Example 17 with JobHopMeta

Use of org.pentaho.di.job.JobHopMeta in project pentaho-kettle by pentaho.

The class KettleDatabaseRepositoryJobDelegate, method saveJob:

/**
 * Store a job in the repository.
 *
 * @param jobMeta
 *          The job to store
 * @param versionComment
 *          The comment to store with this version of the job
 * @param monitor
 *          the (optional) UI progress monitor
 * @param overwrite
 *          Overwrite existing object(s)?
 * @throws KettleException
 *           in case some IO error occurs.
 */
public void saveJob(JobMeta jobMeta, String versionComment, ProgressMonitorListener monitor, boolean overwrite) throws KettleException {
    try {
        // Before saving the job, see if it's not locked by someone else...
        // 
        int nrWorks = 2 + jobMeta.nrDatabases() + jobMeta.nrNotes() + jobMeta.nrJobEntries() + jobMeta.nrJobHops();
        if (monitor != null) {
            monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingTransformation") + jobMeta.getRepositoryDirectory() + Const.FILE_SEPARATOR + jobMeta.getName(), nrWorks);
        }
        repository.insertLogEntry("save job '" + jobMeta.getName() + "'");
        // Handle the previous version of this job, if any...
        if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.HandlingPreviousVersionOfJob"));
        }
        jobMeta.setObjectId(getJobID(jobMeta.getName(), jobMeta.getRepositoryDirectory().getObjectId()));
        // If no valid id is available in the database, assign one...
        if (jobMeta.getObjectId() == null) {
            jobMeta.setObjectId(repository.connectionDelegate.getNextJobID());
        } else {
            // If we have a valid ID, we need to make sure everything is
            // cleared out
            // of the database for this id_job, before we put it back in...
            repository.deleteJob(jobMeta.getObjectId());
        }
        if (monitor != null) {
            monitor.worked(1);
        }
        // Save the database connections used by this job...
        if (log.isDebug()) {
            log.logDebug(BaseMessages.getString(PKG, "JobMeta.Log.SavingDatabaseConnections"));
        }
        for (int i = 0; i < jobMeta.nrDatabases(); i++) {
            if (monitor != null) {
                monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingDatabaseTask.Title") + (i + 1) + "/" + jobMeta.nrDatabases());
            }
            DatabaseMeta databaseMeta = jobMeta.getDatabase(i);
            // Only save the connection if it changed, was never saved before, or overwrite is requested
            if (overwrite || databaseMeta.hasChanged() || databaseMeta.getObjectId() == null) {
                repository.save(databaseMeta, versionComment, monitor, overwrite);
            }
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        // Now save the details of the job itself...
        if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingJobDetails"));
        }
        if (log.isDetailed()) {
            log.logDetailed("Saving job info to repository...");
        }
        insertJob(jobMeta);
        if (monitor != null) {
            monitor.worked(1);
        }
        // Save the group attributes map
        // 
        saveJobAttributesMap(jobMeta.getObjectId(), jobMeta.getAttributesMap());
        // Save the slave servers...
        for (int i = 0; i < jobMeta.getSlaveServers().size(); i++) {
            SlaveServer slaveServer = jobMeta.getSlaveServers().get(i);
            repository.save(slaveServer, versionComment, null, jobMeta.getObjectId(), false, overwrite);
        }
        // Save the notes...
        if (log.isDetailed()) {
            log.logDetailed("Saving notes to repository...");
        }
        for (int i = 0; i < jobMeta.nrNotes(); i++) {
            if (monitor != null) {
                monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingNoteNr") + (i + 1) + "/" + jobMeta.nrNotes());
            }
            NotePadMeta ni = jobMeta.getNote(i);
            repository.saveNotePadMeta(ni, jobMeta.getObjectId());
            if (ni.getObjectId() != null) {
                repository.insertJobNote(jobMeta.getObjectId(), ni.getObjectId());
            }
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        // Save the job entry copies...
        if (log.isDetailed()) {
            log.logDetailed("Saving " + jobMeta.nrJobEntries() + " Job enty copies to repository...");
        }
        repository.updateJobEntryTypes();
        for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
            if (monitor != null) {
                monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.SavingJobEntryNr") + (i + 1) + "/" + jobMeta.nrJobEntries());
            }
            JobEntryCopy cge = jobMeta.getJobEntry(i);
            repository.jobEntryDelegate.saveJobEntryCopy(cge, jobMeta.getObjectId(), repository.metaStore);
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        if (log.isDetailed()) {
            log.logDetailed("Saving job hops to repository...");
        }
        for (int i = 0; i < jobMeta.nrJobHops(); i++) {
            if (monitor != null) {
                monitor.subTask("Saving job hop #" + (i + 1) + "/" + jobMeta.nrJobHops());
            }
            JobHopMeta hi = jobMeta.getJobHop(i);
            saveJobHopMeta(hi, jobMeta.getObjectId());
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        saveJobParameters(jobMeta);
        // Commit this transaction!!
        repository.commit();
        jobMeta.clearChanged();
        if (monitor != null) {
            monitor.done();
        }
    } catch (KettleDatabaseException dbe) {
        repository.rollback();
        throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.UnableToSaveJobInRepositoryRollbackPerformed"), dbe);
    }
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy), JobHopMeta (org.pentaho.di.job.JobHopMeta), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), NotePadMeta (org.pentaho.di.core.NotePadMeta), SlaveServer (org.pentaho.di.cluster.SlaveServer), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta)
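
A minimal sketch, assuming a jobMeta is already loaded, of walking the same hop list the save loop above persists; all the getters used here appear in the examples on this page:

for (int i = 0; i < jobMeta.nrJobHops(); i++) {
    JobHopMeta hop = jobMeta.getJobHop(i);
    JobEntryCopy from = hop.getFromEntry();
    JobEntryCopy to = hop.getToEntry();
    // Hops can dangle while a job is being edited, so guard both ends
    if (from != null && to != null) {
        System.out.println(from.getName() + " -> " + to.getName() + (hop.isEnabled() ? "" : " (disabled)"));
    }
}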

Example 18 with JobHopMeta

Use of org.pentaho.di.job.JobHopMeta in project pentaho-kettle by pentaho.

The class JobHasNoDisabledHopsImportRule, method verifyRule:

@Override
public List<ImportValidationFeedback> verifyRule(Object subject) {
    List<ImportValidationFeedback> feedback = new ArrayList<ImportValidationFeedback>();
    if (!isEnabled()) {
        return feedback;
    }
    if (!(subject instanceof JobMeta)) {
        return feedback;
    }
    JobMeta jobMeta = (JobMeta) subject;
    for (int i = 0; i < jobMeta.nrJobHops(); i++) {
        JobHopMeta hop = jobMeta.getJobHop(i);
        if (!hop.isEnabled()) {
            feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "There is a disabled hop in the job."));
        }
    }
    if (feedback.isEmpty()) {
        feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.APPROVAL, "All hops are enabled in this job."));
    }
    return feedback;
}
Also used: JobMeta (org.pentaho.di.job.JobMeta), JobHopMeta (org.pentaho.di.job.JobHopMeta), ImportValidationFeedback (org.pentaho.di.imp.rule.ImportValidationFeedback), ArrayList (java.util.ArrayList)
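
A minimal usage sketch, assuming a loaded jobMeta; setEnabled(boolean), getResultType() and getComment() are assumed counterparts of the members actually shown above:

JobHasNoDisabledHopsImportRule rule = new JobHasNoDisabledHopsImportRule();
// assumed setter matching the isEnabled() check inside verifyRule
rule.setEnabled(true);
List<ImportValidationFeedback> results = rule.verifyRule(jobMeta);
for (ImportValidationFeedback feedback : results) {
    // assumed getters for the (rule, resultType, comment) constructor arguments
    System.out.println(feedback.getResultType() + ": " + feedback.getComment());
}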

Example 19 with JobHopMeta

Use of org.pentaho.di.job.JobHopMeta in project pentaho-kettle by pentaho.

The class JobGenerator, method generateSqlJob:

public JobMeta generateSqlJob() throws KettleException {
    DatabaseMeta databaseMeta = findTargetDatabaseMeta();
    Database db = new Database(Spoon.loggingObject, databaseMeta);
    try {
        db.connect();
        JobMeta jobMeta = new JobMeta();
        jobMeta.setName("Create tables for '" + ConceptUtil.getName(domain, locale) + "'");
        jobMeta.setDescription(ConceptUtil.getDescription(domain, locale));
        // Let's not forget to add the database connection
        // 
        jobMeta.addDatabase(databaseMeta);
        Point location = new Point(GRAPH_LEFT, GRAPH_TOP);
        // Create a job entry
        // 
        JobEntryCopy startEntry = JobMeta.createStartEntry();
        startEntry.setLocation(location.x, location.y);
        startEntry.setDrawn();
        jobMeta.addJobEntry(startEntry);
        JobEntryCopy lastEntry = startEntry;
        nextLocation(location);
        // Create one SQL entry for all the physically unique dimensions and facts
        // We need to get a list of all known dimensions with physical table name.
        // 
        List<LogicalTable> tables = getUniqueLogicalTables();
        for (LogicalTable logicalTable : tables) {
            String phTable = ConceptUtil.getString(logicalTable, DefaultIDs.LOGICAL_TABLE_PHYSICAL_TABLE_NAME);
            String tableName = ConceptUtil.getName(logicalTable, locale);
            String tableDescription = ConceptUtil.getDescription(logicalTable, locale);
            TableType tableType = ConceptUtil.getTableType(logicalTable);
            DimensionType dimensionType = ConceptUtil.getDimensionType(logicalTable);
            boolean isFact = tableType == TableType.FACT;
            boolean isDimension = tableType == TableType.DIMENSION;
            boolean isJunk = isDimension && dimensionType == DimensionType.JUNK_DIMENSION;
            JobEntrySQL sqlEntry = new JobEntrySQL(phTable);
            sqlEntry.setDatabase(databaseMeta);
            // Get the SQL for this table...
            // 
            String schemaTable = databaseMeta.getQuotedSchemaTableCombination(null, phTable);
            String phKeyField = null;
            // The technical key is the first KEY field...
            // 
            LogicalColumn keyColumn = null;
            if (isDimension) {
                keyColumn = ConceptUtil.findLogicalColumn(logicalTable, AttributeType.TECHNICAL_KEY);
            }
            if (keyColumn != null) {
                phKeyField = ConceptUtil.getString(keyColumn, DefaultIDs.LOGICAL_COLUMN_PHYSICAL_COLUMN_NAME);
            }
            // Get all the fields for the logical table...
            // 
            RowMetaInterface fields = getRowForLogicalTable(databaseMeta, logicalTable);
            // Generate the required SQL to make this happen
            // 
            String sql = db.getCreateTableStatement(schemaTable, fields, phKeyField, databaseMeta.supportsAutoinc() && !isFact, null, true);
            // Also create an index on the technical key, if present
            if (keyColumn != null) {
                ValueMetaInterface keyValueMeta = getValueForLogicalColumn(databaseMeta, keyColumn);
                String indexName = databaseMeta.quoteField("IDX_" + phTable.replace(" ", "_").toUpperCase() + "_" + phKeyField.toUpperCase());
                String indexSql = db.getCreateIndexStatement(schemaTable, indexName, new String[] { keyValueMeta.getName() }, true, false, true, true);
                sql += Const.CR + indexSql;
            }
            // For a fact table, also index each technical (foreign) key column
            if (isFact) {
                List<LogicalColumn> fks = ConceptUtil.findLogicalColumns(logicalTable, AttributeType.TECHNICAL_KEY);
                for (LogicalColumn fk : fks) {
                    ValueMetaInterface keyValueMeta = getValueForLogicalColumn(databaseMeta, fk);
                    String phColumn = ConceptUtil.getString(fk, DefaultIDs.LOGICAL_COLUMN_PHYSICAL_COLUMN_NAME);
                    if (!Utils.isEmpty(phColumn)) {
                        String indexName = databaseMeta.quoteField("IDX_" + phTable.replace(" ", "_").toUpperCase() + "_" + phColumn.toUpperCase());
                        String indexSql = db.getCreateIndexStatement(schemaTable, indexName, new String[] { keyValueMeta.getName() }, true, false, true, true);
                        sql += Const.CR + indexSql;
                    }
                }
            }
            // For a dimension, create a lookup index on the natural key columns
            if (isDimension) {
                List<LogicalColumn> naturalKeys = ConceptUtil.findLogicalColumns(logicalTable, AttributeType.NATURAL_KEY);
                if (!naturalKeys.isEmpty()) {
                    String indexName = databaseMeta.quoteField("IDX_" + phTable.replace(" ", "_").toUpperCase() + "_LOOKUP");
                    String[] fieldNames = new String[naturalKeys.size()];
                    for (int i = 0; i < fieldNames.length; i++) {
                        ValueMetaInterface keyValueMeta = getValueForLogicalColumn(databaseMeta, naturalKeys.get(i));
                        fieldNames[i] = keyValueMeta.getName();
                    }
                    String indexSql = db.getCreateIndexStatement(schemaTable, indexName, fieldNames, false, false, false, true);
                    sql += Const.CR + indexSql;
                }
            }
            if (isJunk) {
                List<LogicalColumn> attributes = ConceptUtil.findLogicalColumns(logicalTable, AttributeType.ATTRIBUTE);
                if (!attributes.isEmpty()) {
                    String indexName = databaseMeta.quoteField("IDX_" + phTable.replace(" ", "_").toUpperCase() + "_LOOKUP");
                    String[] fieldNames = new String[attributes.size()];
                    for (int i = 0; i < fieldNames.length; i++) {
                        ValueMetaInterface attrValueMeta = getValueForLogicalColumn(databaseMeta, attributes.get(i));
                        fieldNames[i] = attrValueMeta.getName();
                    }
                    String indexSql = db.getCreateIndexStatement(schemaTable, indexName, fieldNames, false, false, false, true);
                    sql += Const.CR + indexSql;
                }
            }
            // Set the generated SQL and a description on the entry
            sqlEntry.setSQL(sql);
            sqlEntry.setDescription("Generated based on logical table '" + tableName + "'" + Const.CR + Const.CR + Const.NVL(tableDescription, ""));
            JobEntryCopy sqlCopy = new JobEntryCopy(sqlEntry);
            sqlCopy.setLocation(location.x, location.y);
            sqlCopy.setDrawn();
            nextLocation(location);
            jobMeta.addJobEntry(sqlCopy);
            // Hook up with the previous job entry too...
            // 
            JobHopMeta jobHop = new JobHopMeta(lastEntry, sqlCopy);
            jobHop.setEnabled();
            jobHop.setConditional();
            jobHop.setEvaluation(true);
            if (lastEntry.isStart()) {
                jobHop.setUnconditional();
            }
            jobMeta.addJobHop(jobHop);
            lastEntry = sqlCopy;
        }
        return jobMeta;
    } catch (Exception e) {
        throw new KettleException("There was an error during the generation of the SQL job", e);
    } finally {
        if (db != null) {
            db.disconnect();
        }
    }
}
Also used: DimensionType (org.pentaho.di.starmodeler.DimensionType), KettleException (org.pentaho.di.core.exception.KettleException), JobMeta (org.pentaho.di.job.JobMeta), LogicalColumn (org.pentaho.metadata.model.LogicalColumn), JobHopMeta (org.pentaho.di.job.JobHopMeta), TableType (org.pentaho.metadata.model.concept.types.TableType), RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface), Point (org.pentaho.di.core.gui.Point), LogicalTable (org.pentaho.metadata.model.LogicalTable), JobEntrySQL (org.pentaho.di.job.entries.sql.JobEntrySQL), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface), JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy), Database (org.pentaho.di.core.database.Database)
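
Distilled from generateSqlJob above, a minimal sketch of wiring two entries with a JobHopMeta; every call appears verbatim in the example, only the entry name and coordinates are made up:

JobMeta jobMeta = new JobMeta();
JobEntryCopy start = JobMeta.createStartEntry();
start.setLocation(50, 50);
start.setDrawn();
jobMeta.addJobEntry(start);

JobEntrySQL sqlEntry = new JobEntrySQL("create_tables"); // hypothetical entry name
JobEntryCopy sqlCopy = new JobEntryCopy(sqlEntry);
sqlCopy.setLocation(200, 50);
sqlCopy.setDrawn();
jobMeta.addJobEntry(sqlCopy);

JobHopMeta hop = new JobHopMeta(start, sqlCopy);
hop.setEnabled();
hop.setUnconditional(); // hops leaving the START entry are unconditional, as in the example
jobMeta.addJobHop(hop);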

Example 20 with JobHopMeta

Use of org.pentaho.di.job.JobHopMeta in project pentaho-kettle by pentaho.

The class SpoonJobDelegate, method redoJobAction:

public void redoJobAction(JobMeta jobMeta, TransAction transAction) {
    switch(transAction.getType()) {
        // We created a new job entry : redo this...
        case TransAction.TYPE_ACTION_NEW_JOB_ENTRY:
            // re-add the entry at correct location:
            JobEntryCopy[] si = (JobEntryCopy[]) transAction.getCurrent();
            int[] idx = transAction.getCurrentIndex();
            for (int i = 0; i < idx.length; i++) {
                jobMeta.addJobEntry(idx[i], si[i]);
            }
            spoon.refreshTree();
            spoon.refreshGraph();
            break;
        case TransAction.TYPE_ACTION_NEW_NOTE:
            // re-insert the note at correct location:
            NotePadMeta[] ni = (NotePadMeta[]) transAction.getCurrent();
            idx = transAction.getCurrentIndex();
            for (int i = 0; i < idx.length; i++) {
                jobMeta.addNote(idx[i], ni[i]);
            }
            spoon.refreshTree();
            spoon.refreshGraph();
            break;
        case TransAction.TYPE_ACTION_NEW_JOB_HOP:
            // re-insert the hop at correct location:
            JobHopMeta[] hi = (JobHopMeta[]) transAction.getCurrent();
            idx = transAction.getCurrentIndex();
            for (int i = 0; i < idx.length; i++) {
                jobMeta.addJobHop(idx[i], hi[i]);
            }
            spoon.refreshTree();
            spoon.refreshGraph();
            break;
        // We deleted a job entry : redo this...
        case TransAction.TYPE_ACTION_DELETE_JOB_ENTRY:
            // re-remove the entry at correct location:
            idx = transAction.getCurrentIndex();
            for (int i = idx.length - 1; i >= 0; i--) {
                jobMeta.removeJobEntry(idx[i]);
            }
            spoon.refreshTree();
            spoon.refreshGraph();
            break;
        case TransAction.TYPE_ACTION_DELETE_NOTE:
            // re-remove the note at correct location:
            idx = transAction.getCurrentIndex();
            for (int i = idx.length - 1; i >= 0; i--) {
                jobMeta.removeNote(idx[i]);
            }
            spoon.refreshTree();
            spoon.refreshGraph();
            break;
        case TransAction.TYPE_ACTION_DELETE_JOB_HOP:
            // re-remove the hop at correct location:
            idx = transAction.getCurrentIndex();
            for (int i = idx.length - 1; i >= 0; i--) {
                jobMeta.removeJobHop(idx[i]);
            }
            spoon.refreshTree();
            spoon.refreshGraph();
            break;
        // We changed a job entry : redo this...
        case TransAction.TYPE_ACTION_CHANGE_JOB_ENTRY:
            // replace with "current" version.
            for (int i = 0; i < transAction.getCurrent().length; i++) {
                JobEntryCopy copy = (JobEntryCopy) ((JobEntryCopy) (transAction.getCurrent()[i])).clone_deep();
                jobMeta.getJobEntry(transAction.getCurrentIndex()[i]).replaceMeta(copy);
            }
            spoon.refreshTree();
            spoon.refreshGraph();
            break;
        // We changed a note : redo this...
        case TransAction.TYPE_ACTION_CHANGE_NOTE:
            // Delete & re-insert
            ni = (NotePadMeta[]) transAction.getCurrent();
            idx = transAction.getCurrentIndex();
            for (int i = 0; i < idx.length; i++) {
                jobMeta.removeNote(idx[i]);
                jobMeta.addNote(idx[i], ni[i]);
            }
            spoon.refreshTree();
            spoon.refreshGraph();
            break;
        // We changed a hop : redo this...
        case TransAction.TYPE_ACTION_CHANGE_JOB_HOP:
            // Delete & re-insert
            hi = (JobHopMeta[]) transAction.getCurrent();
            idx = transAction.getCurrentIndex();
            for (int i = 0; i < idx.length; i++) {
                jobMeta.removeJobHop(idx[i]);
                jobMeta.addJobHop(idx[i], hi[i]);
            }
            spoon.refreshTree();
            spoon.refreshGraph();
            break;
        // The position of a job entry changed : redo this...
        case TransAction.TYPE_ACTION_POSITION_JOB_ENTRY:
            // Find the location of the job entry:
            idx = transAction.getCurrentIndex();
            Point[] p = transAction.getCurrentLocation();
            for (int i = 0; i < p.length; i++) {
                JobEntryCopy entry = jobMeta.getJobEntry(idx[i]);
                entry.setLocation(p[i]);
            }
            spoon.refreshGraph();
            break;
        case TransAction.TYPE_ACTION_POSITION_NOTE:
            idx = transAction.getCurrentIndex();
            Point[] curr = transAction.getCurrentLocation();
            for (int i = 0; i < idx.length; i++) {
                NotePadMeta npi = jobMeta.getNote(idx[i]);
                npi.setLocation(curr[i]);
            }
            spoon.refreshGraph();
            break;
        default:
            break;
    }
}
Also used: JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy), JobHopMeta (org.pentaho.di.job.JobHopMeta), NotePadMeta (org.pentaho.di.core.NotePadMeta), Point (org.pentaho.di.core.gui.Point), KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint)
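
The redo handler depends on addJobHop/removeJobHop being index-symmetric. A minimal sketch, assuming an existing jobMeta and hop; indexOfJobHop is an assumed lookup (only the add/remove calls appear above):

int idx = jobMeta.indexOfJobHop(hop); // assumed lookup by reference
jobMeta.removeJobHop(idx);            // what a TYPE_ACTION_DELETE_JOB_HOP redo performs
jobMeta.addJobHop(idx, hop);          // a TYPE_ACTION_NEW_JOB_HOP redo restores it at the same index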

Aggregations

JobHopMeta (org.pentaho.di.job.JobHopMeta): 31
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 24
Point (org.pentaho.di.core.gui.Point): 17
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 16
NotePadMeta (org.pentaho.di.core.NotePadMeta): 12
KettleException (org.pentaho.di.core.exception.KettleException): 10
JobMeta (org.pentaho.di.job.JobMeta): 10
ArrayList (java.util.ArrayList): 8
MessageBox (org.eclipse.swt.widgets.MessageBox): 6
JobEntryInterface (org.pentaho.di.job.entry.JobEntryInterface): 6
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 4
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 4
AreaOwner (org.pentaho.di.core.gui.AreaOwner): 4
Job (org.pentaho.di.job.Job): 4
Date (java.util.Date): 3
Test (org.junit.Test): 3
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 3
JobEntrySpecial (org.pentaho.di.job.entries.special.JobEntrySpecial): 3
KettleRepositoryLostException (org.pentaho.di.repository.KettleRepositoryLostException): 3
XulException (org.pentaho.ui.xul.XulException): 3