
Example 6 with PluginInterface

Use of org.pentaho.di.core.plugins.PluginInterface in project pentaho-kettle by pentaho.

The class SpoonTreeDelegate, method addDragSourceToTree.

public void addDragSourceToTree(final Tree tree, final Tree selectionTree, final Tree coreObjectsTree) {
    // Drag & Drop for steps
    Transfer[] ttypes = new Transfer[] { XMLTransfer.getInstance() };
    DragSource ddSource = new DragSource(tree, DND.DROP_MOVE);
    ddSource.setTransfer(ttypes);
    ddSource.addDragListener(new DragSourceListener() {

        public void dragStart(DragSourceEvent event) {
            TreeSelection[] treeObjects = getTreeObjects(tree, selectionTree, coreObjectsTree);
            if (treeObjects.length == 0) {
                event.doit = false;
                return;
            }
            spoon.hideToolTips();
            TreeSelection treeObject = treeObjects[0];
            Object object = treeObject.getSelection();
            TransMeta transMeta = spoon.getActiveTransformation();
            if (object instanceof StepMeta || object instanceof PluginInterface || (object instanceof DatabaseMeta && transMeta != null) || object instanceof TransHopMeta || object instanceof JobEntryCopy) {
                event.doit = true;
            } else {
                event.doit = false;
            }
        }

        public void dragSetData(DragSourceEvent event) {
            TreeSelection[] treeObjects = getTreeObjects(tree, selectionTree, coreObjectsTree);
            if (treeObjects.length == 0) {
                event.doit = false;
                return;
            }
            int type = 0;
            String id = null;
            String data = null;
            TreeSelection treeObject = treeObjects[0];
            Object object = treeObject.getSelection();
            if (object instanceof StepMeta) {
                StepMeta stepMeta = (StepMeta) object;
                type = DragAndDropContainer.TYPE_STEP;
                // name of the step.
                data = stepMeta.getName();
            } else if (object instanceof PluginInterface) {
                PluginInterface plugin = (PluginInterface) object;
                Class<? extends PluginTypeInterface> pluginType = plugin.getPluginType();
                if (Const.classIsOrExtends(pluginType, StepPluginType.class)) {
                    type = DragAndDropContainer.TYPE_BASE_STEP_TYPE;
                    id = plugin.getIds()[0];
                    // Step type name
                    data = plugin.getName();
                } else {
                    type = DragAndDropContainer.TYPE_BASE_JOB_ENTRY;
                    // job entry type name
                    data = plugin.getName();
                    if (treeObject.getItemText().equals(JobMeta.createStartEntry().getName())) {
                        data = treeObject.getItemText();
                    } else if (treeObject.getItemText().equals(JobMeta.createDummyEntry().getName())) {
                        data = treeObject.getItemText();
                    }
                }
            } else if (object instanceof DatabaseMeta) {
                DatabaseMeta databaseMeta = (DatabaseMeta) object;
                type = DragAndDropContainer.TYPE_DATABASE_CONNECTION;
                data = databaseMeta.getName();
            } else if (object instanceof TransHopMeta) {
                TransHopMeta hop = (TransHopMeta) object;
                type = DragAndDropContainer.TYPE_TRANS_HOP;
                // nothing really useful to pass along here
                data = hop.toString();
            } else if (object instanceof JobEntryCopy) {
                JobEntryCopy jobEntryCopy = (JobEntryCopy) object;
                type = DragAndDropContainer.TYPE_JOB_ENTRY;
                // name of the job entry.
                data = jobEntryCopy.getName();
            } else {
                event.doit = false;
                // ignore anything else you drag.
                return;
            }
            event.data = new DragAndDropContainer(type, data, id);
        }

        public void dragFinished(DragSourceEvent event) {
        }
    });
}
Also used: PluginInterface (org.pentaho.di.core.plugins.PluginInterface), TransMeta (org.pentaho.di.trans.TransMeta), DragSource (org.eclipse.swt.dnd.DragSource), DragSourceListener (org.eclipse.swt.dnd.DragSourceListener), StepMeta (org.pentaho.di.trans.step.StepMeta), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), DragSourceEvent (org.eclipse.swt.dnd.DragSourceEvent), JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy), TreeSelection (org.pentaho.di.ui.spoon.TreeSelection), Transfer (org.eclipse.swt.dnd.Transfer), XMLTransfer (org.pentaho.di.core.dnd.XMLTransfer), DragAndDropContainer (org.pentaho.di.core.dnd.DragAndDropContainer), TransHopMeta (org.pentaho.di.trans.TransHopMeta)
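
For orientation, here is a hedged sketch of the receiving side of this exchange: an SWT DropTarget registered with the same XMLTransfer. It assumes, as the drop handlers that consume this drag source do, that the transfer hands the listener back a DragAndDropContainer so its getType(), getData() and getId() accessors can be read; the canvas control and the handleDropped* helpers are placeholders for this illustration, not actual Spoon methods.

// Hedged sketch of a matching DropTarget; 'canvas' and the handleDropped* helpers are illustrative only.
DropTarget ddTarget = new DropTarget(canvas, DND.DROP_MOVE);
ddTarget.setTransfer(new Transfer[] { XMLTransfer.getInstance() });
ddTarget.addDropListener(new DropTargetAdapter() {

    public void drop(DropTargetEvent event) {
        if (event.data == null) {
            // Nothing usable arrived with the drop.
            return;
        }
        // Assumes XMLTransfer rebuilds the container that dragSetData() packed above.
        DragAndDropContainer container = (DragAndDropContainer) event.data;
        switch (container.getType()) {
            case DragAndDropContainer.TYPE_STEP:
                handleDroppedStep(container.getData());
                break;
            case DragAndDropContainer.TYPE_BASE_STEP_TYPE:
                handleDroppedStepType(container.getId(), container.getData());
                break;
            default:
                // Hops, connections and job entries would be handled the same way.
                break;
        }
    }
});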

Example 7 with PluginInterface

Use of org.pentaho.di.core.plugins.PluginInterface in project pentaho-kettle by pentaho.

The class KettleDatabaseRepositoryStepDelegate, method loadStepMeta.

/**
 * Create a new step by loading its metadata from the repository.
 *
 * @param stepId
 *          the object id of the step to load
 * @param databases
 *          the list of defined databases to resolve references against
 * @param partitionSchemas
 *          the list of defined partition schemas to resolve references against
 * @throws KettleException
 */
public StepMeta loadStepMeta(ObjectId stepId, List<DatabaseMeta> databases, List<PartitionSchema> partitionSchemas) throws KettleException {
    StepMeta stepMeta = new StepMeta();
    PluginRegistry registry = PluginRegistry.getInstance();
    try {
        RowMetaAndData r = getStep(stepId);
        if (r != null) {
            stepMeta.setObjectId(stepId);
            stepMeta.setName(r.getString(KettleDatabaseRepository.FIELD_STEP_NAME, null));
            stepMeta.setDescription(r.getString(KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, null));
            long id_step_type = r.getInteger(KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, -1L);
            RowMetaAndData steptyperow = getStepType(new LongObjectId(id_step_type));
            stepMeta.setStepID(steptyperow.getString(KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, null));
            stepMeta.setDistributes(r.getBoolean(KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, true));
            int copies = (int) r.getInteger(KettleDatabaseRepository.FIELD_STEP_COPIES, 1);
            String copiesString = r.getString(KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, null);
            if (!Utils.isEmpty(copiesString)) {
                stepMeta.setCopiesString(copiesString);
            } else {
                stepMeta.setCopies(copies);
            }
            int x = (int) r.getInteger(KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 0);
            int y = (int) r.getInteger(KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 0);
            stepMeta.setLocation(new Point(x, y));
            stepMeta.setDraw(r.getBoolean(KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, false));
            // Generate the appropriate class...
            PluginInterface sp = registry.findPluginWithId(StepPluginType.class, stepMeta.getStepID());
            if (sp == null) {
                stepMeta.setStepMetaInterface(new MissingTrans(stepMeta.getName(), stepMeta.getStepID()));
            } else {
                stepMeta.setStepMetaInterface((StepMetaInterface) registry.loadClass(sp));
            }
            if (stepMeta.getStepMetaInterface() != null) {
                // Read the step info from the repository!
                readRepCompatibleStepMeta(stepMeta.getStepMetaInterface(), repository, stepMeta.getObjectId(), databases);
                stepMeta.getStepMetaInterface().readRep(repository, repository.metaStore, stepMeta.getObjectId(), databases);
            }
            // Get the partitioning as well...
            // 
            stepMeta.setStepPartitioningMeta(loadStepPartitioningMeta(stepMeta.getObjectId()));
            stepMeta.getStepPartitioningMeta().setPartitionSchemaAfterLoading(partitionSchemas);
            // Get the cluster schema name
            // 
            stepMeta.setClusterSchemaName(repository.getStepAttributeString(stepId, "cluster_schema"));
            // Are we using a custom row distribution plugin?
            // 
            String rowDistributionCode = repository.getStepAttributeString(stepId, 0, "row_distribution_code");
            RowDistributionInterface rowDistribution = PluginRegistry.getInstance().loadClass(RowDistributionPluginType.class, rowDistributionCode, RowDistributionInterface.class);
            stepMeta.setRowDistribution(rowDistribution);
            // Load the attribute groups map
            // 
            stepMeta.setAttributesMap(loadStepAttributesMap(stepId));
            return stepMeta;
        } else {
            throw new KettleException(BaseMessages.getString(PKG, "StepMeta.Exception.StepInfoCouldNotBeFound", String.valueOf(stepId)));
        }
    } catch (KettleDatabaseException dbe) {
        throw new KettleException(BaseMessages.getString(PKG, "StepMeta.Exception.StepCouldNotBeLoaded", String.valueOf(stepMeta.getObjectId())), dbe);
    }
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), PluginInterface (org.pentaho.di.core.plugins.PluginInterface), LongObjectId (org.pentaho.di.repository.LongObjectId), ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString), Point (org.pentaho.di.core.gui.Point), StepMeta (org.pentaho.di.trans.step.StepMeta), RowMetaAndData (org.pentaho.di.core.RowMetaAndData), PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry), MissingTrans (org.pentaho.di.trans.steps.missing.MissingTrans), RowDistributionInterface (org.pentaho.di.trans.step.RowDistributionInterface)
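
The lookup-with-fallback at the heart of this method can be read in isolation. A minimal sketch, assuming only the PluginRegistry calls already used above (findPluginWithId and the untyped loadClass); resolveStepMeta is an illustrative helper name, not part of the Kettle API.

// Illustrative helper (not Kettle API): resolve the StepMetaInterface for a step plugin id,
// falling back to MissingTrans so the transformation stays loadable when the plugin is absent.
public static StepMetaInterface resolveStepMeta(String stepName, String stepPluginId) throws KettlePluginException {
    PluginRegistry registry = PluginRegistry.getInstance();
    PluginInterface plugin = registry.findPluginWithId(StepPluginType.class, stepPluginId);
    if (plugin == null) {
        // Placeholder metadata that records the missing plugin id.
        return new MissingTrans(stepName, stepPluginId);
    }
    // Instantiate the plugin's metadata class, exactly as loadStepMeta() does above.
    return (StepMetaInterface) registry.loadClass(plugin);
}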

Example 8 with PluginInterface

Use of org.pentaho.di.core.plugins.PluginInterface in project pentaho-kettle by pentaho.

The class KettleDatabaseRepositoryJobEntryDelegate, method loadJobEntryCopy.

/**
 * Load the graphical job entry copy from the repository. If the referenced job entry cannot be found in the
 * supplied list, its type, name and description are loaded from the repository as well.
 *
 * @param jobId
 *          the job ID
 * @param jobEntryCopyId
 *          the job entry copy ID
 * @param jobentries
 *          a list with all job entries loaded so far
 * @param databases
 *          a list with all defined databases
 * @param slaveServers
 *          a list with all defined slave servers
 * @param jobname
 *          the name of the job, used when creating a missing-entry placeholder
 * @throws KettleException
 */
public JobEntryCopy loadJobEntryCopy(ObjectId jobId, ObjectId jobEntryCopyId, List<JobEntryInterface> jobentries, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, String jobname) throws KettleException {
    JobEntryCopy jobEntryCopy = new JobEntryCopy();
    try {
        jobEntryCopy.setObjectId(jobEntryCopyId);
        // Handle GUI information: nr, location, ...
        RowMetaAndData r = getJobEntryCopy(jobEntryCopyId);
        if (r != null) {
            // These are the jobentry_copy fields...
            // 
            ObjectId jobEntryId = new LongObjectId(r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, 0));
            ObjectId jobEntryTypeId = new LongObjectId(r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, 0));
            jobEntryCopy.setNr((int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 0));
            int locx = (int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 0);
            int locy = (int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 0);
            boolean isdrawn = r.getBoolean(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, false);
            boolean isparallel = r.getBoolean(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, false);
            // Do we have the jobentry already?
            // 
            jobEntryCopy.setEntry(JobMeta.findJobEntry(jobentries, jobEntryId));
            if (jobEntryCopy.getEntry() == null) {
                // What type of jobentry do we load now?
                // Get the jobentry type code
                // 
                RowMetaAndData rt = getJobEntryType(new LongObjectId(jobEntryTypeId));
                if (rt != null) {
                    String jet_code = rt.getString(KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, null);
                    JobEntryInterface jobEntry = null;
                    PluginRegistry registry = PluginRegistry.getInstance();
                    PluginInterface jobPlugin = registry.findPluginWithId(JobEntryPluginType.class, jet_code);
                    if (jobPlugin == null) {
                        jobEntry = new MissingEntry(jobname, jet_code);
                    } else {
                        jobEntry = (JobEntryInterface) registry.loadClass(jobPlugin);
                    }
                    if (jobEntry != null) {
                        jobEntryCopy.setEntry(jobEntry);
                        // 
                        if (jobEntry instanceof JobEntryBase) {
                            loadJobEntryBase((JobEntryBase) jobEntry, jobEntryId, databases, slaveServers);
                            ((JobEntryBase) jobEntry).setAttributesMap(loadJobEntryAttributesMap(jobId, jobEntryId));
                        }
                        compatibleJobEntryLoadRep(jobEntry, repository, jobEntryTypeId, databases, slaveServers);
                        jobEntry.loadRep(repository, repository.metaStore, jobEntryId, databases, slaveServers);
                        jobEntryCopy.getEntry().setObjectId(jobEntryId);
                        jobentries.add(jobEntryCopy.getEntry());
                    } else {
                        throw new KettleException("JobEntryLoader was unable to find Job Entry Plugin with description [" + jet_code + "].");
                    }
                } else {
                    throw new KettleException("Unable to find Job Entry Type with id=" + jobEntryTypeId + " in the repository");
                }
            }
            jobEntryCopy.setLocation(locx, locy);
            jobEntryCopy.setDrawn(isdrawn);
            jobEntryCopy.setLaunchingInParallel(isparallel);
            return jobEntryCopy;
        } else {
            throw new KettleException("Unable to find job entry copy in repository with id_jobentry_copy=" + jobEntryCopyId);
        }
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to load job entry copy from repository with id_jobentry_copy=" + jobEntryCopyId, dbe);
    }
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), JobEntryInterface (org.pentaho.di.job.entry.JobEntryInterface), LongObjectId (org.pentaho.di.repository.LongObjectId), ObjectId (org.pentaho.di.repository.ObjectId), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), PluginInterface (org.pentaho.di.core.plugins.PluginInterface), ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString), MissingEntry (org.pentaho.di.job.entries.missing.MissingEntry), JobEntryBase (org.pentaho.di.job.entry.JobEntryBase), JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy), RowMetaAndData (org.pentaho.di.core.RowMetaAndData), PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry)
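
The same resolve-or-substitute pattern appears here for job entries. A minimal sketch under the same assumptions, using only the registry calls shown above; resolveJobEntry is an illustrative name, not part of the Kettle API.

// Illustrative helper (not Kettle API): resolve a JobEntryInterface for a job entry type code,
// substituting a MissingEntry placeholder when the plugin cannot be found.
public static JobEntryInterface resolveJobEntry(String jobName, String entryTypeCode) throws KettlePluginException {
    PluginRegistry registry = PluginRegistry.getInstance();
    PluginInterface jobPlugin = registry.findPluginWithId(JobEntryPluginType.class, entryTypeCode);
    if (jobPlugin == null) {
        // Keeps the job openable and records which plugin is missing.
        return new MissingEntry(jobName, entryTypeCode);
    }
    return (JobEntryInterface) registry.loadClass(jobPlugin);
}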

Example 9 with PluginInterface

Use of org.pentaho.di.core.plugins.PluginInterface in project pentaho-kettle by pentaho.

The class KettleDatabaseRepositoryCreationHelper, method updateJobEntryTypes.

/**
 * Update the list of job entry types in R_JOBENTRY_TYPE.
 *
 * @param statements
 *          collects the generated SQL statements when running in dry-run mode
 * @param dryrun
 *          if true, only generate the SQL statements instead of executing them
 * @param create
 *          if true, the repository is being created, so new ids are assigned sequentially instead of being looked up
 * @exception KettleException
 *              if something went wrong during the update.
 */
public void updateJobEntryTypes(List<String> statements, boolean dryrun, boolean create) throws KettleException {
    synchronized (repository) {
        // We should only do an update if something has changed...
        PluginRegistry registry = PluginRegistry.getInstance();
        List<PluginInterface> jobPlugins = registry.getPlugins(JobEntryPluginType.class);
        for (int i = 0; i < jobPlugins.size(); i++) {
            PluginInterface jobPlugin = jobPlugins.get(i);
            String type_desc = jobPlugin.getIds()[0];
            String type_desc_long = jobPlugin.getName();
            ObjectId id = null;
            if (!create) {
                id = repository.jobEntryDelegate.getJobEntryTypeID(type_desc);
            }
            if (id == null) {
                // Not found, we need to add this one...
                id = new LongObjectId(i + 1);
                if (!create) {
                    id = repository.connectionDelegate.getNextJobEntryTypeID();
                }
                RowMetaAndData table = new RowMetaAndData();
                table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_ID_JOBENTRY_TYPE), id);
                table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE), type_desc);
                table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_DESCRIPTION), type_desc_long);
                if (dryrun) {
                    String sql = database.getSQLOutput(null, KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE, table.getRowMeta(), table.getData(), null);
                    statements.add(sql);
                } else {
                    database.prepareInsert(table.getRowMeta(), null, KettleDatabaseRepository.TABLE_R_JOBENTRY_TYPE);
                    database.setValuesInsert(table);
                    database.insertRow();
                    database.closeInsert();
                }
            }
        }
    }
}
Also used: ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString), RowMetaAndData (org.pentaho.di.core.RowMetaAndData), LongObjectId (org.pentaho.di.repository.LongObjectId), ObjectId (org.pentaho.di.repository.ObjectId), PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry), PluginInterface (org.pentaho.di.core.plugins.PluginInterface), ValueMetaInteger (org.pentaho.di.core.row.value.ValueMetaInteger)
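
A hedged usage sketch of the dry-run path: with dryrun set to true, the method only appends the generated INSERT statements to the supplied list, so a caller can preview the upgrade before R_JOBENTRY_TYPE is touched. How the helper instance is obtained here (wrapping an existing KettleDatabaseRepository) is an assumption made for this illustration.

// Illustrative dry-run preview; constructing the helper around 'repository' is assumed, not shown above.
List<String> statements = new ArrayList<String>();
KettleDatabaseRepositoryCreationHelper helper = new KettleDatabaseRepositoryCreationHelper(repository);

// dryrun = true: collect SQL only; create = false: look up existing type ids first.
helper.updateJobEntryTypes(statements, true, false);

for (String sql : statements) {
    // Each entry is an INSERT for a job entry type not yet present in R_JOBENTRY_TYPE.
    System.out.println(sql);
}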

Example 10 with PluginInterface

Use of org.pentaho.di.core.plugins.PluginInterface in project pentaho-kettle by pentaho.

The class KettleDatabaseRepositoryCreationHelper, method updateDatabaseTypes.

/**
 * Update the list in R_DATABASE_TYPE using the registered database plugin entries.
 *
 * @param statements
 *          collects the generated SQL statements when running in dry-run mode
 * @param dryrun
 *          if true, only generate the SQL statements instead of executing them
 * @param create
 *          if true, the repository is being created, so new ids are assigned sequentially instead of being looked up
 * @return the statements list, with any generated SQL appended
 * @throws KettleException
 *           if the update didn't go as planned.
 */
public List<String> updateDatabaseTypes(List<String> statements, boolean dryrun, boolean create) throws KettleException {
    synchronized (repository) {
        // We should only do an update if something has changed...
        // 
        List<PluginInterface> plugins = pluginRegistry.getPlugins(DatabasePluginType.class);
        for (int i = 0; i < plugins.size(); i++) {
            PluginInterface plugin = plugins.get(i);
            ObjectId id = null;
            if (!create) {
                id = repository.databaseDelegate.getDatabaseTypeID(plugin.getIds()[0]);
            }
            if (id == null) {
                // Not found, we need to add this one...
                id = new LongObjectId(i + 1);
                if (!create) {
                    id = repository.connectionDelegate.getNextDatabaseTypeID();
                }
                RowMetaAndData table = new RowMetaAndData();
                table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE), id);
                table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE), plugin.getIds()[0]);
                table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION), plugin.getName());
                if (dryrun) {
                    String sql = database.getSQLOutput(null, KettleDatabaseRepository.TABLE_R_DATABASE_TYPE, table.getRowMeta(), table.getData(), null);
                    statements.add(sql);
                } else {
                    database.prepareInsert(table.getRowMeta(), null, KettleDatabaseRepository.TABLE_R_DATABASE_TYPE);
                    database.setValuesInsert(table);
                    database.insertRow();
                    database.closeInsert();
                }
            }
        }
    }
    return statements;
}
Also used: ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString), RowMetaAndData (org.pentaho.di.core.RowMetaAndData), LongObjectId (org.pentaho.di.repository.LongObjectId), ObjectId (org.pentaho.di.repository.ObjectId), PluginInterface (org.pentaho.di.core.plugins.PluginInterface), ValueMetaInteger (org.pentaho.di.core.row.value.ValueMetaInteger)
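
Both helpers walk a plugin type's registrations in the same way. A minimal sketch of that shared iteration, using only PluginRegistry.getPlugins(), getIds() and getName() as seen above, to list the registered database plugins by id and name.

// Minimal sketch of the shared iteration pattern: list database plugins by primary id and name.
PluginRegistry registry = PluginRegistry.getInstance();
List<PluginInterface> plugins = registry.getPlugins(DatabasePluginType.class);

for (PluginInterface plugin : plugins) {
    // getIds()[0] is the primary plugin id, as written to FIELD_DATABASE_TYPE_CODE above.
    System.out.println(plugin.getIds()[0] + " : " + plugin.getName());
}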

Aggregations

PluginInterface (org.pentaho.di.core.plugins.PluginInterface): 99 usages
PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry): 45 usages
KettleException (org.pentaho.di.core.exception.KettleException): 24 usages
ArrayList (java.util.ArrayList): 17 usages
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 14 usages
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog): 12 usages
TransMeta (org.pentaho.di.trans.TransMeta): 11 usages
StepMeta (org.pentaho.di.trans.step.StepMeta): 11 usages
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 10 usages
Test (org.junit.Test): 9 usages
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 9 usages
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 8 usages
Point (org.pentaho.di.core.gui.Point): 8 usages
JobMeta (org.pentaho.di.job.JobMeta): 8 usages
KettlePluginException (org.pentaho.di.core.exception.KettlePluginException): 7 usages
TransHopMeta (org.pentaho.di.trans.TransHopMeta): 7 usages
TreeItem (org.eclipse.swt.widgets.TreeItem): 6 usages
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 6 usages
LongObjectId (org.pentaho.di.repository.LongObjectId): 6 usages
ObjectId (org.pentaho.di.repository.ObjectId): 6 usages