
Example 1 with JobEntryBase

Use of org.pentaho.di.job.entry.JobEntryBase in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryJobEntryDelegate, the method loadJobEntryCopy:

/**
 * Load the graphical job entry copy from the repository. If the underlying job entry is not yet in the
 * jobentries list, it is loaded as well (type, name and description included).
 *
 * @param jobId
 *          the job ID
 * @param jobEntryCopyId
 *          the job entry copy ID
 * @param jobentries
 *          a list with all job entries loaded so far
 * @param databases
 *          a list with all defined databases
 * @param slaveServers
 *          a list with all defined slave servers
 * @param jobname
 *          the job name, used for a MissingEntry placeholder when the plugin cannot be found
 */
public JobEntryCopy loadJobEntryCopy(ObjectId jobId, ObjectId jobEntryCopyId, List<JobEntryInterface> jobentries, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, String jobname) throws KettleException {
    JobEntryCopy jobEntryCopy = new JobEntryCopy();
    try {
        jobEntryCopy.setObjectId(jobEntryCopyId);
        // Handle GUI information: nr, location, ...
        RowMetaAndData r = getJobEntryCopy(jobEntryCopyId);
        if (r != null) {
            // These are the jobentry_copy fields...
            // 
            ObjectId jobEntryId = new LongObjectId(r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY, 0));
            ObjectId jobEntryTypeId = new LongObjectId(r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_ID_JOBENTRY_TYPE, 0));
            jobEntryCopy.setNr((int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_NR, 0));
            int locx = (int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_X, 0);
            int locy = (int) r.getInteger(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_LOCATION_Y, 0);
            boolean isdrawn = r.getBoolean(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_GUI_DRAW, false);
            boolean isparallel = r.getBoolean(KettleDatabaseRepository.FIELD_JOBENTRY_COPY_PARALLEL, false);
            // Do we have the jobentry already?
            // 
            jobEntryCopy.setEntry(JobMeta.findJobEntry(jobentries, jobEntryId));
            if (jobEntryCopy.getEntry() == null) {
                // What type of jobentry do we load now?
                // Get the jobentry type code
                // 
                RowMetaAndData rt = getJobEntryType(new LongObjectId(jobEntryTypeId));
                if (rt != null) {
                    String jet_code = rt.getString(KettleDatabaseRepository.FIELD_JOBENTRY_TYPE_CODE, null);
                    JobEntryInterface jobEntry = null;
                    PluginRegistry registry = PluginRegistry.getInstance();
                    PluginInterface jobPlugin = registry.findPluginWithId(JobEntryPluginType.class, jet_code);
                    if (jobPlugin == null) {
                        jobEntry = new MissingEntry(jobname, jet_code);
                    } else {
                        jobEntry = (JobEntryInterface) registry.loadClass(jobPlugin);
                    }
                    if (jobEntry != null) {
                        jobEntryCopy.setEntry(jobEntry);
                        // 
                        if (jobEntry instanceof JobEntryBase) {
                            loadJobEntryBase((JobEntryBase) jobEntry, jobEntryId, databases, slaveServers);
                            ((JobEntryBase) jobEntry).setAttributesMap(loadJobEntryAttributesMap(jobId, jobEntryId));
                        }
                        compatibleJobEntryLoadRep(jobEntry, repository, jobEntryTypeId, databases, slaveServers);
                        jobEntry.loadRep(repository, repository.metaStore, jobEntryId, databases, slaveServers);
                        jobEntryCopy.getEntry().setObjectId(jobEntryId);
                        jobentries.add(jobEntryCopy.getEntry());
                    } else {
                        throw new KettleException("JobEntryLoader was unable to find Job Entry Plugin with description [" + jet_code + "].");
                    }
                } else {
                    throw new KettleException("Unable to find Job Entry Type with id=" + jobEntryTypeId + " in the repository");
                }
            }
            jobEntryCopy.setLocation(locx, locy);
            jobEntryCopy.setDrawn(isdrawn);
            jobEntryCopy.setLaunchingInParallel(isparallel);
            return jobEntryCopy;
        } else {
            throw new KettleException("Unable to find job entry copy in repository with id_jobentry_copy=" + jobEntryCopyId);
        }
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to load job entry copy from repository with id_jobentry_copy=" + jobEntryCopyId, dbe);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) LongObjectId(org.pentaho.di.repository.LongObjectId) ObjectId(org.pentaho.di.repository.ObjectId) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) PluginInterface(org.pentaho.di.core.plugins.PluginInterface) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) MissingEntry(org.pentaho.di.job.entries.missing.MissingEntry) JobEntryBase(org.pentaho.di.job.entry.JobEntryBase) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry)
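
For orientation, here is a minimal caller-side sketch of the method above. The delegate instance, the two ObjectIds, the database and slave server lists, and the job name "my_job" are placeholders assumed to be wired up by a connected KettleDatabaseRepository; the import packages are likewise assumptions, not taken from this page.

import java.util.ArrayList;
import java.util.List;

import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
// package assumed
import org.pentaho.di.repository.kdr.delegates.KettleDatabaseRepositoryJobEntryDelegate;

public class LoadJobEntryCopySketch {

    // Hypothetical caller: everything passed in is assumed to come from an open repository connection.
    static JobEntryCopy loadOneCopy( KettleDatabaseRepositoryJobEntryDelegate delegate, ObjectId jobId,
        ObjectId copyId, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException {
        // The shared jobentries list is how repeated copies of the same entry reuse one loaded instance;
        // callers keep one such list per job being loaded.
        List<JobEntryInterface> jobentries = new ArrayList<>();
        return delegate.loadJobEntryCopy( jobId, copyId, jobentries, databases, slaveServers, "my_job" );
    }
}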

Example 2 with JobEntryBase

Use of org.pentaho.di.job.entry.JobEntryBase in project pentaho-metaverse by pentaho.

From the class JobMetaJsonSerializer, the method serializeSteps:

@Override
protected void serializeSteps(JobMeta meta, JsonGenerator json) throws IOException {
    json.writeArrayFieldStart(JSON_PROPERTY_STEPS);
    int numberOfEntries = meta.nrJobEntries();
    for (int i = 0; i < numberOfEntries; i++) {
        JobEntryCopy jobEntry = meta.getJobEntry(i);
        LineageRepository repo = getLineageRepository();
        ObjectId jobId = meta.getObjectId() == null ? new StringObjectId(meta.getName()) : meta.getObjectId();
        ObjectId entryId = jobEntry.getObjectId() == null ? new StringObjectId(jobEntry.getName()) : jobEntry.getObjectId();
        JobEntryInterface jobEntryInterface = jobEntry.getEntry();
        JobEntryBase jobEntryBase = getJobEntryBase(jobEntryInterface);
        Job job = new Job(null, meta);
        jobEntryBase.setParentJob(job);
        jobEntryInterface.setObjectId(entryId);
        try {
            jobEntryInterface.saveRep(repo, null, jobId);
        } catch (KettleException e) {
            LOGGER.warn(Messages.getString("INFO.Serialization.Trans.Step", jobEntry.getName()), e);
        }
        json.writeObject(jobEntryBase);
    }
    json.writeEndArray();
}
Also used : JobEntryBase(org.pentaho.di.job.entry.JobEntryBase) KettleException(org.pentaho.di.core.exception.KettleException) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) ObjectId(org.pentaho.di.repository.ObjectId) StringObjectId(org.pentaho.di.repository.StringObjectId) LineageRepository(org.pentaho.metaverse.impl.model.kettle.LineageRepository) Job(org.pentaho.di.job.Job)
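
The getJobEntryBase helper used above is not shown on this page. Assuming that concrete job entries extend JobEntryBase (the normal case in Kettle), a plausible minimal version is sketched below; this is illustrative only and not necessarily the pentaho-metaverse implementation.

import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;

class JobEntryBaseAccess {

    // Illustrative sketch: view the entry as JobEntryBase, or return null for entries
    // that implement JobEntryInterface without extending the base class.
    static JobEntryBase getJobEntryBase( JobEntryInterface jobEntryInterface ) {
        if ( jobEntryInterface instanceof JobEntryBase ) {
            return (JobEntryBase) jobEntryInterface;
        }
        return null;
    }
}

With such a helper, the serializer would need a null check before calling setParentJob on entries that do not extend JobEntryBase.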

Example 3 with JobEntryBase

Use of org.pentaho.di.job.entry.JobEntryBase in project pentaho-metaverse by pentaho.

From the class JobEntryExternalResourceConsumerListenerTest, the method testCallJobEntryExtensionPoint:

@Test
public void testCallJobEntryExtensionPoint() throws Exception {
    JobEntryExternalResourceConsumerListener jobEntryExtensionPoint = new JobEntryExternalResourceConsumerListener();
    jobEntryExtensionPoint.setJobEntryExternalResourceConsumerProvider(MetaverseTestUtils.getJobEntryExternalResourceConsumerProvider());
    JobExecutionExtension jobExec = mock(JobExecutionExtension.class);
    JobEntryBase jobEntryBase = mock(JobEntryBase.class, withSettings().extraInterfaces(JobEntryInterface.class));
    JobEntryInterface jobEntryInterface = (JobEntryInterface) jobEntryBase;
    JobEntryCopy jobEntryCopy = mock(JobEntryCopy.class);
    when(jobEntryCopy.getEntry()).thenReturn(jobEntryInterface);
    jobExec.jobEntryCopy = jobEntryCopy;
    jobEntryExtensionPoint.callExtensionPoint(null, jobExec);
    // Add a consumer
    Map<Class<? extends JobEntryBase>, Set<IJobEntryExternalResourceConsumer>> jobEntryConsumerMap = new JobEntryExternalResourceConsumerProvider().getJobEntryConsumerMap();
    Set<IJobEntryExternalResourceConsumer> consumers = new HashSet<IJobEntryExternalResourceConsumer>();
    jobEntryConsumerMap.put(jobEntryBase.getClass(), consumers);
    jobEntryExtensionPoint.callExtensionPoint(null, jobExec);
    IJobEntryExternalResourceConsumer consumer = mock(IJobEntryExternalResourceConsumer.class);
    when(consumer.getResourcesFromMeta(Mockito.any())).thenReturn(Collections.emptyList());
    consumers.add(consumer);
    Job mockJob = mock(Job.class);
    when(jobEntryInterface.getParentJob()).thenReturn(mockJob);
    jobExec.job = mockJob;
    jobEntryExtensionPoint.callExtensionPoint(null, jobExec);
    when(consumer.isDataDriven(Mockito.any())).thenReturn(Boolean.TRUE);
    jobEntryExtensionPoint.callExtensionPoint(null, jobExec);
}
Also used : Set(java.util.Set) HashSet(java.util.HashSet) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) JobExecutionExtension(org.pentaho.di.job.JobExecutionExtension) JobEntryBase(org.pentaho.di.job.entry.JobEntryBase) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryExternalResourceConsumerProvider(org.pentaho.metaverse.analyzer.kettle.jobentry.JobEntryExternalResourceConsumerProvider) IJobEntryExternalResourceConsumer(org.pentaho.metaverse.api.analyzer.kettle.jobentry.IJobEntryExternalResourceConsumer) Job(org.pentaho.di.job.Job) Test(org.junit.Test)
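
The mock( JobEntryBase.class, withSettings().extraInterfaces( JobEntryInterface.class ) ) call is the standard Mockito way to create one mock object that can be cast to both the abstract base class and the interface, which is what the code under test expects. A self-contained illustration of the same pattern, using arbitrary types that are not part of Kettle:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;

class ExtraInterfacesSketch {

    // Arbitrary example types, present only to show the pattern.
    abstract static class Base { }
    interface Extra { String label(); }

    static void demo() {
        // A single mock instance that satisfies both Base and Extra.
        Base base = mock( Base.class, withSettings().extraInterfaces( Extra.class ) );
        Extra asExtra = (Extra) base;
        // Stubbing works through either view of the same mock.
        when( asExtra.label() ).thenReturn( "x" );
    }
}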

Example 4 with JobEntryBase

Use of org.pentaho.di.job.entry.JobEntryBase in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryJobEntryDelegate, the method saveJobEntryCopy:

public void saveJobEntryCopy(JobEntryCopy copy, ObjectId id_job, KettleDatabaseRepositoryMetaStore metaStore) throws KettleException {
    try {
        JobEntryInterface entry = copy.getEntry();
        /*
         * --1-- Save the JobEntryCopy details... --2-- If we don't find an id_jobentry, save the jobentry
         * (meaning: only once)
         */
        // See if an entry with the same name is already available...
        ObjectId id_jobentry = getJobEntryID(copy.getName(), id_job);
        if (id_jobentry == null) {
            insertJobEntry(id_job, (JobEntryBase) entry);
            // THIS IS THE PLUGIN/JOB-ENTRY BEING SAVED!
            // 
            entry.saveRep(repository, metaStore, id_job);
            compatibleEntrySaveRep(entry, repository, id_job);
            // 
            if (entry instanceof JobEntryBase) {
                saveAttributesMap(id_job, copy.getObjectId(), ((JobEntryBase) entry).getAttributesMap());
            }
            id_jobentry = entry.getObjectId();
        }
        // OK, the entry is saved.
        // Get the entry type...
        // 
        ObjectId id_jobentry_type = getJobEntryTypeID(entry.getPluginId());
        // Oops, not found: update the repository!
        if (id_jobentry_type == null) {
            repository.updateJobEntryTypes();
            // Try again!
            id_jobentry_type = getJobEntryTypeID(entry.getPluginId());
        }
        // Save the entry copy..
        // 
        copy.setObjectId(insertJobEntryCopy(id_job, id_jobentry, id_jobentry_type, copy.getNr(), copy.getLocation().x, copy.getLocation().y, copy.isDrawn(), copy.isLaunchingInParallel()));
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to save job entry copy to the repository, id_job=" + id_job, dbe);
    }
}
Also used : JobEntryBase(org.pentaho.di.job.entry.JobEntryBase) KettleException(org.pentaho.di.core.exception.KettleException) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) LongObjectId(org.pentaho.di.repository.LongObjectId) ObjectId(org.pentaho.di.repository.ObjectId) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)
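
A caller that wants to persist every entry copy of a job can iterate JobMeta the same way Example 2 does and hand each copy to this method. A hedged sketch, assuming the delegate, the job ObjectId and the metastore all come from an already connected KettleDatabaseRepository (import packages are assumptions):

import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.ObjectId;
// packages assumed
import org.pentaho.di.repository.kdr.KettleDatabaseRepositoryMetaStore;
import org.pentaho.di.repository.kdr.delegates.KettleDatabaseRepositoryJobEntryDelegate;

class SaveJobEntryCopiesSketch {

    // Hypothetical caller: delegate, id_job and metaStore are assumed to be wired to an open repository.
    static void saveAllCopies( KettleDatabaseRepositoryJobEntryDelegate delegate, JobMeta jobMeta,
        ObjectId id_job, KettleDatabaseRepositoryMetaStore metaStore ) throws KettleException {
        for ( int i = 0; i < jobMeta.nrJobEntries(); i++ ) {
            delegate.saveJobEntryCopy( jobMeta.getJobEntry( i ), id_job, metaStore );
        }
    }
}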

Example 5 with JobEntryBase

Use of org.pentaho.di.job.entry.JobEntryBase in project pentaho-kettle by pentaho.

From the class JobDelegate, the method dataNodeToElement:

public void dataNodeToElement(final DataNode rootNode, final RepositoryElementInterface element) throws KettleException {
    JobMeta jobMeta = (JobMeta) element;
    Set<String> privateDatabases = null;
    // read the private databases
    DataNode privateDbsNode = rootNode.getNode(NODE_JOB_PRIVATE_DATABASES);
    // BACKLOG-6635
    if (privateDbsNode != null) {
        privateDatabases = new HashSet<>();
        if (privateDbsNode.hasProperty(PROP_JOB_PRIVATE_DATABASE_NAMES)) {
            for (String privateDatabaseName : getString(privateDbsNode, PROP_JOB_PRIVATE_DATABASE_NAMES).split(JOB_PRIVATE_DATABASE_DELIMITER)) {
                if (!privateDatabaseName.isEmpty()) {
                    privateDatabases.add(privateDatabaseName);
                }
            }
        } else {
            for (DataNode privateDatabase : privateDbsNode.getNodes()) {
                privateDatabases.add(privateDatabase.getName());
            }
        }
    }
    jobMeta.setPrivateDatabases(privateDatabases);
    jobMeta.setSharedObjectsFile(getString(rootNode, PROP_SHARED_FILE));
    // Keep a unique list of job entries to facilitate in the loading.
    // 
    List<JobEntryInterface> jobentries = new ArrayList<>();
    // Read the job entry copies
    // 
    DataNode entriesNode = rootNode.getNode(NODE_ENTRIES);
    int nrCopies = (int) entriesNode.getProperty(PROP_NR_JOB_ENTRY_COPIES).getLong();
    // 
    for (DataNode copyNode : entriesNode.getNodes()) {
        // Read the entry...
        // 
        JobEntryInterface jobEntry = readJobEntry(copyNode, jobMeta, jobentries);
        JobEntryCopy copy = new JobEntryCopy(jobEntry);
        copy.setName(getString(copyNode, PROP_NAME));
        copy.setDescription(getString(copyNode, PROP_DESCRIPTION));
        copy.setObjectId(new StringObjectId(copyNode.getId().toString()));
        copy.setNr((int) copyNode.getProperty(PROP_NR).getLong());
        int x = (int) copyNode.getProperty(PROP_GUI_LOCATION_X).getLong();
        int y = (int) copyNode.getProperty(PROP_GUI_LOCATION_Y).getLong();
        copy.setLocation(x, y);
        copy.setDrawn(copyNode.getProperty(PROP_GUI_DRAW).getBoolean());
        copy.setLaunchingInParallel(copyNode.getProperty(PROP_PARALLEL).getBoolean());
        // Read the job entry group attributes map
        if (jobEntry instanceof JobEntryBase) {
            AttributesMapUtil.loadAttributesMap(copyNode, (JobEntryBase) jobEntry);
        }
        loadAttributesMap(copyNode, copy);
        jobMeta.addJobEntry(copy);
    }
    if (jobMeta.getJobCopies().size() != nrCopies) {
        throw new KettleException("The number of job entry copies read [" + jobMeta.getJobCopies().size() + "] was not the number we expected [" + nrCopies + "]");
    }
    // Read the notes...
    // 
    DataNode notesNode = rootNode.getNode(NODE_NOTES);
    int nrNotes = (int) notesNode.getProperty(PROP_NR_NOTES).getLong();
    for (DataNode noteNode : notesNode.getNodes()) {
        String xml = getString(noteNode, PROP_XML);
        jobMeta.addNote(new NotePadMeta(XMLHandler.getSubNode(XMLHandler.loadXMLString(xml), NotePadMeta.XML_TAG)));
    }
    if (jobMeta.nrNotes() != nrNotes) {
        throw new KettleException("The number of notes read [" + jobMeta.nrNotes() + "] was not the number we expected [" + nrNotes + "]");
    }
    // Read the hops...
    // 
    DataNode hopsNode = rootNode.getNode(NODE_HOPS);
    int nrHops = (int) hopsNode.getProperty(PROP_NR_HOPS).getLong();
    for (DataNode hopNode : hopsNode.getNodes()) {
        String copyFromName = getString(hopNode, JOB_HOP_FROM);
        int copyFromNr = (int) hopNode.getProperty(JOB_HOP_FROM_NR).getLong();
        String copyToName = getString(hopNode, JOB_HOP_TO);
        int copyToNr = (int) hopNode.getProperty(JOB_HOP_TO_NR).getLong();
        boolean enabled = true;
        if (hopNode.hasProperty(JOB_HOP_ENABLED)) {
            enabled = hopNode.getProperty(JOB_HOP_ENABLED).getBoolean();
        }
        boolean evaluation = true;
        if (hopNode.hasProperty(JOB_HOP_EVALUATION)) {
            evaluation = hopNode.getProperty(JOB_HOP_EVALUATION).getBoolean();
        }
        boolean unconditional = true;
        if (hopNode.hasProperty(JOB_HOP_UNCONDITIONAL)) {
            unconditional = hopNode.getProperty(JOB_HOP_UNCONDITIONAL).getBoolean();
        }
        JobEntryCopy copyFrom = jobMeta.findJobEntry(copyFromName, copyFromNr, true);
        JobEntryCopy copyTo = jobMeta.findJobEntry(copyToName, copyToNr, true);
        JobHopMeta jobHopMeta = new JobHopMeta(copyFrom, copyTo);
        jobHopMeta.setEnabled(enabled);
        jobHopMeta.setEvaluation(evaluation);
        jobHopMeta.setUnconditional(unconditional);
        jobMeta.addJobHop(jobHopMeta);
    }
    if (jobMeta.nrJobHops() != nrHops) {
        throw new KettleException("The number of hops read [" + jobMeta.nrJobHops() + "] was not the number we expected [" + nrHops + "]");
    }
    // Load the details at the end, to make sure we reference the databases correctly, etc.
    // 
    loadJobMetaDetails(rootNode, jobMeta);
    jobMeta.eraseParameters();
    DataNode paramsNode = rootNode.getNode(NODE_PARAMETERS);
    int count = (int) paramsNode.getProperty(PROP_NR_PARAMETERS).getLong();
    for (int idx = 0; idx < count; idx++) {
        DataNode paramNode = paramsNode.getNode(PARAM_PREFIX + idx);
        String key = getString(paramNode, PARAM_KEY);
        String def = getString(paramNode, PARAM_DEFAULT);
        String desc = getString(paramNode, PARAM_DESC);
        jobMeta.addParameterDefinition(key, def, desc);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) JobHopMeta(org.pentaho.di.job.JobHopMeta) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) ArrayList(java.util.ArrayList) StringObjectId(org.pentaho.di.repository.StringObjectId) JobEntryBase(org.pentaho.di.job.entry.JobEntryBase) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) DataNode(org.pentaho.platform.api.repository2.unified.data.node.DataNode) NotePadMeta(org.pentaho.di.core.NotePadMeta)
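
The three hop flags in the method above repeat the same pattern of defaulting to true when a property is absent. A small helper like the following (hypothetical, not part of the actual JobDelegate) would express that once, using only the DataNode calls already seen above:

import org.pentaho.platform.api.repository2.unified.data.node.DataNode;

class DataNodeDefaults {

    // Hypothetical helper: read a boolean property, falling back to a default when it is missing.
    static boolean getBoolean( DataNode node, String property, boolean defaultValue ) {
        if ( node.hasProperty( property ) ) {
            return node.getProperty( property ).getBoolean();
        }
        return defaultValue;
    }
}

With it, the enabled, evaluation and unconditional reads each collapse to a single call such as getBoolean( hopNode, JOB_HOP_ENABLED, true ).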

Aggregations

JobEntryBase (org.pentaho.di.job.entry.JobEntryBase) 6
JobEntryInterface (org.pentaho.di.job.entry.JobEntryInterface) 6
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy) 5
KettleException (org.pentaho.di.core.exception.KettleException) 4
ObjectId (org.pentaho.di.repository.ObjectId) 3
NotePadMeta (org.pentaho.di.core.NotePadMeta) 2
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException) 2
Job (org.pentaho.di.job.Job) 2
JobHopMeta (org.pentaho.di.job.JobHopMeta) 2
JobMeta (org.pentaho.di.job.JobMeta) 2
LongObjectId (org.pentaho.di.repository.LongObjectId) 2
StringObjectId (org.pentaho.di.repository.StringObjectId) 2
DataNode (org.pentaho.platform.api.repository2.unified.data.node.DataNode) 2
ArrayList (java.util.ArrayList) 1
HashSet (java.util.HashSet) 1
Set (java.util.Set) 1
Test (org.junit.Test) 1
RowMetaAndData (org.pentaho.di.core.RowMetaAndData) 1
PluginInterface (org.pentaho.di.core.plugins.PluginInterface) 1
PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry) 1