
Example 61 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

The class StreamToJobNodeConverter, method saveSharedObjects.

public void saveSharedObjects(final Repository repo, final RepositoryElementInterface element) throws KettleException {
    JobMeta jobMeta = (JobMeta) element;
    // First store the databases and the other shared objects the job depends on.
    List<String> databaseNames = Arrays.asList(repo.getDatabaseNames(true));
    int dbIndex = 0;
    int indexToReplace = 0;
    boolean updateMeta = false;
    for (DatabaseMeta databaseMeta : jobMeta.getDatabases()) {
        if (!databaseNames.contains(databaseMeta.getName())) {
            if (databaseMeta.getObjectId() == null || !StringUtils.isEmpty(databaseMeta.getHostname())) {
                repo.save(databaseMeta, null, null);
            }
        } else if (databaseMeta.getObjectId() == null) {
            indexToReplace = dbIndex;
            updateMeta = true;
        }
        dbIndex++;
    }
    // Re-attach the repository object id to the matching database in the jobMeta database collection.
    if (updateMeta) {
        DatabaseMeta dbMetaToReplace = jobMeta.getDatabase(indexToReplace);
        dbMetaToReplace.setObjectId(repo.getDatabaseID(dbMetaToReplace.getName()));
        jobMeta.removeDatabase(indexToReplace);
        jobMeta.addDatabase(dbMetaToReplace);
    }
    // Save any slave servers that have not been stored in the repository yet.
    for (SlaveServer slaveServer : jobMeta.getSlaveServers()) {
        if (slaveServer.getObjectId() == null) {
            repo.save(slaveServer, null, null);
        }
    }
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) SlaveServer(org.pentaho.di.cluster.SlaveServer) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta)
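
A minimal usage sketch, assuming an already constructed StreamToJobNodeConverter (converter), a connected Repository (repository) and a JobMeta to persist; the variable names and the version comment are hypothetical:

    // Persist the job's shared objects (databases, slave servers) first,
    // then save the job itself so its references resolve against the repository.
    converter.saveSharedObjects(repository, jobMeta);
    repository.save(jobMeta, "example version comment", null);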

Example 62 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

The class JobDelegate, method loadSharedObjects.

// ~ Methods =========================================================================================================
@SuppressWarnings("unchecked")
public SharedObjects loadSharedObjects(final RepositoryElementInterface element, final Map<RepositoryObjectType, List<? extends SharedObjectInterface>> sharedObjectsByType) throws KettleException {
    JobMeta jobMeta = (JobMeta) element;
    jobMeta.setSharedObjects(jobMeta.readSharedObjects());
    // Repository objects take priority so let's overwrite them...
    readDatabases(jobMeta, true, (List<DatabaseMeta>) sharedObjectsByType.get(RepositoryObjectType.DATABASE));
    readSlaves(jobMeta, true, (List<SlaveServer>) sharedObjectsByType.get(RepositoryObjectType.SLAVE_SERVER));
    return jobMeta.getSharedObjects();
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) SlaveServer(org.pentaho.di.cluster.SlaveServer) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta)
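
A minimal sketch of the sharedObjectsByType map this method expects, assuming the two lists were already read from the repository and jobDelegate is an existing JobDelegate instance; the variable names are hypothetical:

    Map<RepositoryObjectType, List<? extends SharedObjectInterface>> byType = new HashMap<>();
    byType.put(RepositoryObjectType.DATABASE, repositoryDatabases);        // List<DatabaseMeta> read from the repository
    byType.put(RepositoryObjectType.SLAVE_SERVER, repositorySlaveServers); // List<SlaveServer> read from the repository
    // Repository-level objects overwrite same-named entries read locally by jobMeta.readSharedObjects().
    SharedObjects shared = jobDelegate.loadSharedObjects(jobMeta, byType);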

Example 63 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

The class PurRepository, method loadJobs.

/**
 * Load all jobs referenced by {@code files}.
 *
 * @param monitor
 *          Progress monitor; loading stops early if it reports cancellation. May be {@code null}.
 * @param log
 *          Log channel used for progress and error reporting.
 * @param files
 *          Job files to load.
 * @param setInternalVariables
 *          Should internal variables be set when loading? (Note: THIS IS IGNORED, they are always set)
 * @return Loaded jobs
 * @throws KettleException
 *           Error loading data for jobs from repository
 */
protected List<JobMeta> loadJobs(final ProgressMonitorListener monitor, final LogChannelInterface log, final List<RepositoryFile> files, final boolean setInternalVariables) throws KettleException {
    List<JobMeta> jobs = new ArrayList<JobMeta>(files.size());
    List<NodeRepositoryFileData> filesData = pur.getDataForReadInBatch(files, NodeRepositoryFileData.class);
    List<VersionSummary> versions = pur.getVersionSummaryInBatch(files);
    Iterator<RepositoryFile> filesIter = files.iterator();
    Iterator<NodeRepositoryFileData> filesDataIter = filesData.iterator();
    Iterator<VersionSummary> versionsIter = versions.iterator();
    while ((monitor == null || !monitor.isCanceled()) && filesIter.hasNext()) {
        RepositoryFile file = filesIter.next();
        NodeRepositoryFileData fileData = filesDataIter.next();
        VersionSummary version = versionsIter.next();
        try {
            String dirPath = file.getPath().substring(0, file.getPath().lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR));
            log.logDetailed("Loading/Exporting job [{0} : {1}]  ({2})", dirPath, file.getTitle(), file.getPath()); // $NON-NLS-1$
            if (monitor != null) {
                monitor.subTask("Exporting job [" + file.getPath() + "]"); // $NON-NLS-1$ //$NON-NLS-2$
            }
            JobMeta jobMeta = buildJobMeta(file, findDirectory(dirPath), fileData, createObjectRevision(version));
            ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobMetaLoaded.id, jobMeta);
            jobs.add(jobMeta);
        } catch (Exception ex) {
            log.logError("Unable to load job [" + file.getPath() + "]", ex); // $NON-NLS-1$ //$NON-NLS-2$
        }
    }
    return jobs;
}
Also used : EEJobMeta(org.pentaho.di.repository.pur.model.EEJobMeta) JobMeta(org.pentaho.di.job.JobMeta) ArrayList(java.util.ArrayList) MetaStoreNamespaceExistsException(org.pentaho.metastore.api.exceptions.MetaStoreNamespaceExistsException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) MetaStoreException(org.pentaho.metastore.api.exceptions.MetaStoreException) UnifiedRepositoryCreateFileException(org.pentaho.platform.api.repository2.unified.UnifiedRepositoryCreateFileException) UnifiedRepositoryUpdateFileException(org.pentaho.platform.api.repository2.unified.UnifiedRepositoryUpdateFileException) SOAPFaultException(javax.xml.ws.soap.SOAPFaultException) KettleException(org.pentaho.di.core.exception.KettleException) IdNotFoundException(org.pentaho.di.core.exception.IdNotFoundException) KettleSecurityException(org.pentaho.di.core.exception.KettleSecurityException) NodeRepositoryFileData(org.pentaho.platform.api.repository2.unified.data.node.NodeRepositoryFileData) VersionSummary(org.pentaho.platform.api.repository2.unified.VersionSummary) RepositoryFile(org.pentaho.platform.api.repository2.unified.RepositoryFile)
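
Both loadJobs and loadJob below fire the JobMetaLoaded extension point for every JobMeta they build. A minimal sketch of a listener that could observe those loads, assuming annotation-based plugin registration; the class name, id and description are hypothetical:

    // Hypothetical extension point plugin: logs each job loaded from the repository.
    @ExtensionPoint(id = "JobMetaLoadedLogger", extensionPointId = "JobMetaLoaded",
        description = "Logs every job loaded from the repository")
    public class JobMetaLoadedLogger implements ExtensionPointInterface {
        @Override
        public void callExtensionPoint(LogChannelInterface log, Object object) throws KettleException {
            if (object instanceof JobMeta) {
                log.logBasic("Job loaded: " + ((JobMeta) object).getName());
            }
        }
    }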

Example 64 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

The class PurRepository, method loadJob.

@Override
public JobMeta loadJob(String jobname, RepositoryDirectoryInterface parentDir, ProgressMonitorListener monitor, String versionId) throws KettleException {
    String absPath = null;
    try {
        absPath = getPath(jobname, parentDir, RepositoryObjectType.JOB);
        if (absPath == null) {
            // Couldn't resolve path, throw an exception
            throw new KettleFileException(BaseMessages.getString(PKG, "PurRepository.ERROR_0003_JOB_NOT_FOUND", jobname));
        }
        RepositoryFile file = pur.getFile(absPath);
        if (versionId != null) {
            // need to go back to server to get versioned info
            file = pur.getFileAtVersion(file.getId(), versionId);
        }
        NodeRepositoryFileData data = pur.getDataAtVersionForRead(file.getId(), versionId, NodeRepositoryFileData.class);
        ObjectRevision revision = getObjectRevision(new StringObjectId(file.getId().toString()), versionId);
        JobMeta jobMeta = buildJobMeta(file, parentDir, data, revision);
        ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobMetaLoaded.id, jobMeta);
        return jobMeta;
    } catch (Exception e) {
        throw new KettleException("Unable to load job from path [" + absPath + "]", e);
    }
}
Also used : ObjectRevision(org.pentaho.di.repository.ObjectRevision) KettleException(org.pentaho.di.core.exception.KettleException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) EEJobMeta(org.pentaho.di.repository.pur.model.EEJobMeta) JobMeta(org.pentaho.di.job.JobMeta) NodeRepositoryFileData(org.pentaho.platform.api.repository2.unified.data.node.NodeRepositoryFileData) RepositoryFile(org.pentaho.platform.api.repository2.unified.RepositoryFile) StringObjectId(org.pentaho.di.repository.StringObjectId) MetaStoreNamespaceExistsException(org.pentaho.metastore.api.exceptions.MetaStoreNamespaceExistsException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) MetaStoreException(org.pentaho.metastore.api.exceptions.MetaStoreException) UnifiedRepositoryCreateFileException(org.pentaho.platform.api.repository2.unified.UnifiedRepositoryCreateFileException) UnifiedRepositoryUpdateFileException(org.pentaho.platform.api.repository2.unified.UnifiedRepositoryUpdateFileException) SOAPFaultException(javax.xml.ws.soap.SOAPFaultException) KettleException(org.pentaho.di.core.exception.KettleException) IdNotFoundException(org.pentaho.di.core.exception.IdNotFoundException) KettleSecurityException(org.pentaho.di.core.exception.KettleSecurityException)
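
A minimal usage sketch, assuming a connected repository instance; the directory path and job name are hypothetical:

    // Passing null for versionId loads the latest revision; a null monitor skips progress reporting.
    RepositoryDirectoryInterface dir = repository.findDirectory("/public/jobs");
    JobMeta dailyLoad = repository.loadJob("daily_load", dir, null, null);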

Example 65 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

The class JobExecutorMetaTest, method testExportResources.

@Test
public void testExportResources() throws KettleException {
    JobExecutorMeta jobExecutorMeta = spy(new JobExecutorMeta());
    JobMeta jobMeta = mock(JobMeta.class);
    String testName = "test";
    doReturn(jobMeta).when(jobExecutorMeta).loadJobMetaProxy(any(JobExecutorMeta.class), any(Repository.class), any(VariableSpace.class));
    when(jobMeta.exportResources(any(JobMeta.class), any(Map.class), any(ResourceNamingInterface.class), any(Repository.class), any(IMetaStore.class))).thenReturn(testName);
    jobExecutorMeta.exportResources(null, null, null, null, null);
    verify(jobMeta).setFilename("${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}/" + testName);
    verify(jobExecutorMeta).setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) Repository(org.pentaho.di.repository.Repository) VariableSpace(org.pentaho.di.core.variables.VariableSpace) HashMap(java.util.HashMap) Map(java.util.Map) IMetaStore(org.pentaho.metastore.api.IMetaStore) ResourceNamingInterface(org.pentaho.di.resource.ResourceNamingInterface) Test(org.junit.Test)
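
Here the expected filename resolves to ${Internal.Entry.Current.Directory}/test: after export the executed job is referenced by a filename relative to the calling entry's directory, and setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME) records that the job is now addressed by filename rather than by repository reference.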

Aggregations

JobMeta (org.pentaho.di.job.JobMeta): 254
Test (org.junit.Test): 88
TransMeta (org.pentaho.di.trans.TransMeta): 69
KettleException (org.pentaho.di.core.exception.KettleException): 62
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 48
Job (org.pentaho.di.job.Job): 45
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 27
Repository (org.pentaho.di.repository.Repository): 25
RepositoryDirectoryInterface (org.pentaho.di.repository.RepositoryDirectoryInterface): 25
Point (org.pentaho.di.core.gui.Point): 24
ArrayList (java.util.ArrayList): 23
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog): 22
SlaveServer (org.pentaho.di.cluster.SlaveServer): 17
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 17
FileObject (org.apache.commons.vfs2.FileObject): 16
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 16
LogChannelInterface (org.pentaho.di.core.logging.LogChannelInterface): 15
SimpleLoggingObject (org.pentaho.di.core.logging.SimpleLoggingObject): 15
PrintWriter (java.io.PrintWriter): 12
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 12