
Example 6 with JobEntryTrans

Use of org.pentaho.di.job.entries.trans.JobEntryTrans in project pentaho-kettle by pentaho.

From the class RunConfigurationDelegateTest, the method testUpdateLoadedJobs_PDI16777:

@Test
public void testUpdateLoadedJobs_PDI16777() {
    // A loaded job contains a transformation entry that references run configuration "key".
    JobEntryTrans trans = new JobEntryTrans();
    trans.setRunConfiguration("key");
    JobMeta meta = new JobMeta();
    meta.addJobEntry(new JobEntryCopy(trans));
    JobMeta[] jobs = new JobMeta[] { meta };
    doReturn(jobs).when(spoon).getLoadedJobs();
    // Updating configuration "key" should rename the entry's run configuration and
    // propagate the configuration's server as the remote slave server.
    DefaultRunConfiguration config = new DefaultRunConfiguration();
    config.setName("Test");
    config.setServer("localhost");
    delegate.updateLoadedJobs("key", config);
    assertEquals("Test", trans.getRunConfiguration());
    assertEquals("localhost", trans.getRemoteSlaveServerName());
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryTrans(org.pentaho.di.job.entries.trans.JobEntryTrans) DefaultRunConfiguration(org.pentaho.di.engine.configuration.impl.pentaho.DefaultRunConfiguration) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
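
A minimal sketch of the behaviour this test pins down, reconstructed only from the assertions above (it is not the actual RunConfigurationDelegate code): walk the jobs Spoon has loaded and re-point every JobEntryTrans that still references the old configuration key at the updated configuration and its server. The setter setRemoteSlaveServerName is assumed to exist alongside the getter asserted in the test.

public void updateLoadedJobs(String key, DefaultRunConfiguration runConfiguration) {
    // Sketch only -- reconstructed from the test, not the real delegate implementation.
    for (JobMeta jobMeta : spoon.getLoadedJobs()) {
        for (JobEntryCopy copy : jobMeta.getJobCopies()) {
            if (copy.getEntry() instanceof JobEntryTrans) {
                JobEntryTrans trans = (JobEntryTrans) copy.getEntry();
                if (key.equals(trans.getRunConfiguration())) {
                    // Rename the reference and propagate the configuration's server.
                    trans.setRunConfiguration(runConfiguration.getName());
                    trans.setRemoteSlaveServerName(runConfiguration.getServer());
                }
            }
        }
    }
}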

Example 7 with JobEntryTrans

Use of org.pentaho.di.job.entries.trans.JobEntryTrans in project pentaho-kettle by pentaho.

From the class RunConfigurationImportExtensionPoint, the method createSlaveServerRunConfigurations:

private List<RunConfiguration> createSlaveServerRunConfigurations(List<String> existingConfigurationNames, AbstractMeta abstractMeta) {
    List<RunConfiguration> runConfigurations = new ArrayList<>();
    if (abstractMeta instanceof JobMeta) {
        JobMeta jobMeta = (JobMeta) abstractMeta;
        Map<String, List<JobEntryTrans>> slaveServerGroups = jobMeta.getJobCopies().stream()
                .map(JobEntryCopy::getEntry)
                .filter(entry -> entry instanceof JobEntryTrans)
                .map(entry -> (JobEntryTrans) entry)
                .filter(entry -> Utils.isEmpty(entry.getRunConfiguration()))
                .filter(entry -> !Utils.isEmpty(entry.getRemoteSlaveServerName()))
                .collect(Collectors.groupingBy(JobEntryTrans::getRemoteSlaveServerName));
        slaveServerGroups.forEach((remoteServerName, entries) -> {
            String runConfigurationName = createRunConfigurationName(existingConfigurationNames, remoteServerName);
            DefaultRunConfiguration runConfiguration = createRunConfiguration(runConfigurationName, remoteServerName);
            runConfigurations.add(runConfiguration);
            entries.forEach(e -> e.setRunConfiguration(runConfiguration.getName()));
        });
    }
    return runConfigurations;
}
Also used : RunConfiguration(org.pentaho.di.engine.configuration.api.RunConfiguration) DefaultRunConfiguration(org.pentaho.di.engine.configuration.impl.pentaho.DefaultRunConfiguration) EmbeddedMetaStore(org.pentaho.di.core.attributes.metastore.EmbeddedMetaStore) KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) LogChannelInterface(org.pentaho.di.core.logging.LogChannelInterface) Utils(org.pentaho.di.core.util.Utils) AbstractMeta(org.pentaho.di.base.AbstractMeta) Collectors(java.util.stream.Collectors) ArrayList(java.util.ArrayList) RunConfigurationManager(org.pentaho.di.engine.configuration.impl.RunConfigurationManager) List(java.util.List) EmbeddedRunConfigurationManager(org.pentaho.di.engine.configuration.impl.EmbeddedRunConfigurationManager) DefaultRunConfigurationProvider(org.pentaho.di.engine.configuration.impl.pentaho.DefaultRunConfigurationProvider) Map(java.util.Map) Optional(java.util.Optional) VisibleForTesting(com.google.common.annotations.VisibleForTesting) ExtensionPointInterface(org.pentaho.di.core.extension.ExtensionPointInterface) JobEntryTrans(org.pentaho.di.job.entries.trans.JobEntryTrans) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) Comparator(java.util.Comparator) ExtensionPoint(org.pentaho.di.core.extension.ExtensionPoint)
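
The createRunConfiguration helper called above is not shown on this page. The sketch below is an assumption limited to the two setters that do appear here (setName and setServer); the real helper may configure additional options.

private DefaultRunConfiguration createRunConfiguration(String name, String server) {
    // Assumed shape of the helper, using only the setters visible elsewhere on this page.
    DefaultRunConfiguration runConfiguration = new DefaultRunConfiguration();
    runConfiguration.setName(name);
    runConfiguration.setServer(server);
    return runConfiguration;
}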

Example 8 with JobEntryTrans

Use of org.pentaho.di.job.entries.trans.JobEntryTrans in project pentaho-kettle by pentaho.

From the class RunConfigurationImportExtensionPointTest, the method shouldCreateRunConfigurationsForSlaveServer:

@Test
public void shouldCreateRunConfigurationsForSlaveServer() throws Exception {
    JobMeta jobMeta = mock(JobMeta.class);
    JobEntryCopy jobEntryCopy1 = mock(JobEntryCopy.class);
    JobEntryCopy jobEntryCopy2 = mock(JobEntryCopy.class);
    JobEntryCopy jobEntryCopy3 = mock(JobEntryCopy.class);
    JobEntryTrans trans1 = mock(JobEntryTrans.class);
    JobEntryTrans trans2 = mock(JobEntryTrans.class);
    JobEntryTrans trans3 = mock(JobEntryTrans.class);
    ArgumentCaptor<DefaultRunConfiguration> rcCaptor = ArgumentCaptor.forClass(DefaultRunConfiguration.class);
    when(jobMeta.getEmbeddedMetaStore()).thenReturn(embeddedMetaStore);
    when(jobMeta.getSlaveServers()).thenReturn(Arrays.asList(new SlaveServer("carte1", "host1", "1234", "user", "passw"), new SlaveServer("carte2", "host2", "1234", "user", "passw")));
    when(jobMeta.getJobCopies()).thenReturn(Arrays.asList(jobEntryCopy1, jobEntryCopy2, jobEntryCopy3));
    when(jobEntryCopy1.getEntry()).thenReturn(trans1);
    when(jobEntryCopy2.getEntry()).thenReturn(trans2);
    when(jobEntryCopy3.getEntry()).thenReturn(trans3);
    when(trans1.getRemoteSlaveServerName()).thenReturn("carte1");
    when(trans2.getRemoteSlaveServerName()).thenReturn("carte1");
    when(trans3.getRemoteSlaveServerName()).thenReturn("carte2");
    when(trans1.getRunConfiguration()).thenReturn(null);
    when(trans2.getRunConfiguration()).thenReturn(null);
    when(trans3.getRunConfiguration()).thenReturn(null);
    // "pentaho_auto_carte1_config" already exists, so the carte1 group gets a "_1" suffix.
    when(runConfigurationManager.getNames()).thenReturn(Collections.singletonList("pentaho_auto_carte1_config"));
    runConfigurationImportExtensionPoint.callExtensionPoint(log, jobMeta);
    // One run configuration is created and saved per distinct slave server.
    verify(runConfigurationManager, times(2)).save(rcCaptor.capture());
    verify(trans1).setRunConfiguration("pentaho_auto_carte1_config_1");
    verify(trans2).setRunConfiguration("pentaho_auto_carte1_config_1");
    verify(trans3).setRunConfiguration("pentaho_auto_carte2_config");
    List<DefaultRunConfiguration> allValues = rcCaptor.getAllValues();
    DefaultRunConfiguration runConfiguration1 = allValues.get(0);
    assertEquals("pentaho_auto_carte1_config_1", runConfiguration1.getName());
    assertEquals("carte1", runConfiguration1.getServer());
    DefaultRunConfiguration runConfiguration2 = allValues.get(1);
    assertEquals("pentaho_auto_carte2_config", runConfiguration2.getName());
    assertEquals("carte2", runConfiguration2.getServer());
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryTrans(org.pentaho.di.job.entries.trans.JobEntryTrans) DefaultRunConfiguration(org.pentaho.di.engine.configuration.impl.pentaho.DefaultRunConfiguration) SlaveServer(org.pentaho.di.cluster.SlaveServer) Test(org.junit.Test)
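
The naming behaviour this test verifies (a base name of pentaho_auto_<server>_config, with a numeric suffix while the name is already taken) can be reproduced with the standalone sketch below. createRunConfigurationName here is a hypothetical reconstruction based on the test's expectations, not the code from RunConfigurationImportExtensionPoint.

import java.util.Arrays;
import java.util.List;

public class RunConfigurationNamingSketch {

    // Hypothetical reconstruction of the naming rule exercised by the test above.
    static String createRunConfigurationName(List<String> existingNames, String serverName) {
        String base = "pentaho_auto_" + serverName + "_config";
        String candidate = base;
        int suffix = 1;
        while (existingNames.contains(candidate)) {
            candidate = base + "_" + suffix++;
        }
        return candidate;
    }

    public static void main(String[] args) {
        List<String> existing = Arrays.asList("pentaho_auto_carte1_config");
        // Prints pentaho_auto_carte1_config_1: the base name already exists, so a suffix is added.
        System.out.println(createRunConfigurationName(existing, "carte1"));
        // Prints pentaho_auto_carte2_config: no clash, so the base name is used as-is.
        System.out.println(createRunConfigurationName(existing, "carte2"));
    }
}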

Example 9 with JobEntryTrans

Use of org.pentaho.di.job.entries.trans.JobEntryTrans in project pentaho-kettle by pentaho.

From the class RepositoryExporter, the method convertFromFileRepository:

private void convertFromFileRepository(JobMeta jobMeta) {
    if (repository instanceof KettleFileRepository) {
        KettleFileRepository fileRep = (KettleFileRepository) repository;
        // The id of the job is the filename.
        // Setting the filename also sets internal variables needed to load the trans/job referenced.
        // 
        String jobMetaFilename = fileRep.calcFilename(jobMeta.getObjectId());
        jobMeta.setFilename(jobMetaFilename);
        for (JobEntryCopy copy : jobMeta.getJobCopies()) {
            JobEntryInterface entry = copy.getEntry();
            if (entry instanceof JobEntryTrans) {
                // Convert to a name-based (repository) reference.
                // 
                JobEntryTrans trans = (JobEntryTrans) entry;
                if (trans.getSpecificationMethod() == ObjectLocationSpecificationMethod.FILENAME) {
                    try {
                        TransMeta meta = trans.getTransMeta(repository, repository.getMetaStore(), jobMeta);
                        FileObject fileObject = KettleVFS.getFileObject(meta.getFilename());
                        trans.setSpecificationMethod(ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
                        trans.setFileName(null);
                        trans.setTransname(meta.getName());
                        trans.setDirectory(Const.NVL(calcRepositoryDirectory(fileRep, fileObject), "/"));
                    } catch (Exception e) {
                        log.logError(BaseMessages.getString(PKG, "Repository.Exporter.Log.UnableToLoadJobTrans", trans.getName()), e);
                    }
                }
            }
            if (entry instanceof JobEntryJob) {
                // Convert to a name-based (repository) reference.
                // 
                JobEntryJob jobEntryJob = (JobEntryJob) entry;
                if (jobEntryJob.getSpecificationMethod() == ObjectLocationSpecificationMethod.FILENAME) {
                    try {
                        JobMeta meta = jobEntryJob.getJobMeta(repository, repository.getMetaStore(), jobMeta);
                        FileObject fileObject = KettleVFS.getFileObject(meta.getFilename());
                        jobEntryJob.setSpecificationMethod(ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
                        jobEntryJob.setFileName(null);
                        jobEntryJob.setJobName(meta.getName());
                        jobEntryJob.setDirectory(Const.NVL(calcRepositoryDirectory(fileRep, fileObject), "/"));
                    } catch (Exception e) {
                        log.logError(BaseMessages.getString(PKG, "Repository.Exporter.Log.UnableToLoadJobJob", jobEntryJob.getName()), e);
                    }
                }
            }
        }
    }
}
Also used : JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobMeta(org.pentaho.di.job.JobMeta) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) JobEntryTrans(org.pentaho.di.job.entries.trans.JobEntryTrans) TransMeta(org.pentaho.di.trans.TransMeta) FileObject(org.apache.commons.vfs2.FileObject) JobEntryJob(org.pentaho.di.job.entries.job.JobEntryJob) KettleFileRepository(org.pentaho.di.repository.filerep.KettleFileRepository) KettleException(org.pentaho.di.core.exception.KettleException) FileSystemException(org.apache.commons.vfs2.FileSystemException) IOException(java.io.IOException) UnsupportedEncodingException(java.io.UnsupportedEncodingException)
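
calcRepositoryDirectory is not shown on this page. The pure-string helper below is only a hypothetical illustration of the idea behind it: the repository directory of the referenced file is its parent path taken relative to the file repository's base directory, with "/" as the fallback (note the Const.NVL(..., "/") above). The method name and signature are invented for illustration.

// Hypothetical illustration, not the Kettle implementation of calcRepositoryDirectory.
static String relativeRepositoryDirectory(String baseDirectory, String parentDirectory) {
    if (parentDirectory == null || !parentDirectory.startsWith(baseDirectory)) {
        // Outside the repository: return null so a caller can fall back to "/" (as with Const.NVL above).
        return null;
    }
    String relative = parentDirectory.substring(baseDirectory.length()).replace('\\', '/');
    return relative.isEmpty() ? "/" : relative;
}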

Example 10 with JobEntryTrans

Use of org.pentaho.di.job.entries.trans.JobEntryTrans in project pentaho-kettle by pentaho.

From the class Job, the method execute:

/**
 * Execute a job entry recursively and move to the next job entry automatically.<br>
 * Uses a back-tracking algorithm.<br>
 *
 * @param nr           the entry number, incremented by one for every recursive call
 * @param prev_result  the result of the previously executed job entry (may be null, in which case an empty Result is used)
 * @param jobEntryCopy the job entry copy to execute
 * @param previous     the previously executed job entry copy
 * @param reason       the comment describing why this entry is being executed (e.g. followed an unconditional, success or failure hop)
 * @return the Result of this job entry and of the entries executed after it
 * @throws KettleException in case of an unexpected error during execution
 */
private Result execute(final int nr, Result prev_result, final JobEntryCopy jobEntryCopy, JobEntryCopy previous, String reason) throws KettleException {
    Result res = null;
    if (stopped.get()) {
        res = new Result(nr);
        res.stopped = true;
        return res;
    }
    // If we didn't get a previous result, create one; otherwise, copy its content...
    // 
    final Result newResult;
    Result prevResult = null;
    if (prev_result != null) {
        prevResult = prev_result.clone();
    } else {
        prevResult = new Result();
    }
    JobExecutionExtension extension = new JobExecutionExtension(this, prevResult, jobEntryCopy, true);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobBeforeJobEntryExecution.id, extension);
    jobMeta.disposeEmbeddedMetastoreProvider();
    if (jobMeta.getMetastoreLocatorOsgi() != null) {
        jobMeta.setEmbeddedMetastoreProviderKey(jobMeta.getMetastoreLocatorOsgi().setEmbeddedMetastore(jobMeta.getEmbeddedMetaStore()));
    }
    if (extension.result != null) {
        prevResult = extension.result;
    }
    if (!extension.executeEntry) {
        newResult = prevResult;
    } else {
        if (log.isDetailed()) {
            log.logDetailed("exec(" + nr + ", " + (prev_result != null ? prev_result.getNrErrors() : 0) + ", " + (jobEntryCopy != null ? jobEntryCopy.toString() : "null") + ")");
        }
        // Which entry is next?
        JobEntryInterface jobEntryInterface = jobEntryCopy.getEntry();
        jobEntryInterface.getLogChannel().setLogLevel(logLevel);
        // Track the fact that we are going to launch the next job entry...
        JobEntryResult jerBefore = new JobEntryResult(null, null, BaseMessages.getString(PKG, "Job.Comment.JobStarted"), reason, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename()));
        jobTracker.addJobTracker(new JobTracker(jobMeta, jerBefore));
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(jobEntryInterface.getClass().getClassLoader());
        // Execute this entry...
        JobEntryInterface cloneJei = (JobEntryInterface) jobEntryInterface.clone();
        ((VariableSpace) cloneJei).copyVariablesFrom(this);
        cloneJei.setRepository(rep);
        if (rep != null) {
            cloneJei.setMetaStore(rep.getMetaStore());
        }
        cloneJei.setParentJob(this);
        cloneJei.setParentJobMeta(this.getJobMeta());
        final long start = System.currentTimeMillis();
        cloneJei.getLogChannel().logDetailed("Starting job entry");
        for (JobEntryListener jobEntryListener : jobEntryListeners) {
            jobEntryListener.beforeExecution(this, jobEntryCopy, cloneJei);
        }
        if (interactive) {
            if (jobEntryCopy.isTransformation()) {
                getActiveJobEntryTransformations().put(jobEntryCopy, (JobEntryTrans) cloneJei);
            }
            if (jobEntryCopy.isJob()) {
                getActiveJobEntryJobs().put(jobEntryCopy, (JobEntryJob) cloneJei);
            }
        }
        log.snap(Metrics.METRIC_JOBENTRY_START, cloneJei.toString());
        newResult = cloneJei.execute(prevResult, nr);
        log.snap(Metrics.METRIC_JOBENTRY_STOP, cloneJei.toString());
        final long end = System.currentTimeMillis();
        if (interactive) {
            if (jobEntryCopy.isTransformation()) {
                getActiveJobEntryTransformations().remove(jobEntryCopy);
            }
            if (jobEntryCopy.isJob()) {
                getActiveJobEntryJobs().remove(jobEntryCopy);
            }
        }
        if (cloneJei instanceof JobEntryTrans) {
            String throughput = newResult.getReadWriteThroughput((int) ((end - start) / 1000));
            if (throughput != null) {
                log.logMinimal(throughput);
            }
        }
        for (JobEntryListener jobEntryListener : jobEntryListeners) {
            jobEntryListener.afterExecution(this, jobEntryCopy, cloneJei, newResult);
        }
        Thread.currentThread().setContextClassLoader(cl);
        addErrors((int) newResult.getNrErrors());
        // Also capture the logging text after the execution...
        // 
        LoggingBuffer loggingBuffer = KettleLogStore.getAppender();
        StringBuffer logTextBuffer = loggingBuffer.getBuffer(cloneJei.getLogChannel().getLogChannelId(), false);
        newResult.setLogText(logTextBuffer.toString() + newResult.getLogText());
        // Save this result as well...
        // 
        JobEntryResult jerAfter = new JobEntryResult(newResult, cloneJei.getLogChannel().getLogChannelId(), BaseMessages.getString(PKG, "Job.Comment.JobFinished"), null, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename()));
        jobTracker.addJobTracker(new JobTracker(jobMeta, jerAfter));
        synchronized (jobEntryResults) {
            jobEntryResults.add(jerAfter);
            // Limit the number of job entry results kept in memory.
            if (maxJobEntriesLogged > 0) {
                while (jobEntryResults.size() > maxJobEntriesLogged) {
                    // Remove the oldest.
                    jobEntryResults.removeFirst();
                }
            }
        }
    }
    extension = new JobExecutionExtension(this, prevResult, jobEntryCopy, extension.executeEntry);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobAfterJobEntryExecution.id, extension);
    // Try all next job entries.
    // 
    // Keep track of all the threads we fired in case of parallel execution...
    // Keep track of the results of these executions too.
    // 
    final List<Thread> threads = new ArrayList<Thread>();
    // The next two collections are modified concurrently, so thread-safe implementations are used here.
    final Queue<Result> threadResults = new ConcurrentLinkedQueue<Result>();
    final Queue<KettleException> threadExceptions = new ConcurrentLinkedQueue<KettleException>();
    final List<JobEntryCopy> threadEntries = new ArrayList<JobEntryCopy>();
    // Launch only those where the hop indicates true or false
    // 
    int nrNext = jobMeta.findNrNextJobEntries(jobEntryCopy);
    for (int i = 0; i < nrNext && !isStopped(); i++) {
        // The next entry is...
        final JobEntryCopy nextEntry = jobMeta.findNextJobEntry(jobEntryCopy, i);
        // See if we need to execute this...
        final JobHopMeta hi = jobMeta.findJobHop(jobEntryCopy, nextEntry);
        // The next comment...
        final String nextComment;
        if (hi.isUnconditional()) {
            nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedUnconditional");
        } else {
            if (newResult.getResult()) {
                nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedSuccess");
            } else {
                nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedFailure");
            }
        }
        // Follow this hop if it is unconditional, or if the entry evaluates and the hop's
        // evaluation (success/failure) matches the result we just obtained.
        if (hi.isUnconditional() || (jobEntryCopy.evaluates() && (!(hi.getEvaluation() ^ newResult.getResult())))) {
            // Start this next step!
            if (log.isBasic()) {
                log.logBasic(BaseMessages.getString(PKG, "Job.Log.StartingEntry", nextEntry.getName()));
            }
            // Reset the error count before executing the next entry, unless the entry (e.g. an evaluation) needs to keep it.
            if (nextEntry.resetErrorsBeforeExecution()) {
                newResult.setNrErrors(0);
            }
            // Launch the next entry in its own thread when this entry is set to run its
            // outgoing hops in parallel; otherwise execute it synchronously below.
            if (jobEntryCopy.isLaunchingInParallel()) {
                threadEntries.add(nextEntry);
                Runnable runnable = new Runnable() {

                    public void run() {
                        try {
                            Result threadResult = execute(nr + 1, newResult, nextEntry, jobEntryCopy, nextComment);
                            threadResults.add(threadResult);
                        } catch (Throwable e) {
                            log.logError(Const.getStackTracker(e));
                            threadExceptions.add(new KettleException(BaseMessages.getString(PKG, "Job.Log.UnexpectedError", nextEntry.toString()), e));
                            Result threadResult = new Result();
                            threadResult.setResult(false);
                            threadResult.setNrErrors(1L);
                            threadResults.add(threadResult);
                        }
                    }
                };
                Thread thread = new Thread(runnable);
                threads.add(thread);
                thread.start();
                if (log.isBasic()) {
                    log.logBasic(BaseMessages.getString(PKG, "Job.Log.LaunchedJobEntryInParallel", nextEntry.getName()));
                }
            } else {
                try {
                    // Same as before: blocks until it's done
                    // 
                    res = execute(nr + 1, newResult, nextEntry, jobEntryCopy, nextComment);
                } catch (Throwable e) {
                    log.logError(Const.getStackTracker(e));
                    throw new KettleException(BaseMessages.getString(PKG, "Job.Log.UnexpectedError", nextEntry.toString()), e);
                }
                if (log.isBasic()) {
                    log.logBasic(BaseMessages.getString(PKG, "Job.Log.FinishedJobEntry", nextEntry.getName(), res.getResult() + ""));
                }
            }
        }
    }
    // If the entries were launched in parallel, wait for all of those threads to finish.
    if (jobEntryCopy.isLaunchingInParallel()) {
        for (int i = 0; i < threads.size(); i++) {
            Thread thread = threads.get(i);
            JobEntryCopy nextEntry = threadEntries.get(i);
            try {
                thread.join();
            } catch (InterruptedException e) {
                log.logError(jobMeta.toString(), BaseMessages.getString(PKG, "Job.Log.UnexpectedErrorWhileWaitingForJobEntry", nextEntry.getName()));
                threadExceptions.add(new KettleException(BaseMessages.getString(PKG, "Job.Log.UnexpectedErrorWhileWaitingForJobEntry", nextEntry.getName()), e));
            }
        }
    // if(log.isBasic()) log.logBasic(BaseMessages.getString(PKG,
    // "Job.Log.FinishedJobEntry",startpoint.getName(),res.getResult()+""));
    }
    // If no next job entry was executed, return the previous result.
    if (res == null) {
        res = prevResult;
    }
    // If any parallel execution threw an exception, mark the result as failed and log the errors.
    if (threadExceptions.size() > 0) {
        res.setResult(false);
        res.setNrErrors(threadExceptions.size());
        for (KettleException e : threadExceptions) {
            log.logError(jobMeta.toString(), e.getMessage(), e);
        }
        // Rethrow the first exception to the caller.
        throw threadExceptions.poll();
    }
    // Merge the results of the parallel executions into the overall result.
    for (Result threadResult : threadResults) {
        res.add(threadResult);
    }
    // Any errors mean the overall result is a failure.
    if (res.getNrErrors() > 0) {
        res.setResult(false);
    }
    return res;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) ArrayList(java.util.ArrayList) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) WebResult(org.pentaho.di.www.WebResult) Result(org.pentaho.di.core.Result) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryTrans(org.pentaho.di.job.entries.trans.JobEntryTrans) VariableSpace(org.pentaho.di.core.variables.VariableSpace) JobTracker(org.pentaho.di.core.gui.JobTracker) LoggingBuffer(org.pentaho.di.core.logging.LoggingBuffer) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue)
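
The execute() method notifies registered JobEntryListener instances before and after each entry runs (see the jobEntryListeners loops above). Below is a hedged sketch of a simple logging listener; the method signatures mirror those call sites, and registration via Job#addJobEntryListener is assumed to be available.

// Hedged sketch: signatures mirror the beforeExecution/afterExecution call sites in execute().
job.addJobEntryListener(new JobEntryListener() {

    @Override
    public void beforeExecution(Job job, JobEntryCopy jobEntryCopy, JobEntryInterface jobEntryInterface) {
        job.getLogChannel().logBasic("Starting entry: " + jobEntryCopy.getName());
    }

    @Override
    public void afterExecution(Job job, JobEntryCopy jobEntryCopy, JobEntryInterface jobEntryInterface, Result result) {
        job.getLogChannel().logBasic("Finished entry " + jobEntryCopy.getName() + " with result " + result.getResult());
    }
});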

Aggregations

JobEntryTrans (org.pentaho.di.job.entries.trans.JobEntryTrans) 17
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy) 14
JobMeta (org.pentaho.di.job.JobMeta) 10
Test (org.junit.Test) 7
KettleException (org.pentaho.di.core.exception.KettleException) 7
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint) 5
JobEntryInterface (org.pentaho.di.job.entry.JobEntryInterface) 5
DefaultRunConfiguration (org.pentaho.di.engine.configuration.impl.pentaho.DefaultRunConfiguration) 4
TransMeta (org.pentaho.di.trans.TransMeta) 4
ArrayList (java.util.ArrayList) 3
VisibleForTesting (com.google.common.annotations.VisibleForTesting) 2
HashMap (java.util.HashMap) 2
List (java.util.List) 2
Map (java.util.Map) 2
FileObject (org.apache.commons.vfs2.FileObject) 2
ObjectLocationSpecificationMethod (org.pentaho.di.core.ObjectLocationSpecificationMethod) 2
EmbeddedMetaStore (org.pentaho.di.core.attributes.metastore.EmbeddedMetaStore) 2
Utils (org.pentaho.di.core.util.Utils) 2
JobEntryJob (org.pentaho.di.job.entries.job.JobEntryJob) 2
IOException (java.io.IOException) 1