
Example 36 with VariableSpace

use of org.pentaho.di.core.variables.VariableSpace in project pentaho-kettle by pentaho.

the class KettleFileRepository method readDatabases.

/**
 * Read all the databases from the repository and insert them into the HasDatabasesInterface object, optionally overwriting existing ones
 *
 * @param transMeta
 *          The transformation (or other database container) to load the connections into.
 * @param overWriteShared
 *          if an object with the same name already exists, overwrite it
 * @throws KettleException
 */
public void readDatabases(HasDatabasesInterface transMeta, boolean overWriteShared) throws KettleException {
    try {
        ObjectId[] dbids = getDatabaseIDs(false);
        for (int i = 0; i < dbids.length; i++) {
            // reads last version
            DatabaseMeta databaseMeta = loadDatabaseMeta(dbids[i], null);
            if (transMeta instanceof VariableSpace) {
                databaseMeta.shareVariablesWith((VariableSpace) transMeta);
            }
            // Check if there already is one in the transformation
            DatabaseMeta check = transMeta.findDatabase(databaseMeta.getName());
            if (check == null || overWriteShared) {
                // Add the connection, or replace it when overwriting shared objects was requested.
                if (databaseMeta.getName() != null) {
                    transMeta.addOrReplaceDatabase(databaseMeta);
                    if (!overWriteShared) {
                        databaseMeta.setChanged(false);
                    }
                }
            }
        }
    } catch (KettleException e) {
        throw e;
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) StringObjectId(org.pentaho.di.repository.StringObjectId) ObjectId(org.pentaho.di.repository.ObjectId) VariableSpace(org.pentaho.di.core.variables.VariableSpace) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta)
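
Below is a minimal, hedged sketch of how readDatabases might be called from client code. It assumes an already-connected KettleFileRepository and pulls the shared connections into a fresh TransMeta (which implements both HasDatabasesInterface and VariableSpace); the class and method names in the sketch are illustrative and not part of the example above.

import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.repository.filerep.KettleFileRepository;
import org.pentaho.di.trans.TransMeta;

public class SharedDatabasesSketch {

    public static void listRepositoryDatabases(KettleFileRepository repository) throws KettleException {
        // TransMeta implements HasDatabasesInterface and VariableSpace, so it can receive the connections.
        TransMeta transMeta = new TransMeta();
        // false: only add connections that are missing, keep existing ones untouched.
        repository.readDatabases(transMeta, false);
        for (DatabaseMeta databaseMeta : transMeta.getDatabases()) {
            System.out.println("Connection available to the transformation: " + databaseMeta.getName());
        }
    }
}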

Example 37 with VariableSpace

use of org.pentaho.di.core.variables.VariableSpace in project pentaho-kettle by pentaho.

the class TransExecutionConfiguration method getUsedVariables.

public void getUsedVariables(TransMeta transMeta) {
    Properties sp = new Properties();
    VariableSpace space = Variables.getADefaultVariableSpace();
    String[] keys = space.listVariables();
    for (int i = 0; i < keys.length; i++) {
        sp.put(keys[i], space.getVariable(keys[i]));
    }
    List<String> vars = transMeta.getUsedVariables();
    if (vars != null && vars.size() > 0) {
        HashMap<String, String> newVariables = new HashMap<String, String>();
        for (int i = 0; i < vars.size(); i++) {
            String varname = vars.get(i);
            if (!varname.startsWith(Const.INTERNAL_VARIABLE_PREFIX)) {
                newVariables.put(varname, Const.NVL(variables.get(varname), sp.getProperty(varname, "")));
            }
        }
        // variables.clear();
        variables.putAll(newVariables);
    }
    // Also capture the internal job variables that are set on the transformation.
    for (String variableName : Const.INTERNAL_JOB_VARIABLES) {
        String value = transMeta.getVariable(variableName);
        if (!Utils.isEmpty(value)) {
            variables.put(variableName, value);
        }
    }
}
Also used : HashMap(java.util.HashMap) VariableSpace(org.pentaho.di.core.variables.VariableSpace) Properties(java.util.Properties)
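
The VariableSpace calls used above can be tried in isolation. The following standalone sketch creates the default variable space (seeded from the JVM system properties), lists its variables, and resolves a ${} expression; the class name and the java.io.tmpdir property are only there for illustration.

import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;

public class VariableSpaceSketch {

    public static void main(String[] args) {
        VariableSpace space = Variables.getADefaultVariableSpace();
        // List every variable known to the default space.
        for (String key : space.listVariables()) {
            System.out.println(key + " = " + space.getVariable(key));
        }
        // environmentSubstitute() expands ${name} tokens against the space.
        System.out.println(space.environmentSubstitute("Temp dir: ${java.io.tmpdir}"));
    }
}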

Example 38 with VariableSpace

use of org.pentaho.di.core.variables.VariableSpace in project pentaho-kettle by pentaho.

the class Job method execute.

/**
 * Execute a job entry recursively and move to the next job entry automatically.<br>
 * Uses a back-tracking algorithm.<br>
 *
 * @param nr
 *          the job entry number, incremented with each recursive call
 * @param prev_result
 *          the result of the previously executed job entry, or null for the first entry
 * @param jobEntryCopy
 *          the job entry copy to execute
 * @param previous
 *          the job entry copy that was executed before this one
 * @param reason
 *          the reason this entry is being executed, used for logging and job tracking
 * @return the result of executing this entry and the entries that follow it
 * @throws KettleException
 */
private Result execute(final int nr, Result prev_result, final JobEntryCopy jobEntryCopy, JobEntryCopy previous, String reason) throws KettleException {
    Result res = null;
    if (stopped.get()) {
        res = new Result(nr);
        res.stopped = true;
        return res;
    }
    // if we didn't have a previous result, create one, otherwise, copy the content...
    // 
    final Result newResult;
    Result prevResult = null;
    if (prev_result != null) {
        prevResult = prev_result.clone();
    } else {
        prevResult = new Result();
    }
    JobExecutionExtension extension = new JobExecutionExtension(this, prevResult, jobEntryCopy, true);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobBeforeJobEntryExecution.id, extension);
    jobMeta.disposeEmbeddedMetastoreProvider();
    if (jobMeta.getMetastoreLocatorOsgi() != null) {
        jobMeta.setEmbeddedMetastoreProviderKey(jobMeta.getMetastoreLocatorOsgi().setEmbeddedMetastore(jobMeta.getEmbeddedMetaStore()));
    }
    if (extension.result != null) {
        prevResult = extension.result;
    }
    if (!extension.executeEntry) {
        newResult = prevResult;
    } else {
        if (log.isDetailed()) {
            log.logDetailed("exec(" + nr + ", " + (prev_result != null ? prev_result.getNrErrors() : 0) + ", " + (jobEntryCopy != null ? jobEntryCopy.toString() : "null") + ")");
        }
        // Which entry is next?
        JobEntryInterface jobEntryInterface = jobEntryCopy.getEntry();
        jobEntryInterface.getLogChannel().setLogLevel(logLevel);
        // Track the fact that we are going to launch the next job entry...
        JobEntryResult jerBefore = new JobEntryResult(null, null, BaseMessages.getString(PKG, "Job.Comment.JobStarted"), reason, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename()));
        jobTracker.addJobTracker(new JobTracker(jobMeta, jerBefore));
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(jobEntryInterface.getClass().getClassLoader());
        // Execute this entry...
        JobEntryInterface cloneJei = (JobEntryInterface) jobEntryInterface.clone();
        ((VariableSpace) cloneJei).copyVariablesFrom(this);
        cloneJei.setRepository(rep);
        if (rep != null) {
            cloneJei.setMetaStore(rep.getMetaStore());
        }
        cloneJei.setParentJob(this);
        cloneJei.setParentJobMeta(this.getJobMeta());
        final long start = System.currentTimeMillis();
        cloneJei.getLogChannel().logDetailed("Starting job entry");
        for (JobEntryListener jobEntryListener : jobEntryListeners) {
            jobEntryListener.beforeExecution(this, jobEntryCopy, cloneJei);
        }
        if (interactive) {
            if (jobEntryCopy.isTransformation()) {
                getActiveJobEntryTransformations().put(jobEntryCopy, (JobEntryTrans) cloneJei);
            }
            if (jobEntryCopy.isJob()) {
                getActiveJobEntryJobs().put(jobEntryCopy, (JobEntryJob) cloneJei);
            }
        }
        log.snap(Metrics.METRIC_JOBENTRY_START, cloneJei.toString());
        newResult = cloneJei.execute(prevResult, nr);
        log.snap(Metrics.METRIC_JOBENTRY_STOP, cloneJei.toString());
        final long end = System.currentTimeMillis();
        if (interactive) {
            if (jobEntryCopy.isTransformation()) {
                getActiveJobEntryTransformations().remove(jobEntryCopy);
            }
            if (jobEntryCopy.isJob()) {
                getActiveJobEntryJobs().remove(jobEntryCopy);
            }
        }
        if (cloneJei instanceof JobEntryTrans) {
            String throughput = newResult.getReadWriteThroughput((int) ((end - start) / 1000));
            if (throughput != null) {
                log.logMinimal(throughput);
            }
        }
        for (JobEntryListener jobEntryListener : jobEntryListeners) {
            jobEntryListener.afterExecution(this, jobEntryCopy, cloneJei, newResult);
        }
        Thread.currentThread().setContextClassLoader(cl);
        addErrors((int) newResult.getNrErrors());
        // Also capture the logging text after the execution...
        // 
        LoggingBuffer loggingBuffer = KettleLogStore.getAppender();
        StringBuffer logTextBuffer = loggingBuffer.getBuffer(cloneJei.getLogChannel().getLogChannelId(), false);
        newResult.setLogText(logTextBuffer.toString() + newResult.getLogText());
        // Save this result as well...
        // 
        JobEntryResult jerAfter = new JobEntryResult(newResult, cloneJei.getLogChannel().getLogChannelId(), BaseMessages.getString(PKG, "Job.Comment.JobFinished"), null, jobEntryCopy.getName(), jobEntryCopy.getNr(), environmentSubstitute(jobEntryCopy.getEntry().getFilename()));
        jobTracker.addJobTracker(new JobTracker(jobMeta, jerAfter));
        synchronized (jobEntryResults) {
            jobEntryResults.add(jerAfter);
            // Keep the job entry result list within the configured maximum size.
            if (maxJobEntriesLogged > 0) {
                while (jobEntryResults.size() > maxJobEntriesLogged) {
                    // Remove the oldest.
                    jobEntryResults.removeFirst();
                }
            }
        }
    }
    extension = new JobExecutionExtension(this, prevResult, jobEntryCopy, extension.executeEntry);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobAfterJobEntryExecution.id, extension);
    // Try all next job entries.
    // 
    // Keep track of all the threads we fired in case of parallel execution...
    // Keep track of the results of these executions too.
    // 
    final List<Thread> threads = new ArrayList<Thread>();
    // The next two collections are modified concurrently, so thread-safe queues are used here.
    final Queue<Result> threadResults = new ConcurrentLinkedQueue<Result>();
    final Queue<KettleException> threadExceptions = new ConcurrentLinkedQueue<KettleException>();
    final List<JobEntryCopy> threadEntries = new ArrayList<JobEntryCopy>();
    // Launch only those where the hop indicates true or false
    // 
    int nrNext = jobMeta.findNrNextJobEntries(jobEntryCopy);
    for (int i = 0; i < nrNext && !isStopped(); i++) {
        // The next entry is...
        final JobEntryCopy nextEntry = jobMeta.findNextJobEntry(jobEntryCopy, i);
        // See if we need to execute this...
        final JobHopMeta hi = jobMeta.findJobHop(jobEntryCopy, nextEntry);
        // The next comment...
        final String nextComment;
        if (hi.isUnconditional()) {
            nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedUnconditional");
        } else {
            if (newResult.getResult()) {
                nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedSuccess");
            } else {
                nextComment = BaseMessages.getString(PKG, "Job.Comment.FollowedFailure");
            }
        }
        // Follow the hop when it is unconditional, or when the entry evaluates and the hop's evaluation matches the result.
        if (hi.isUnconditional() || (jobEntryCopy.evaluates() && (!(hi.getEvaluation() ^ newResult.getResult())))) {
            // Start this next step!
            if (log.isBasic()) {
                log.logBasic(BaseMessages.getString(PKG, "Job.Log.StartingEntry", nextEntry.getName()));
            }
            // Only reset the error count when the next entry asks for it; evaluations, for example, keep the errors.
            if (nextEntry.resetErrorsBeforeExecution()) {
                newResult.setNrErrors(0);
            }
            // Launch the next entry in a separate thread when parallel execution is enabled.
            if (jobEntryCopy.isLaunchingInParallel()) {
                threadEntries.add(nextEntry);
                Runnable runnable = new Runnable() {

                    public void run() {
                        try {
                            Result threadResult = execute(nr + 1, newResult, nextEntry, jobEntryCopy, nextComment);
                            threadResults.add(threadResult);
                        } catch (Throwable e) {
                            log.logError(Const.getStackTracker(e));
                            threadExceptions.add(new KettleException(BaseMessages.getString(PKG, "Job.Log.UnexpectedError", nextEntry.toString()), e));
                            Result threadResult = new Result();
                            threadResult.setResult(false);
                            threadResult.setNrErrors(1L);
                            threadResults.add(threadResult);
                        }
                    }
                };
                Thread thread = new Thread(runnable);
                threads.add(thread);
                thread.start();
                if (log.isBasic()) {
                    log.logBasic(BaseMessages.getString(PKG, "Job.Log.LaunchedJobEntryInParallel", nextEntry.getName()));
                }
            } else {
                try {
                    // Same as before: blocks until it's done
                    // 
                    res = execute(nr + 1, newResult, nextEntry, jobEntryCopy, nextComment);
                } catch (Throwable e) {
                    log.logError(Const.getStackTracker(e));
                    throw new KettleException(BaseMessages.getString(PKG, "Job.Log.UnexpectedError", nextEntry.toString()), e);
                }
                if (log.isBasic()) {
                    log.logBasic(BaseMessages.getString(PKG, "Job.Log.FinishedJobEntry", nextEntry.getName(), res.getResult() + ""));
                }
            }
        }
    }
    // Wait for all parallel job entry threads to finish.
    if (jobEntryCopy.isLaunchingInParallel()) {
        for (int i = 0; i < threads.size(); i++) {
            Thread thread = threads.get(i);
            JobEntryCopy nextEntry = threadEntries.get(i);
            try {
                thread.join();
            } catch (InterruptedException e) {
                log.logError(jobMeta.toString(), BaseMessages.getString(PKG, "Job.Log.UnexpectedErrorWhileWaitingForJobEntry", nextEntry.getName()));
                threadExceptions.add(new KettleException(BaseMessages.getString(PKG, "Job.Log.UnexpectedErrorWhileWaitingForJobEntry", nextEntry.getName()), e));
            }
        }
    // if(log.isBasic()) log.logBasic(BaseMessages.getString(PKG,
    // "Job.Log.FinishedJobEntry",startpoint.getName(),res.getResult()+""));
    }
    // If no next entry was executed, return the previous result.
    if (res == null) {
        res = prevResult;
    }
    // If any parallel thread threw an exception, log them all and fail.
    if (threadExceptions.size() > 0) {
        res.setResult(false);
        res.setNrErrors(threadExceptions.size());
        for (KettleException e : threadExceptions) {
            log.logError(jobMeta.toString(), e.getMessage(), e);
        }
        // Rethrow the first exception that was collected.
        throw threadExceptions.poll();
    }
    // Merge the results of the parallel executions into the overall result.
    for (Result threadResult : threadResults) {
        res.add(threadResult);
    }
    // Any errors mean the overall result is a failure.
    if (res.getNrErrors() > 0) {
        res.setResult(false);
    }
    return res;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) ArrayList(java.util.ArrayList) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) WebResult(org.pentaho.di.www.WebResult) Result(org.pentaho.di.core.Result) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryTrans(org.pentaho.di.job.entries.trans.JobEntryTrans) VariableSpace(org.pentaho.di.core.variables.VariableSpace) JobTracker(org.pentaho.di.core.gui.JobTracker) LoggingBuffer(org.pentaho.di.core.logging.LoggingBuffer) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue)
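
The key VariableSpace moment above is ((VariableSpace) cloneJei).copyVariablesFrom(this): the cloned job entry gets its own scope that inherits the job's variables. A minimal sketch of that hand-off, using plain Variables objects and made-up variable names instead of a Job and a cloned job entry:

import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;

public class CopyVariablesSketch {

    public static void main(String[] args) {
        VariableSpace parent = new Variables();
        parent.setVariable("ENVIRONMENT", "production");

        VariableSpace child = new Variables();
        // Same call the Job makes on the cloned job entry.
        child.copyVariablesFrom(parent);
        // A local override does not touch the parent scope.
        child.setVariable("ENVIRONMENT", "test");

        System.out.println(parent.getVariable("ENVIRONMENT")); // production
        System.out.println(child.getVariable("ENVIRONMENT"));  // test
    }
}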

Example 39 with VariableSpace

use of org.pentaho.di.core.variables.VariableSpace in project pentaho-kettle by pentaho.

the class JobExecutionConfiguration method getUsedVariables.

public void getUsedVariables(JobMeta jobMeta) {
    Properties sp = new Properties();
    VariableSpace space = Variables.getADefaultVariableSpace();
    String[] keys = space.listVariables();
    for (int i = 0; i < keys.length; i++) {
        sp.put(keys[i], space.getVariable(keys[i]));
    }
    List<String> vars = jobMeta.getUsedVariables();
    if (vars != null && vars.size() > 0) {
        HashMap<String, String> newVariables = new HashMap<String, String>();
        for (int i = 0; i < vars.size(); i++) {
            String varname = vars.get(i);
            if (!varname.startsWith(Const.INTERNAL_VARIABLE_PREFIX)) {
                // add a variable only if it's defined within this configuration or it is a system property
                if (variables.containsKey(varname) || sp.getProperty(varname) != null) {
                    newVariables.put(varname, Const.NVL(variables.get(varname), sp.getProperty(varname, "")));
                }
            }
        }
        // variables.clear();
        variables.putAll(newVariables);
    }
}
Also used : HashMap(java.util.HashMap) VariableSpace(org.pentaho.di.core.variables.VariableSpace) Properties(java.util.Properties)
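
The merge rule in the loop above is: a value already present in the execution configuration wins, otherwise the system property is used, otherwise an empty string. A small sketch of that rule with Const.NVL, using made-up variable names and values:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import org.pentaho.di.core.Const;

public class VariableMergeSketch {

    public static void main(String[] args) {
        Map<String, String> configured = new HashMap<String, String>();
        configured.put("inputDir", "/data/in");

        Properties systemProps = new Properties();
        systemProps.setProperty("outputDir", "/data/out");

        for (String name : new String[] { "inputDir", "outputDir", "missing" }) {
            // Const.NVL returns the first argument unless it is null, then falls back to the second.
            String value = Const.NVL(configured.get(name), systemProps.getProperty(name, ""));
            System.out.println(name + " = '" + value + "'");
        }
    }
}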

Example 40 with VariableSpace

use of org.pentaho.di.core.variables.VariableSpace in project pentaho-kettle by pentaho.

the class StepWithMappingMetaTest method loadMappingMetaTest.

@Test
@PrepareForTest(StepWithMappingMeta.class)
public void loadMappingMetaTest() throws Exception {
    String childParam = "childParam";
    String childValue = "childValue";
    String paramOverwrite = "paramOverwrite";
    String parentParam = "parentParam";
    String parentValue = "parentValue";
    String variablePath = "Internal.Entry.Current.Directory";
    String virtualDir = "/testFolder/CDA-91";
    String fileName = "testTrans.ktr";
    VariableSpace variables = new Variables();
    variables.setVariable(parentParam, parentValue);
    variables.setVariable(paramOverwrite, parentValue);
    StepMeta stepMeta = new StepMeta();
    TransMeta parentTransMeta = new TransMeta();
    stepMeta.setParentTransMeta(parentTransMeta);
    RepositoryDirectoryInterface repositoryDirectory = Mockito.mock(RepositoryDirectoryInterface.class);
    when(repositoryDirectory.toString()).thenReturn(virtualDir);
    stepMeta.getParentTransMeta().setRepositoryDirectory(repositoryDirectory);
    StepWithMappingMeta mappingMetaMock = mock(StepWithMappingMeta.class);
    when(mappingMetaMock.getSpecificationMethod()).thenReturn(ObjectLocationSpecificationMethod.FILENAME);
    when(mappingMetaMock.getFileName()).thenReturn("${" + variablePath + "}/" + fileName);
    when(mappingMetaMock.getParentStepMeta()).thenReturn(stepMeta);
    Repository rep = mock(Repository.class);
    Mockito.doReturn(Mockito.mock(RepositoryDirectoryInterface.class)).when(rep).findDirectory(anyString());
    TransMeta child = new TransMeta();
    child.setVariable(childParam, childValue);
    child.setVariable(paramOverwrite, childValue);
    Mockito.doReturn(child).when(rep).loadTransformation(anyString(), any(), any(), anyBoolean(), any());
    TransMeta transMeta = StepWithMappingMeta.loadMappingMeta(mappingMetaMock, rep, null, variables, true);
    Assert.assertNotNull(transMeta);
    // When the child parameter also exists in the parent parameters, the parent value overwrites the child value.
    Assert.assertEquals(parentValue, transMeta.getVariable(paramOverwrite));
    // When the child parameter does not exist in the parent parameters, keep it.
    Assert.assertEquals(childValue, transMeta.getVariable(childParam));
    // All other parent parameters need to get copied into the child parameters  (when the 'Inherit all
    // variables from the transformation?' option is checked)
    Assert.assertEquals(parentValue, transMeta.getVariable(parentParam));
}
Also used : Variables(org.pentaho.di.core.variables.Variables) RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) Repository(org.pentaho.di.repository.Repository) VariableSpace(org.pentaho.di.core.variables.VariableSpace) Mockito.anyString(org.mockito.Mockito.anyString) StepMeta(org.pentaho.di.trans.step.StepMeta) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest)
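
The test builds the mapping file name as "${Internal.Entry.Current.Directory}/testTrans.ktr", which is resolved against the supplied VariableSpace before the transformation is looked up. A minimal sketch of just that substitution step, with the directory value taken from the test:

import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;

public class FileNameSubstitutionSketch {

    public static void main(String[] args) {
        VariableSpace space = new Variables();
        space.setVariable("Internal.Entry.Current.Directory", "/testFolder/CDA-91");

        String fileName = "${Internal.Entry.Current.Directory}/testTrans.ktr";
        // Prints "/testFolder/CDA-91/testTrans.ktr"
        System.out.println(space.environmentSubstitute(fileName));
    }
}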

Aggregations

VariableSpace (org.pentaho.di.core.variables.VariableSpace): 49
Test (org.junit.Test): 21
Variables (org.pentaho.di.core.variables.Variables): 14
KettleException (org.pentaho.di.core.exception.KettleException): 12
Repository (org.pentaho.di.repository.Repository): 10
TransMeta (org.pentaho.di.trans.TransMeta): 10
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 9
RepositoryDirectoryInterface (org.pentaho.di.repository.RepositoryDirectoryInterface): 8
StepMeta (org.pentaho.di.trans.step.StepMeta): 8
RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface): 7
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 6
IOException (java.io.IOException): 5
FileObject (org.apache.commons.vfs2.FileObject): 5
CurrentDirectoryResolver (org.pentaho.di.core.util.CurrentDirectoryResolver): 5
HashMap (java.util.HashMap): 4
Properties (java.util.Properties): 4
Matchers.anyString (org.mockito.Matchers.anyString): 4
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 4
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 4
ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface): 4