
Example 86 with SlaveServer

use of org.pentaho.di.cluster.SlaveServer in project pentaho-kettle by pentaho.

the class JobEntryJob method execute.

@Override
public Result execute(Result result, int nr) throws KettleException {
    result.setEntryNr(nr);
    LogChannelFileWriter logChannelFileWriter = null;
    LogLevel jobLogLevel = parentJob.getLogLevel();
    // Set Embedded NamedCluster MetaStore Provider Key so that it can be passed to VFS
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
        parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    if (setLogfile) {
        String realLogFilename = environmentSubstitute(getLogFilename());
        // if we do not have one, we must fail
        if (Utils.isEmpty(realLogFilename)) {
            logError(BaseMessages.getString(PKG, "JobJob.Exception.LogFilenameMissing"));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        // create parent folder?
        if (!createParentFolder(realLogFilename)) {
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        try {
            logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), KettleVFS.getFileObject(realLogFilename), setAppendLogfile);
            logChannelFileWriter.startLogging();
        } catch (KettleException e) {
            logError("Unable to open file appender for file [" + getLogFilename() + "] : " + e.toString());
            logError(Const.getStackTracker(e));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        jobLogLevel = logFileLevel;
    }
    try {
        // First load the job, outside of the loop...
        if (parentJob.getJobMeta() != null) {
            // reset the internal variables again.
            // Maybe we should split up the variables even more like in UNIX shells.
            // The internal variables need to be reset to be able use them properly
            // in 2 sequential sub jobs.
            parentJob.getJobMeta().setInternalKettleVariables();
        }
        // 
        switch(specificationMethod) {
            case REPOSITORY_BY_NAME:
                if (log.isDetailed()) {
                    logDetailed("Loading job from repository : [" + directory + " : " + environmentSubstitute(jobname) + "]");
                }
                break;
            case FILENAME:
                if (log.isDetailed()) {
                    logDetailed("Loading job from XML file : [" + environmentSubstitute(filename) + "]");
                }
                break;
            case REPOSITORY_BY_REFERENCE:
                if (log.isDetailed()) {
                    logDetailed("Loading job from repository by reference : [" + jobObjectId + "]");
                }
                break;
            default:
                break;
        }
        JobMeta jobMeta = getJobMeta(rep, this);
        // 
        if (jobMeta == null) {
            throw new KettleException("Unable to load the job: please specify the name and repository directory OR a filename");
        }
        verifyRecursiveExecution(parentJob, jobMeta);
        int iteration = 0;
        String[] args1 = arguments;
        // no arguments? Check the parent jobs arguments
        if (args1 == null || args1.length == 0) {
            args1 = parentJob.getArguments();
        }
        copyVariablesFrom(parentJob);
        setParentVariableSpace(parentJob);
        // 
        // For the moment only do variable translation at the start of a job, not
        // for every input row (if that would be switched on)
        // 
        String[] args = null;
        if (args1 != null) {
            args = new String[args1.length];
            for (int idx = 0; idx < args1.length; idx++) {
                args[idx] = environmentSubstitute(args1[idx]);
            }
        }
        RowMetaAndData resultRow = null;
        boolean first = true;
        List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>(result.getRows());
        while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) {
            first = false;
            // 
            if (execPerRow) {
                result.getRows().clear();
            }
            // PDI-18776: take the result row at the iteration index when rows is not empty; otherwise the result row stays null
            if (rows != null && execPerRow && !rows.isEmpty()) {
                // The loop can re-enter after the last row was processed: iteration == rows.size() means it was already handled, so break out of the loop
                if (iteration != rows.size()) {
                    resultRow = rows.get(iteration);
                } else {
                    break;
                }
            } else {
                resultRow = null;
            }
            NamedParams namedParam = new NamedParamsDefault();
            // 
            if (paramsFromPrevious) {
                String[] parentParameters = parentJob.listParameters();
                for (int idx = 0; idx < parentParameters.length; idx++) {
                    String par = parentParameters[idx];
                    String def = parentJob.getParameterDefault(par);
                    String val = parentJob.getParameterValue(par);
                    String des = parentJob.getParameterDescription(par);
                    namedParam.addParameterDefinition(par, def, des);
                    namedParam.setParameterValue(par, val);
                }
            }
            // 
            if (parameters != null) {
                for (int idx = 0; idx < parameters.length; idx++) {
                    if (!Utils.isEmpty(parameters[idx])) {
                        // 
                        if (Const.indexOfString(parameters[idx], namedParam.listParameters()) < 0) {
                            // We have a parameter
                            try {
                                namedParam.addParameterDefinition(parameters[idx], "", "Job entry runtime");
                            } catch (DuplicateParamException e) {
                                // Should never happen
                                // 
                                logError("Duplicate parameter definition for " + parameters[idx]);
                            }
                        }
                        if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                            namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                        } else {
                            // something filled in, in the field column...
                            // 
                            String value = "";
                            if (resultRow != null) {
                                value = resultRow.getString(parameterFieldNames[idx], "");
                            }
                            namedParam.setParameterValue(parameters[idx], value);
                        }
                    }
                }
            }
            Result oneResult = new Result();
            List<RowMetaAndData> sourceRows = null;
            if (execPerRow) {
                if (argFromPrevious) {
                    // Copy the input row to the (command line) arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Just pass a single row
                    List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
                    if (resultRow != null) {
                        newList.add(resultRow);
                    }
                    sourceRows = newList;
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            } else {
                if (argFromPrevious) {
                    // Only put the first Row on the arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Keep it as it was...
                    sourceRows = result.getRows();
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            }
            boolean doFallback = true;
            SlaveServer remoteSlaveServer = null;
            JobExecutionConfiguration executionConfiguration = new JobExecutionConfiguration();
            if (!Utils.isEmpty(runConfiguration)) {
                runConfiguration = environmentSubstitute(runConfiguration);
                log.logBasic(BaseMessages.getString(PKG, "JobJob.RunConfig.Message"), runConfiguration);
                executionConfiguration.setRunConfiguration(runConfiguration);
                try {
                    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] { executionConfiguration, parentJob.getJobMeta(), jobMeta, rep });
                    List<Object> items = Arrays.asList(runConfiguration, false);
                    try {
                        ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.RunConfigurationSelection.id, items);
                        if (waitingToFinish && (Boolean) items.get(IS_PENTAHO)) {
                            String jobName = parentJob.getJobMeta().getName();
                            String name = jobMeta.getName();
                            logBasic(BaseMessages.getString(PKG, "JobJob.Log.InvalidRunConfigurationCombination", jobName, name, jobName));
                        }
                    } catch (Exception ignored) {
                    // Ignored
                    }
                    if (!executionConfiguration.isExecutingLocally() && !executionConfiguration.isExecutingRemotely()) {
                        result.setResult(true);
                        return result;
                    }
                    remoteSlaveServer = executionConfiguration.getRemoteServer();
                    doFallback = false;
                } catch (KettleException e) {
                    log.logError(e.getMessage(), getName());
                    result.setNrErrors(1);
                    result.setResult(false);
                    return result;
                }
            }
            if (doFallback) {
                // 
                if (!Utils.isEmpty(remoteSlaveServerName)) {
                    String realRemoteSlaveServerName = environmentSubstitute(remoteSlaveServerName);
                    remoteSlaveServer = parentJob.getJobMeta().findSlaveServer(realRemoteSlaveServerName);
                    if (remoteSlaveServer == null) {
                        throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToFindRemoteSlaveServer", realRemoteSlaveServerName));
                    }
                }
            }
            if (remoteSlaveServer == null) {
                // Local execution...
                // 
                // Create a new job
                // 
                job = new Job(rep, jobMeta, this);
                job.setParentJob(parentJob);
                job.setLogLevel(jobLogLevel);
                job.shareVariablesWith(this);
                job.setInternalKettleVariables(this);
                job.copyParametersFrom(jobMeta);
                job.setInteractive(parentJob.isInteractive());
                job.setGatheringMetrics(parentJob.isGatheringMetrics());
                if (job.isInteractive()) {
                    job.getJobEntryListeners().addAll(parentJob.getJobEntryListeners());
                }
                // Pass the socket repository all around.
                // 
                job.setSocketRepository(parentJob.getSocketRepository());
                // Set the parameters calculated above on this instance.
                // 
                job.clearParameters();
                String[] parameterNames = job.listParameters();
                for (int idx = 0; idx < parameterNames.length; idx++) {
                    // Grab the parameter value set in the job entry
                    // 
                    String thisValue = namedParam.getParameterValue(parameterNames[idx]);
                    if (!Utils.isEmpty(thisValue)) {
                        // Set the value as specified by the user in the job entry
                        // 
                        job.setParameterValue(parameterNames[idx], thisValue);
                    } else {
                        // 
                        if (isPassingAllParameters()) {
                            String parentValue = parentJob.getParameterValue(parameterNames[idx]);
                            if (Utils.isEmpty(parentValue)) {
                                parentValue = parentJob.getParameterDefault(parameterNames[idx]);
                            }
                            if (!Utils.isEmpty(parentValue)) {
                                job.setParameterValue(parameterNames[idx], parentValue);
                            }
                        }
                    }
                }
                job.activateParameters();
                // Set the source rows we calculated above...
                // 
                job.setSourceRows(sourceRows);
                // Don't forget the logging...
                job.beginProcessing();
                // Link the job with the sub-job
                parentJob.getJobTracker().addJobTracker(job.getJobTracker());
                // Link both ways!
                job.getJobTracker().setParentJobTracker(parentJob.getJobTracker());
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    job.setPassedBatchId(parentJob.getBatchId());
                }
                job.setArguments(args);
                // 
                for (DelegationListener delegationListener : parentJob.getDelegationListeners()) {
                    // TODO: copy some settings in the job execution configuration, not strictly needed
                    // but the execution configuration information is useful in case of a job re-start
                    // 
                    delegationListener.jobDelegationStarted(job, new JobExecutionConfiguration());
                }
                JobEntryJobRunner runner = new JobEntryJobRunner(job, result, nr, log);
                Thread jobRunnerThread = new Thread(runner);
                // PDI-6518: add a UUID to the thread name, otherwise threads share names when job entries are
                // executed in parallel in a parent job; if that happens, contained transformations start
                // closing each other's connections
                jobRunnerThread.setName(Const.NVL(job.getJobMeta().getName(), job.getJobMeta().getFilename()) + " UUID: " + UUID.randomUUID().toString());
                jobRunnerThread.start();
                // 
                while (!runner.isFinished() && !parentJob.isStopped()) {
                    try {
                        Thread.sleep(0, 1);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                // if the parent-job was stopped, stop the sub-job too...
                if (parentJob.isStopped()) {
                    job.stopAll();
                    // Wait until finished!
                    runner.waitUntilFinished();
                }
                oneResult = runner.getResult();
            } else {
                // Make sure we can parameterize the slave server connection
                // 
                remoteSlaveServer.shareVariablesWith(this);
                // Remote execution...
                // 
                JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
                // Use lightClone() because the rows are overwritten on the next line.
                jobExecutionConfiguration.setPreviousResult(result.lightClone());
                jobExecutionConfiguration.getPreviousResult().setRows(sourceRows);
                jobExecutionConfiguration.setArgumentStrings(args);
                jobExecutionConfiguration.setVariables(this);
                jobExecutionConfiguration.setRemoteServer(remoteSlaveServer);
                jobExecutionConfiguration.setRepository(rep);
                jobExecutionConfiguration.setLogLevel(jobLogLevel);
                jobExecutionConfiguration.setPassingExport(passingExport);
                jobExecutionConfiguration.setExpandingRemoteJob(expandingRemoteJob);
                for (String param : namedParam.listParameters()) {
                    String defValue = namedParam.getParameterDefault(param);
                    String value = namedParam.getParameterValue(param);
                    jobExecutionConfiguration.getParams().put(param, Const.NVL(value, defValue));
                }
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    jobExecutionConfiguration.setPassedBatchId(parentJob.getBatchId());
                }
                // Send the XML over to the slave server
                // Also start the job over there...
                // 
                String carteObjectId = null;
                try {
                    carteObjectId = Job.sendToSlaveServer(jobMeta, jobExecutionConfiguration, rep, metaStore);
                } catch (KettleException e) {
                    // Perhaps the job exists on the remote server, carte is down, etc.
                    // This is an abort situation, stop the parent job...
                    // We want this in case we are running in parallel. The other job
                    // entries can stop running now.
                    // 
                    parentJob.stopAll();
                    // 
                    throw e;
                }
                // Now start the monitoring...
                // 
                SlaveServerJobStatus jobStatus = null;
                while (!parentJob.isStopped() && waitingToFinish) {
                    try {
                        jobStatus = remoteSlaveServer.getJobStatus(jobMeta.getName(), carteObjectId, 0);
                        if (jobStatus.getResult() != null) {
                            // The job is finished, get the result...
                            // 
                            oneResult = jobStatus.getResult();
                            break;
                        }
                    } catch (Exception e1) {
                        logError("Unable to contact slave server [" + remoteSlaveServer + "] to verify the status of job [" + jobMeta.getName() + "]", e1);
                        oneResult.setNrErrors(1L);
                        // Stop polling too; chances are low the server will come back on-line
                        break;
                    }
                    // sleep for 1 second
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                // Write log from carte to file
                if (setLogfile && jobStatus != null) {
                    String logFromCarte = jobStatus.getLoggingString();
                    if (!Utils.isEmpty(logFromCarte)) {
                        FileObject logfile = logChannelFileWriter.getLogFile();
                        OutputStream logFileOutputStream = null;
                        try {
                            logFileOutputStream = KettleVFS.getOutputStream(logfile, setAppendLogfile);
                            logFileOutputStream.write(logFromCarte.getBytes());
                            logFileOutputStream.flush();
                        } catch (Exception e) {
                            logError("There was an error logging to file '" + logfile + "'", e);
                        } finally {
                            try {
                                if (logFileOutputStream != null) {
                                    logFileOutputStream.close();
                                    logFileOutputStream = null;
                                }
                            } catch (Exception e) {
                                logError("There was an error closing log file file '" + logfile + "'", e);
                            }
                        }
                    }
                }
                if (!waitingToFinish) {
                    // Since the job was posted successfully, the result is true...
                    // 
                    oneResult = new Result();
                    oneResult.setResult(true);
                }
                if (parentJob.isStopped()) {
                    try {
                        // 
                        if (jobStatus == null || jobStatus.isRunning()) {
                            // Try a remote abort ...
                            // 
                            remoteSlaveServer.stopJob(jobMeta.getName(), carteObjectId);
                        }
                    } catch (Exception e1) {
                        logError("Unable to contact slave server [" + remoteSlaveServer + "] to stop job [" + jobMeta.getName() + "]", e1);
                        oneResult.setNrErrors(1L);
                        // Stop trying too; chances are low the server will come back on-line
                        break;
                    }
                }
            }
            // clear only the numbers, NOT the files or rows.
            result.clear();
            result.add(oneResult);
            // Set the result rows too, if any ...
            if (!Utils.isEmpty(oneResult.getRows())) {
                result.setRows(new ArrayList<RowMetaAndData>(oneResult.getRows()));
            }
            // 
            if (!oneResult.getResult()) {
                result.setNrErrors(result.getNrErrors() + 1);
            }
            iteration++;
        }
    } catch (KettleException ke) {
        logError("Error running job entry 'job' : ", ke);
        result.setResult(false);
        result.setNrErrors(1L);
    }
    if (setLogfile) {
        if (logChannelFileWriter != null) {
            logChannelFileWriter.stopLogging();
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentJob.getJobname(), getName());
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
            // 
            if (logChannelFileWriter.getException() != null) {
                logError("Unable to open log file [" + getLogFilename() + "] : ");
                logError(Const.getStackTracker(logChannelFileWriter.getException()));
                result.setNrErrors(1);
                result.setResult(false);
                return result;
            }
        }
    }
    if (result.getNrErrors() > 0) {
        result.setResult(false);
    } else {
        result.setResult(true);
    }
    return result;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) NamedParams(org.pentaho.di.core.parameters.NamedParams) OutputStream(java.io.OutputStream) ArrayList(java.util.ArrayList) SlaveServer(org.pentaho.di.cluster.SlaveServer) LogLevel(org.pentaho.di.core.logging.LogLevel) Result(org.pentaho.di.core.Result) SlaveServerJobStatus(org.pentaho.di.www.SlaveServerJobStatus) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) FileObject(org.apache.commons.vfs2.FileObject) Job(org.pentaho.di.job.Job) NamedParamsDefault(org.pentaho.di.core.parameters.NamedParamsDefault) LogChannelFileWriter(org.pentaho.di.core.logging.LogChannelFileWriter) ResultFile(org.pentaho.di.core.ResultFile) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) RepositoryObject(org.pentaho.di.repository.RepositoryObject) DelegationListener(org.pentaho.di.job.DelegationListener)
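
The remote branch of execute() boils down to a submit-then-poll exchange with the Carte server. Below is a minimal standalone sketch of that pattern, using only calls visible in the snippet above (Job.sendToSlaveServer, SlaveServer.getJobStatus); the runRemotely method name is illustrative, and error handling is reduced to propagating exceptions.

import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.www.SlaveServerJobStatus;
import org.pentaho.metastore.api.IMetaStore;

public static Result runRemotely(JobMeta jobMeta, SlaveServer slaveServer, Repository rep, IMetaStore metaStore) throws Exception {
    JobExecutionConfiguration config = new JobExecutionConfiguration();
    config.setRemoteServer(slaveServer);
    config.setRepository(rep);
    config.setLogLevel(LogLevel.BASIC);
    // Ship the job definition to the slave server and start it; the returned
    // Carte object id identifies this particular run on the server.
    String carteObjectId = Job.sendToSlaveServer(jobMeta, config, rep, metaStore);
    // Poll once per second until the remote job publishes a Result.
    while (true) {
        SlaveServerJobStatus status = slaveServer.getJobStatus(jobMeta.getName(), carteObjectId, 0);
        if (status.getResult() != null) {
            return status.getResult();
        }
        Thread.sleep(1000);
    }
}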

Example 87 with SlaveServer

use of org.pentaho.di.cluster.SlaveServer in project pentaho-kettle by pentaho.

the class JobEntrySetVariablesTest method testJobEntrySetVariablesExecute_VARIABLE_TYPE_JVM_NullVariable.

@Test
public void testJobEntrySetVariablesExecute_VARIABLE_TYPE_JVM_NullVariable() throws Exception {
    List<DatabaseMeta> databases = mock(List.class);
    List<SlaveServer> slaveServers = mock(List.class);
    Repository repository = mock(Repository.class);
    IMetaStore metaStore = mock(IMetaStore.class);
    entry.loadXML(getEntryNode("nullVariable", null, "JVM"), databases, slaveServers, repository, metaStore);
    Result result = entry.execute(new Result(), 0);
    assertTrue("Result should be true", result.getResult());
    assertNull(System.getProperty("nullVariable"));
}
Also used : Repository(org.pentaho.di.repository.Repository) SlaveServer(org.pentaho.di.cluster.SlaveServer) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) IMetaStore(org.pentaho.metastore.api.IMetaStore) Result(org.pentaho.di.core.Result) Test(org.junit.Test)

Example 88 with SlaveServer

use of org.pentaho.di.cluster.SlaveServer in project pentaho-kettle by pentaho.

the class JobEntrySetVariablesTest method testJobEntrySetVariablesExecute_VARIABLE_TYPE_CURRENT_JOB_VariableNotNull.

@Test
public void testJobEntrySetVariablesExecute_VARIABLE_TYPE_CURRENT_JOB_VariableNotNull() throws Exception {
    List<DatabaseMeta> databases = mock(List.class);
    List<SlaveServer> slaveServers = mock(List.class);
    Repository repository = mock(Repository.class);
    IMetaStore metaStore = mock(IMetaStore.class);
    entry.loadXML(getEntryNode("variableNotNull", "someValue", "CURRENT_JOB"), databases, slaveServers, repository, metaStore);
    assertNull(System.getProperty("variableNotNull"));
    Result result = entry.execute(new Result(), 0);
    assertTrue("Result should be true", result.getResult());
    assertEquals("someValue", entry.getVariable("variableNotNull"));
}
Also used : Repository(org.pentaho.di.repository.Repository) SlaveServer(org.pentaho.di.cluster.SlaveServer) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) IMetaStore(org.pentaho.metastore.api.IMetaStore) Result(org.pentaho.di.core.Result) Test(org.junit.Test)

Example 89 with SlaveServer

use of org.pentaho.di.cluster.SlaveServer in project pentaho-kettle by pentaho.

the class JobEntrySetVariablesTest method testJobEntrySetVariablesExecute_VARIABLE_TYPE_JVM_VariableNotNull.

@Test
public void testJobEntrySetVariablesExecute_VARIABLE_TYPE_JVM_VariableNotNull() throws Exception {
    List<DatabaseMeta> databases = mock(List.class);
    List<SlaveServer> slaveServers = mock(List.class);
    Repository repository = mock(Repository.class);
    IMetaStore metaStore = mock(IMetaStore.class);
    entry.loadXML(getEntryNode("variableNotNull", "someValue", "JVM"), databases, slaveServers, repository, metaStore);
    assertNull(System.getProperty("variableNotNull"));
    Result result = entry.execute(new Result(), 0);
    assertTrue("Result should be true", result.getResult());
    assertEquals("someValue", System.getProperty("variableNotNull"));
}
Also used : Repository(org.pentaho.di.repository.Repository) SlaveServer(org.pentaho.di.cluster.SlaveServer) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) IMetaStore(org.pentaho.metastore.api.IMetaStore) Result(org.pentaho.di.core.Result) Test(org.junit.Test)
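
The three tests above share one skeleton and differ only in the variable type handed to the test-local getEntryNode helper: JVM-scoped variables must surface as system properties, while CURRENT_JOB-scoped ones stay on the entry's own variable space. A condensed sketch of that contrast, assuming the same JUnit 4/Mockito fixture as the tests above (the variable names jvmVar and jobVar are illustrative):

// JVM scope: the value must become visible process-wide via System.getProperty().
entry.loadXML(getEntryNode("jvmVar", "v1", "JVM"), databases, slaveServers, repository, metaStore);
entry.execute(new Result(), 0);
assertEquals("v1", System.getProperty("jvmVar"));

// CURRENT_JOB scope: the value stays on the entry's variable space and never
// touches the JVM's system properties.
entry.loadXML(getEntryNode("jobVar", "v2", "CURRENT_JOB"), databases, slaveServers, repository, metaStore);
entry.execute(new Result(), 0);
assertEquals("v2", entry.getVariable("jobVar"));
assertNull(System.getProperty("jobVar"));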

Example 90 with SlaveServer

use of org.pentaho.di.cluster.SlaveServer in project pentaho-kettle by pentaho.

the class PurRepository method updateSharedObjectCache.

/**
 * Do not call this method directly. Instead call updateSharedObjectCache or removeFromSharedObjectCache.
 */
private void updateSharedObjectCache(final RepositoryElementInterface element, final RepositoryObjectType type, final ObjectId id) throws KettleException {
    if (element != null && (element.getObjectId() == null || element.getObjectId().getId() == null)) {
        throw new IllegalArgumentException(element.getName() + " has a null id");
    }
    loadAndCacheSharedObjects(false);
    boolean remove = element == null;
    ObjectId idToFind = element != null ? element.getObjectId() : id;
    RepositoryObjectType typeToUpdate = element != null ? element.getRepositoryElementType() : type;
    RepositoryElementInterface elementToUpdate = null;
    List<? extends SharedObjectInterface> origSharedObjects = null;
    sharedObjectsLock.writeLock().lock();
    try {
        switch(typeToUpdate) {
            case DATABASE:
                origSharedObjects = sharedObjectsByType.get(RepositoryObjectType.DATABASE);
                if (!remove) {
                    elementToUpdate = (RepositoryElementInterface) ((DatabaseMeta) element).clone();
                }
                break;
            case SLAVE_SERVER:
                origSharedObjects = sharedObjectsByType.get(RepositoryObjectType.SLAVE_SERVER);
                if (!remove) {
                    elementToUpdate = (RepositoryElementInterface) ((SlaveServer) element).clone();
                }
                break;
            case CLUSTER_SCHEMA:
                origSharedObjects = sharedObjectsByType.get(RepositoryObjectType.CLUSTER_SCHEMA);
                if (!remove) {
                    elementToUpdate = ((ClusterSchema) element).clone();
                }
                break;
            case PARTITION_SCHEMA:
                origSharedObjects = sharedObjectsByType.get(RepositoryObjectType.PARTITION_SCHEMA);
                if (!remove) {
                    elementToUpdate = (RepositoryElementInterface) ((PartitionSchema) element).clone();
                }
                break;
            default:
                throw new KettleException("unknown type [" + typeToUpdate + "]");
        }
        List<SharedObjectInterface> newSharedObjects = new ArrayList<SharedObjectInterface>(origSharedObjects);
        // if there's a match on id, replace the element
        boolean found = false;
        for (int i = 0; i < origSharedObjects.size(); i++) {
            RepositoryElementInterface repositoryElementInterface = (RepositoryElementInterface) origSharedObjects.get(i);
            if (repositoryElementInterface == null) {
                continue;
            }
            ObjectId objectId = repositoryElementInterface.getObjectId();
            if (objectId != null && objectId.equals(idToFind)) {
                if (remove) {
                    newSharedObjects.remove(i);
                } else {
                    // because some clones don't clone the ID!!!
                    elementToUpdate.setObjectId(idToFind);
                    newSharedObjects.set(i, (SharedObjectInterface) elementToUpdate);
                }
                found = true;
            }
        }
        // otherwise, add it
        if (!remove && !found) {
            // because some clones don't clone the ID!!!
            elementToUpdate.setObjectId(idToFind);
            newSharedObjects.add((SharedObjectInterface) elementToUpdate);
        }
        sharedObjectsByType.put(typeToUpdate, newSharedObjects);
    } finally {
        sharedObjectsLock.writeLock().unlock();
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) StringObjectId(org.pentaho.di.repository.StringObjectId) ObjectId(org.pentaho.di.repository.ObjectId) PartitionSchema(org.pentaho.di.partition.PartitionSchema) ArrayList(java.util.ArrayList) SlaveServer(org.pentaho.di.cluster.SlaveServer) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) RepositoryElementInterface(org.pentaho.di.repository.RepositoryElementInterface) RepositoryObjectType(org.pentaho.di.repository.RepositoryObjectType) SharedObjectInterface(org.pentaho.di.shared.SharedObjectInterface)
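
Stripped of the repository types, updateSharedObjectCache is a copy-on-write upsert guarded by a write lock: copy the list, replace the entry whose id matches (or remove it), append when nothing matched, then publish the copy. A generic sketch of that pattern (the CopyOnWriteCache class and its names are illustrative, not part of the Pentaho API):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;

class CopyOnWriteCache<K, V> {
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    private final Function<V, K> keyOf;
    private List<V> items = new ArrayList<>();

    CopyOnWriteCache(Function<V, K> keyOf) {
        this.keyOf = keyOf;
    }

    // Replace the element whose key matches, or append it when no match is found;
    // readers keep seeing the old list until the new copy is published.
    void upsert(V value) {
        lock.writeLock().lock();
        try {
            List<V> copy = new ArrayList<>(items);
            boolean found = false;
            for (int i = 0; i < copy.size(); i++) {
                if (keyOf.apply(value).equals(keyOf.apply(copy.get(i)))) {
                    copy.set(i, value);
                    found = true;
                }
            }
            if (!found) {
                copy.add(value);
            }
            items = copy;
        } finally {
            lock.writeLock().unlock();
        }
    }
}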

Aggregations

SlaveServer (org.pentaho.di.cluster.SlaveServer): 110 usages
KettleException (org.pentaho.di.core.exception.KettleException): 35 usages
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 32 usages
Test (org.junit.Test): 22 usages
ClusterSchema (org.pentaho.di.cluster.ClusterSchema): 22 usages
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 18 usages
PartitionSchema (org.pentaho.di.partition.PartitionSchema): 18 usages
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 17 usages
JobMeta (org.pentaho.di.job.JobMeta): 16 usages
ObjectId (org.pentaho.di.repository.ObjectId): 16 usages
StepMeta (org.pentaho.di.trans.step.StepMeta): 14 usages
ArrayList (java.util.ArrayList): 13 usages
TransMeta (org.pentaho.di.trans.TransMeta): 11 usages
Result (org.pentaho.di.core.Result): 10 usages
KettleFileException (org.pentaho.di.core.exception.KettleFileException): 10 usages
UnknownParamException (org.pentaho.di.core.parameters.UnknownParamException): 10 usages
NotePadMeta (org.pentaho.di.core.NotePadMeta): 9 usages
Point (org.pentaho.di.core.gui.Point): 8 usages
List (java.util.List): 7 usages
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 7 usages