
Example 46 with Job

Use of org.pentaho.di.job.Job in project pentaho-metaverse by pentaho.

From the class JobEntryExternalResourceConsumerListenerTest, method testCallJobEntryExtensionPoint.

@Test
public void testCallJobEntryExtensionPoint() throws Exception {
    JobEntryExternalResourceConsumerListener jobEntryExtensionPoint = new JobEntryExternalResourceConsumerListener();
    jobEntryExtensionPoint.setJobEntryExternalResourceConsumerProvider(MetaverseTestUtils.getJobEntryExternalResourceConsumerProvider());
    JobExecutionExtension jobExec = mock(JobExecutionExtension.class);
    JobEntryBase jobEntryBase = mock(JobEntryBase.class, withSettings().extraInterfaces(JobEntryInterface.class));
    JobEntryInterface jobEntryInterface = (JobEntryInterface) jobEntryBase;
    JobEntryCopy jobEntryCopy = mock(JobEntryCopy.class);
    when(jobEntryCopy.getEntry()).thenReturn(jobEntryInterface);
    jobExec.jobEntryCopy = jobEntryCopy;
    jobEntryExtensionPoint.callExtensionPoint(null, jobExec);
    // Add a consumer
    Map<Class<? extends JobEntryBase>, Set<IJobEntryExternalResourceConsumer>> jobEntryConsumerMap = new JobEntryExternalResourceConsumerProvider().getJobEntryConsumerMap();
    Set<IJobEntryExternalResourceConsumer> consumers = new HashSet<IJobEntryExternalResourceConsumer>();
    jobEntryConsumerMap.put(jobEntryBase.getClass(), consumers);
    jobEntryExtensionPoint.callExtensionPoint(null, jobExec);
    IJobEntryExternalResourceConsumer consumer = mock(IJobEntryExternalResourceConsumer.class);
    when(consumer.getResourcesFromMeta(Mockito.any())).thenReturn(Collections.emptyList());
    consumers.add(consumer);
    Job mockJob = mock(Job.class);
    when(jobEntryInterface.getParentJob()).thenReturn(mockJob);
    jobExec.job = mockJob;
    jobEntryExtensionPoint.callExtensionPoint(null, jobExec);
    when(consumer.isDataDriven(Mockito.any())).thenReturn(Boolean.TRUE);
    jobEntryExtensionPoint.callExtensionPoint(null, jobExec);
}
Also used: Set(java.util.Set) HashSet(java.util.HashSet) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) JobExecutionExtension(org.pentaho.di.job.JobExecutionExtension) JobEntryBase(org.pentaho.di.job.entry.JobEntryBase) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryExternalResourceConsumerProvider(org.pentaho.metaverse.analyzer.kettle.jobentry.JobEntryExternalResourceConsumerProvider) IJobEntryExternalResourceConsumer(org.pentaho.metaverse.api.analyzer.kettle.jobentry.IJobEntryExternalResourceConsumer) Job(org.pentaho.di.job.Job) Test(org.junit.Test)
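
A minimal sketch of how a consumer could be registered with the provider outside of a test, mirroring the map manipulation the test exercises. The helper below is hypothetical and not part of the pentaho-metaverse API; the map type comes straight from the test above.

import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.metaverse.analyzer.kettle.jobentry.JobEntryExternalResourceConsumerProvider;
import org.pentaho.metaverse.api.analyzer.kettle.jobentry.IJobEntryExternalResourceConsumer;

public class ConsumerRegistrationSketch {

    // Hypothetical helper: register a consumer for a given job entry class by
    // adding it to the provider's consumer map, exactly as the test does by hand.
    public static void register(Class<? extends JobEntryBase> entryClass, IJobEntryExternalResourceConsumer consumer) {
        Map<Class<? extends JobEntryBase>, Set<IJobEntryExternalResourceConsumer>> jobEntryConsumerMap =
            new JobEntryExternalResourceConsumerProvider().getJobEntryConsumerMap();
        jobEntryConsumerMap.computeIfAbsent(entryClass, k -> new HashSet<>()).add(consumer);
    }
}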

Example 47 with Job

Use of org.pentaho.di.job.Job in project pentaho-metaverse by pentaho.

From the class JobEntryAnalyzerTest, method testValidateStateNullParentJob.

@Test(expected = MetaverseAnalyzerException.class)
public void testValidateStateNullParentJob() throws MetaverseAnalyzerException {
    final Job parentJob = null;
    when(mockJobEntryInterface.getParentJob()).thenReturn(parentJob);
    analyzer.validateState(mockDescriptor, mockJobEntryInterface);
}
Also used: Job(org.pentaho.di.job.Job) Test(org.junit.Test)
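
The expected exception presumably originates from a null guard inside validateState. A rough sketch of such a guard, under the assumption that the real implementation differs in structure and message:

    // Hypothetical guard (not the actual pentaho-metaverse source): a job entry
    // without a parent job cannot be analyzed, so validateState fails fast.
    if (jobEntryInterface == null || jobEntryInterface.getParentJob() == null) {
        throw new MetaverseAnalyzerException("Job entry has no parent job");
    }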

Example 48 with Job

Use of org.pentaho.di.job.Job in project pentaho-kettle by pentaho.

From the class BaseJobServlet, method createJob.

protected Job createJob(JobConfiguration jobConfiguration) throws UnknownParamException {
    JobExecutionConfiguration jobExecutionConfiguration = jobConfiguration.getJobExecutionConfiguration();
    JobMeta jobMeta = jobConfiguration.getJobMeta();
    jobMeta.setLogLevel(jobExecutionConfiguration.getLogLevel());
    jobMeta.injectVariables(jobExecutionConfiguration.getVariables());
    // If there was a repository, we know about it at this point in time.
    final Repository repository = jobConfiguration.getJobExecutionConfiguration().getRepository();
    String carteObjectId = UUID.randomUUID().toString();
    SimpleLoggingObject servletLoggingObject = getServletLogging(carteObjectId, jobExecutionConfiguration.getLogLevel());
    // Create the job and store it in the list...
    final Job job = new Job(repository, jobMeta, servletLoggingObject);
    // Setting variables
    job.initializeVariablesFrom(null);
    job.getJobMeta().setMetaStore(jobMap.getSlaveServerConfig().getMetaStore());
    job.getJobMeta().setInternalKettleVariables(job);
    job.injectVariables(jobConfiguration.getJobExecutionConfiguration().getVariables());
    job.setArguments(jobExecutionConfiguration.getArgumentStrings());
    job.setSocketRepository(getSocketRepository());
    copyJobParameters(job, jobExecutionConfiguration.getParams());
    // Check if there is a starting point specified.
    String startCopyName = jobExecutionConfiguration.getStartCopyName();
    if (startCopyName != null && !startCopyName.isEmpty()) {
        int startCopyNr = jobExecutionConfiguration.getStartCopyNr();
        JobEntryCopy startJobEntryCopy = jobMeta.findJobEntry(startCopyName, startCopyNr, false);
        job.setStartJobEntryCopy(startJobEntryCopy);
    }
    // Note: the plugin (Job and Trans) job entries need to call the delegation listeners in the parent job.
    if (jobExecutionConfiguration.isExpandingRemoteJob()) {
        job.addDelegationListener(new CarteDelegationHandler(getTransformationMap(), getJobMap()));
    }
    // Make sure to disconnect from the repository when the job finishes.
    if (repository != null) {
        job.addJobListener(new JobAdapter() {

            public void jobFinished(Job job) {
                repository.disconnect();
            }
        });
    }
    getJobMap().addJob(job.getJobname(), carteObjectId, job, jobConfiguration);
    final Long passedBatchId = jobExecutionConfiguration.getPassedBatchId();
    if (passedBatchId != null) {
        job.setPassedBatchId(passedBatchId);
    }
    return job;
}
Also used: JobMeta(org.pentaho.di.job.JobMeta) Repository(org.pentaho.di.repository.Repository) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) SimpleLoggingObject(org.pentaho.di.core.logging.SimpleLoggingObject) Job(org.pentaho.di.job.Job) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) JobAdapter(org.pentaho.di.job.JobAdapter)
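
As a usage sketch, a BaseJobServlet subclass could drive createJob roughly like this, assuming a JobMeta has already been loaded; all variable names here are illustrative:

    // Sketch: build an execution configuration, wrap it with the job metadata,
    // and start the resulting Job (org.pentaho.di.job.Job extends Thread).
    JobExecutionConfiguration execConfig = new JobExecutionConfiguration();
    execConfig.setLogLevel(LogLevel.BASIC);
    JobConfiguration jobConfiguration = new JobConfiguration(jobMeta, execConfig);
    Job job = createJob(jobConfiguration); // may throw UnknownParamException
    job.start();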

Example 49 with Job

Use of org.pentaho.di.job.Job in project pentaho-kettle by pentaho.

From the class CarteSingleton, method installPurgeTimer.

public static void installPurgeTimer(final SlaveServerConfig config, final LogChannelInterface log, final TransformationMap transformationMap, final JobMap jobMap) {
    final int objectTimeout;
    String systemTimeout = EnvUtil.getSystemProperty(Const.KETTLE_CARTE_OBJECT_TIMEOUT_MINUTES, null);
    // Prefer the slave server configuration, then the system property, then the default.
    if (config.getObjectTimeoutMinutes() > 0) {
        objectTimeout = config.getObjectTimeoutMinutes();
    } else if (!Utils.isEmpty(systemTimeout)) {
        objectTimeout = Const.toInt(systemTimeout, 1440);
    } else {
        // Default is a one-day time-out (24 * 60 = 1440 minutes).
        objectTimeout = 24 * 60;
    }
    // Install the purge task only when a positive timeout is configured.
    if (objectTimeout > 0) {
        log.logBasic("Installing timer to purge stale objects after " + objectTimeout + " minutes.");
        Timer timer = new Timer(true);
        final AtomicBoolean busy = new AtomicBoolean(false);
        TimerTask timerTask = new TimerTask() {

            public void run() {
                if (!busy.get()) {
                    busy.set(true);
                    try {
                        // Sweep all registered transformations.
                        for (CarteObjectEntry entry : transformationMap.getTransformationObjects()) {
                            Trans trans = transformationMap.getTransformation(entry);
                            // Only clean up finished or stopped transformations that have a log date.
                            if (trans != null && (trans.isFinished() || trans.isStopped()) && trans.getLogDate() != null) {
                                // Check the last log time.
                                int diffInMinutes = (int) Math.floor((System.currentTimeMillis() - trans.getLogDate().getTime()) / 60000);
                                if (diffInMinutes >= objectTimeout) {
                                    // Let's remove this from the transformation map...
                                    transformationMap.removeTransformation(entry);
                                    // Remove the logging information from the log registry & central log store.
                                    LoggingRegistry.getInstance().removeIncludingChildren(trans.getLogChannelId());
                                    KettleLogStore.discardLines(trans.getLogChannelId(), false);
                                    // transformationMap.deallocateServerSocketPorts(entry);
                                    log.logMinimal("Cleaned up transformation " + entry.getName() + " with id " + entry.getId() + " from " + trans.getLogDate() + ", diff=" + diffInMinutes);
                                }
                            }
                        }
                        // Do the same for jobs.
                        for (CarteObjectEntry entry : jobMap.getJobObjects()) {
                            Job job = jobMap.getJob(entry);
                            // Only clean up finished or stopped jobs that have a log date.
                            if (job != null && (job.isFinished() || job.isStopped()) && job.getLogDate() != null) {
                                // Check the last log time.
                                int diffInMinutes = (int) Math.floor((System.currentTimeMillis() - job.getLogDate().getTime()) / 60000);
                                if (diffInMinutes >= objectTimeout) {
                                    // Let's remove this from the job map...
                                    String id = jobMap.getJob(entry).getLogChannelId();
                                    LoggingRegistry.getInstance().removeLogChannelFileWriterBuffer(id);
                                    jobMap.removeJob(entry);
                                    log.logMinimal("Cleaned up job " + entry.getName() + " with id " + entry.getId() + " from " + job.getLogDate());
                                }
                            }
                        }
                    } finally {
                        busy.set(false);
                    }
                }
            }
        };
        // Search for stale objects every 20 seconds.
        timer.schedule(timerTask, 20000, 20000);
    }
}
Also used: AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Timer(java.util.Timer) TimerTask(java.util.TimerTask) Job(org.pentaho.di.job.Job) Trans(org.pentaho.di.trans.Trans)
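
Both cleanup loops apply the same staleness test; factored into a self-contained helper it looks roughly like this (hypothetical method, not part of the Carte code):

    // Returns true when the last log activity is at least objectTimeout minutes old.
    static boolean isStale(java.util.Date logDate, int objectTimeoutMinutes) {
        long diffInMinutes = (System.currentTimeMillis() - logDate.getTime()) / 60000L;
        return diffInMinutes >= objectTimeoutMinutes;
    }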

Example 50 with Job

Use of org.pentaho.di.job.Job in project pentaho-kettle by pentaho.

From the class ExecuteJobServlet, method doGet.

/**
 * <div id="mindtouch">
 *    <h1>/kettle/executeJob</h1>
 *    <a name="GET"></a>
 *    <h2>GET</h2>
 *    <p>Executes a job from the specified repository.
 *  Connects to the repository provided as a parameter, loads the job from it, and executes it.
 *  An empty response is returned, or the response contains the output of any error that happened during job execution.
 *  The response contains an <code>ERROR</code> result if an error occurred during job execution.</p>
 *
 *    <p><b>Example Request:</b><br />
 *    <pre function="syntax.xml">
 *    GET /kettle/executeJob/?rep=my_repository&user=my_user&pass=my_password&job=my_job&level=INFO
 *    </pre>
 *
 *    </p>
 *    <h3>Parameters</h3>
 *    <table class="pentaho-table">
 *    <tbody>
 *    <tr>
 *      <th>name</th>
 *      <th>description</th>
 *      <th>type</th>
 *    </tr>
 *    <tr>
 *    <td>rep</td>
 *    <td>Repository id to connect to.</td>
 *    <td>query</td>
 *    </tr>
 *    <tr>
 *    <td>user</td>
 *    <td>User name to be used to connect to repository.</td>
 *    <td>query</td>
 *    </tr>
 *    <tr>
 *    <td>pass</td>
 *    <td>User password to be used to connect to repository.</td>
 *    <td>query</td>
 *    </tr>
 *    <tr>
 *    <td>job</td>
 *    <td>Job name to be loaded and executed.</td>
 *    <td>query</td>
 *    </tr>
 *    <tr>
 *    <td>level</td>
 *    <td>Logging level to be used for job execution (e.g. Debug).</td>
 *    <td>query</td>
 *    </tr>
 *    <tr>
 *    <td>*any name*</td>
 *    <td>All other parameters are sent to the job for use as variables.
 *  When necessary, you can add custom parameters to the request;
 *  they will be used to set the values of job variables.</td>
 *    <td>query</td>
 *    </tr>
 *    </tbody>
 *    </table>
 *
 *  <h3>Response Body</h3>
 *
 *  <table class="pentaho-table">
 *    <tbody>
 *      <tr>
 *        <td align="right">element:</td>
 *        <td>(custom)</td>
 *      </tr>
 *      <tr>
 *        <td align="right">media types:</td>
 *        <td>application/xml</td>
 *      </tr>
 *    </tbody>
 *  </table>
 *    <p>The response contains the error output of the executed job, or the Carte object ID
 *  if the execution was successful.</p>
 *
 *    <p><b>Example Response:</b></p>
 *    <pre function="syntax.xml">
 *  <webresult>
 *    <result>OK</result>
 *    <message>Job started</message>
 *    <id>74d96aa6-f29a-4bac-a26a-06a8c8f107e5</id>
 *  </webresult>
 *    </pre>
 *
 *    <h3>Status Codes</h3>
 *    <table class="pentaho-table">
 *  <tbody>
 *    <tr>
 *      <th>code</th>
 *      <th>description</th>
 *    </tr>
 *    <tr>
 *      <td>200</td>
 *      <td>Request was processed.</td>
 *    </tr>
 *    <tr>
 *      <td>500</td>
 *      <td>An internal server error occurred during request processing.</td>
 *    </tr>
 *  </tbody>
 *</table>
 *</div>
 */
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    if (isJettyMode() && !request.getContextPath().startsWith(CONTEXT_PATH)) {
        return;
    }
    if (log.isDebug()) {
        logDebug(BaseMessages.getString(PKG, "ExecuteJobServlet.Log.ExecuteJobRequested"));
    }
    // Options taken from PAN.
    String[] knownOptions = new String[] { "rep", "user", "pass", "job", "level" };
    String repOption = request.getParameter("rep");
    String userOption = request.getParameter("user");
    String passOption = Encr.decryptPasswordOptionallyEncrypted(request.getParameter("pass"));
    String jobOption = request.getParameter("job");
    String levelOption = request.getParameter("level");
    response.setStatus(HttpServletResponse.SC_OK);
    String encoding = System.getProperty("KETTLE_DEFAULT_SERVLET_ENCODING", null);
    if (encoding != null && !Utils.isEmpty(encoding.trim())) {
        response.setCharacterEncoding(encoding);
        response.setContentType("text/html; charset=" + encoding);
    }
    PrintWriter out = response.getWriter();
    try {
        final Repository repository = openRepository(repOption, userOption, passOption);
        final JobMeta jobMeta = loadJob(repository, jobOption);
        // Set the servlet parameters as variables in the job.
        String[] parameters = jobMeta.listParameters();
        Enumeration<?> parameterNames = request.getParameterNames();
        while (parameterNames.hasMoreElements()) {
            String parameter = (String) parameterNames.nextElement();
            String[] values = request.getParameterValues(parameter);
            // Everything that is not a known option becomes a variable or parameter.
            if (Const.indexOfString(parameter, knownOptions) < 0) {
                // Prefer a declared job parameter; otherwise set a plain variable.
                if (Const.indexOfString(parameter, parameters) < 0) {
                    jobMeta.setVariable(parameter, values[0]);
                } else {
                    jobMeta.setParameterValue(parameter, values[0]);
                }
            }
        }
        JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
        LogLevel logLevel = LogLevel.getLogLevelForCode(levelOption);
        jobExecutionConfiguration.setLogLevel(logLevel);
        JobConfiguration jobConfiguration = new JobConfiguration(jobMeta, jobExecutionConfiguration);
        String carteObjectId = UUID.randomUUID().toString();
        SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
        servletLoggingObject.setContainerObjectId(carteObjectId);
        servletLoggingObject.setLogLevel(logLevel);
        // Create the job and store it in the list...
        final Job job = new Job(repository, jobMeta, servletLoggingObject);
        job.setRepository(repository);
        job.setSocketRepository(getSocketRepository());
        getJobMap().addJob(jobMeta.getName(), carteObjectId, job, jobConfiguration);
        job.setContainerObjectId(carteObjectId);
        if (repository != null) {
            // The repository connection is open: make sure we disconnect from the repository once we
            // are done with this job.
            job.addJobListener(new JobAdapter() {

                public void jobFinished(Job job) {
                    repository.disconnect();
                }
            });
        }
        try {
            runJob(job);
            WebResult webResult = new WebResult(WebResult.STRING_OK, "Job started", carteObjectId);
            out.println(webResult.getXML());
            out.flush();
        } catch (Exception executionException) {
            String logging = KettleLogStore.getAppender().getBuffer(job.getLogChannelId(), false).toString();
            throw new KettleException("Error executing job: " + logging, executionException);
        }
    } catch (Exception ex) {
        out.println(new WebResult(WebResult.STRING_ERROR, BaseMessages.getString(PKG, "ExecuteJobServlet.Error.UnexpectedError", Const.CR + Const.getStackTracker(ex))));
    }
}
Also used: KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) SimpleLoggingObject(org.pentaho.di.core.logging.SimpleLoggingObject) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) JobAdapter(org.pentaho.di.job.JobAdapter) LogLevel(org.pentaho.di.core.logging.LogLevel) ServletException(javax.servlet.ServletException) IOException(java.io.IOException) Repository(org.pentaho.di.repository.Repository) Job(org.pentaho.di.job.Job) JobConfiguration(org.pentaho.di.job.JobConfiguration) PrintWriter(java.io.PrintWriter)
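
To exercise the endpoint documented above, a client only needs an HTTP GET carrying the query parameters from the table. Below is a minimal sketch using the JDK 11 HttpClient; the host, port, and credentials are placeholders, and a real Carte instance may additionally require HTTP Basic authentication, which is omitted here:

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class ExecuteJobClientSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder server address and credentials; adjust for your installation.
        String base = "http://localhost:8080/kettle/executeJob/";
        String query = "rep=" + enc("my_repository")
                + "&user=" + enc("my_user")
                + "&pass=" + enc("my_password")
                + "&job=" + enc("my_job")
                + "&level=" + enc("INFO");
        HttpRequest request = HttpRequest.newBuilder(URI.create(base + "?" + query)).GET().build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // On success the body is a <webresult> element carrying the Carte object id.
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }

    private static String enc(String value) {
        return URLEncoder.encode(value, StandardCharsets.UTF_8);
    }
}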

Aggregations

Job (org.pentaho.di.job.Job): 95
JobMeta (org.pentaho.di.job.JobMeta): 44
Test (org.junit.Test): 35
Result (org.pentaho.di.core.Result): 22
KettleException (org.pentaho.di.core.exception.KettleException): 20
PrintWriter (java.io.PrintWriter): 17
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 17
Trans (org.pentaho.di.trans.Trans): 14
IOException (java.io.IOException): 11
Before (org.junit.Before): 11
Point (org.pentaho.di.core.gui.Point): 11
LogChannelInterface (org.pentaho.di.core.logging.LogChannelInterface): 11
JobExecutionConfiguration (org.pentaho.di.job.JobExecutionConfiguration): 10
Repository (org.pentaho.di.repository.Repository): 10
ArrayList (java.util.ArrayList): 9
ServletException (javax.servlet.ServletException): 9
HttpServletRequest (javax.servlet.http.HttpServletRequest): 9
HttpServletResponse (javax.servlet.http.HttpServletResponse): 9
SimpleLoggingObject (org.pentaho.di.core.logging.SimpleLoggingObject): 9
JobConfiguration (org.pentaho.di.job.JobConfiguration): 9