Use of org.pentaho.di.job.JobAdapter in project pentaho-kettle by pentaho.
The class RunJobServlet, method doGet.
/**
* <div id="mindtouch">
* <h1>/kettle/runJob</h1>
* <a name="GET"></a>
* <h2>GET</h2>
* <p>Executes a job from the enterprise repository. The repository should be configured in the Carte XML configuration file.
* The response contains an <code>ERROR</code> result if an error occurred during job execution.</p>
*
* <p><b>Example Request:</b><br />
* <pre function="syntax.xml">
* GET /kettle/runJob?job=home%2Fadmin%2Fdummy_job&level=Debug
* </pre>
*
* </p>
* <h3>Parameters</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>name</th>
* <th>description</th>
* <th>type</th>
* </tr>
* <tr>
* <td>job</td>
* <td>Full path to the job in repository.</td>
* <td>query</td>
* </tr>
* <tr>
* <td>level</td>
* <td>Logging level to be used for job execution (e.g. Debug).</td>
* <td>query</td>
* </tr>
* </tbody>
* </table>
*
* <h3>Response Body</h3>
*
* <table class="pentaho-table">
* <tbody>
* <tr>
* <td align="right">element:</td>
* <td>(custom)</td>
* </tr>
* <tr>
* <td align="right">media types:</td>
* <td>text/xml</td>
* </tr>
* </tbody>
* </table>
* <p>The response contains the result of the operation: either <code>OK</code> or <code>ERROR</code>.
* If an error occurred during job execution, the response also contains information about the error.</p>
*
* <p><b>Example Response:</b></p>
* <pre function="syntax.xml">
* <webresult>
* <result>OK</result>
* <message>Job started</message>
* <id>05d919b0-74a3-48d6-84d8-afce359d0449</id>
* </webresult>
* </pre>
*
* <h3>Status Codes</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>code</th>
* <th>description</th>
* </tr>
* <tr>
* <td>200</td>
* <td>Request was processed.</td>
* </tr>
* <tr>
* <td>500</td>
* <td>An internal server error occurred during request processing.</td>
* </tr>
* </tbody>
*</table>
*</div>
*/
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if (isJettyMode() && !request.getContextPath().startsWith(CONTEXT_PATH)) {
return;
}
if (log.isDebug()) {
logDebug(BaseMessages.getString(PKG, "RunJobServlet.Log.RunJobRequested"));
}
// Options taken from PAN
//
String[] knownOptions = new String[] { "job", "level" };
String transOption = request.getParameter("job");
String levelOption = request.getParameter("level");
response.setStatus(HttpServletResponse.SC_OK);
PrintWriter out = response.getWriter();
try {
SlaveServerConfig serverConfig = transformationMap.getSlaveServerConfig();
Repository slaveServerRepository = serverConfig.getRepository();
if (slaveServerRepository == null) {
throw new KettleException("Unable to connect to repository in Slave Server Config: " + serverConfig.getRepositoryId());
}
final JobMeta jobMeta = loadJob(slaveServerRepository, transOption);
// Set the servlet parameters as variables (or named parameters) in the job
//
String[] parameters = jobMeta.listParameters();
Enumeration<?> parameterNames = request.getParameterNames();
while (parameterNames.hasMoreElements()) {
String parameter = (String) parameterNames.nextElement();
String[] values = request.getParameterValues(parameter);
//
if (Const.indexOfString(parameter, knownOptions) < 0) {
//
if (Const.indexOfString(parameter, parameters) < 0) {
jobMeta.setVariable(parameter, values[0]);
} else {
jobMeta.setParameterValue(parameter, values[0]);
}
}
}
JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
LogLevel logLevel = LogLevel.getLogLevelForCode(levelOption);
jobExecutionConfiguration.setLogLevel(logLevel);
// Create new repository connection for this job
//
final Repository repository = jobExecutionConfiguration.connectRepository(serverConfig.getRepositoryId(), serverConfig.getRepositoryUsername(), serverConfig.getRepositoryPassword());
JobConfiguration jobConfiguration = new JobConfiguration(jobMeta, jobExecutionConfiguration);
String carteObjectId = UUID.randomUUID().toString();
SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
servletLoggingObject.setContainerObjectId(carteObjectId);
servletLoggingObject.setLogLevel(logLevel);
// Create the job and store it in the list...
//
final Job job = new Job(repository, jobMeta, servletLoggingObject);
// Setting variables
//
job.initializeVariablesFrom(null);
job.getJobMeta().setInternalKettleVariables(job);
job.injectVariables(jobConfiguration.getJobExecutionConfiguration().getVariables());
// Also copy the parameters over...
//
job.copyParametersFrom(jobMeta);
job.clearParameters();
/*
* String[] parameterNames = job.listParameters(); for (int idx = 0; idx < parameterNames.length; idx++) { // Grab
* the parameter value set in the job entry // String thisValue =
* jobExecutionConfiguration.getParams().get(parameterNames[idx]); if (!Utils.isEmpty(thisValue)) { // Set the
* value as specified by the user in the job entry // jobMeta.setParameterValue(parameterNames[idx], thisValue); }
* }
*/
jobMeta.activateParameters();
job.setSocketRepository(getSocketRepository());
JobMap jobMap = getJobMap();
jobMap.addJob(job.getJobname(), carteObjectId, job, jobConfiguration);
// Disconnect from the job's repository when the job finishes.
//
job.addJobListener(new JobAdapter() {
public void jobFinished(Job job) {
repository.disconnect();
}
});
String message = "Job '" + job.getJobname() + "' was added to the list with id " + carteObjectId;
logBasic(message);
try {
runJob(job);
WebResult webResult = new WebResult(WebResult.STRING_OK, "Job started", carteObjectId);
out.println(webResult.getXML());
out.flush();
} catch (Exception executionException) {
String logging = KettleLogStore.getAppender().getBuffer(job.getLogChannelId(), false).toString();
throw new KettleException("Error executing Job: " + logging, executionException);
}
} catch (Exception ex) {
out.println(new WebResult(WebResult.STRING_ERROR, BaseMessages.getString(PKG, "RunJobServlet.Error.UnexpectedError", Const.CR + Const.getStackTracker(ex))));
}
}
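For reference, here is a minimal sketch of calling this endpoint from a Java client. The host, port and the cluster/cluster credentials are assumptions (the usual defaults of a locally started Carte instance), and the job path /home/admin/dummy_job is hypothetical.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class RunJobClient {

  public static void main(String[] args) throws Exception {
    // URL-encode the repository path of the job, as in the example request above
    String jobPath = URLEncoder.encode("/home/admin/dummy_job", StandardCharsets.UTF_8.name());
    URL url = new URL("http://localhost:8080/kettle/runJob?job=" + jobPath + "&level=Debug");
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    // Carte uses HTTP basic authentication; cluster/cluster is the usual default
    String auth = Base64.getEncoder().encodeToString("cluster:cluster".getBytes(StandardCharsets.UTF_8));
    connection.setRequestProperty("Authorization", "Basic " + auth);
    // The servlet answers with a <webresult> containing OK/ERROR and the Carte object id
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}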
Use of org.pentaho.di.job.JobAdapter in project pentaho-kettle by pentaho.
The class AddJobServlet, method doGet.
/**
*
* <div id="mindtouch">
* <h1>/kettle/addJob</h1>
* <a name="POST"></a>
* <h2>POST</h2>
* <p>Uploads and executes a job configuration XML file.
* The request body contains an xml document with the job and its job_execution_configuration,
* wrapped in a job_configuration tag. The xml parameter determines whether an xml or html
* reply is produced.
*
* The name of the executed job will be returned in the response,
* or a <code>message</code> describing the error that occurred. To determine whether the call
* was successful, rely on the <code>result</code> element of the response.</p>
*
* <p><b>Example Request:</b><br />
* <pre function="syntax.xml">
* POST /kettle/addJob/?xml=Y
* </pre>
* <p>The request body should contain the job_configuration xml (a job and a job_execution_configuration
* wrapped in a job_configuration tag).</p>
* </p>
* <h3>Parameters</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>name</th>
* <th>description</th>
* <th>type</th>
* </tr>
* <tr>
* <td>xml</td>
* <td>Boolean flag set to either <code>Y</code> or <code>N</code> describing whether an xml or html reply
* should be produced.</td>
* <td>boolean, optional</td>
* </tr>
* </tbody>
* </table>
*
* <h3>Response Body</h3>
*
* <table class="pentaho-table">
* <tbody>
* <tr>
* <td align="right">element:</td>
* <td>(custom)</td>
* </tr>
* <tr>
* <td align="right">media types:</td>
* <td>text/xml, text/html</td>
* </tr>
* </tbody>
* </table>
* <p>The response wraps the name of the executed job, or the error stack trace if an error occurred.
* The <code>result</code> element is <code>OK</code> if there were no errors, and <code>ERROR</code> otherwise.</p>
*
* <p><b>Example Response:</b></p>
* <pre function="syntax.xml">
* <?xml version="1.0" encoding="UTF-8"?>
* <webresult>
* <result>OK</result>
* <message>Job 'dummy_job' was added to the list with id 1e90eca8-4d4c-47f7-8e5c-99ec36525e7c</message>
* <id>1e90eca8-4d4c-47f7-8e5c-99ec36525e7c</id>
* </webresult>
* </pre>
*
* <h3>Status Codes</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>code</th>
* <th>description</th>
* </tr>
* <tr>
* <td>200</td>
* <td>Request was processed and an XML response is returned.</td>
* </tr>
* <tr>
* <td>500</td>
* <td>An internal server error occurred during request processing.</td>
* </tr>
* </tbody>
*</table>
*</div>
*/
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if (isJettyMode() && !request.getRequestURI().startsWith(CONTEXT_PATH)) {
return;
}
if (log.isDebug()) {
logDebug("Addition of job requested");
}
boolean useXML = "Y".equalsIgnoreCase(request.getParameter("xml"));
PrintWriter out = response.getWriter();
// read from the client
BufferedReader in = request.getReader();
if (log.isDetailed()) {
logDetailed("Encoding: " + request.getCharacterEncoding());
}
if (useXML) {
response.setContentType("text/xml");
out.print(XMLHandler.getXMLHeader());
} else {
response.setContentType("text/html");
out.println("<HTML>");
out.println("<HEAD><TITLE>Add job</TITLE></HEAD>");
out.println("<BODY>");
}
response.setStatus(HttpServletResponse.SC_OK);
try {
// First read the complete job definition into memory from the request
int c;
StringBuilder xml = new StringBuilder();
while ((c = in.read()) != -1) {
xml.append((char) c);
}
// Parse the XML, create a job configuration
//
// System.out.println(xml);
//
JobConfiguration jobConfiguration = JobConfiguration.fromXML(xml.toString());
JobMeta jobMeta = jobConfiguration.getJobMeta();
JobExecutionConfiguration jobExecutionConfiguration = jobConfiguration.getJobExecutionConfiguration();
jobMeta.setLogLevel(jobExecutionConfiguration.getLogLevel());
jobMeta.injectVariables(jobExecutionConfiguration.getVariables());
// If there was a repository, we know about it at this point in time.
//
final Repository repository = jobConfiguration.getJobExecutionConfiguration().getRepository();
String carteObjectId = UUID.randomUUID().toString();
SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
servletLoggingObject.setContainerObjectId(carteObjectId);
servletLoggingObject.setLogLevel(jobExecutionConfiguration.getLogLevel());
// Create the job and store it in the list...
//
final Job job = new Job(repository, jobMeta, servletLoggingObject);
// Setting variables
//
job.initializeVariablesFrom(null);
job.getJobMeta().setInternalKettleVariables(job);
job.injectVariables(jobConfiguration.getJobExecutionConfiguration().getVariables());
job.setArguments(jobExecutionConfiguration.getArgumentStrings());
// Also copy the parameters over...
//
job.copyParametersFrom(jobMeta);
job.clearParameters();
String[] parameterNames = job.listParameters();
for (int idx = 0; idx < parameterNames.length; idx++) {
// Grab the parameter value set in the job entry
//
String thisValue = jobExecutionConfiguration.getParams().get(parameterNames[idx]);
if (!Utils.isEmpty(thisValue)) {
// Set the value as specified by the user in the job entry
//
jobMeta.setParameterValue(parameterNames[idx], thisValue);
}
}
jobMeta.activateParameters();
// Check if there is a starting point specified.
String startCopyName = jobExecutionConfiguration.getStartCopyName();
if (startCopyName != null && !startCopyName.isEmpty()) {
int startCopyNr = jobExecutionConfiguration.getStartCopyNr();
JobEntryCopy startJobEntryCopy = jobMeta.findJobEntry(startCopyName, startCopyNr, false);
job.setStartJobEntryCopy(startJobEntryCopy);
}
job.setSocketRepository(getSocketRepository());
//
if (jobExecutionConfiguration.isExpandingRemoteJob()) {
job.addDelegationListener(new CarteDelegationHandler(getTransformationMap(), getJobMap()));
}
getJobMap().addJob(job.getJobname(), carteObjectId, job, jobConfiguration);
//
if (repository != null) {
job.addJobListener(new JobAdapter() {
public void jobFinished(Job job) {
repository.disconnect();
}
});
}
String message = "Job '" + job.getJobname() + "' was added to the list with id " + carteObjectId;
if (useXML) {
out.println(new WebResult(WebResult.STRING_OK, message, carteObjectId));
} else {
out.println("<H1>" + message + "</H1>");
out.println("<p><a href=\"" + convertContextPath(GetJobStatusServlet.CONTEXT_PATH) + "?name=" + job.getJobname() + "&id=" + carteObjectId + "\">Go to the job status page</a><p>");
}
} catch (Exception ex) {
if (useXML) {
out.println(new WebResult(WebResult.STRING_ERROR, Const.getStackTracker(ex)));
} else {
out.println("<p>");
out.println("<pre>");
ex.printStackTrace(out);
out.println("</pre>");
}
}
if (!useXML) {
out.println("<p>");
out.println("</BODY>");
out.println("</HTML>");
}
}
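For reference, a minimal sketch of posting a job_configuration document to this endpoint from Java. The file name job_configuration.xml, the host/port and the cluster/cluster credentials are assumptions; in Kettle itself the document is normally produced from a JobMeta plus a JobExecutionConfiguration.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Base64;

public class AddJobClient {

  public static void main(String[] args) throws Exception {
    // A job_configuration document: job + job_execution_configuration wrapped in job_configuration
    String jobConfigurationXml = new String(
        Files.readAllBytes(Paths.get("job_configuration.xml")), StandardCharsets.UTF_8);
    URL url = new URL("http://localhost:8080/kettle/addJob/?xml=Y");
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod("POST");
    connection.setDoOutput(true);
    connection.setRequestProperty("Content-Type", "text/xml;charset=UTF-8");
    String auth = Base64.getEncoder().encodeToString("cluster:cluster".getBytes(StandardCharsets.UTF_8));
    connection.setRequestProperty("Authorization", "Basic " + auth);
    try (OutputStream body = connection.getOutputStream()) {
      body.write(jobConfigurationXml.getBytes(StandardCharsets.UTF_8));
    }
    // With xml=Y the reply is a <webresult> carrying the Carte object id of the registered job
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}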
Use of org.pentaho.di.job.JobAdapter in project pentaho-kettle by pentaho.
The class BaseJobServlet, method createJob.
protected Job createJob(JobConfiguration jobConfiguration) throws UnknownParamException {
JobExecutionConfiguration jobExecutionConfiguration = jobConfiguration.getJobExecutionConfiguration();
JobMeta jobMeta = jobConfiguration.getJobMeta();
jobMeta.setLogLevel(jobExecutionConfiguration.getLogLevel());
jobMeta.injectVariables(jobExecutionConfiguration.getVariables());
// If there was a repository, we know about it at this point in time.
final Repository repository = jobConfiguration.getJobExecutionConfiguration().getRepository();
String carteObjectId = UUID.randomUUID().toString();
SimpleLoggingObject servletLoggingObject = getServletLogging(carteObjectId, jobExecutionConfiguration.getLogLevel());
// Create the job and store it in the list...
final Job job = new Job(repository, jobMeta, servletLoggingObject);
// Setting variables
job.initializeVariablesFrom(null);
job.getJobMeta().setMetaStore(jobMap.getSlaveServerConfig().getMetaStore());
job.getJobMeta().setInternalKettleVariables(job);
job.injectVariables(jobConfiguration.getJobExecutionConfiguration().getVariables());
job.setArguments(jobExecutionConfiguration.getArgumentStrings());
job.setSocketRepository(getSocketRepository());
copyJobParameters(job, jobExecutionConfiguration.getParams());
// Check if there is a starting point specified.
String startCopyName = jobExecutionConfiguration.getStartCopyName();
if (startCopyName != null && !startCopyName.isEmpty()) {
int startCopyNr = jobExecutionConfiguration.getStartCopyNr();
JobEntryCopy startJobEntryCopy = jobMeta.findJobEntry(startCopyName, startCopyNr, false);
job.setStartJobEntryCopy(startJobEntryCopy);
}
// Note: the plugin (Job and Trans) job entries need to call the delegation listeners in the parent job.
if (jobExecutionConfiguration.isExpandingRemoteJob()) {
job.addDelegationListener(new CarteDelegationHandler(getTransformationMap(), getJobMap()));
}
// Make sure to disconnect from the repository when the job finishes.
if (repository != null) {
job.addJobListener(new JobAdapter() {
public void jobFinished(Job job) {
repository.disconnect();
}
});
}
getJobMap().addJob(job.getJobname(), carteObjectId, job, jobConfiguration);
final Long passedBatchId = jobExecutionConfiguration.getPassedBatchId();
if (passedBatchId != null) {
job.setPassedBatchId(passedBatchId);
}
return job;
}
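All of these call sites rely on the same pattern: JobAdapter supplies empty implementations of the JobListener callbacks, so a caller can override only the method it needs. A minimal sketch of that pattern outside the servlets, using only the Job/JobAdapter API already shown above (the log messages are illustrative):

import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobAdapter;

public class LoggingJobListener extends JobAdapter {

  @Override
  public void jobStarted(Job job) {
    System.out.println("Job '" + job.getJobname() + "' started");
  }

  @Override
  public void jobFinished(Job job) {
    // This is the hook where the servlets above disconnect the repository
    System.out.println("Job '" + job.getJobname() + "' finished");
  }
}

// Registered the same way as in createJob():
// job.addJobListener(new LoggingJobListener());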
Use of org.pentaho.di.job.JobAdapter in project pentaho-kettle by pentaho.
The class ExecuteJobServlet, method doGet.
/**
* <div id="mindtouch">
* <h1>/kettle/executeJob</h1>
* <a name="GET"></a>
* <h2>GET</h2>
* <p>Executes a job from the specified repository.
* Connects to the repository provided as a parameter, loads the job from it and executes it.
* Either an empty response is returned, or the response contains the output of an error that occurred
* during the job execution. The response contains an <code>ERROR</code> result if an error occurred during job execution.</p>
*
* <p><b>Example Request:</b><br />
* <pre function="syntax.xml">
* GET /kettle/executeJob/?rep=my_repository&user=my_user&pass=my_password&job=my_job&level=INFO
* </pre>
*
* </p>
* <h3>Parameters</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>name</th>
* <th>description</th>
* <th>type</th>
* </tr>
* <tr>
* <td>rep</td>
* <td>Repository id to connect to.</td>
* <td>query</td>
* </tr>
* <tr>
* <td>user</td>
* <td>User name to be used to connect to the repository.</td>
* <td>query</td>
* </tr>
* <tr>
* <td>pass</td>
* <td>User password to be used to connect to the repository.</td>
* <td>query</td>
* </tr>
* <tr>
* <td>job</td>
* <td>Job name to be loaded and executed.</td>
* <td>query</td>
* </tr>
* <tr>
* <td>level</td>
* <td>Logging level to be used for job execution (e.g. Debug).</td>
* <td>query</td>
* </tr>
* <tr>
* <td>*any name*</td>
* <td>All other parameters are passed to the job as variables.
* When necessary, you can add custom parameters to the request;
* they will be used to set the values of the job variables.</td>
* <td>query</td>
* </tr>
* </tbody>
* </table>
*
* <h3>Response Body</h3>
*
* <table class="pentaho-table">
* <tbody>
* <tr>
* <td align="right">element:</td>
* <td>(custom)</td>
* </tr>
* <tr>
* <td align="right">media types:</td>
* <td>application/xml</td>
* </tr>
* </tbody>
* </table>
* <p>The response contains the error output of the executed job, or the Carte object id
* if the execution was successful.</p>
*
* <p><b>Example Response:</b></p>
* <pre function="syntax.xml">
* <webresult>
* <result>OK</result>
* <message>Job started</message>
* <id>74d96aa6-f29a-4bac-a26a-06a8c8f107e5</id>
* </webresult>
* </pre>
*
* <h3>Status Codes</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>code</th>
* <th>description</th>
* </tr>
* <tr>
* <td>200</td>
* <td>Request was processed.</td>
* </tr>
* <tr>
* <td>500</td>
* <td>An internal server error occurred during request processing.</td>
* </tr>
* </tbody>
*</table>
*</div>
*/
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if (isJettyMode() && !request.getContextPath().startsWith(CONTEXT_PATH)) {
return;
}
if (log.isDebug()) {
logDebug(BaseMessages.getString(PKG, "ExecuteJobServlet.Log.ExecuteJobRequested"));
}
// Options taken from PAN
//
String[] knownOptions = new String[] { "rep", "user", "pass", "job", "level" };
String repOption = request.getParameter("rep");
String userOption = request.getParameter("user");
String passOption = Encr.decryptPasswordOptionallyEncrypted(request.getParameter("pass"));
String jobOption = request.getParameter("job");
String levelOption = request.getParameter("level");
response.setStatus(HttpServletResponse.SC_OK);
String encoding = System.getProperty("KETTLE_DEFAULT_SERVLET_ENCODING", null);
if (encoding != null && !Utils.isEmpty(encoding.trim())) {
response.setCharacterEncoding(encoding);
response.setContentType("text/html; charset=" + encoding);
}
PrintWriter out = response.getWriter();
try {
final Repository repository = openRepository(repOption, userOption, passOption);
final JobMeta jobMeta = loadJob(repository, jobOption);
// Set the servlet parameters as variables in the job
//
String[] parameters = jobMeta.listParameters();
Enumeration<?> parameterNames = request.getParameterNames();
while (parameterNames.hasMoreElements()) {
String parameter = (String) parameterNames.nextElement();
String[] values = request.getParameterValues(parameter);
//
if (Const.indexOfString(parameter, knownOptions) < 0) {
//
if (Const.indexOfString(parameter, parameters) < 0) {
jobMeta.setVariable(parameter, values[0]);
} else {
jobMeta.setParameterValue(parameter, values[0]);
}
}
}
JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
LogLevel logLevel = LogLevel.getLogLevelForCode(levelOption);
jobExecutionConfiguration.setLogLevel(logLevel);
JobConfiguration jobConfiguration = new JobConfiguration(jobMeta, jobExecutionConfiguration);
String carteObjectId = UUID.randomUUID().toString();
SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
servletLoggingObject.setContainerObjectId(carteObjectId);
servletLoggingObject.setLogLevel(logLevel);
// Create the job and store in the list...
//
final Job job = new Job(repository, jobMeta, servletLoggingObject);
job.setRepository(repository);
job.setSocketRepository(getSocketRepository());
getJobMap().addJob(jobMeta.getName(), carteObjectId, job, jobConfiguration);
job.setContainerObjectId(carteObjectId);
if (repository != null) {
// The repository connection is open: make sure we disconnect from the repository once we
// are done with this job.
//
job.addJobListener(new JobAdapter() {
public void jobFinished(Job job) {
repository.disconnect();
}
});
}
try {
runJob(job);
WebResult webResult = new WebResult(WebResult.STRING_OK, "Job started", carteObjectId);
out.println(webResult.getXML());
out.flush();
} catch (Exception executionException) {
String logging = KettleLogStore.getAppender().getBuffer(job.getLogChannelId(), false).toString();
throw new KettleException("Error executing job: " + logging, executionException);
}
} catch (Exception ex) {
out.println(new WebResult(WebResult.STRING_ERROR, BaseMessages.getString(PKG, "ExecuteJobServlet.Error.UnexpectedError", Const.CR + Const.getStackTracker(ex))));
}
}
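For reference, a minimal sketch of assembling a request URL for this endpoint. The repository name, credentials, job name and the MY_VARIABLE parameter are hypothetical; as the parameter loop above shows, any query parameter outside rep/user/pass/job/level is applied to the job as a variable, or as a named parameter if the job declares one with that name.

import java.net.URL;
import java.net.URLEncoder;

public class ExecuteJobUrlExample {

  public static void main(String[] args) throws Exception {
    String query = "rep=" + URLEncoder.encode("my_repository", "UTF-8")
        + "&user=" + URLEncoder.encode("my_user", "UTF-8")
        + "&pass=" + URLEncoder.encode("my_password", "UTF-8")
        + "&job=" + URLEncoder.encode("my_job", "UTF-8")
        + "&level=Debug"
        // extra parameters become job variables (or named parameters)
        + "&MY_VARIABLE=" + URLEncoder.encode("some value", "UTF-8");
    URL url = new URL("http://localhost:8080/kettle/executeJob/?" + query);
    System.out.println(url);
  }
}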
Use of org.pentaho.di.job.JobAdapter in project pentaho-kettle by pentaho.
The class JobResource, method addJob.
@PUT
@Path("/add")
@Produces({ MediaType.APPLICATION_JSON })
public JobStatus addJob(String xml) {
// Parse the XML, create a job configuration
//
// System.out.println(xml);
//
JobConfiguration jobConfiguration;
try {
jobConfiguration = JobConfiguration.fromXML(xml.toString());
JobMeta jobMeta = jobConfiguration.getJobMeta();
JobExecutionConfiguration jobExecutionConfiguration = jobConfiguration.getJobExecutionConfiguration();
jobMeta.setLogLevel(jobExecutionConfiguration.getLogLevel());
jobMeta.injectVariables(jobExecutionConfiguration.getVariables());
// If there was a repository, we know about it at this point in time.
//
final Repository repository = jobConfiguration.getJobExecutionConfiguration().getRepository();
String carteObjectId = UUID.randomUUID().toString();
SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(getClass().getName(), LoggingObjectType.CARTE, null);
servletLoggingObject.setContainerObjectId(carteObjectId);
servletLoggingObject.setLogLevel(jobExecutionConfiguration.getLogLevel());
// Create the job and store it in the list...
//
final Job job = new Job(repository, jobMeta, servletLoggingObject);
// Setting variables
//
job.initializeVariablesFrom(null);
job.getJobMeta().setInternalKettleVariables(job);
job.injectVariables(jobConfiguration.getJobExecutionConfiguration().getVariables());
job.setArguments(jobExecutionConfiguration.getArgumentStrings());
// Also copy the parameters over...
//
job.copyParametersFrom(jobMeta);
job.clearParameters();
String[] parameterNames = job.listParameters();
for (int idx = 0; idx < parameterNames.length; idx++) {
// Grab the parameter value set in the job entry
//
String thisValue = jobExecutionConfiguration.getParams().get(parameterNames[idx]);
if (!Utils.isEmpty(thisValue)) {
// Set the value as specified by the user in the job entry
//
jobMeta.setParameterValue(parameterNames[idx], thisValue);
}
}
jobMeta.activateParameters();
job.setSocketRepository(CarteSingleton.getInstance().getSocketRepository());
CarteSingleton.getInstance().getJobMap().addJob(job.getJobname(), carteObjectId, job, jobConfiguration);
//
if (repository != null) {
job.addJobListener(new JobAdapter() {
public void jobFinished(Job job) {
repository.disconnect();
}
});
}
return getJobStatus(carteObjectId);
} catch (KettleException e) {
e.printStackTrace();
}
return null;
}
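For reference, a minimal sketch of driving this JAX-RS resource over HTTP. The base URL is an assumption: the exact path under which JobResource is mounted depends on its @Path annotations and the Carte/Jersey configuration of your server, and the credentials and payload are placeholders.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class JobResourceAddExample {

  public static void main(String[] args) throws Exception {
    String jobConfigurationXml = "<job_configuration>...</job_configuration>"; // placeholder document
    // Hypothetical base path; adjust to where JobResource is actually mounted on your Carte server
    URL url = new URL("http://localhost:8080/kettle/carte/job/add");
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod("PUT");
    connection.setDoOutput(true);
    connection.setRequestProperty("Accept", "application/json");
    String auth = Base64.getEncoder().encodeToString("cluster:cluster".getBytes(StandardCharsets.UTF_8));
    connection.setRequestProperty("Authorization", "Basic " + auth);
    try (OutputStream body = connection.getOutputStream()) {
      body.write(jobConfigurationXml.getBytes(StandardCharsets.UTF_8));
    }
    // addJob() returns the JobStatus of the registered job, serialized as JSON
    System.out.println("HTTP " + connection.getResponseCode());
  }
}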