Use of org.pentaho.di.job.JobExecutionConfiguration in the pentaho-kettle project (by Pentaho).
Class SpoonJobDelegate, method executeJob:
/**
 * Configures and launches a job, either locally or on a remote slave server.
 * <p>
 * Builds a {@link JobExecutionConfiguration} from the current Spoon state (variables,
 * repository, log level, start point), optionally shows the execution dialog, applies
 * the resulting configuration to the job metadata, and starts execution.
 *
 * @param jobMeta       the job to execute; a null value is silently ignored
 * @param local         requested local execution (currently superseded by the dialog's choice)
 * @param remote        requested remote execution (currently superseded by the dialog's choice)
 * @param replayDate    replay date for the run, or null
 * @param safe          whether to enable safe mode
 * @param startCopyName name of the job entry to start from, or null for the start entry
 * @param startCopyNr   copy number of the job entry to start from
 * @throws KettleException if remote dispatch or an extension point fails fatally
 */
public void executeJob(JobMeta jobMeta, boolean local, boolean remote, Date replayDate, boolean safe, String startCopyName, int startCopyNr) throws KettleException {
    if (jobMeta == null) {
        return;
    }
    JobExecutionConfiguration executionConfiguration = spoon.getJobExecutionConfiguration();
    // Remember the variables set previously.
    //
    // FIX: data[idx] may be null for a variable without a value; the previous code called
    // toString() on it unconditionally and could throw a NullPointerException. Null-valued
    // entries are now skipped instead of aborting the whole execution.
    Object[] data = spoon.variables.getData();
    String[] fields = spoon.variables.getRowMeta().getFieldNames();
    Map<String, String> variableMap = new HashMap<>();
    for (int idx = 0; idx < fields.length; idx++) {
        if (data[idx] != null) {
            variableMap.put(fields[idx], data[idx].toString());
        }
    }
    executionConfiguration.setVariables(variableMap);
    executionConfiguration.getUsedVariables(jobMeta);
    executionConfiguration.setReplayDate(replayDate);
    executionConfiguration.setRepository(spoon.rep);
    executionConfiguration.setSafeModeEnabled(safe);
    executionConfiguration.setStartCopyName(startCopyName);
    executionConfiguration.setStartCopyNr(startCopyNr);
    executionConfiguration.getUsedArguments(jobMeta, spoon.getArguments(), spoon.getMetaStore());
    executionConfiguration.setLogLevel(DefaultLogLevel.getLogLevel());
    JobExecutionConfigurationDialog dialog = newJobExecutionConfigurationDialog(executionConfiguration, jobMeta);
    // Only proceed when the dialog is suppressed or the user confirmed it.
    if (!jobMeta.isShowDialog() || dialog.open()) {
        // NOTE(review): getActiveJobGraph() is assumed non-null here — TODO confirm a
        // job graph is always active when this delegate method is invoked.
        JobGraph jobGraph = spoon.getActiveJobGraph();
        jobGraph.jobLogDelegate.addJobLog();
        // Set the variables the user chose in the dialog onto the job metadata.
        //
        for (String varName : executionConfiguration.getVariables().keySet()) {
            String varValue = executionConfiguration.getVariables().get(varName);
            jobMeta.setVariable(varName, varValue);
        }
        // Set and activate the parameters chosen in the dialog.
        //
        for (String paramName : executionConfiguration.getParams().keySet()) {
            String paramValue = executionConfiguration.getParams().get(paramName);
            jobMeta.setParameterValue(paramName, paramValue);
        }
        jobMeta.activateParameters();
        // Set the log level, if one was selected.
        //
        if (executionConfiguration.getLogLevel() != null) {
            jobMeta.setLogLevel(executionConfiguration.getLogLevel());
        }
        // Set the start step, if one was selected.
        //
        if (executionConfiguration.getStartCopyName() != null) {
            jobMeta.setStartCopyName(executionConfiguration.getStartCopyName());
        }
        // Set the run options
        //
        jobMeta.setClearingLog(executionConfiguration.isClearingLog());
        jobMeta.setSafeModeEnabled(executionConfiguration.isSafeModeEnabled());
        jobMeta.setExpandingRemoteJob(executionConfiguration.isExpandingRemoteJob());
        ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonJobMetaExecutionStart.id, jobMeta);
        ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonJobExecutionConfiguration.id, executionConfiguration);
        try {
            ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] { executionConfiguration, jobMeta, jobMeta, spoon.getRepository() });
        } catch (KettleException e) {
            // An extension point vetoed the start; log and abort without executing.
            log.logError(e.getMessage(), jobMeta.getFilename());
            return;
        }
        // Neither local nor remote execution was chosen: remind the user to save instead.
        if (!executionConfiguration.isExecutingLocally() && !executionConfiguration.isExecutingRemotely()) {
            if (jobMeta.hasChanged()) {
                jobGraph.showSaveFileMessage();
            }
        }
        // Dispatch the actual execution.
        //
        if (executionConfiguration.isExecutingLocally()) {
            jobGraph.startJob(executionConfiguration);
        } else if (executionConfiguration.isExecutingRemotely()) {
            // Executing remotely
            // Check if jobMeta has changed
            jobGraph.handleJobMetaChanges(jobMeta);
            // Activate the parameters, turn them into variables...
            // jobMeta.hasChanged()
            jobMeta.activateParameters();
            if (executionConfiguration.getRemoteServer() != null) {
                Job.sendToSlaveServer(jobMeta, executionConfiguration, spoon.rep, spoon.metaStore);
                spoon.delegates.slaves.addSpoonSlave(executionConfiguration.getRemoteServer());
            } else {
                // Remote execution requested but no server selected: tell the user.
                MessageBox mb = new MessageBox(spoon.getShell(), SWT.OK | SWT.ICON_ERROR);
                mb.setMessage(BaseMessages.getString(PKG, "Spoon.Dialog.NoRemoteServerSpecified.Message"));
                mb.setText(BaseMessages.getString(PKG, "Spoon.Dialog.NoRemoteServerSpecified.Title"));
                mb.open();
            }
        }
    }
}
Use of org.pentaho.di.job.JobExecutionConfiguration in the pentaho-kettle project (by Pentaho).
Class JobEntryJobDialog, method getInfo:
/**
 * Copies the dialog's widget state into the given job entry.
 * <p>
 * Determines the job location (repository path vs. filename), collects the
 * argument and parameter grids, transfers logging/run options, and notifies
 * the relevant extension points.
 *
 * @param jej the job entry to populate from the dialog widgets
 */
private void getInfo(JobEntryJob jej) {
    jej.setName(wName.getText());
    // Connected to a repository -> locate the job by repository path,
    // otherwise fall back to a plain filename.
    if (rep != null) {
        specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME;
    } else {
        specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
    }
    jej.setSpecificationMethod(specificationMethod);
    switch (specificationMethod) {
        case FILENAME:
            jej.setFileName(wPath.getText());
            jej.setDirectory(null);
            jej.setJobName(null);
            jej.setJobObjectId(null);
            break;
        case REPOSITORY_BY_NAME:
            // Split "dir/subdir/jobName" into directory and job name; a path
            // without '/' is treated as a job name with an empty directory.
            String jobPath = wPath.getText();
            String jobName = jobPath;
            String directory = "";
            int index = jobPath.lastIndexOf("/");
            if (index != -1) {
                jobName = jobPath.substring(index + 1);
                directory = index == 0 ? "/" : jobPath.substring(0, index);
            }
            jej.setDirectory(directory);
            jej.setJobName(jobName);
            jej.setFileName(null);
            jej.setJobObjectId(null);
            break;
        default:
            break;
    }
    // Do the arguments: first count the non-empty entries, then fill the array.
    int nritems = wFields.nrNonEmpty();
    int nr = 0;
    for (int i = 0; i < nritems; i++) {
        String arg = wFields.getNonEmpty(i).getText(1);
        if (arg != null && arg.length() != 0) {
            nr++;
        }
    }
    jej.arguments = new String[nr];
    nr = 0;
    for (int i = 0; i < nritems; i++) {
        String arg = wFields.getNonEmpty(i).getText(1);
        if (arg != null && arg.length() != 0) {
            jej.arguments[nr] = arg;
            nr++;
        }
    }
    // Do the parameters: arrays are sized by the number of rows with a
    // non-empty parameter name (column 1).
    nritems = wParameters.nrNonEmpty();
    nr = 0;
    for (int i = 0; i < nritems; i++) {
        String param = wParameters.getNonEmpty(i).getText(1);
        if (param != null && param.length() != 0) {
            nr++;
        }
    }
    jej.parameters = new String[nr];
    jej.parameterFieldNames = new String[nr];
    jej.parameterValues = new String[nr];
    nr = 0;
    for (int i = 0; i < nritems; i++) {
        String param = wParameters.getNonEmpty(i).getText(1);
        // FIX: the fill loop must apply the same non-empty filter as the count
        // loop above. Previously every non-empty grid row was written, so a row
        // with an empty name but a non-empty field/value column overran the
        // arrays (ArrayIndexOutOfBoundsException). Rows with an empty parameter
        // name are now skipped, mirroring the arguments loop.
        if (param == null || param.length() == 0) {
            continue;
        }
        String fieldName = wParameters.getNonEmpty(i).getText(2);
        String value = wParameters.getNonEmpty(i).getText(3);
        jej.parameters[nr] = param;
        // Normalize blank field names / values to empty strings.
        if (!Utils.isEmpty(Const.trim(fieldName))) {
            jej.parameterFieldNames[nr] = fieldName;
        } else {
            jej.parameterFieldNames[nr] = "";
        }
        if (!Utils.isEmpty(Const.trim(value))) {
            jej.parameterValues[nr] = value;
        } else {
            jej.parameterValues[nr] = "";
        }
        nr++;
    }
    jej.setPassingAllParameters(wPassParams.getSelection());
    // Logging options.
    jej.setLogfile = wSetLogfile.getSelection();
    jej.addDate = wAddDate.getSelection();
    jej.addTime = wAddTime.getSelection();
    jej.logfile = wLogfile.getText();
    jej.logext = wLogext.getText();
    if (wLoglevel.getSelectionIndex() >= 0) {
        jej.logFileLevel = LogLevel.values()[wLoglevel.getSelectionIndex()];
    } else {
        // No explicit selection: default to BASIC.
        jej.logFileLevel = LogLevel.BASIC;
    }
    // Run options.
    jej.argFromPrevious = wPrevious.getSelection();
    jej.paramsFromPrevious = wPrevToParams.getSelection();
    jej.execPerRow = wEveryRow.getSelection();
    jej.setPassingExport(wPassExport.getSelection());
    jej.setAppendLogfile = wAppendLogfile.getSelection();
    jej.setWaitingToFinish(wWaitingToFinish.getSelection());
    jej.createParentFolder = wCreateParentFolder.getSelection();
    jej.setFollowingAbortRemotely(wFollowingAbortRemotely.getSelection());
    jej.setExpandingRemoteJob(wExpandRemote.getSelection());
    jej.setRunConfiguration(wRunConfiguration.getText());
    // Let extension points react to the chosen run configuration; failures here
    // are deliberately best-effort and must not block saving the dialog.
    JobExecutionConfiguration executionConfiguration = new JobExecutionConfiguration();
    executionConfiguration.setRunConfiguration(jej.getRunConfiguration());
    try {
        ExtensionPointHandler.callExtensionPoint(jobEntry.getLogChannel(), KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] { executionConfiguration, jobMeta, jobMeta, null });
    } catch (KettleException e) {
        // Ignore errors
    }
    try {
        ExtensionPointHandler.callExtensionPoint(jobEntry.getLogChannel(), KettleExtensionPoint.JobEntryTransSave.id, new Object[] { jobMeta, jej.getRunConfiguration() });
    } catch (KettleException e) {
        // Ignore errors
    }
    // An extension point may have resolved a remote server for this run config.
    if (executionConfiguration.getRemoteServer() != null) {
        jej.setRemoteSlaveServerName(executionConfiguration.getRemoteServer().getName());
    }
}
Use of org.pentaho.di.job.JobExecutionConfiguration in the pentaho-kettle project (by Pentaho).
Class DefaultRunConfigurationExecutorTest, method testExecuteRemoteNotFoundJob:
@Test
public void testExecuteRemoteNotFoundJob() throws Exception {
// Build a run configuration that demands remote execution on "Test Server".
DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
defaultRunConfiguration.setName("Default Configuration");
defaultRunConfiguration.setLocal(false);
defaultRunConfiguration.setRemote(true);
defaultRunConfiguration.setServer("Test Server");
JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
// Stub the meta so only a null server name resolves; the configured name
// "Test Server" will therefore not be found by the executor.
doReturn(slaveServer).when(abstractMeta).findSlaveServer(null);
try {
defaultRunConfigurationExecutor.execute(defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace, null);
// Reaching this line means the executor accepted a missing slave server.
fail();
} catch (KettleException e) {
// expected: the named remote server cannot be resolved
}
}
Use of org.pentaho.di.job.JobExecutionConfiguration in the pentaho-kettle project (by Pentaho).
Class DefaultRunConfigurationExecutorTest, method testExecutePentahoJob:
@Test
public void testExecutePentahoJob() throws Exception {
// A "Pentaho" run configuration is neither local nor remote; the executor
// should leave both execution flags cleared on the configuration.
DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration();
defaultRunConfiguration.setName("Default Configuration");
defaultRunConfiguration.setLocal(false);
defaultRunConfiguration.setPentaho(true);
defaultRunConfiguration.setRemote(false);
JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
defaultRunConfigurationExecutor.execute(defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace, null);
assertFalse(jobExecutionConfiguration.isExecutingLocally());
assertFalse(jobExecutionConfiguration.isExecutingRemotely());
}
Use of org.pentaho.di.job.JobExecutionConfiguration in the pentaho-kettle project (by Pentaho).
Class RunJobServlet, method doGet:
/**
* <div id="mindtouch">
* <h1>/kettle/runJob</h1>
* <a name="GET"></a>
* <h2>GET</h2>
* <p>Execute job from enterprise repository. Repository should be configured in Carte xml file.
* Response contains <code>ERROR</code> result if error happened during job execution.</p>
*
* <p><b>Example Request:</b><br />
* <pre function="syntax.xml">
* GET /kettle/runJob?job=home%2Fadmin%2Fdummy_job&level=Debug
* </pre>
*
* </p>
* <h3>Parameters</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>name</th>
* <th>description</th>
* <th>type</th>
* </tr>
* <tr>
* <td>job</td>
* <td>Full path to the job in repository.</td>
* <td>query</td>
* </tr>
* <tr>
* <td>level</td>
* <td>Logging level to be used for job execution (i.e. Debug).</td>
* <td>query</td>
* </tr>
* </tbody>
* </table>
*
* <h3>Response Body</h3>
*
* <table class="pentaho-table">
* <tbody>
* <tr>
* <td align="right">element:</td>
* <td>(custom)</td>
* </tr>
* <tr>
* <td align="right">media types:</td>
* <td>text/xml</td>
* </tr>
* </tbody>
* </table>
* <p>Response contains result of the operation. It is either <code>OK</code> or <code>ERROR</code>.
* If an error occurred during job execution, response also contains information about the error.</p>
*
* <p><b>Example Response:</b></p>
* <pre function="syntax.xml">
* <webresult>
* <result>OK</result>
* <message>Job started</message>
* <id>05d919b0-74a3-48d6-84d8-afce359d0449</id>
* </webresult>
* </pre>
*
* <h3>Status Codes</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>code</th>
* <th>description</th>
* </tr>
* <tr>
* <td>200</td>
* <td>Request was processed.</td>
* </tr>
* <tr>
* <td>500</td>
* <td>Internal server error occurs during request processing.</td>
* </tr>
* </tbody>
*</table>
*</div>
*/
// Carte servlet entry point: loads a job from the slave server's configured
// repository, applies request parameters as variables/parameters, registers the
// job in the job map, and starts it. See the javadoc table above for the
// request/response contract.
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// Ignore requests for other context paths when running under Jetty.
if (isJettyMode() && !request.getContextPath().startsWith(CONTEXT_PATH)) {
return;
}
if (log.isDebug()) {
logDebug(BaseMessages.getString(PKG, "RunJobServlet.Log.RunJobRequested"));
}
// Options taken from PAN
//
String[] knownOptions = new String[] { "job", "level" };
String transOption = request.getParameter("job");
String levelOption = request.getParameter("level");
// NOTE(review): status is set to 200 up front and never changed to 500 on the
// error path below, despite the javadoc's status-code table — confirm intended.
response.setStatus(HttpServletResponse.SC_OK);
PrintWriter out = response.getWriter();
try {
// A repository must be configured in the slave server config; jobs are
// loaded from it, not from the filesystem.
SlaveServerConfig serverConfig = transformationMap.getSlaveServerConfig();
Repository slaveServerRepository = serverConfig.getRepository();
if (slaveServerRepository == null) {
throw new KettleException("Unable to connect to repository in Slave Server Config: " + serverConfig.getRepositoryId());
}
final JobMeta jobMeta = loadJob(slaveServerRepository, transOption);
// Set the servlet parameters as variables in the transformation
//
String[] parameters = jobMeta.listParameters();
Enumeration<?> parameterNames = request.getParameterNames();
while (parameterNames.hasMoreElements()) {
String parameter = (String) parameterNames.nextElement();
String[] values = request.getParameterValues(parameter);
// Skip the known options ("job", "level"); everything else becomes either
// a named parameter (if declared on the job) or a plain variable.
if (Const.indexOfString(parameter, knownOptions) < 0) {
// Only the first value of a multi-valued request parameter is used.
if (Const.indexOfString(parameter, parameters) < 0) {
jobMeta.setVariable(parameter, values[0]);
} else {
jobMeta.setParameterValue(parameter, values[0]);
}
}
}
JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
LogLevel logLevel = LogLevel.getLogLevelForCode(levelOption);
jobExecutionConfiguration.setLogLevel(logLevel);
// Create new repository connection for this job
//
final Repository repository = jobExecutionConfiguration.connectRepository(serverConfig.getRepositoryId(), serverConfig.getRepositoryUsername(), serverConfig.getRepositoryPassword());
JobConfiguration jobConfiguration = new JobConfiguration(jobMeta, jobExecutionConfiguration);
// Each run gets a unique Carte object id used for tracking and logging.
String carteObjectId = UUID.randomUUID().toString();
SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
servletLoggingObject.setContainerObjectId(carteObjectId);
servletLoggingObject.setLogLevel(logLevel);
// Create the transformation and store in the list...
//
final Job job = new Job(repository, jobMeta, servletLoggingObject);
// Setting variables
//
job.initializeVariablesFrom(null);
job.getJobMeta().setInternalKettleVariables(job);
job.injectVariables(jobConfiguration.getJobExecutionConfiguration().getVariables());
// Also copy the parameters over...
//
job.copyParametersFrom(jobMeta);
job.clearParameters();
/*
 * String[] parameterNames = job.listParameters(); for (int idx = 0; idx < parameterNames.length; idx++) { // Grab
 * the parameter value set in the job entry // String thisValue =
 * jobExecutionConfiguration.getParams().get(parameterNames[idx]); if (!Utils.isEmpty(thisValue)) { // Set the
 * value as specified by the user in the job entry // jobMeta.setParameterValue(parameterNames[idx], thisValue); }
 * }
 */
jobMeta.activateParameters();
job.setSocketRepository(getSocketRepository());
// Register the job so other Carte servlets (status, stop, ...) can find it.
JobMap jobMap = getJobMap();
jobMap.addJob(job.getJobname(), carteObjectId, job, jobConfiguration);
// Disconnect from the job's repository when the job finishes.
//
job.addJobListener(new JobAdapter() {
public void jobFinished(Job job) {
repository.disconnect();
}
});
String message = "Job '" + job.getJobname() + "' was added to the list with id " + carteObjectId;
logBasic(message);
try {
runJob(job);
WebResult webResult = new WebResult(WebResult.STRING_OK, "Job started", carteObjectId);
out.println(webResult.getXML());
out.flush();
} catch (Exception executionException) {
// Include the job's captured log buffer in the error for diagnosis.
String logging = KettleLogStore.getAppender().getBuffer(job.getLogChannelId(), false).toString();
throw new KettleException("Error executing Job: " + logging, executionException);
}
} catch (Exception ex) {
// Any failure is reported as an ERROR WebResult in the XML response body.
out.println(new WebResult(WebResult.STRING_ERROR, BaseMessages.getString(PKG, "RunJobServlet.Error.UnexpectedError", Const.CR + Const.getStackTracker(ex))));
}
}
Aggregations (end of collected usages).