use of org.pentaho.di.core.logging.SimpleLoggingObject in project pentaho-kettle by pentaho.
The class JobTrackerExecution, method after().
@After
public void after() throws KettleDatabaseException {
  // DatabaseMeta databaseMeta = new DatabaseMeta( NAME, "H2", "JDBC", null, TMP, null, USER, USER );
  DatabaseMeta databaseMeta = new DatabaseMeta(NAME, "Hypersonic", "JDBC", null, "mem:HSQLDB-JUNIT-LOGJOB", null, null, null);
  LoggingObjectInterface log = new SimpleLoggingObject("junit", LoggingObjectType.GENERAL, null);
  Database db = new Database(log, databaseMeta);
  db.connect();
  db.execStatements("DROP SCHEMA PUBLIC CASCADE");
  db.commit(true);
  db.disconnect();
}
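For context, the same SimpleLoggingObject/Database pairing can drive the matching setup step. The sketch below is a minimal, hypothetical example (it is not part of the project's test code) that reuses only the calls visible above: it opens the in-memory HSQLDB connection under a "junit" logging object and runs a placeholder DDL statement.

// Hypothetical setup counterpart to the teardown above (not from the project sources).
@Before
public void before() throws KettleDatabaseException {
  DatabaseMeta databaseMeta = new DatabaseMeta(NAME, "Hypersonic", "JDBC", null, "mem:HSQLDB-JUNIT-LOGJOB", null, null, null);
  // The SimpleLoggingObject gives the Database a parent in the Kettle logging hierarchy.
  LoggingObjectInterface log = new SimpleLoggingObject("junit", LoggingObjectType.GENERAL, null);
  Database db = new Database(log, databaseMeta);
  db.connect();
  // Whatever DDL the test needs; the statement here is only a placeholder.
  db.execStatements("CREATE TABLE JUNIT_SCRATCH ( ID INTEGER )");
  db.commit(true);
  db.disconnect();
}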
use of org.pentaho.di.core.logging.SimpleLoggingObject in project pentaho-kettle by pentaho.
The class RunTransServletTest, method testRunTransServletCheckParameter().
@Test
public void testRunTransServletCheckParameter() throws Exception {
  HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
  HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
  Mockito.when(request.getParameter("trans")).thenReturn("home/test.rtr");
  StringWriter out = new StringWriter();
  PrintWriter printWriter = new PrintWriter(out);
  Mockito.when(request.getContextPath()).thenReturn(RunTransServlet.CONTEXT_PATH);
  Mockito.when(response.getWriter()).thenReturn(printWriter);
  TransformationMap mockTransformationMap = Mockito.mock(TransformationMap.class);
  SlaveServerConfig slaveServerConfig = Mockito.mock(SlaveServerConfig.class);
  Mockito.when(mockTransformationMap.getSlaveServerConfig()).thenReturn(slaveServerConfig);
  Repository repository = Mockito.mock(Repository.class);
  Mockito.when(slaveServerConfig.getRepository()).thenReturn(repository);
  RepositoryDirectoryInterface repositoryDirectoryInterface = Mockito.mock(RepositoryDirectoryInterface.class);
  Mockito.when(repository.loadRepositoryDirectoryTree()).thenReturn(repositoryDirectoryInterface);
  Mockito.when(repositoryDirectoryInterface.findDirectory(Mockito.anyString())).thenReturn(repositoryDirectoryInterface);
  TransMeta transMeta = Mockito.mock(TransMeta.class);
  Mockito.when(repository.loadTransformation(Mockito.any(), Mockito.any())).thenReturn(transMeta);
  String testParameter = "testParameter";
  Mockito.when(transMeta.listVariables()).thenReturn(new String[] { testParameter });
  Mockito.when(transMeta.getVariable(Mockito.anyString())).thenReturn("default value");
  Mockito.when(transMeta.listParameters()).thenReturn(new String[] { testParameter });
  Mockito.when(request.getParameterNames()).thenReturn(new StringTokenizer(testParameter));
  String testValue = "testValue";
  Mockito.when(request.getParameterValues(testParameter)).thenReturn(new String[] { testValue });
  RunTransServlet runTransServlet = Mockito.mock(RunTransServlet.class);
  Mockito.doCallRealMethod().when(runTransServlet).doGet(Mockito.anyObject(), Mockito.anyObject());
  Trans trans = new Trans(transMeta, new SimpleLoggingObject(RunTransServlet.CONTEXT_PATH, LoggingObjectType.CARTE, null));
  Mockito.when(runTransServlet.createTrans(Mockito.anyObject(), Mockito.anyObject())).thenReturn(trans);
  Mockito.when(transMeta.getParameterValue(Mockito.eq(testParameter))).thenReturn(testValue);
  runTransServlet.log = new LogChannel("RunTransServlet");
  runTransServlet.transformationMap = mockTransformationMap;
  runTransServlet.doGet(request, response);
  Assert.assertEquals(testValue, trans.getParameterValue(testParameter));
}
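The test above exercises the same pattern the Carte servlets use: a Trans built with a SimpleLoggingObject parent writes into its own log channel, which can later be read back from the central log store. The sketch below is illustrative only (it assumes a TransMeta instance such as the mocked transMeta above, and is not taken from the test class); every call in it also appears elsewhere on this page.

// Illustrative sketch (assumed usage, not from the test class).
SimpleLoggingObject loggingObject = new SimpleLoggingObject(RunTransServlet.CONTEXT_PATH, LoggingObjectType.CARTE, null);
loggingObject.setContainerObjectId(UUID.randomUUID().toString());
Trans trans = new Trans(transMeta, loggingObject);
// After execution, the buffered log lines for this transformation can be fetched by log channel id.
String logText = KettleLogStore.getAppender().getBuffer(trans.getLogChannelId(), false).toString();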
use of org.pentaho.di.core.logging.SimpleLoggingObject in project pentaho-kettle by pentaho.
The class RunJobServlet, method doGet().
/**
* <div id="mindtouch">
* <h1>/kettle/runJob</h1>
* <a name="GET"></a>
* <h2>GET</h2>
* <p>Executes a job from the enterprise repository. The repository should be configured in the Carte XML file.
* The response contains an <code>ERROR</code> result if an error occurred during job execution.</p>
*
* <p><b>Example Request:</b><br />
* <pre function="syntax.xml">
* GET /kettle/runJob?job=home%2Fadmin%2Fdummy_job&level=Debug
* </pre>
*
* </p>
* <h3>Parameters</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>name</th>
* <th>description</th>
* <th>type</th>
* </tr>
* <tr>
* <td>job</td>
* <td>Full path to the job in repository.</td>
* <td>query</td>
* </tr>
* <tr>
* <td>level</td>
* <td>The logging level to use for job execution (e.g. Debug).</td>
* <td>query</td>
* </tr>
* </tbody>
* </table>
*
* <h3>Response Body</h3>
*
* <table class="pentaho-table">
* <tbody>
* <tr>
* <td align="right">element:</td>
* <td>(custom)</td>
* </tr>
* <tr>
* <td align="right">media types:</td>
* <td>text/xml</td>
* </tr>
* </tbody>
* </table>
* <p>The response contains the result of the operation, either <code>OK</code> or <code>ERROR</code>.
* If an error occurred during job execution, the response also contains information about the error.</p>
*
* <p><b>Example Response:</b></p>
* <pre function="syntax.xml">
* <webresult>
* <result>OK</result>
* <message>Job started</message>
* <id>05d919b0-74a3-48d6-84d8-afce359d0449</id>
* </webresult>
* </pre>
*
* <h3>Status Codes</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>code</th>
* <th>description</th>
* </tr>
* <tr>
* <td>200</td>
* <td>Request was processed.</td>
* </tr>
* <tr>
* <td>500</td>
* <td>An internal server error occurred during request processing.</td>
* </tr>
* </tbody>
* </table>
* </div>
*/
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
  if (isJettyMode() && !request.getContextPath().startsWith(CONTEXT_PATH)) {
    return;
  }
  if (log.isDebug()) {
    logDebug(BaseMessages.getString(PKG, "RunJobServlet.Log.RunJobRequested"));
  }
  // Options taken from PAN
  //
  String[] knownOptions = new String[] { "job", "level" };
  String transOption = request.getParameter("job");
  String levelOption = request.getParameter("level");
  response.setStatus(HttpServletResponse.SC_OK);
  PrintWriter out = response.getWriter();
  try {
    SlaveServerConfig serverConfig = transformationMap.getSlaveServerConfig();
    Repository slaveServerRepository = serverConfig.getRepository();
    if (slaveServerRepository == null) {
      throw new KettleException("Unable to connect to repository in Slave Server Config: " + serverConfig.getRepositoryId());
    }
    final JobMeta jobMeta = loadJob(slaveServerRepository, transOption);
    // Set the servlet parameters as variables in the job
    //
    String[] parameters = jobMeta.listParameters();
    Enumeration<?> parameterNames = request.getParameterNames();
    while (parameterNames.hasMoreElements()) {
      String parameter = (String) parameterNames.nextElement();
      String[] values = request.getParameterValues(parameter);
      //
      if (Const.indexOfString(parameter, knownOptions) < 0) {
        //
        if (Const.indexOfString(parameter, parameters) < 0) {
          jobMeta.setVariable(parameter, values[0]);
        } else {
          jobMeta.setParameterValue(parameter, values[0]);
        }
      }
    }
    JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
    LogLevel logLevel = LogLevel.getLogLevelForCode(levelOption);
    jobExecutionConfiguration.setLogLevel(logLevel);
    // Create a new repository connection for this job
    //
    final Repository repository = jobExecutionConfiguration.connectRepository(serverConfig.getRepositoryId(), serverConfig.getRepositoryUsername(), serverConfig.getRepositoryPassword());
    JobConfiguration jobConfiguration = new JobConfiguration(jobMeta, jobExecutionConfiguration);
    String carteObjectId = UUID.randomUUID().toString();
    SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
    servletLoggingObject.setContainerObjectId(carteObjectId);
    servletLoggingObject.setLogLevel(logLevel);
    // Create the job and store it in the list...
    //
    final Job job = new Job(repository, jobMeta, servletLoggingObject);
    // Setting variables
    //
    job.initializeVariablesFrom(null);
    job.getJobMeta().setInternalKettleVariables(job);
    job.injectVariables(jobConfiguration.getJobExecutionConfiguration().getVariables());
    // Also copy the parameters over...
    //
    job.copyParametersFrom(jobMeta);
    job.clearParameters();
    /*
     * String[] parameterNames = job.listParameters(); for (int idx = 0; idx < parameterNames.length; idx++) { // Grab
     * the parameter value set in the job entry // String thisValue =
     * jobExecutionConfiguration.getParams().get(parameterNames[idx]); if (!Utils.isEmpty(thisValue)) { // Set the
     * value as specified by the user in the job entry // jobMeta.setParameterValue(parameterNames[idx], thisValue); }
     * }
     */
    jobMeta.activateParameters();
    job.setSocketRepository(getSocketRepository());
    JobMap jobMap = getJobMap();
    jobMap.addJob(job.getJobname(), carteObjectId, job, jobConfiguration);
    // Disconnect from the job's repository when the job finishes.
    //
    job.addJobListener(new JobAdapter() {
      public void jobFinished(Job job) {
        repository.disconnect();
      }
    });
    String message = "Job '" + job.getJobname() + "' was added to the list with id " + carteObjectId;
    logBasic(message);
    try {
      runJob(job);
      WebResult webResult = new WebResult(WebResult.STRING_OK, "Job started", carteObjectId);
      out.println(webResult.getXML());
      out.flush();
    } catch (Exception executionException) {
      String logging = KettleLogStore.getAppender().getBuffer(job.getLogChannelId(), false).toString();
      throw new KettleException("Error executing Job: " + logging, executionException);
    }
  } catch (Exception ex) {
    out.println(new WebResult(WebResult.STRING_ERROR, BaseMessages.getString(PKG, "RunJobServlet.Error.UnexpectedError", Const.CR + Const.getStackTracker(ex))));
  }
}
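The Javadoc above describes the HTTP contract of this endpoint. The sketch below is a hypothetical client call, not part of the servlet code: the host, port, and the cluster/cluster Basic-auth credentials are assumptions and depend entirely on how the Carte server is configured.

// Hypothetical client for GET /kettle/runJob (host, port and credentials are assumptions).
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class RunJobClient {
  public static void main(String[] args) throws Exception {
    String auth = Base64.getEncoder().encodeToString("cluster:cluster".getBytes(StandardCharsets.UTF_8));
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/kettle/runJob?job=home%2Fadmin%2Fdummy_job&level=Debug"))
        .header("Authorization", "Basic " + auth)
        .GET()
        .build();
    // The body is the <webresult> XML shown in the Javadoc above (OK/ERROR plus the Carte object id).
    HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode() + "\n" + response.body());
  }
}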
use of org.pentaho.di.core.logging.SimpleLoggingObject in project pentaho-kettle by pentaho.
The class RunTransServlet, method doGet().
/**
* <div id="mindtouch">
* <h1>/kettle/runTrans</h1>
* <a name="GET"></a>
* <h2>GET</h2>
* <p>Executes a transformation from the enterprise repository. The repository should be configured in the Carte XML file.
* The response contains an <code>ERROR</code> result if an error occurred during transformation execution.</p>
*
* <p><b>Example Request:</b><br />
* <pre function="syntax.xml">
* GET /kettle/runTrans?trans=home%2Fadmin%2Fdummy-trans&level=Debug
* </pre>
*
* </p>
* <h3>Parameters</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>name</th>
* <th>description</th>
* <th>type</th>
* </tr>
* <tr>
* <td>trans</td>
* <td>Full path to the transformation in repository.</td>
* <td>query</td>
* </tr>
* <tr>
* <td>level</td>
* <td>The logging level to use for transformation execution (e.g. Debug).</td>
* <td>query</td>
* </tr>
* </tbody>
* </table>
*
* <h3>Response Body</h3>
*
* <table class="pentaho-table">
* <tbody>
* <tr>
* <td align="right">element:</td>
* <td>(custom)</td>
* </tr>
* <tr>
* <td align="right">media types:</td>
* <td>text/xml</td>
* </tr>
* </tbody>
* </table>
* <p>The response contains the result of the operation, either <code>OK</code> or <code>ERROR</code>.
* If an error occurred during transformation execution, the response also contains information about the error.</p>
*
* <p><b>Example Response:</b></p>
* <pre function="syntax.xml">
* <webresult>
* <result>OK</result>
* <message>Transformation started</message>
* <id>7c082e8f-b4fe-40bc-b424-e0f881a61874</id>
* </webresult>
* </pre>
*
* <h3>Status Codes</h3>
* <table class="pentaho-table">
* <tbody>
* <tr>
* <th>code</th>
* <th>description</th>
* </tr>
* <tr>
* <td>200</td>
* <td>Request was processed.</td>
* </tr>
* <tr>
* <td>500</td>
* <td>An internal server error occurred during request processing.</td>
* </tr>
* </tbody>
* </table>
* </div>
*/
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
  if (isJettyMode() && !request.getContextPath().startsWith(CONTEXT_PATH)) {
    return;
  }
  if (log.isDebug()) {
    logDebug(BaseMessages.getString(PKG, "RunTransServlet.Log.RunTransRequested"));
  }
  // Options taken from PAN
  //
  String[] knownOptions = new String[] { "trans", "level" };
  String transOption = request.getParameter("trans");
  String levelOption = request.getParameter("level");
  response.setStatus(HttpServletResponse.SC_OK);
  String encoding = System.getProperty("KETTLE_DEFAULT_SERVLET_ENCODING", null);
  if (encoding != null && !Utils.isEmpty(encoding.trim())) {
    response.setCharacterEncoding(encoding);
    response.setContentType("text/html; charset=" + encoding);
  }
  PrintWriter out = response.getWriter();
  try {
    final Repository repository = transformationMap.getSlaveServerConfig().getRepository();
    final TransMeta transMeta = loadTrans(repository, transOption);
    // Set the servlet parameters as variables in the transformation
    //
    String[] parameters = transMeta.listParameters();
    Enumeration<?> parameterNames = request.getParameterNames();
    while (parameterNames.hasMoreElements()) {
      String parameter = (String) parameterNames.nextElement();
      String[] values = request.getParameterValues(parameter);
      //
      if (Const.indexOfString(parameter, knownOptions) < 0) {
        //
        if (Const.indexOfString(parameter, parameters) < 0) {
          transMeta.setVariable(parameter, values[0]);
        } else {
          transMeta.setParameterValue(parameter, values[0]);
        }
      }
    }
    TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration();
    LogLevel logLevel = LogLevel.getLogLevelForCode(levelOption);
    transExecutionConfiguration.setLogLevel(logLevel);
    TransConfiguration transConfiguration = new TransConfiguration(transMeta, transExecutionConfiguration);
    String carteObjectId = UUID.randomUUID().toString();
    SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
    servletLoggingObject.setContainerObjectId(carteObjectId);
    servletLoggingObject.setLogLevel(logLevel);
    // Create the transformation and store in the list...
    //
    final Trans trans = createTrans(transMeta, servletLoggingObject);
    // Pass information
    //
    trans.setRepository(repository);
    trans.setServletPrintWriter(out);
    trans.setServletReponse(response);
    trans.setServletRequest(request);
    // Setting variables
    //
    trans.initializeVariablesFrom(null);
    trans.getTransMeta().setInternalKettleVariables(trans);
    trans.injectVariables(transConfiguration.getTransExecutionConfiguration().getVariables());
    // Also copy the parameters over...
    //
    trans.copyParametersFrom(transMeta);
    /*
     * String[] parameterNames = job.listParameters(); for (int idx = 0; idx < parameterNames.length; idx++) { // Grab
     * the parameter value set in the job entry // String thisValue =
     * jobExecutionConfiguration.getParams().get(parameterNames[idx]); if (!Utils.isEmpty(thisValue)) { // Set the
     * value as specified by the user in the job entry // jobMeta.setParameterValue(parameterNames[idx], thisValue); }
     * }
     */
    transMeta.activateParameters();
    trans.setSocketRepository(getSocketRepository());
    getTransformationMap().addTransformation(trans.getName(), carteObjectId, trans, transConfiguration);
    // DO NOT disconnect from the shared repository connection when the job finishes.
    //
    String message = "Transformation '" + trans.getName() + "' was added to the list with id " + carteObjectId;
    logBasic(message);
    try {
      // Execute the transformation...
      //
      trans.execute(null);
      finishProcessing(trans, out);
    } catch (Exception executionException) {
      String logging = KettleLogStore.getAppender().getBuffer(trans.getLogChannelId(), false).toString();
      throw new KettleException("Error executing Transformation: " + logging, executionException);
    }
  } catch (Exception ex) {
    out.println(new WebResult(WebResult.STRING_ERROR, BaseMessages.getString(PKG, "RunTransServlet.Error.UnexpectedError", Const.CR + Const.getStackTracker(ex))));
  }
}
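The parameter loop above means that any query parameter other than trans and level is applied to the transformation: a name declared as a transformation parameter goes through setParameterValue(), any other name becomes a variable. The sketch below is a hypothetical client call mirroring the runJob sketch earlier on this page; host, port, and credentials are assumptions.

// Hypothetical client for GET /kettle/runTrans with an extra parameter (host, port and credentials are assumptions).
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class RunTransClient {
  public static void main(String[] args) throws Exception {
    String auth = Base64.getEncoder().encodeToString("cluster:cluster".getBytes(StandardCharsets.UTF_8));
    // "testParameter" is assumed to be declared on the transformation, so the servlet routes it
    // through setParameterValue(); an undeclared name would be set as a variable instead.
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/kettle/runTrans?trans=home%2Fadmin%2Fdummy-trans"
            + "&level=Debug&testParameter=testValue"))
        .header("Authorization", "Basic " + auth)
        .GET()
        .build();
    HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.body());
  }
}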
use of org.pentaho.di.core.logging.SimpleLoggingObject in project pentaho-kettle by pentaho.
The class JobResource, method startJob().
// change from GET to UPDATE/POST for proper REST method
@GET
@Path("/start/{id : .+}")
@Produces({ MediaType.APPLICATION_JSON })
public JobStatus startJob(@PathParam("id") String id) {
  Job job = CarteResource.getJob(id);
  CarteObjectEntry entry = CarteResource.getCarteObjectEntry(id);
  try {
    if (job.isInitialized() && !job.isActive()) {
      //
      if (job.getRep() != null && !job.getRep().isConnected()) {
        if (job.getRep().getUserInfo() != null) {
          job.getRep().connect(job.getRep().getUserInfo().getLogin(), job.getRep().getUserInfo().getPassword());
        } else {
          job.getRep().connect(null, null);
        }
      }
      //
      synchronized (this) {
        JobConfiguration jobConfiguration = CarteSingleton.getInstance().getJobMap().getConfiguration(entry);
        String carteObjectId = UUID.randomUUID().toString();
        SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(getClass().getName(), LoggingObjectType.CARTE, null);
        servletLoggingObject.setContainerObjectId(carteObjectId);
        Job newJob = new Job(job.getRep(), job.getJobMeta(), servletLoggingObject);
        newJob.setLogLevel(job.getLogLevel());
        // Discard old log lines from the old job
        //
        KettleLogStore.discardLines(job.getLogChannelId(), true);
        CarteSingleton.getInstance().getJobMap().replaceJob(entry, newJob, jobConfiguration);
        job = newJob;
      }
    }
    job.start();
  } catch (KettleException e) {
    e.printStackTrace();
  }
  return getJobStatus(id);
}
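The comment above the annotations flags that starting a job mutates server state, so GET is not the ideal HTTP verb. Below is a minimal, hypothetical sketch of one way a POST mapping could sit alongside the existing GET handler, assuming the standard JAX-RS annotations already used in this class; it is not code from the project.

// Hypothetical POST variant of the endpoint above (not part of the project sources);
// it delegates to the existing startJob(id) logic so only the HTTP method mapping changes.
@POST
@Path("/start/{id : .+}")
@Produces({ MediaType.APPLICATION_JSON })
public JobStatus startJobPost(@PathParam("id") String id) {
  return startJob(id);
}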