Use of org.pentaho.di.repository.Repository in the pentaho-kettle project (by Pentaho).
Class AddTransServlet, method doGet:
/**
 * <div id="mindtouch">
 * <h1>/kettle/addTrans</h1>
 * <a name="POST"></a>
 * <h2>POST</h2>
 * <p>Uploads and executes a transformation configuration XML document.
 * The request body contains a transformation and a transformation_execution_configuration
 * (wrapped in a transformation_configuration tag) to be executed. The method relies
 * on the input parameter to determine whether an xml or html reply should be produced.</p>
 *
 * <p>The <code>transformation name of the executed transformation</code> will be returned in the response,
 * or a <code>message</code> describing the error that occurred. To determine whether the call was successful,
 * rely on the <code>result</code> parameter in the response.</p>
 *
 * <p><b>Example Request:</b><br />
 * <pre function="syntax.xml">
 * POST /kettle/addTrans/?xml=Y
 * </pre>
 * <p>The request body should contain xml with a transformation_configuration (transformation and
 * transformation_execution_configuration wrapped in a transformation_configuration tag).</p>
 * </p>
 * <h3>Parameters</h3>
 * <table class="pentaho-table">
 * <tbody>
 * <tr>
 * <th>name</th>
 * <th>description</th>
 * <th>type</th>
 * </tr>
 * <tr>
 * <td>xml</td>
 * <td>Boolean flag set to either <code>Y</code> or <code>N</code> describing whether an xml or html reply
 * should be produced.</td>
 * <td>boolean, optional</td>
 * </tr>
 * </tbody>
 * </table>
 *
 * <h3>Response Body</h3>
 *
 * <table class="pentaho-table">
 * <tbody>
 * <tr>
 * <td align="right">element:</td>
 * <td>(custom)</td>
 * </tr>
 * <tr>
 * <td align="right">media types:</td>
 * <td>text/xml, text/html</td>
 * </tr>
 * </tbody>
 * </table>
 * <p>The response wraps the transformation name that was executed, or the error stack trace
 * if an error occurred. The response has <code>result</code> OK if there were no errors; otherwise it returns ERROR.</p>
 *
 * <p><b>Example Response:</b></p>
 * <pre function="syntax.xml">
 * <?xml version="1.0" encoding="UTF-8"?>
 * <webresult>
 * <result>OK</result>
 * <message>Transformation 'dummy-trans' was added to Carte with id eb4a92ff-6852-4307-9f74-3c74bd61f829</message>
 * <id>eb4a92ff-6852-4307-9f74-3c74bd61f829</id>
 * </webresult>
 * </pre>
 *
 * <h3>Status Codes</h3>
 * <table class="pentaho-table">
 * <tbody>
 * <tr>
 * <th>code</th>
 * <th>description</th>
 * </tr>
 * <tr>
 * <td>200</td>
 * <td>Request was processed and XML response is returned.</td>
 * </tr>
 * <tr>
 * <td>500</td>
 * <td>Internal server error occurs during request processing.</td>
 * </tr>
 * </tbody>
 * </table>
 * </div>
 */
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    if (isJettyMode() && !request.getRequestURI().startsWith(CONTEXT_PATH)) {
        return;
    }
    if (log.isDebug()) {
        logDebug("Addition of transformation requested");
    }
    boolean useXML = "Y".equalsIgnoreCase(request.getParameter("xml"));
    PrintWriter out = response.getWriter();
    BufferedReader in = request.getReader();
    if (log.isDetailed()) {
        logDetailed("Encoding: " + request.getCharacterEncoding());
    }
    if (useXML) {
        response.setContentType("text/xml");
        out.print(XMLHandler.getXMLHeader());
    } else {
        response.setContentType("text/html");
        out.println("<HTML>");
        out.println("<HEAD><TITLE>Add transformation</TITLE></HEAD>");
        out.println("<BODY>");
    }
    response.setStatus(HttpServletResponse.SC_OK);
    String realLogFilename = null;
    TransExecutionConfiguration transExecutionConfiguration = null;
    try {
        // First read the complete transformation in memory from the request.
        // getContentLength() returns -1 when the length is unknown (e.g. chunked encoding);
        // the original StringBuilder(-1) would throw NegativeArraySizeException, so clamp to 0.
        StringBuilder xml = new StringBuilder(Math.max(request.getContentLength(), 0));
        char[] buffer = new char[8192];
        int read;
        // Buffered chunk reads instead of a char-at-a-time loop.
        while ((read = in.read(buffer)) != -1) {
            xml.append(buffer, 0, read);
        }
        // Parse the XML, create a transformation configuration
        //
        TransConfiguration transConfiguration = TransConfiguration.fromXML(xml.toString());
        TransMeta transMeta = transConfiguration.getTransMeta();
        transExecutionConfiguration = transConfiguration.getTransExecutionConfiguration();
        transMeta.setLogLevel(transExecutionConfiguration.getLogLevel());
        if (log.isDetailed()) {
            logDetailed("Logging level set to " + log.getLogLevel().getDescription());
        }
        transMeta.injectVariables(transExecutionConfiguration.getVariables());
        // Also copy the parameters over... (entrySet avoids one map lookup per key)
        //
        for (Map.Entry<String, String> param : transExecutionConfiguration.getParams().entrySet()) {
            transMeta.setParameterValue(param.getKey(), param.getValue());
        }
        // If there was a repository, we know about it at this point in time.
        //
        final Repository repository = transExecutionConfiguration.getRepository();
        String carteObjectId = UUID.randomUUID().toString();
        SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
        servletLoggingObject.setContainerObjectId(carteObjectId);
        servletLoggingObject.setLogLevel(transExecutionConfiguration.getLogLevel());
        // Create the transformation and store in the list...
        //
        final Trans trans = new Trans(transMeta, servletLoggingObject);
        if (transExecutionConfiguration.isSetLogfile()) {
            realLogFilename = transExecutionConfiguration.getLogFileName();
            try {
                FileUtil.createParentFolder(AddTransServlet.class, realLogFilename,
                    transExecutionConfiguration.isCreateParentFolder(), trans.getLogChannel(), trans);
                final LogChannelFileWriter logChannelFileWriter = new LogChannelFileWriter(
                    servletLoggingObject.getLogChannelId(), KettleVFS.getFileObject(realLogFilename),
                    transExecutionConfiguration.isSetAppendLogfile());
                logChannelFileWriter.startLogging();
                trans.addTransListener(new TransAdapter() {
                    @Override
                    public void transFinished(Trans trans) throws KettleException {
                        // logChannelFileWriter is a final local assigned before this listener is
                        // registered, so it can never be null here.
                        logChannelFileWriter.stopLogging();
                    }
                });
            } catch (KettleException e) {
                // Best effort: a failure to set up file logging must not abort execution.
                logError(Const.getStackTracker(e));
            }
        }
        trans.setRepository(repository);
        trans.setSocketRepository(getSocketRepository());
        getTransformationMap().addTransformation(transMeta.getName(), carteObjectId, trans, transConfiguration);
        trans.setContainerObjectId(carteObjectId);
        if (repository != null) {
            // The repository connection is open: make sure we disconnect from the repository once we
            // are done with this transformation.
            //
            trans.addTransListener(new TransAdapter() {
                @Override
                public void transFinished(Trans trans) {
                    repository.disconnect();
                }
            });
        }
        String message = "Transformation '" + trans.getName() + "' was added to Carte with id " + carteObjectId;
        if (useXML) {
            // Return the log channel id as well
            //
            out.println(new WebResult(WebResult.STRING_OK, message, carteObjectId));
        } else {
            // NOTE(review): trans.getName() is embedded in the URL without encoding;
            // names containing '&', '=' or spaces would break the link — consider URLEncoder.
            out.println("<H1>" + message + "</H1>");
            out.println("<p><a href=\"" + convertContextPath(GetTransStatusServlet.CONTEXT_PATH) + "?name="
                + trans.getName() + "&id=" + carteObjectId + "\">Go to the transformation status page</a><p>");
        }
    } catch (Exception ex) {
        if (useXML) {
            out.println(new WebResult(WebResult.STRING_ERROR, Const.getStackTracker(ex)));
        } else {
            out.println("<p>");
            out.println("<pre>");
            ex.printStackTrace(out);
            out.println("</pre>");
        }
    }
    if (!useXML) {
        out.println("<p>");
        out.println("</BODY>");
        out.println("</HTML>");
    }
}
Use of org.pentaho.di.repository.Repository in the pentaho-kettle project (by Pentaho).
Class BaseJobServlet, method createTrans:
/**
 * Builds a {@link Trans} from the given configuration and registers it with the
 * transformation map under a freshly generated Carte object id.
 *
 * <p>Applies the execution configuration's log level, variables and parameters to the
 * transformation, optionally wires up a log file writer that stops when the transformation
 * finishes, and — when a repository connection is present — installs a listener that
 * disconnects from the repository on completion.</p>
 *
 * @param transConfiguration the transformation plus its execution configuration
 * @return the fully prepared, registered transformation (not yet started)
 * @throws UnknownParamException if a parameter in the configuration is unknown to the transformation
 */
protected Trans createTrans(TransConfiguration transConfiguration) throws UnknownParamException {
    TransMeta transMeta = transConfiguration.getTransMeta();
    TransExecutionConfiguration transExecutionConfiguration = transConfiguration.getTransExecutionConfiguration();
    transMeta.setLogLevel(transExecutionConfiguration.getLogLevel());
    transMeta.injectVariables(transExecutionConfiguration.getVariables());
    // Also copy the parameters over...
    copyParameters(transMeta, transExecutionConfiguration.getParams());
    String carteObjectId = UUID.randomUUID().toString();
    SimpleLoggingObject servletLoggingObject = getServletLogging(carteObjectId, transExecutionConfiguration.getLogLevel());
    // Create the transformation and store in the list...
    final Trans trans = new Trans(transMeta, servletLoggingObject);
    trans.setMetaStore(transformationMap.getSlaveServerConfig().getMetaStore());
    if (transExecutionConfiguration.isSetLogfile()) {
        String realLogFilename = transExecutionConfiguration.getLogFileName();
        try {
            // NOTE(review): AddTransServlet.class here looks like a copy-paste from
            // AddTransServlet.doGet — confirm whether BaseJobServlet.class was intended.
            FileUtil.createParentFolder(AddTransServlet.class, realLogFilename,
                transExecutionConfiguration.isCreateParentFolder(), trans.getLogChannel(), trans);
            final LogChannelFileWriter logChannelFileWriter = new LogChannelFileWriter(
                servletLoggingObject.getLogChannelId(), KettleVFS.getFileObject(realLogFilename),
                transExecutionConfiguration.isSetAppendLogfile());
            logChannelFileWriter.startLogging();
            trans.addTransListener(new TransAdapter() {
                @Override
                public void transFinished(Trans trans) throws KettleException {
                    // logChannelFileWriter is a final local assigned before this listener is
                    // registered, so it can never be null here.
                    logChannelFileWriter.stopLogging();
                }
            });
        } catch (KettleException e) {
            // Best effort: failure to set up file logging should not abort the transformation.
            logError(Const.getStackTracker(e));
        }
    }
    // If there was a repository, we know about it at this point in time.
    final Repository repository = transExecutionConfiguration.getRepository();
    trans.setRepository(repository);
    trans.setSocketRepository(getSocketRepository());
    trans.setContainerObjectId(carteObjectId);
    getTransformationMap().addTransformation(transMeta.getName(), carteObjectId, trans, transConfiguration);
    if (repository != null) {
        // The repository connection is open: make sure we disconnect from the repository once we
        // are done with this transformation.
        trans.addTransListener(new TransAdapter() {
            @Override
            public void transFinished(Trans trans) {
                repository.disconnect();
            }
        });
    }
    final Long passedBatchId = transExecutionConfiguration.getPassedBatchId();
    if (passedBatchId != null) {
        trans.setPassedBatchId(passedBatchId);
    }
    return trans;
}
Use of org.pentaho.di.repository.Repository in the pentaho-kettle project (by Pentaho).
Class ExecuteJobServlet, method openRepository:
/**
 * Opens a connection to the named repository.
 *
 * @param repositoryName name of the repository to look up; an empty name means "no repository"
 * @param user           user name for the repository login
 * @param pass           password for the repository login
 * @return a connected {@link Repository}, or {@code null} when {@code repositoryName} is empty
 * @throws KettleException if no repository with that name is defined, or connecting fails
 */
private Repository openRepository(String repositoryName, String user, String pass) throws KettleException {
    if (Utils.isEmpty(repositoryName)) {
        return null;
    }
    RepositoriesMeta allRepositories = new RepositoriesMeta();
    allRepositories.readData();
    RepositoryMeta meta = allRepositories.findRepository(repositoryName);
    if (meta == null) {
        throw new KettleException("Unable to find repository: " + repositoryName);
    }
    // Instantiate the repository implementation registered for this repository type.
    Repository repo = PluginRegistry.getInstance().loadClass(RepositoryPluginType.class, meta, Repository.class);
    repo.init(meta);
    repo.connect(user, pass);
    return repo;
}
Use of org.pentaho.di.repository.Repository in the pentaho-kettle project (by Pentaho).
Class Spoon, method openRepository:
/**
 * Shows the repository login dialog and connects Spoon to the repository the user selects.
 * Refuses to open the dialog while any tab has unsaved changes; on a successful login the
 * previously connected repository (if any) is disconnected before switching over.
 */
public void openRepository() {
    // Check whether tabs are dirty and warn the user that they must save tabs prior to connecting. Don't connect!
    if (Spoon.getInstance().isTabsChanged()) {
        MessageBox mb = new MessageBox(Spoon.getInstance().getShell(), SWT.OK);
        mb.setMessage(BaseMessages.getString(PKG, "Spoon.Dialog.WarnToSaveAllPriorToConnect.Message"));
        mb.setText(BaseMessages.getString(PKG, "Spoon.Dialog.WarnToCloseAllForce.Disconnect.Title"));
        mb.open();
        // Don't connect, user will need to save all their dirty tabs.
        return;
    }
    loginDialog = new RepositoriesDialog(shell, null, new ILoginCallback() {
        @Override
        public void onSuccess(Repository repository) {
            // Close previous repository...
            if (rep != null) {
                rep.disconnect();
                SpoonPluginManager.getInstance().notifyLifecycleListeners(SpoonLifeCycleEvent.REPOSITORY_DISCONNECTED);
            }
            // Switch to the new repository and rebuild the UI state around it.
            setRepository(repository);
            loadSessionInformation(repository, true);
            refreshTree();
            setShellText();
            SpoonPluginManager.getInstance().notifyLifecycleListeners(SpoonLifeCycleEvent.REPOSITORY_CONNECTED);
        }
        @Override
        public void onError(Throwable t) {
            // Tear down any partial connection state before reporting the failure.
            closeRepository();
            onLoginError(t);
        }
        @Override
        public void onCancel() {
            // Nothing to do: the user backed out of the login dialog.
        }
    });
    loginDialog.show();
}
Use of org.pentaho.di.repository.Repository in the pentaho-metaverse project (by Pentaho).
Class JobExecutorStepAnalyzer, method customAnalyze:
/**
 * Resolves the sub-job executed by this Job Executor step, links it to the step's node in
 * the metaverse graph, and records execution-related properties on the step node.
 *
 * <p>The sub-job may be specified by file name, by repository path + name, or by repository
 * object reference; each case resolves to a {@link JobMeta} and a displayable path.</p>
 *
 * @param meta the Job Executor step metadata
 * @param node the metaverse node representing this step
 * @throws MetaverseAnalyzerException if the sub-job cannot be located or loaded
 */
@Override
protected void customAnalyze(JobExecutorMeta meta, IMetaverseNode node) throws MetaverseAnalyzerException {
    String jobPath = meta.getFileName();
    JobMeta subJobMeta = null;
    Repository repo = parentTransMeta.getRepository();
    switch (meta.getSpecificationMethod()) {
        case FILENAME:
            // Referenced by file name: substitute variables and normalize the path first.
            jobPath = parentTransMeta.environmentSubstitute(meta.getFileName());
            try {
                String normalized = KettleAnalyzerUtil.normalizeFilePath(jobPath);
                subJobMeta = getSubJobMeta(parentTransMeta, normalized);
                jobPath = normalized;
            } catch (Exception e) {
                // Message fixed: this step executes a job, not a transformation.
                throw new MetaverseAnalyzerException("Sub job can not be found - " + jobPath, e);
            }
            break;
        case REPOSITORY_BY_NAME:
            if (repo != null) {
                String dir = parentTransMeta.environmentSubstitute(meta.getDirectoryPath());
                String file = parentTransMeta.environmentSubstitute(meta.getJobName());
                try {
                    RepositoryDirectoryInterface rdi = repo.findDirectory(dir);
                    subJobMeta = repo.loadJob(file, rdi, null, null);
                    String filename = subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
                    jobPath = filename + "." + subJobMeta.getDefaultExtension();
                } catch (KettleException e) {
                    throw new MetaverseAnalyzerException("Sub job can not be found in repository - " + file, e);
                }
            } else {
                throw new MetaverseAnalyzerException("Not connected to a repository, can't get the job");
            }
            break;
        case REPOSITORY_BY_REFERENCE:
            if (repo != null) {
                try {
                    subJobMeta = repo.loadJob(meta.getJobObjectId(), null);
                    String filename = subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
                    jobPath = filename + "." + subJobMeta.getDefaultExtension();
                } catch (KettleException e) {
                    throw new MetaverseAnalyzerException("Sub job can not be found by reference - " + meta.getJobObjectId(), e);
                }
            } else {
                throw new MetaverseAnalyzerException("Not connected to a repository, can't get the job");
            }
            break;
    }
    // Guard: an unhandled specification method would leave subJobMeta null and NPE below.
    if (subJobMeta == null) {
        throw new MetaverseAnalyzerException("Sub job can not be determined - " + jobPath);
    }
    // Create a node for the sub-job and link it as being executed by this step.
    IComponentDescriptor ds = new MetaverseComponentDescriptor(subJobMeta.getName(), DictionaryConst.NODE_TYPE_JOB, descriptor.getNamespace().getParentNamespace());
    IMetaverseNode jobNode = createNodeFromDescriptor(ds);
    jobNode.setProperty(DictionaryConst.PROPERTY_NAMESPACE, ds.getNamespaceId());
    jobNode.setProperty(DictionaryConst.PROPERTY_PATH, jobPath);
    jobNode.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);
    metaverseBuilder.addLink(node, DictionaryConst.LINK_EXECUTES, jobNode);
    connectToSubJobOutputFields(meta, subJobMeta, jobNode, descriptor);
    node.setProperty(JOB_TO_EXECUTE, jobPath);
    if (StringUtils.isNotEmpty(meta.getExecutionResultTargetStep())) {
        node.setProperty(EXECUTION_RESULTS_TARGET, meta.getExecutionResultTargetStep());
    }
    if (StringUtils.isNotEmpty(meta.getResultFilesTargetStep())) {
        node.setProperty(RESULT_FILES_TARGET, meta.getResultFilesTargetStep());
    }
}
Aggregations