Search in sources:

Example 1 with TransConfiguration

use of org.pentaho.di.trans.TransConfiguration in project pdi-platform-plugin by pentaho.

From the class PdiAction, method executeTransformation.

/**
 * Executes a PDI transformation: builds a {@link TransExecutionConfiguration}
 * from this action's settings, registers the transformation with the local
 * Carte singleton so it can be monitored, prepares it, optionally injects
 * rows, and blocks until execution finishes.
 *
 * @param transMeta the transformation metadata to execute; if {@code null},
 *          no transformation can be created and an
 *          {@link ActionExecutionException} is thrown
 * @param logWriter the log writer handed to {@link #customizeTrans}
 * @throws ActionExecutionException if the transformation cannot be created,
 *           customized, or started, or if row injection or the run itself
 *           fails. A failure in {@code prepareExecution} is deliberately NOT
 *           thrown (the scheduler may retry this transformation); it is only
 *           logged and recorded in {@code transPrepExecutionFailure}.
 */
protected void executeTransformation(final TransMeta transMeta, final LogWriter logWriter) throws ActionExecutionException {
    localTrans = null;
    if (transMeta != null) {
        TransExecutionConfiguration transExConfig = newTransExecutionConfiguration();
        // Only override configuration defaults for options the caller actually set.
        if (logLevel != null) {
            transExConfig.setLogLevel(LogLevel.getLogLevelForCode(logLevel));
        }
        if (clearLog != null) {
            transExConfig.setClearingLog(Boolean.valueOf(clearLog));
        }
        if (runSafeMode != null) {
            transExConfig.setSafeModeEnabled(Boolean.valueOf(runSafeMode));
        }
        if (gatheringMetrics != null) {
            transExConfig.setGatheringMetrics(Boolean.valueOf(gatheringMetrics));
        }
        try {
            localTrans = newTrans(transMeta);
            localTrans.setArguments(arguments);
            localTrans.shareVariablesWith(transMeta);
            // Register under a fresh Carte object id so the run can be tracked remotely.
            String carteObjectId = UUID.randomUUID().toString();
            localTrans.setContainerObjectId(carteObjectId);
            CarteSingleton.getInstance().getTransformationMap().addTransformation(getTransformationName(carteObjectId), carteObjectId, localTrans, new TransConfiguration(localTrans.getTransMeta(), transExConfig));
        } catch (Exception e) {
            throw new ActionExecutionException(Messages.getInstance().getErrorString("Kettle.ERROR_0010_BAD_TRANSFORMATION_METADATA"), // $NON-NLS-1$
            e);
        }
    }
    if (localTrans == null) {
        throw new ActionExecutionException(Messages.getInstance().getErrorString(// $NON-NLS-1$
        "Kettle.ERROR_0010_BAD_TRANSFORMATION_METADATA"));
    }
    // localTrans is guaranteed non-null past this point (the null case throws above),
    // so the formerly redundant `if (localTrans != null)` guard has been removed.
    // OK, we have the transformation, now run it!
    if (!customizeTrans(localTrans, logWriter)) {
        // NOTE(review): "FUNCITON" is the actual resource-bundle key spelling — do not "fix" it here.
        throw new ActionExecutionException(Messages.getInstance().getErrorString(// $NON-NLS-1$
        "Kettle.ERROR_0028_CUSTOMIZATION_FUNCITON_FAILED"));
    }
    if (log.isDebugEnabled()) {
        // $NON-NLS-1$
        log.debug(Messages.getInstance().getString("Kettle.DEBUG_PREPARING_TRANSFORMATION"));
    }
    try {
        localTrans.setLogLevel(LogLevel.getLogLevelForCode(logLevel));
        localTrans.setSafeModeEnabled(Boolean.valueOf(runSafeMode));
        localTrans.prepareExecution(transMeta.getArguments());
    } catch (Exception e) {
        transPrepExecutionFailure = true;
        // don't throw exception, because the scheduler may try to run this transformation again
        // $NON-NLS-1$
        log.error(Messages.getInstance().getErrorString("Kettle.ERROR_0011_TRANSFORMATION_PREPARATION_FAILED"), e);
        return;
    }
    String stepName = null;
    try {
        if (log.isDebugEnabled()) {
            // $NON-NLS-1$
            log.debug(Messages.getInstance().getString("Kettle.DEBUG_FINDING_STEP_IMPORTER"));
        }
        // Attach a step listener to the monitor step, if one is configured.
        stepName = getMonitorStepName();
        if (stepName != null) {
            registerAsStepListener(stepName, localTrans);
        }
    } catch (Exception e) {
        throw new ActionExecutionException(Messages.getInstance().getErrorString("Kettle.ERROR_0012_ROW_LISTENER_CREATE_FAILED"), // $NON-NLS-1$
        e);
    }
    try {
        if (log.isDebugEnabled()) {
            // $NON-NLS-1$
            log.debug(Messages.getInstance().getString("Kettle.DEBUG_FINDING_STEP_IMPORTER"));
        }
        // Attach a row producer to the injector step, if one is configured.
        if (injectorStep != null) {
            registerAsProducer(injectorStep, localTrans);
        }
    } catch (Exception e) {
        throw new ActionExecutionException(Messages.getInstance().getErrorString("Kettle.ERROR_0012_ROW_INJECTOR_CREATE_FAILED"), // $NON-NLS-1$
        e);
    }
    try {
        if (log.isDebugEnabled()) {
            // $NON-NLS-1$
            log.debug(Messages.getInstance().getString("Kettle.DEBUG_STARTING_TRANSFORMATION"));
        }
        localTrans.startThreads();
    } catch (Exception e) {
        throw new ActionExecutionException(Messages.getInstance().getErrorString("Kettle.ERROR_0013_TRANSFORMATION_START_FAILED"), // $NON-NLS-1$
        e);
    }
    // inject rows if necessary
    if (injectorRows != null) {
        // create a row meta
        try {
            if (log.isDebugEnabled()) {
                // $NON-NLS-1$
                log.debug(Messages.getInstance().getString("Injecting rows"));
            }
            RowMeta rowMeta = new RowMeta();
            RowMetaInterface rowMetaInterface = transMeta.getStepFields(injectorStep);
            rowMeta.addRowMeta(rowMetaInterface);
            // inject the rows
            Object[] row = injectorRows.next();
            while (row != null) {
                rowInjector.putRow(rowMeta, row);
                row = injectorRows.next();
            }
            rowInjector.finished();
        } catch (Exception e) {
            // $NON-NLS-1$
            throw new ActionExecutionException(Messages.getInstance().getErrorString("Row injection failed"), e);
        }
    }
    try {
        // It's running in a separate thread to allow monitoring, etc.
        if (log.isDebugEnabled()) {
            // $NON-NLS-1$
            log.debug(Messages.getInstance().getString("Kettle.DEBUG_TRANSFORMATION_RUNNING"));
        }
        localTrans.waitUntilFinished();
        localTrans.cleanup();
    } catch (Exception e) {
        int transErrors = localTrans.getErrors();
        throw new ActionExecutionException(org.pentaho.platform.plugin.kettle.messages.Messages.getInstance().getErrorString("PdiAction.ERROR_0009_TRANSFORMATION_HAD_ERRORS", Integer.toString(transErrors)), // $NON-NLS-1$
        e);
    }
    // Dump the Kettle log...
    if (log.isDebugEnabled()) {
        log.debug(pdiUserAppender.getBuffer().toString());
    }
    // Build written row output
    if (transformationOutputRows != null) {
        transformationOutputRowsCount = transformationOutputRows.getRowCount();
    }
    // Build error row output
    if (transformationOutputErrorRows != null) {
        transformationOutputErrorRowsCount = transformationOutputErrorRows.getRowCount();
    }
}
Also used : TransExecutionConfiguration(org.pentaho.di.trans.TransExecutionConfiguration) RowMeta(org.pentaho.di.core.row.RowMeta) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) ActionExecutionException(org.pentaho.platform.api.engine.ActionExecutionException) TransConfiguration(org.pentaho.di.trans.TransConfiguration) ActionExecutionException(org.pentaho.platform.api.engine.ActionExecutionException) ActionValidationException(org.pentaho.platform.api.engine.ActionValidationException) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) FileNotFoundException(java.io.FileNotFoundException) KettleValueException(org.pentaho.di.core.exception.KettleValueException) KettleStepException(org.pentaho.di.core.exception.KettleStepException) KettleException(org.pentaho.di.core.exception.KettleException) KettleSecurityException(org.pentaho.di.core.exception.KettleSecurityException)

Example 2 with TransConfiguration

use of org.pentaho.di.trans.TransConfiguration in project pentaho-kettle by pentaho.

From the class RegisterPackageServlet, method generateBody.

@Override
WebResult generateBody(HttpServletRequest request, HttpServletResponse response, boolean useXML) throws KettleException {
    // Copy the uploaded archive into a temp directory and unpack it.
    String archiveUrl = copyRequestToDirectory(request, createTempDirString());
    // name of the resource (job/transformation file) inside the archive to run
    String resourceName = request.getParameter(PARAMETER_LOAD);
    String zipBaseUrl = extract(archiveUrl);
    if (Utils.isEmpty(resourceName)) {
        // Nothing requested to load — only the extraction was performed.
        return null;
    }
    String fileUrl = getStartFileUrl(zipBaseUrl, resourceName);
    String executionId;
    if (isJob(request)) {
        // Job path: read the exported execution configuration and launch the job.
        Node node = getConfigNode(zipBaseUrl, Job.CONFIGURATION_IN_EXPORT_FILENAME, JobExecutionConfiguration.XML_TAG);
        JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(node);
        JobMeta jobMeta = new JobMeta(fileUrl, jobExecutionConfiguration.getRepository());
        JobConfiguration jobConfiguration = new JobConfiguration(jobMeta, jobExecutionConfiguration);
        Job job = createJob(jobConfiguration);
        executionId = job.getContainerObjectId();
    } else {
        // Transformation path: same flow using the trans-specific configuration.
        Node node = getConfigNode(zipBaseUrl, Trans.CONFIGURATION_IN_EXPORT_FILENAME, TransExecutionConfiguration.XML_TAG);
        TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration(node);
        TransMeta transMeta = new TransMeta(fileUrl, transExecutionConfiguration.getRepository());
        TransConfiguration transConfiguration = new TransConfiguration(transMeta, transExecutionConfiguration);
        Trans trans = createTrans(transConfiguration);
        executionId = trans.getContainerObjectId();
    }
    // zip file no longer needed, contents were extracted
    deleteArchive(archiveUrl);
    return new WebResult(WebResult.STRING_OK, fileUrl, executionId);
}
Also used : TransExecutionConfiguration(org.pentaho.di.trans.TransExecutionConfiguration) JobMeta(org.pentaho.di.job.JobMeta) Node(org.w3c.dom.Node) TransMeta(org.pentaho.di.trans.TransMeta) Job(org.pentaho.di.job.Job) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) TransConfiguration(org.pentaho.di.trans.TransConfiguration) Trans(org.pentaho.di.trans.Trans) JobConfiguration(org.pentaho.di.job.JobConfiguration)

Example 3 with TransConfiguration

use of org.pentaho.di.trans.TransConfiguration in project pentaho-kettle by pentaho.

From the class RunTransServlet, method doGet.

/**
 * <div id="mindtouch">
 *    <h1>/kettle/runTrans</h1>
 *    <a name="GET"></a>
 *    <h2>GET</h2>
 *    <p>Execute transformation from enterprise repository. Repository should be configured in Carte xml file.
 *  Response contains <code>ERROR</code> result if error happened during transformation execution.</p>
 *
 *    <p><b>Example Request:</b><br />
 *    <pre function="syntax.xml">
 *    GET /kettle/runTrans?trans=home%2Fadmin%2Fdummy-trans&level=Debug
 *    </pre>
 *
 *    </p>
 *    <h3>Parameters</h3>
 *    <table class="pentaho-table">
 *    <tbody>
 *    <tr>
 *      <th>name</th>
 *      <th>description</th>
 *      <th>type</th>
 *    </tr>
 *    <tr>
 *    <td>trans</td>
 *    <td>Full path to the transformation in repository.</td>
 *    <td>query</td>
 *    </tr>
 *    <tr>
 *    <td>level</td>
 *    <td>Logging level to be used for transformation execution (i.e. Debug).</td>
 *    <td>query</td>
 *    </tr>
 *    </tbody>
 *    </table>
 *
 *  <h3>Response Body</h3>
 *
 *  <table class="pentaho-table">
 *    <tbody>
 *      <tr>
 *        <td align="right">element:</td>
 *        <td>(custom)</td>
 *      </tr>
 *      <tr>
 *        <td align="right">media types:</td>
 *        <td>text/xml</td>
 *      </tr>
 *    </tbody>
 *  </table>
 *    <p>Response contains result of the operation. It is either <code>OK</code> or <code>ERROR</code>.
 *     If an error occurred during transformation execution, response also contains information about the error.</p>
 *
 *    <p><b>Example Response:</b></p>
 *    <pre function="syntax.xml">
 *    <webresult>
 *      <result>OK</result>
 *      <message>Transformation started</message>
 *      <id>7c082e8f-b4fe-40bc-b424-e0f881a61874</id>
 *    </webresult>
 *    </pre>
 *
 *    <h3>Status Codes</h3>
 *    <table class="pentaho-table">
 *  <tbody>
 *    <tr>
 *      <th>code</th>
 *      <th>description</th>
 *    </tr>
 *    <tr>
 *      <td>200</td>
 *      <td>Request was processed.</td>
 *    </tr>
 *    <tr>
 *      <td>500</td>
 *      <td>Internal server error occurs during request processing.</td>
 *    </tr>
 *  </tbody>
 *</table>
 *</div>
 */
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    // In Jetty mode, ignore requests not addressed to this servlet's context path.
    if (isJettyMode() && !request.getContextPath().startsWith(CONTEXT_PATH)) {
        return;
    }
    if (log.isDebug()) {
        logDebug(BaseMessages.getString(PKG, "RunTransServlet.Log.RunTransRequested"));
    }
    // Options taken from PAN
    // 
    String[] knownOptions = new String[] { "trans", "level" };
    String transOption = request.getParameter("trans");
    String levelOption = request.getParameter("level");
    response.setStatus(HttpServletResponse.SC_OK);
    // Optional response-encoding override via system property.
    String encoding = System.getProperty("KETTLE_DEFAULT_SERVLET_ENCODING", null);
    if (encoding != null && !Utils.isEmpty(encoding.trim())) {
        response.setCharacterEncoding(encoding);
        response.setContentType("text/html; charset=" + encoding);
    }
    PrintWriter out = response.getWriter();
    try {
        final Repository repository = transformationMap.getSlaveServerConfig().getRepository();
        final TransMeta transMeta = loadTrans(repository, transOption);
        // Set the servlet parameters as variables in the transformation
        // 
        String[] parameters = transMeta.listParameters();
        Enumeration<?> parameterNames = request.getParameterNames();
        while (parameterNames.hasMoreElements()) {
            String parameter = (String) parameterNames.nextElement();
            String[] values = request.getParameterValues(parameter);
            // Skip the servlet's own options ("trans", "level"); everything else is
            // applied to the transformation.
            // 
            if (Const.indexOfString(parameter, knownOptions) < 0) {
                // Declared parameters become parameter values; anything else becomes a variable.
                // 
                if (Const.indexOfString(parameter, parameters) < 0) {
                    transMeta.setVariable(parameter, values[0]);
                } else {
                    transMeta.setParameterValue(parameter, values[0]);
                }
            }
        }
        TransExecutionConfiguration transExecutionConfiguration = new TransExecutionConfiguration();
        LogLevel logLevel = LogLevel.getLogLevelForCode(levelOption);
        transExecutionConfiguration.setLogLevel(logLevel);
        TransConfiguration transConfiguration = new TransConfiguration(transMeta, transExecutionConfiguration);
        // A fresh Carte object id identifies this run in the transformation map and logs.
        String carteObjectId = UUID.randomUUID().toString();
        SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
        servletLoggingObject.setContainerObjectId(carteObjectId);
        servletLoggingObject.setLogLevel(logLevel);
        // Create the transformation and store in the list...
        // 
        final Trans trans = createTrans(transMeta, servletLoggingObject);
        // Pass information
        // 
        trans.setRepository(repository);
        trans.setServletPrintWriter(out);
        trans.setServletReponse(response);
        trans.setServletRequest(request);
        // Setting variables
        // 
        trans.initializeVariablesFrom(null);
        trans.getTransMeta().setInternalKettleVariables(trans);
        trans.injectVariables(transConfiguration.getTransExecutionConfiguration().getVariables());
        // Also copy the parameters over...
        // 
        trans.copyParametersFrom(transMeta);
        /*
       * String[] parameterNames = job.listParameters(); for (int idx = 0; idx < parameterNames.length; idx++) { // Grab
       * the parameter value set in the job entry // String thisValue =
       * jobExecutionConfiguration.getParams().get(parameterNames[idx]); if (!Utils.isEmpty(thisValue)) { // Set the
       * value as specified by the user in the job entry // jobMeta.setParameterValue(parameterNames[idx], thisValue); }
       * }
       */
        transMeta.activateParameters();
        trans.setSocketRepository(getSocketRepository());
        getTransformationMap().addTransformation(trans.getName(), carteObjectId, trans, transConfiguration);
        // DO NOT disconnect from the shared repository connection when the job finishes.
        // 
        String message = "Transformation '" + trans.getName() + "' was added to the list with id " + carteObjectId;
        logBasic(message);
        try {
            // Execute the transformation...
            // 
            trans.execute(null);
            finishProcessing(trans, out);
        } catch (Exception executionException) {
            // Surface the transformation's own log buffer in the error so the client
            // sees why execution failed.
            String logging = KettleLogStore.getAppender().getBuffer(trans.getLogChannelId(), false).toString();
            throw new KettleException("Error executing Transformation: " + logging, executionException);
        }
    } catch (Exception ex) {
        // Any failure is reported to the client as an ERROR WebResult with a stack trace.
        out.println(new WebResult(WebResult.STRING_ERROR, BaseMessages.getString(PKG, "RunTransServlet.Error.UnexpectedError", Const.CR + Const.getStackTracker(ex))));
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) TransMeta(org.pentaho.di.trans.TransMeta) SimpleLoggingObject(org.pentaho.di.core.logging.SimpleLoggingObject) TransConfiguration(org.pentaho.di.trans.TransConfiguration) LogLevel(org.pentaho.di.core.logging.LogLevel) ServletException(javax.servlet.ServletException) KettleException(org.pentaho.di.core.exception.KettleException) IOException(java.io.IOException) TransExecutionConfiguration(org.pentaho.di.trans.TransExecutionConfiguration) Repository(org.pentaho.di.repository.Repository) Trans(org.pentaho.di.trans.Trans) PrintWriter(java.io.PrintWriter)

Example 4 with TransConfiguration

use of org.pentaho.di.trans.TransConfiguration in project pentaho-kettle by pentaho.

From the class AddExportServlet, method doGet.

/**
 *    <div id="mindtouch">
 *    <h1>/kettle/addExport</h1>
 *    <a name="POST"></a>
 *    <h2>POST</h2>
 *    <p>Returns the list of users in the platform. This list is in an xml format as shown in the example response.
 *    Uploads and executes previously exported job or transformation.
 *    Uploads zip file containing job or transformation to be executed and executes it.
 *    Method relies on the input parameters to find the entity to be executed. The archive is
 *    transferred within request body.
 *
 *    <code>File url of the executed entity </code> will be returned in the Response object
 *    or <code>message</code> describing error occurred. To determine if the call is successful
 *    rely on <code>result</code> parameter in response.</p>
 *
 *    <p><b>Example Request:</b><br />
 *    <pre function="syntax.xml">
 *    POST /kettle/addExport/?type=job&load=dummy_job.kjb
 *    </pre>
 *    Request body should contain zip file prepared for Carte execution.
 *    </p>
 *    <h3>Parameters</h3>
 *    <table class="pentaho-table">
 *    <tbody>
 *    <tr>
 *      <th>name</th>
 *      <th>description</th>
 *      <th>type</th>
 *    </tr>
 *    <tr>
 *    <td>type</td>
 *    <td>The type of the entity to be executed either <code>job</code> or <code>trans</code>.</td>
 *    <td>query</td>
 *    </tr>
 *    <tr>
 *    <td>load</td>
 *    <td>The name of the entity within archive to be executed.</td>
 *    <td>query</td>
 *    </tr>
 *    </tbody>
 *    </table>
 *
 *  <h3>Response Body</h3>
 *
 *  <table class="pentaho-table">
 *    <tbody>
 *      <tr>
 *        <td align="right">element:</td>
 *        <td>(custom)</td>
 *      </tr>
 *      <tr>
 *        <td align="right">media types:</td>
 *        <td>application/xml</td>
 *      </tr>
 *    </tbody>
 *  </table>
 *    <p>Response wraps file url of the entity that was executed or error stack trace if an error occurred.
 *     Response has <code>result</code> OK if there were no errors. Otherwise it returns ERROR.</p>
 *
 *    <p><b>Example Response:</b></p>
 *    <pre function="syntax.xml">
 *    <?xml version="1.0" encoding="UTF-8"?>
 *    <webresult>
 *      <result>OK</result>
 *      <message>zip&#x3a;file&#x3a;&#x2f;&#x2f;&#x2f;temp&#x2f;export_ee2a67de-6a72-11e4-82c0-4701a2bac6a5.zip&#x21;dummy_job.kjb</message>
 *      <id>74cf4219-c881-4633-a71a-2ed16b7db7b8</id>
 *    </webresult>
 *    </pre>
 *
 *    <h3>Status Codes</h3>
 *    <table class="pentaho-table">
 *  <tbody>
 *    <tr>
 *      <th>code</th>
 *      <th>description</th>
 *    </tr>
 *    <tr>
 *      <td>200</td>
 *      <td>Request was processed and XML response is returned.</td>
 *    </tr>
 *    <tr>
 *      <td>500</td>
 *      <td>Internal server error occurs during request processing.</td>
 *    </tr>
 *  </tbody>
 *</table>
 *</div>
 */
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    // In Jetty mode, ignore requests not addressed to this servlet's context path.
    if (isJettyMode() && !request.getRequestURI().startsWith(CONTEXT_PATH)) {
        return;
    }
    if (log.isDebug()) {
        logDebug("Addition of export requested");
    }
    PrintWriter out = response.getWriter();
    // read from the client
    InputStream in = request.getInputStream();
    if (log.isDetailed()) {
        logDetailed("Encoding: " + request.getCharacterEncoding());
    }
    boolean isJob = TYPE_JOB.equalsIgnoreCase(request.getParameter(PARAMETER_TYPE));
    // the resource to load
    String load = request.getParameter(PARAMETER_LOAD);
    response.setContentType("text/xml");
    out.print(XMLHandler.getXMLHeader());
    response.setStatus(HttpServletResponse.SC_OK);
    OutputStream outputStream = null;
    try {
        // Spool the request body (a zip export) into a temp file via Kettle VFS.
        FileObject tempFile = KettleVFS.createTempFile("export", ".zip", System.getProperty("java.io.tmpdir"));
        outputStream = KettleVFS.getOutputStream(tempFile, false);
        // Pass the input directly to a temporary file
        // 
        // int size = 0;
        int c;
        while ((c = in.read()) != -1) {
            outputStream.write(c);
        // size++;
        }
        outputStream.flush();
        outputStream.close();
        // don't close it twice (the finally block below closes only a non-null stream)
        outputStream = null;
        String archiveUrl = tempFile.getName().toString();
        String fileUrl = null;
        String carteObjectId = null;
        SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.CARTE, null);
        // If a resource name was given, open it from inside the uploaded archive and register it.
        // 
        if (!Utils.isEmpty(load)) {
            fileUrl = "zip:" + archiveUrl + "!" + load;
            if (isJob) {
                // Open the job from inside the ZIP archive
                // 
                KettleVFS.getFileObject(fileUrl);
                // never with a repository
                JobMeta jobMeta = new JobMeta(fileUrl, null);
                // Also read the execution configuration information
                // 
                String configUrl = "zip:" + archiveUrl + "!" + Job.CONFIGURATION_IN_EXPORT_FILENAME;
                Document configDoc = XMLHandler.loadXMLFile(configUrl);
                JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(XMLHandler.getSubNode(configDoc, JobExecutionConfiguration.XML_TAG));
                carteObjectId = UUID.randomUUID().toString();
                servletLoggingObject.setContainerObjectId(carteObjectId);
                servletLoggingObject.setLogLevel(jobExecutionConfiguration.getLogLevel());
                Job job = new Job(null, jobMeta, servletLoggingObject);
                // If the export asks for remote-job expansion, track delegated executions too.
                // 
                if (jobExecutionConfiguration.isExpandingRemoteJob()) {
                    job.addDelegationListener(new CarteDelegationHandler(getTransformationMap(), getJobMap()));
                }
                // store it all in the map...
                // 
                getJobMap().addJob(job.getJobname(), carteObjectId, job, new JobConfiguration(jobMeta, jobExecutionConfiguration));
                // Apply the execution configuration...
                // 
                log.setLogLevel(jobExecutionConfiguration.getLogLevel());
                job.setArguments(jobExecutionConfiguration.getArgumentStrings());
                jobMeta.injectVariables(jobExecutionConfiguration.getVariables());
                // Also copy the parameters over...
                // 
                Map<String, String> params = jobExecutionConfiguration.getParams();
                for (Map.Entry<String, String> entry : params.entrySet()) {
                    jobMeta.setParameterValue(entry.getKey(), entry.getValue());
                }
            } else {
                // Open the transformation from inside the ZIP archive
                // 
                TransMeta transMeta = new TransMeta(fileUrl);
                // Also read the execution configuration information
                // 
                String configUrl = "zip:" + archiveUrl + "!" + Trans.CONFIGURATION_IN_EXPORT_FILENAME;
                Document configDoc = XMLHandler.loadXMLFile(configUrl);
                TransExecutionConfiguration executionConfiguration = new TransExecutionConfiguration(XMLHandler.getSubNode(configDoc, TransExecutionConfiguration.XML_TAG));
                carteObjectId = UUID.randomUUID().toString();
                servletLoggingObject.setContainerObjectId(carteObjectId);
                servletLoggingObject.setLogLevel(executionConfiguration.getLogLevel());
                Trans trans = new Trans(transMeta, servletLoggingObject);
                // store it all in the map...
                // 
                getTransformationMap().addTransformation(trans.getName(), carteObjectId, trans, new TransConfiguration(transMeta, executionConfiguration));
            }
        } else {
            // No resource named: respond with the archive's own URL and no object id.
            fileUrl = archiveUrl;
        }
        out.println(new WebResult(WebResult.STRING_OK, fileUrl, carteObjectId));
    } catch (Exception ex) {
        // Report failures to the client as an ERROR WebResult with a stack trace.
        out.println(new WebResult(WebResult.STRING_ERROR, Const.getStackTracker(ex)));
    } finally {
        if (outputStream != null) {
            outputStream.close();
        }
    }
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) TransMeta(org.pentaho.di.trans.TransMeta) SimpleLoggingObject(org.pentaho.di.core.logging.SimpleLoggingObject) Document(org.w3c.dom.Document) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) TransConfiguration(org.pentaho.di.trans.TransConfiguration) ServletException(javax.servlet.ServletException) IOException(java.io.IOException) TransExecutionConfiguration(org.pentaho.di.trans.TransExecutionConfiguration) FileObject(org.apache.commons.vfs2.FileObject) Job(org.pentaho.di.job.Job) Map(java.util.Map) Trans(org.pentaho.di.trans.Trans) JobConfiguration(org.pentaho.di.job.JobConfiguration) PrintWriter(java.io.PrintWriter)

Example 5 with TransConfiguration

use of org.pentaho.di.trans.TransConfiguration in project pentaho-kettle by pentaho.

From the class CarteDelegationHandler, method transformationDelegationStarted.

@Override
public synchronized void transformationDelegationStarted(Trans delegatedTrans, TransExecutionConfiguration transExecutionConfiguration) {
    // Register the delegated transformation (metadata plus its execution
    // configuration) so Carte can monitor the sub-transformation.
    transformationMap.registerTransformation(delegatedTrans,
        new TransConfiguration(delegatedTrans.getTransMeta(), transExecutionConfiguration));
    // Chain this handler so transformations delegated further down are also registered.
    delegatedTrans.addDelegationListener(this);
}
Also used : TransConfiguration(org.pentaho.di.trans.TransConfiguration)

Aggregations

TransConfiguration (org.pentaho.di.trans.TransConfiguration)15 TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration)13 Trans (org.pentaho.di.trans.Trans)12 IOException (java.io.IOException)8 KettleException (org.pentaho.di.core.exception.KettleException)8 TransMeta (org.pentaho.di.trans.TransMeta)8 PrintWriter (java.io.PrintWriter)6 ServletException (javax.servlet.ServletException)6 SimpleLoggingObject (org.pentaho.di.core.logging.SimpleLoggingObject)6 Repository (org.pentaho.di.repository.Repository)4 Map (java.util.Map)3 TransAdapter (org.pentaho.di.trans.TransAdapter)3 ByteArrayInputStream (java.io.ByteArrayInputStream)2 Path (javax.ws.rs.Path)2 Produces (javax.ws.rs.Produces)2 ParserConfigurationException (javax.xml.parsers.ParserConfigurationException)2 XPathExpressionException (javax.xml.xpath.XPathExpressionException)2 Test (org.junit.Test)2 KettleXMLException (org.pentaho.di.core.exception.KettleXMLException)2 LogChannelInterface (org.pentaho.di.core.logging.LogChannelInterface)2