Example 81 with JobMeta

use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

the class RunConfigurationImportExtensionPointTest method shouldCreateRunConfigurationsForSlaveServer.

@Test
public void shouldCreateRunConfigurationsForSlaveServer() throws Exception {
    JobMeta jobMeta = mock(JobMeta.class);
    JobEntryCopy jobEntryCopy1 = mock(JobEntryCopy.class);
    JobEntryCopy jobEntryCopy2 = mock(JobEntryCopy.class);
    JobEntryCopy jobEntryCopy3 = mock(JobEntryCopy.class);
    JobEntryTrans trans1 = mock(JobEntryTrans.class);
    JobEntryTrans trans2 = mock(JobEntryTrans.class);
    JobEntryTrans trans3 = mock(JobEntryTrans.class);
    ArgumentCaptor<DefaultRunConfiguration> rcCaptor = ArgumentCaptor.forClass(DefaultRunConfiguration.class);
    when(jobMeta.getEmbeddedMetaStore()).thenReturn(embeddedMetaStore);
    when(jobMeta.getSlaveServers()).thenReturn(Arrays.asList(new SlaveServer("carte1", "host1", "1234", "user", "passw"), new SlaveServer("carte2", "host2", "1234", "user", "passw")));
    when(jobMeta.getJobCopies()).thenReturn(Arrays.asList(jobEntryCopy1, jobEntryCopy2, jobEntryCopy3));
    when(jobEntryCopy1.getEntry()).thenReturn(trans1);
    when(jobEntryCopy2.getEntry()).thenReturn(trans2);
    when(jobEntryCopy3.getEntry()).thenReturn(trans3);
    when(trans1.getRemoteSlaveServerName()).thenReturn("carte1");
    when(trans2.getRemoteSlaveServerName()).thenReturn("carte1");
    when(trans3.getRemoteSlaveServerName()).thenReturn("carte2");
    when(trans1.getRunConfiguration()).thenReturn(null);
    when(trans2.getRunConfiguration()).thenReturn(null);
    when(trans3.getRunConfiguration()).thenReturn(null);
    when(runConfigurationManager.getNames()).thenReturn(Collections.singletonList("pentaho_auto_carte1_config"));
    runConfigurationImportExtensionPoint.callExtensionPoint(log, jobMeta);
    verify(runConfigurationManager, times(2)).save(rcCaptor.capture());
    verify(trans1).setRunConfiguration("pentaho_auto_carte1_config_1");
    verify(trans2).setRunConfiguration("pentaho_auto_carte1_config_1");
    verify(trans3).setRunConfiguration("pentaho_auto_carte2_config");
    List<DefaultRunConfiguration> allValues = rcCaptor.getAllValues();
    DefaultRunConfiguration runConfiguration1 = allValues.get(0);
    assertEquals("pentaho_auto_carte1_config_1", runConfiguration1.getName());
    assertEquals("carte1", runConfiguration1.getServer());
    DefaultRunConfiguration runConfiguration2 = allValues.get(1);
    assertEquals("pentaho_auto_carte2_config", runConfiguration2.getName());
    assertEquals("carte2", runConfiguration2.getServer());
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryTrans(org.pentaho.di.job.entries.trans.JobEntryTrans) DefaultRunConfiguration(org.pentaho.di.engine.configuration.impl.pentaho.DefaultRunConfiguration) SlaveServer(org.pentaho.di.cluster.SlaveServer) Test(org.junit.Test)
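
The test above exercises the run configuration naming scheme: one configuration per referenced Carte server, with a numeric suffix appended when the generated name already exists ("pentaho_auto_carte1_config" is already taken, so the new one becomes "pentaho_auto_carte1_config_1", while "pentaho_auto_carte2_config" is free). Below is a minimal sketch of such a collision-avoiding name generator; the helper class is hypothetical and only illustrates the pattern the test asserts, it is not the extension point's actual code.

import java.util.List;

public class RunConfigurationNamer {

    // Base pattern asserted in the test: pentaho_auto_<serverName>_config
    static String baseNameFor(String serverName) {
        return "pentaho_auto_" + serverName + "_config";
    }

    // Append _1, _2, ... until the candidate no longer collides with an existing name.
    static String uniqueNameFor(String serverName, List<String> existingNames) {
        String candidate = baseNameFor(serverName);
        int suffix = 1;
        while (existingNames.contains(candidate)) {
            candidate = baseNameFor(serverName) + "_" + suffix++;
        }
        return candidate;
    }
}

With the stubbed getNames() above, uniqueNameFor("carte1", existingNames) yields "pentaho_auto_carte1_config_1" and uniqueNameFor("carte2", existingNames) yields "pentaho_auto_carte2_config", matching the captured configurations.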

Example 82 with JobMeta

use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

the class StartJobServletTest method testStartJobServletEscapesHtmlWhenTransFound.

@Test
@PrepareForTest({ Encode.class })
public void testStartJobServletEscapesHtmlWhenTransFound() throws ServletException, IOException {
    KettleLogStore.init();
    HttpServletRequest mockHttpServletRequest = mock(HttpServletRequest.class);
    HttpServletResponse mockHttpServletResponse = mock(HttpServletResponse.class);
    Job mockJob = mock(Job.class);
    JobMeta mockJobMeta = mock(JobMeta.class);
    LogChannelInterface mockLogChannelInterface = mock(LogChannelInterface.class);
    mockJob.setName(ServletTestUtils.BAD_STRING_TO_TEST);
    StringWriter out = new StringWriter();
    PrintWriter printWriter = new PrintWriter(out);
    PowerMockito.spy(Encode.class);
    when(mockHttpServletRequest.getContextPath()).thenReturn(StartJobServlet.CONTEXT_PATH);
    when(mockHttpServletRequest.getParameter(anyString())).thenReturn(ServletTestUtils.BAD_STRING_TO_TEST);
    when(mockHttpServletResponse.getWriter()).thenReturn(printWriter);
    when(mockJobMap.getJob(any(CarteObjectEntry.class))).thenReturn(mockJob);
    when(mockJob.getLogChannelId()).thenReturn(ServletTestUtils.BAD_STRING_TO_TEST);
    when(mockJob.getLogChannel()).thenReturn(mockLogChannelInterface);
    when(mockJob.getJobMeta()).thenReturn(mockJobMeta);
    when(mockJobMeta.getMaximum()).thenReturn(new Point(10, 10));
    startJobServlet.doGet(mockHttpServletRequest, mockHttpServletResponse);
    assertFalse(ServletTestUtils.hasBadText(ServletTestUtils.getInsideOfTag("H1", out.toString())));
    PowerMockito.verifyStatic(atLeastOnce());
    Encode.forHtml(anyString());
}
Also used : HttpServletRequest(javax.servlet.http.HttpServletRequest) JobMeta(org.pentaho.di.job.JobMeta) StringWriter(java.io.StringWriter) HttpServletResponse(javax.servlet.http.HttpServletResponse) Point(org.pentaho.di.core.gui.Point) Job(org.pentaho.di.job.Job) LogChannelInterface(org.pentaho.di.core.logging.LogChannelInterface) PrintWriter(java.io.PrintWriter) Test(org.junit.Test) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest)
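
The assertion here is that the servlet never echoes an unescaped job name into its HTML response: the malicious value in ServletTestUtils.BAD_STRING_TO_TEST must come back encoded, and the spy on Encode confirms that Encode.forHtml was actually called. As an illustration of the escaping the test verifies, here is a small sketch of writing an untrusted name into a response with the OWASP Encoder (a generic example, not the servlet's actual output code):

import java.io.PrintWriter;

import org.owasp.encoder.Encode;

public class HtmlEscapingExample {

    // Encode.forHtml neutralizes <, >, &, and quote characters, so a name such as
    // "<script>alert('x')</script>" is rendered as inert text rather than executed.
    void writeHeading(PrintWriter out, String jobName) {
        out.println("<H1>Job '" + Encode.forHtml(jobName) + "' was started.</H1>");
    }
}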

Example 83 with JobMeta

use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

the class StopJobServletTest method testStopJobServletEscapesHtmlWhenTransFound.

@Test
@PrepareForTest({ Encode.class })
public void testStopJobServletEscapesHtmlWhenTransFound() throws ServletException, IOException {
    KettleLogStore.init();
    HttpServletRequest mockHttpServletRequest = mock(HttpServletRequest.class);
    HttpServletResponse mockHttpServletResponse = mock(HttpServletResponse.class);
    Job mockJob = mock(Job.class);
    JobMeta mockJobMeta = mock(JobMeta.class);
    LogChannelInterface mockLogChannelInterface = mock(LogChannelInterface.class);
    mockJob.setName(ServletTestUtils.BAD_STRING_TO_TEST);
    StringWriter out = new StringWriter();
    PrintWriter printWriter = new PrintWriter(out);
    PowerMockito.spy(Encode.class);
    when(mockHttpServletRequest.getContextPath()).thenReturn(StopJobServlet.CONTEXT_PATH);
    when(mockHttpServletRequest.getParameter(anyString())).thenReturn(ServletTestUtils.BAD_STRING_TO_TEST);
    when(mockHttpServletResponse.getWriter()).thenReturn(printWriter);
    when(mockJobMap.getJob(any(CarteObjectEntry.class))).thenReturn(mockJob);
    when(mockJob.getLogChannelId()).thenReturn(ServletTestUtils.BAD_STRING_TO_TEST);
    when(mockJob.getLogChannel()).thenReturn(mockLogChannelInterface);
    when(mockJob.getJobMeta()).thenReturn(mockJobMeta);
    when(mockJobMeta.getMaximum()).thenReturn(new Point(10, 10));
    stopJobServlet.doGet(mockHttpServletRequest, mockHttpServletResponse);
    assertFalse(ServletTestUtils.hasBadText(ServletTestUtils.getInsideOfTag("H1", out.toString())));
    PowerMockito.verifyStatic(atLeastOnce());
    Encode.forHtml(anyString());
}
Also used : HttpServletRequest(javax.servlet.http.HttpServletRequest) JobMeta(org.pentaho.di.job.JobMeta) StringWriter(java.io.StringWriter) HttpServletResponse(javax.servlet.http.HttpServletResponse) Point(org.pentaho.di.core.gui.Point) Job(org.pentaho.di.job.Job) LogChannelInterface(org.pentaho.di.core.logging.LogChannelInterface) PrintWriter(java.io.PrintWriter) Test(org.junit.Test) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest)
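
Both servlet tests rely on PowerMock's two-step idiom for verifying a static call: verifyStatic(...) arms the verification mode, and the static invocation written on the next line is the call being checked. Here is a condensed view of that idiom with explanatory comments; it assumes the test class runs with PowerMockRunner and carries the @PrepareForTest({ Encode.class }) annotation shown above.

// Let the real Encode methods execute while PowerMock records the static calls.
PowerMockito.spy(Encode.class);

// ... exercise the servlet under test, which should call Encode.forHtml(...) ...

// Arm verification for the next static invocation, then name the call being verified.
PowerMockito.verifyStatic(atLeastOnce());
Encode.forHtml(anyString());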

Example 84 with JobMeta

use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

the class AutoDoc method processRow.

public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    meta = (AutoDocMeta) smi;
    data = (AutoDocData) sdi;
    Object[] row = getRow();
    if (row == null) {
        if (data.filenames.isEmpty()) {
            // Nothing to see here, move along!
            // 
            setOutputDone();
            return false;
        }
        // End of the line, create the documentation...
        // 
        FileObject targetFile = KettleVFS.getFileObject(environmentSubstitute(meta.getTargetFilename()));
        String targetFilename = KettleVFS.getFilename(targetFile);
        // Create the report builder
        // 
        KettleReportBuilder kettleReportBuilder = new KettleReportBuilder(this, data.filenames, KettleVFS.getFilename(targetFile), meta);
        try {
            // Boot the Pentaho reporting engine if it hasn't been initialized yet...
            //
            if (!ClassicEngineBoot.getInstance().isBootDone()) {
                ObjectUtilities.setClassLoader(getClass().getClassLoader());
                ObjectUtilities.setClassLoaderSource(ObjectUtilities.CLASS_CONTEXT);
                LibLoaderBoot.getInstance().start();
                LibFontBoot.getInstance().start();
                ClassicEngineBoot.getInstance().start();
            }
            // Do the reporting thing...
            // 
            kettleReportBuilder.createReport();
            kettleReportBuilder.render();
            Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
            int outputIndex = 0;
            outputRowData[outputIndex++] = targetFilename;
            // Pass along the data to the next steps...
            // 
            putRow(data.outputRowMeta, outputRowData);
            // Add the target file to the result file list
            // 
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, targetFile, getTransMeta().getName(), toString());
            resultFile.setComment("This file was generated by the 'Auto Documentation Output' step");
            addResultFile(resultFile);
        } catch (Exception e) {
            throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.UnableToRenderReport"), e);
        }
        setOutputDone();
        return false;
    }
    if (first) {
        first = false;
        data.outputRowMeta = getInputRowMeta().clone();
        meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
        // Get the filename field index...
        // 
        String filenameField = environmentSubstitute(meta.getFilenameField());
        data.fileNameFieldIndex = getInputRowMeta().indexOfValue(filenameField);
        if (data.fileNameFieldIndex < 0) {
            throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.FilenameFieldNotFound", filenameField));
        }
        // Get the file type field index...
        // 
        String fileTypeField = environmentSubstitute(meta.getFileTypeField());
        data.fileTypeFieldIndex = getInputRowMeta().indexOfValue(fileTypeField);
        if (data.fileTypeFieldIndex < 0) {
            throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.FileTypeFieldNotFound", fileTypeField));
        }
        data.repository = getTrans().getRepository();
        if (data.repository != null) {
            data.tree = data.repository.loadRepositoryDirectoryTree();
        }
        // Initialize the repository information handlers (images, metadata, loading, etc)
        // 
        TransformationInformation.init(getTrans().getRepository());
        JobInformation.init(getTrans().getRepository());
    }
    // One more transformation or job to place in the documentation.
    // 
    String fileName = getInputRowMeta().getString(row, data.fileNameFieldIndex);
    String fileType = getInputRowMeta().getString(row, data.fileTypeFieldIndex);
    RepositoryObjectType objectType;
    if ("Transformation".equalsIgnoreCase(fileType)) {
        objectType = RepositoryObjectType.TRANSFORMATION;
    } else if ("Job".equalsIgnoreCase(fileType)) {
        objectType = RepositoryObjectType.JOB;
    } else {
        throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.UnknownFileTypeValue", fileType));
    }
    ReportSubjectLocation location = null;
    if (getTrans().getRepository() == null) {
        switch(objectType) {
            case TRANSFORMATION:
                location = new ReportSubjectLocation(fileName, null, null, objectType);
                break;
            case JOB:
                location = new ReportSubjectLocation(fileName, null, null, objectType);
                break;
            default:
                break;
        }
    } else {
        int lastSlashIndex = fileName.lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR);
        if (lastSlashIndex < 0) {
            fileName = RepositoryDirectory.DIRECTORY_SEPARATOR + fileName;
            lastSlashIndex = 0;
        }
        String directoryName = fileName.substring(0, lastSlashIndex + 1);
        String objectName = fileName.substring(lastSlashIndex + 1);
        RepositoryDirectoryInterface directory = data.tree.findDirectory(directoryName);
        if (directory == null) {
            throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.RepositoryDirectoryNotFound", directoryName));
        }
        location = new ReportSubjectLocation(null, directory, objectName, objectType);
    }
    if (location == null) {
        throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.UnableToDetermineLocation", fileName, fileType));
    }
    if (meta.getOutputType() != OutputType.METADATA) {
        // Add the file location to the list for later processing in one output report
        // 
        data.filenames.add(location);
    } else {
        // Load the metadata from the transformation / job...
        // Output it in one row for each input row
        // 
        Object[] outputRow = RowDataUtil.resizeArray(row, data.outputRowMeta.size());
        int outputIndex = getInputRowMeta().size();
        List<AreaOwner> imageAreaList = null;
        switch(location.getObjectType()) {
            case TRANSFORMATION:
                TransformationInformation ti = TransformationInformation.getInstance();
                TransMeta transMeta = ti.getTransMeta(location);
                imageAreaList = ti.getImageAreaList(location);
                // TransMeta
                outputRow[outputIndex++] = transMeta;
                break;
            case JOB:
                JobInformation ji = JobInformation.getInstance();
                JobMeta jobMeta = ji.getJobMeta(location);
                imageAreaList = ji.getImageAreaList(location);
                // JobMeta
                outputRow[outputIndex++] = jobMeta;
                break;
            default:
                break;
        }
        // Name
        if (meta.isIncludingName()) {
            outputRow[outputIndex++] = KettleFileTableModel.getName(location);
        }
        // Description
        if (meta.isIncludingDescription()) {
            outputRow[outputIndex++] = KettleFileTableModel.getDescription(location);
        }
        // Extended Description
        if (meta.isIncludingExtendedDescription()) {
            outputRow[outputIndex++] = KettleFileTableModel.getExtendedDescription(location);
        }
        // created
        if (meta.isIncludingCreated()) {
            outputRow[outputIndex++] = KettleFileTableModel.getCreation(location);
        }
        // modified
        if (meta.isIncludingModified()) {
            outputRow[outputIndex++] = KettleFileTableModel.getModification(location);
        }
        // image
        if (meta.isIncludingImage()) {
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            try {
                BufferedImage image = KettleFileTableModel.getImage(location);
                ImageIO.write(image, "png", outputStream);
                outputRow[outputIndex++] = outputStream.toByteArray();
            } catch (Exception e) {
                throw new KettleException("Unable to serialize image to PNG", e);
            } finally {
                try {
                    outputStream.close();
                } catch (IOException e) {
                    throw new KettleException("Unable to serialize image to PNG", e);
                }
            }
        }
        if (meta.isIncludingLoggingConfiguration()) {
            outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
        }
        if (meta.isIncludingLastExecutionResult()) {
            outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
        }
        if (meta.isIncludingImageAreaList()) {
            outputRow[outputIndex++] = imageAreaList;
        }
        putRow(data.outputRowMeta, outputRow);
    }
    return true;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) JobMeta(org.pentaho.di.job.JobMeta) TransMeta(org.pentaho.di.trans.TransMeta) ByteArrayOutputStream(java.io.ByteArrayOutputStream) IOException(java.io.IOException) ResultFile(org.pentaho.di.core.ResultFile) BufferedImage(java.awt.image.BufferedImage) AreaOwner(org.pentaho.di.core.gui.AreaOwner) RepositoryObjectType(org.pentaho.di.repository.RepositoryObjectType) FileObject(org.apache.commons.vfs2.FileObject)
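
processRow() above follows the standard Kettle step contract: getRow() returns null once the input stream is exhausted, the first row triggers one-time initialization of the output row metadata and field indexes, each processed row is forwarded with putRow(), and the return value tells the engine whether to call the method again. Below is a stripped-down skeleton of that contract, offered as a generic sketch of the pattern rather than AutoDoc itself (meta and data stand for the step's own metadata and data classes):

public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    Object[] row = getRow();
    if (row == null) {
        // No more input: signal downstream steps and stop being scheduled.
        setOutputDone();
        return false;
    }
    if (first) {
        first = false;
        // One-time setup: clone the input row metadata and let the step's meta extend it.
        data.outputRowMeta = getInputRowMeta().clone();
        meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
    }
    // Forward the (possibly extended) row and ask to be called again for the next one.
    putRow(data.outputRowMeta, row);
    return true;
}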

Example 85 with JobMeta

use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

the class JobInformation method loadValues.

private JobInformationValues loadValues(ReportSubjectLocation location) throws KettleException {
    // Load the job
    // 
    JobMeta jobMeta = loadJob(location);
    Point min = jobMeta.getMinimum();
    Point area = jobMeta.getMaximum();
    area.x += 30;
    area.y += 30;
    int iconsize = 32;
    ScrollBarInterface bar = new ScrollBarInterface() {

        public void setThumb(int thumb) {
        }

        public int getSelection() {
            return 0;
        }
    };
    // Paint the job...
    // 
    GCInterface gc = new SwingGC(null, area, iconsize, 50, 20);
    List<AreaOwner> areaOwners = new ArrayList<AreaOwner>();
    JobPainter painter = new JobPainter(gc, jobMeta, area, bar, bar, null, null, null, areaOwners, new ArrayList<JobEntryCopy>(), iconsize, 1, 0, 0, true, "FreeSans", 10);
    painter.setMagnification(0.25f);
    painter.drawJob();
    BufferedImage bufferedImage = (BufferedImage) gc.getImage();
    int newWidth = bufferedImage.getWidth() - min.x;
    int newHeight = bufferedImage.getHeight() - min.y;
    BufferedImage image = new BufferedImage(newWidth, newHeight, bufferedImage.getType());
    image.getGraphics().drawImage(bufferedImage, 0, 0, newWidth, newHeight, min.x, min.y, min.x + newWidth, min.y + newHeight, null);
    JobInformationValues values = new JobInformationValues();
    values.jobMeta = jobMeta;
    values.image = image;
    values.areaOwners = areaOwners;
    return values;
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) JobPainter(org.pentaho.di.job.JobPainter) ArrayList(java.util.ArrayList) Point(org.pentaho.di.core.gui.Point) SwingGC(org.pentaho.di.core.gui.SwingGC) ScrollBarInterface(org.pentaho.di.core.gui.ScrollBarInterface) BufferedImage(java.awt.image.BufferedImage) GCInterface(org.pentaho.di.core.gui.GCInterface) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) AreaOwner(org.pentaho.di.core.gui.AreaOwner)
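
loadValues() renders the whole job canvas at 25% magnification and then copies only the region between the job's minimum and maximum coordinates into a fresh image, trimming the empty margin at the upper left. The same crop can be written more compactly with BufferedImage.getSubimage; the sketch below assumes the requested region lies entirely inside the rendered image:

import java.awt.image.BufferedImage;

public class JobImageCrop {

    // Equivalent to the drawImage copy above: keep everything from (minX, minY) onwards.
    static BufferedImage cropToJob(BufferedImage rendered, int minX, int minY) {
        int width = rendered.getWidth() - minX;
        int height = rendered.getHeight() - minY;
        // Note: getSubimage shares the underlying raster with the source image;
        // draw it into a new BufferedImage if an independent copy is needed.
        return rendered.getSubimage(minX, minY, width, height);
    }
}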

Aggregations

JobMeta (org.pentaho.di.job.JobMeta): 254
Test (org.junit.Test): 88
TransMeta (org.pentaho.di.trans.TransMeta): 69
KettleException (org.pentaho.di.core.exception.KettleException): 62
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 48
Job (org.pentaho.di.job.Job): 45
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 27
Repository (org.pentaho.di.repository.Repository): 25
RepositoryDirectoryInterface (org.pentaho.di.repository.RepositoryDirectoryInterface): 25
Point (org.pentaho.di.core.gui.Point): 24
ArrayList (java.util.ArrayList): 23
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog): 22
SlaveServer (org.pentaho.di.cluster.SlaveServer): 17
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 17
FileObject (org.apache.commons.vfs2.FileObject): 16
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 16
LogChannelInterface (org.pentaho.di.core.logging.LogChannelInterface): 15
SimpleLoggingObject (org.pentaho.di.core.logging.SimpleLoggingObject): 15
PrintWriter (java.io.PrintWriter): 12
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 12