
Example 26 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class JobEntryCopyFilesIT, method copyFileWithoutOverwrite:

@Test
public void copyFileWithoutOverwrite() throws Exception {
    entry.setoverwrite_files(false);
    Path pathToFile = Files.createTempFile(source, "file", "");
    FileUtils.copyDirectory(source.toFile(), destination.toFile());
    String path = destination.resolve(pathToFile.getFileName()).toString();
    File file = new File(path);
    long createTime = file.lastModified();
    Result result = entry.execute(new Result(), 0);
    long copyTime = file.lastModified();
    assertTrue(result.getResult());
    assertEquals(0, result.getNrErrors());
    assertTrue("File shouldn't be overwritten", createTime == copyTime);
}
Also used: Path(java.nio.file.Path), File(java.io.File), Result(org.pentaho.di.core.Result), Test(org.junit.Test)
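The test depends on fixture state that is not shown: source and destination are temporary directories, and entry is a JobEntryCopyFiles wired to copy between them. A minimal sketch of such a setup, with the wiring assumed from how the test uses the fields (not necessarily the project's actual fixture):

@Before
public void setUp() throws Exception {
    // Hypothetical fixture: temporary folders acting as copy source and target
    source = Files.createTempDirectory("copy_src");
    destination = Files.createTempDirectory("copy_dst");
    entry = new JobEntryCopyFiles("Copy Files");
    // One source/destination pair, no wildcard filter
    entry.source_filefolder = new String[] { source.toString() };
    entry.destination_filefolder = new String[] { destination.toString() };
    entry.wildcard = new String[] { "" };
}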

Example 27 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class JobEntryJobIT, method testLogfileWritesFromRemote:

@Test
public void testLogfileWritesFromRemote() throws Exception {
    JobEntryJob job = spy(new JobEntryJob(JOB_ENTRY_JOB_NAME));
    doCallRealMethod().when(job).execute(any(Result.class), anyInt());
    Job parentJob = mock(Job.class);
    JobMeta parentJobMeta = mock(JobMeta.class);
    JobMeta jobMeta = mock(JobMeta.class);
    SlaveServer slaveServer = mock(SlaveServer.class);
    LogChannelInterface log = mock(LogChannelInterface.class);
    SlaveServerJobStatus status = mock(SlaveServerJobStatus.class);
    when(parentJob.getLogLevel()).thenReturn(LogLevel.BASIC);
    when(parentJobMeta.getRepositoryDirectory()).thenReturn(null);
    when(jobMeta.getRepositoryDirectory()).thenReturn(mock(RepositoryDirectoryInterface.class));
    when(jobMeta.getName()).thenReturn(JOB_META_NAME);
    when(parentJob.getJobMeta()).thenReturn(parentJobMeta);
    when(parentJobMeta.findSlaveServer(REMOTE_SLAVE_SERVER_NAME)).thenReturn(slaveServer);
    when(slaveServer.getLogChannel()).thenReturn(log);
    when(log.getLogLevel()).thenReturn(LogLevel.BASIC);
    when(slaveServer.sendXML(anyString(), anyString())).thenReturn(REPLY);
    when(slaveServer.execService(anyString())).thenReturn(REPLY);
    when(slaveServer.getJobStatus(anyString(), anyString(), anyInt())).thenReturn(status);
    when(status.getResult()).thenReturn(mock(Result.class));
    when(status.getLoggingString()).thenReturn(LOG);
    file = Files.createTempFile("file", "");
    doReturn(LOG_FILE_NAME).when(job).getLogFilename();
    doReturn(file.toString()).when(job).environmentSubstitute(LOG_FILE_NAME);
    doReturn(REMOTE_SLAVE_SERVER_NAME).when(job).environmentSubstitute(REMOTE_SLAVE_SERVER_NAME);
    doReturn(jobMeta).when(job).getJobMeta(any(Repository.class), any(VariableSpace.class));
    doNothing().when(job).copyVariablesFrom(anyObject());
    doNothing().when(job).setParentVariableSpace(anyObject());
    job.setLogfile = true;
    job.createParentFolder = false;
    job.logFileLevel = LogLevel.BASIC;
    job.execPerRow = false;
    job.paramsFromPrevious = false;
    job.argFromPrevious = false;
    job.waitingToFinish = true;
    job.setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
    job.setRemoteSlaveServerName(REMOTE_SLAVE_SERVER_NAME);
    job.setParentJob(parentJob);
    job.setParentJobMeta(parentJobMeta);
    job.execute(new Result(), 0);
    String result = Files.lines(file).collect(Collectors.joining(""));
    assertTrue(result.contains(LOG));
}
Also used: RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface), JobMeta(org.pentaho.di.job.JobMeta), SlaveServerJobStatus(org.pentaho.di.www.SlaveServerJobStatus), Repository(org.pentaho.di.repository.Repository), VariableSpace(org.pentaho.di.core.variables.VariableSpace), Matchers.anyString(org.mockito.Matchers.anyString), Job(org.pentaho.di.job.Job), SlaveServer(org.pentaho.di.cluster.SlaveServer), LogChannelInterface(org.pentaho.di.core.logging.LogChannelInterface), Result(org.pentaho.di.core.Result), Test(org.junit.Test)
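The test also references fixture constants and a file field declared elsewhere in JobEntryJobIT. Plausible declarations are sketched below; the values are illustrative only (in particular, REPLY must be XML the slave-server response parsing accepts), not the project's actual constants:

// Hypothetical fixture declarations for the names used above.
private static final String JOB_ENTRY_JOB_NAME = "JobEntryJobName";
private static final String JOB_META_NAME = "jobMetaName";
private static final String REMOTE_SLAVE_SERVER_NAME = "remoteSlaveServerName";
private static final String LOG_FILE_NAME = "logFileName";
private static final String REPLY = "<webresult><result>OK</result></webresult>";
private static final String LOG = "Log line from the remote server";
private Path file;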

Example 28 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class JobEntryZipFileIT, method processFile_ReturnsTrue_OnSuccess:

@Test
public void processFile_ReturnsTrue_OnSuccess() throws Exception {
    final String zipPath = createTempZipFileName("pdi-15013");
    final String content = "temp file";
    final File tempFile = createTempFile(content);
    tempFile.deleteOnExit();
    try {
        Result result = new Result();
        JobEntryZipFile entry = new JobEntryZipFile();
        assertTrue(entry.processRowFile(new Job(), result, zipPath, null, null, tempFile.getAbsolutePath(), null, false));
        FileObject zip = KettleVFS.getFileObject(zipPath);
        assertTrue("Zip archive should be created", zip.exists());
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        IOUtils.copy(zip.getContent().getInputStream(), os);
        ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(os.toByteArray()));
        ZipEntry zipEntry = zis.getNextEntry();
        assertEquals("Input file should be put into the archive", tempFile.getName(), zipEntry.getName());
        os.reset();
        IOUtils.copy(zis, os);
        assertEquals("File's content should be equal to original", content, new String(os.toByteArray()));
    } finally {
        tempFile.delete();
        File tempZipFile = new File(zipPath);
        tempZipFile.delete();
    }
}
Also used: ZipInputStream(java.util.zip.ZipInputStream), ByteArrayInputStream(java.io.ByteArrayInputStream), ZipEntry(java.util.zip.ZipEntry), FileObject(org.apache.commons.vfs2.FileObject), ByteArrayOutputStream(java.io.ByteArrayOutputStream), Job(org.pentaho.di.job.Job), File(java.io.File), Result(org.pentaho.di.core.Result), Test(org.junit.Test)
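The helpers createTempZipFileName and createTempFile are not shown. A sketch consistent with how the test uses them (hypothetical implementations, not the project's code):

// Hypothetical helpers matching the usage above.
private static String createTempZipFileName(String prefix) throws IOException {
    File zip = File.createTempFile(prefix, ".zip");
    // Only the name is needed; JobEntryZipFile creates the archive itself.
    zip.delete();
    return zip.getAbsolutePath();
}

private static File createTempFile(String content) throws IOException {
    File file = File.createTempFile("JobEntryZipFileIT", ".txt");
    try (PrintWriter writer = new PrintWriter(file, "UTF-8")) {
        writer.print(content);
    }
    return file;
}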

Example 29 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class TransLogTable, method getLogRecord:

/**
 * Calculates all the field values required for this transformation log record.
 *
 * @param status
 *          the log status to use
 * @param subject
 *          the subject to query, in this case a Trans object
 * @param parent
 *          the parent object (not used by this implementation)
 * @return the log record row, or null when subject is neither null nor a Trans
 */
public RowMetaAndData getLogRecord(LogStatus status, Object subject, Object parent) {
    if (subject == null || subject instanceof Trans) {
        Trans trans = (Trans) subject;
        Result result = null;
        if (trans != null) {
            result = trans.getResult();
        }
        RowMetaAndData row = new RowMetaAndData();
        for (LogTableField field : fields) {
            if (field.isEnabled()) {
                Object value = null;
                if (trans != null) {
                    switch(ID.valueOf(field.getId())) {
                        case ID_BATCH:
                            value = new Long(trans.getBatchId());
                            break;
                        case CHANNEL_ID:
                            value = trans.getLogChannelId();
                            break;
                        case TRANSNAME:
                            value = trans.getName();
                            break;
                        case STATUS:
                            value = status.getStatus();
                            break;
                        case LINES_READ:
                            value = new Long(result.getNrLinesRead());
                            break;
                        case LINES_WRITTEN:
                            value = new Long(result.getNrLinesWritten());
                            break;
                        case LINES_INPUT:
                            value = new Long(result.getNrLinesInput());
                            break;
                        case LINES_OUTPUT:
                            value = new Long(result.getNrLinesOutput());
                            break;
                        case LINES_UPDATED:
                            value = new Long(result.getNrLinesUpdated());
                            break;
                        case LINES_REJECTED:
                            value = new Long(result.getNrLinesRejected());
                            break;
                        case ERRORS:
                            value = new Long(result.getNrErrors());
                            break;
                        case STARTDATE:
                            value = trans.getStartDate();
                            break;
                        case LOGDATE:
                            value = trans.getLogDate();
                            break;
                        case ENDDATE:
                            value = trans.getEndDate();
                            break;
                        case DEPDATE:
                            value = trans.getDepDate();
                            break;
                        case REPLAYDATE:
                            value = trans.getCurrentDate();
                            break;
                        case LOG_FIELD:
                            value = getLogBuffer(trans, trans.getLogChannelId(), status, logSizeLimit);
                            break;
                        case EXECUTING_SERVER:
                            value = trans.getExecutingServer();
                            break;
                        case EXECUTING_USER:
                            value = trans.getExecutingUser();
                            break;
                        case CLIENT:
                            value = KettleClientEnvironment.getInstance().getClient() != null ? KettleClientEnvironment.getInstance().getClient().toString() : "unknown";
                            break;
                        default:
                            break;
                    }
                }
                row.addValue(field.getFieldName(), field.getDataType(), value);
                row.getRowMeta().getValueMeta(row.size() - 1).setLength(field.getLength());
            }
        }
        return row;
    } else {
        return null;
    }
}
Also used: RowMetaAndData(org.pentaho.di.core.RowMetaAndData), Trans(org.pentaho.di.trans.Trans), Result(org.pentaho.di.core.Result)
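Note that the LINES_* and ERRORS cases dereference result whenever trans is non-null, so callers should only pass a transformation that already carries a Result. A hedged usage sketch (transMeta and trans assumed to exist; API names per the code above):

// Hypothetical usage: build the end-of-run log record for a transformation
// and print its fields.
TransLogTable logTable = transMeta.getTransLogTable();
RowMetaAndData logRecord = logTable.getLogRecord(LogStatus.END, trans, null);
if (logRecord != null) {
    for (int i = 0; i < logRecord.size(); i++) {
        System.out.println(logRecord.getRowMeta().getValueMeta(i).getName()
            + " = " + logRecord.getData()[i]);
    }
}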

Example 30 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class Job, method run:

/**
 * Threads main loop: called by Thread.start();
 */
public void run() {
    // this job's heartbeat scheduled executor
    ExecutorService heartbeat = null;
    try {
        stopped = new AtomicBoolean(false);
        finished = new AtomicBoolean(false);
        initialized = new AtomicBoolean(true);
        // Create a new variable name space as we want jobs to have their own set of variables.
        // initialize from parentJob or null
        // 
        variables.initializeVariablesFrom(parentJob);
        setInternalKettleVariables(variables);
        copyParametersFrom(jobMeta);
        activateParameters();
        // Run the job
        // 
        fireJobStartListeners();
        heartbeat = startHeartbeat(getHeartbeatIntervalInSeconds());
        result = execute();
    } catch (Throwable je) {
        log.logError(BaseMessages.getString(PKG, "Job.Log.ErrorExecJob", je.getMessage()), je);
        // log.logError(Const.getStackTracker(je));
        // 
        // we don't have result object because execute() threw a curve-ball.
        // So we create a new error object.
        // 
        result = new Result();
        result.setNrErrors(1L);
        result.setResult(false);
        // This can be before actual execution
        addErrors(1);
        emergencyWriteJobTracker(result);
        active.set(false);
        finished.set(true);
        stopped.set(false);
    } finally {
        try {
            shutdownHeartbeat(heartbeat);
            ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobFinish.id, this);
            jobMeta.disposeEmbeddedMetastoreProvider();
            fireJobFinishListeners();
            // release unused vfs connections
            KettleVFS.freeUnusedResources();
        } catch (KettleException e) {
            result.setNrErrors(1);
            result.setResult(false);
            log.logError(BaseMessages.getString(PKG, "Job.Log.ErrorExecJob", e.getMessage()), e);
            emergencyWriteJobTracker(result);
        }
    }
}
Also used: AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean), KettleException(org.pentaho.di.core.exception.KettleException), ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService), ExecutorService(java.util.concurrent.ExecutorService), WebResult(org.pentaho.di.www.WebResult), Result(org.pentaho.di.core.Result)
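Job extends Thread, so this run() method is entered through start(), as the javadoc notes. A minimal caller sketch (repository and jobMeta assumed to be available in scope):

// Hypothetical caller: start the job thread and wait for run() to finish.
Job job = new Job(repository, jobMeta);
job.start();
job.waitUntilFinished();
Result result = job.getResult();
if (result.getNrErrors() > 0) {
    log.logError("Job finished with " + result.getNrErrors() + " error(s)");
}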

Aggregations

Result (org.pentaho.di.core.Result): 192
Test (org.junit.Test): 75
KettleException (org.pentaho.di.core.exception.KettleException): 75
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 64
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 57
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 40
FileObject (org.apache.commons.vfs2.FileObject): 34
Job (org.pentaho.di.job.Job): 32
IOException (java.io.IOException): 24
ResultFile (org.pentaho.di.core.ResultFile): 20
File (java.io.File): 17
ArrayList (java.util.ArrayList): 16
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 15
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 12
Pattern (java.util.regex.Pattern): 10
KettleFileException (org.pentaho.di.core.exception.KettleFileException): 10
Database (org.pentaho.di.core.database.Database): 9
Date (java.util.Date): 8
Trans (org.pentaho.di.trans.Trans): 8
Matcher (java.util.regex.Matcher): 7