
Example 26 with LogTableInterface

Use of org.pentaho.di.core.logging.LogTableInterface in project pentaho-kettle by pentaho.

From the class JobDelegate, method loadJobMetaDetails:

protected void loadJobMetaDetails(DataNode rootNode, JobMeta jobMeta) throws KettleException {
    try {
        jobMeta.setExtendedDescription(getString(rootNode, PROP_EXTENDED_DESCRIPTION));
        jobMeta.setJobversion(getString(rootNode, PROP_JOB_VERSION));
        jobMeta.setJobstatus((int) rootNode.getProperty(PROP_JOB_STATUS).getLong());
        jobMeta.getJobLogTable().setTableName(getString(rootNode, PROP_TABLE_NAME_LOG));
        jobMeta.setCreatedUser(getString(rootNode, PROP_CREATED_USER));
        jobMeta.setCreatedDate(getDate(rootNode, PROP_CREATED_DATE));
        jobMeta.setModifiedUser(getString(rootNode, PROP_MODIFIED_USER));
        jobMeta.setModifiedDate(getDate(rootNode, PROP_MODIFIED_DATE));
        if (rootNode.hasProperty(PROP_DATABASE_LOG)) {
            String id = rootNode.getProperty(PROP_DATABASE_LOG).getRef().getId().toString();
            DatabaseMeta conn = DatabaseMeta.findDatabase(jobMeta.getDatabases(), new StringObjectId(id));
            jobMeta.getJobLogTable().setConnectionName(conn.getName());
        }
        jobMeta.getJobLogTable().setBatchIdUsed(rootNode.getProperty(PROP_USE_BATCH_ID).getBoolean());
        jobMeta.setBatchIdPassed(rootNode.getProperty(PROP_PASS_BATCH_ID).getBoolean());
        jobMeta.getJobLogTable().setLogFieldUsed(rootNode.getProperty(PROP_USE_LOGFIELD).getBoolean());
        jobMeta.getJobLogTable().setLogSizeLimit(getString(rootNode, PROP_LOG_SIZE_LIMIT));
        // Load the other logging tables as well.
        RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute(rootNode, jobMeta.getDatabases());
        for (LogTableInterface logTable : jobMeta.getLogTables()) {
            logTable.loadFromRepository(attributeInterface);
        }
        // Load the attributes map.
        AttributesMapUtil.loadAttributesMap(rootNode, jobMeta);
    } catch (Exception e) {
        throw new KettleException("Error loading job details", e);
    }
}
Also used : LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) KettleException(org.pentaho.di.core.exception.KettleException) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) StringObjectId(org.pentaho.di.repository.StringObjectId) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface)
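
For context, the loop above delegates per-table persistence to each LogTableInterface. A minimal sketch of the symmetric save path is shown below; it assumes the saveToRepository(RepositoryAttributeInterface) counterpart on LogTableInterface, reuses the PurRepositoryAttribute wrapper from this example, and the helper name saveJobLogTables is illustrative rather than taken from JobDelegate.

// Hedged sketch: write the same log-table settings back to the repository node.
// Assumes LogTableInterface.saveToRepository(RepositoryAttributeInterface), the
// mirror of the loadFromRepository(...) call used above; the method name is hypothetical.
protected void saveJobLogTables(DataNode rootNode, JobMeta jobMeta) throws KettleException {
    try {
        RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute(rootNode, jobMeta.getDatabases());
        for (LogTableInterface logTable : jobMeta.getLogTables()) {
            logTable.saveToRepository(attributeInterface);
        }
    } catch (Exception e) {
        throw new KettleException("Error saving job log tables", e);
    }
}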

Example 27 with LogTableInterface

Use of org.pentaho.di.core.logging.LogTableInterface in project pentaho-kettle by pentaho.

From the class KettleFileTableModel, method getLastExecutionResult:

public static String getLastExecutionResult(LogChannelInterface log, LoggingObjectInterface parentObject, ReportSubjectLocation filename) throws KettleException {
    LogTableInterface logTable = null;
    if (filename.isTransformation()) {
        TransMeta transMeta = TransformationInformation.getInstance().getTransMeta(filename);
        logTable = transMeta.getTransLogTable();
    } else {
        JobMeta jobMeta = JobInformation.getInstance().getJobMeta(filename);
        logTable = jobMeta.getJobLogTable();
    }
    if (logTable != null && logTable.isDefined()) {
        DatabaseMeta dbMeta = logTable.getDatabaseMeta();
        Database database = new Database(parentObject, dbMeta);
        try {
            database.connect();
            String sql = "SELECT ";
            sql += dbMeta.quoteField(logTable.getStatusField().getFieldName()) + ", ";
            sql += dbMeta.quoteField(logTable.getLogDateField().getFieldName()) + ", ";
            sql += dbMeta.quoteField(logTable.getErrorsField().getFieldName());
            sql += " FROM ";
            sql += dbMeta.getQuotedSchemaTableCombination(logTable.getSchemaName(), logTable.getTableName());
            sql += " ORDER BY " + dbMeta.quoteField(logTable.getLogDateField().getFieldName()) + " DESC";
            RowMetaAndData oneRow = database.getOneRow(sql);
            String status = oneRow.getString(0, "?");
            Date date = oneRow.getDate(1, null);
            Long nrErrors = oneRow.getInteger(2);
            String evaluation;
            if (status.equalsIgnoreCase(LogStatus.END.getStatus())) {
                evaluation = "Ended";
            } else if (status.equalsIgnoreCase(LogStatus.START.getStatus())) {
                evaluation = "Started";
            } else if (status.equalsIgnoreCase(LogStatus.STOP.getStatus())) {
                evaluation = "Stopped";
            } else if (status.equalsIgnoreCase(LogStatus.RUNNING.getStatus())) {
                evaluation = "Running";
            } else if (status.equalsIgnoreCase(LogStatus.PAUSED.getStatus())) {
                evaluation = "Paused";
            } else if (status.equalsIgnoreCase(LogStatus.ERROR.getStatus())) {
                evaluation = "Failed";
            } else {
                evaluation = "Unknown";
            }
            if (nrErrors != null && nrErrors > 0) {
                evaluation += " with errors";
            } else {
                evaluation += " with success";
            }
            return evaluation + " at " + XMLHandler.date2string(date);
        } catch (Exception e) {
            log.logBasic("Unable to get logging information from log table" + logTable);
        } finally {
            database.disconnect();
        }
    }
    return null;
}
Also used : LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobMeta(org.pentaho.di.job.JobMeta) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) TransMeta(org.pentaho.di.trans.TransMeta) Database(org.pentaho.di.core.database.Database) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Date(java.util.Date) KettleException(org.pentaho.di.core.exception.KettleException)
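
getLastExecutionResult returns null when the subject has no defined log table or when the lookup fails, so callers need to handle the missing case. A brief, hypothetical calling sketch (the wrapper name describeLastRun and the placeholder string are illustrative, not part of KettleFileTableModel):

// Hypothetical wrapper: fall back to a placeholder when no logging result is available.
public static String describeLastRun(LogChannelInterface log, LoggingObjectInterface parent, ReportSubjectLocation location) throws KettleException {
    String result = KettleFileTableModel.getLastExecutionResult(log, parent, location);
    return result != null ? result : "-";  // "-" when the subject has never been logged
}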

Example 28 with LogTableInterface

Use of org.pentaho.di.core.logging.LogTableInterface in project pentaho-kettle by pentaho.

From the class TransDelegate, method loadTransformationDetails:

protected void loadTransformationDetails(final DataNode rootNode, final TransMeta transMeta) throws KettleException {
    transMeta.setExtendedDescription(getString(rootNode, PROP_EXTENDED_DESCRIPTION));
    transMeta.setTransversion(getString(rootNode, PROP_TRANS_VERSION));
    transMeta.setTransstatus((int) rootNode.getProperty(PROP_TRANS_STATUS).getLong());
    if (rootNode.hasProperty(PROP_STEP_READ)) {
        transMeta.getTransLogTable().setStepRead(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_READ)));
    }
    if (rootNode.hasProperty(PROP_STEP_WRITE)) {
        transMeta.getTransLogTable().setStepWritten(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_WRITE)));
    }
    if (rootNode.hasProperty(PROP_STEP_INPUT)) {
        transMeta.getTransLogTable().setStepInput(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_INPUT)));
    }
    if (rootNode.hasProperty(PROP_STEP_OUTPUT)) {
        transMeta.getTransLogTable().setStepOutput(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_OUTPUT)));
    }
    if (rootNode.hasProperty(PROP_STEP_UPDATE)) {
        transMeta.getTransLogTable().setStepUpdate(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_UPDATE)));
    }
    if (rootNode.hasProperty(PROP_STEP_REJECTED)) {
        transMeta.getTransLogTable().setStepRejected(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_REJECTED)));
    }
    if (rootNode.hasProperty(PROP_DATABASE_LOG)) {
        String id = rootNode.getProperty(PROP_DATABASE_LOG).getRef().getId().toString();
        DatabaseMeta conn = DatabaseMeta.findDatabase(transMeta.getDatabases(), new StringObjectId(id));
        transMeta.getTransLogTable().setConnectionName(conn.getName());
    }
    transMeta.getTransLogTable().setTableName(getString(rootNode, PROP_TABLE_NAME_LOG));
    transMeta.getTransLogTable().setBatchIdUsed(rootNode.getProperty(PROP_USE_BATCHID).getBoolean());
    transMeta.getTransLogTable().setLogFieldUsed(rootNode.getProperty(PROP_USE_LOGFIELD).getBoolean());
    if (rootNode.hasProperty(PROP_ID_DATABASE_MAXDATE)) {
        String id = rootNode.getProperty(PROP_ID_DATABASE_MAXDATE).getRef().getId().toString();
        transMeta.setMaxDateConnection(DatabaseMeta.findDatabase(transMeta.getDatabases(), new StringObjectId(id)));
    }
    transMeta.setMaxDateTable(getString(rootNode, PROP_TABLE_NAME_MAXDATE));
    transMeta.setMaxDateField(getString(rootNode, PROP_FIELD_NAME_MAXDATE));
    transMeta.setMaxDateOffset(rootNode.getProperty(PROP_OFFSET_MAXDATE).getDouble());
    transMeta.setMaxDateDifference(rootNode.getProperty(PROP_DIFF_MAXDATE).getDouble());
    transMeta.setCreatedUser(getString(rootNode, PROP_CREATED_USER));
    transMeta.setCreatedDate(getDate(rootNode, PROP_CREATED_DATE));
    transMeta.setModifiedUser(getString(rootNode, PROP_MODIFIED_USER));
    transMeta.setModifiedDate(getDate(rootNode, PROP_MODIFIED_DATE));
    // Optional:
    transMeta.setSizeRowset(Const.ROWS_IN_ROWSET);
    long val_size_rowset = rootNode.getProperty(PROP_SIZE_ROWSET).getLong();
    if (val_size_rowset > 0) {
        transMeta.setSizeRowset((int) val_size_rowset);
    }
    if (rootNode.hasProperty(PROP_ID_DIRECTORY)) {
        String id_directory = getString(rootNode, PROP_ID_DIRECTORY);
        if (log.isDetailed()) {
            // $NON-NLS-1$
            log.logDetailed(toString(), PROP_ID_DIRECTORY + "=" + id_directory);
        }
        // Set the right directory: always reload the folder structure.
        transMeta.setRepositoryDirectory(repo.findDirectory(new StringObjectId(id_directory)));
    }
    transMeta.setUsingUniqueConnections(rootNode.getProperty(PROP_UNIQUE_CONNECTIONS).getBoolean());
    boolean feedbackShown = true;
    if (rootNode.hasProperty(PROP_FEEDBACK_SHOWN)) {
        feedbackShown = rootNode.getProperty(PROP_FEEDBACK_SHOWN).getBoolean();
    }
    transMeta.setFeedbackShown(feedbackShown);
    transMeta.setFeedbackSize((int) rootNode.getProperty(PROP_FEEDBACK_SIZE).getLong());
    boolean usingThreadPriorityManagement = true;
    if (rootNode.hasProperty(PROP_USING_THREAD_PRIORITIES)) {
        usingThreadPriorityManagement = rootNode.getProperty(PROP_USING_THREAD_PRIORITIES).getBoolean();
    }
    transMeta.setUsingThreadPriorityManagment(usingThreadPriorityManagement);
    transMeta.setSharedObjectsFile(getString(rootNode, PROP_SHARED_FILE));
    String transTypeCode = getString(rootNode, PROP_TRANSFORMATION_TYPE);
    transMeta.setTransformationType(TransformationType.getTransformationTypeByCode(transTypeCode));
    // Performance monitoring for steps...
    boolean capturingStepPerformanceSnapShots = true;
    if (rootNode.hasProperty(PROP_CAPTURE_STEP_PERFORMANCE)) {
        capturingStepPerformanceSnapShots = rootNode.getProperty(PROP_CAPTURE_STEP_PERFORMANCE).getBoolean();
    }
    transMeta.setCapturingStepPerformanceSnapShots(capturingStepPerformanceSnapShots);
    transMeta.setStepPerformanceCapturingDelay(getLong(rootNode, PROP_STEP_PERFORMANCE_CAPTURING_DELAY));
    transMeta.setStepPerformanceCapturingSizeLimit(getString(rootNode, PROP_STEP_PERFORMANCE_CAPTURING_SIZE_LIMIT));
    transMeta.getPerformanceLogTable().setTableName(getString(rootNode, PROP_STEP_PERFORMANCE_LOG_TABLE));
    transMeta.getTransLogTable().setLogSizeLimit(getString(rootNode, PROP_LOG_SIZE_LIMIT));
    // Load the other logging tables as well.
    RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute(rootNode, transMeta.getDatabases());
    for (LogTableInterface logTable : transMeta.getLogTables()) {
        logTable.loadFromRepository(attributeInterface);
    }
    AttributesMapUtil.loadAttributesMap(rootNode, transMeta);
}
Also used : LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) StringObjectId(org.pentaho.di.repository.StringObjectId) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface)
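
The six step-assignment branches at the top of loadTransformationDetails repeat the same hasProperty / findStep / setter pattern. A hedged refactoring sketch of that pattern follows; the helper name assignLogTableStep and the use of java.util.function.Consumer are illustrative and not part of TransDelegate.

// Hypothetical helper collapsing the repeated hasProperty / findStep / setter pattern.
private void assignLogTableStep(DataNode rootNode, TransMeta transMeta, String property, java.util.function.Consumer<StepMeta> setter) throws KettleException {
    if (rootNode.hasProperty(property)) {
        setter.accept(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, property)));
    }
}

// Equivalent to the PROP_STEP_READ branch above:
// assignLogTableStep(rootNode, transMeta, PROP_STEP_READ, transMeta.getTransLogTable()::setStepRead);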

Aggregations

LogTableInterface (org.pentaho.di.core.logging.LogTableInterface): 28
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 11
KettleException (org.pentaho.di.core.exception.KettleException): 10
ArrayList (java.util.ArrayList): 8
Database (org.pentaho.di.core.database.Database): 8
RepositoryAttributeInterface (org.pentaho.di.repository.RepositoryAttributeInterface): 7
JobEntryLogTable (org.pentaho.di.core.logging.JobEntryLogTable): 6
UnknownParamException (org.pentaho.di.core.parameters.UnknownParamException): 6
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 5
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 5
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 5
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 5
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog): 5
MessageBox (org.eclipse.swt.widgets.MessageBox): 4
NotePadMeta (org.pentaho.di.core.NotePadMeta): 4
KettleValueException (org.pentaho.di.core.exception.KettleValueException): 4
ChannelLogTable (org.pentaho.di.core.logging.ChannelLogTable): 4
JobLogTable (org.pentaho.di.core.logging.JobLogTable): 4
FileSystemException (org.apache.commons.vfs2.FileSystemException): 3
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 3