
Example 1 with JobLogTable

Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.

From class SpoonExportXmlTest, method savingJobToXmlNotChangesLogTables.

@Test
public void savingJobToXmlNotChangesLogTables() {
    JobMeta jobMeta = new JobMeta();
    initTables(jobMeta);
    // Keep references to the original log tables so they can be checked after the save.
    JobLogTable originJobLogTable = jobMeta.getJobLogTable();
    JobEntryLogTable originJobEntryLogTable = jobMeta.getJobEntryLogTable();
    ChannelLogTable originChannelLogTable = jobMeta.getChannelLogTable();
    // Stub Spoon so that the real saveXMLFile(boolean) runs against this job.
    when(spoon.getActiveTransformation()).thenReturn(null);
    when(spoon.getActiveJob()).thenReturn(jobMeta);
    when(spoon.saveXMLFile(any(JobMeta.class), anyBoolean())).thenReturn(true);
    when(spoon.saveXMLFile(anyBoolean())).thenCallRealMethod();
    spoon.saveXMLFile(true);
    // Saving to XML must leave the job's log table settings untouched.
    tablesCommonValuesEqual(originJobLogTable, jobMeta.getJobLogTable());
    assertEquals(originJobLogTable.getLogInterval(), jobMeta.getJobLogTable().getLogInterval());
    assertEquals(originJobLogTable.getLogSizeLimit(), jobMeta.getJobLogTable().getLogSizeLimit());
    tablesCommonValuesEqual(originJobEntryLogTable, jobMeta.getJobEntryLogTable());
    tablesCommonValuesEqual(originChannelLogTable, jobMeta.getChannelLogTable());
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) JobLogTable(org.pentaho.di.core.logging.JobLogTable) ChannelLogTable(org.pentaho.di.core.logging.ChannelLogTable) JobEntryLogTable(org.pentaho.di.core.logging.JobEntryLogTable) Test(org.junit.Test)
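The assertion helper tablesCommonValuesEqual is defined elsewhere in SpoonExportXmlTest. Below is a minimal sketch of what such a helper plausibly compares, limited to getters that already appear in these examples; getConnectionName() is assumed to exist on BaseLogTable alongside getSchemaName() and getTableName(). This is an illustration, not the project's actual helper:

private void tablesCommonValuesEqual(BaseLogTable expected, BaseLogTable actual) {
    // Fields shared by every log table type: connection, schema and table name.
    assertEquals(expected.getConnectionName(), actual.getConnectionName()); // assumed getter on BaseLogTable
    assertEquals(expected.getSchemaName(), actual.getSchemaName());
    assertEquals(expected.getTableName(), actual.getTableName());
}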

Example 2 with JobLogTable

Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.

From class XmlExportHelper, method swapTables.

/**
 * @param jobMeta
 *            contains log tables to be refactored before export
 */
public static void swapTables(JobMeta jobMeta) {
    // Replace each defined log table with a clone whose global parameter defaults
    // are cleared, so environment-specific values are not written to the exported XML.
    JobLogTable jobLogTable = jobMeta.getJobLogTable();
    if (jobLogTable != null) {
        JobLogTable cloneJobLogTable = (JobLogTable) jobLogTable.clone();
        cloneJobLogTable.setAllGlobalParametersToNull();
        jobMeta.setJobLogTable(cloneJobLogTable);
    }
    JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
    if (jobEntryLogTable != null) {
        JobEntryLogTable cloneEntryLogTable = (JobEntryLogTable) jobEntryLogTable.clone();
        cloneEntryLogTable.setAllGlobalParametersToNull();
        jobMeta.setJobEntryLogTable(cloneEntryLogTable);
    }
    ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
    if (channelLogTable != null) {
        ChannelLogTable cloneChannelLogTable = (ChannelLogTable) channelLogTable.clone();
        cloneChannelLogTable.setAllGlobalParametersToNull();
        jobMeta.setChannelLogTable(cloneChannelLogTable);
    }
    // Plugin-provided (extra) log tables are sanitized the same way, when they can be cloned.
    List<LogTableInterface> extraLogTables = jobMeta.getExtraLogTables();
    if (extraLogTables != null) {
        List<LogTableInterface> cloneExtraLogTables = new ArrayList<>();
        for (LogTableInterface logTable : extraLogTables) {
            if (logTable instanceof BaseLogTable) {
                if (logTable instanceof Cloneable) {
                    BaseLogTable cloneExtraLogTable = (BaseLogTable) logTable.clone();
                    cloneExtraLogTable.setAllGlobalParametersToNull();
                    cloneExtraLogTables.add((LogTableInterface) cloneExtraLogTable);
                }
            }
        }
        jobMeta.setExtraLogTables(cloneExtraLogTables);
    }
}
Also used : LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobLogTable(org.pentaho.di.core.logging.JobLogTable) BaseLogTable(org.pentaho.di.core.logging.BaseLogTable) ChannelLogTable(org.pentaho.di.core.logging.ChannelLogTable) ArrayList(java.util.ArrayList) JobEntryLogTable(org.pentaho.di.core.logging.JobEntryLogTable)
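One plausible way this helper fits into an export flow, sketched here rather than taken from the actual Spoon code path: snapshot the original tables, swap in the sanitized clones, serialize, then put the originals back. That restore step is what the test in Example 1 indirectly verifies, since the saved job's log tables end up unchanged. JobMeta.getXML() is assumed to be the serialization entry point (it throws KettleException):

// Sketch only; variable names and the surrounding flow are illustrative.
JobLogTable originalJobLogTable = jobMeta.getJobLogTable();
JobEntryLogTable originalJobEntryLogTable = jobMeta.getJobEntryLogTable();
ChannelLogTable originalChannelLogTable = jobMeta.getChannelLogTable();

XmlExportHelper.swapTables(jobMeta);   // install clones with global parameters nulled out
String exportedXml = jobMeta.getXML(); // serialize the sanitized job definition

// Restore the originals so the in-memory job is left exactly as it was
// (any extra log tables would be restored the same way).
jobMeta.setJobLogTable(originalJobLogTable);
jobMeta.setJobEntryLogTable(originalJobEntryLogTable);
jobMeta.setChannelLogTable(originalChannelLogTable);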

Example 3 with JobLogTable

Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.

From class JobDialog, method showLogTypeOptions.

private void showLogTypeOptions(int index) {
    if (index != previousLogTableIndex) {
        // Read the widget values of the previously shown log table type back into its model...
        getLogInfo(previousLogTableIndex);
        // ...then dispose its controls before building the options for the newly selected type.
        for (Control control : wLogOptionsComposite.getChildren()) {
            control.dispose();
        }
        previousLogTableIndex = index;
        LogTableInterface logTable = logTables.get(index);
        LogTableUserInterface logTableUserInterface = logTableUserInterfaces.get(index);
        if (logTableUserInterface != null) {
            logTableUserInterface.showLogTableOptions(wLogOptionsComposite, logTable);
        } else {
            if (logTable instanceof JobLogTable) {
                showJobLogTableOptions((JobLogTable) logTable);
            } else if (logTable instanceof ChannelLogTable) {
                showChannelLogTableOptions((ChannelLogTable) logTable);
            }
            if (logTable instanceof JobEntryLogTable) {
                showJobEntryLogTableOptions((JobEntryLogTable) logTable);
            }
        }
        wLogOptionsComposite.layout(true, true);
        wLogComp.layout(true, true);
    }
}
Also used : LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) Control(org.eclipse.swt.widgets.Control) JobLogTable(org.pentaho.di.core.logging.JobLogTable) ChannelLogTable(org.pentaho.di.core.logging.ChannelLogTable) JobEntryLogTable(org.pentaho.di.core.logging.JobEntryLogTable)

Example 4 with JobLogTable

Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.

From class JobHasJobLogConfiguredImportRule, method verifyRule.

@Override
public List<ImportValidationFeedback> verifyRule(Object subject) {
    List<ImportValidationFeedback> feedback = new ArrayList<ImportValidationFeedback>();
    if (!isEnabled()) {
        return feedback;
    }
    if (!(subject instanceof JobMeta)) {
        return feedback;
    }
    JobMeta jobMeta = (JobMeta) subject;
    JobLogTable jobLogTable = jobMeta.getJobLogTable();
    if (!jobLogTable.isDefined()) {
        feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "The logging table is not defined"));
    } else {
        if (!Utils.isEmpty(schemaName)) {
            if (schemaName.equals(jobLogTable.getSchemaName())) {
                feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.APPROVAL, "The schema name is set to: " + schemaName));
            } else {
                feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "The schema name is not set to: " + schemaName));
            }
        }
        if (!Utils.isEmpty(tableName)) {
            if (tableName.equals(jobLogTable.getTableName())) {
                feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.APPROVAL, "The table name is set to: " + tableName));
            } else {
                feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "The table name is not set to: " + tableName));
            }
        }
        if (!Utils.isEmpty(connectionName)) {
            if (connectionName.equals(jobLogTable.getDatabaseMeta().getName())) {
                feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.APPROVAL, "The database connection used for logging is: " + connectionName));
            } else {
                feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "The database connection used for logging is not: " + connectionName));
            }
        }
        if (feedback.isEmpty()) {
            feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.APPROVAL, "The logging table is correctly defined"));
        }
    }
    return feedback;
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) JobLogTable(org.pentaho.di.core.logging.JobLogTable) ImportValidationFeedback(org.pentaho.di.imp.rule.ImportValidationFeedback) ArrayList(java.util.ArrayList)
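A minimal sketch of exercising this rule against a JobMeta outside the import framework. The setters (setEnabled, setSchemaName, setTableName, setConnectionName) and the feedback accessors (getResultType(), getComment()) are assumed from the surrounding BaseImportRule / ImportValidationFeedback API and should be verified against the actual classes; the configuration values are hypothetical:

JobHasJobLogConfiguredImportRule rule = new JobHasJobLogConfiguredImportRule();
rule.setEnabled(true);            // assumed setter inherited from the base import rule
rule.setSchemaName("LOGGING");    // hypothetical schema the job log table must use
rule.setTableName("JOB_LOG");     // hypothetical table name
rule.setConnectionName("log-db"); // hypothetical connection name

List<ImportValidationFeedback> feedback = rule.verifyRule(jobMeta);
for (ImportValidationFeedback item : feedback) {
    // getResultType() and getComment() are assumed accessors.
    System.out.println(item.getResultType() + ": " + item.getComment());
}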

Example 5 with JobLogTable

Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.

From class Job, method beginProcessing.

/**
 * Handle logging at start
 *
 * @return true if it went OK.
 *
 * @throws KettleException
 */
public boolean beginProcessing() throws KettleException {
    currentDate = new Date();
    logDate = new Date();
    startDate = Const.MIN_DATE;
    endDate = currentDate;
    resetErrors();
    final JobLogTable jobLogTable = jobMeta.getJobLogTable();
    int intervalInSeconds = Const.toInt(environmentSubstitute(jobLogTable.getLogInterval()), -1);
    if (jobLogTable.isDefined()) {
        DatabaseMeta logcon = jobMeta.getJobLogTable().getDatabaseMeta();
        String schemaName = environmentSubstitute(jobMeta.getJobLogTable().getActualSchemaName());
        String tableName = environmentSubstitute(jobMeta.getJobLogTable().getActualTableName());
        String schemaAndTable = jobMeta.getJobLogTable().getDatabaseMeta().getQuotedSchemaTableCombination(schemaName, tableName);
        Database ldb = new Database(this, logcon);
        ldb.shareVariablesWith(this);
        ldb.connect();
        ldb.setCommit(logCommitSize);
        try {
            // See if we have to add a batch id...
            Long id_batch = new Long(1);
            if (jobMeta.getJobLogTable().isBatchIdUsed()) {
                id_batch = logcon.getNextBatchId(ldb, schemaName, tableName, jobLogTable.getKeyField().getFieldName());
                setBatchId(id_batch.longValue());
                if (getPassedBatchId() <= 0) {
                    setPassedBatchId(id_batch.longValue());
                }
            }
            Object[] lastr = ldb.getLastLogDate(schemaAndTable, jobMeta.getName(), true, LogStatus.END);
            if (!Utils.isEmpty(lastr)) {
                Date last;
                try {
                    last = ldb.getReturnRowMeta().getDate(lastr, 0);
                } catch (KettleValueException e) {
                    throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.ConversionError", "" + tableName), e);
                }
                if (last != null) {
                    startDate = last;
                }
            }
            depDate = currentDate;
            ldb.writeLogRecord(jobMeta.getJobLogTable(), LogStatus.START, this, null);
            if (!ldb.isAutoCommit()) {
                ldb.commitLog(true, jobMeta.getJobLogTable());
            }
            ldb.disconnect();
            // 
            if (intervalInSeconds > 0) {
                final Timer timer = new Timer(getName() + " - interval logging timer");
                TimerTask timerTask = new TimerTask() {

                    public void run() {
                        try {
                            endProcessing();
                        } catch (Exception e) {
                            log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformIntervalLogging"), e);
                            // Also stop the show...
                            // 
                            errors.incrementAndGet();
                            stopAll();
                        }
                    }
                };
                timer.schedule(timerTask, intervalInSeconds * 1000, intervalInSeconds * 1000);
                addJobListener(new JobAdapter() {

                    public void jobFinished(Job job) {
                        timer.cancel();
                    }
                });
            }
            // Add a listener at the end of the job to take care of writing the final job
            // log record...
            // 
            addJobListener(new JobAdapter() {

                public void jobFinished(Job job) throws KettleException {
                    try {
                        endProcessing();
                    } catch (KettleJobException e) {
                        log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToWriteToLoggingTable", jobLogTable.toString()), e);
                        // job is failed in case log database record is failed!
                        throw new KettleException(e);
                    }
                }
            });
        } catch (KettleDatabaseException dbe) {
            // This is even before actual execution
            addErrors(1);
            throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.UnableToProcessLoggingStart", "" + tableName), dbe);
        } finally {
            ldb.disconnect();
        }
    }
    // If we need to write out the job entry logging information, do so at the end of the job:
    // 
    JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
    if (jobEntryLogTable.isDefined()) {
        addJobListener(new JobAdapter() {

            public void jobFinished(Job job) throws KettleException {
                try {
                    writeJobEntryLogInformation();
                } catch (KettleException e) {
                    throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformJobEntryLoggingAtJobEnd"), e);
                }
            }
        });
    }
    // If we need to write the log channel hierarchy and lineage information,
    // add a listener for that too...
    // 
    ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
    if (channelLogTable.isDefined()) {
        addJobListener(new JobAdapter() {

            public void jobFinished(Job job) throws KettleException {
                try {
                    writeLogChannelInformation();
                } catch (KettleException e) {
                    throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformLoggingAtTransEnd"), e);
                }
            }
        });
    }
    JobExecutionExtension extension = new JobExecutionExtension(this, result, null, false);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobBeginProcessing.id, extension);
    return true;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobLogTable(org.pentaho.di.core.logging.JobLogTable) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Date(java.util.Date) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) KettleJobException(org.pentaho.di.core.exception.KettleJobException) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) KettleJobException(org.pentaho.di.core.exception.KettleJobException) KettleValueException(org.pentaho.di.core.exception.KettleValueException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Timer(java.util.Timer) TimerTask(java.util.TimerTask) ChannelLogTable(org.pentaho.di.core.logging.ChannelLogTable) Database(org.pentaho.di.core.database.Database) FileObject(org.apache.commons.vfs2.FileObject) JobEntryLogTable(org.pentaho.di.core.logging.JobEntryLogTable) KettleValueException(org.pentaho.di.core.exception.KettleValueException) JobEntryJob(org.pentaho.di.job.entries.job.JobEntryJob)
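The interval logging branch of beginProcessing boils down to a standard java.util.Timer pattern: schedule a fixed-rate task that writes a log record, and cancel it when the job finishes. A stripped-down, Kettle-free sketch of that pattern (all names here are illustrative):

import java.util.Timer;
import java.util.TimerTask;

public class IntervalLoggingSketch {

    public static void main(String[] args) throws InterruptedException {
        // In the real code: Const.toInt(environmentSubstitute(jobLogTable.getLogInterval()), -1)
        int intervalInSeconds = 2;

        // Schedule a periodic "write an interval log record" task, as beginProcessing() does.
        final Timer timer = new Timer("interval logging timer");
        TimerTask timerTask = new TimerTask() {
            @Override
            public void run() {
                // The real task calls endProcessing() and stops the job if that fails.
                System.out.println("interval log record written");
            }
        };
        timer.schedule(timerTask, intervalInSeconds * 1000L, intervalInSeconds * 1000L);

        Thread.sleep(7000); // stand-in for the running job
        timer.cancel();     // mirrors the JobAdapter#jobFinished callback cancelling the timer
    }
}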

Aggregations

JobLogTable (org.pentaho.di.core.logging.JobLogTable): 15
ChannelLogTable (org.pentaho.di.core.logging.ChannelLogTable): 8
JobEntryLogTable (org.pentaho.di.core.logging.JobEntryLogTable): 8
JobMeta (org.pentaho.di.job.JobMeta): 7
Test (org.junit.Test): 5
LogTableInterface (org.pentaho.di.core.logging.LogTableInterface): 4
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 4
ArrayList (java.util.ArrayList): 3
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 3
IUnifiedRepository (org.pentaho.platform.api.repository2.unified.IUnifiedRepository): 3
DataNode (org.pentaho.platform.api.repository2.unified.data.node.DataNode): 3
Date (java.util.Date): 2
HashMap (java.util.HashMap): 2
Map (java.util.Map): 2
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 2
KettleException (org.pentaho.di.core.exception.KettleException): 2
KettleJobException (org.pentaho.di.core.exception.KettleJobException): 2
KettleValueException (org.pentaho.di.core.exception.KettleValueException): 2
Point (org.pentaho.di.core.gui.Point): 2
DuplicateParamException (org.pentaho.di.core.parameters.DuplicateParamException): 2