Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.
From the class SpoonExportXmlTest, the method savingJobToXmlNotChangesLogTables.
@Test
public void savingJobToXmlNotChangesLogTables() {
  JobMeta jobMeta = new JobMeta();
  initTables( jobMeta );

  JobLogTable originJobLogTable = jobMeta.getJobLogTable();
  JobEntryLogTable originJobEntryLogTable = jobMeta.getJobEntryLogTable();
  ChannelLogTable originChannelLogTable = jobMeta.getChannelLogTable();

  when( spoon.getActiveTransformation() ).thenReturn( null );
  when( spoon.getActiveJob() ).thenReturn( jobMeta );
  when( spoon.saveXMLFile( any( JobMeta.class ), anyBoolean() ) ).thenReturn( true );
  when( spoon.saveXMLFile( anyBoolean() ) ).thenCallRealMethod();
  spoon.saveXMLFile( true );

  tablesCommonValuesEqual( originJobLogTable, jobMeta.getJobLogTable() );
  assertEquals( originJobLogTable.getLogInterval(), jobMeta.getJobLogTable().getLogInterval() );
  assertEquals( originJobLogTable.getLogSizeLimit(), jobMeta.getJobLogTable().getLogSizeLimit() );
  tablesCommonValuesEqual( originJobEntryLogTable, jobMeta.getJobEntryLogTable() );
  tablesCommonValuesEqual( originChannelLogTable, jobMeta.getChannelLogTable() );
}
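The helpers initTables and tablesCommonValuesEqual are defined elsewhere in SpoonExportXmlTest and are not shown by this snippet. A minimal sketch of what the comparison helper might assert, assuming it covers only the fields that every BaseLogTable subclass shares (connection, schema, table name, and logging timeout):

// Hypothetical sketch of the comparison helper; the real implementation lives in
// SpoonExportXmlTest, and the exact field coverage here is an assumption.
private void tablesCommonValuesEqual( BaseLogTable expected, BaseLogTable actual ) {
  assertEquals( expected.getConnectionName(), actual.getConnectionName() );
  assertEquals( expected.getSchemaName(), actual.getSchemaName() );
  assertEquals( expected.getTableName(), actual.getTableName() );
  assertEquals( expected.getTimeoutInDays(), actual.getTimeoutInDays() );
}

Note that getLogInterval() and getLogSizeLimit() are asserted separately in the test because those properties exist only on JobLogTable, not on the shared base class.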
Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.
From the class XmlExportHelper, the method swapTables.
/**
 * Replaces each log table on the given job with a clone whose globally
 * parameterized settings are cleared, so they are not written out during export.
 *
 * @param jobMeta
 *          contains the log tables to be swapped out before export
 */
public static void swapTables( JobMeta jobMeta ) {
  JobLogTable jobLogTable = jobMeta.getJobLogTable();
  if ( jobLogTable != null ) {
    JobLogTable cloneJobLogTable = (JobLogTable) jobLogTable.clone();
    cloneJobLogTable.setAllGlobalParametersToNull();
    jobMeta.setJobLogTable( cloneJobLogTable );
  }

  JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
  if ( jobEntryLogTable != null ) {
    JobEntryLogTable cloneEntryLogTable = (JobEntryLogTable) jobEntryLogTable.clone();
    cloneEntryLogTable.setAllGlobalParametersToNull();
    jobMeta.setJobEntryLogTable( cloneEntryLogTable );
  }

  ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
  if ( channelLogTable != null ) {
    ChannelLogTable cloneChannelLogTable = (ChannelLogTable) channelLogTable.clone();
    cloneChannelLogTable.setAllGlobalParametersToNull();
    jobMeta.setChannelLogTable( cloneChannelLogTable );
  }

  List<LogTableInterface> extraLogTables = jobMeta.getExtraLogTables();
  if ( extraLogTables != null ) {
    List<LogTableInterface> cloneExtraLogTables = new ArrayList<>();
    for ( LogTableInterface logTable : extraLogTables ) {
      if ( logTable instanceof BaseLogTable && logTable instanceof Cloneable ) {
        BaseLogTable cloneExtraLogTable = (BaseLogTable) logTable.clone();
        cloneExtraLogTable.setAllGlobalParametersToNull();
        cloneExtraLogTables.add( (LogTableInterface) cloneExtraLogTable );
      }
    }
    jobMeta.setExtraLogTables( cloneExtraLogTables );
  }
}
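A hedged sketch of how this helper can be combined with export, assuming the caller saves and restores the original tables so the in-memory job is left untouched (the behavior the test above verifies). The helper name exportJobXml and the save/restore bookkeeping are illustrative, not Spoon's actual code:

// Illustrative only: export a job's XML without globally parameterized log table
// settings, then put the original tables back on the in-memory JobMeta.
public static String exportJobXml( JobMeta jobMeta ) throws KettleException {
  JobLogTable originalJobLogTable = jobMeta.getJobLogTable();
  JobEntryLogTable originalJobEntryLogTable = jobMeta.getJobEntryLogTable();
  ChannelLogTable originalChannelLogTable = jobMeta.getChannelLogTable();
  List<LogTableInterface> originalExtraLogTables = jobMeta.getExtraLogTables();
  try {
    XmlExportHelper.swapTables( jobMeta );
    return jobMeta.getXML();
  } finally {
    // Restore the originals so the open job keeps its log table settings.
    jobMeta.setJobLogTable( originalJobLogTable );
    jobMeta.setJobEntryLogTable( originalJobEntryLogTable );
    jobMeta.setChannelLogTable( originalChannelLogTable );
    jobMeta.setExtraLogTables( originalExtraLogTables );
  }
}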
Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.
From the class JobDialog, the method showLogTypeOptions.
private void showLogTypeOptions( int index ) {
  if ( index != previousLogTableIndex ) {
    // Remember the log table options the user entered for the previous type...
    //
    getLogInfo( previousLogTableIndex );

    // Remove the old log table user interface...
    //
    for ( Control control : wLogOptionsComposite.getChildren() ) {
      control.dispose();
    }

    previousLogTableIndex = index;

    LogTableInterface logTable = logTables.get( index );
    LogTableUserInterface logTableUserInterface = logTableUserInterfaces.get( index );

    if ( logTableUserInterface != null ) {
      logTableUserInterface.showLogTableOptions( wLogOptionsComposite, logTable );
    } else {
      if ( logTable instanceof JobLogTable ) {
        showJobLogTableOptions( (JobLogTable) logTable );
      } else if ( logTable instanceof ChannelLogTable ) {
        showChannelLogTableOptions( (ChannelLogTable) logTable );
      }
      if ( logTable instanceof JobEntryLogTable ) {
        showJobEntryLogTableOptions( (JobEntryLogTable) logTable );
      }
    }

    wLogOptionsComposite.layout( true, true );
    wLogComp.layout( true, true );
  }
}
Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.
From the class JobHasJobLogConfiguredImportRule, the method verifyRule.
@Override
public List<ImportValidationFeedback> verifyRule( Object subject ) {
  List<ImportValidationFeedback> feedback = new ArrayList<>();

  if ( !isEnabled() ) {
    return feedback;
  }
  if ( !( subject instanceof JobMeta ) ) {
    return feedback;
  }

  JobMeta jobMeta = (JobMeta) subject;
  JobLogTable jobLogTable = jobMeta.getJobLogTable();

  if ( !jobLogTable.isDefined() ) {
    feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.ERROR, "The logging table is not defined" ) );
  } else {
    if ( !Utils.isEmpty( schemaName ) ) {
      if ( schemaName.equals( jobLogTable.getSchemaName() ) ) {
        feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.APPROVAL, "The schema name is set to: " + schemaName ) );
      } else {
        feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.ERROR, "The schema name is not set to: " + schemaName ) );
      }
    }

    if ( !Utils.isEmpty( tableName ) ) {
      if ( tableName.equals( jobLogTable.getTableName() ) ) {
        feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.APPROVAL, "The table name is set to: " + tableName ) );
      } else {
        feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.ERROR, "The table name is not set to: " + tableName ) );
      }
    }

    if ( !Utils.isEmpty( connectionName ) ) {
      if ( connectionName.equals( jobLogTable.getDatabaseMeta().getName() ) ) {
        feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.APPROVAL, "The database connection used for logging is: " + connectionName ) );
      } else {
        feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.ERROR, "The database connection used for logging is not: " + connectionName ) );
      }
    }

    if ( feedback.isEmpty() ) {
      feedback.add( new ImportValidationFeedback( this, ImportValidationResultType.APPROVAL, "The logging table is correctly defined" ) );
    }
  }

  return feedback;
}
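A short usage sketch, assuming the rule exposes setters matching the schemaName, tableName, and connectionName fields referenced above, and that ImportValidationFeedback exposes its result type and comment; the setter/getter names and the concrete connection, schema, and table values are assumptions:

// Sketch: flag an imported job whose job log table does not point at the
// expected connection, schema, and table. Names and values below are assumed.
JobHasJobLogConfiguredImportRule rule = new JobHasJobLogConfiguredImportRule();
rule.setEnabled( true );
rule.setConnectionName( "logging-db" ); // hypothetical connection name
rule.setSchemaName( "audit" );          // hypothetical schema
rule.setTableName( "JOB_LOG" );         // hypothetical table

for ( ImportValidationFeedback item : rule.verifyRule( jobMeta ) ) {
  System.out.println( item.getResultType() + ": " + item.getComment() );
}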
Use of org.pentaho.di.core.logging.JobLogTable in project pentaho-kettle by pentaho.
From the class Job, the method beginProcessing.
/**
 * Handles logging at the start of the job.
 *
 * @return true if it went OK.
 *
 * @throws KettleException
 */
public boolean beginProcessing() throws KettleException {
  currentDate = new Date();
  logDate = new Date();
  startDate = Const.MIN_DATE;
  endDate = currentDate;

  resetErrors();

  final JobLogTable jobLogTable = jobMeta.getJobLogTable();
  int intervalInSeconds = Const.toInt( environmentSubstitute( jobLogTable.getLogInterval() ), -1 );

  if ( jobLogTable.isDefined() ) {
    DatabaseMeta logcon = jobMeta.getJobLogTable().getDatabaseMeta();
    String schemaName = environmentSubstitute( jobMeta.getJobLogTable().getActualSchemaName() );
    String tableName = environmentSubstitute( jobMeta.getJobLogTable().getActualTableName() );
    String schemaAndTable =
      jobMeta.getJobLogTable().getDatabaseMeta().getQuotedSchemaTableCombination( schemaName, tableName );
    Database ldb = new Database( this, logcon );
    ldb.shareVariablesWith( this );
    ldb.connect();
    ldb.setCommit( logCommitSize );

    try {
      // See if we have to add a batch id...
      Long id_batch = 1L;
      if ( jobMeta.getJobLogTable().isBatchIdUsed() ) {
        id_batch = logcon.getNextBatchId( ldb, schemaName, tableName, jobLogTable.getKeyField().getFieldName() );
        setBatchId( id_batch.longValue() );
        if ( getPassedBatchId() <= 0 ) {
          setPassedBatchId( id_batch.longValue() );
        }
      }

      Object[] lastr = ldb.getLastLogDate( schemaAndTable, jobMeta.getName(), true, LogStatus.END );
      if ( !Utils.isEmpty( lastr ) ) {
        Date last;
        try {
          last = ldb.getReturnRowMeta().getDate( lastr, 0 );
        } catch ( KettleValueException e ) {
          throw new KettleJobException( BaseMessages.getString( PKG, "Job.Log.ConversionError", "" + tableName ), e );
        }
        if ( last != null ) {
          startDate = last;
        }
      }

      depDate = currentDate;

      ldb.writeLogRecord( jobMeta.getJobLogTable(), LogStatus.START, this, null );
      if ( !ldb.isAutoCommit() ) {
        ldb.commitLog( true, jobMeta.getJobLogTable() );
      }
      ldb.disconnect();

      // If we need to do periodic logging, install a timer for this...
      //
      if ( intervalInSeconds > 0 ) {
        final Timer timer = new Timer( getName() + " - interval logging timer" );
        TimerTask timerTask = new TimerTask() {
          public void run() {
            try {
              endProcessing();
            } catch ( Exception e ) {
              log.logError( BaseMessages.getString( PKG, "Job.Exception.UnableToPerformIntervalLogging" ), e );
              // Also stop the show...
              //
              errors.incrementAndGet();
              stopAll();
            }
          }
        };
        timer.schedule( timerTask, intervalInSeconds * 1000, intervalInSeconds * 1000 );

        addJobListener( new JobAdapter() {
          public void jobFinished( Job job ) {
            timer.cancel();
          }
        } );
      }

      // Add a listener at the end of the job to take care of writing the final
      // job log record...
      //
      addJobListener( new JobAdapter() {
        public void jobFinished( Job job ) throws KettleException {
          try {
            endProcessing();
          } catch ( KettleJobException e ) {
            log.logError( BaseMessages.getString( PKG, "Job.Exception.UnableToWriteToLoggingTable", jobLogTable.toString() ), e );
            // The job fails if the final log record can't be written to the database!
            throw new KettleException( e );
          }
        }
      } );
    } catch ( KettleDatabaseException dbe ) {
      // This is even before actual execution
      addErrors( 1 );
      throw new KettleJobException( BaseMessages.getString( PKG, "Job.Log.UnableToProcessLoggingStart", "" + tableName ), dbe );
    } finally {
      ldb.disconnect();
    }
  }

  // If we need to write out the job entry logging information, do so at the end of the job:
  //
  JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
  if ( jobEntryLogTable.isDefined() ) {
    addJobListener( new JobAdapter() {
      public void jobFinished( Job job ) throws KettleException {
        try {
          writeJobEntryLogInformation();
        } catch ( KettleException e ) {
          throw new KettleException( BaseMessages.getString( PKG, "Job.Exception.UnableToPerformJobEntryLoggingAtJobEnd" ), e );
        }
      }
    } );
  }

  // If we need to write the log channel hierarchy and lineage information,
  // add a listener for that too...
  //
  ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
  if ( channelLogTable.isDefined() ) {
    addJobListener( new JobAdapter() {
      public void jobFinished( Job job ) throws KettleException {
        try {
          writeLogChannelInformation();
        } catch ( KettleException e ) {
          throw new KettleException( BaseMessages.getString( PKG, "Job.Exception.UnableToPerformLoggingAtTransEnd" ), e );
        }
      }
    } );
  }

  JobExecutionExtension extension = new JobExecutionExtension( this, result, null, false );
  ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.JobBeginProcessing.id, extension );

  return true;
}
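None of the listeners above are registered unless the corresponding log table isDefined(), i.e. it has both a database connection and a table name. A minimal configuration sketch that would also arm the interval-logging timer, assuming JobLogTable setters that mirror the getters used in beginProcessing (the connection, schema, and table values are placeholders):

// Sketch: make jobLogTable.isDefined() true and enable interval logging plus
// a generated batch id. Setter names mirror the getters used above; the
// concrete values are placeholders.
JobLogTable jobLogTable = jobMeta.getJobLogTable();
jobLogTable.setConnectionName( "logging-db" ); // must resolve to a DatabaseMeta known to the job
jobLogTable.setSchemaName( "audit" );
jobLogTable.setTableName( "JOB_LOG" );
jobLogTable.setLogInterval( "15" );            // read back via getLogInterval(), in seconds
jobLogTable.setBatchIdUsed( true );            // read back via isBatchIdUsed()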