Use of org.pentaho.di.core.logging.JobEntryLogTable in project pentaho-kettle by pentaho.
The class SpoonExportXmlTest, method savingJobToXmlNotChangesLogTables.
@Test
public void savingJobToXmlNotChangesLogTables() {
  JobMeta jobMeta = new JobMeta();
  initTables(jobMeta);
  JobLogTable originJobLogTable = jobMeta.getJobLogTable();
  JobEntryLogTable originJobEntryLogTable = jobMeta.getJobEntryLogTable();
  ChannelLogTable originChannelLogTable = jobMeta.getChannelLogTable();

  when(spoon.getActiveTransformation()).thenReturn(null);
  when(spoon.getActiveJob()).thenReturn(jobMeta);
  when(spoon.saveXMLFile(any(JobMeta.class), anyBoolean())).thenReturn(true);
  when(spoon.saveXMLFile(anyBoolean())).thenCallRealMethod();
  spoon.saveXMLFile(true);

  tablesCommonValuesEqual(originJobLogTable, jobMeta.getJobLogTable());
  assertEquals(originJobLogTable.getLogInterval(), jobMeta.getJobLogTable().getLogInterval());
  assertEquals(originJobLogTable.getLogSizeLimit(), jobMeta.getJobLogTable().getLogSizeLimit());
  tablesCommonValuesEqual(originJobEntryLogTable, jobMeta.getJobEntryLogTable());
  tablesCommonValuesEqual(originChannelLogTable, jobMeta.getChannelLogTable());
}
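The helpers initTables and tablesCommonValuesEqual belong to SpoonExportXmlTest and are not shown in this excerpt. As a rough illustration only, a comparison helper along these lines would cover the fields shared by all BaseLogTable subclasses; which fields the real helper actually checks is an assumption here:

// Hypothetical sketch, not the actual SpoonExportXmlTest helper.
// Assumes a static import of org.junit.Assert.assertEquals.
private void tablesCommonValuesEqual(BaseLogTable expected, BaseLogTable actual) {
  assertEquals(expected.getConnectionName(), actual.getConnectionName()); // fields common to all BaseLogTable subclasses
  assertEquals(expected.getSchemaName(), actual.getSchemaName());
  assertEquals(expected.getTableName(), actual.getTableName());
  assertEquals(expected.getTimeoutInDays(), actual.getTimeoutInDays());
}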
Use of org.pentaho.di.core.logging.JobEntryLogTable in project pentaho-kettle by pentaho.
The class JobHistoryDelegate, method replayHistory.
/**
 * Public for XUL.
 */
public void replayHistory() {
  JobHistoryLogTab model = models[tabFolder.getSelectionIndex()];
  int idx = model.logDisplayTableView.getSelectionIndex();
  if (idx >= 0) {
    String[] fields = model.logDisplayTableView.getItem(idx);
    int batchId = Const.toInt(fields[0], -1);
    // String dateString = fields[13];
    // Date replayDate = XMLHandler.stringToDate(dateString);
    List<JobEntryCopyResult> results = null;
    boolean gotResults = false;

    // We check the job entry logging to see the results from all the various job entries that were executed.
    //
    JobEntryLogTable jeLogTable = jobMeta.getJobEntryLogTable();
    if (jeLogTable.isDefined()) {
      try {
        DatabaseMeta databaseMeta = jobMeta.getJobEntryLogTable().getDatabaseMeta();
        Database db = new Database(Spoon.loggingObject, databaseMeta);
        try {
          db.connect();
          String schemaTable = databaseMeta.getQuotedSchemaTableCombination(jeLogTable.getActualSchemaName(), jeLogTable.getActualTableName());
          String sql = "SELECT * FROM " + schemaTable + " WHERE " + databaseMeta.quoteField(jeLogTable.getKeyField().getFieldName()) + " = " + batchId;
          List<Object[]> rows = db.getRows(sql, 0);
          RowMetaInterface rowMeta = db.getReturnRowMeta();
          results = new ArrayList<JobEntryCopyResult>();

          int jobEntryNameIndex = rowMeta.indexOfValue(jeLogTable.findField(JobEntryLogTable.ID.JOBENTRYNAME.toString()).getFieldName());
          int jobEntryResultIndex = rowMeta.indexOfValue(jeLogTable.findField(JobEntryLogTable.ID.RESULT.toString()).getFieldName());
          int jobEntryErrorsIndex = rowMeta.indexOfValue(jeLogTable.findField(JobEntryLogTable.ID.ERRORS.toString()).getFieldName());
          LogTableField copyNrField = jeLogTable.findField(JobEntryLogTable.ID.COPY_NR.toString());
          int jobEntryCopyNrIndex = copyNrField == null ? -1 : (copyNrField.isEnabled() ? rowMeta.indexOfValue(copyNrField.getFieldName()) : -1);

          for (Object[] row : rows) {
            String jobEntryName = rowMeta.getString(row, jobEntryNameIndex);
            boolean jobEntryResult = rowMeta.getBoolean(row, jobEntryResultIndex);
            long errors = rowMeta.getInteger(row, jobEntryErrorsIndex);
            long copyNr = jobEntryCopyNrIndex < 0 ? 0 : rowMeta.getInteger(row, jobEntryCopyNrIndex);
            JobEntryCopyResult result = new JobEntryCopyResult(jobEntryName, jobEntryResult, errors, (int) copyNr);
            results.add(result);
          }
        } finally {
          db.disconnect();
        }
        gotResults = true;
      } catch (Exception e) {
        new ErrorDialog(spoon.getShell(), BaseMessages.getString(PKG, "JobHistoryDelegate.ReplayHistory.UnexpectedErrorReadingJobEntryHistory.Text"), BaseMessages.getString(PKG, "JobHistoryDelegate.ReplayHistory.UnexpectedErrorReadingJobEntryHistory.Message"), e);
      }
    } else {
      MessageBox box = new MessageBox(spoon.getShell(), SWT.ICON_ERROR | SWT.OK);
      box.setText(BaseMessages.getString(PKG, "JobHistoryDelegate.ReplayHistory.NoJobEntryTable.Text"));
      box.setMessage(BaseMessages.getString(PKG, "JobHistoryDelegate.ReplayHistory.NoJobEntryTable.Message"));
      box.open();
    }
    // spoon.executeJob(jobGraph.getManagedObject(), true, false, replayDate, false);
    if (!gotResults) {
      // For some reason we have no execution results; simply list all the job entries so the user can choose...
      //
      results = new ArrayList<JobEntryCopyResult>();
      for (JobEntryCopy copy : jobMeta.getJobCopies()) {
        results.add(new JobEntryCopyResult(copy.getName(), null, null, copy.getNr()));
      }
    }

    // OK, now that we have our list of job entries, let's first try to find the first job entry that had a false
    // result or where errors > 0. If the error was handled, we look further for a more appropriate target.
    //
    // Note: this selection walk is unfinished in the original source: 'selection' is never assigned and
    // 'start' never advances (see the TODO below), so in practice the method only prints the collected results.
    //
    JobEntryCopy selection = null;
    boolean more = true;
    JobEntryCopy start = jobMeta.findStart();
    while (selection == null && more) {
      int nrNext = jobMeta.findNrNextJobEntries(start);
      more = nrNext > 0;
      for (int n = 0; n < nrNext; n++) {
        JobEntryCopy copy = jobMeta.findNextJobEntry(start, n);
        // See if we can find a result for this job entry...
        //
        JobEntryCopyResult result = JobEntryCopyResult.findResult(results, copy);
        if (result != null) {
          System.out.println("TODO: replay");
          // Do nothing???
        }
      }
    }

    for (JobEntryCopyResult result : results) {
      System.out.println("Job entry copy result -- Name=" + result.getJobEntryName() + ", result=" + result.getResult() + ", errors=" + result.getErrors() + ", nr=" + result.getCopyNr());
    }
  }
}
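When the job entry log table is not defined, replayHistory falls through to the error MessageBox above. A minimal sketch of defining that table on a JobMeta so that jeLogTable.isDefined() returns true; the connection name "logdb" and the table name are illustrative assumptions, and "logdb" must match a database connection already known to the job:

// Sketch: make the job entry log table "defined" so replayHistory() can query it.
JobEntryLogTable jeLogTable = JobEntryLogTable.getDefault(jobMeta, jobMeta); // JobMeta is both VariableSpace and HasDatabasesInterface
jeLogTable.setConnectionName("logdb"); // assumption: an existing connection in the job
jeLogTable.setTableName("JOB_ENTRY_LOG"); // assumption: illustrative table name
jobMeta.setJobEntryLogTable(jeLogTable);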
Use of org.pentaho.di.core.logging.JobEntryLogTable in project pentaho-kettle by pentaho.
The class XmlExportHelper, method swapTables.
/**
 * @param jobMeta
 *          contains log tables to be refactored before export
 */
public static void swapTables(JobMeta jobMeta) {
  JobLogTable jobLogTable = jobMeta.getJobLogTable();
  if (jobLogTable != null) {
    JobLogTable cloneJobLogTable = (JobLogTable) jobLogTable.clone();
    cloneJobLogTable.setAllGlobalParametersToNull();
    jobMeta.setJobLogTable(cloneJobLogTable);
  }
  JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
  if (jobEntryLogTable != null) {
    JobEntryLogTable cloneEntryLogTable = (JobEntryLogTable) jobEntryLogTable.clone();
    cloneEntryLogTable.setAllGlobalParametersToNull();
    jobMeta.setJobEntryLogTable(cloneEntryLogTable);
  }
  ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
  if (channelLogTable != null) {
    ChannelLogTable cloneChannelLogTable = (ChannelLogTable) channelLogTable.clone();
    cloneChannelLogTable.setAllGlobalParametersToNull();
    jobMeta.setChannelLogTable(cloneChannelLogTable);
  }
  List<LogTableInterface> extraLogTables = jobMeta.getExtraLogTables();
  if (extraLogTables != null) {
    List<LogTableInterface> cloneExtraLogTables = new ArrayList<>();
    for (LogTableInterface logTable : extraLogTables) {
      if (logTable instanceof BaseLogTable && logTable instanceof Cloneable) {
        BaseLogTable cloneExtraLogTable = (BaseLogTable) logTable.clone();
        cloneExtraLogTable.setAllGlobalParametersToNull();
        cloneExtraLogTables.add((LogTableInterface) cloneExtraLogTable);
      }
    }
    jobMeta.setExtraLogTables(cloneExtraLogTables);
  }
}
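swapTables replaces each table on the JobMeta with a clone whose global parameters have been nulled out, so the caller is expected to keep the originals and restore them once the XML has been written (the SpoonExportXmlTest above verifies that Spoon's save path leaves the tables unchanged). A minimal sketch of that pattern, assuming a configured JobMeta; the helper name exportJobXml is illustrative and imports are omitted as in the excerpts above:

// Hypothetical helper: serialize a job without its global logging
// parameters, then restore the original log tables on the JobMeta.
static String exportJobXml(JobMeta jobMeta) throws KettleException {
  JobLogTable originalJobLogTable = jobMeta.getJobLogTable();
  JobEntryLogTable originalJobEntryLogTable = jobMeta.getJobEntryLogTable();
  ChannelLogTable originalChannelLogTable = jobMeta.getChannelLogTable();
  List<LogTableInterface> originalExtraLogTables = jobMeta.getExtraLogTables();
  try {
    XmlExportHelper.swapTables(jobMeta); // install sanitized clones
    return jobMeta.getXML(); // serialize without global parameters
  } finally {
    // Put the original tables back so the in-memory job is unchanged.
    jobMeta.setJobLogTable(originalJobLogTable);
    jobMeta.setJobEntryLogTable(originalJobEntryLogTable);
    jobMeta.setChannelLogTable(originalChannelLogTable);
    jobMeta.setExtraLogTables(originalExtraLogTables);
  }
}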
Use of org.pentaho.di.core.logging.JobEntryLogTable in project pentaho-kettle by pentaho.
The class JobDialog, method showLogTypeOptions.
private void showLogTypeOptions(int index) {
  if (index != previousLogTableIndex) {
    // Capture what was entered for the previously shown log table before its controls are disposed.
    getLogInfo(previousLogTableIndex);

    // Remove the old table-specific controls...
    for (Control control : wLogOptionsComposite.getChildren()) {
      control.dispose();
    }
    previousLogTableIndex = index;

    LogTableInterface logTable = logTables.get(index);
    LogTableUserInterface logTableUserInterface = logTableUserInterfaces.get(index);
    if (logTableUserInterface != null) {
      logTableUserInterface.showLogTableOptions(wLogOptionsComposite, logTable);
    } else {
      if (logTable instanceof JobLogTable) {
        showJobLogTableOptions((JobLogTable) logTable);
      } else if (logTable instanceof ChannelLogTable) {
        showChannelLogTableOptions((ChannelLogTable) logTable);
      } else if (logTable instanceof JobEntryLogTable) {
        showJobEntryLogTableOptions((JobEntryLogTable) logTable);
      }
    }
    wLogOptionsComposite.layout(true, true);
    wLogComp.layout(true, true);
  }
}
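The dialog looks up the selected log table in a logTables list and a parallel logTableUserInterfaces list that are built elsewhere in JobDialog. As a rough sketch of how such a list is plausibly populated from the JobMeta (the exact order and contents in JobDialog are an assumption):

// Sketch only; how JobDialog actually assembles its list is an assumption.
List<LogTableInterface> logTables = new ArrayList<>();
logTables.add(jobMeta.getJobLogTable());
logTables.add(jobMeta.getJobEntryLogTable());
logTables.add(jobMeta.getChannelLogTable());
logTables.addAll(jobMeta.getExtraLogTables());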
Use of org.pentaho.di.core.logging.JobEntryLogTable in project pentaho-kettle by pentaho.
The class Job, method beginProcessing.
/**
 * Handle logging at the start of the job.
 *
 * @return true if it went OK.
 *
 * @throws KettleException
 */
public boolean beginProcessing() throws KettleException {
  currentDate = new Date();
  logDate = new Date();
  startDate = Const.MIN_DATE;
  endDate = currentDate;
  resetErrors();

  final JobLogTable jobLogTable = jobMeta.getJobLogTable();
  int intervalInSeconds = Const.toInt(environmentSubstitute(jobLogTable.getLogInterval()), -1);

  if (jobLogTable.isDefined()) {
    DatabaseMeta logcon = jobMeta.getJobLogTable().getDatabaseMeta();
    String schemaName = environmentSubstitute(jobMeta.getJobLogTable().getActualSchemaName());
    String tableName = environmentSubstitute(jobMeta.getJobLogTable().getActualTableName());
    String schemaAndTable = jobMeta.getJobLogTable().getDatabaseMeta().getQuotedSchemaTableCombination(schemaName, tableName);
    Database ldb = new Database(this, logcon);
    ldb.shareVariablesWith(this);
    ldb.connect();
    ldb.setCommit(logCommitSize);
    try {
      // See if we have to add a batch id...
      Long id_batch = new Long(1);
      if (jobMeta.getJobLogTable().isBatchIdUsed()) {
        id_batch = logcon.getNextBatchId(ldb, schemaName, tableName, jobLogTable.getKeyField().getFieldName());
        setBatchId(id_batch.longValue());
        if (getPassedBatchId() <= 0) {
          setPassedBatchId(id_batch.longValue());
        }
      }

      Object[] lastr = ldb.getLastLogDate(schemaAndTable, jobMeta.getName(), true, LogStatus.END);
      if (!Utils.isEmpty(lastr)) {
        Date last;
        try {
          last = ldb.getReturnRowMeta().getDate(lastr, 0);
        } catch (KettleValueException e) {
          throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.ConversionError", "" + tableName), e);
        }
        if (last != null) {
          startDate = last;
        }
      }

      depDate = currentDate;

      ldb.writeLogRecord(jobMeta.getJobLogTable(), LogStatus.START, this, null);
      if (!ldb.isAutoCommit()) {
        ldb.commitLog(true, jobMeta.getJobLogTable());
      }
      ldb.disconnect();

      // If we need to do periodic interval logging, install a timer for it...
      //
      if (intervalInSeconds > 0) {
        final Timer timer = new Timer(getName() + " - interval logging timer");
        TimerTask timerTask = new TimerTask() {
          public void run() {
            try {
              endProcessing();
            } catch (Exception e) {
              log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformIntervalLogging"), e);
              // Also stop the show...
              //
              errors.incrementAndGet();
              stopAll();
            }
          }
        };
        timer.schedule(timerTask, intervalInSeconds * 1000, intervalInSeconds * 1000);

        addJobListener(new JobAdapter() {
          public void jobFinished(Job job) {
            timer.cancel();
          }
        });
      }

      // Add a listener at the end of the job to take care of writing the final job
      // log record...
      //
      addJobListener(new JobAdapter() {
        public void jobFinished(Job job) throws KettleException {
          try {
            endProcessing();
          } catch (KettleJobException e) {
            log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToWriteToLoggingTable", jobLogTable.toString()), e);
            // The job fails if the log database record cannot be written!
            throw new KettleException(e);
          }
        }
      });
    } catch (KettleDatabaseException dbe) {
      // This is even before actual execution...
      addErrors(1);
      throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.UnableToProcessLoggingStart", "" + tableName), dbe);
    } finally {
      ldb.disconnect();
    }
  }

  // If we need to write out the job entry logging information, do so at the end of the job:
  //
  JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
  if (jobEntryLogTable.isDefined()) {
    addJobListener(new JobAdapter() {
      public void jobFinished(Job job) throws KettleException {
        try {
          writeJobEntryLogInformation();
        } catch (KettleException e) {
          throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformJobEntryLoggingAtJobEnd"), e);
        }
      }
    });
  }

  // If we need to write the log channel hierarchy and lineage information,
  // add a listener for that too...
  //
  ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
  if (channelLogTable.isDefined()) {
    addJobListener(new JobAdapter() {
      public void jobFinished(Job job) throws KettleException {
        try {
          writeLogChannelInformation();
        } catch (KettleException e) {
          throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformLoggingAtTransEnd"), e);
        }
      }
    });
  }

  JobExecutionExtension extension = new JobExecutionExtension(this, result, null, false);
  ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobBeginProcessing.id, extension);

  return true;
}
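beginProcessing only writes any of these records when the corresponding log table isDefined(), that is, when it has both a connection name and a table name. A minimal sketch of configuring job-level and job-entry-level logging before running a job; the connection name "logdb" and the table names are illustrative assumptions, and imports are omitted as in the excerpts above:

// Sketch: define the job and job entry log tables so beginProcessing()
// writes a start record, interval records, and per-entry records at the end.
JobLogTable jobLogTable = JobLogTable.getDefault(jobMeta, jobMeta);
jobLogTable.setConnectionName("logdb"); // assumption: a connection defined in the job
jobLogTable.setTableName("JOB_LOG"); // assumption: illustrative name
jobLogTable.setLogInterval("60"); // enables the interval logging timer above
jobMeta.setJobLogTable(jobLogTable);

JobEntryLogTable jobEntryLogTable = JobEntryLogTable.getDefault(jobMeta, jobMeta);
jobEntryLogTable.setConnectionName("logdb");
jobEntryLogTable.setTableName("JOB_ENTRY_LOG");
jobMeta.setJobEntryLogTable(jobEntryLogTable);

Job job = new Job(null, jobMeta); // no repository
job.start();
job.waitUntilFinished();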