Use of org.pentaho.di.core.logging.LogTableInterface in project pentaho-kettle by pentaho.

In the class JobMeta, the method getUsedDatabaseMetas:
private Set<DatabaseMeta> getUsedDatabaseMetas() {
  Set<DatabaseMeta> databaseMetas = new HashSet<DatabaseMeta>();
  for (JobEntryCopy jobEntryCopy : getJobCopies()) {
    DatabaseMeta[] dbs = jobEntryCopy.getEntry().getUsedDatabaseConnections();
    if (dbs != null) {
      for (DatabaseMeta db : dbs) {
        databaseMetas.add(db);
      }
    }
  }
  databaseMetas.add(jobLogTable.getDatabaseMeta());
  for (LogTableInterface logTable : getExtraLogTables()) {
    databaseMetas.add(logTable.getDatabaseMeta());
  }
  return databaseMetas;
}
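A minimal sketch of the same collection idiom from outside JobMeta, using only the public accessors that appear in this listing (getLogTables() and LogTableInterface.getDatabaseMeta()); the helper class, the null check, and the assumption that jobMeta is an already-loaded job are mine, not the project's:

import java.util.HashSet;
import java.util.Set;

import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.logging.LogTableInterface;
import org.pentaho.di.job.JobMeta;

public class LogTableConnections {
  // Collect every connection referenced by a job's log tables.
  public static Set<DatabaseMeta> of(JobMeta jobMeta) {
    Set<DatabaseMeta> used = new HashSet<>();
    for (LogTableInterface logTable : jobMeta.getLogTables()) {
      DatabaseMeta db = logTable.getDatabaseMeta();
      if (db != null) { // a log table may have no connection configured
        used.add(db);
      }
    }
    return used;
  }
}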
In the class JobMeta, the method getXML:
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.di.core.xml.XMLInterface#getXML()
 */
public String getXML() {
  // Clear the embedded named clusters. We will be repopulating from steps that used named clusters.
  getNamedClusterEmbedManager().clear();
  Props props = null;
  if (Props.isInitialized()) {
    props = Props.getInstance();
  }
  StringBuilder retval = new StringBuilder(500);
  retval.append(XMLHandler.openTag(XML_TAG)).append(Const.CR);
  retval.append(" ").append(XMLHandler.addTagValue("name", getName()));
  retval.append(" ").append(XMLHandler.addTagValue("description", description));
  retval.append(" ").append(XMLHandler.addTagValue("extended_description", extendedDescription));
  retval.append(" ").append(XMLHandler.addTagValue("job_version", jobVersion));
  if (jobStatus >= 0) {
    retval.append(" ").append(XMLHandler.addTagValue("job_status", jobStatus));
  }
  retval.append(" ").append(XMLHandler.addTagValue("directory", (directory != null ? directory.getPath() : RepositoryDirectory.DIRECTORY_SEPARATOR)));
  retval.append(" ").append(XMLHandler.addTagValue("created_user", createdUser));
  retval.append(" ").append(XMLHandler.addTagValue("created_date", XMLHandler.date2string(createdDate)));
  retval.append(" ").append(XMLHandler.addTagValue("modified_user", modifiedUser));
  retval.append(" ").append(XMLHandler.addTagValue("modified_date", XMLHandler.date2string(modifiedDate)));
  retval.append(" ").append(XMLHandler.openTag(XML_TAG_PARAMETERS)).append(Const.CR);
  String[] parameters = listParameters();
  for (int idx = 0; idx < parameters.length; idx++) {
    retval.append(" ").append(XMLHandler.openTag("parameter")).append(Const.CR);
    retval.append(" ").append(XMLHandler.addTagValue("name", parameters[idx]));
    try {
      retval.append(" ").append(XMLHandler.addTagValue("default_value", getParameterDefault(parameters[idx])));
      retval.append(" ").append(XMLHandler.addTagValue("description", getParameterDescription(parameters[idx])));
    } catch (UnknownParamException e) {
      // Skip the default value and/or description. This exception should never happen
      // because we use listParameters() above.
    }
    retval.append(" ").append(XMLHandler.closeTag("parameter")).append(Const.CR);
  }
  retval.append(" ").append(XMLHandler.closeTag(XML_TAG_PARAMETERS)).append(Const.CR);
  Set<DatabaseMeta> usedDatabaseMetas = getUsedDatabaseMetas();
  // Save the database connections...
  for (int i = 0; i < nrDatabases(); i++) {
    DatabaseMeta dbMeta = getDatabase(i);
    if (props != null && props.areOnlyUsedConnectionsSavedToXML()) {
      if (usedDatabaseMetas.contains(dbMeta)) {
        retval.append(dbMeta.getXML());
      }
    } else {
      retval.append(dbMeta.getXML());
    }
  }
  // The slave servers...
  //
  retval.append(" ").append(XMLHandler.openTag(XML_TAG_SLAVESERVERS)).append(Const.CR);
  for (int i = 0; i < slaveServers.size(); i++) {
    SlaveServer slaveServer = slaveServers.get(i);
    retval.append(slaveServer.getXML());
  }
  retval.append(" ").append(XMLHandler.closeTag(XML_TAG_SLAVESERVERS)).append(Const.CR);
  // The log tables...
  //
  for (LogTableInterface logTable : getLogTables()) {
    retval.append(logTable.getXML());
  }
  retval.append(" ").append(XMLHandler.addTagValue("pass_batchid", batchIdPassed));
  retval.append(" ").append(XMLHandler.addTagValue("shared_objects_file", sharedObjectsFile));
  retval.append(" ").append(XMLHandler.openTag("entries")).append(Const.CR);
  for (int i = 0; i < nrJobEntries(); i++) {
    JobEntryCopy jge = getJobEntry(i);
    jge.getEntry().setRepository(repository);
    retval.append(jge.getXML());
  }
  retval.append(" ").append(XMLHandler.closeTag("entries")).append(Const.CR);
  retval.append(" ").append(XMLHandler.openTag("hops")).append(Const.CR);
  for (JobHopMeta hi : jobhops) {
    // Look at all the hops
    retval.append(hi.getXML());
  }
  retval.append(" ").append(XMLHandler.closeTag("hops")).append(Const.CR);
  retval.append(" ").append(XMLHandler.openTag("notepads")).append(Const.CR);
  for (int i = 0; i < nrNotes(); i++) {
    NotePadMeta ni = getNote(i);
    retval.append(ni.getXML());
  }
  retval.append(" ").append(XMLHandler.closeTag("notepads")).append(Const.CR);
  // Also store the attribute groups
  //
  retval.append(AttributesUtil.getAttributesXml(attributesMap));
  retval.append(XMLHandler.closeTag(XML_TAG)).append(Const.CR);
  return XMLFormatter.format(retval.toString());
}
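A hedged usage sketch of the serializer above: write a populated JobMeta to a .kjb file. The output path and wrapper class are hypothetical; XMLHandler.getXMLHeader() and Const.XML_ENCODING are standard Kettle helpers, assumed available:

import java.io.FileOutputStream;
import java.io.OutputStream;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.job.JobMeta;

public class JobXmlExport {
  public static void exportJob(JobMeta jobMeta) throws Exception {
    String xml = jobMeta.getXML(); // the method shown above
    try (OutputStream os = new FileOutputStream("exported_job.kjb")) { // hypothetical path
      os.write(XMLHandler.getXMLHeader().getBytes(Const.XML_ENCODING));
      os.write(xml.getBytes(Const.XML_ENCODING));
    }
  }
}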
In the class KettleDatabaseRepositoryJobDelegate, the method loadJobMeta:
/**
 * Load a job from a repository directory.
 *
 * @param jobname
 *          the name of the job
 * @param repdir
 *          the directory in which the job resides
 * @param monitor
 *          an optional progress monitor (may be null)
 * @return the loaded job metadata
 * @throws KettleException
 *           if the job cannot be found or read
 */
public JobMeta loadJobMeta(String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor) throws KettleException {
  JobMeta jobMeta = new JobMeta();
  synchronized (repository) {
    try {
      // Clear everything...
      jobMeta.clear();
      jobMeta.setRepositoryDirectory(repdir);
      // Get the job id
      jobMeta.setObjectId(getJobID(jobname, repdir.getObjectId()));
      // If no valid id is available in the database, then give an error...
      if (jobMeta.getObjectId() != null) {
        // Load the notes, job entry copies and hops...
        ObjectId[] noteids = repository.getJobNoteIDs(jobMeta.getObjectId());
        ObjectId[] jecids = repository.getJobEntryCopyIDs(jobMeta.getObjectId());
        ObjectId[] hopid = repository.getJobHopIDs(jobMeta.getObjectId());
        int nrWork = 2 + noteids.length + jecids.length + hopid.length;
        if (monitor != null) {
          monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork);
        }
        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobInformation"));
        }
        RowMetaAndData jobRow = getJob(jobMeta.getObjectId());
        jobMeta.setName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_NAME, null));
        jobMeta.setDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null));
        jobMeta.setExtendedDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null));
        jobMeta.setJobversion(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null));
        jobMeta.setJobstatus(Const.toInt(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null), -1));
        jobMeta.setCreatedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null));
        jobMeta.setCreatedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date()));
        jobMeta.setModifiedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, null));
        jobMeta.setModifiedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, new Date()));
        long id_logdb = jobRow.getInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, 0);
        if (id_logdb > 0) {
          // Get the log connection
          //
          DatabaseMeta logDb = repository.loadDatabaseMeta(new LongObjectId(id_logdb), null);
          jobMeta.getJobLogTable().setConnectionName(logDb.getName());
          // jobMeta.getJobLogTable().getDatabaseMeta().shareVariablesWith(jobMeta);
        }
        jobMeta.getJobLogTable().setTableName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, null));
        jobMeta.getJobLogTable().setBatchIdUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, false));
        jobMeta.getJobLogTable().setLogFieldUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, false));
        jobMeta.getJobLogTable().setLogSizeLimit(getJobAttributeString(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT));
        jobMeta.setBatchIdPassed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, false));
        // Load all the log tables for the job...
        //
        RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
        for (LogTableInterface logTable : jobMeta.getLogTables()) {
          logTable.loadFromRepository(attributeInterface);
        }
        if (monitor != null) {
          monitor.worked(1);
        }
        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingAvailableDatabasesFromRepository"));
        }
        // Read objects from the shared XML file & the repository
        try {
          jobMeta.setSharedObjectsFile(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, null));
          jobMeta.setSharedObjects(repository != null ? repository.readJobMetaSharedObjects(jobMeta) : jobMeta.readSharedObjects());
        } catch (Exception e) {
          log.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
          log.logError(Const.getStackTracker(e));
        }
        if (monitor != null) {
          monitor.worked(1);
        }
        if (log.isDetailed()) {
          log.logDetailed("Loading " + noteids.length + " notes");
        }
        for (int i = 0; i < noteids.length; i++) {
          if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length);
          }
          NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta(noteids[i]);
          if (jobMeta.indexOfNote(ni) < 0) {
            jobMeta.addNote(ni);
          }
          if (monitor != null) {
            monitor.worked(1);
          }
        }
        // Load the group attributes map
        //
        jobMeta.setAttributesMap(loadJobAttributesMap(jobMeta.getObjectId()));
        // Load the job entries...
        //
        // Keep a unique list of job entries to facilitate the loading.
        //
        List<JobEntryInterface> jobentries = new ArrayList<JobEntryInterface>();
        if (log.isDetailed()) {
          log.logDetailed("Loading " + jecids.length + " job entries");
        }
        for (int i = 0; i < jecids.length; i++) {
          if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + jecids.length);
          }
          JobEntryCopy jec = repository.jobEntryDelegate.loadJobEntryCopy(jobMeta.getObjectId(), jecids[i], jobentries, jobMeta.getDatabases(), jobMeta.getSlaveServers(), jobname);
          if (jec.isMissing()) {
            jobMeta.addMissingEntry((MissingEntry) jec.getEntry());
          }
          // Also set the copy number: count the job entry copies that share this job entry.
          //
          int copyNr = 0;
          for (JobEntryCopy copy : jobMeta.getJobCopies()) {
            if (jec.getEntry() == copy.getEntry()) {
              copyNr++;
            }
          }
          jec.setNr(copyNr);
          int idx = jobMeta.indexOfJobEntry(jec);
          if (idx < 0) {
            if (jec.getName() != null && jec.getName().length() > 0) {
              jobMeta.addJobEntry(jec);
            }
          } else {
            // Replace it!
            jobMeta.setJobEntry(idx, jec);
          }
          if (monitor != null) {
            monitor.worked(1);
          }
        }
        // Load the hops...
        if (log.isDetailed()) {
          log.logDetailed("Loading " + hopid.length + " job hops");
        }
        for (int i = 0; i < hopid.length; i++) {
          if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + hopid.length);
          }
          JobHopMeta hi = loadJobHopMeta(hopid[i], jobMeta.getJobCopies());
          jobMeta.getJobhops().add(hi);
          if (monitor != null) {
            monitor.worked(1);
          }
        }
        loadRepParameters(jobMeta);
        // Finally, clear the changed flags...
        jobMeta.clearChanged();
        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.FinishedLoadOfJob"));
        }
        if (monitor != null) {
          monitor.done();
        }
        // Close prepared statements, minimize locking, etc.
        //
        repository.connectionDelegate.closeAttributeLookupPreparedStatements();
        return jobMeta;
      } else {
        throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.CanNotFindJob") + jobname);
      }
    } catch (KettleException dbe) {
      throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
    } finally {
      jobMeta.initializeVariablesFrom(jobMeta.getParentVariableSpace());
      jobMeta.setInternalKettleVariables();
    }
  }
}
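Callers typically reach this method through the public Repository interface rather than the delegate itself; in the database repository, Repository.loadJob hands off to loadJobMeta. A hedged sketch, assuming an already-connected repository; the directory path, job name, and wrapper class are placeholders:

import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;

public class JobLoader {
  public static JobMeta load(Repository repository) throws Exception {
    RepositoryDirectoryInterface dir = repository.findDirectory("/public"); // placeholder path
    // Null monitor; the last argument selects the revision (null for the latest).
    return repository.loadJob("daily_load", dir, null, null);
  }
}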
In the class KettleDatabaseRepositoryJobDelegate, the method insertJob:
private synchronized void insertJob(JobMeta jobMeta) throws KettleException {
  RowMetaAndData table = new RowMetaAndData();
  table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_ID_JOB), jobMeta.getObjectId());
  table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY), jobMeta.getRepositoryDirectory().getObjectId());
  table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_NAME), jobMeta.getName());
  table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION), jobMeta.getDescription());
  table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION), jobMeta.getExtendedDescription());
  table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION), jobMeta.getJobversion());
  table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS), Long.valueOf(jobMeta.getJobstatus() < 0 ? -1L : jobMeta.getJobstatus()));
  table.addValue(new ValueMetaInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG), jobMeta.getJobLogTable().getDatabaseMeta() != null ? jobMeta.getJobLogTable().getDatabaseMeta().getObjectId() : -1L);
  table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG), jobMeta.getJobLogTable().getTableName());
  table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID), jobMeta.getJobLogTable().isBatchIdUsed());
  table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD), jobMeta.getJobLogTable().isLogFieldUsed());
  repository.connectionDelegate.insertJobAttribute(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT, 0, jobMeta.getJobLogTable().getLogSizeLimit());
  table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER), jobMeta.getCreatedUser());
  table.addValue(new ValueMetaDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE), jobMeta.getCreatedDate());
  table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER), jobMeta.getModifiedUser());
  table.addValue(new ValueMetaDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE), jobMeta.getModifiedDate());
  table.addValue(new ValueMetaBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID), jobMeta.isBatchIdPassed());
  table.addValue(new ValueMetaString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE), jobMeta.getSharedObjectsFile());
  repository.connectionDelegate.getDatabase().prepareInsert(table.getRowMeta(), KettleDatabaseRepository.TABLE_R_JOB);
  repository.connectionDelegate.getDatabase().setValuesInsert(table);
  repository.connectionDelegate.getDatabase().insertRow();
  if (log.isDebug()) {
    log.logDebug("Inserted new record into table " + quoteTable(KettleDatabaseRepository.TABLE_R_JOB) + " with data : " + table);
  }
  repository.connectionDelegate.getDatabase().closeInsert();
  // Save the logging connection link...
  if (jobMeta.getJobLogTable().getDatabaseMeta() != null) {
    repository.insertJobEntryDatabase(jobMeta.getObjectId(), null, jobMeta.getJobLogTable().getDatabaseMeta().getObjectId());
  }
  // Save the logging tables too...
  //
  RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
  for (LogTableInterface logTable : jobMeta.getLogTables()) {
    logTable.saveToRepository(attributeInterface);
  }
}
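Note the symmetry with loadJobMeta above: each LogTableInterface persists itself through the same RepositoryAttributeInterface wrapper, so the delegate never needs to know an individual table's fields. A condensed sketch of the pair; both calls are copied from the snippets above, while the surrounding repository and jobMeta objects and the import locations are assumptions:

import org.pentaho.di.core.logging.LogTableInterface;
import org.pentaho.di.repository.RepositoryAttributeInterface;

// attr wraps the job's attribute rows in the database repository.
RepositoryAttributeInterface attr =
    new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
for (LogTableInterface logTable : jobMeta.getLogTables()) {
  logTable.saveToRepository(attr); // when writing the job (insertJob, above)
}
for (LogTableInterface logTable : jobMeta.getLogTables()) {
  logTable.loadFromRepository(attr); // when reading it back (loadJobMeta, above)
}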
In the class XmlExportHelper, the method swapTables:
/**
 * @param jobMeta
 *          the job whose log tables are replaced by clones with all global parameters cleared, before export
 */
public static void swapTables(JobMeta jobMeta) {
  JobLogTable jobLogTable = jobMeta.getJobLogTable();
  if (jobLogTable != null) {
    JobLogTable cloneJobLogTable = (JobLogTable) jobLogTable.clone();
    cloneJobLogTable.setAllGlobalParametersToNull();
    jobMeta.setJobLogTable(cloneJobLogTable);
  }
  JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
  if (jobEntryLogTable != null) {
    JobEntryLogTable cloneEntryLogTable = (JobEntryLogTable) jobEntryLogTable.clone();
    cloneEntryLogTable.setAllGlobalParametersToNull();
    jobMeta.setJobEntryLogTable(cloneEntryLogTable);
  }
  ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
  if (channelLogTable != null) {
    ChannelLogTable cloneChannelLogTable = (ChannelLogTable) channelLogTable.clone();
    cloneChannelLogTable.setAllGlobalParametersToNull();
    jobMeta.setChannelLogTable(cloneChannelLogTable);
  }
  List<LogTableInterface> extraLogTables = jobMeta.getExtraLogTables();
  if (extraLogTables != null) {
    List<LogTableInterface> cloneExtraLogTables = new ArrayList<>();
    for (LogTableInterface logTable : extraLogTables) {
      if (logTable instanceof BaseLogTable && logTable instanceof Cloneable) {
        BaseLogTable cloneExtraLogTable = (BaseLogTable) logTable.clone();
        cloneExtraLogTable.setAllGlobalParametersToNull();
        cloneExtraLogTables.add((LogTableInterface) cloneExtraLogTable);
      }
    }
    jobMeta.setExtraLogTables(cloneExtraLogTables);
  }
}
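A hedged usage sketch: run swapTables just before serializing, so globally parameterized log settings do not leak into the exported XML. The helper mutates the passed JobMeta by swapping in sanitized clones. The wrapper class, the import location of XmlExportHelper, and the exact call site are assumptions:

import org.pentaho.di.core.xml.XmlExportHelper; // package assumed
import org.pentaho.di.job.JobMeta;

public class CleanExport {
  public static String toXml(JobMeta jobMeta) {
    XmlExportHelper.swapTables(jobMeta); // replaces log tables with clones, globals nulled
    return jobMeta.getXML(); // the serializer shown earlier in this listing
  }
}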