Usage of org.pentaho.di.job.entry.JobEntryCopy in the pentaho-kettle project by Pentaho: the JobMeta class, method findJobEntry.
/**
 * Finds the job entry copy whose entry's string representation matches the
 * supplied identifier, ignoring case.
 *
 * @param full_name_nr the identifier to look up (the entry's toString() form,
 *                     presumably "name nr" — matched case-insensitively)
 * @return the matching job entry copy, or null when no match is found
 */
public JobEntryCopy findJobEntry(String full_name_nr) {
  for (int i = 0; i < nrJobEntries(); i++) {
    JobEntryCopy jec = getJobEntry(i);
    JobEntryInterface je = jec.getEntry();
    // Guard against copies without an attached entry: checkJobEntries() shows
    // getEntry() can return null, which previously caused an NPE on toString().
    if (je != null && je.toString().equalsIgnoreCase(full_name_nr)) {
      return jec;
    }
  }
  return null;
}
Usage of org.pentaho.di.job.entry.JobEntryCopy in the pentaho-kettle project by Pentaho: the JobMeta class, method loadXML.
/**
 * Load this job's definition from a DOM node (a "job" element), replacing any
 * state currently held by this JobMeta.
 *
 * @param jobnode The node to load from
 * @param fname The filename; used for variable resolution when no repository is supplied
 * @param rep The reference to a repository to load additional information from; may be null when loading from file
 * @param metaStore the MetaStore to use for importing shared metadata
 * @param ignoreRepositorySharedObjects Do not load shared objects from the repository, handled separately
 * @param prompter The prompter to use in case a shared object gets overwritten
 * @throws KettleXMLException when the XML cannot be parsed or the job cannot be built from it
 */
public void loadXML(Node jobnode, String fname, Repository rep, IMetaStore metaStore, boolean ignoreRepositorySharedObjects, OverwritePrompter prompter) throws KettleXMLException {
// Props may not be initialized in headless/embedded contexts; tolerate that.
Props props = null;
if (Props.isInitialized()) {
props = Props.getInstance();
}
try {
// clear the jobs;
clear();
// Set the filename here so it can be used in variables for ALL aspects of the job FIX: PDI-8890
if (null == rep) {
setFilename(fname);
} else {
// Set the repository here so it can be used in variables for ALL aspects of the job FIX: PDI-16441
setRepository(rep);
}
//
// get job info:
//
setName(XMLHandler.getTagValue(jobnode, "name"));
//
// Resolve the repository directory this job lives in, when loading from a repository.
if (rep != null) {
String directoryPath = XMLHandler.getTagValue(jobnode, "directory");
if (directoryPath != null) {
directory = rep.findDirectory(directoryPath);
if (directory == null) {
// not found
// The root as default
directory = new RepositoryDirectory();
}
}
}
// description
description = XMLHandler.getTagValue(jobnode, "description");
// extended description
extendedDescription = XMLHandler.getTagValue(jobnode, "extended_description");
// job version
jobVersion = XMLHandler.getTagValue(jobnode, "job_version");
// job status (-1 when the tag is absent or not a number)
jobStatus = Const.toInt(XMLHandler.getTagValue(jobnode, "job_status"), -1);
// Created user/date
createdUser = XMLHandler.getTagValue(jobnode, "created_user");
String createDate = XMLHandler.getTagValue(jobnode, "created_date");
if (createDate != null) {
createdDate = XMLHandler.stringToDate(createDate);
}
// Changed user/date
modifiedUser = XMLHandler.getTagValue(jobnode, "modified_user");
String modDate = XMLHandler.getTagValue(jobnode, "modified_date");
if (modDate != null) {
modifiedDate = XMLHandler.stringToDate(modDate);
}
// Read objects from the shared XML file & the repository.
// NOTE: failure to read shared objects is logged but deliberately non-fatal,
// so a job can still be opened when its shared.xml is missing or broken.
try {
sharedObjectsFile = XMLHandler.getTagValue(jobnode, "shared_objects_file");
if (rep == null || ignoreRepositorySharedObjects) {
sharedObjects = readSharedObjects();
} else {
sharedObjects = rep.readJobMetaSharedObjects(this);
}
} catch (Exception e) {
LogChannel.GENERAL.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
LogChannel.GENERAL.logError(Const.getStackTracker(e));
}
// Load the database connections, slave servers, cluster schemas & partition schemas into this object.
//
importFromMetaStore();
// Read the named parameters.
Node paramsNode = XMLHandler.getSubNode(jobnode, XML_TAG_PARAMETERS);
int nrParams = XMLHandler.countNodes(paramsNode, "parameter");
for (int i = 0; i < nrParams; i++) {
Node paramNode = XMLHandler.getSubNodeByNr(paramsNode, "parameter", i);
String paramName = XMLHandler.getTagValue(paramNode, "name");
String defValue = XMLHandler.getTagValue(paramNode, "default_value");
String descr = XMLHandler.getTagValue(paramNode, "description");
addParameterDefinition(paramName, defValue, descr);
}
//
// Read the database connections
//
int nr = XMLHandler.countNodes(jobnode, "connection");
// Track connections embedded in this job's XML that are not shared objects.
Set<String> privateDatabases = new HashSet<String>(nr);
for (int i = 0; i < nr; i++) {
Node dbnode = XMLHandler.getSubNodeByNr(jobnode, "connection", i);
DatabaseMeta dbcon = new DatabaseMeta(dbnode);
dbcon.shareVariablesWith(this);
if (!dbcon.isShared()) {
privateDatabases.add(dbcon.getName());
}
// A connection of the same name may already be present (from shared objects
// or the repository). Shared connections win; otherwise ask before replacing.
DatabaseMeta exist = findDatabase(dbcon.getName());
if (exist == null) {
addDatabase(dbcon);
} else {
if (!exist.isShared()) {
// skip shared connections
if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.Message", dbcon.getName()), BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"))) {
int idx = indexOfDatabase(exist);
removeDatabase(idx);
// Insert at the same index to preserve the original connection order.
addDatabase(idx, dbcon);
}
}
}
}
setPrivateDatabases(privateDatabases);
// Read the slave servers...
//
Node slaveServersNode = XMLHandler.getSubNode(jobnode, XML_TAG_SLAVESERVERS);
int nrSlaveServers = XMLHandler.countNodes(slaveServersNode, SlaveServer.XML_TAG);
for (int i = 0; i < nrSlaveServers; i++) {
Node slaveServerNode = XMLHandler.getSubNodeByNr(slaveServersNode, SlaveServer.XML_TAG, i);
SlaveServer slaveServer = new SlaveServer(slaveServerNode);
slaveServer.shareVariablesWith(this);
// Check if the object exists and if it's a shared object.
// If so, then we will keep the shared version, not this one.
// The stored XML is only for backup purposes.
SlaveServer check = findSlaveServer(slaveServer.getName());
if (check != null) {
if (!check.isShared()) {
// we don't overwrite shared objects.
if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "JobMeta.Dialog.SlaveServerExistsOverWrite.Message", slaveServer.getName()), BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"))) {
addOrReplaceSlaveServer(slaveServer);
}
}
} else {
slaveServers.add(slaveServer);
}
}
/*
 * Get the log database connection & log table
 */
// Backward compatibility: older job XML stored log settings as flat tags
// instead of a nested JobLogTable element.
//
Node jobLogNode = XMLHandler.getSubNode(jobnode, JobLogTable.XML_TAG);
if (jobLogNode == null) {
// Load the XML
//
jobLogTable.setConnectionName(XMLHandler.getTagValue(jobnode, "logconnection"));
jobLogTable.setTableName(XMLHandler.getTagValue(jobnode, "logtable"));
jobLogTable.setBatchIdUsed("Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_batchid")));
jobLogTable.setLogFieldUsed("Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_logfield")));
// These fields did not exist in the legacy format, so disable them.
jobLogTable.findField(JobLogTable.ID.CHANNEL_ID).setEnabled(false);
jobLogTable.findField(JobLogTable.ID.LINES_REJECTED).setEnabled(false);
} else {
jobLogTable.loadXML(jobLogNode, databases, null);
}
Node channelLogTableNode = XMLHandler.getSubNode(jobnode, ChannelLogTable.XML_TAG);
if (channelLogTableNode != null) {
channelLogTable.loadXML(channelLogTableNode, databases, null);
}
jobEntryLogTable.loadXML(jobnode, databases, null);
for (LogTableInterface extraLogTable : extraLogTables) {
extraLogTable.loadXML(jobnode, databases, null);
}
batchIdPassed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "pass_batchid"));
/*
 * read the job entries...
 */
Node entriesnode = XMLHandler.getSubNode(jobnode, "entries");
int tr = XMLHandler.countNodes(entriesnode, "entry");
for (int i = 0; i < tr; i++) {
Node entrynode = XMLHandler.getSubNodeByNr(entriesnode, "entry", i);
// System.out.println("Reading entry:\n"+entrynode);
JobEntryCopy je = new JobEntryCopy(entrynode, databases, slaveServers, rep, metaStore);
// Entries whose plugin could not be resolved are tracked as "missing".
if (je.isSpecial() && je.isMissing()) {
addMissingEntry((MissingEntry) je.getEntry());
}
// An entry can appear multiple times as numbered copies (name + nr).
JobEntryCopy prev = findJobEntry(je.getName(), 0, true);
if (prev != null) {
//
if (je.getNr() == 0) {
// Replace previous version with this one: remove it first
//
int idx = indexOfJobEntry(prev);
removeJobEntry(idx);
} else if (je.getNr() > 0) {
// Use previously defined JobEntry info!
// Copies with nr > 0 share the entry object of copy nr 0.
//
je.setEntry(prev.getEntry());
// See if entry already exists...
prev = findJobEntry(je.getName(), je.getNr(), true);
if (prev != null) {
// remove the old one!
//
int idx = indexOfJobEntry(prev);
removeJobEntry(idx);
}
}
}
// Add the JobEntryCopy...
addJobEntry(je);
}
// Read the hops between the entries loaded above.
Node hopsnode = XMLHandler.getSubNode(jobnode, "hops");
int ho = XMLHandler.countNodes(hopsnode, "hop");
for (int i = 0; i < ho; i++) {
Node hopnode = XMLHandler.getSubNodeByNr(hopsnode, "hop", i);
JobHopMeta hi = new JobHopMeta(hopnode, this);
jobhops.add(hi);
}
// Read the notes...
Node notepadsnode = XMLHandler.getSubNode(jobnode, "notepads");
int nrnotes = XMLHandler.countNodes(notepadsnode, "notepad");
for (int i = 0; i < nrnotes; i++) {
Node notepadnode = XMLHandler.getSubNodeByNr(notepadsnode, "notepad", i);
NotePadMeta ni = new NotePadMeta(notepadnode);
notes.add(ni);
}
// Load the attribute groups map
//
attributesMap = AttributesUtil.loadAttributes(XMLHandler.getSubNode(jobnode, AttributesUtil.XML_TAG));
// Let plugins react to the fully loaded job before the changed-flag reset.
ExtensionPointHandler.callExtensionPoint(LogChannel.GENERAL, KettleExtensionPoint.JobMetaLoaded.id, this);
clearChanged();
} catch (Exception e) {
throw new KettleXMLException(BaseMessages.getString(PKG, "JobMeta.Exception.UnableToLoadJobFromXMLNode"), e);
} finally {
// Always refresh internal variables, even when loading failed part-way.
setInternalKettleVariables();
}
}
Usage of org.pentaho.di.job.entry.JobEntryCopy in the pentaho-kettle project by Pentaho: the JobMeta class, method checkJobEntries.
/**
 * Check all job entries within the job. Each job entry gets the opportunity
 * to validate its own settings and append remarks.
 *
 * @param remarks list that receives the CheckResult remarks produced by each entry;
 *                cleared before checking starts
 * @param only_selected true to restrict the check to the currently selected entries
 * @param monitor progress monitor to report against; may be null
 * @param space variable space used during the checks
 * @param repository repository used during the checks; may be null
 * @param metaStore the MetaStore to use
 */
public void checkJobEntries(List<CheckResultInterface> remarks, boolean only_selected, ProgressMonitorListener monitor, VariableSpace space, Repository repository, IMetaStore metaStore) {
  // Start from a clean slate of remarks.
  remarks.clear();
  if (monitor != null) {
    monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.VerifyingThisJobEntryTask.Title"), jobcopies.size() + 2);
  }
  boolean cancelled = false;
  for (int idx = 0; idx < jobcopies.size() && !cancelled; idx++) {
    JobEntryCopy jobEntryCopy = jobcopies.get(idx);
    // Honour the selection filter; copies without an attached entry are skipped.
    boolean inScope = !only_selected || jobEntryCopy.isSelected();
    if (inScope) {
      JobEntryInterface jobEntry = jobEntryCopy.getEntry();
      if (jobEntry != null) {
        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.VerifyingJobEntry.Title", jobEntry.getName()));
        }
        jobEntry.check(remarks, this, space, repository, metaStore);
        compatibleEntryCheck(jobEntry, remarks);
        if (monitor != null) {
          // Advance the progress bar and honour a cancel request.
          monitor.worked(1);
          if (monitor.isCanceled()) {
            cancelled = true;
          }
        }
      }
    }
    if (monitor != null) {
      monitor.worked(1);
    }
  }
  if (monitor != null) {
    monitor.done();
  }
}
Usage of org.pentaho.di.job.entry.JobEntryCopy in the pentaho-kettle project by Pentaho: the JobMeta class, method createDummyEntry.
/**
 * Builds the special "dummy" job entry copy: undrawn, placed at (50, 50),
 * with no object id and a localized description.
 *
 * @return the newly created dummy job entry copy
 */
public static final JobEntryCopy createDummyEntry() {
  JobEntryCopy copy = new JobEntryCopy();
  copy.setEntry(new JobEntrySpecial(STRING_SPECIAL_DUMMY, false, true));
  copy.setObjectId(null);
  copy.setLocation(50, 50);
  copy.setDrawn(false);
  copy.setDescription(BaseMessages.getString(PKG, "JobMeta.DummyJobEntry.Description"));
  return copy;
}
Usage of org.pentaho.di.job.entry.JobEntryCopy in the pentaho-kettle project by Pentaho: the JobMeta class, method createStartEntry.
/**
 * Builds the special "start" job entry copy: undrawn, placed at (50, 50),
 * with no object id and a localized description.
 *
 * @return the newly created start job entry copy
 */
public static final JobEntryCopy createStartEntry() {
  JobEntryCopy copy = new JobEntryCopy();
  copy.setEntry(new JobEntrySpecial(STRING_SPECIAL_START, true, false));
  copy.setObjectId(null);
  copy.setLocation(50, 50);
  copy.setDrawn(false);
  copy.setDescription(BaseMessages.getString(PKG, "JobMeta.StartJobEntry.Description"));
  return copy;
}
Aggregations