Use of org.pentaho.di.core.Props in project pentaho-kettle by pentaho.
The class JobMeta, method getXML().
/*
* (non-Javadoc)
*
* @see org.pentaho.di.core.xml.XMLInterface#getXML()
*/
public String getXML() {
// Clear the embedded named clusters. We will be repopulating from steps that used named clusters
getNamedClusterEmbedManager().clear();
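// Consult the global Props singleton (when it has been initialized) so the serialization
// below can honor the "only save used connections to XML" option.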
Props props = null;
if (Props.isInitialized()) {
props = Props.getInstance();
}
StringBuilder retval = new StringBuilder(500);
retval.append(XMLHandler.openTag(XML_TAG)).append(Const.CR);
retval.append(" ").append(XMLHandler.addTagValue("name", getName()));
retval.append(" ").append(XMLHandler.addTagValue("description", description));
retval.append(" ").append(XMLHandler.addTagValue("extended_description", extendedDescription));
retval.append(" ").append(XMLHandler.addTagValue("job_version", jobVersion));
if (jobStatus >= 0) {
retval.append(" ").append(XMLHandler.addTagValue("job_status", jobStatus));
}
retval.append(" ").append(XMLHandler.addTagValue("directory", (directory != null ? directory.getPath() : RepositoryDirectory.DIRECTORY_SEPARATOR)));
retval.append(" ").append(XMLHandler.addTagValue("created_user", createdUser));
retval.append(" ").append(XMLHandler.addTagValue("created_date", XMLHandler.date2string(createdDate)));
retval.append(" ").append(XMLHandler.addTagValue("modified_user", modifiedUser));
retval.append(" ").append(XMLHandler.addTagValue("modified_date", XMLHandler.date2string(modifiedDate)));
retval.append(" ").append(XMLHandler.openTag(XML_TAG_PARAMETERS)).append(Const.CR);
String[] parameters = listParameters();
for (int idx = 0; idx < parameters.length; idx++) {
retval.append(" ").append(XMLHandler.openTag("parameter")).append(Const.CR);
retval.append(" ").append(XMLHandler.addTagValue("name", parameters[idx]));
try {
retval.append(" ").append(XMLHandler.addTagValue("default_value", getParameterDefault(parameters[idx])));
retval.append(" ").append(XMLHandler.addTagValue("description", getParameterDescription(parameters[idx])));
} catch (UnknownParamException e) {
// skip the default value and/or description. This exception should never happen because we use listParameters()
// above.
}
retval.append(" ").append(XMLHandler.closeTag("parameter")).append(Const.CR);
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_PARAMETERS)).append(Const.CR);
Set<DatabaseMeta> usedDatabaseMetas = getUsedDatabaseMetas();
// Save the database connections...
for (int i = 0; i < nrDatabases(); i++) {
DatabaseMeta dbMeta = getDatabase(i);
if (props != null && props.areOnlyUsedConnectionsSavedToXML()) {
if (usedDatabaseMetas.contains(dbMeta)) {
retval.append(dbMeta.getXML());
}
} else {
retval.append(dbMeta.getXML());
}
}
// The slave servers...
//
retval.append(" ").append(XMLHandler.openTag(XML_TAG_SLAVESERVERS)).append(Const.CR);
for (int i = 0; i < slaveServers.size(); i++) {
SlaveServer slaveServer = slaveServers.get(i);
retval.append(slaveServer.getXML());
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_SLAVESERVERS)).append(Const.CR);
//
for (LogTableInterface logTable : getLogTables()) {
retval.append(logTable.getXML());
}
retval.append(" ").append(XMLHandler.addTagValue("pass_batchid", batchIdPassed));
retval.append(" ").append(XMLHandler.addTagValue("shared_objects_file", sharedObjectsFile));
retval.append(" ").append(XMLHandler.openTag("entries")).append(Const.CR);
for (int i = 0; i < nrJobEntries(); i++) {
JobEntryCopy jge = getJobEntry(i);
jge.getEntry().setRepository(repository);
retval.append(jge.getXML());
}
retval.append(" ").append(XMLHandler.closeTag("entries")).append(Const.CR);
retval.append(" ").append(XMLHandler.openTag("hops")).append(Const.CR);
for (JobHopMeta hi : jobhops) {
// Look at all the hops
retval.append(hi.getXML());
}
retval.append(" ").append(XMLHandler.closeTag("hops")).append(Const.CR);
retval.append(" ").append(XMLHandler.openTag("notepads")).append(Const.CR);
for (int i = 0; i < nrNotes(); i++) {
NotePadMeta ni = getNote(i);
retval.append(ni.getXML());
}
retval.append(" ").append(XMLHandler.closeTag("notepads")).append(Const.CR);
// Also store the attribute groups
//
retval.append(AttributesUtil.getAttributesXml(attributesMap));
retval.append(XMLHandler.closeTag(XML_TAG)).append(Const.CR);
return XMLFormatter.format(retval.toString());
}
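The Props lookup at the top of getXML() is what lets a caller control whether unused database connections are written out. Below is a minimal sketch of exercising that path from a standalone program; it is not the project's own bootstrap code, the job file name is purely illustrative, and it assumes the Kettle environment can be initialized locally.
// Minimal sketch (assumptions noted inline): initialize the Kettle environment and the
// global Props singleton, then serialize a job. With Props initialized, getXML() consults
// props.areOnlyUsedConnectionsSavedToXML() before writing each DatabaseMeta.
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.Props;
import org.pentaho.di.job.JobMeta;

public class JobMetaXmlSketch {
  public static void main(String[] args) throws Exception {
    KettleEnvironment.init();                  // registers plugins; required before loading metadata
    Props.init(Props.TYPE_PROPERTIES_EMPTY);   // makes Props.isInitialized() return true
    JobMeta jobMeta = new JobMeta("my_job.kjb", null); // file name is hypothetical; no repository
    System.out.println(jobMeta.getXML());      // unused connections may be skipped, per Props
  }
}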
Use of org.pentaho.di.core.Props in project pentaho-kettle by pentaho.
The class JmsProducerDialog, method open().
@Override
public String open() {
Shell parent = getParent();
Display display = parent.getDisplay();
shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.MIN | SWT.MAX | SWT.RESIZE);
props.setLook(shell);
setShellImage(shell, meta);
shell.setMinimumSize(SHELL_MIN_WIDTH, SHELL_MIN_HEIGHT);
changed = meta.hasChanged();
FormLayout formLayout = new FormLayout();
formLayout.marginWidth = 15;
formLayout.marginHeight = 15;
shell.setLayout(formLayout);
shell.setText(BaseMessages.getString(PKG, "JmsProducerDialog.Shell.Title"));
Label wicon = new Label(shell, SWT.RIGHT);
wicon.setImage(getImage());
FormData fdlicon = new FormData();
fdlicon.top = new FormAttachment(0, 0);
fdlicon.right = new FormAttachment(100, 0);
wicon.setLayoutData(fdlicon);
props.setLook(wicon);
wlStepname = new Label(shell, SWT.RIGHT);
wlStepname.setText(BaseMessages.getString(PKG, "JmsProducerDialog.Stepname.Label"));
props.setLook(wlStepname);
fdlStepname = new FormData();
fdlStepname.left = new FormAttachment(0, 0);
fdlStepname.top = new FormAttachment(0, 0);
wlStepname.setLayoutData(fdlStepname);
wStepname = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wStepname.setText(stepname);
props.setLook(wStepname);
wStepname.addModifyListener(lsMod);
fdStepname = new FormData();
fdStepname.width = 250;
fdStepname.left = new FormAttachment(0, 0);
fdStepname.top = new FormAttachment(wlStepname, 5);
wStepname.setLayoutData(fdStepname);
Label spacer = new Label(shell, SWT.HORIZONTAL | SWT.SEPARATOR);
props.setLook(spacer);
FormData fdSpacer = new FormData();
fdSpacer.height = 2;
fdSpacer.left = new FormAttachment(0, 0);
fdSpacer.top = new FormAttachment(wStepname, 15);
fdSpacer.right = new FormAttachment(100, 0);
fdSpacer.width = 497;
spacer.setLayoutData(fdSpacer);
// Start of tabbed display
wTabFolder = new CTabFolder(shell, SWT.BORDER);
props.setLook(wTabFolder, Props.WIDGET_STYLE_TAB);
wTabFolder.setSimple(false);
wTabFolder.setUnselectedCloseVisible(true);
wCancel = new Button(shell, SWT.PUSH);
wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
FormData fdCancel = new FormData();
fdCancel.right = new FormAttachment(100, 0);
fdCancel.bottom = new FormAttachment(100, 0);
wCancel.setLayoutData(fdCancel);
wOK = new Button(shell, SWT.PUSH);
wOK.setText(BaseMessages.getString(PKG, "System.Button.OK"));
FormData fdOk = new FormData();
fdOk.right = new FormAttachment(wCancel, -5);
fdOk.bottom = new FormAttachment(100, 0);
wOK.setLayoutData(fdOk);
Label hSpacer = new Label(shell, SWT.HORIZONTAL | SWT.SEPARATOR);
props.setLook(hSpacer);
FormData fdhSpacer = new FormData();
fdhSpacer.height = 2;
fdhSpacer.left = new FormAttachment(0, 0);
fdhSpacer.bottom = new FormAttachment(wCancel, -15);
fdhSpacer.right = new FormAttachment(100, 0);
hSpacer.setLayoutData(fdhSpacer);
FormData fdTabFolder = new FormData();
fdTabFolder.left = new FormAttachment(0, 0);
fdTabFolder.top = new FormAttachment(spacer, 15);
fdTabFolder.bottom = new FormAttachment(hSpacer, -15);
fdTabFolder.right = new FormAttachment(100, 0);
wTabFolder.setLayoutData(fdTabFolder);
// Setup Tab
wSetupTab = new CTabItem(wTabFolder, SWT.NONE);
wSetupTab.setText(BaseMessages.getString(PKG, "JmsProducerDialog.SetupTab"));
wSetupComp = new Composite(wTabFolder, SWT.NONE);
props.setLook(wSetupComp);
FormLayout setupLayout = new FormLayout();
setupLayout.marginHeight = 15;
setupLayout.marginWidth = 15;
wSetupComp.setLayout(setupLayout);
connectionForm = new ConnectionForm(wSetupComp, props, transMeta, lsMod, jmsDelegate);
Group group = connectionForm.layoutForm();
destinationForm = new DestinationForm(wSetupComp, group, props, transMeta, lsMod, jmsDelegate.destinationType, jmsDelegate.destinationName);
Composite destinationFormComposite = destinationForm.layoutForm();
Label lbMessageField = new Label(wSetupComp, SWT.LEFT);
props.setLook(lbMessageField);
lbMessageField.setText(getString(PKG, "JmsProducerDialog.MessageField"));
FormData fdMessage = new FormData();
fdMessage.left = new FormAttachment(0, 0);
fdMessage.top = new FormAttachment(destinationFormComposite, 15);
fdMessage.width = 250;
lbMessageField.setLayoutData(fdMessage);
wMessageField = new ComboVar(transMeta, wSetupComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
props.setLook(wMessageField);
wMessageField.addModifyListener(lsMod);
FormData fdMessageField = new FormData();
fdMessageField.left = new FormAttachment(0, 0);
fdMessageField.top = new FormAttachment(lbMessageField, 5);
fdMessageField.width = 250;
wMessageField.setLayoutData(fdMessageField);
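// Each time the message-field combo gains focus, repopulate its drop-down with the field
// names coming from the previous step, keeping whatever text the user already entered.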
Listener lsMessageFocus = e -> {
String current = wMessageField.getText();
wMessageField.getCComboWidget().removeAll();
wMessageField.setText(current);
try {
RowMetaInterface rmi = transMeta.getPrevStepFields(meta.getParentStepMeta().getName());
List ls = rmi.getValueMetaList();
for (Object l : ls) {
ValueMetaBase vmb = (ValueMetaBase) l;
wMessageField.add(vmb.getName());
}
} catch (KettleStepException ex) {
// do nothing
}
};
wMessageField.getCComboWidget().addListener(SWT.FocusIn, lsMessageFocus);
FormData fdSetupComp = new FormData();
fdSetupComp.left = new FormAttachment(0, 0);
fdSetupComp.top = new FormAttachment(0, 0);
fdSetupComp.right = new FormAttachment(100, 0);
fdSetupComp.bottom = new FormAttachment(100, 0);
wSetupComp.setLayoutData(fdSetupComp);
wSetupComp.layout();
wSetupTab.setControl(wSetupComp);
wTabFolder.setSelection(0);
wOK.addListener(SWT.Selection, lsOK);
wCancel.addListener(SWT.Selection, lsCancel);
// get data for message field, other fields data is loaded by the forms
wMessageField.setText(nullToEmpty(meta.getFieldToSend()));
setSize();
meta.setChanged(changed);
shell.open();
while (!shell.isDisposed()) {
if (!display.readAndDispatch()) {
display.sleep();
}
}
return stepname;
}
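Every props.setLook(...) call above pulls fonts and colors from the shared properties so the dialog matches the rest of Spoon; the props field used here is a PropsUI (the UI subclass of Props) inherited from the dialog's base class. A minimal sketch of the same pattern, assuming the hosting application has already initialized PropsUI and with purely illustrative widgets:
// Sketch of the setLook pattern used throughout the dialog above. Assumes PropsUI has
// already been initialized by the hosting application.
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.pentaho.di.core.Props;
import org.pentaho.di.ui.core.PropsUI;

public class LookAndFeelSketch {
  public static void style(Shell shell) {
    PropsUI props = PropsUI.getInstance();             // UI-side singleton backed by Props
    props.setLook(shell);                              // default fonts/colors for the shell
    Label label = new Label(shell, SWT.LEFT);
    props.setLook(label);
    CTabFolder tabFolder = new CTabFolder(shell, SWT.BORDER);
    props.setLook(tabFolder, Props.WIDGET_STYLE_TAB);  // style constants are defined on Props
  }
}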
Use of org.pentaho.di.core.Props in project pentaho-kettle by pentaho.
The class TransMeta, method getXML().
/**
* Gets the XML representation of this transformation, including or excluding step, database, slave server, cluster,
* or partition information as specified by the parameters
*
* @param includeSteps
* whether to include step data
* @param includeDatabase
* whether to include database data
* @param includeSlaves
* whether to include slave server data
* @param includeClusters
* whether to include cluster data
* @param includePartitions
* whether to include partition data
* @param includeNamedParameters
* whether to include named parameters data
* @param includeLog
* whether to include log data
* @param includeDependencies
* whether to include dependencies data
* @param includeNotePads
* whether to include notepads data
* @param includeAttributeGroups
* whether to include attributes map data
* @return the XML representation of this transformation
* @throws KettleException
* if any errors occur during generation of the XML
*/
public String getXML(boolean includeSteps, boolean includeDatabase, boolean includeSlaves, boolean includeClusters, boolean includePartitions, boolean includeNamedParameters, boolean includeLog, boolean includeDependencies, boolean includeNotePads, boolean includeAttributeGroups) throws KettleException {
// Clear the embedded named clusters. We will be repopulating from steps that used named clusters
getNamedClusterEmbedManager().clear();
Props props = null;
if (Props.isInitialized()) {
props = Props.getInstance();
}
StringBuilder retval = new StringBuilder(800);
retval.append(XMLHandler.openTag(XML_TAG)).append(Const.CR);
retval.append(" ").append(XMLHandler.openTag(XML_TAG_INFO)).append(Const.CR);
retval.append(" ").append(XMLHandler.addTagValue("name", name));
retval.append(" ").append(XMLHandler.addTagValue("description", description));
retval.append(" ").append(XMLHandler.addTagValue("extended_description", extendedDescription));
retval.append(" ").append(XMLHandler.addTagValue("trans_version", trans_version));
retval.append(" ").append(XMLHandler.addTagValue("trans_type", transformationType.getCode()));
if (trans_status >= 0) {
retval.append(" ").append(XMLHandler.addTagValue("trans_status", trans_status));
}
retval.append(" ").append(XMLHandler.addTagValue("directory", directory != null ? directory.getPath() : RepositoryDirectory.DIRECTORY_SEPARATOR));
if (includeNamedParameters) {
retval.append(" ").append(XMLHandler.openTag(XML_TAG_PARAMETERS)).append(Const.CR);
String[] parameters = listParameters();
for (int idx = 0; idx < parameters.length; idx++) {
retval.append(" ").append(XMLHandler.openTag("parameter")).append(Const.CR);
retval.append(" ").append(XMLHandler.addTagValue("name", parameters[idx]));
retval.append(" ").append(XMLHandler.addTagValue("default_value", getParameterDefault(parameters[idx])));
retval.append(" ").append(XMLHandler.addTagValue("description", getParameterDescription(parameters[idx])));
retval.append(" ").append(XMLHandler.closeTag("parameter")).append(Const.CR);
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_PARAMETERS)).append(Const.CR);
}
if (includeLog) {
retval.append(" ").append(XMLHandler.openTag("log")).append(Const.CR);
// Add the metadata for the various logging tables
//
retval.append(transLogTable.getXML());
retval.append(performanceLogTable.getXML());
retval.append(channelLogTable.getXML());
retval.append(stepLogTable.getXML());
retval.append(metricsLogTable.getXML());
retval.append(" ").append(XMLHandler.closeTag("log")).append(Const.CR);
}
retval.append(" ").append(XMLHandler.openTag("maxdate")).append(Const.CR);
retval.append(" ").append(XMLHandler.addTagValue("connection", maxDateConnection == null ? "" : maxDateConnection.getName()));
retval.append(" ").append(XMLHandler.addTagValue("table", maxDateTable));
retval.append(" ").append(XMLHandler.addTagValue("field", maxDateField));
retval.append(" ").append(XMLHandler.addTagValue("offset", maxDateOffset));
retval.append(" ").append(XMLHandler.addTagValue("maxdiff", maxDateDifference));
retval.append(" ").append(XMLHandler.closeTag("maxdate")).append(Const.CR);
retval.append(" ").append(XMLHandler.addTagValue("size_rowset", sizeRowset));
retval.append(" ").append(XMLHandler.addTagValue("sleep_time_empty", sleepTimeEmpty));
retval.append(" ").append(XMLHandler.addTagValue("sleep_time_full", sleepTimeFull));
retval.append(" ").append(XMLHandler.addTagValue("unique_connections", usingUniqueConnections));
retval.append(" ").append(XMLHandler.addTagValue("feedback_shown", feedbackShown));
retval.append(" ").append(XMLHandler.addTagValue("feedback_size", feedbackSize));
retval.append(" ").append(XMLHandler.addTagValue("using_thread_priorities", usingThreadPriorityManagment));
retval.append(" ").append(XMLHandler.addTagValue("shared_objects_file", sharedObjectsFile));
// Performance monitoring
//
retval.append(" ").append(XMLHandler.addTagValue("capture_step_performance", capturingStepPerformanceSnapShots));
retval.append(" ").append(XMLHandler.addTagValue("step_performance_capturing_delay", stepPerformanceCapturingDelay));
retval.append(" ").append(XMLHandler.addTagValue("step_performance_capturing_size_limit", stepPerformanceCapturingSizeLimit));
if (includeDependencies) {
retval.append(" ").append(XMLHandler.openTag(XML_TAG_DEPENDENCIES)).append(Const.CR);
for (int i = 0; i < nrDependencies(); i++) {
TransDependency td = getDependency(i);
retval.append(td.getXML());
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_DEPENDENCIES)).append(Const.CR);
}
//
if (includePartitions) {
retval.append(" ").append(XMLHandler.openTag(XML_TAG_PARTITIONSCHEMAS)).append(Const.CR);
for (int i = 0; i < partitionSchemas.size(); i++) {
PartitionSchema partitionSchema = partitionSchemas.get(i);
retval.append(partitionSchema.getXML());
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_PARTITIONSCHEMAS)).append(Const.CR);
}
//
if (includeSlaves) {
retval.append(" ").append(XMLHandler.openTag(XML_TAG_SLAVESERVERS)).append(Const.CR);
for (int i = 0; i < slaveServers.size(); i++) {
SlaveServer slaveServer = slaveServers.get(i);
retval.append(slaveServer.getXML());
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_SLAVESERVERS)).append(Const.CR);
}
//
if (includeClusters) {
retval.append(" ").append(XMLHandler.openTag(XML_TAG_CLUSTERSCHEMAS)).append(Const.CR);
for (int i = 0; i < clusterSchemas.size(); i++) {
ClusterSchema clusterSchema = clusterSchemas.get(i);
retval.append(clusterSchema.getXML());
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_CLUSTERSCHEMAS)).append(Const.CR);
}
retval.append(" ").append(XMLHandler.addTagValue("created_user", createdUser));
retval.append(" ").append(XMLHandler.addTagValue("created_date", XMLHandler.date2string(createdDate)));
retval.append(" ").append(XMLHandler.addTagValue("modified_user", modifiedUser));
retval.append(" ").append(XMLHandler.addTagValue("modified_date", XMLHandler.date2string(modifiedDate)));
try {
retval.append(" ").append(XMLHandler.addTagValue("key_for_session_key", keyForSessionKey));
} catch (Exception ex) {
log.logError("Unable to decode key", ex);
}
retval.append(" ").append(XMLHandler.addTagValue("is_key_private", isKeyPrivate));
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_INFO)).append(Const.CR);
if (includeNotePads) {
retval.append(" ").append(XMLHandler.openTag(XML_TAG_NOTEPADS)).append(Const.CR);
if (notes != null) {
for (int i = 0; i < nrNotes(); i++) {
NotePadMeta ni = getNote(i);
retval.append(ni.getXML());
}
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_NOTEPADS)).append(Const.CR);
}
// The database connections...
if (includeDatabase) {
for (int i = 0; i < nrDatabases(); i++) {
DatabaseMeta dbMeta = getDatabase(i);
if (props != null && props.areOnlyUsedConnectionsSavedToXML()) {
if (isDatabaseConnectionUsed(dbMeta)) {
retval.append(dbMeta.getXML());
}
} else {
retval.append(dbMeta.getXML());
}
}
}
if (includeSteps) {
retval.append(" ").append(XMLHandler.openTag(XML_TAG_ORDER)).append(Const.CR);
for (int i = 0; i < nrTransHops(); i++) {
TransHopMeta transHopMeta = getTransHop(i);
retval.append(transHopMeta.getXML());
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_ORDER)).append(Const.CR);
/* The steps... */
for (int i = 0; i < nrSteps(); i++) {
StepMeta stepMeta = getStep(i);
if (stepMeta.getStepMetaInterface() instanceof HasRepositoryInterface) {
((HasRepositoryInterface) stepMeta.getStepMetaInterface()).setRepository(repository);
}
retval.append(stepMeta.getXML());
}
/* The error handling metadata on the steps */
retval.append(" ").append(XMLHandler.openTag(XML_TAG_STEP_ERROR_HANDLING)).append(Const.CR);
for (int i = 0; i < nrSteps(); i++) {
StepMeta stepMeta = getStep(i);
if (stepMeta.getStepErrorMeta() != null) {
retval.append(stepMeta.getStepErrorMeta().getXML());
}
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_STEP_ERROR_HANDLING)).append(Const.CR);
}
// The slave-step-copy/partition distribution. Only used for slave transformations in a clustering environment.
retval.append(slaveStepCopyPartitionDistribution.getXML());
// Is this a slave transformation or not?
retval.append(" ").append(XMLHandler.addTagValue("slave_transformation", slaveTransformation));
//
if (includeAttributeGroups) {
retval.append(AttributesUtil.getAttributesXml(attributesMap));
}
retval.append(XMLHandler.closeTag(XML_TAG)).append(Const.CR);
return XMLFormatter.format(retval.toString());
}
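Because the flags map one-to-one onto sections of the generated document, a caller can produce a trimmed-down export. A minimal sketch follows; the transformation file name is illustrative, and it assumes the Kettle environment has been initialized as in the earlier sketch.
// Sketch: export a transformation to XML, excluding slave server, cluster, and partition
// metadata. The flag order follows the Javadoc above.
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.trans.TransMeta;

public class TransMetaExportSketch {
  public static void main(String[] args) throws Exception {
    KettleEnvironment.init();
    TransMeta transMeta = new TransMeta("my_transformation.ktr"); // hypothetical file
    String xml = transMeta.getXML(
      true,   // includeSteps
      true,   // includeDatabase
      false,  // includeSlaves
      false,  // includeClusters
      false,  // includePartitions
      true,   // includeNamedParameters
      true,   // includeLog
      true,   // includeDependencies
      true,   // includeNotePads
      true);  // includeAttributeGroups
    System.out.println(xml);
  }
}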
Use of org.pentaho.di.core.Props in project pentaho-kettle by pentaho.
The class TransMeta, method loadXML().
/**
* Parses an XML DOM (starting at the specified Node) that describes the transformation.
*
* @param transnode
* The XML node to load from
* @param fname
* The filename
* @param metaStore
* the MetaStore to use (may be null)
* @param rep
* The repository to load the default list of database connections from (null if no repository is available)
* @param setInternalVariables
* true if you want to set the internal variables based on this transformation information
* @param parentVariableSpace
* the parent variable space to use during TransMeta construction
* @param prompter
* the changed/replace listener or null if there is none
* @throws KettleXMLException
* if any errors occur during parsing of the specified file
* @throws KettleMissingPluginsException
* in case missing plugins were found (details are in the exception in that case)
*/
public void loadXML(Node transnode, String fname, IMetaStore metaStore, Repository rep, boolean setInternalVariables, VariableSpace parentVariableSpace, OverwritePrompter prompter) throws KettleXMLException, KettleMissingPluginsException {
KettleMissingPluginsException missingPluginsException = new KettleMissingPluginsException(BaseMessages.getString(PKG, "TransMeta.MissingPluginsFoundWhileLoadingTransformation.Exception"));
// Remember this as the primary meta store.
this.metaStore = metaStore;
try {
Props props = null;
if (Props.isInitialized()) {
props = Props.getInstance();
}
initializeVariablesFrom(parentVariableSpace);
try {
// Clear the transformation
clear();
// Set the filename here so it can be used in variables for ALL aspects of the transformation FIX: PDI-8890
if (null == rep) {
setFilename(fname);
} else {
// Set the repository here so it can be used in variables for ALL aspects of the job FIX: PDI-16441
setRepository(rep);
}
//
try {
sharedObjectsFile = XMLHandler.getTagValue(transnode, "info", "shared_objects_file");
sharedObjects = rep != null ? rep.readTransSharedObjects(this) : readSharedObjects();
} catch (Exception e) {
log.logError(BaseMessages.getString(PKG, "TransMeta.ErrorReadingSharedObjects.Message", e.toString()));
log.logError(Const.getStackTracker(e));
}
// Load the database connections, slave servers, cluster schemas & partition schemas into this object.
//
importFromMetaStore();
// Handle connections
int n = XMLHandler.countNodes(transnode, DatabaseMeta.XML_TAG);
Set<String> privateTransformationDatabases = new HashSet<>(n);
if (log.isDebug()) {
log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.WeHaveConnections", String.valueOf(n)));
}
for (int i = 0; i < n; i++) {
if (log.isDebug()) {
log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.LookingAtConnection") + i);
}
Node nodecon = XMLHandler.getSubNodeByNr(transnode, DatabaseMeta.XML_TAG, i);
DatabaseMeta dbcon = new DatabaseMeta(nodecon);
dbcon.shareVariablesWith(this);
if (!dbcon.isShared()) {
privateTransformationDatabases.add(dbcon.getName());
}
DatabaseMeta exist = findDatabase(dbcon.getName());
if (exist == null) {
addDatabase(dbcon);
} else {
if (!exist.isShared()) {
// otherwise, we just keep the shared connection.
if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "TransMeta.Message.OverwriteConnectionYN", dbcon.getName()), BaseMessages.getString(PKG, "TransMeta.Message.OverwriteConnection.DontShowAnyMoreMessage"))) {
int idx = indexOfDatabase(exist);
removeDatabase(idx);
addDatabase(idx, dbcon);
}
}
}
}
setPrivateDatabases(privateTransformationDatabases);
// Read the notes...
Node notepadsnode = XMLHandler.getSubNode(transnode, XML_TAG_NOTEPADS);
int nrnotes = XMLHandler.countNodes(notepadsnode, NotePadMeta.XML_TAG);
for (int i = 0; i < nrnotes; i++) {
Node notepadnode = XMLHandler.getSubNodeByNr(notepadsnode, NotePadMeta.XML_TAG, i);
NotePadMeta ni = new NotePadMeta(notepadnode);
notes.add(ni);
}
// Handle Steps
int s = XMLHandler.countNodes(transnode, StepMeta.XML_TAG);
if (log.isDebug()) {
log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.ReadingSteps") + s + " steps...");
}
for (int i = 0; i < s; i++) {
Node stepnode = XMLHandler.getSubNodeByNr(transnode, StepMeta.XML_TAG, i);
if (log.isDebug()) {
log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.LookingAtStep") + i);
}
StepMeta stepMeta = new StepMeta(stepnode, databases, metaStore);
// for tracing, retain hierarchy
stepMeta.setParentTransMeta(this);
if (stepMeta.isMissing()) {
addMissingTrans((MissingTrans) stepMeta.getStepMetaInterface());
}
// Check if the step exists and if it's a shared step.
// If so, then we will keep the shared version, not this one.
// The stored XML is only for backup purposes.
//
StepMeta check = findStep(stepMeta.getName());
if (check != null) {
if (!check.isShared()) {
// Don't overwrite shared objects
addOrReplaceStep(stepMeta);
} else {
// Just keep the drawn flag and location
check.setDraw(stepMeta.isDrawn());
check.setLocation(stepMeta.getLocation());
}
} else {
// simply add it.
addStep(stepMeta);
}
}
// Read the error handling code of the steps...
//
Node errorHandlingNode = XMLHandler.getSubNode(transnode, XML_TAG_STEP_ERROR_HANDLING);
int nrErrorHandlers = XMLHandler.countNodes(errorHandlingNode, StepErrorMeta.XML_ERROR_TAG);
for (int i = 0; i < nrErrorHandlers; i++) {
Node stepErrorMetaNode = XMLHandler.getSubNodeByNr(errorHandlingNode, StepErrorMeta.XML_ERROR_TAG, i);
StepErrorMeta stepErrorMeta = new StepErrorMeta(this, stepErrorMetaNode, steps);
if (stepErrorMeta.getSourceStep() != null) {
// a bit of a trick, I know.
stepErrorMeta.getSourceStep().setStepErrorMeta(stepErrorMeta);
}
}
//
for (int i = 0; i < nrSteps(); i++) {
StepMeta stepMeta = getStep(i);
StepMetaInterface sii = stepMeta.getStepMetaInterface();
if (sii != null) {
sii.searchInfoAndTargetSteps(steps);
}
}
// Handle Hops
//
Node ordernode = XMLHandler.getSubNode(transnode, XML_TAG_ORDER);
n = XMLHandler.countNodes(ordernode, TransHopMeta.XML_HOP_TAG);
if (log.isDebug()) {
log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.WeHaveHops") + n + " hops...");
}
for (int i = 0; i < n; i++) {
if (log.isDebug()) {
log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.LookingAtHop") + i);
}
Node hopnode = XMLHandler.getSubNodeByNr(ordernode, TransHopMeta.XML_HOP_TAG, i);
TransHopMeta hopinf = new TransHopMeta(hopnode, steps);
hopinf.setErrorHop(isErrorNode(errorHandlingNode, hopnode));
addTransHop(hopinf);
}
//
// get transformation info:
//
Node infonode = XMLHandler.getSubNode(transnode, XML_TAG_INFO);
// Name
//
setName(XMLHandler.getTagValue(infonode, "name"));
// description
//
description = XMLHandler.getTagValue(infonode, "description");
// extended description
//
extendedDescription = XMLHandler.getTagValue(infonode, "extended_description");
// trans version
//
trans_version = XMLHandler.getTagValue(infonode, "trans_version");
// trans status
//
trans_status = Const.toInt(XMLHandler.getTagValue(infonode, "trans_status"), -1);
String transTypeCode = XMLHandler.getTagValue(infonode, "trans_type");
transformationType = TransformationType.getTransformationTypeByCode(transTypeCode);
//
if (rep != null) {
String directoryPath = XMLHandler.getTagValue(infonode, "directory");
if (directoryPath != null) {
directory = rep.findDirectory(directoryPath);
if (directory == null) {
// not found
// The root as default
directory = new RepositoryDirectory();
}
}
}
// Read logging table information
//
Node logNode = XMLHandler.getSubNode(infonode, "log");
if (logNode != null) {
// Backward compatibility...
//
Node transLogNode = XMLHandler.getSubNode(logNode, TransLogTable.XML_TAG);
if (transLogNode == null) {
// Load the XML
//
transLogTable.findField(TransLogTable.ID.LINES_READ).setSubject(findStep(XMLHandler.getTagValue(infonode, "log", "read")));
transLogTable.findField(TransLogTable.ID.LINES_WRITTEN).setSubject(findStep(XMLHandler.getTagValue(infonode, "log", "write")));
transLogTable.findField(TransLogTable.ID.LINES_INPUT).setSubject(findStep(XMLHandler.getTagValue(infonode, "log", "input")));
transLogTable.findField(TransLogTable.ID.LINES_OUTPUT).setSubject(findStep(XMLHandler.getTagValue(infonode, "log", "output")));
transLogTable.findField(TransLogTable.ID.LINES_UPDATED).setSubject(findStep(XMLHandler.getTagValue(infonode, "log", "update")));
transLogTable.findField(TransLogTable.ID.LINES_REJECTED).setSubject(findStep(XMLHandler.getTagValue(infonode, "log", "rejected")));
transLogTable.setConnectionName(XMLHandler.getTagValue(infonode, "log", "connection"));
transLogTable.setSchemaName(XMLHandler.getTagValue(infonode, "log", "schema"));
transLogTable.setTableName(XMLHandler.getTagValue(infonode, "log", "table"));
transLogTable.findField(TransLogTable.ID.ID_BATCH).setEnabled("Y".equalsIgnoreCase(XMLHandler.getTagValue(infonode, "log", "use_batchid")));
transLogTable.findField(TransLogTable.ID.LOG_FIELD).setEnabled("Y".equalsIgnoreCase(XMLHandler.getTagValue(infonode, "log", "USE_LOGFIELD")));
transLogTable.setLogSizeLimit(XMLHandler.getTagValue(infonode, "log", "size_limit_lines"));
transLogTable.setLogInterval(XMLHandler.getTagValue(infonode, "log", "interval"));
transLogTable.findField(TransLogTable.ID.CHANNEL_ID).setEnabled(false);
transLogTable.findField(TransLogTable.ID.LINES_REJECTED).setEnabled(false);
performanceLogTable.setConnectionName(transLogTable.getConnectionName());
performanceLogTable.setTableName(XMLHandler.getTagValue(infonode, "log", "step_performance_table"));
} else {
transLogTable.loadXML(transLogNode, databases, steps);
}
Node perfLogNode = XMLHandler.getSubNode(logNode, PerformanceLogTable.XML_TAG);
if (perfLogNode != null) {
performanceLogTable.loadXML(perfLogNode, databases, steps);
}
Node channelLogNode = XMLHandler.getSubNode(logNode, ChannelLogTable.XML_TAG);
if (channelLogNode != null) {
channelLogTable.loadXML(channelLogNode, databases, steps);
}
Node stepLogNode = XMLHandler.getSubNode(logNode, StepLogTable.XML_TAG);
if (stepLogNode != null) {
stepLogTable.loadXML(stepLogNode, databases, steps);
}
Node metricsLogNode = XMLHandler.getSubNode(logNode, MetricsLogTable.XML_TAG);
if (metricsLogNode != null) {
metricsLogTable.loadXML(metricsLogNode, databases, steps);
}
}
// Maxdate range options...
String maxdatcon = XMLHandler.getTagValue(infonode, "maxdate", "connection");
maxDateConnection = findDatabase(maxdatcon);
maxDateTable = XMLHandler.getTagValue(infonode, "maxdate", "table");
maxDateField = XMLHandler.getTagValue(infonode, "maxdate", "field");
String offset = XMLHandler.getTagValue(infonode, "maxdate", "offset");
maxDateOffset = Const.toDouble(offset, 0.0);
String mdiff = XMLHandler.getTagValue(infonode, "maxdate", "maxdiff");
maxDateDifference = Const.toDouble(mdiff, 0.0);
// Check the dependencies as far as dates are concerned...
// We calculate BEFORE we run the MAX of these dates
// If the date is larger than enddate, startdate is set to MIN_DATE
//
Node depsNode = XMLHandler.getSubNode(infonode, XML_TAG_DEPENDENCIES);
int nrDeps = XMLHandler.countNodes(depsNode, TransDependency.XML_TAG);
for (int i = 0; i < nrDeps; i++) {
Node depNode = XMLHandler.getSubNodeByNr(depsNode, TransDependency.XML_TAG, i);
TransDependency transDependency = new TransDependency(depNode, databases);
if (transDependency.getDatabase() != null && transDependency.getFieldname() != null) {
addDependency(transDependency);
}
}
// Read the named parameters.
Node paramsNode = XMLHandler.getSubNode(infonode, XML_TAG_PARAMETERS);
int nrParams = XMLHandler.countNodes(paramsNode, "parameter");
for (int i = 0; i < nrParams; i++) {
Node paramNode = XMLHandler.getSubNodeByNr(paramsNode, "parameter", i);
String paramName = XMLHandler.getTagValue(paramNode, "name");
String defaultValue = XMLHandler.getTagValue(paramNode, "default_value");
String descr = XMLHandler.getTagValue(paramNode, "description");
addParameterDefinition(paramName, defaultValue, descr);
}
// Read the partitioning schemas
//
Node partSchemasNode = XMLHandler.getSubNode(infonode, XML_TAG_PARTITIONSCHEMAS);
int nrPartSchemas = XMLHandler.countNodes(partSchemasNode, PartitionSchema.XML_TAG);
for (int i = 0; i < nrPartSchemas; i++) {
Node partSchemaNode = XMLHandler.getSubNodeByNr(partSchemasNode, PartitionSchema.XML_TAG, i);
PartitionSchema partitionSchema = new PartitionSchema(partSchemaNode);
// Check if the step exists and if it's a shared step.
// If so, then we will keep the shared version, not this one.
// The stored XML is only for backup purposes.
//
PartitionSchema check = findPartitionSchema(partitionSchema.getName());
if (check != null) {
if (!check.isShared()) {
// we don't overwrite shared objects.
if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "TransMeta.Message.OverwritePartitionSchemaYN", partitionSchema.getName()), BaseMessages.getString(PKG, "TransMeta.Message.OverwriteConnection.DontShowAnyMoreMessage"))) {
addOrReplacePartitionSchema(partitionSchema);
}
}
} else {
partitionSchemas.add(partitionSchema);
}
}
//
for (int i = 0; i < nrSteps(); i++) {
StepPartitioningMeta stepPartitioningMeta = getStep(i).getStepPartitioningMeta();
if (stepPartitioningMeta != null) {
stepPartitioningMeta.setPartitionSchemaAfterLoading(partitionSchemas);
}
StepPartitioningMeta targetStepPartitioningMeta = getStep(i).getTargetStepPartitioningMeta();
if (targetStepPartitioningMeta != null) {
targetStepPartitioningMeta.setPartitionSchemaAfterLoading(partitionSchemas);
}
}
// Read the slave servers...
//
Node slaveServersNode = XMLHandler.getSubNode(infonode, XML_TAG_SLAVESERVERS);
int nrSlaveServers = XMLHandler.countNodes(slaveServersNode, SlaveServer.XML_TAG);
for (int i = 0; i < nrSlaveServers; i++) {
Node slaveServerNode = XMLHandler.getSubNodeByNr(slaveServersNode, SlaveServer.XML_TAG, i);
SlaveServer slaveServer = new SlaveServer(slaveServerNode);
if (slaveServer.getName() == null) {
log.logError(BaseMessages.getString(PKG, "TransMeta.Log.WarningWhileCreationSlaveServer", slaveServer.getName()));
continue;
}
slaveServer.shareVariablesWith(this);
// Check if the object exists and if it's a shared object.
// If so, then we will keep the shared version, not this one.
// The stored XML is only for backup purposes.
SlaveServer check = findSlaveServer(slaveServer.getName());
if (check != null) {
if (!check.isShared()) {
// we don't overwrite shared objects.
if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "TransMeta.Message.OverwriteSlaveServerYN", slaveServer.getName()), BaseMessages.getString(PKG, "TransMeta.Message.OverwriteConnection.DontShowAnyMoreMessage"))) {
addOrReplaceSlaveServer(slaveServer);
}
}
} else {
slaveServers.add(slaveServer);
}
}
// Read the cluster schemas
//
Node clusterSchemasNode = XMLHandler.getSubNode(infonode, XML_TAG_CLUSTERSCHEMAS);
int nrClusterSchemas = XMLHandler.countNodes(clusterSchemasNode, ClusterSchema.XML_TAG);
for (int i = 0; i < nrClusterSchemas; i++) {
Node clusterSchemaNode = XMLHandler.getSubNodeByNr(clusterSchemasNode, ClusterSchema.XML_TAG, i);
ClusterSchema clusterSchema = new ClusterSchema(clusterSchemaNode, slaveServers);
clusterSchema.shareVariablesWith(this);
// Check if the object exists and if it's a shared object.
// If so, then we will keep the shared version, not this one.
// The stored XML is only for backup purposes.
ClusterSchema check = findClusterSchema(clusterSchema.getName());
if (check != null) {
if (!check.isShared()) {
// we don't overwrite shared objects.
if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "TransMeta.Message.OverwriteClusterSchemaYN", clusterSchema.getName()), BaseMessages.getString(PKG, "TransMeta.Message.OverwriteConnection.DontShowAnyMoreMessage"))) {
addOrReplaceClusterSchema(clusterSchema);
}
}
} else {
clusterSchemas.add(clusterSchema);
}
}
//
for (int i = 0; i < nrSteps(); i++) {
getStep(i).setClusterSchemaAfterLoading(clusterSchemas);
}
String srowset = XMLHandler.getTagValue(infonode, "size_rowset");
sizeRowset = Const.toInt(srowset, Const.ROWS_IN_ROWSET);
sleepTimeEmpty = Const.toInt(XMLHandler.getTagValue(infonode, "sleep_time_empty"), Const.TIMEOUT_GET_MILLIS);
sleepTimeFull = Const.toInt(XMLHandler.getTagValue(infonode, "sleep_time_full"), Const.TIMEOUT_PUT_MILLIS);
usingUniqueConnections = "Y".equalsIgnoreCase(XMLHandler.getTagValue(infonode, "unique_connections"));
feedbackShown = !"N".equalsIgnoreCase(XMLHandler.getTagValue(infonode, "feedback_shown"));
feedbackSize = Const.toInt(XMLHandler.getTagValue(infonode, "feedback_size"), Const.ROWS_UPDATE);
usingThreadPriorityManagment = !"N".equalsIgnoreCase(XMLHandler.getTagValue(infonode, "using_thread_priorities"));
// Performance monitoring for steps...
//
capturingStepPerformanceSnapShots = "Y".equalsIgnoreCase(XMLHandler.getTagValue(infonode, "capture_step_performance"));
stepPerformanceCapturingDelay = Const.toLong(XMLHandler.getTagValue(infonode, "step_performance_capturing_delay"), 1000);
stepPerformanceCapturingSizeLimit = XMLHandler.getTagValue(infonode, "step_performance_capturing_size_limit");
// Created user/date
createdUser = XMLHandler.getTagValue(infonode, "created_user");
String createDate = XMLHandler.getTagValue(infonode, "created_date");
if (createDate != null) {
createdDate = XMLHandler.stringToDate(createDate);
}
// Changed user/date
modifiedUser = XMLHandler.getTagValue(infonode, "modified_user");
String modDate = XMLHandler.getTagValue(infonode, "modified_date");
if (modDate != null) {
modifiedDate = XMLHandler.stringToDate(modDate);
}
Node partitionDistNode = XMLHandler.getSubNode(transnode, SlaveStepCopyPartitionDistribution.XML_TAG);
if (partitionDistNode != null) {
slaveStepCopyPartitionDistribution = new SlaveStepCopyPartitionDistribution(partitionDistNode);
} else {
// leave empty
slaveStepCopyPartitionDistribution = new SlaveStepCopyPartitionDistribution();
}
// Is this a slave transformation?
//
slaveTransformation = "Y".equalsIgnoreCase(XMLHandler.getTagValue(transnode, "slave_transformation"));
if (log.isDebug()) {
log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.NumberOfStepsReaded") + nrSteps());
log.logDebug(BaseMessages.getString(PKG, "TransMeta.Log.NumberOfHopsReaded") + nrTransHops());
}
sortSteps();
// Load the attribute groups map
//
attributesMap = AttributesUtil.loadAttributes(XMLHandler.getSubNode(transnode, AttributesUtil.XML_TAG));
keyForSessionKey = XMLHandler.stringToBinary(XMLHandler.getTagValue(infonode, "key_for_session_key"));
isKeyPrivate = "Y".equals(XMLHandler.getTagValue(infonode, "is_key_private"));
} catch (KettleXMLException xe) {
throw new KettleXMLException(BaseMessages.getString(PKG, "TransMeta.Exception.ErrorReadingTransformation"), xe);
} catch (KettleException e) {
throw new KettleXMLException(e);
} finally {
initializeVariablesFrom(null);
if (setInternalVariables) {
setInternalKettleVariables();
}
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.TransformationMetaLoaded.id, this);
}
} catch (Exception e) {
//
if (!missingPluginsException.getMissingPluginDetailsList().isEmpty()) {
throw missingPluginsException;
} else {
throw new KettleXMLException(BaseMessages.getString(PKG, "TransMeta.Exception.ErrorReadingTransformation"), e);
}
} finally {
if (!missingPluginsException.getMissingPluginDetailsList().isEmpty()) {
throw missingPluginsException;
}
}
}
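Most callers reach loadXML() indirectly through the TransMeta(String fname, ...) constructors, which parse the file and then delegate to this method. A minimal sketch of driving it directly from a parsed document is shown below; the file name is illustrative, and no repository or metastore is used.
// Sketch: parse a .ktr file and feed its transformation node to loadXML().
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.trans.TransMeta;
import org.w3c.dom.Document;
import org.w3c.dom.Node;

public class TransMetaLoadSketch {
  public static void main(String[] args) throws Exception {
    KettleEnvironment.init();
    Document doc = XMLHandler.loadXMLFile("my_transformation.ktr");   // hypothetical path
    Node transNode = XMLHandler.getSubNode(doc, TransMeta.XML_TAG);   // the <transformation> node
    TransMeta transMeta = new TransMeta();
    transMeta.loadXML(transNode, "my_transformation.ktr",
        null,              // metaStore: none
        null,              // rep: no repository
        true,              // setInternalVariables
        new Variables(),   // parentVariableSpace
        null);             // prompter: no overwrite prompting
  }
}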
Use of org.pentaho.di.core.Props in project pentaho-kettle by pentaho.
The class JobMeta, method loadXML().
/**
* Load a block of XML from a DOM node.
*
* @param jobnode The node to load from
* @param fname The filename
* @param rep The reference to a repository to load additional information from
* @param metaStore the MetaStore to use
* @param ignoreRepositorySharedObjects whether to skip loading shared objects from the repository; they are handled separately
* @param prompter The prompter to use in case a shared object gets overwritten
* @throws KettleXMLException if any errors occur while loading the job from the XML node
*/
public void loadXML(Node jobnode, String fname, Repository rep, IMetaStore metaStore, boolean ignoreRepositorySharedObjects, OverwritePrompter prompter) throws KettleXMLException {
Props props = null;
if (Props.isInitialized()) {
props = Props.getInstance();
}
try {
// clear the jobs;
clear();
// Set the filename here so it can be used in variables for ALL aspects of the job FIX: PDI-8890
if (null == rep) {
setFilename(fname);
} else {
// Set the repository here so it can be used in variables for ALL aspects of the job FIX: PDI-16441
setRepository(rep);
}
//
// get job info:
//
setName(XMLHandler.getTagValue(jobnode, "name"));
//
if (rep != null) {
String directoryPath = XMLHandler.getTagValue(jobnode, "directory");
if (directoryPath != null) {
directory = rep.findDirectory(directoryPath);
if (directory == null) {
// not found
// The root as default
directory = new RepositoryDirectory();
}
}
}
// description
description = XMLHandler.getTagValue(jobnode, "description");
// extended description
extendedDescription = XMLHandler.getTagValue(jobnode, "extended_description");
// job version
jobVersion = XMLHandler.getTagValue(jobnode, "job_version");
// job status
jobStatus = Const.toInt(XMLHandler.getTagValue(jobnode, "job_status"), -1);
// Created user/date
createdUser = XMLHandler.getTagValue(jobnode, "created_user");
String createDate = XMLHandler.getTagValue(jobnode, "created_date");
if (createDate != null) {
createdDate = XMLHandler.stringToDate(createDate);
}
// Changed user/date
modifiedUser = XMLHandler.getTagValue(jobnode, "modified_user");
String modDate = XMLHandler.getTagValue(jobnode, "modified_date");
if (modDate != null) {
modifiedDate = XMLHandler.stringToDate(modDate);
}
// Read objects from the shared XML file & the repository
try {
sharedObjectsFile = XMLHandler.getTagValue(jobnode, "shared_objects_file");
if (rep == null || ignoreRepositorySharedObjects) {
sharedObjects = readSharedObjects();
} else {
sharedObjects = rep.readJobMetaSharedObjects(this);
}
} catch (Exception e) {
LogChannel.GENERAL.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
LogChannel.GENERAL.logError(Const.getStackTracker(e));
}
// Load the database connections, slave servers, cluster schemas & partition schemas into this object.
//
importFromMetaStore();
// Read the named parameters.
Node paramsNode = XMLHandler.getSubNode(jobnode, XML_TAG_PARAMETERS);
int nrParams = XMLHandler.countNodes(paramsNode, "parameter");
for (int i = 0; i < nrParams; i++) {
Node paramNode = XMLHandler.getSubNodeByNr(paramsNode, "parameter", i);
String paramName = XMLHandler.getTagValue(paramNode, "name");
String defValue = XMLHandler.getTagValue(paramNode, "default_value");
String descr = XMLHandler.getTagValue(paramNode, "description");
addParameterDefinition(paramName, defValue, descr);
}
//
// Read the database connections
//
int nr = XMLHandler.countNodes(jobnode, "connection");
Set<String> privateDatabases = new HashSet<String>(nr);
for (int i = 0; i < nr; i++) {
Node dbnode = XMLHandler.getSubNodeByNr(jobnode, "connection", i);
DatabaseMeta dbcon = new DatabaseMeta(dbnode);
dbcon.shareVariablesWith(this);
if (!dbcon.isShared()) {
privateDatabases.add(dbcon.getName());
}
DatabaseMeta exist = findDatabase(dbcon.getName());
if (exist == null) {
addDatabase(dbcon);
} else {
if (!exist.isShared()) {
// skip shared connections
if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.Message", dbcon.getName()), BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"))) {
int idx = indexOfDatabase(exist);
removeDatabase(idx);
addDatabase(idx, dbcon);
}
}
}
}
setPrivateDatabases(privateDatabases);
// Read the slave servers...
//
Node slaveServersNode = XMLHandler.getSubNode(jobnode, XML_TAG_SLAVESERVERS);
int nrSlaveServers = XMLHandler.countNodes(slaveServersNode, SlaveServer.XML_TAG);
for (int i = 0; i < nrSlaveServers; i++) {
Node slaveServerNode = XMLHandler.getSubNodeByNr(slaveServersNode, SlaveServer.XML_TAG, i);
SlaveServer slaveServer = new SlaveServer(slaveServerNode);
slaveServer.shareVariablesWith(this);
// Check if the object exists and if it's a shared object.
// If so, then we will keep the shared version, not this one.
// The stored XML is only for backup purposes.
SlaveServer check = findSlaveServer(slaveServer.getName());
if (check != null) {
if (!check.isShared()) {
// we don't overwrite shared objects.
if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "JobMeta.Dialog.SlaveServerExistsOverWrite.Message", slaveServer.getName()), BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"))) {
addOrReplaceSlaveServer(slaveServer);
}
}
} else {
slaveServers.add(slaveServer);
}
}
/*
* Get the log database connection & log table
*/
// Backward compatibility...
//
Node jobLogNode = XMLHandler.getSubNode(jobnode, JobLogTable.XML_TAG);
if (jobLogNode == null) {
// Load the XML
//
jobLogTable.setConnectionName(XMLHandler.getTagValue(jobnode, "logconnection"));
jobLogTable.setTableName(XMLHandler.getTagValue(jobnode, "logtable"));
jobLogTable.setBatchIdUsed("Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_batchid")));
jobLogTable.setLogFieldUsed("Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_logfield")));
jobLogTable.findField(JobLogTable.ID.CHANNEL_ID).setEnabled(false);
jobLogTable.findField(JobLogTable.ID.LINES_REJECTED).setEnabled(false);
} else {
jobLogTable.loadXML(jobLogNode, databases, null);
}
Node channelLogTableNode = XMLHandler.getSubNode(jobnode, ChannelLogTable.XML_TAG);
if (channelLogTableNode != null) {
channelLogTable.loadXML(channelLogTableNode, databases, null);
}
jobEntryLogTable.loadXML(jobnode, databases, null);
for (LogTableInterface extraLogTable : extraLogTables) {
extraLogTable.loadXML(jobnode, databases, null);
}
batchIdPassed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "pass_batchid"));
/*
* read the job entries...
*/
Node entriesnode = XMLHandler.getSubNode(jobnode, "entries");
int tr = XMLHandler.countNodes(entriesnode, "entry");
for (int i = 0; i < tr; i++) {
Node entrynode = XMLHandler.getSubNodeByNr(entriesnode, "entry", i);
// System.out.println("Reading entry:\n"+entrynode);
JobEntryCopy je = new JobEntryCopy(entrynode, databases, slaveServers, rep, metaStore);
if (je.isSpecial() && je.isMissing()) {
addMissingEntry((MissingEntry) je.getEntry());
}
JobEntryCopy prev = findJobEntry(je.getName(), 0, true);
if (prev != null) {
//
if (je.getNr() == 0) {
// Replace previous version with this one: remove it first
//
int idx = indexOfJobEntry(prev);
removeJobEntry(idx);
} else if (je.getNr() > 0) {
// Use previously defined JobEntry info!
//
je.setEntry(prev.getEntry());
// See if entry already exists...
prev = findJobEntry(je.getName(), je.getNr(), true);
if (prev != null) {
// remove the old one!
//
int idx = indexOfJobEntry(prev);
removeJobEntry(idx);
}
}
}
// Add the JobEntryCopy...
addJobEntry(je);
}
Node hopsnode = XMLHandler.getSubNode(jobnode, "hops");
int ho = XMLHandler.countNodes(hopsnode, "hop");
for (int i = 0; i < ho; i++) {
Node hopnode = XMLHandler.getSubNodeByNr(hopsnode, "hop", i);
JobHopMeta hi = new JobHopMeta(hopnode, this);
jobhops.add(hi);
}
// Read the notes...
Node notepadsnode = XMLHandler.getSubNode(jobnode, "notepads");
int nrnotes = XMLHandler.countNodes(notepadsnode, "notepad");
for (int i = 0; i < nrnotes; i++) {
Node notepadnode = XMLHandler.getSubNodeByNr(notepadsnode, "notepad", i);
NotePadMeta ni = new NotePadMeta(notepadnode);
notes.add(ni);
}
// Load the attribute groups map
//
attributesMap = AttributesUtil.loadAttributes(XMLHandler.getSubNode(jobnode, AttributesUtil.XML_TAG));
ExtensionPointHandler.callExtensionPoint(LogChannel.GENERAL, KettleExtensionPoint.JobMetaLoaded.id, this);
clearChanged();
} catch (Exception e) {
throw new KettleXMLException(BaseMessages.getString(PKG, "JobMeta.Exception.UnableToLoadJobFromXMLNode"), e);
} finally {
setInternalKettleVariables();
}
}
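The same pattern applies on the job side: parse the .kjb file, hand its job node to loadXML(), and optionally re-serialize it, at which point the Props checks shown in the first snippet come back into play. A minimal sketch with an illustrative file name and no repository, metastore, or prompter:
// Sketch: round-trip a job through loadXML() and getXML().
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.job.JobMeta;
import org.w3c.dom.Document;
import org.w3c.dom.Node;

public class JobMetaRoundTripSketch {
  public static void main(String[] args) throws Exception {
    KettleEnvironment.init();
    Document doc = XMLHandler.loadXMLFile("my_job.kjb");        // hypothetical path
    Node jobNode = XMLHandler.getSubNode(doc, JobMeta.XML_TAG); // the <job> node
    JobMeta jobMeta = new JobMeta();
    jobMeta.loadXML(jobNode, "my_job.kjb",
        null,   // rep: no repository
        null,   // metaStore: none
        true,   // ignoreRepositorySharedObjects: read shared.xml instead of a repository
        null);  // prompter: no overwrite prompting
    System.out.println(jobMeta.getXML());
  }
}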