Use of org.pentaho.di.core.NotePadMeta in project pentaho-kettle by pentaho.
Example: the loadXML method of the JobMeta class.
/**
 * Load a block of XML from a DOM node.
 *
 * @param jobnode The node to load from
 * @param fname The filename
 * @param rep The reference to a repository to load additional information from
 * @param metaStore the MetaStore to use
 * @param ignoreRepositorySharedObjects Do not load shared objects, handled separately
 * @param prompter The prompter to use in case a shared object gets overwritten
 * @throws KettleXMLException if the job cannot be loaded from the XML node
 */
public void loadXML(Node jobnode, String fname, Repository rep, IMetaStore metaStore, boolean ignoreRepositorySharedObjects, OverwritePrompter prompter) throws KettleXMLException {
  Props props = null;
  if (Props.isInitialized()) {
    props = Props.getInstance();
  }
  try {
    // Clear the job.
    clear();

    // Set the filename here so it can be used in variables for ALL aspects of the job. FIX: PDI-8890
    if (null == rep) {
      setFilename(fname);
    } else {
      // Set the repository here so it can be used in variables for ALL aspects of the job. FIX: PDI-16441
      setRepository(rep);
    }

    //
    // Get the job info:
    //
    setName(XMLHandler.getTagValue(jobnode, "name"));

    if (rep != null) {
      String directoryPath = XMLHandler.getTagValue(jobnode, "directory");
      if (directoryPath != null) {
        directory = rep.findDirectory(directoryPath);
        if (directory == null) {
          // Not found: fall back to the root directory.
          directory = new RepositoryDirectory();
        }
      }
    }

    // Description
    description = XMLHandler.getTagValue(jobnode, "description");

    // Extended description
    extendedDescription = XMLHandler.getTagValue(jobnode, "extended_description");

    // Job version
    jobVersion = XMLHandler.getTagValue(jobnode, "job_version");

    // Job status
    jobStatus = Const.toInt(XMLHandler.getTagValue(jobnode, "job_status"), -1);

    // Created user/date
    createdUser = XMLHandler.getTagValue(jobnode, "created_user");
    String createDate = XMLHandler.getTagValue(jobnode, "created_date");
    if (createDate != null) {
      createdDate = XMLHandler.stringToDate(createDate);
    }

    // Changed user/date
    modifiedUser = XMLHandler.getTagValue(jobnode, "modified_user");
    String modDate = XMLHandler.getTagValue(jobnode, "modified_date");
    if (modDate != null) {
      modifiedDate = XMLHandler.stringToDate(modDate);
    }

    // Read objects from the shared XML file & the repository
    try {
      sharedObjectsFile = XMLHandler.getTagValue(jobnode, "shared_objects_file");
      if (rep == null || ignoreRepositorySharedObjects) {
        sharedObjects = readSharedObjects();
      } else {
        sharedObjects = rep.readJobMetaSharedObjects(this);
      }
    } catch (Exception e) {
      LogChannel.GENERAL.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
      LogChannel.GENERAL.logError(Const.getStackTracker(e));
    }

    // Load the database connections, slave servers, cluster schemas & partition schemas into this object.
    //
    importFromMetaStore();

    // Read the named parameters.
    Node paramsNode = XMLHandler.getSubNode(jobnode, XML_TAG_PARAMETERS);
    int nrParams = XMLHandler.countNodes(paramsNode, "parameter");
    for (int i = 0; i < nrParams; i++) {
      Node paramNode = XMLHandler.getSubNodeByNr(paramsNode, "parameter", i);
      String paramName = XMLHandler.getTagValue(paramNode, "name");
      String defValue = XMLHandler.getTagValue(paramNode, "default_value");
      String descr = XMLHandler.getTagValue(paramNode, "description");
      addParameterDefinition(paramName, defValue, descr);
    }

    //
    // Read the database connections
    //
    int nr = XMLHandler.countNodes(jobnode, "connection");
    Set<String> privateDatabases = new HashSet<String>(nr);
    for (int i = 0; i < nr; i++) {
      Node dbnode = XMLHandler.getSubNodeByNr(jobnode, "connection", i);
      DatabaseMeta dbcon = new DatabaseMeta(dbnode);
      dbcon.shareVariablesWith(this);
      if (!dbcon.isShared()) {
        privateDatabases.add(dbcon.getName());
      }
      DatabaseMeta exist = findDatabase(dbcon.getName());
      if (exist == null) {
        addDatabase(dbcon);
      } else {
        if (!exist.isShared()) {
          // Shared connections are kept as-is; only ask to overwrite non-shared ones.
          if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.Message", dbcon.getName()), BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"))) {
            int idx = indexOfDatabase(exist);
            removeDatabase(idx);
            addDatabase(idx, dbcon);
          }
        }
      }
    }
    setPrivateDatabases(privateDatabases);

    // Read the slave servers...
    //
    Node slaveServersNode = XMLHandler.getSubNode(jobnode, XML_TAG_SLAVESERVERS);
    int nrSlaveServers = XMLHandler.countNodes(slaveServersNode, SlaveServer.XML_TAG);
    for (int i = 0; i < nrSlaveServers; i++) {
      Node slaveServerNode = XMLHandler.getSubNodeByNr(slaveServersNode, SlaveServer.XML_TAG, i);
      SlaveServer slaveServer = new SlaveServer(slaveServerNode);
      slaveServer.shareVariablesWith(this);
      // Check if the object exists and if it's a shared object.
      // If so, then we will keep the shared version, not this one.
      // The stored XML is only for backup purposes.
      SlaveServer check = findSlaveServer(slaveServer.getName());
      if (check != null) {
        if (!check.isShared()) {
          // We don't overwrite shared objects.
          if (shouldOverwrite(prompter, props, BaseMessages.getString(PKG, "JobMeta.Dialog.SlaveServerExistsOverWrite.Message", slaveServer.getName()), BaseMessages.getString(PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"))) {
            addOrReplaceSlaveServer(slaveServer);
          }
        }
      } else {
        slaveServers.add(slaveServer);
      }
    }

    /*
     * Get the log database connection & log table
     */
    // Backward compatibility...
    //
    Node jobLogNode = XMLHandler.getSubNode(jobnode, JobLogTable.XML_TAG);
    if (jobLogNode == null) {
      // Load the old-style XML tags.
      //
      jobLogTable.setConnectionName(XMLHandler.getTagValue(jobnode, "logconnection"));
      jobLogTable.setTableName(XMLHandler.getTagValue(jobnode, "logtable"));
      jobLogTable.setBatchIdUsed("Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_batchid")));
      jobLogTable.setLogFieldUsed("Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_logfield")));
      jobLogTable.findField(JobLogTable.ID.CHANNEL_ID).setEnabled(false);
      jobLogTable.findField(JobLogTable.ID.LINES_REJECTED).setEnabled(false);
    } else {
      jobLogTable.loadXML(jobLogNode, databases, null);
    }

    Node channelLogTableNode = XMLHandler.getSubNode(jobnode, ChannelLogTable.XML_TAG);
    if (channelLogTableNode != null) {
      channelLogTable.loadXML(channelLogTableNode, databases, null);
    }
    jobEntryLogTable.loadXML(jobnode, databases, null);
    for (LogTableInterface extraLogTable : extraLogTables) {
      extraLogTable.loadXML(jobnode, databases, null);
    }
    batchIdPassed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "pass_batchid"));

    /*
     * Read the job entries...
     */
    Node entriesnode = XMLHandler.getSubNode(jobnode, "entries");
    int tr = XMLHandler.countNodes(entriesnode, "entry");
    for (int i = 0; i < tr; i++) {
      Node entrynode = XMLHandler.getSubNodeByNr(entriesnode, "entry", i);
      // System.out.println("Reading entry:\n"+entrynode);
      JobEntryCopy je = new JobEntryCopy(entrynode, databases, slaveServers, rep, metaStore);
      if (je.isSpecial() && je.isMissing()) {
        addMissingEntry((MissingEntry) je.getEntry());
      }
      JobEntryCopy prev = findJobEntry(je.getName(), 0, true);
      if (prev != null) {
        if (je.getNr() == 0) {
          // Replace the previous version with this one: remove it first.
          //
          int idx = indexOfJobEntry(prev);
          removeJobEntry(idx);
        } else if (je.getNr() > 0) {
          // Use the previously defined JobEntry info!
          //
          je.setEntry(prev.getEntry());
          // See if the entry already exists...
          prev = findJobEntry(je.getName(), je.getNr(), true);
          if (prev != null) {
            // Remove the old one!
            //
            int idx = indexOfJobEntry(prev);
            removeJobEntry(idx);
          }
        }
      }
      // Add the JobEntryCopy...
      addJobEntry(je);
    }

    Node hopsnode = XMLHandler.getSubNode(jobnode, "hops");
    int ho = XMLHandler.countNodes(hopsnode, "hop");
    for (int i = 0; i < ho; i++) {
      Node hopnode = XMLHandler.getSubNodeByNr(hopsnode, "hop", i);
      JobHopMeta hi = new JobHopMeta(hopnode, this);
      jobhops.add(hi);
    }

    // Read the notes...
    Node notepadsnode = XMLHandler.getSubNode(jobnode, "notepads");
    int nrnotes = XMLHandler.countNodes(notepadsnode, "notepad");
    for (int i = 0; i < nrnotes; i++) {
      Node notepadnode = XMLHandler.getSubNodeByNr(notepadsnode, "notepad", i);
      NotePadMeta ni = new NotePadMeta(notepadnode);
      notes.add(ni);
    }

    // Load the attribute groups map
    //
    attributesMap = AttributesUtil.loadAttributes(XMLHandler.getSubNode(jobnode, AttributesUtil.XML_TAG));
    ExtensionPointHandler.callExtensionPoint(LogChannel.GENERAL, KettleExtensionPoint.JobMetaLoaded.id, this);
    clearChanged();
  } catch (Exception e) {
    throw new KettleXMLException(BaseMessages.getString(PKG, "JobMeta.Exception.UnableToLoadJobFromXMLNode"), e);
  } finally {
    setInternalKettleVariables();
  }
}
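
For orientation, below is a minimal sketch of driving loadXML directly from a .kjb file and listing the notes it produced. The file path is hypothetical, the repository/metastore/prompter arguments are simply passed as null (the method tolerates that, as the null checks above show), and helpers such as XMLHandler.loadXMLFile and nrNotes are used as they appear elsewhere in the code base; treat it as an illustration, not the canonical loading path.

  import org.pentaho.di.core.KettleEnvironment;
  import org.pentaho.di.core.NotePadMeta;
  import org.pentaho.di.core.xml.XMLHandler;
  import org.pentaho.di.job.JobMeta;
  import org.pentaho.di.repository.Repository;
  import org.pentaho.metastore.api.IMetaStore;
  import org.w3c.dom.Document;
  import org.w3c.dom.Node;

  public class LoadJobNotesSketch {
    public static void main(String[] args) throws Exception {
      KettleEnvironment.init();                  // bootstrap plugins, logging, etc.
      String filename = "/tmp/sample.kjb";       // hypothetical job file

      Document doc = XMLHandler.loadXMLFile(filename);
      Node jobNode = XMLHandler.getSubNode(doc, "job");  // the usual <job> root element

      JobMeta jobMeta = new JobMeta();
      // No repository, no metastore, no prompter: shared objects come from the local shared.xml.
      jobMeta.loadXML(jobNode, filename, (Repository) null, (IMetaStore) null, false, null);

      // The "notepads" block parsed above ends up as NotePadMeta instances on the JobMeta.
      for (int i = 0; i < jobMeta.nrNotes(); i++) {
        NotePadMeta note = jobMeta.getNote(i);
        System.out.println("Note " + i + ": " + note.getNote());
      }
    }
  }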
Use of org.pentaho.di.core.NotePadMeta in project pentaho-kettle by pentaho.
Example: the testExportWithRules method of the PurRepositoryIT class.
@Test
public void testExportWithRules() throws Exception {
  String fileName = "testExportWithRuled.xml";
  // $NON-NLS-1$
  final String exportFileName = new File(fileName).getAbsolutePath();
  RepositoryDirectoryInterface rootDir = initRepo();

  // First transformation (no note)
  String transWithoutNoteName = "2" + EXP_DBMETA_NAME;
  TransMeta transWithoutNote = createTransMeta(transWithoutNoteName);
  String transUniqueName = EXP_TRANS_NAME.concat(transWithoutNoteName);
  RepositoryDirectoryInterface transDir = rootDir.findDirectory(DIR_TRANSFORMATIONS);
  repository.save(transWithoutNote, VERSION_COMMENT_V1, null);
  // So this transformation is cleaned up afterward
  deleteStack.push(transWithoutNote);
  assertNotNull(transWithoutNote.getObjectId());
  assertTrue(hasVersionWithComment(transWithoutNote, VERSION_COMMENT_V1));
  assertTrue(repository.exists(transUniqueName, transDir, RepositoryObjectType.TRANSFORMATION));

  // Second transformation (contains a note)
  String transWithNoteName = "1" + EXP_DBMETA_NAME;
  TransMeta transWithNote = createTransMeta(transWithNoteName);
  transUniqueName = EXP_TRANS_NAME.concat(EXP_DBMETA_NAME);
  TransMeta transWithRules = createTransMeta(EXP_DBMETA_NAME);
  NotePadMeta note = new NotePadMeta("Note Message", 1, 1, 100, 5);
  transWithRules.addNote(note);
  repository.save(transWithRules, VERSION_COMMENT_V1, null);
  // So this transformation is cleaned up afterward
  deleteStack.push(transWithRules);
  assertNotNull(transWithRules.getObjectId());
  assertTrue(hasVersionWithComment(transWithRules, VERSION_COMMENT_V1));
  assertTrue(repository.exists(transUniqueName, transDir, RepositoryObjectType.TRANSFORMATION));

  // Create the rules for the export to the .xml file
  List<ImportRuleInterface> rules = new AbstractList<ImportRuleInterface>() {

    @Override
    public ImportRuleInterface get(int index) {
      TransformationHasANoteImportRule rule = new TransformationHasANoteImportRule();
      rule.setEnabled(true);
      return rule;
    }

    @Override
    public int size() {
      return 1;
    }
  };
  ImportRules importRules = new ImportRules();
  importRules.setRules(rules);

  // Create the exporter
  IRepositoryExporter exporter = repository.getExporter();
  exporter.setImportRulesToValidate(importRules);

  // The export itself
  try {
    // $NON-NLS-1$
    exporter.exportAllObjects(new MockProgressMonitorListener(), exportFileName, null, "all");
    FileObject exportFile = KettleVFS.getFileObject(exportFileName);
    assertNotNull(exportFile);
    MockRepositoryExportParser parser = new MockRepositoryExportParser();
    SAXParserFactory.newInstance().newSAXParser().parse(KettleVFS.getInputStream(exportFile), parser);
    if (parser.getFatalError() != null) {
      throw parser.getFatalError();
    }
    // Only the transformation that contains a note is expected to be exported.
    assertEquals("Incorrect number of transformations", 1, parser.getNodesWithName(RepositoryObjectType.TRANSFORMATION.getTypeDescription()).size());
  } finally {
    KettleVFS.getFileObject(exportFileName).delete();
  }
}
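
The NotePadMeta-specific part of this test is small: a note is attached to one transformation so that TransformationHasANoteImportRule accepts it while the note-less transformation is filtered out of the export. A stripped-down sketch of that setup, outside the repository test harness and with an arbitrary transformation name:

  import org.pentaho.di.core.NotePadMeta;
  import org.pentaho.di.trans.TransMeta;

  public class NoteSetupSketch {
    public static void main(String[] args) {
      TransMeta transMeta = new TransMeta();
      transMeta.setName("trans-with-note");        // illustrative name

      // Same constructor as in the test: note text, x, y, width, height.
      NotePadMeta note = new NotePadMeta("Note Message", 1, 1, 100, 5);
      transMeta.addNote(note);

      // A transformation with at least one note satisfies TransformationHasANoteImportRule.
      System.out.println("Notes attached: " + transMeta.nrNotes());
    }
  }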
Use of org.pentaho.di.core.NotePadMeta in project pentaho-kettle by pentaho.
Example: the pasteXML method of the Spoon class.
public void pasteXML(TransMeta transMeta, String clipcontent, Point loc) {
  if (RepositorySecurityUI.verifyOperations(shell, rep, RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.EXECUTE_TRANSFORMATION)) {
    return;
  }
  try {
    Document doc = XMLHandler.loadXMLString(clipcontent);
    Node transNode = XMLHandler.getSubNode(doc, Spoon.XML_TAG_TRANSFORMATION_STEPS);
    // De-select all, re-select pasted steps...
    transMeta.unselectAll();
    Node stepsNode = XMLHandler.getSubNode(transNode, "steps");
    int nr = XMLHandler.countNodes(stepsNode, "step");
    if (getLog().isDebug()) {
      // "I found "+nr+" steps to paste on location: "
      getLog().logDebug(BaseMessages.getString(PKG, "Spoon.Log.FoundSteps", "" + nr) + loc);
    }
    StepMeta[] steps = new StepMeta[nr];
    ArrayList<String> stepOldNames = new ArrayList<>(nr);

    // Point min = new Point(loc.x, loc.y);
    Point min = new Point(99999999, 99999999);

    // Load the steps...
    for (int i = 0; i < nr; i++) {
      Node stepNode = XMLHandler.getSubNodeByNr(stepsNode, "step", i);
      steps[i] = new StepMeta(stepNode, transMeta.getDatabases(), metaStore);
      if (loc != null) {
        Point p = steps[i].getLocation();
        if (min.x > p.x) {
          min.x = p.x;
        }
        if (min.y > p.y) {
          min.y = p.y;
        }
      }
    }

    // Load the hops...
    Node hopsNode = XMLHandler.getSubNode(transNode, "order");
    nr = XMLHandler.countNodes(hopsNode, "hop");
    if (getLog().isDebug()) {
      // "I found "+nr+" hops to paste."
      getLog().logDebug(BaseMessages.getString(PKG, "Spoon.Log.FoundHops", "" + nr));
    }
    TransHopMeta[] hops = new TransHopMeta[nr];
    for (int i = 0; i < nr; i++) {
      Node hopNode = XMLHandler.getSubNodeByNr(hopsNode, "hop", i);
      hops[i] = new TransHopMeta(hopNode, Arrays.asList(steps));
    }

    // This is the offset:
    Point offset = new Point(loc.x - min.x, loc.y - min.y);

    // Undo/redo object positions...
    int[] position = new int[steps.length];
    for (int i = 0; i < steps.length; i++) {
      Point p = steps[i].getLocation();
      String name = steps[i].getName();
      steps[i].setLocation(p.x + offset.x, p.y + offset.y);
      steps[i].setDraw(true);
      // Check the name, find alternative...
      stepOldNames.add(name);
      steps[i].setName(transMeta.getAlternativeStepname(name));
      transMeta.addStep(steps[i]);
      position[i] = transMeta.indexOfStep(steps[i]);
      steps[i].setSelected(true);
    }

    // Add the hops too...
    for (TransHopMeta hop : hops) {
      transMeta.addTransHop(hop);
    }

    // Load the notes...
    Node notesNode = XMLHandler.getSubNode(transNode, "notepads");
    nr = XMLHandler.countNodes(notesNode, "notepad");
    if (getLog().isDebug()) {
      // "I found "+nr+" notepads to paste."
      getLog().logDebug(BaseMessages.getString(PKG, "Spoon.Log.FoundNotepads", "" + nr));
    }
    NotePadMeta[] notes = new NotePadMeta[nr];
    for (int i = 0; i < notes.length; i++) {
      Node noteNode = XMLHandler.getSubNodeByNr(notesNode, "notepad", i);
      notes[i] = new NotePadMeta(noteNode);
      Point p = notes[i].getLocation();
      notes[i].setLocation(p.x + offset.x, p.y + offset.y);
      transMeta.addNote(notes[i]);
      notes[i].setSelected(true);
    }

    // Set the source and target steps ...
    for (StepMeta step : steps) {
      StepMetaInterface smi = step.getStepMetaInterface();
      smi.searchInfoAndTargetSteps(transMeta.getSteps());
    }

    // Set the error handling hops
    Node errorHandlingNode = XMLHandler.getSubNode(transNode, TransMeta.XML_TAG_STEP_ERROR_HANDLING);
    int nrErrorHandlers = XMLHandler.countNodes(errorHandlingNode, StepErrorMeta.XML_ERROR_TAG);
    for (int i = 0; i < nrErrorHandlers; i++) {
      Node stepErrorMetaNode = XMLHandler.getSubNodeByNr(errorHandlingNode, StepErrorMeta.XML_ERROR_TAG, i);
      StepErrorMeta stepErrorMeta = new StepErrorMeta(transMeta.getParentVariableSpace(), stepErrorMetaNode, transMeta.getSteps());
      // Handle pasting multiple times, need to update source and target step names
      int srcStepPos = stepOldNames.indexOf(stepErrorMeta.getSourceStep().getName());
      int tgtStepPos = stepOldNames.indexOf(stepErrorMeta.getTargetStep().getName());
      StepMeta sourceStep = transMeta.findStep(steps[srcStepPos].getName());
      if (sourceStep != null) {
        sourceStep.setStepErrorMeta(stepErrorMeta);
      }
      sourceStep.setStepErrorMeta(null);
      if (tgtStepPos >= 0) {
        sourceStep.setStepErrorMeta(stepErrorMeta);
        StepMeta targetStep = transMeta.findStep(steps[tgtStepPos].getName());
        stepErrorMeta.setSourceStep(sourceStep);
        stepErrorMeta.setTargetStep(targetStep);
      }
    }

    // Save undo information too...
    addUndoNew(transMeta, steps, position, false);

    int[] hopPos = new int[hops.length];
    for (int i = 0; i < hops.length; i++) {
      hopPos[i] = transMeta.indexOfTransHop(hops[i]);
    }
    addUndoNew(transMeta, hops, hopPos, true);

    int[] notePos = new int[notes.length];
    for (int i = 0; i < notes.length; i++) {
      notePos[i] = transMeta.indexOfNote(notes[i]);
    }
    addUndoNew(transMeta, notes, notePos, true);

    if (transMeta.haveStepsChanged()) {
      refreshTree();
      refreshGraph();
    }
  } catch (KettleException e) {
    // "Error pasting steps...",
    // "I was unable to paste steps to this transformation"
    new ErrorDialog(shell, BaseMessages.getString(PKG, "Spoon.Dialog.UnablePasteSteps.Title"), BaseMessages.getString(PKG, "Spoon.Dialog.UnablePasteSteps.Message"), e);
  }
}
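
Notes are pasted with the same translate-by-offset logic as steps: find the top-left corner of the clipboard content, compute the offset to the click location, and shift every NotePadMeta by it. A hedged, standalone sketch of just that translation step, with made-up coordinates:

  import org.pentaho.di.core.NotePadMeta;
  import org.pentaho.di.core.gui.Point;

  public class PasteOffsetSketch {
    public static void main(String[] args) {
      // Two notes as they were stored on the clipboard (illustrative coordinates and sizes).
      NotePadMeta[] notes = {
        new NotePadMeta("first note", 200, 300, 100, 20),
        new NotePadMeta("second note", 250, 350, 100, 20)
      };

      // Top-left corner of the clipboard content.
      Point min = new Point(Integer.MAX_VALUE, Integer.MAX_VALUE);
      for (NotePadMeta note : notes) {
        Point p = note.getLocation();
        min.x = Math.min(min.x, p.x);
        min.y = Math.min(min.y, p.y);
      }

      // Where the user clicked, and the resulting offset.
      Point loc = new Point(500, 100);
      Point offset = new Point(loc.x - min.x, loc.y - min.y);

      // Shift every note so the block's corner lands on the click location.
      for (NotePadMeta note : notes) {
        Point p = note.getLocation();
        note.setLocation(p.x + offset.x, p.y + offset.y);
        System.out.println(note.getNote() + " -> " + note.getLocation().x + "," + note.getLocation().y);
      }
    }
  }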
Use of org.pentaho.di.core.NotePadMeta in project pentaho-kettle by pentaho.
Example: the mouseDoubleClick method of the TransGraph class.
@Override
public void mouseDoubleClick(MouseEvent e) {
  clearSettings();
  Point real = screen2real(e.x, e.y);
  // Hide the tooltip!
  hideToolTips();
  try {
    ExtensionPointHandler.callExtensionPoint(LogChannel.GENERAL, KettleExtensionPoint.TransGraphMouseDoubleClick.id, new TransGraphExtension(this, e, real));
  } catch (Exception ex) {
    LogChannel.GENERAL.logError("Error calling TransGraphMouseDoubleClick extension point", ex);
  }
  StepMeta stepMeta = transMeta.getStep(real.x, real.y, iconsize);
  if (stepMeta != null) {
    if (e.button == 1) {
      editStep(stepMeta);
    } else {
      editDescription(stepMeta);
    }
  } else {
    // Check if the point lies on one of the many hop-lines...
    TransHopMeta online = findHop(real.x, real.y);
    if (online != null) {
      editHop(online);
    } else {
      NotePadMeta ni = transMeta.getNote(real.x, real.y);
      if (ni != null) {
        selectedNote = null;
        editNote(ni);
      } else {
        // See if the double click was in one of the areas...
        //
        boolean hit = false;
        for (AreaOwner areaOwner : areaOwners) {
          if (areaOwner.contains(real.x, real.y)) {
            if (areaOwner.getParent() instanceof StepMeta && areaOwner.getOwner().equals(TransPainter.STRING_PARTITIONING_CURRENT_STEP)) {
              StepMeta step = (StepMeta) areaOwner.getParent();
              spoon.editPartitioning(transMeta, step);
              hit = true;
              break;
            }
          }
        }
        if (!hit) {
          settings();
        }
      }
    }
  }
}
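
After steps and hops have been ruled out, the handler asks TransMeta.getNote(x, y) whether the double-click landed on a note. A hedged sketch of that hit test in isolation; whether a probed point actually hits depends on the width and height stored in the note, so the coordinates below are illustrative only:

  import org.pentaho.di.core.NotePadMeta;
  import org.pentaho.di.trans.TransMeta;

  public class NoteHitTestSketch {
    public static void main(String[] args) {
      TransMeta transMeta = new TransMeta();

      // A note occupying roughly the rectangle (50,50)-(150,80); sizes are illustrative.
      NotePadMeta note = new NotePadMeta("double-click me", 50, 50, 100, 30);
      transMeta.addNote(note);

      // Same call the mouseDoubleClick handler uses after steps and hops have missed.
      NotePadMeta hit = transMeta.getNote(75, 60);     // inside the note's bounds
      NotePadMeta miss = transMeta.getNote(500, 500);  // far away from it

      System.out.println("hit:  " + (hit != null ? hit.getNote() : "none"));
      System.out.println("miss: " + (miss != null ? miss.getNote() : "none"));
    }
  }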
Use of org.pentaho.di.core.NotePadMeta in project pentaho-kettle by pentaho.
Example: the redoJobAction method of the SpoonJobDelegate class.
public void redoJobAction(JobMeta jobMeta, TransAction transAction) {
  switch (transAction.getType()) {
    // We added a new entry: redo it...
    case TransAction.TYPE_ACTION_NEW_JOB_ENTRY:
      // re-add the entry at the correct location:
      JobEntryCopy[] si = (JobEntryCopy[]) transAction.getCurrent();
      int[] idx = transAction.getCurrentIndex();
      for (int i = 0; i < idx.length; i++) {
        jobMeta.addJobEntry(idx[i], si[i]);
      }
      spoon.refreshTree();
      spoon.refreshGraph();
      break;

    case TransAction.TYPE_ACTION_NEW_NOTE:
      // re-insert the note at the correct location:
      NotePadMeta[] ni = (NotePadMeta[]) transAction.getCurrent();
      idx = transAction.getCurrentIndex();
      for (int i = 0; i < idx.length; i++) {
        jobMeta.addNote(idx[i], ni[i]);
      }
      spoon.refreshTree();
      spoon.refreshGraph();
      break;

    case TransAction.TYPE_ACTION_NEW_JOB_HOP:
      // re-insert the hop at the correct location:
      JobHopMeta[] hi = (JobHopMeta[]) transAction.getCurrent();
      idx = transAction.getCurrentIndex();
      for (int i = 0; i < idx.length; i++) {
        jobMeta.addJobHop(idx[i], hi[i]);
      }
      spoon.refreshTree();
      spoon.refreshGraph();
      break;

    // We deleted something: redo the deletion...
    case TransAction.TYPE_ACTION_DELETE_JOB_ENTRY:
      // re-remove the entry at the correct location:
      idx = transAction.getCurrentIndex();
      for (int i = idx.length - 1; i >= 0; i--) {
        jobMeta.removeJobEntry(idx[i]);
      }
      spoon.refreshTree();
      spoon.refreshGraph();
      break;

    case TransAction.TYPE_ACTION_DELETE_NOTE:
      // re-remove the note at the correct location:
      idx = transAction.getCurrentIndex();
      for (int i = idx.length - 1; i >= 0; i--) {
        jobMeta.removeNote(idx[i]);
      }
      spoon.refreshTree();
      spoon.refreshGraph();
      break;

    case TransAction.TYPE_ACTION_DELETE_JOB_HOP:
      // re-remove the hop at the correct location:
      idx = transAction.getCurrentIndex();
      for (int i = idx.length - 1; i >= 0; i--) {
        jobMeta.removeJobHop(idx[i]);
      }
      spoon.refreshTree();
      spoon.refreshGraph();
      break;

    // We changed a job entry: redo it...
    case TransAction.TYPE_ACTION_CHANGE_JOB_ENTRY:
      // replace with the "current" version.
      for (int i = 0; i < transAction.getCurrent().length; i++) {
        JobEntryCopy copy = (JobEntryCopy) ((JobEntryCopy) (transAction.getCurrent()[i])).clone_deep();
        jobMeta.getJobEntry(transAction.getCurrentIndex()[i]).replaceMeta(copy);
      }
      spoon.refreshTree();
      spoon.refreshGraph();
      break;

    // We changed a note: redo it...
    case TransAction.TYPE_ACTION_CHANGE_NOTE:
      // Delete & re-insert
      ni = (NotePadMeta[]) transAction.getCurrent();
      idx = transAction.getCurrentIndex();
      for (int i = 0; i < idx.length; i++) {
        jobMeta.removeNote(idx[i]);
        jobMeta.addNote(idx[i], ni[i]);
      }
      spoon.refreshTree();
      spoon.refreshGraph();
      break;

    // We changed a hop: redo it...
    case TransAction.TYPE_ACTION_CHANGE_JOB_HOP:
      // Delete & re-insert
      hi = (JobHopMeta[]) transAction.getCurrent();
      idx = transAction.getCurrentIndex();
      for (int i = 0; i < idx.length; i++) {
        jobMeta.removeJobHop(idx[i]);
        jobMeta.addJobHop(idx[i], hi[i]);
      }
      spoon.refreshTree();
      spoon.refreshGraph();
      break;

    // We moved something: redo the move...
    case TransAction.TYPE_ACTION_POSITION_JOB_ENTRY:
      // Find the location of the job entry:
      idx = transAction.getCurrentIndex();
      Point[] p = transAction.getCurrentLocation();
      for (int i = 0; i < p.length; i++) {
        JobEntryCopy entry = jobMeta.getJobEntry(idx[i]);
        entry.setLocation(p[i]);
      }
      spoon.refreshGraph();
      break;

    case TransAction.TYPE_ACTION_POSITION_NOTE:
      idx = transAction.getCurrentIndex();
      Point[] curr = transAction.getCurrentLocation();
      for (int i = 0; i < idx.length; i++) {
        NotePadMeta npi = jobMeta.getNote(idx[i]);
        npi.setLocation(curr[i]);
      }
      spoon.refreshGraph();
      break;

    default:
      break;
  }
}
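
The TYPE_ACTION_CHANGE_NOTE branch redoes a note edit by removing the old NotePadMeta at its index and inserting the replacement at the same index. A standalone sketch of that delete-and-re-insert pattern on a JobMeta, with made-up index and note text:

  import org.pentaho.di.core.NotePadMeta;
  import org.pentaho.di.job.JobMeta;

  public class RedoChangeNoteSketch {
    public static void main(String[] args) {
      JobMeta jobMeta = new JobMeta();
      jobMeta.addNote(new NotePadMeta("original note", 10, 10, 80, 20));

      // What the TransAction would carry for a changed note: the new version plus its index.
      NotePadMeta changed = new NotePadMeta("edited note", 10, 10, 80, 20);
      int idx = 0;

      // Delete & re-insert at the same position, exactly as the redo branch does.
      jobMeta.removeNote(idx);
      jobMeta.addNote(idx, changed);

      System.out.println("Note " + idx + " is now: " + jobMeta.getNote(idx).getNote());
    }
  }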