Use of org.knime.core.internal.ReferencedFile in project knime-core by knime.
Class FileNodeContainerMetaPersistor, method saveJobManagerInternalsDirectory:
protected static void saveJobManagerInternalsDirectory(final NodeSettingsWO settings, final NodeContainer nc, final ReferencedFile targetDir) {
    NodeExecutionJobManager jobManager = nc.getJobManager();
    if (jobManager != null && jobManager.canSaveInternals()) {
        String dirName = "job_manager_internals";
        File dir = new File(targetDir.getFile(), dirName);
        if (dir.exists()) {
            LOGGER.warn("Directory \"" + dir.getAbsolutePath() + "\"" + " already exists; deleting it");
            FileUtil.deleteRecursively(dir);
        }
        if (!dir.mkdirs()) {
            LOGGER.error("Unable to create directory \"" + dir.getAbsolutePath() + "\"");
            return;
        }
        try {
            jobManager.saveInternals(new ReferencedFile(targetDir, dirName));
            settings.addString(CFG_JOB_MANAGER_DIR, dirName);
        } catch (Throwable e) {
            if (!(e instanceof IOException)) {
                LOGGER.coding("Saving internals of job manager should " + "only throw IOException, caught " + e.getClass().getSimpleName());
            }
            String error = "Saving job manager internals failed: " + e.getMessage();
            LOGGER.error(error, e);
        }
    }
}
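The method resolves the internals directory in two ways: as a plain java.io.File for the existence check, and as a ReferencedFile child handed to the job manager, so the saved path stays anchored to the workflow directory. A minimal sketch of that parent/child resolution, using only the constructors and getFile() visible above; the directory name and error text are illustrative:

import java.io.File;
import java.io.IOException;
import org.knime.core.internal.ReferencedFile;

static ReferencedFile resolveInternalsDir(final ReferencedFile workflowDir) throws IOException {
    // The child is expressed relative to the parent ReferencedFile, not as an absolute path.
    ReferencedFile internalsRef = new ReferencedFile(workflowDir, "job_manager_internals");
    File onDisk = internalsRef.getFile(); // materializes the java.io.File behind the reference
    if (!onDisk.exists() && !onDisk.mkdirs()) {
        throw new IOException("Unable to create directory \"" + onDisk.getAbsolutePath() + "\"");
    }
    return internalsRef;
}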
Use of org.knime.core.internal.ReferencedFile in project knime-core by knime.
Class FileWorkflowPersistor, method preLoadNodeContainer:
/**
* {@inheritDoc}
*/
@Override
public void preLoadNodeContainer(final WorkflowPersistor parentPersistor, final NodeSettingsRO parentSettings, final LoadResult loadResult) throws InvalidSettingsException, IOException {
    m_parentPersistor = parentPersistor;
    final ReferencedFile knimeFile = getWorkflowKNIMEFile();
    if (knimeFile == null || !knimeFile.getFile().isFile()) {
        setDirtyAfterLoad();
        String error = "Can't read workflow file \"" + knimeFile + "\"";
        throw new IOException(error);
    }
    // workflow.knime (or template.knime)
    File nodeFile = knimeFile.getFile();
    ReferencedFile parentRef = knimeFile.getParent();
    if (parentRef == null) {
        setDirtyAfterLoad();
        throw new IOException("Parent directory of file \"" + knimeFile + "\" is not represented by " + ReferencedFile.class.getSimpleName() + " object");
    }
    m_mustWarnOnDataLoadError = loadIfMustWarnOnDataLoadError(parentRef.getFile());
    NodeSettingsRO subWFSettings;
    try {
        InputStream in = new FileInputStream(nodeFile);
        if (m_parentPersistor != null) {
            // real metanode, not a project
            // the workflow.knime (or template.knime) file is not encrypted
            // with this metanode's cipher but possibly with a parent
            // cipher
            in = m_parentPersistor.decipherInput(in);
        }
        in = new BufferedInputStream(in);
        subWFSettings = NodeSettings.loadFromXML(in);
    } catch (IOException ioe) {
        setDirtyAfterLoad();
        throw ioe;
    }
    m_workflowSett = subWFSettings;
    try {
        if (m_nameOverwrite != null) {
            m_name = m_nameOverwrite;
        } else {
            m_name = loadWorkflowName(m_workflowSett);
        }
    } catch (InvalidSettingsException e) {
        String error = "Unable to load workflow name: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        m_name = null;
    }
    try {
        m_workflowCipher = loadWorkflowCipher(getLoadVersion(), m_workflowSett);
    } catch (InvalidSettingsException e) {
        String error = "Unable to load workflow cipher: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        m_workflowCipher = WorkflowCipher.NULL_CIPHER;
    }
    try {
        if (m_templateInformation != null) {
            // template information was set after construction (this node is a link created from a template)
            assert m_templateInformation.getRole() == Role.Link;
        } else {
            m_templateInformation = MetaNodeTemplateInformation.load(m_workflowSett, getLoadVersion());
            CheckUtils.checkSettingNotNull(m_templateInformation, "No template information");
        }
    } catch (InvalidSettingsException e) {
        String error = "Unable to load workflow template information: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        m_templateInformation = MetaNodeTemplateInformation.NONE;
    }
    try {
        m_authorInformation = loadAuthorInformation(m_workflowSett);
    } catch (InvalidSettingsException e) {
        String error = "Unable to load workflow author information: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        m_authorInformation = AuthorInformation.UNKNOWN;
    }
    try {
        m_workflowVariables = loadWorkflowVariables(m_workflowSett);
    } catch (InvalidSettingsException e) {
        String error = "Unable to load workflow variables: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        m_workflowVariables = Collections.emptyList();
    }
    try {
        m_credentials = loadCredentials(m_workflowSett);
        // request to initialize credentials - if available
        if (m_credentials != null && !m_credentials.isEmpty()) {
            m_credentials = getLoadHelper().loadCredentials(m_credentials);
        }
    } catch (InvalidSettingsException e) {
        String error = "Unable to load credentials: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        m_credentials = Collections.emptyList();
    }
    try {
        m_workflowAnnotations = loadWorkflowAnnotations(m_workflowSett);
    } catch (InvalidSettingsException e) {
        String error = "Unable to load workflow annotations: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        m_workflowAnnotations = Collections.emptyList();
    }
    try {
        m_wizardState = loadWizardState(m_workflowSett);
    } catch (InvalidSettingsException e) {
        String error = "Unable to load wizard state: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        m_wizardState = null;
    }
    NodeSettingsRO metaFlowParentSettings = new NodeSettings("fake_parent_settings");
    try {
        metaFlowParentSettings = readParentSettings();
    } catch (IOException e1) {
        String error = "Errors reading settings file: " + e1.getMessage();
        getLogger().warn(error, e1);
        setDirtyAfterLoad();
        loadResult.addError(error);
    }
    boolean isResetRequired = m_metaPersistor.load(subWFSettings, metaFlowParentSettings, loadResult);
    if (isResetRequired) {
        setNeedsResetAfterLoad();
    }
    if (m_metaPersistor.isDirtyAfterLoad()) {
        setDirtyAfterLoad();
    }
    /* read in and outports */
    NodeSettingsRO inPortsEnum = EMPTY_SETTINGS;
    try {
        NodeSettingsRO inPorts = loadInPortsSetting(m_workflowSett);
        if (inPorts != null) {
            inPortsEnum = loadInPortsSettingsEnum(inPorts);
        }
    } catch (InvalidSettingsException e) {
        String error = "Can't load workflow ports, config not found";
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        setNeedsResetAfterLoad();
    }
    int inPortCount = inPortsEnum.keySet().size();
    m_inPortTemplates = new WorkflowPortTemplate[inPortCount];
    for (String key : inPortsEnum.keySet()) {
        WorkflowPortTemplate p;
        try {
            NodeSettingsRO sub = inPortsEnum.getNodeSettings(key);
            p = loadInPortTemplate(sub);
        } catch (InvalidSettingsException e) {
String error = "Can't load workflow inport (internal ID \"" + key + "\", skipping it: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            setNeedsResetAfterLoad();
            continue;
        }
        int index = p.getPortIndex();
        if (index < 0 || index >= inPortCount) {
            setDirtyAfterLoad();
            loadResult.addError("Invalid inport index " + index);
            setNeedsResetAfterLoad();
            continue;
        }
        if (m_inPortTemplates[index] != null) {
            setDirtyAfterLoad();
            loadResult.addError("Duplicate inport definition for index: " + index);
        }
        m_inPortTemplates[index] = p;
    }
    for (int i = 0; i < m_inPortTemplates.length; i++) {
        if (m_inPortTemplates[i] == null) {
            setDirtyAfterLoad();
            loadResult.addError("Assigning fallback port type for " + "missing input port " + i);
            m_inPortTemplates[i] = new WorkflowPortTemplate(i, FALLBACK_PORTTYPE);
        }
    }
    NodeSettingsRO outPortsEnum = EMPTY_SETTINGS;
    try {
        NodeSettingsRO outPorts = loadOutPortsSetting(m_workflowSett);
        if (outPorts != null) {
            outPortsEnum = loadOutPortsSettingsEnum(outPorts);
        }
    } catch (InvalidSettingsException e) {
        String error = "Can't load workflow out ports, config not found: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
    }
    int outPortCount = outPortsEnum.keySet().size();
    m_outPortTemplates = new WorkflowPortTemplate[outPortCount];
    for (String key : outPortsEnum.keySet()) {
        WorkflowPortTemplate p;
        try {
            NodeSettingsRO sub = outPortsEnum.getNodeSettings(key);
            p = loadOutPortTemplate(sub);
        } catch (InvalidSettingsException e) {
String error = "Can't load workflow outport (internal ID \"" + key + "\", skipping it: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            setNeedsResetAfterLoad();
            continue;
        }
        int index = p.getPortIndex();
        if (index < 0 || index >= outPortCount) {
            setDirtyAfterLoad();
loadResult.addError("Invalid inport index " + index);
            setNeedsResetAfterLoad();
            continue;
        }
        if (m_outPortTemplates[index] != null) {
            setDirtyAfterLoad();
            loadResult.addError("Duplicate outport definition for index: " + index);
        }
        m_outPortTemplates[index] = p;
    }
    for (int i = 0; i < m_outPortTemplates.length; i++) {
        if (m_outPortTemplates[i] == null) {
            setDirtyAfterLoad();
            loadResult.addError("Assigning fallback port type for " + "missing output port " + i);
            m_outPortTemplates[i] = new WorkflowPortTemplate(i, FALLBACK_PORTTYPE);
        }
    }
    boolean hasPorts = inPortCount > 0 || outPortCount > 0;
    if (hasPorts && m_isProject) {
        throw new InvalidSettingsException(String.format("Workflow \"%s\"" + " is not a project as it has ports (%d in, %d out)", nodeFile.getAbsoluteFile(), inPortCount, outPortCount));
    }
    NodeSettingsRO inPorts = EMPTY_SETTINGS;
    NodeUIInformation inPortsBarUIInfo = null;
    String uiInfoClassName = null;
    try {
        inPorts = loadInPortsSetting(m_workflowSett);
        if (inPorts != null) {
            uiInfoClassName = loadInPortsBarUIInfoClassName(inPorts);
        }
    } catch (InvalidSettingsException e) {
        String error = "Unable to load class name for inport bar's " + "UI information: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
    }
    if (uiInfoClassName != null) {
        try {
            if (!getLoadVersion().isOlderThan(LoadVersion.V200)) {
                inPortsBarUIInfo = loadNodeUIInformation(inPorts);
            }
        } catch (InvalidSettingsException e) {
            String error = "Unable to load inport bar's UI information: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            inPortsBarUIInfo = null;
        }
    }
    NodeSettingsRO outPorts = null;
    m_inPortsBarUIInfo = inPortsBarUIInfo;
    NodeUIInformation outPortsBarUIInfo = null;
    uiInfoClassName = null;
    try {
        // TODO probably not necessary anymore to store the ui information class name (it's node ui information anyway)
        outPorts = loadOutPortsSetting(m_workflowSett);
        if (outPorts != null) {
            uiInfoClassName = loadOutPortsBarUIInfoClassName(outPorts);
        }
    } catch (InvalidSettingsException e) {
        String error = "Unable to load class name for outport bar's UI information" + ", no UI information available: " + e.getMessage();
        setDirtyAfterLoad();
        getLogger().debug(error, e);
        loadResult.addError(error);
    }
    if (uiInfoClassName != null) {
        try {
            if (!getLoadVersion().isOlderThan(LoadVersion.V200)) {
                outPortsBarUIInfo = loadNodeUIInformation(outPorts);
            }
        } catch (InvalidSettingsException e) {
            String error = "Unable to load outport bar's UI information: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            outPortsBarUIInfo = null;
        }
    }
    m_outPortsBarUIInfo = outPortsBarUIInfo;
    try {
        m_editorUIInfo = loadEditorUIInformation(m_workflowSett);
    } catch (InvalidSettingsException e) {
        String error = "Unable to load editor UI information: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        m_editorUIInfo = null;
    }
}
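Almost every block in this method repeats one recovery pattern: read a single settings entry, and on InvalidSettingsException log it, mark the persistor dirty, record the error in the LoadResult, and substitute a harmless default (an empty list, NULL_CIPHER, UNKNOWN author information, and so on). A hedged sketch of that pattern as a standalone helper; the helper class and its error list are hypothetical, only InvalidSettingsException comes from the KNIME API:

import java.util.ArrayList;
import java.util.List;
import org.knime.core.node.InvalidSettingsException;

final class LoadWithFallback {

    /** Stand-in for one of the loadXYZ(m_workflowSett) calls above. */
    interface SettingsReader<T> {
        T read() throws InvalidSettingsException;
    }

    private final List<String> m_errors = new ArrayList<>();
    private boolean m_dirty;

    <T> T loadOrFallback(final SettingsReader<T> reader, final T fallback, final String what) {
        try {
            return reader.read();
        } catch (InvalidSettingsException e) {
            m_errors.add("Unable to load " + what + ": " + e.getMessage()); // mirrors loadResult.addError(...)
            m_dirty = true;                                                 // mirrors setDirtyAfterLoad()
            return fallback;
        }
    }
}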
Use of org.knime.core.internal.ReferencedFile in project knime-core by knime.
Class ObsoleteMetaNodeFileWorkflowPersistor, method preLoadNodeContainer:
/**
* {@inheritDoc}
*/
@Override
public void preLoadNodeContainer(final WorkflowPersistor parentPersistor, final NodeSettingsRO parentSettings, final LoadResult result) throws IOException, InvalidSettingsException {
    ReferencedFile workflowKNIMEFile = super.getWorkflowKNIMEFile();
    File setFile = workflowKNIMEFile.getFile();
    if (!setFile.getName().equals("settings.xml")) {
        String warn = "Settings file of obsolete metanode is not " + "named settings.xml: " + setFile.getName();
        getLogger().warn(warn);
        result.addError(warn);
    }
    ReferencedFile parent = workflowKNIMEFile.getParent();
    if (parent == null) {
        throw new IOException("Parent directory not represented by class " + ReferencedFile.class);
    }
    ReferencedFile workflowKnimeRef = new ReferencedFile(parent, "workflow.knime");
    File workflowKnime = workflowKnimeRef.getFile();
    if (!workflowKnime.isFile()) {
        throw new IOException("Can't find file " + workflowKnime.getAbsolutePath());
    }
    String factory = parentSettings.getString("factory");
    if ("org.knime.base.node.meta.xvalidation.XValidateNodeFactory".equals(factory)) {
        m_metaNodeType = MetaNodeType.CROSSVALIDATION;
    } else if ("org.knime.base.node.meta.looper.LooperFactory".equals(factory)) {
        m_metaNodeType = MetaNodeType.LOOPER;
    } else {
        m_metaNodeType = MetaNodeType.ORDINARY;
    }
    NodeSettingsRO settings = NodeSettings.loadFromXML(new BufferedInputStream(new FileInputStream(setFile)));
    NodeSettingsRO modelSet = settings.getNodeSettings("model");
    m_dataInNodeIDs = modelSet.getIntArray("dataInContainerIDs");
    m_dataOutNodeIDs = modelSet.getIntArray("dataOutContainerIDs");
    super.preLoadNodeContainer(parentPersistor, parentSettings, result);
    String name = "Looper";
    switch (m_metaNodeType) {
        case CROSSVALIDATION:
            name = "Cross Validation";
        case LOOPER:
            result.addError("Workflow contains obsolete \"" + name + "\" metanode implementation, not all settings could " + "be restored, please re-configure and execute again.");
            setNeedsResetAfterLoad();
        default:
    }
}
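Two details are easy to miss here: the sibling file workflow.knime is resolved through the parent ReferencedFile of settings.xml rather than through raw paths, and the switch at the end deliberately falls through from CROSSVALIDATION to LOOPER so both obsolete node types produce the warning. A minimal sketch of the sibling lookup, assuming only the constructors and accessors already used above; the error texts are illustrative:

import java.io.File;
import java.io.IOException;
import org.knime.core.internal.ReferencedFile;

static ReferencedFile resolveSibling(final ReferencedFile file, final String siblingName) throws IOException {
    ReferencedFile parent = file.getParent();
    if (parent == null) {
        throw new IOException("Parent directory of \"" + file + "\" is not tracked as a ReferencedFile");
    }
    // Build the sibling relative to the same parent so it stays in the referenced-file tree.
    ReferencedFile siblingRef = new ReferencedFile(parent, siblingName);
    File onDisk = siblingRef.getFile();
    if (!onDisk.isFile()) {
        throw new IOException("Can't find file " + onDisk.getAbsolutePath());
    }
    return siblingRef;
}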
Use of org.knime.core.internal.ReferencedFile in project knime-core by knime.
Class SubNodeContainer, method saveAsTemplate:
/**
* {@inheritDoc}
*/
@Override
public MetaNodeTemplateInformation saveAsTemplate(final File directory, final ExecutionMonitor exec) throws IOException, CanceledExecutionException, LockFailedException, InvalidSettingsException {
    WorkflowManager tempParent = WorkflowManager.lazyInitTemplateWorkflowRoot();
    SubNodeContainer copy = null;
    ReferencedFile workflowDirRef = new ReferencedFile(directory);
    directory.mkdir();
    workflowDirRef.lock();
    try {
        WorkflowCopyContent.Builder cntBuilder = WorkflowCopyContent.builder();
        cntBuilder.setNodeIDs(getID());
        WorkflowCopyContent cnt;
        synchronized (m_nodeMutex) {
            cnt = tempParent.copyFromAndPasteHere(getParent(), cntBuilder.build());
        }
        NodeID cID = cnt.getNodeIDs()[0];
        copy = ((SubNodeContainer) tempParent.getNodeContainer(cID));
        try (WorkflowLock copyLock = copy.lock()) {
            SingleNodeContainerSettings sncSettings = copy.getSingleNodeContainerSettings().clone();
            sncSettings.setModelSettings(new NodeSettings("empty model"));
            sncSettings.setVariablesSettings(new NodeSettings("empty variables setting"));
            NodeSettings newSettings = new NodeSettings("new settings");
            sncSettings.save(newSettings);
            copy.loadSettings(newSettings);
            MetaNodeTemplateInformation template = MetaNodeTemplateInformation.createNewTemplate(SubNodeContainer.class);
            synchronized (copy.m_nodeMutex) {
                copy.setTemplateInformation(template);
                copy.setName(null);
                NodeSettings templateSettings = MetaNodeTemplateInformation.createNodeSettingsForTemplate(copy);
                templateSettings.saveToXML(new FileOutputStream(new File(workflowDirRef.getFile(), WorkflowPersistor.TEMPLATE_FILE)));
                FileSingleNodeContainerPersistor.save(copy, workflowDirRef, exec, new WorkflowSaveHelper(true, false));
            }
            return template;
        }
    } finally {
        if (copy != null) {
            tempParent.removeNode(copy.getID());
        }
        workflowDirRef.unlock();
    }
}
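saveAsTemplate brackets every write to the target directory with ReferencedFile.lock()/unlock(), releasing the lock in a finally block even when saving fails. A condensed sketch of that locking discipline, using only lock(), unlock() and getFile() as shown above; the file name and payload are placeholders, not the real template format:

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import org.knime.core.internal.ReferencedFile;

static void writeUnderLock(final File directory, final byte[] payload) throws IOException {
    ReferencedFile dirRef = new ReferencedFile(directory);
    if (!directory.exists() && !directory.mkdirs()) {
        throw new IOException("Unable to create directory \"" + directory.getAbsolutePath() + "\"");
    }
    dirRef.lock(); // keep concurrent writers out of the directory while it is written
    try {
        File target = new File(dirRef.getFile(), "template.knime"); // illustrative name only
        Files.write(target.toPath(), payload);
    } finally {
        dirRef.unlock(); // always released, mirroring the finally block above
    }
}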
Use of org.knime.core.internal.ReferencedFile in project knime-core by knime.
Class Node, method loadDataAndInternals:
/**
* Loads data from an argument persistor.
* @param loader To load from.
* @param exec For progress.
* @param loadResult to add errors and warnings to (if any)
* @noreference This method is not intended to be referenced by clients.
*/
public void loadDataAndInternals(final NodeContentPersistor loader, final ExecutionMonitor exec, final LoadResult loadResult) {
    LOGGER.assertLog(NodeContext.getContext() != null, "No node context available, please check call hierarchy and fix it");
    boolean hasContent = loader.hasContent();
    m_model.setHasContent(hasContent);
    for (int i = 0; i < getNrOutPorts(); i++) {
        PortObjectSpec spec = loader.getPortObjectSpec(i);
        if (checkPortObjectSpecClass(spec, i)) {
            m_outputs[i].spec = spec;
        } else {
            Class<? extends PortObjectSpec> specClass = m_outputs[i].type.getPortObjectSpecClass();
            loadResult.addError("Loaded PortObjectSpec of class \"" + spec.getClass().getSimpleName() + "\", expected " + specClass.getSimpleName());
            loader.setNeedsResetAfterLoad();
        }
        PortObject obj = loader.getPortObject(i);
        if (checkPortObjectClass(obj, i)) {
            m_outputs[i].object = obj;
            m_outputs[i].summary = loader.getPortObjectSummary(i);
        } else {
            Class<? extends PortObject> objClass = m_outputs[i].type.getPortObjectClass();
            loadResult.addError("Loaded PortObject of class \"" + obj.getClass().getSimpleName() + "\", expected " + objClass.getSimpleName());
            loader.setNeedsResetAfterLoad();
        }
        if (m_outputs[i].object != null) {
            // overwrites the spec that was read a few lines above
            spec = m_outputs[i].object.getSpec();
            m_outputs[i].spec = spec;
            m_outputs[i].hiliteHdl = (i == 0) ? null : m_model.getOutHiLiteHandler(i - 1);
        }
    }
    m_model.restoreWarningMessage(loader.getWarningMessage());
    ReferencedFile internDirRef = loader.getNodeInternDirectory();
    if (internDirRef != null) {
        internDirRef.lock();
        try {
            exec.setMessage("Loading internals");
            m_model.loadInternals(internDirRef.getFile(), exec);
        } catch (Throwable e) {
            String error;
            if (e instanceof IOException) {
                error = "Loading model internals failed: " + e.getMessage();
                if (loader.mustWarnOnDataLoadError()) {
                    LOGGER.debug(error, e);
                } else {
                    LOGGER.debug(error);
                }
            } else {
                error = "Caught \"" + e.getClass().getSimpleName() + "\", " + "Loading model internals failed: " + e.getMessage();
                LOGGER.coding(error, e);
            }
            loadResult.addError(error, true);
        } finally {
            internDirRef.unlock();
        }
    }
    if (m_model instanceof BufferedDataTableHolder || m_model instanceof PortObjectHolder) {
        m_internalHeldPortObjects = loader.getInternalHeldPortObjects();
        if (m_internalHeldPortObjects != null) {
            if (m_model instanceof PortObjectHolder) {
                PortObject[] copy = Arrays.copyOf(m_internalHeldPortObjects, m_internalHeldPortObjects.length);
                ((PortObjectHolder) m_model).setInternalPortObjects(copy);
            } else {
                assert m_model instanceof BufferedDataTableHolder;
                BufferedDataTable[] copy;
                try {
                    copy = NodeModel.toBDTArray(m_internalHeldPortObjects, "Internal held objects array index", m_model.getClass().getSimpleName() + " should implement " + PortObjectHolder.class.getSimpleName() + " and not " + BufferedDataTableHolder.class.getSimpleName());
                    ((BufferedDataTableHolder) m_model).setInternalTables(copy);
                } catch (IOException e) {
                    loadResult.addError(e.getMessage(), true);
                }
            }
        }
    }
}
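When internals are restored, the internal directory's ReferencedFile is locked for the duration of loadInternals, any throwable is converted into a load error (with non-IOExceptions flagged as coding problems), and the lock is released in a finally block. A condensed sketch of that guard; the loader callback is a hypothetical stand-in for the protected NodeModel#loadInternals call, which is only reachable from within the org.knime.core.node package:

import java.io.File;
import java.io.IOException;
import org.knime.core.internal.ReferencedFile;

final class InternalsGuard {

    /** Hypothetical callback standing in for NodeModel#loadInternals(File, ExecutionMonitor). */
    interface InternalsLoader {
        void load(File directory) throws IOException;
    }

    /** Returns null on success, otherwise an error message suitable for a LoadResult. */
    static String loadGuarded(final ReferencedFile internDirRef, final InternalsLoader loader) {
        internDirRef.lock(); // keep the directory stable while it is read
        try {
            loader.load(internDirRef.getFile());
            return null;
        } catch (Throwable t) {
            return t instanceof IOException
                ? "Loading model internals failed: " + t.getMessage()
                : "Caught \"" + t.getClass().getSimpleName() + "\", loading model internals failed: " + t.getMessage();
        } finally {
            internDirRef.unlock();
        }
    }
}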