Example 6 with ReferencedFile

use of org.knime.core.internal.ReferencedFile in project knime-core by knime.

the class FileWorkflowPersistor method saveContent.

/**
 * @param wm The WFM to save.
 * @param preFilledSettings The settings eventually written to workflow.knime (or workflow.knime.encrypted).
 * For workflows it contains the version number, cipher, template info, etc. The name of the setting defines the
 * output file name (so it's important!)
 * @param rawWorkflowDirRef To save to.
 * @param execMon ...
 * @param saveHelper ...
 * @throws IOException ...
 * @throws CanceledExecutionException ...
 * @throws LockFailedException ...
 */
private static void saveContent(final WorkflowManager wm, final NodeSettings preFilledSettings, final ReferencedFile rawWorkflowDirRef, final ExecutionMonitor execMon, final WorkflowSaveHelper saveHelper) throws IOException, CanceledExecutionException, LockFailedException {
    ReferencedFile workflowDirRef = rawWorkflowDirRef;
    Role r = wm.getTemplateInformation().getRole();
    final String fName = preFilledSettings.getKey();
    if (!workflowDirRef.fileLockRootForVM()) {
        throw new LockFailedException("Can't write workflow to \"" + workflowDirRef + "\" because the directory can't be locked");
    }
    try {
        final ReferencedFile nodeContainerDirectory = wm.getNodeContainerDirectory();
        final ReferencedFile autoSaveDirectory = wm.getAutoSaveDirectory();
        if (!saveHelper.isAutoSave() && workflowDirRef.equals(nodeContainerDirectory)) {
            if (!nodeContainerDirectory.isDirty()) {
                return;
            } else {
                // update variable assignment to do changes on member
                workflowDirRef = nodeContainerDirectory;
                // delete "old" node directories if not saving to the working
                // directory -- do this before saving the nodes (dirs newly created)
                WorkflowManager.deleteObsoleteNodeDirs(nodeContainerDirectory.getDeletedNodesFileLocations());
            }
        }
        if (saveHelper.isAutoSave() && workflowDirRef.equals(autoSaveDirectory)) {
            if (!autoSaveDirectory.isDirty()) {
                return;
            } else {
                workflowDirRef = autoSaveDirectory;
                WorkflowManager.deleteObsoleteNodeDirs(autoSaveDirectory.getDeletedNodesFileLocations());
            }
        }
        File workflowDir = workflowDirRef.getFile();
        workflowDir.mkdirs();
        if (!workflowDir.isDirectory()) {
            throw new IOException("Unable to create or write directory \": " + workflowDir + "\"");
        }
        saveWorkflowName(preFilledSettings, wm.getNameField());
        saveAuthorInformation(wm.getAuthorInformation(), preFilledSettings);
        saveWorkflowCipher(preFilledSettings, wm.getWorkflowCipher());
        FileNodeContainerMetaPersistor.save(preFilledSettings, wm, workflowDirRef);
        saveWorkflowVariables(wm, preFilledSettings);
        saveCredentials(wm, preFilledSettings);
        saveWorkflowAnnotations(wm, preFilledSettings);
        NodeSettingsWO nodesSettings = saveSettingsForNodes(preFilledSettings);
        Collection<NodeContainer> nodes = wm.getNodeContainers();
        double progRatio = 1.0 / (nodes.size() + 1);
        for (NodeContainer nextNode : nodes) {
            int id = nextNode.getID().getIndex();
            ExecutionMonitor subExec = execMon.createSubProgress(progRatio);
            execMon.setMessage(nextNode.getNameWithID());
            NodeSettingsWO sub = nodesSettings.addNodeSettings("node_" + id);
            saveNodeContainer(sub, workflowDirRef, nextNode, subExec, saveHelper);
            subExec.setProgress(1.0);
        }
        execMon.setMessage("connection information");
        NodeSettingsWO connSettings = saveSettingsForConnections(preFilledSettings);
        int connectionNumber = 0;
        for (ConnectionContainer cc : wm.getConnectionContainers()) {
            NodeSettingsWO nextConnectionConfig = connSettings.addNodeSettings("connection_" + connectionNumber);
            saveConnection(nextConnectionConfig, cc);
            connectionNumber += 1;
        }
        int inCount = wm.getNrInPorts();
        NodeSettingsWO inPortsSetts = inCount > 0 ? saveInPortsSetting(preFilledSettings) : null;
        NodeSettingsWO inPortsSettsEnum = null;
        if (inPortsSetts != null) {
            // TODO actually not necessary to save the class name
            saveInportsBarUIInfoClassName(inPortsSetts, wm.getInPortsBarUIInfo());
            saveInportsBarUIInfoSettings(inPortsSetts, wm.getInPortsBarUIInfo());
            inPortsSettsEnum = saveInPortsEnumSetting(inPortsSetts);
        }
        for (int i = 0; i < inCount; i++) {
            NodeSettingsWO sPort = saveInPortSetting(inPortsSettsEnum, i);
            saveInPort(sPort, wm, i);
        }
        int outCount = wm.getNrOutPorts();
        NodeSettingsWO outPortsSetts = outCount > 0 ? saveOutPortsSetting(preFilledSettings) : null;
        NodeSettingsWO outPortsSettsEnum = null;
        if (outPortsSetts != null) {
            saveOutportsBarUIInfoClassName(outPortsSetts, wm.getOutPortsBarUIInfo());
            saveOutportsBarUIInfoSettings(outPortsSetts, wm.getOutPortsBarUIInfo());
            outPortsSettsEnum = saveOutPortsEnumSetting(outPortsSetts);
        }
        for (int i = 0; i < outCount; i++) {
            NodeSettingsWO singlePort = saveOutPortSetting(outPortsSettsEnum, i);
            saveOutPort(singlePort, wm, i);
        }
        saveEditorUIInformation(wm, preFilledSettings);
        File workflowFile = new File(workflowDir, fName);
        String toBeDeletedFileName = Role.Template.equals(r) ? TEMPLATE_FILE : WORKFLOW_FILE;
        new File(workflowDir, toBeDeletedFileName).delete();
        new File(workflowDir, WorkflowCipher.getCipherFileName(toBeDeletedFileName)).delete();
        OutputStream os = new FileOutputStream(workflowFile);
        os = wm.getDirectNCParent().cipherOutput(os);
        preFilledSettings.saveToXML(os);
        if (saveHelper.isSaveData()) {
            File saveWithDataFile = new File(workflowDir, SAVED_WITH_DATA_FILE);
            BufferedWriter o = new BufferedWriter(new FileWriter(saveWithDataFile));
            o.write("Do not delete this file!");
            o.newLine();
            o.write("This file serves to indicate that the workflow was written as part of the usual save " + "routine (not exported).");
            o.newLine();
            o.newLine();
            o.write("Workflow was last saved by user ");
            o.write(System.getProperty("user.name"));
            o.write(" on " + new Date());
            o.close();
        }
        if (saveHelper.isAutoSave() && autoSaveDirectory == null) {
            wm.setAutoSaveDirectory(workflowDirRef);
        }
        if (!saveHelper.isAutoSave() && nodeContainerDirectory == null) {
            wm.setNodeContainerDirectory(workflowDirRef);
        }
        NodeContainerState wmState = wm.getNodeContainerState();
        // non remote executions
        boolean isExecutingLocally = wmState.isExecutionInProgress() && !wmState.isExecutingRemotely();
        if (workflowDirRef.equals(nodeContainerDirectory) && !isExecutingLocally) {
            wm.unsetDirty();
        }
        workflowDirRef.setDirty(isExecutingLocally);
        execMon.setProgress(1.0);
    } finally {
        workflowDirRef.fileUnlockRootForVM();
    }
}
Also used : LockFailedException(org.knime.core.util.LockFailedException) NodeSettingsWO(org.knime.core.node.NodeSettingsWO) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) FileWriter(java.io.FileWriter) IOException(java.io.IOException) ReferencedFile(org.knime.core.internal.ReferencedFile) Date(java.util.Date) BufferedWriter(java.io.BufferedWriter) Role(org.knime.core.node.workflow.MetaNodeTemplateInformation.Role) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) File(java.io.File)
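
The essential discipline in saveContent is the lock/write/unlock sequence on the target ReferencedFile. The following is a minimal sketch of that pattern, using only the ReferencedFile calls visible in the snippet (fileLockRootForVM, getFile, fileUnlockRootForVM); the class name and the write step are placeholders, not KNIME code.

import java.io.File;
import java.io.IOException;

import org.knime.core.internal.ReferencedFile;
import org.knime.core.util.LockFailedException;

// Sketch only: lock the directory root for this VM, write, always unlock in finally.
public final class ReferencedFileLockSketch {
    public static void writeLocked(final ReferencedFile dirRef) throws IOException, LockFailedException {
        if (!dirRef.fileLockRootForVM()) {
            throw new LockFailedException("Can't write to \"" + dirRef + "\" because the directory can't be locked");
        }
        try {
            File dir = dirRef.getFile();
            dir.mkdirs();
            if (!dir.isDirectory()) {
                throw new IOException("Unable to create or write directory \"" + dir + "\"");
            }
            // ... write workflow.knime and the node directories here ...
        } finally {
            dirRef.fileUnlockRootForVM();
        }
    }
}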

Example 7 with ReferencedFile

use of org.knime.core.internal.ReferencedFile in project knime-core by knime.

the class FileWorkflowPersistor method loadNodeContainer.

/**
 * {@inheritDoc}
 */
@Override
public void loadNodeContainer(final Map<Integer, BufferedDataTable> tblRep, final ExecutionMonitor exec, final LoadResult loadResult) throws CanceledExecutionException, IOException {
    ReferencedFile workflowKNIMEFile = getWorkflowKNIMEFile();
    if (workflowKNIMEFile == null || m_workflowSett == null) {
        setDirtyAfterLoad();
        throw new IllegalStateException("The method preLoadNodeContainer has either not been called or failed");
    }
    /* read nodes */
    NodeSettingsRO nodes;
    try {
        nodes = loadSettingsForNodes(m_workflowSett);
    } catch (InvalidSettingsException e) {
        String error = "Can't load nodes in workflow, config not found: " + e.getMessage();
        getLogger().debug(error, e);
        loadResult.addError(error);
        setDirtyAfterLoad();
        setNeedsResetAfterLoad();
        // stop loading here
        return;
    }
    // ids of nodes that failed to load. Used to suppress superfluous errors when reading the connections
    Set<Integer> failingNodeIDSet = new HashSet<Integer>();
    // ids of nodes whose factory can't be loaded (e.g. node extension not installed)
    Map<Integer, NodeFactoryUnknownException> missingNodeIDMap = new HashMap<Integer, NodeFactoryUnknownException>();
    exec.setMessage("node information");
    final ReferencedFile workflowDirRef = workflowKNIMEFile.getParent();
    /* Load nodes */
    for (String nodeKey : nodes.keySet()) {
        exec.checkCanceled();
        NodeSettingsRO nodeSetting;
        try {
            nodeSetting = nodes.getNodeSettings(nodeKey);
        } catch (InvalidSettingsException e) {
            String error = "Unable to load settings for node with internal " + "id \"" + nodeKey + "\": " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            continue;
        }
        if (shouldSkipThisNode(nodeSetting)) {
            continue;
        }
        int nodeIDSuffix;
        try {
            nodeIDSuffix = loadNodeIDSuffix(nodeSetting);
        } catch (InvalidSettingsException e) {
            nodeIDSuffix = getRandomNodeID();
            String error = "Unable to load node ID (internal id \"" + nodeKey + "\"), trying random number " + nodeIDSuffix + "instead: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
        }
        NodeType nodeType;
        try {
            nodeType = loadNodeType(nodeSetting);
        } catch (InvalidSettingsException e) {
            String error = "Can't retrieve node type for contained node with id suffix " + nodeIDSuffix + ", attempting to read ordinary (native) node: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            nodeType = NodeType.NativeNode;
        }
        NodeUIInformation nodeUIInfo = null;
        String uiInfoClassName;
        try {
            uiInfoClassName = loadUIInfoClassName(nodeSetting);
        } catch (InvalidSettingsException e) {
            String error = "Unable to load UI information class name " + "to node with ID suffix " + nodeIDSuffix + ", no UI information available: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            uiInfoClassName = null;
        }
        if (uiInfoClassName != null) {
            try {
                // load node ui info
                nodeUIInfo = loadNodeUIInformation(nodeSetting);
            } catch (InvalidSettingsException e) {
                String error = "Unable to load UI information to " + "node with ID suffix " + nodeIDSuffix + ", no UI information available: " + e.getMessage();
                getLogger().debug(error, e);
                setDirtyAfterLoad();
                loadResult.addError(error);
            }
        }
        ReferencedFile nodeFile;
        try {
            nodeFile = loadNodeFile(nodeSetting, workflowDirRef);
        } catch (InvalidSettingsException e) {
            String error = "Unable to load settings for node " + "with ID suffix " + nodeIDSuffix + ": " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            failingNodeIDSet.add(nodeIDSuffix);
            continue;
        }
        FromFileNodeContainerPersistor persistor;
        switch(nodeType) {
            case MetaNode:
                persistor = createWorkflowPersistorLoad(nodeFile);
                break;
            case NativeNode:
                persistor = createNativeNodeContainerPersistorLoad(nodeFile);
                break;
            case SubNode:
                persistor = createSubNodeContainerPersistorLoad(nodeFile);
                break;
            default:
                throw new IllegalStateException("Unknown node type: " + nodeType);
        }
        try {
            LoadResult childResult = new LoadResult(nodeType.toString() + " with ID suffix " + nodeIDSuffix);
            persistor.preLoadNodeContainer(this, nodeSetting, childResult);
            loadResult.addChildError(childResult);
        } catch (Throwable e) {
            String error = "Unable to load node with ID suffix " + nodeIDSuffix + " into workflow, skipping it: " + e.getMessage();
            String loadErrorString;
            if (e instanceof NodeFactoryUnknownException) {
                loadErrorString = e.getMessage();
            } else {
                loadErrorString = error;
            }
            if (e instanceof InvalidSettingsException || e instanceof IOException || e instanceof NodeFactoryUnknownException) {
                getLogger().debug(error, e);
            } else {
                getLogger().error(error, e);
            }
            loadResult.addError(loadErrorString);
            if (e instanceof NodeFactoryUnknownException) {
                missingNodeIDMap.put(nodeIDSuffix, (NodeFactoryUnknownException) e);
            // don't set dirty
            } else {
                setDirtyAfterLoad();
                failingNodeIDSet.add(nodeIDSuffix);
                // node directory is the parent of the settings.xml
                m_obsoleteNodeDirectories.add(nodeFile.getParent());
                continue;
            }
        }
        NodeContainerMetaPersistor meta = persistor.getMetaPersistor();
        if (m_nodeContainerLoaderMap.containsKey(nodeIDSuffix)) {
            int randomID = getRandomNodeID();
            setDirtyAfterLoad();
            loadResult.addError("Duplicate id encountered in workflow: " + nodeIDSuffix + ", uniquifying to random id " + randomID + ", this possibly screws the connections");
            nodeIDSuffix = randomID;
        }
        meta.setNodeIDSuffix(nodeIDSuffix);
        meta.setUIInfo(nodeUIInfo);
        if (persistor.isDirtyAfterLoad()) {
            setDirtyAfterLoad();
        }
        m_nodeContainerLoaderMap.put(nodeIDSuffix, persistor);
    }
    /* read connections */
    exec.setMessage("connection information");
    NodeSettingsRO connections;
    try {
        connections = loadSettingsForConnections(m_workflowSett);
        if (connections == null) {
            connections = EMPTY_SETTINGS;
        }
    } catch (InvalidSettingsException e) {
        String error = "Can't load workflow connections, config not found: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        connections = EMPTY_SETTINGS;
    }
    for (String connectionKey : connections.keySet()) {
        exec.checkCanceled();
        ConnectionContainerTemplate c;
        try {
            c = loadConnection(connections.getNodeSettings(connectionKey));
        } catch (InvalidSettingsException e) {
            String error = "Can't load connection with internal ID \"" + connectionKey + "\": " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            continue;
        }
        int sourceIDSuffix = c.getSourceSuffix();
        NodeContainerPersistor sourceNodePersistor = m_nodeContainerLoaderMap.get(sourceIDSuffix);
        if (sourceNodePersistor == null && sourceIDSuffix != -1) {
            setDirtyAfterLoad();
            if (!failingNodeIDSet.contains(sourceIDSuffix)) {
                loadResult.addError("Unable to load node connection " + c + ", source node does not exist");
            }
            continue;
        }
        fixSourcePortIfNecessary(sourceNodePersistor, c);
        int destIDSuffix = c.getDestSuffix();
        NodeContainerPersistor destNodePersistor = m_nodeContainerLoaderMap.get(destIDSuffix);
        if (destNodePersistor == null && destIDSuffix != -1) {
            setDirtyAfterLoad();
            if (!failingNodeIDSet.contains(destIDSuffix)) {
                loadResult.addError("Unable to load node connection " + c + ", destination node does not exist");
            }
            continue;
        }
        fixDestPortIfNecessary(destNodePersistor, c);
        if (!m_connectionSet.add(c)) {
            setDirtyAfterLoad();
            loadResult.addError("Duplicate connection information: " + c);
        }
    }
    for (Map.Entry<Integer, NodeFactoryUnknownException> missingNode : missingNodeIDMap.entrySet()) {
        exec.checkCanceled();
        int missingNodeSuffix = missingNode.getKey();
        NodeAndBundleInformation nodeInfo = missingNode.getValue().getNodeAndBundleInformation();
        loadResult.addMissingNode(nodeInfo);
        NodeSettingsRO additionalFactorySettings = missingNode.getValue().getAdditionalFactorySettings();
        ArrayList<PersistorWithPortIndex> upstreamNodes = new ArrayList<PersistorWithPortIndex>();
        ArrayList<List<PersistorWithPortIndex>> downstreamNodes = new ArrayList<List<PersistorWithPortIndex>>();
        for (ConnectionContainerTemplate t : m_connectionSet) {
            // check upstream nodes
            int sourceSuffix = t.getSourceSuffix();
            int destSuffix = t.getDestSuffix();
            int sourcePort = t.getSourcePort();
            int destPort = t.getDestPort();
            if (destSuffix == missingNodeSuffix) {
                FromFileNodeContainerPersistor persistor;
                if (sourceSuffix == -1) {
                    // connected to this metanode's input port bar
                    persistor = this;
                } else {
                    persistor = m_nodeContainerLoaderMap.get(sourceSuffix);
                }
                ensureArrayListIndexValid(upstreamNodes, destPort);
                upstreamNodes.set(destPort, new PersistorWithPortIndex(persistor, sourcePort));
            }
            // check downstream nodes
            if (sourceSuffix == missingNodeSuffix) {
                FromFileNodeContainerPersistor persistor;
                if (destSuffix == -1) {
                    // connect to this metanode's output port bar
                    persistor = this;
                } else {
                    persistor = m_nodeContainerLoaderMap.get(destSuffix);
                }
                ensureArrayListIndexValid(downstreamNodes, sourcePort);
                List<PersistorWithPortIndex> downstreamNodesAtPort = downstreamNodes.get(sourcePort);
                if (downstreamNodesAtPort == null) {
                    downstreamNodesAtPort = new ArrayList<PersistorWithPortIndex>();
                    downstreamNodes.set(sourcePort, downstreamNodesAtPort);
                }
                downstreamNodesAtPort.add(new PersistorWithPortIndex(persistor, destPort));
            }
        }
        FromFileNodeContainerPersistor failingNodePersistor = m_nodeContainerLoaderMap.get(missingNodeSuffix);
        failingNodePersistor.guessPortTypesFromConnectedNodes(nodeInfo, additionalFactorySettings, upstreamNodes, downstreamNodes);
    }
    exec.setProgress(1.0);
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ReferencedFile(org.knime.core.internal.ReferencedFile) List(java.util.List) HashSet(java.util.HashSet) NodeAndBundleInformation(org.knime.core.node.NodeAndBundleInformation) IOException(java.io.IOException) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) NodeSettingsRO(org.knime.core.node.NodeSettingsRO) Map(java.util.Map) TreeMap(java.util.TreeMap)
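
One detail that is easy to miss above is ensureArrayListIndexValid, called right before upstreamNodes.set(...) and downstreamNodes.set(...). Its implementation is not shown here; presumably it pads the list with nulls up to the requested index so that set cannot throw. A minimal sketch of that assumed behavior, in plain Java:

import java.util.ArrayList;
import java.util.List;

// Assumed behavior of a helper like ensureArrayListIndexValid: grow the list
// with null placeholders so that list.set(index, value) is always legal.
final class ListPaddingSketch {
    static <T> void ensureIndexValid(final List<T> list, final int index) {
        while (list.size() <= index) {
            list.add(null);
        }
    }

    public static void main(final String[] args) {
        List<String> upstream = new ArrayList<>();
        ensureIndexValid(upstream, 3);
        upstream.set(3, "source port 1");    // indices 0..3 exist now
        System.out.println(upstream);        // [null, null, null, source port 1]
    }
}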

Example 8 with ReferencedFile

use of org.knime.core.internal.ReferencedFile in project knime-core by knime.

the class BufferedDataTable method loadFromFile.

/**
 * Factory method to restore a table that has been written using
 * the save method.
 * @param dirRef The directory to load from.
 * @param settings The settings to load from.
 * @param exec The exec mon for progress/cancel
 * @param tblRep The table repository
 * @param bufferRep The buffer repository (needed for blobs).
 * @param fileStoreHandlerRepository ...
 * @return The table as written by save.
 * @throws IOException If reading fails.
 * @throws CanceledExecutionException If canceled.
 * @throws InvalidSettingsException If settings are invalid.
 */
static BufferedDataTable loadFromFile(final ReferencedFile dirRef, final NodeSettingsRO settings, final ExecutionMonitor exec, final Map<Integer, BufferedDataTable> tblRep, final HashMap<Integer, ContainerTable> bufferRep, final FileStoreHandlerRepository fileStoreHandlerRepository) throws IOException, CanceledExecutionException, InvalidSettingsException {
    File dir = dirRef.getFile();
    NodeSettingsRO s;
    // in version 1.1.x and before, the information was stored in
    // an external data.xml (directly in the node dir)
    boolean isVersion11x;
    File dataXML = new File(dir, TABLE_DESCRIPTION_FILE);
    // loading an exported workflow without data
    if (!dataXML.exists() && settings == null) {
        throw new IOException("No such data file: " + dataXML.getAbsolutePath());
    }
    DataTableSpec spec;
    if (dataXML.exists()) {
        // version 1.2.0 and later
        s = NodeSettings.loadFromXML(new BufferedInputStream(new FileInputStream(dataXML)));
        spec = loadSpec(dirRef);
        isVersion11x = false;
    } else {
        // version 1.1.x
        s = settings.getNodeSettings(CFG_TABLE_META);
        // needs to be read from zip file!
        spec = null;
        isVersion11x = true;
    }
    int id = s.getInt(CFG_TABLE_ID);
    LAST_ID.set(Math.max(LAST_ID.get(), id + 1));
    String fileName = s.getString(CFG_TABLE_FILE_NAME);
    ReferencedFile fileRef;
    if (fileName != null) {
        fileRef = new ReferencedFile(dirRef, fileName);
        File file = fileRef.getFile();
        if (!file.exists()) {
            throw new IOException("No such data file: " + fileRef);
        }
        if (!file.isFile() || !file.canRead()) {
            throw new IOException("Cannot read file " + fileRef);
        }
    } else {
        // for instance for a column filter node this is null.
        fileRef = null;
    }
    String tableType = s.getString(CFG_TABLE_TYPE);
    BufferedDataTable t;
    if (tableType.equals(TABLE_TYPE_REFERENCE_IN_SAME_NODE)) {
        t = tblRep.get(id);
        if (t == null) {
            throw new InvalidSettingsException("Table reference with ID " + id + " not found in load map");
        }
        return t;
    } else if (tableType.equals(TABLE_TYPE_CONTAINER)) {
        ContainerTable fromContainer;
        if (isVersion11x) {
            fromContainer = DataContainer.readFromZip(fileRef.getFile());
        } else {
            fromContainer = BufferedDataContainer.readFromZipDelayed(fileRef, spec, id, bufferRep, fileStoreHandlerRepository);
        }
        t = new BufferedDataTable(fromContainer, id);
    } else {
        String[] referenceDirs;
        // older versions stored a single reference under "table_reference"
        // (no concatenate table in those versions)
        if (s.containsKey("table_reference")) {
            String refDir = s.getString("table_reference");
            referenceDirs = refDir == null ? new String[0] : new String[] { refDir };
        } else {
            referenceDirs = s.getStringArray(CFG_TABLE_REFERENCE);
        }
        for (String reference : referenceDirs) {
            if (reference == null) {
                throw new InvalidSettingsException("Reference dir is \"null\"");
            }
            ReferencedFile referenceDirRef = new ReferencedFile(dirRef, reference);
            loadFromFile(referenceDirRef, s, exec, tblRep, bufferRep, fileStoreHandlerRepository);
        }
        if (tableType.equals(TABLE_TYPE_REARRANGE_COLUMN)) {
            t = new BufferedDataTable(new RearrangeColumnsTable(fileRef, s, tblRep, spec, id, bufferRep, fileStoreHandlerRepository));
        } else if (tableType.equals(TABLE_TYPE_JOINED)) {
            JoinedTable jt = JoinedTable.load(s, spec, tblRep);
            t = new BufferedDataTable(jt);
        } else if (tableType.equals(TABLE_TYPE_VOID)) {
            VoidTable jt = VoidTable.load(spec);
            t = new BufferedDataTable(jt);
        } else if (tableType.equals(TABLE_TYPE_CONCATENATE)) {
            ConcatenateTable ct = ConcatenateTable.load(s, spec, tblRep);
            t = new BufferedDataTable(ct);
        } else if (tableType.equals(TABLE_TYPE_WRAPPED)) {
            WrappedTable wt = WrappedTable.load(s, tblRep);
            t = new BufferedDataTable(wt);
        } else if (tableType.equals(TABLE_TYPE_NEW_SPEC)) {
            TableSpecReplacerTable replTable;
            if (isVersion11x) {
                replTable = TableSpecReplacerTable.load11x(fileRef.getFile(), s, tblRep);
            } else {
                replTable = TableSpecReplacerTable.load(s, spec, tblRep);
            }
            t = new BufferedDataTable(replTable);
        } else if (tableType.equals(TABLE_TYPE_EXTENSION)) {
            ExtensionTable et = ExtensionTable.loadExtensionTable(fileRef, spec, s, tblRep, exec);
            t = new BufferedDataTable(et);
        } else {
            throw new InvalidSettingsException("Unknown table identifier: " + tableType);
        }
    }
    t.m_tableID = id;
    tblRep.put(id, t);
    return t;
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) VoidTable(org.knime.core.data.container.VoidTable) JoinedTable(org.knime.core.data.container.JoinedTable) IOException(java.io.IOException) ReferencedFile(org.knime.core.internal.ReferencedFile) FileInputStream(java.io.FileInputStream) ContainerTable(org.knime.core.data.container.ContainerTable) WrappedTable(org.knime.core.data.container.WrappedTable) BufferedInputStream(java.io.BufferedInputStream) RearrangeColumnsTable(org.knime.core.data.container.RearrangeColumnsTable) ConcatenateTable(org.knime.core.data.container.ConcatenateTable) File(java.io.File) TableSpecReplacerTable(org.knime.core.data.container.TableSpecReplacerTable)
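
loadFromFile resolves every file it touches relative to the directory reference it was given: the table file via new ReferencedFile(dirRef, fileName) and each referenced table via new ReferencedFile(dirRef, reference). A small sketch of that hierarchical resolution, with hypothetical paths; only the constructors and accessors already used in the snippets are assumed.

import java.io.File;

import org.knime.core.internal.ReferencedFile;

// Sketch only: child ReferencedFiles resolve against their parent reference.
public final class ReferencedFileResolutionSketch {
    public static void main(final String[] args) {
        ReferencedFile nodeDirRef = new ReferencedFile(new File("/tmp/workflow/Some Node (#3)"));
        ReferencedFile dataDirRef = new ReferencedFile(nodeDirRef, "data");
        ReferencedFile tableRef = new ReferencedFile(dataDirRef, "data_0.zip");

        File tableFile = tableRef.getFile();
        System.out.println(tableFile.getPath());   // .../Some Node (#3)/data/data_0.zip
        System.out.println(tableRef.getParent());  // the "data" directory reference
    }
}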

Example 9 with ReferencedFile

use of org.knime.core.internal.ReferencedFile in project knime-core by knime.

the class Node method createNodeExecutionResult.

/**
 * Creates an execution result containing all calculated values in an
 * execution. The returned value is suitable to be used in
 * {@link #loadDataAndInternals(
 * NodeContentPersistor, ExecutionMonitor, LoadResult)}.
 * If this node is not executed, it will assign null values to the fields
 * in the returned execution result.
 * @param exec For progress information.
 * @return A new execution result containing the values being calculated.
 * @throws CanceledExecutionException If canceled
 */
public NodeExecutionResult createNodeExecutionResult(final ExecutionMonitor exec) throws CanceledExecutionException {
    NodeExecutionResult result = new NodeExecutionResult();
    result.setWarningMessage(m_model.getWarningMessage());
    if (hasContent()) {
        File internTempDir;
        try {
            internTempDir = FileUtil.createTempDir("knime_node_internDir");
            exec.setMessage("Saving internals");
            saveInternals(internTempDir, exec.createSubProgress(0.0));
            result.setNodeInternDir(new ReferencedFile(internTempDir));
        } catch (IOException ioe) {
            LOGGER.error("Unable to save internals", ioe);
        }
    }
    if (m_internalHeldPortObjects != null) {
        PortObject[] internalHeldPortObjects = Arrays.copyOf(m_internalHeldPortObjects, m_internalHeldPortObjects.length);
        result.setInternalHeldPortObjects(internalHeldPortObjects);
    }
    PortObject[] pos = new PortObject[getNrOutPorts()];
    PortObjectSpec[] poSpecs = new PortObjectSpec[getNrOutPorts()];
    for (int i = 0; i < pos.length; i++) {
        PortObject po = getOutputObject(i);
        if (po != null) {
            pos[i] = po;
            poSpecs[i] = po.getSpec();
        }
    }
    result.setPortObjects(pos);
    result.setPortObjectSpecs(poSpecs);
    // Add the outgoing flow variables to the execution result
    FlowObjectStack outgoingStack = m_model.getOutgoingFlowObjectStack();
    List<FlowVariable> nodeFlowVars = outgoingStack.getAvailableFlowVariables().values().stream().filter(f -> f.getScope().equals(FlowVariable.Scope.Flow)).collect(Collectors.toList());
    // the bottom most element should remain at the bottom of the stack
    Collections.reverse(nodeFlowVars);
    result.setFlowVariables(nodeFlowVars);
    return result;
}
Also used : ReferencedFile(org.knime.core.internal.ReferencedFile) File(java.io.File) IOException(java.io.IOException) FileUtil(org.knime.core.util.FileUtil) NodeExecutionResult(org.knime.core.node.workflow.execresult.NodeExecutionResult) PortObject(org.knime.core.node.port.PortObject) PortObjectSpec(org.knime.core.node.port.PortObjectSpec) FlowObjectStack(org.knime.core.node.workflow.FlowObjectStack) FlowVariable(org.knime.core.node.workflow.FlowVariable) Arrays(java.util.Arrays) Collections(java.util.Collections) Collectors(java.util.stream.Collectors) List(java.util.List)
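
The hasContent() branch above shows the usual way a ReferencedFile enters the picture on the write side: internals are saved into a fresh temp directory, which is then wrapped in a ReferencedFile and stored in the execution result. A reduced sketch of just that step; the temp-dir prefix and the write step are placeholders.

import java.io.File;
import java.io.IOException;

import org.knime.core.internal.ReferencedFile;
import org.knime.core.util.FileUtil;

// Sketch only: create a temp dir, fill it, and hand it on as a ReferencedFile.
public final class TempDirAsReferencedFileSketch {
    public static ReferencedFile saveInternalsToTempDir() throws IOException {
        File internTempDir = FileUtil.createTempDir("knime_sketch_internDir");
        // ... write the node's internal files into internTempDir here ...
        return new ReferencedFile(internTempDir);
    }
}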

Example 10 with ReferencedFile

use of org.knime.core.internal.ReferencedFile in project knime-core by knime.

the class FileNodePersistor method loadPortObjectSpec.

private PortObjectSpec loadPortObjectSpec(final Node node, final NodeSettingsRO settings, final int index) throws InvalidSettingsException, IOException {
    int newIndex = getNewPortIndex(index);
    PortType type = node.getOutputType(newIndex);
    boolean isDataPort = BufferedDataTable.class.isAssignableFrom(type.getPortObjectClass());
    if (!isDataPort) {
        // port is a model port, no spec available in 1.x.x
        return null;
    }
    // in 1.1.x and before the settings.xml contained the location
    // of the data table specs file (spec_0.xml, e.g.). From 1.2.0 on,
    // the spec is saved in data/data_0/spec.xml
    boolean isVersion11x = settings.containsKey(CFG_SPEC_FILES);
    ReferencedFile nodeDirectory = getNodeDirectory();
    if (isVersion11x) {
        NodeSettingsRO spec = settings.getNodeSettings(CFG_SPEC_FILES);
        String specName = spec.getString(CFG_OUTPUT_PREFIX + index);
        ReferencedFile targetFileRef = new ReferencedFile(nodeDirectory, specName);
        File targetFile = targetFileRef.getFile();
        DataTableSpec outSpec = null;
        if (targetFile.exists()) {
            NodeSettingsRO settingsSpec = NodeSettings.loadFromXML(new BufferedInputStream(new FileInputStream(targetFile)));
            outSpec = DataTableSpec.load(settingsSpec);
        }
        return outSpec;
    } else {
        NodeSettingsRO dataSettings = settings.getNodeSettings(CFG_DATA_FILE);
        String dataDirStr = dataSettings.getString(CFG_DATA_FILE_DIR);
        ReferencedFile dataDirRef = new ReferencedFile(nodeDirectory, dataDirStr);
        NodeSettingsRO portSettings = dataSettings.getNodeSettings(CFG_OUTPUT_PREFIX + index);
        String dataName = portSettings.getString(CFG_DATA_FILE_DIR);
        DataTableSpec outSpec = null;
        if (portSettings.getBoolean(CFG_HAS_SPEC_FILE, true)) {
            ReferencedFile dirRef = new ReferencedFile(dataDirRef, dataName);
            File dir = dirRef.getFile();
            readDirectory(dir);
            outSpec = BufferedDataTable.loadSpec(dirRef);
            if (portSettings.containsKey(CFG_HAS_SPEC_FILE) && outSpec == null) {
                throw new IOException("No spec file available for" + " outport " + index + ".");
            }
        }
        return outSpec;
    }
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) BufferedInputStream(java.io.BufferedInputStream) IOException(java.io.IOException) ReferencedFile(org.knime.core.internal.ReferencedFile) File(java.io.File) FileInputStream(java.io.FileInputStream) PortType(org.knime.core.node.port.PortType)
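
For the 1.1.x branch the spec lives in its own XML file next to the node's settings.xml. A minimal sketch of just that read path, using the same calls as above (NodeSettings.loadFromXML and DataTableSpec.load); the file argument is hypothetical.

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

import org.knime.core.data.DataTableSpec;
import org.knime.core.node.InvalidSettingsException;
import org.knime.core.node.NodeSettings;
import org.knime.core.node.NodeSettingsRO;

// Sketch only: read a spec_<port>.xml file into NodeSettings and rebuild the spec.
public final class SpecXmlLoadSketch {
    static DataTableSpec loadSpec(final File specFile) throws IOException, InvalidSettingsException {
        try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(specFile))) {
            NodeSettingsRO settings = NodeSettings.loadFromXML(in);
            return DataTableSpec.load(settings);
        }
    }
}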

Aggregations

ReferencedFile (org.knime.core.internal.ReferencedFile): 46
File (java.io.File): 29
IOException (java.io.IOException): 22
BufferedInputStream (java.io.BufferedInputStream): 11
FileInputStream (java.io.FileInputStream): 11
InvalidSettingsException (org.knime.core.node.InvalidSettingsException): 8
PortObject (org.knime.core.node.port.PortObject): 8
FlowVariablePortObject (org.knime.core.node.port.flowvariable.FlowVariablePortObject): 8
FileStorePortObject (org.knime.core.data.filestore.FileStorePortObject): 7
InactiveBranchPortObject (org.knime.core.node.port.inactive.InactiveBranchPortObject): 7
HashMap (java.util.HashMap): 6
NodeSettingsRO (org.knime.core.node.NodeSettingsRO): 6
FileOutputStream (java.io.FileOutputStream): 5
Map (java.util.Map): 5
ContainerTable (org.knime.core.data.container.ContainerTable): 5
PortObjectSpec (org.knime.core.node.port.PortObjectSpec): 5
FlowVariablePortObjectSpec (org.knime.core.node.port.flowvariable.FlowVariablePortObjectSpec): 5
InactiveBranchPortObjectSpec (org.knime.core.node.port.inactive.InactiveBranchPortObjectSpec): 5
InputStream (java.io.InputStream): 4
ArrayList (java.util.ArrayList): 4