
Example 11 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class SandboxedNodeCreator method createSandbox.

/**
 * Creates the temporary mini workflow that is executed remotely on the cluster/stream executor.
 * The returned value should be closed via {@link SandboxedNode#close()} when done (using try-with-resources). After this
 * method is called no other set-method should be called.
 *
 * @param exec for progress/cancellation
 * @return a {@link SandboxedNode} holding the temporary mini workflow and the ID of the node to execute in it
 * @throws InvalidSettingsException
 * @throws IOException
 * @throws CanceledExecutionException
 * @throws LockFailedException
 * @throws InterruptedException
 */
public SandboxedNode createSandbox(final ExecutionMonitor exec) throws InvalidSettingsException, IOException, CanceledExecutionException, LockFailedException, InterruptedException {
    exec.setMessage("Creating virtual workflow");
    final WorkflowManager parent = m_nc.getParent();
    // derive the workflow context via NodeContext, since the parent might be just a metanode nested inside another metanode...
    final WorkflowContext origContext = NodeContext.getContext().getWorkflowManager().getContext();
    WorkflowContext.Factory ctxFactory;
    // keep the original location information so that workflow-relative URLs still resolve (specifically reading knime://knime.workflow files)
    if (!m_copyDataIntoNewContext) {
        ctxFactory = new WorkflowContext.Factory(origContext);
        if (m_localWorkflowDir != null) {
            ctxFactory.setOriginalLocation(origContext.getCurrentLocation()).setCurrentLocation(m_localWorkflowDir);
        }
    } else if (m_localWorkflowDir != null) {
        ctxFactory = new WorkflowContext.Factory(m_localWorkflowDir);
    } else {
        ctxFactory = new WorkflowContext.Factory(FileUtil.createTempDir("sandbox-" + m_nc.getNameWithID()));
    }
    // We have to use the same location for the temporary files
    ctxFactory.setTempLocation(origContext.getTempLocation());
    origContext.getMountpointURI().ifPresent(u -> ctxFactory.setMountpointURI(u));
    WorkflowCreationHelper creationHelper = new WorkflowCreationHelper();
    creationHelper.setWorkflowContext(ctxFactory.createContext());
    if (!m_copyDataIntoNewContext) {
        creationHelper.setDataHandlers(parent.getGlobalTableRepository(), parent.getFileStoreHandlerRepository());
    }
    WorkflowManager tempWFM = m_rootWFM.createAndAddProject("Sandbox Exec on " + m_nc.getNameWithID(), creationHelper);
    // Add the workflow variables
    List<FlowVariable> workflowVariables = parent.getProjectWFM().getWorkflowVariables();
    tempWFM.addWorkflowVariables(true, workflowVariables.toArray(new FlowVariable[workflowVariables.size()]));
    // update credentials store of the workflow
    CredentialsStore cs = tempWFM.getCredentialsStore();
    workflowVariables.stream().filter(f -> f.getType().equals(FlowVariable.Type.CREDENTIALS)).filter(f -> !cs.contains(f.getName())).forEach(cs::addFromFlowVariable);
    final int inCnt = m_inData.length;
    // port object IDs in static port object map, one entry for
    // each connected input (no value for unconnected optional inputs)
    List<Integer> portObjectRepositoryIDs = new ArrayList<Integer>(inCnt);
    try {
        NodeID[] ins = new NodeID[inCnt];
        for (int i = 0; i < inCnt; i++) {
            final PortObject in = m_inData[i];
            final NodeInPort inPort = m_nc.getInPort(i);
            final PortType portType = inPort.getPortType();
            if (in == null) {
                // unconnected optional input
                CheckUtils.checkState(portType.isOptional(), "No data at port %d, although port is mandatory (port type %s)", i, portType.getName());
                continue;
            }
            int portObjectRepositoryID = PortObjectRepository.add(in);
            portObjectRepositoryIDs.add(portObjectRepositoryID);
            boolean isTable = BufferedDataTable.TYPE.equals(portType);
            NodeID inID = tempWFM.createAndAddNode(isTable ? TABLE_READ_NODE_FACTORY : OBJECT_READ_NODE_FACTORY);
            NodeSettings s = new NodeSettings("temp_data_in");
            tempWFM.saveNodeSettings(inID, s);
            List<FlowVariable> flowVars = getFlowVariablesOnPort(i);
            PortObjectInNodeModel.setInputNodeSettings(s, portObjectRepositoryID, flowVars, m_copyDataIntoNewContext);
            // update credentials store of the workflow
            flowVars.stream().filter(f -> f.getType().equals(FlowVariable.Type.CREDENTIALS)).filter(f -> !cs.contains(f.getName())).forEach(cs::addFromFlowVariable);
            tempWFM.loadNodeSettings(inID, s);
            ins[i] = inID;
        }
        // execute the input port-object nodes so that they hold the input data
        if (ins.length > 0 && !tempWFM.executeAllAndWaitUntilDoneInterruptibly()) {
            String error = "Unable to execute virtual workflow, status sent to log facilities";
            LOGGER.debug(error + ":");
            LOGGER.debug(tempWFM.toString());
            throw new RuntimeException(error);
        }
        // add the target node to the workflow
        WorkflowCopyContent.Builder content = WorkflowCopyContent.builder();
        content.setNodeIDs(m_nc.getID());
        final NodeID targetNodeID = tempWFM.copyFromAndPasteHere(parent, content.build()).getNodeIDs()[0];
        NodeContainer targetNode = tempWFM.getNodeContainer(targetNodeID);
        // connect target node to inPort object nodes, skipping unconnected (optional) inputs
        IntStream.range(0, inCnt).filter(i -> ins[i] != null).forEach(i -> tempWFM.addConnection(ins[i], 1, targetNodeID, i));
        if (m_forwardConnectionProgressEvents) {
            setupConnectionProgressEventListeners(m_nc, targetNode);
        }
        // copy the existing tables into the (meta) node (e.g. an executed file reader that's necessary
        // for other nodes to execute)
        exec.setMessage("Copying tables into temp flow");
        NodeContainerExecutionResult origResult = m_nc.createExecutionResult(exec);
        ExecutionMonitor copyExec = exec.createSubProgress(0.0);
        copyExistingTablesIntoSandboxContainer(origResult, m_nc, targetNode, copyExec, m_copyDataIntoNewContext);
        CopyContentIntoTempFlowNodeExecutionJobManager copyDataIntoTmpFlow = new CopyContentIntoTempFlowNodeExecutionJobManager(origResult);
        NodeExecutionJobManager oldJobManager = targetNode.getJobManager();
        tempWFM.setJobManager(targetNodeID, copyDataIntoTmpFlow);
        tempWFM.executeAllAndWaitUntilDoneInterruptibly();
        tempWFM.setJobManager(targetNodeID, oldJobManager);
        // do not use the cluster executor on the cluster...
        tempWFM.setJobManager(targetNodeID, NodeExecutionJobManagerPool.getDefaultJobManagerFactory().getInstance());
        if (!m_copyDataIntoNewContext) {
            copyFileStoreHandlerReference(targetNode, parent, false);
        }
        // save workflow in the local job dir
        if (m_localWorkflowDir != null) {
            tempWFM.save(m_localWorkflowDir, exec, true);
            deepCopyFilesInWorkflowDir(m_nc, tempWFM);
        }
        return new SandboxedNode(tempWFM, targetNodeID);
    } finally {
        portObjectRepositoryIDs.stream().forEach(PortObjectRepository::remove);
    }
}
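
A minimal caller sketch for the contract described in the javadoc above; only createSandbox(ExecutionMonitor) and the try-with-resources use of SandboxedNode#close() are taken from this snippet, while the helper method, its throws clause, and the assumption that close() adds no checked exceptions are illustrative.

// Hypothetical helper showing the intended call pattern; the creator is
// assumed to be fully configured elsewhere (node, input data, target dir).
static void runInSandbox(final SandboxedNodeCreator creator, final ExecutionMonitor exec)
        throws InvalidSettingsException, IOException, CanceledExecutionException, LockFailedException, InterruptedException {
    try (SandboxedNode sandbox = creator.createSandbox(exec)) {
        // hand the temporary mini workflow to the remote/cluster executor here
    }
    // close() has disposed of the temporary workflow at this point
}
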
Also used : InvalidSettingsException(org.knime.core.node.InvalidSettingsException) ReferencedFile(org.knime.core.internal.ReferencedFile) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) WorkflowContext(org.knime.core.node.workflow.WorkflowContext) NativeNodeContainerExecutionResult(org.knime.core.node.workflow.execresult.NativeNodeContainerExecutionResult) NodeContainer(org.knime.core.node.workflow.NodeContainer) Map(java.util.Map) PortObjectInNodeModel(org.knime.core.node.exec.dataexchange.in.PortObjectInNodeModel) PortObjectInNodeFactory(org.knime.core.node.exec.dataexchange.in.PortObjectInNodeFactory) PortType(org.knime.core.node.port.PortType) LockFailedException(org.knime.core.util.LockFailedException) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) Collection(java.util.Collection) ConnectionID(org.knime.core.node.workflow.ConnectionID) WorkflowCreationHelper(org.knime.core.node.workflow.WorkflowCreationHelper) Collectors(java.util.stream.Collectors) ConnectionContainer(org.knime.core.node.workflow.ConnectionContainer) List(java.util.List) BufferedDataTable(org.knime.core.node.BufferedDataTable) Stream(java.util.stream.Stream) Optional(java.util.Optional) CredentialsStore(org.knime.core.node.workflow.CredentialsStore) CheckUtils(org.knime.core.node.util.CheckUtils) PortObject(org.knime.core.node.port.PortObject) NodeContainerExecutionResult(org.knime.core.node.workflow.execresult.NodeContainerExecutionResult) IntStream(java.util.stream.IntStream) NativeNodeContainer(org.knime.core.node.workflow.NativeNodeContainer) NodeExecutionJobManagerPool(org.knime.core.node.util.NodeExecutionJobManagerPool) FlowVariable(org.knime.core.node.workflow.FlowVariable) NodeSettings(org.knime.core.node.NodeSettings) BDTInNodeFactory(org.knime.core.node.exec.dataexchange.in.BDTInNodeFactory) ArrayList(java.util.ArrayList) SubNodeContainer(org.knime.core.node.workflow.SubNodeContainer) ExecutionContext(org.knime.core.node.ExecutionContext) NodeExecutionJobManager(org.knime.core.node.workflow.NodeExecutionJobManager) NodeLogger(org.knime.core.node.NodeLogger) WorkflowExecutionResult(org.knime.core.node.workflow.execresult.WorkflowExecutionResult) NodeFactory(org.knime.core.node.NodeFactory) NodeInPort(org.knime.core.node.workflow.NodeInPort) WorkflowManager(org.knime.core.node.workflow.WorkflowManager) IOException(java.io.IOException) FileUtils(org.apache.commons.io.FileUtils) NodeContext(org.knime.core.node.workflow.NodeContext) WorkflowCopyContent(org.knime.core.node.workflow.WorkflowCopyContent) File(java.io.File) PortObjectRepository(org.knime.core.node.exec.dataexchange.PortObjectRepository) NodeExecutionResult(org.knime.core.node.workflow.execresult.NodeExecutionResult) NodeID(org.knime.core.node.workflow.NodeID) FileUtil(org.knime.core.util.FileUtil) IFileStoreHandler(org.knime.core.data.filestore.internal.IFileStoreHandler) SubnodeContainerExecutionResult(org.knime.core.node.workflow.execresult.SubnodeContainerExecutionResult) Collections(java.util.Collections) SingleNodeContainer(org.knime.core.node.workflow.SingleNodeContainer)

Example 12 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class PortObjectRepository method copy.

/**
 * Copies the argument object by means of the associated serializer.
 * @param object The port object to be copied.
 * @param exec Execution context in which the copied BufferedDataTables are created
 * @param progress For progress/cancellation
 * @return The deep copy.
 * @throws IOException In case of exceptions while accessing the streams
 * @throws CanceledExecutionException If canceled.
 */
public static final PortObject copy(final PortObject object, final ExecutionContext exec, final ExecutionMonitor progress) throws IOException, CanceledExecutionException {
    if (object instanceof BufferedDataTable) {
        // need to copy the table cell by cell; this works around the standard
        // KNIME philosophy according to which tables are merely referenced.
        // A row-based copy will not work as it would still reference blobs.
        BufferedDataTable in = (BufferedDataTable) object;
        BufferedDataContainer con = exec.createDataContainer(in.getSpec(), true, 0);
        final long rowCount = in.size();
        long row = 0;
        boolean hasLoggedCloneProblem = false;
        for (DataRow r : in) {
            DataCell[] cells = new DataCell[r.getNumCells()];
            for (int i = 0; i < cells.length; i++) {
                // deserialize blob
                DataCell c = r.getCell(i);
                if (c instanceof BlobDataCell) {
                    try {
                        c = cloneBlobCell(c);
                    } catch (Exception e) {
                        if (!hasLoggedCloneProblem) {
                            LOGGER.warn("Can't clone blob object: " + e.getMessage(), e);
                            hasLoggedCloneProblem = true;
                            LOGGER.debug("Suppressing futher warnings.");
                        }
                    }
                }
                cells[i] = c;
            }
            con.addRowToTable(new DefaultRow(r.getKey(), cells));
            progress.setProgress(row / (double) rowCount, "Copied row " + row + "/" + rowCount);
            progress.checkCanceled();
            row++;
        }
        con.close();
        return con.getTable();
    }
    return Node.copyPortObject(object, exec);
}
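
A short hypothetical helper showing how copy(...) could be used to detach a table-typed port object from its originating workflow; only the copy(PortObject, ExecutionContext, ExecutionMonitor) signature and createSubProgress(...) appear in the snippets on this page, the wrapper itself is an assumption.

// Hypothetical wrapper: deep-copies a port object so the result no longer
// references blobs owned by the original workflow.
static PortObject detach(final PortObject po, final ExecutionContext exec)
        throws IOException, CanceledExecutionException {
    // a sub-progress keeps the "Copied row x/y" messages scoped to this step
    return PortObjectRepository.copy(po, exec, exec.createSubProgress(1.0));
}
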
Also used : BlobDataCell(org.knime.core.data.container.BlobDataCell) BufferedDataContainer(org.knime.core.node.BufferedDataContainer) BufferedDataTable(org.knime.core.node.BufferedDataTable) DataCell(org.knime.core.data.DataCell) DefaultRow(org.knime.core.data.def.DefaultRow) DataRow(org.knime.core.data.DataRow) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) IOException(java.io.IOException)

Example 13 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class AbstractSimplePortObject method equals.

/**
 * Compares both objects by first saving them into <code>ModelContent</code>
 * via {@link #save(ModelContentWO, ExecutionMonitor)} and then comparing the
 * saved content. Override this method to compare both objects more efficiently.
 *
 * {@inheritDoc}
 */
@Override
public boolean equals(final Object oport) {
    if (oport == this) {
        return true;
    }
    if (oport == null) {
        return false;
    }
    if (!this.getClass().equals(oport.getClass())) {
        return false;
    }
    ModelContent tcont = new ModelContent("ignored");
    ModelContent ocont = new ModelContent("ignored");
    try {
        this.save(tcont, new ExecutionMonitor());
        ((AbstractSimplePortObject) oport).save(ocont, new ExecutionMonitor());
    } catch (CanceledExecutionException cee) {
        // ignored, should not happen
    }
    return tcont.equals(ocont);
}
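
The comparison is therefore only as precise as the subclass's save(...) implementation; a sketch of such a hook, with purely illustrative field names:

// Illustrative subclass hook that equals() relies on: both objects are
// serialized into ModelContent and the serialized forms are compared.
@Override
protected void save(final ModelContentWO model, final ExecutionMonitor exec)
        throws CanceledExecutionException {
    model.addString("column", m_column);       // hypothetical field
    model.addDouble("threshold", m_threshold); // hypothetical field
}
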
Also used : CanceledExecutionException(org.knime.core.node.CanceledExecutionException) ModelContent(org.knime.core.node.ModelContent) ExecutionMonitor(org.knime.core.node.ExecutionMonitor)

Example 14 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class PMMLPreprocPortObject method load.

/**
 * {@inheritDoc}
 */
@Override
protected void load(final PortObjectZipInputStream in, final PortObjectSpec spec, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    ZipEntry entry;
    while ((entry = in.getNextEntry()) != null) {
        String clazzName = entry.getName();
        Class<?> clazz;
        try {
            clazz = Class.forName(clazzName);
            if (!PMMLPreprocOperation.class.isAssignableFrom(clazz)) {
                // reject classes that do not represent preprocessing operations
                throw new IllegalArgumentException("Class " + clazz.getName() + " must extend PMMLPreprocOperation! " + "Loading failed!");
            }
            PMMLPreprocOperation op = (PMMLPreprocOperation) clazz.newInstance();
            SAXParserFactory fac = SAXParserFactory.newInstance();
            SAXParser parser = fac.newSAXParser();
            parser.parse(new NonClosableInputStream(in), op.getHandlerForLoad());
            m_operations.add(op);
        } catch (Exception e) {
            throw new IOException(e);
        }
        in.closeEntry();
    }
    m_spec = (PMMLPreprocPortObjectSpec) spec;
}
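
Note that Class.newInstance() as used above has been deprecated since Java 9; a hedged sketch of the usual replacement (constructor exceptions then arrive wrapped in InvocationTargetException, which the surrounding catch (Exception e) already covers):

// Equivalent instantiation without the deprecated Class.newInstance();
// still requires a public no-arg constructor on the operation class.
PMMLPreprocOperation op =
        (PMMLPreprocOperation) clazz.getDeclaredConstructor().newInstance();
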
Also used : NonClosableInputStream(org.knime.core.data.util.NonClosableInputStream) ZipEntry(java.util.zip.ZipEntry) SAXParser(javax.xml.parsers.SAXParser) IOException(java.io.IOException) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) TransformerConfigurationException(javax.xml.transform.TransformerConfigurationException) SAXException(org.xml.sax.SAXException) SAXParserFactory(javax.xml.parsers.SAXParserFactory)

Example 15 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class CancellableReportingInputStream method beforeRead.

@Override
protected void beforeRead(final int n) throws IOException {
    super.beforeRead(n);
    try {
        m_exec.checkCanceled();
        Thread currentThread = Thread.currentThread();
        if (currentThread.isInterrupted()) {
            currentThread.interrupt();
            throw new EOFException("Reading interrupted.");
        }
    } catch (final CanceledExecutionException e) {
        throw new EOFException("Reading has been cancelled");
    }
}
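
The same cooperative-cancellation idea can be sketched as a standalone wrapper; the use of Commons IO's ProxyInputStream and its beforeRead(int) hook is an assumption about what the KNIME class builds on, only checkCanceled() and the translation to EOFException come from the snippet above.

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.io.input.ProxyInputStream;
import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;

// Standalone sketch: turns a user cancellation into an EOF so that ordinary
// stream consumers stop reading without knowing about KNIME exceptions.
final class CancellableStream extends ProxyInputStream {

    private final ExecutionMonitor m_exec;

    CancellableStream(final InputStream in, final ExecutionMonitor exec) {
        super(in);
        m_exec = exec;
    }

    @Override
    protected void beforeRead(final int n) throws IOException {
        super.beforeRead(n);
        try {
            m_exec.checkCanceled(); // throws once the user cancels the node
        } catch (final CanceledExecutionException e) {
            throw new EOFException("Reading has been cancelled");
        }
    }
}
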
Also used : CanceledExecutionException(org.knime.core.node.CanceledExecutionException) EOFException(java.io.EOFException)

Aggregations

CanceledExecutionException (org.knime.core.node.CanceledExecutionException) 82
InvalidSettingsException (org.knime.core.node.InvalidSettingsException) 34
IOException (java.io.IOException) 32
File (java.io.File) 21
ExecutionMonitor (org.knime.core.node.ExecutionMonitor) 21
DataRow (org.knime.core.data.DataRow) 20
DataTableSpec (org.knime.core.data.DataTableSpec) 20
BufferedDataTable (org.knime.core.node.BufferedDataTable) 20
DataCell (org.knime.core.data.DataCell) 19
ArrayList (java.util.ArrayList) 11
DataColumnSpec (org.knime.core.data.DataColumnSpec) 11
BufferedDataContainer (org.knime.core.node.BufferedDataContainer) 10
LinkedHashMap (java.util.LinkedHashMap) 9
ExecutionException (java.util.concurrent.ExecutionException) 9
DefaultRow (org.knime.core.data.def.DefaultRow) 9
RowKey (org.knime.core.data.RowKey) 8
BufferedWriter (java.io.BufferedWriter) 7
FileInputStream (java.io.FileInputStream) 7
Map (java.util.Map) 7
Future (java.util.concurrent.Future) 7