Example 41 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

The class NodeExecutionJob, method internalRun.

/**
 * Runs the node execution: drives the PREEXECUTE, EXECUTING, POSTEXECUTE and
 * EXECUTED state transitions on the node container and maps cancelation,
 * thread interruption and errors to a FAILURE execution status.
 */
private void internalRun() {
    NodeContainerExecutionStatus status = null;
    // handle inactive branches -- do not delegate to custom job
    // manager (the node will just return inactive branch objects)
    boolean executeInactive = false;
    if (m_nc instanceof SingleNodeContainer) {
        SingleNodeContainer snc = (SingleNodeContainer) m_nc;
        if (!snc.isInactiveBranchConsumer() && Node.containsInactiveObjects(getPortObjects())) {
            executeInactive = true;
        }
    }
    if (!isReConnecting()) {
        try {
            // sets state PREEXECUTE
            if (!m_nc.notifyParentPreExecuteStart()) {
                // node was canceled, omit any subsequent state transitions
                return;
            }
            if (!executeInactive) {
                beforeExecute();
            }
        } catch (Throwable throwable) {
            logError(throwable);
            status = NodeContainerExecutionStatus.FAILURE;
        }
        try {
            // sets state EXECUTING
            m_nc.notifyParentExecuteStart();
        } catch (IllegalFlowObjectStackException e) {
            status = NodeContainerExecutionStatus.FAILURE;
        } catch (Throwable throwable) {
            status = NodeContainerExecutionStatus.FAILURE;
            logError(throwable);
        }
    }
    // check thread cancelation
    if (status == null) {
        if (Thread.interrupted()) {
            status = NodeContainerExecutionStatus.FAILURE;
        } else {
            try {
                m_nc.getProgressMonitor().checkCanceled();
            } catch (CanceledExecutionException cee) {
                status = NodeContainerExecutionStatus.FAILURE;
            }
        }
    }
    try {
        if (status == null) {
            NodeLogger.getLogger(m_nc.getClass());
            // start message and keep start time
            final long time = System.currentTimeMillis();
            m_logger.debug(m_nc.getNameWithID() + " Start execute");
            if (executeInactive) {
                SingleNodeContainer snc = (SingleNodeContainer) m_nc;
                status = snc.performExecuteNode(getPortObjects());
            } else {
                status = mainExecute();
            }
            if (status != null && status.isSuccess()) {
                String elapsed = StringFormat.formatElapsedTime(System.currentTimeMillis() - time);
                m_logger.info(m_nc.getNameWithID() + " End execute (" + elapsed + ")");
            }
        }
    } catch (Throwable throwable) {
        status = NodeContainerExecutionStatus.FAILURE;
        logError(throwable);
    }
    try {
        // sets state POSTEXECUTE
        m_nc.notifyParentPostExecuteStart(status);
        if (!executeInactive) {
            afterExecute();
        }
    } catch (Throwable throwable) {
        status = NodeContainerExecutionStatus.FAILURE;
        logError(throwable);
    }
    try {
        // sets state EXECUTED
        m_nc.notifyParentExecuteFinished(status);
    } catch (Exception e) {
        logError(e);
    }
}
Also used: CanceledExecutionException(org.knime.core.node.CanceledExecutionException) NodeContainerExecutionStatus(org.knime.core.node.workflow.execresult.NodeContainerExecutionStatus)
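
The cancelation handling in this job reduces to a small, reusable pattern: a thread interrupt and a CanceledExecutionException from the progress monitor are treated the same way and mapped to a FAILURE status. A minimal sketch of that pattern follows; it assumes org.knime.core.node.NodeProgressMonitor (the type returned by getProgressMonitor()) is imported in addition to the classes listed above, and the helper name pollCancelState is made up for illustration.

private NodeContainerExecutionStatus pollCancelState(final NodeProgressMonitor monitor) {
    // An interrupt of the executing thread counts as a cancel request.
    if (Thread.interrupted()) {
        return NodeContainerExecutionStatus.FAILURE;
    }
    try {
        // throws CanceledExecutionException once the user has hit "Cancel"
        monitor.checkCanceled();
        return null; // still running normally
    } catch (CanceledExecutionException cee) {
        return NodeContainerExecutionStatus.FAILURE;
    }
}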

Example 42 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

The class ARFFWriterNodeModel, method execute.

/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    checkFileAccess(m_location, false);
    URL url = FileUtil.toURL(m_location);
    Path localPath = FileUtil.resolveToPath(url);
    DataTableSpec inSpec = inData[0].getDataTableSpec();
    int numOfCols = inSpec.getNumColumns();
    for (int c = 0; c < numOfCols; c++) {
        DataType colType = inSpec.getColumnSpec(c).getType();
        if (!colType.isCompatible(IntValue.class) && !colType.isCompatible(DoubleValue.class) && !colType.isCompatible(StringValue.class)) {
            throw new IllegalStateException("Can only write Double, Int," + " and String columns to ARFF file.");
        }
    }
    LOGGER.info("ARFF Writer: ARFFing into '" + m_location + "'.");
    try (BufferedWriter writer = openWriter(localPath, url)) {
        // Write ARFF header
        writer.write("%\n");
        writer.write("% ARFF data file, generated by KNIME\n");
        writer.write("%\n");
        writer.write("% Date: " + new Date(System.currentTimeMillis()) + "\n");
        try {
            writer.write("% User: " + System.getProperty("user.name") + "\n");
        } catch (SecurityException se) {
        // okay - we don't add the user name.
        }
        writer.write("%\n");
        writer.write("\n@RELATION " + m_relationName + "\n");
        // write the attribute part, i.e. the columns' name and type
        for (int c = 0; c < numOfCols; c++) {
            DataColumnSpec cSpec = inSpec.getColumnSpec(c);
            writer.write("@ATTRIBUTE ");
            if (needsQuotes(cSpec.getName().toString())) {
                writer.write("'" + cSpec.getName().toString() + "'");
            } else {
                writer.write(cSpec.getName().toString());
            }
            writer.write("\t");
            writer.write(colspecToARFFType(cSpec));
            writer.write("\n");
        }
        // finally add the data
        writer.write("\n@DATA\n");
        long rowCnt = inData[0].size();
        long rowNr = 0;
        for (DataRow row : inData[0]) {
            rowNr++;
            exec.setProgress(rowNr / (double) rowCnt, "Writing row " + rowNr + " ('" + row.getKey() + "') of " + rowCnt);
            if (m_sparse) {
                writer.write("{");
            }
            // flag to skip comma in first column
            boolean first = true;
            for (int c = 0; c < row.getNumCells(); c++) {
                DataCell cell = row.getCell(c);
                if (m_sparse && !cell.isMissing()) {
                    // we write only non-zero values in a sparse file
                    if ((cell instanceof IntValue) && (((IntValue) cell).getIntValue() == 0)) {
                        continue;
                    }
                    if ((cell instanceof DoubleValue) && (Math.abs(((DoubleValue) cell).getDoubleValue()) < 1e-29)) {
                        continue;
                    }
                }
                String data = "?";
                if (!cell.isMissing()) {
                    data = cell.toString();
                }
                // trigger quotes.
                if (needsQuotes(data)) {
                    data = "'" + data + "'";
                }
                // now spit it out
                if (!first) {
                    // print column separator
                    writer.write(",");
                } else {
                    first = false;
                }
// data in a sparse file must be preceded by the column number
                if (m_sparse) {
                    writer.write("" + c + " ");
                }
                writer.write(data);
            }
            if (m_sparse) {
                writer.write("}");
            }
            writer.write("\n");
            // see if user told us to stop.
            // Check if execution was canceled !
            exec.checkCanceled();
        }
    // while (!rIter.atEnd())
    } catch (CanceledExecutionException ex) {
        if (localPath != null) {
            Files.deleteIfExists(localPath);
            LOGGER.debug("File '" + localPath + "' deleted.");
        }
        throw ex;
    }
    // execution successful return empty array
    return new BufferedDataTable[0];
}
Also used: Path(java.nio.file.Path) DataTableSpec(org.knime.core.data.DataTableSpec) DataRow(org.knime.core.data.DataRow) URL(java.net.URL) Date(java.util.Date) BufferedWriter(java.io.BufferedWriter) DataColumnSpec(org.knime.core.data.DataColumnSpec) DoubleValue(org.knime.core.data.DoubleValue) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) BufferedDataTable(org.knime.core.node.BufferedDataTable) DataType(org.knime.core.data.DataType) DataCell(org.knime.core.data.DataCell) IntValue(org.knime.core.data.IntValue)
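
The try/catch around the writer shows the usual clean-up contract for KNIME writer nodes: on CanceledExecutionException, delete the partially written file and rethrow so the node ends up canceled rather than failed. A stripped-down sketch of just that contract, assuming localPath, inData[0] and exec are available as in the method above:

try (BufferedWriter writer = Files.newBufferedWriter(localPath)) {
    final long rowCnt = inData[0].size();
    long rowNr = 0;
    for (DataRow row : inData[0]) {
        rowNr++;
        exec.setProgress(rowNr / (double) rowCnt, "Writing row " + rowNr + " of " + rowCnt);
        writer.write(row.getKey().getString());
        writer.write('\n');
        exec.checkCanceled(); // throws CanceledExecutionException on user cancel
    }
} catch (CanceledExecutionException cee) {
    Files.deleteIfExists(localPath); // do not leave a half-written file behind
    throw cee;                       // the framework then records the node as canceled
}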

Example 43 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

The class CSVReaderNodeModel, method createFileTable.

protected FileTable createFileTable(final ExecutionContext exec) throws Exception {
    // prepare the settings for the file analyzer
    FileReaderNodeSettings settings = new FileReaderNodeSettings();
    CheckUtils.checkSourceFile(m_config.getLocation());
    URL url = FileUtil.toURL(m_config.getLocation());
    settings.setDataFileLocationAndUpdateTableName(url);
    String colDel = m_config.getColDelimiter();
    if (colDel != null && !colDel.isEmpty()) {
        settings.addDelimiterPattern(colDel, false, false, false);
    }
    settings.setDelimiterUserSet(true);
    String rowDel = m_config.getRowDelimiter();
    if (rowDel != null && !rowDel.isEmpty()) {
        settings.addRowDelimiter(rowDel, true);
    }
    String quote = m_config.getQuoteString();
    if (quote != null && !quote.isEmpty()) {
        settings.addQuotePattern(quote, quote);
    }
    settings.setQuoteUserSet(true);
    String commentStart = m_config.getCommentStart();
    if (commentStart != null && !commentStart.isEmpty()) {
        settings.addSingleLineCommentPattern(commentStart, false, false);
    }
    settings.setCommentUserSet(true);
    boolean hasColHeader = m_config.hasColHeader();
    settings.setFileHasColumnHeaders(hasColHeader);
    settings.setFileHasColumnHeadersUserSet(true);
    boolean hasRowHeader = m_config.hasRowHeader();
    settings.setFileHasRowHeaders(hasRowHeader);
    settings.setFileHasRowHeadersUserSet(true);
    settings.setWhiteSpaceUserSet(true);
    boolean supportShortLines = m_config.isSupportShortLines();
    settings.setSupportShortLines(supportShortLines);
    int skipFirstLinesCount = m_config.getSkipFirstLinesCount();
    settings.setSkipFirstLines(skipFirstLinesCount);
    final long limitRowsCount = m_config.getLimitRowsCount();
    settings.setMaximumNumberOfRowsToRead(limitRowsCount);
    settings.setCharsetName(m_config.getCharSetName());
    settings.setCharsetUserSet(true);
    settings.setConnectTimeout(m_config.getConnectTimeout());
    final int limitAnalysisCount = m_config.getLimitAnalysisCount();
    final ExecutionMonitor analyseExec = exec.createSubProgress(0.5);
    final ExecutionContext readExec = exec.createSubExecutionContext(0.5);
    exec.setMessage("Analyzing file");
    if (limitAnalysisCount >= 0) {
        final FileReaderExecutionMonitor fileReaderExec = new FileReaderExecutionMonitor();
        fileReaderExec.getProgressMonitor().addProgressListener(new NodeProgressListener() {

            @Override
            public void progressChanged(final NodeProgressEvent pe) {
                try {
                    // if the node was canceled, cancel (interrupt) the analysis
                    analyseExec.checkCanceled();
                    // otherwise update the node progress
                    NodeProgress nodeProgress = pe.getNodeProgress();
                    analyseExec.setProgress(nodeProgress.getProgress(), nodeProgress.getMessage());
                } catch (CanceledExecutionException e) {
                    fileReaderExec.setExecuteInterrupted();
                }
            }
        });
        fileReaderExec.setShortCutLines(limitAnalysisCount);
        fileReaderExec.setExecuteCanceled();
        settings = FileAnalyzer.analyze(settings, fileReaderExec);
    } else {
        settings = FileAnalyzer.analyze(settings, analyseExec);
    }
    SettingsStatus status = settings.getStatusOfSettings();
    if (status.getNumOfErrors() > 0) {
        throw new IllegalStateException(status.getErrorMessage(0));
    }
    final DataTableSpec tableSpec = settings.createDataTableSpec();
    if (tableSpec == null) {
        final SettingsStatus status2 = settings.getStatusOfSettings(true, null);
        if (status2.getNumOfErrors() > 0) {
            throw new IllegalStateException(status2.getErrorMessage(0));
        } else {
            throw new IllegalStateException("Unknown error during file analysis.");
        }
    }
    exec.setMessage("Buffering file");
    return new FileTable(tableSpec, settings, readExec);
}
Also used: DataTableSpec(org.knime.core.data.DataTableSpec) NodeProgressListener(org.knime.core.node.workflow.NodeProgressListener) NodeProgress(org.knime.core.node.workflow.NodeProgress) FileTable(org.knime.base.node.io.filereader.FileTable) SettingsStatus(org.knime.core.util.tokenizer.SettingsStatus) URL(java.net.URL) FileReaderNodeSettings(org.knime.base.node.io.filereader.FileReaderNodeSettings) ExecutionContext(org.knime.core.node.ExecutionContext) NodeProgressEvent(org.knime.core.node.workflow.NodeProgressEvent) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) FileReaderExecutionMonitor(org.knime.base.node.io.filereader.FileReaderExecutionMonitor) ExecutionMonitor(org.knime.core.node.ExecutionMonitor)
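
createFileTable returns a lazily read FileTable rather than reading the whole file; a caller typically materializes it into a BufferedDataTable, and that materialization is where the row-by-row cancelation checks happen. A hedged sketch of such a caller follows; it is illustrative only and not necessarily this node's actual execute() method.

@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec)
        throws Exception {
    FileTable fileTable = createFileTable(exec);
    // Materializing the lazy file table reads the file row by row; the passed
    // monitor is polled during the read and throws CanceledExecutionException
    // if the user cancels, aborting the read.
    BufferedDataTable table = exec.createBufferedDataTable(fileTable, exec);
    return new BufferedDataTable[]{table};
}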

Example 44 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

The class CSVWriterNodeModel, method doIt.

private BufferedDataTable[] doIt(final BufferedDataTable data, final RowInput input, final ExecutionContext exec) throws Exception {
    CheckUtils.checkDestinationFile(m_settings.getFileName(), m_settings.getFileOverwritePolicy() != FileOverwritePolicy.Abort);
    URL url = FileUtil.toURL(m_settings.getFileName());
    Path localPath = FileUtil.resolveToPath(url);
    boolean writeColHeader = m_settings.writeColumnHeader();
    OutputStream tempOut;
    URLConnection urlConnection = null;
    boolean appendToFile;
    if (localPath != null) {
        // figure out if the writer is actually supposed to write col headers
        if (Files.exists(localPath)) {
            appendToFile = m_settings.getFileOverwritePolicy() == FileOverwritePolicy.Append;
            if (writeColHeader && appendToFile) {
                // do not write headers if the file exists and we append to it
                writeColHeader = !m_settings.skipColHeaderIfFileExists();
            }
        } else {
            appendToFile = false;
        }
        if (appendToFile) {
            tempOut = Files.newOutputStream(localPath, StandardOpenOption.APPEND);
        } else {
            tempOut = Files.newOutputStream(localPath);
        }
    } else {
        CheckUtils.checkState(m_settings.getFileOverwritePolicy() != FileOverwritePolicy.Append, url + " points to a remote file but append to remote files is not possible!");
        urlConnection = FileUtil.openOutputConnection(url, "PUT");
        tempOut = urlConnection.getOutputStream();
        appendToFile = false;
    }
    // make a copy of the settings with the modified value
    FileWriterSettings writerSettings = new FileWriterSettings(m_settings);
    writerSettings.setWriteColumnHeader(writeColHeader);
    if (m_settings.isGzipOutput()) {
        tempOut = new GZIPOutputStream(tempOut);
    }
    tempOut = new BufferedOutputStream(tempOut);
    Charset charSet = Charset.defaultCharset();
    String encoding = writerSettings.getCharacterEncoding();
    if (encoding != null) {
        charSet = Charset.forName(encoding);
    }
    CSVWriter tableWriter = new CSVWriter(new OutputStreamWriter(tempOut, charSet), writerSettings);
    // write the comment header, if we are supposed to
    String tableName;
    if (input == null) {
        tableName = data.getDataTableSpec().getName();
    } else {
        tableName = input.getDataTableSpec().getName();
    }
    writeCommentHeader(m_settings, tableWriter, tableName, appendToFile);
    try {
        if (input == null) {
            tableWriter.write(data, exec);
        } else {
            tableWriter.write(input, exec);
        }
        tableWriter.close();
        if (tableWriter.hasWarningMessage()) {
            setWarningMessage(tableWriter.getLastWarningMessage());
        }
        // execution successful
        if (input == null) {
            return new BufferedDataTable[0];
        } else {
            return null;
        }
    } catch (CanceledExecutionException cee) {
        try {
            tableWriter.close();
        } catch (IOException ex) {
        // may happen if the stream is already closed by the interrupted thread
        }
        if (localPath != null) {
            LOGGER.info("Table FileWriter canceled.");
            try {
                Files.delete(localPath);
                LOGGER.debug("File '" + m_settings.getFileName() + "' deleted after node has been canceled.");
            } catch (IOException ex) {
                LOGGER.warn("Unable to delete file '" + m_settings.getFileName() + "' after cancellation: " + ex.getMessage(), ex);
            }
        }
        throw cee;
    }
}
Also used: Path(java.nio.file.Path) BufferedOutputStream(java.io.BufferedOutputStream) OutputStream(java.io.OutputStream) GZIPOutputStream(java.util.zip.GZIPOutputStream) Charset(java.nio.charset.Charset) IOException(java.io.IOException) URL(java.net.URL) URLConnection(java.net.URLConnection) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) BufferedDataTable(org.knime.core.node.BufferedDataTable) OutputStreamWriter(java.io.OutputStreamWriter)
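
tableWriter.write(data, exec) is where the CanceledExecutionException in the catch block actually originates: conceptually the writer loops over the rows, reports progress and polls the monitor. A rough sketch of such a loop follows; writeRows is a hypothetical stand-in, not the real CSVWriter implementation.

void writeRows(final BufferedDataTable table, final Writer out, final ExecutionMonitor exec)
        throws IOException, CanceledExecutionException {
    final long total = table.size();
    long done = 0;
    for (DataRow row : table) {
        out.write(row.getKey().getString());
        for (DataCell cell : row) {
            out.write(',');
            out.write(cell.isMissing() ? "" : cell.toString());
        }
        out.write('\n');
        exec.setProgress(++done / (double) total, "Written row " + done + " of " + total);
        exec.checkCanceled(); // propagates up to doIt(), which closes the writer and deletes the file
    }
}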

Example 45 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

The class DBConnectionWriterNodeModel, method execute.

/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws CanceledExecutionException, Exception {
    DatabasePortObject dbObj = (DatabasePortObject) inData[0];
    exec.setProgress("Opening database connection...");
    String tableName = m_tableName.getStringValue();
    DatabaseQueryConnectionSettings conn = dbObj.getConnectionSettings(getCredentialsProvider());
    CredentialsProvider cp = getCredentialsProvider();
    final StatementManipulator statementManipulator = conn.getUtility().getStatementManipulator();
    try {
        // use the statement manipulator to create the drop table statement
        conn.execute(statementManipulator.dropTable(tableName, false), cp);
    } catch (Exception e) {
    // suppress exception thrown when table does not exist in database
    }
    String[] stmts = statementManipulator.createTableAsSelect(tableName, conn.getQuery());
    for (final String stmt : stmts) {
        conn.execute(stmt, cp);
    }
    return new BufferedDataTable[0];
}
Also used: DatabasePortObject(org.knime.core.node.port.database.DatabasePortObject) DatabaseQueryConnectionSettings(org.knime.core.node.port.database.DatabaseQueryConnectionSettings) BufferedDataTable(org.knime.core.node.BufferedDataTable) SettingsModelString(org.knime.core.node.defaultnodesettings.SettingsModelString) CredentialsProvider(org.knime.core.node.workflow.CredentialsProvider) StatementManipulator(org.knime.core.node.port.database.StatementManipulator) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) IOException(java.io.IOException)
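
Note that this execute() declares CanceledExecutionException but never polls the monitor, so a running CREATE TABLE cannot be canceled from the node side. If cancelation between statements is wanted, a check could be added in the statement loop; a possible sketch (an extension under that assumption, not the code as shipped):

String[] stmts = statementManipulator.createTableAsSelect(tableName, conn.getQuery());
for (final String stmt : stmts) {
    exec.checkCanceled();                 // honor a user cancel between statements
    exec.setMessage("Executing: " + stmt);
    conn.execute(stmt, cp);               // the statement itself is not interruptible here
}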

Aggregations

CanceledExecutionException (org.knime.core.node.CanceledExecutionException): 82 usages
InvalidSettingsException (org.knime.core.node.InvalidSettingsException): 34 usages
IOException (java.io.IOException): 32 usages
File (java.io.File): 21 usages
ExecutionMonitor (org.knime.core.node.ExecutionMonitor): 21 usages
DataRow (org.knime.core.data.DataRow): 20 usages
DataTableSpec (org.knime.core.data.DataTableSpec): 20 usages
BufferedDataTable (org.knime.core.node.BufferedDataTable): 20 usages
DataCell (org.knime.core.data.DataCell): 19 usages
ArrayList (java.util.ArrayList): 11 usages
DataColumnSpec (org.knime.core.data.DataColumnSpec): 11 usages
BufferedDataContainer (org.knime.core.node.BufferedDataContainer): 10 usages
LinkedHashMap (java.util.LinkedHashMap): 9 usages
ExecutionException (java.util.concurrent.ExecutionException): 9 usages
DefaultRow (org.knime.core.data.def.DefaultRow): 9 usages
RowKey (org.knime.core.data.RowKey): 8 usages
BufferedWriter (java.io.BufferedWriter): 7 usages
FileInputStream (java.io.FileInputStream): 7 usages
Map (java.util.Map): 7 usages
Future (java.util.concurrent.Future): 7 usages