Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
From the class NodeExecutionJob, method internalRun:
/**
* Runs the node: drives the state transitions PREEXECUTE, EXECUTING,
* POSTEXECUTE and EXECUTED, turning cancellation and errors into a
* FAILURE status.
*/
private void internalRun() {
NodeContainerExecutionStatus status = null;
// handle inactive branches -- do not delegate to custom job
// manager (the node will just return inactive branch objects)
boolean executeInactive = false;
if (m_nc instanceof SingleNodeContainer) {
SingleNodeContainer snc = (SingleNodeContainer) m_nc;
if (!snc.isInactiveBranchConsumer() && Node.containsInactiveObjects(getPortObjects())) {
executeInactive = true;
}
}
if (!isReConnecting()) {
try {
// sets state PREEXECUTE
if (!m_nc.notifyParentPreExecuteStart()) {
// node was canceled, omit any subsequent state transitions
return;
}
if (!executeInactive) {
beforeExecute();
}
} catch (Throwable throwable) {
logError(throwable);
status = NodeContainerExecutionStatus.FAILURE;
}
try {
// sets state EXECUTING
m_nc.notifyParentExecuteStart();
} catch (IllegalFlowObjectStackException e) {
status = NodeContainerExecutionStatus.FAILURE;
} catch (Throwable throwable) {
status = NodeContainerExecutionStatus.FAILURE;
logError(throwable);
}
}
// check thread cancellation
if (status == null) {
if (Thread.interrupted()) {
status = NodeContainerExecutionStatus.FAILURE;
} else {
try {
m_nc.getProgressMonitor().checkCanceled();
} catch (CanceledExecutionException cee) {
status = NodeContainerExecutionStatus.FAILURE;
}
}
}
try {
if (status == null) {
// start message and keep start time
final long time = System.currentTimeMillis();
m_logger.debug(m_nc.getNameWithID() + " Start execute");
if (executeInactive) {
SingleNodeContainer snc = (SingleNodeContainer) m_nc;
status = snc.performExecuteNode(getPortObjects());
} else {
status = mainExecute();
}
if (status != null && status.isSuccess()) {
String elapsed = StringFormat.formatElapsedTime(System.currentTimeMillis() - time);
m_logger.info(m_nc.getNameWithID() + " End execute (" + elapsed + ")");
}
}
} catch (Throwable throwable) {
status = NodeContainerExecutionStatus.FAILURE;
logError(throwable);
}
try {
// sets state POSTEXECUTE
m_nc.notifyParentPostExecuteStart(status);
if (!executeInactive) {
afterExecute();
}
} catch (Throwable throwable) {
status = NodeContainerExecutionStatus.FAILURE;
logError(throwable);
}
try {
// sets state EXECUTED
m_nc.notifyParentExecuteFinished(status);
} catch (Exception e) {
logError(e);
}
}
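The pattern to note above is the pre-flight cancellation check before mainExecute(): the job consults both the thread's interrupt flag and the node's progress monitor. A minimal standalone sketch of that double check follows; the helper class and method names (CancellationProbe, isCanceled) are illustrative, while NodeProgressMonitor.checkCanceled() is the same call used in internalRun.

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.workflow.NodeProgressMonitor;

/** Illustrative helper: folds both cancellation channels into one boolean. */
final class CancellationProbe {

    private CancellationProbe() {
    }

    /** Mirrors the two checks in internalRun: interrupt flag first, then monitor. */
    static boolean isCanceled(final NodeProgressMonitor monitor) {
        if (Thread.interrupted()) {
            // the interrupt flag is cleared by this call, just as in internalRun
            return true;
        }
        try {
            monitor.checkCanceled(); // throws once the user hits "Cancel"
            return false;
        } catch (CanceledExecutionException cee) {
            return true;
        }
    }
}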
Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
From the class ARFFWriterNodeModel, method execute:
/**
* {@inheritDoc}
*/
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
checkFileAccess(m_location, false);
URL url = FileUtil.toURL(m_location);
Path localPath = FileUtil.resolveToPath(url);
DataTableSpec inSpec = inData[0].getDataTableSpec();
int numOfCols = inSpec.getNumColumns();
for (int c = 0; c < numOfCols; c++) {
DataType colType = inSpec.getColumnSpec(c).getType();
if (!colType.isCompatible(IntValue.class) && !colType.isCompatible(DoubleValue.class) && !colType.isCompatible(StringValue.class)) {
throw new IllegalStateException("Can only write Double, Int," + " and String columns to ARFF file.");
}
}
LOGGER.info("ARFF Writer: ARFFing into '" + m_location + "'.");
try (BufferedWriter writer = openWriter(localPath, url)) {
// Write ARFF header
writer.write("%\n");
writer.write("% ARFF data file, generated by KNIME\n");
writer.write("%\n");
writer.write("% Date: " + new Date(System.currentTimeMillis()) + "\n");
try {
writer.write("% User: " + System.getProperty("user.name") + "\n");
} catch (SecurityException se) {
// okay - we don't add the user name.
}
writer.write("%\n");
writer.write("\n@RELATION " + m_relationName + "\n");
// write the attribute part, i.e. the columns' name and type
for (int c = 0; c < numOfCols; c++) {
DataColumnSpec cSpec = inSpec.getColumnSpec(c);
writer.write("@ATTRIBUTE ");
if (needsQuotes(cSpec.getName())) {
writer.write("'" + cSpec.getName() + "'");
} else {
writer.write(cSpec.getName());
}
writer.write("\t");
writer.write(colspecToARFFType(cSpec));
writer.write("\n");
}
// finally add the data
writer.write("\n@DATA\n");
long rowCnt = inData[0].size();
long rowNr = 0;
for (DataRow row : inData[0]) {
rowNr++;
exec.setProgress(rowNr / (double) rowCnt, "Writing row " + rowNr + " ('" + row.getKey() + "') of " + rowCnt);
if (m_sparse) {
writer.write("{");
}
// flag to skip comma in first column
boolean first = true;
for (int c = 0; c < row.getNumCells(); c++) {
DataCell cell = row.getCell(c);
if (m_sparse && !cell.isMissing()) {
// we write only non-zero values in a sparse file
if ((cell instanceof IntValue) && (((IntValue) cell).getIntValue() == 0)) {
continue;
}
if ((cell instanceof DoubleValue) && (Math.abs(((DoubleValue) cell).getDoubleValue()) < 1e-29)) {
continue;
}
}
String data = "?";
if (!cell.isMissing()) {
data = cell.toString();
}
// quote the value if necessary.
if (needsQuotes(data)) {
data = "'" + data + "'";
}
// now spit it out
if (!first) {
// print column separator
writer.write(",");
} else {
first = false;
}
// data in a sparse file must be preceded by the column number
if (m_sparse) {
writer.write("" + c + " ");
}
writer.write(data);
}
if (m_sparse) {
writer.write("}");
}
writer.write("\n");
// check whether the user canceled the execution
exec.checkCanceled();
}
} catch (CanceledExecutionException ex) {
if (localPath != null) {
Files.deleteIfExists(localPath);
LOGGER.debug("File '" + localPath + "' deleted.");
}
throw ex;
}
// execution successful; return an empty array
return new BufferedDataTable[0];
}
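The catch block is the part worth copying: on CanceledExecutionException the half-written file is deleted before the exception is rethrown, so a canceled node leaves no partial output behind. Below is a minimal sketch of the same write/check/clean-up loop, assuming a plain java.nio target file; writeOrCleanUp and the Iterable of lines are illustrative stand-ins for the ARFF-specific logic.

import java.io.BufferedWriter;
import java.nio.file.Files;
import java.nio.file.Path;

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionContext;

/** Sketch of the write / check / clean-up loop used by the ARFF writer above. */
final class CancelAwareWriter {

    private CancelAwareWriter() {
    }

    static void writeOrCleanUp(final Path target, final Iterable<String> lines,
            final ExecutionContext exec) throws Exception {
        try (BufferedWriter writer = Files.newBufferedWriter(target)) {
            for (String line : lines) {
                writer.write(line);
                writer.newLine();
                exec.checkCanceled(); // throws CanceledExecutionException on user cancel
            }
        } catch (CanceledExecutionException cee) {
            Files.deleteIfExists(target); // don't leave a truncated file behind
            throw cee;
        }
    }
}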
Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
From the class CSVReaderNodeModel, method createFileTable:
protected FileTable createFileTable(final ExecutionContext exec) throws Exception {
// prepare the settings for the file analyzer
FileReaderNodeSettings settings = new FileReaderNodeSettings();
CheckUtils.checkSourceFile(m_config.getLocation());
URL url = FileUtil.toURL(m_config.getLocation());
settings.setDataFileLocationAndUpdateTableName(url);
String colDel = m_config.getColDelimiter();
if (colDel != null && !colDel.isEmpty()) {
settings.addDelimiterPattern(colDel, false, false, false);
}
settings.setDelimiterUserSet(true);
String rowDel = m_config.getRowDelimiter();
if (rowDel != null && !rowDel.isEmpty()) {
settings.addRowDelimiter(rowDel, true);
}
String quote = m_config.getQuoteString();
if (quote != null && !quote.isEmpty()) {
settings.addQuotePattern(quote, quote);
}
settings.setQuoteUserSet(true);
String commentStart = m_config.getCommentStart();
if (commentStart != null && !commentStart.isEmpty()) {
settings.addSingleLineCommentPattern(commentStart, false, false);
}
settings.setCommentUserSet(true);
boolean hasColHeader = m_config.hasColHeader();
settings.setFileHasColumnHeaders(hasColHeader);
settings.setFileHasColumnHeadersUserSet(true);
boolean hasRowHeader = m_config.hasRowHeader();
settings.setFileHasRowHeaders(hasRowHeader);
settings.setFileHasRowHeadersUserSet(true);
settings.setWhiteSpaceUserSet(true);
boolean supportShortLines = m_config.isSupportShortLines();
settings.setSupportShortLines(supportShortLines);
int skipFirstLinesCount = m_config.getSkipFirstLinesCount();
settings.setSkipFirstLines(skipFirstLinesCount);
final long limitRowsCount = m_config.getLimitRowsCount();
settings.setMaximumNumberOfRowsToRead(limitRowsCount);
settings.setCharsetName(m_config.getCharSetName());
settings.setCharsetUserSet(true);
settings.setConnectTimeout(m_config.getConnectTimeout());
final int limitAnalysisCount = m_config.getLimitAnalysisCount();
final ExecutionMonitor analyseExec = exec.createSubProgress(0.5);
final ExecutionContext readExec = exec.createSubExecutionContext(0.5);
exec.setMessage("Analyzing file");
if (limitAnalysisCount >= 0) {
final FileReaderExecutionMonitor fileReaderExec = new FileReaderExecutionMonitor();
fileReaderExec.getProgressMonitor().addProgressListener(new NodeProgressListener() {
@Override
public void progressChanged(final NodeProgressEvent pe) {
try {
// if the node was canceled, cancel (interrupt) the analysis
analyseExec.checkCanceled();
// otherwise update the node progress
NodeProgress nodeProgress = pe.getNodeProgress();
analyseExec.setProgress(nodeProgress.getProgress(), nodeProgress.getMessage());
} catch (CanceledExecutionException e) {
fileReaderExec.setExecuteInterrupted();
}
}
});
fileReaderExec.setShortCutLines(limitAnalysisCount);
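// note: on this monitor "canceled" requests a shortened analysis (using the shortcut
// lines set above); a hard abort is what setExecuteInterrupted() in the listener signals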
fileReaderExec.setExecuteCanceled();
settings = FileAnalyzer.analyze(settings, fileReaderExec);
} else {
settings = FileAnalyzer.analyze(settings, analyseExec);
}
SettingsStatus status = settings.getStatusOfSettings();
if (status.getNumOfErrors() > 0) {
throw new IllegalStateException(status.getErrorMessage(0));
}
final DataTableSpec tableSpec = settings.createDataTableSpec();
if (tableSpec == null) {
final SettingsStatus status2 = settings.getStatusOfSettings(true, null);
if (status2.getNumOfErrors() > 0) {
throw new IllegalStateException(status2.getErrorMessage(0));
} else {
throw new IllegalStateException("Unknown error during file analysis.");
}
}
exec.setMessage("Buffering file");
return new FileTable(tableSpec, settings, readExec);
}
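The notable construction here is the bridge between the two monitors: a progress listener forwards the analyzer's progress to analyseExec and, once analyseExec reports cancellation, interrupts the analyzer. A minimal sketch of that bridge as a reusable factory follows; CancellationBridge and the cancelInner callback are illustrative, while the listener body matches the one above.

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.workflow.NodeProgress;
import org.knime.core.node.workflow.NodeProgressEvent;
import org.knime.core.node.workflow.NodeProgressListener;

/** Sketch of the monitor bridge built in createFileTable above. */
final class CancellationBridge {

    private CancellationBridge() {
    }

    /**
     * Forwards progress events to the outer monitor and, once the outer
     * monitor is canceled, fires the given callback (e.g.
     * fileReaderExec::setExecuteInterrupted) to stop the inner analysis.
     */
    static NodeProgressListener bridge(final ExecutionMonitor outer, final Runnable cancelInner) {
        return new NodeProgressListener() {
            @Override
            public void progressChanged(final NodeProgressEvent pe) {
                try {
                    outer.checkCanceled(); // has the user canceled the node?
                    NodeProgress np = pe.getNodeProgress();
                    outer.setProgress(np.getProgress(), np.getMessage());
                } catch (CanceledExecutionException e) {
                    cancelInner.run();
                }
            }
        };
    }
}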
Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
From the class CSVWriterNodeModel, method doIt:
private BufferedDataTable[] doIt(final BufferedDataTable data, final RowInput input, final ExecutionContext exec) throws Exception {
CheckUtils.checkDestinationFile(m_settings.getFileName(), m_settings.getFileOverwritePolicy() != FileOverwritePolicy.Abort);
URL url = FileUtil.toURL(m_settings.getFileName());
Path localPath = FileUtil.resolveToPath(url);
boolean writeColHeader = m_settings.writeColumnHeader();
OutputStream tempOut;
URLConnection urlConnection = null;
boolean appendToFile;
if (localPath != null) {
// figure out if the writer is actually supposed to write col headers
if (Files.exists(localPath)) {
appendToFile = m_settings.getFileOverwritePolicy() == FileOverwritePolicy.Append;
if (writeColHeader && appendToFile) {
// do not write headers if the file exists and we append to it
writeColHeader = !m_settings.skipColHeaderIfFileExists();
}
} else {
appendToFile = false;
}
if (appendToFile) {
tempOut = Files.newOutputStream(localPath, StandardOpenOption.APPEND);
} else {
tempOut = Files.newOutputStream(localPath);
}
} else {
CheckUtils.checkState(m_settings.getFileOverwritePolicy() != FileOverwritePolicy.Append, url + " points to a remote file, but appending to remote files is not possible!");
urlConnection = FileUtil.openOutputConnection(url, "PUT");
tempOut = urlConnection.getOutputStream();
appendToFile = false;
}
// make a copy of the settings with the modified value
FileWriterSettings writerSettings = new FileWriterSettings(m_settings);
writerSettings.setWriteColumnHeader(writeColHeader);
if (m_settings.isGzipOutput()) {
tempOut = new GZIPOutputStream(tempOut);
}
tempOut = new BufferedOutputStream(tempOut);
Charset charSet = Charset.defaultCharset();
String encoding = writerSettings.getCharacterEncoding();
if (encoding != null) {
charSet = Charset.forName(encoding);
}
CSVWriter tableWriter = new CSVWriter(new OutputStreamWriter(tempOut, charSet), writerSettings);
// write the comment header, if we are supposed to
String tableName;
if (input == null) {
tableName = data.getDataTableSpec().getName();
} else {
tableName = input.getDataTableSpec().getName();
}
writeCommentHeader(m_settings, tableWriter, tableName, appendToFile);
try {
if (input == null) {
tableWriter.write(data, exec);
} else {
tableWriter.write(input, exec);
}
tableWriter.close();
if (tableWriter.hasWarningMessage()) {
setWarningMessage(tableWriter.getLastWarningMessage());
}
// execution successful
if (input == null) {
return new BufferedDataTable[0];
} else {
return null;
}
} catch (CanceledExecutionException cee) {
try {
tableWriter.close();
} catch (IOException ex) {
// may happen if the stream is already closed by the interrupted thread
}
if (localPath != null) {
LOGGER.info("Table FileWriter canceled.");
try {
Files.delete(localPath);
LOGGER.debug("File '" + m_settings.getFileName() + "' deleted after node has been canceled.");
} catch (IOException ex) {
LOGGER.warn("Unable to delete file '" + m_settings.getFileName() + "' after cancellation: " + ex.getMessage(), ex);
}
}
throw cee;
}
}
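Before the CSVWriter is created, the method layers the raw output stream: optional GZIP compression, then buffering, then a charset-aware writer. Below is a minimal sketch of that pipeline in isolation; the gzip and encoding parameters stand in for the corresponding FileWriterSettings values.

import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.zip.GZIPOutputStream;

/** Sketch of the stream pipeline assembled in doIt above. */
final class LayeredWriterFactory {

    private LayeredWriterFactory() {
    }

    static Writer open(final Path path, final boolean append, final boolean gzip,
            final String encoding) throws IOException {
        OutputStream out = append
            ? Files.newOutputStream(path, StandardOpenOption.APPEND)
            : Files.newOutputStream(path);
        if (gzip) {
            out = new GZIPOutputStream(out); // compression sits below the buffer
        }
        out = new BufferedOutputStream(out);
        Charset cs = encoding != null ? Charset.forName(encoding) : Charset.defaultCharset();
        return new OutputStreamWriter(out, cs);
    }
}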
Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
From the class DBConnectionWriterNodeModel, method execute:
/**
* {@inheritDoc}
*/
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws CanceledExecutionException, Exception {
DatabasePortObject dbObj = (DatabasePortObject) inData[0];
exec.setProgress("Opening database connection...");
String tableName = m_tableName.getStringValue();
DatabaseQueryConnectionSettings conn = dbObj.getConnectionSettings(getCredentialsProvider());
CredentialsProvider cp = getCredentialsProvider();
final StatementManipulator statementManipulator = conn.getUtility().getStatementManipulator();
try {
// use the statement manipulator to create the drop table statement
conn.execute(statementManipulator.dropTable(tableName, false), cp);
} catch (Exception e) {
// suppress exception thrown when table does not exist in database
}
String[] stmts = statementManipulator.createTableAsSelect(tableName, conn.getQuery());
for (final String stmt : stmts) {
conn.execute(stmt, cp);
}
return new BufferedDataTable[0];
}
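Unlike the other examples on this page, this method declares CanceledExecutionException but never polls for it, so a long-running statement loop cannot react to a user cancel between statements. Below is a minimal sketch of how such a check could be threaded through the loop; CancellableStatementLoop and StatementRunner are illustrative stand-ins for conn.execute(stmt, cp).

import org.knime.core.node.ExecutionContext;

/** Sketch: a cancellation-aware variant of the statement loop above. */
final class CancellableStatementLoop {

    private CancellableStatementLoop() {
    }

    /** Stand-in for the database call, i.e. conn.execute(stmt, cp). */
    interface StatementRunner {
        void run(String sql) throws Exception;
    }

    static void runAll(final String[] statements, final ExecutionContext exec,
            final StatementRunner runner) throws Exception {
        for (int i = 0; i < statements.length; i++) {
            exec.checkCanceled(); // throws CanceledExecutionException on user cancel
            exec.setProgress((double) i / statements.length,
                "Executing statement " + (i + 1) + " of " + statements.length);
            runner.run(statements[i]);
        }
    }
}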