Use of org.knime.core.node.workflow.NodeProgressEvent in project knime-core by KNIME.
From class VariableFileReaderNodeDialog, method analyzeAction:
/**
 * Triggers (re-)analysis of the file with the current settings.
 * If a previous analysis is still running it is interrupted and this
 * method blocks on the {@code m_analysisRunning} monitor until it ends,
 * then a fresh analysis is started in a background thread.
 */
protected void analyzeAction() {
    synchronized (m_analysisRunning) {
        // wait until we have a chance to run the analysis
        while (m_analysisRunning.booleanValue()) {
            getLogger().error("Internal error: Re-entering analysis thread - " + "canceling it - waiting for it to finish...");
            m_analysisExecMonitor.setExecuteInterrupted();
            // wait until it finishes
            try {
                m_analysisRunning.wait();
                getLogger().error("Alright - continuing with new analysis...");
            } catch (InterruptedException ie) {
                // deliberately ignored: the while loop re-checks the running
                // flag; re-interrupting here would make the next wait() throw
                // immediately and busy-spin until the flag clears.
            }
        }
        // Create execution context for progress and cancellations.
        // We use our own progress monitor; we need to distinguish
        // between user cancel and code interrupts.
        m_analysisExecMonitor = new FileReaderExecutionMonitor();
        m_analysisExecMonitor.getProgressMonitor().addProgressListener(new NodeProgressListener() {
            @Override
            public void progressChanged(final NodeProgressEvent pEvent) {
                if (pEvent.getNodeProgress().getMessage() != null) {
                    // UI updates must happen on the EDT
                    ViewUtils.runOrInvokeLaterInEDT(new Runnable() {
                        @Override
                        public void run() {
                            // getProgress() may return null for message-only
                            // events; treat that as 0%. (Avoids the deprecated
                            // new Double(...) boxing constructor.)
                            Double p = pEvent.getNodeProgress().getProgress();
                            double progress = (p == null) ? 0.0 : p.doubleValue();
                            m_analyzeProgressMsg.setText(pEvent.getNodeProgress().getMessage());
                            m_analyzeProgressBar.setValue((int) Math.round(100 * progress));
                            getPanel().revalidate();
                            getPanel().repaint();
                        }
                    });
                }
            }
        });
        // the analysis thread, when finished, clears this flag.
        m_analysisRunning.setValue(true);
        // allow for quickies from now on
        m_analyzeCancel.setEnabled(true);
        setPreviewTable(null);
        setErrorLabelText("");
    }
    // clone current settings
    VariableFileReaderNodeSettings newFRSettings = new VariableFileReaderNodeSettings(m_frSettings);
    Vector<ColProperty> oldColProps = m_frSettings.getColumnProperties();
    // prepare the settings object for re-analysis: force column re-detection
    newFRSettings.setNumberOfColumns(-1);
    Vector<ColProperty> newProps = new Vector<ColProperty>();
    if (oldColProps != null) {
        for (ColProperty cProp : oldColProps) {
            // take over only the column properties modified by the user;
            // null entries tell the analyzer to re-guess that column
            if ((cProp != null) && (cProp.getUserSettings())) {
                newProps.add(cProp);
            } else {
                newProps.add(null);
            }
        }
    }
    newFRSettings.setColumnProperties(newProps);
    analyzeInThread(newFRSettings);
}
Use of org.knime.core.node.workflow.NodeProgressEvent in project knime-core by KNIME.
From class NodeProgressEventTest, method testGetters:
/**
 * Verifies that a {@link NodeProgressEvent} exposes the source node id
 * and the progress object it was constructed with.
 */
@Test
public void testGetters() {
    NodeProgress np = new NodeProgress(null, null);
    NodeProgressEvent e = new NodeProgressEvent(new NodeID(10), np);
    // JUnit convention: expected value first, actual second, so that a
    // failure message ("expected X but was Y") reads correctly.
    assertEquals(new NodeID(10), e.getSource());
    assertEquals(np, e.getNodeProgress());
}
Use of org.knime.core.node.workflow.NodeProgressEvent in project knime-core by KNIME.
From class DefaultNodeProgressMonitorTest, method createListener:
/**
 * Builds a progress listener that stores every received {@link NodeProgress}
 * in the given pointer and, whenever the supplied predicate accepts the
 * progress object, wakes up all threads waiting on that predicate instance.
 */
private static NodeProgressListener createListener(final Pointer<NodeProgress> progressPointer, final Function<NodeProgress, Boolean> notificationFunction) {
    return progressEvent -> {
        final NodeProgress current = progressEvent.getNodeProgress();
        progressPointer.set(current);
        if (notificationFunction.apply(current)) {
            // the test threads wait() on the function object itself,
            // so it doubles as the notification monitor
            synchronized (notificationFunction) {
                notificationFunction.notifyAll();
            }
        }
    };
}
Use of org.knime.core.node.workflow.NodeProgressEvent in project knime-core by KNIME.
From class TableSorterWorker, method doInBackgroundWithContext:
/**
 * {@inheritDoc}
 */
@Override
protected DataTable doInBackgroundWithContext() throws Exception {
    // Determine the row count up front so the sorter can report progress;
    // -1 means "unknown" and disables proportional progress.
    final long rowCount;
    if (m_inputTable instanceof BufferedDataTable) {
        rowCount = ((BufferedDataTable)m_inputTable).size();
    } else if (m_inputTable instanceof ContainerTable) {
        rowCount = ((ContainerTable)m_inputTable).size();
    } else {
        rowCount = -1;
    }
    publish(new NodeProgress(0.0, "Starting table sort..."));

    // Resolve the names of the sort columns; a negative index denotes the row key.
    final DataTableSpec spec = m_inputTable.getDataTableSpec();
    final Collection<String> sortColNames = new ArrayList<String>(2);
    for (final int colIndex : m_sortOrder.getSortColumnIndices()) {
        sortColNames.add(colIndex < 0
            ? DataTableSorter.ROWKEY_SORT_SPEC.getName()
            : spec.getColumnSpec(colIndex).getName());
    }

    final long startMillis = System.currentTimeMillis();
    LOGGER.debug("Starting interactive table sorting on column(s) " + sortColNames);
    final boolean[] sortOrders = m_sortOrder.getSortColumnOrder();
    // it DOES NOT respect blobs -- they will be copied (expensive)
    final DataTableSorter sorter = new DataTableSorter(m_inputTable, rowCount, sortColNames, sortOrders, false);

    // Forward the sorter's progress events to this worker's publish() channel.
    final NodeProgressListener progLis = new NodeProgressListener() {
        @Override
        public void progressChanged(final NodeProgressEvent pe) {
            publish(pe.getNodeProgress());
        }
    };
    m_nodeProgressMonitor = new DefaultNodeProgressMonitor();
    final ExecutionMonitor exec = new ExecutionMonitor(m_nodeProgressMonitor);
    m_nodeProgressMonitor.addProgressListener(progLis);
    try {
        final DataTable result = sorter.sort(exec);
        final String time = StringFormat.formatElapsedTime(System.currentTimeMillis() - startMillis);
        LOGGER.debug("Interactive table sorting finished (" + time + ")");
        return result;
    } finally {
        // always detach the listener, even when sorting fails or is canceled
        m_nodeProgressMonitor.removeProgressListener(progLis);
    }
}
Use of org.knime.core.node.workflow.NodeProgressEvent in project knime-core by KNIME.
From class CSVReaderNodeModel, method createFileTable:
/**
 * Builds a {@link FileTable} for the configured CSV location: translates the
 * node configuration into {@link FileReaderNodeSettings}, runs the file
 * analyzer (first half of the progress), validates the resulting settings,
 * and returns a table that reads lazily using the second half of the progress.
 *
 * @param exec the node's execution context, used for progress and cancellation
 * @return the file table backed by the analyzed settings
 * @throws Exception if the source is invalid, analysis fails, or the
 *         resulting settings contain errors
 */
protected FileTable createFileTable(final ExecutionContext exec) throws Exception {
    // prepare the settings for the file analyzer
    FileReaderNodeSettings settings = new FileReaderNodeSettings();
    CheckUtils.checkSourceFile(m_config.getLocation());
    URL url = FileUtil.toURL(m_config.getLocation());
    settings.setDataFileLocationAndUpdateTableName(url);
    // Each "...UserSet(true)" call below tells the analyzer NOT to guess
    // that aspect but to use the value taken from the node configuration.
    String colDel = m_config.getColDelimiter();
    if (colDel != null && !colDel.isEmpty()) {
        settings.addDelimiterPattern(colDel, false, false, false);
    }
    settings.setDelimiterUserSet(true);
    String rowDel = m_config.getRowDelimiter();
    if (rowDel != null && !rowDel.isEmpty()) {
        settings.addRowDelimiter(rowDel, true);
    }
    String quote = m_config.getQuoteString();
    if (quote != null && !quote.isEmpty()) {
        // same pattern for opening and closing quote
        settings.addQuotePattern(quote, quote);
    }
    settings.setQuoteUserSet(true);
    String commentStart = m_config.getCommentStart();
    if (commentStart != null && !commentStart.isEmpty()) {
        settings.addSingleLineCommentPattern(commentStart, false, false);
    }
    settings.setCommentUserSet(true);
    boolean hasColHeader = m_config.hasColHeader();
    settings.setFileHasColumnHeaders(hasColHeader);
    settings.setFileHasColumnHeadersUserSet(true);
    boolean hasRowHeader = m_config.hasRowHeader();
    settings.setFileHasRowHeaders(hasRowHeader);
    settings.setFileHasRowHeadersUserSet(true);
    settings.setWhiteSpaceUserSet(true);
    boolean supportShortLines = m_config.isSupportShortLines();
    settings.setSupportShortLines(supportShortLines);
    int skipFirstLinesCount = m_config.getSkipFirstLinesCount();
    settings.setSkipFirstLines(skipFirstLinesCount);
    final long limitRowsCount = m_config.getLimitRowsCount();
    settings.setMaximumNumberOfRowsToRead(limitRowsCount);
    settings.setCharsetName(m_config.getCharSetName());
    settings.setCharsetUserSet(true);
    settings.setConnectTimeout(m_config.getConnectTimeout());
    final int limitAnalysisCount = m_config.getLimitAnalysisCount();
    // split the node's progress: first half analysis, second half reading
    final ExecutionMonitor analyseExec = exec.createSubProgress(0.5);
    final ExecutionContext readExec = exec.createSubExecutionContext(0.5);
    exec.setMessage("Analyzing file");
    if (limitAnalysisCount >= 0) {
        // Analysis is limited to a number of lines: run it under a dedicated
        // monitor and bridge its progress/cancellation to analyseExec.
        final FileReaderExecutionMonitor fileReaderExec = new FileReaderExecutionMonitor();
        fileReaderExec.getProgressMonitor().addProgressListener(new NodeProgressListener() {
            @Override
            public void progressChanged(final NodeProgressEvent pe) {
                try {
                    // if the node was canceled, cancel (interrupt) the analysis
                    analyseExec.checkCanceled();
                    // otherwise update the node progress
                    NodeProgress nodeProgress = pe.getNodeProgress();
                    analyseExec.setProgress(nodeProgress.getProgress(), nodeProgress.getMessage());
                } catch (CanceledExecutionException e) {
                    fileReaderExec.setExecuteInterrupted();
                }
            }
        });
        fileReaderExec.setShortCutLines(limitAnalysisCount);
        // NOTE(review): setExecuteCanceled() before analyze() looks odd next
        // to setShortCutLines(); presumably it makes the analyzer stop early
        // at the shortcut limit -- confirm against FileReaderExecutionMonitor.
        fileReaderExec.setExecuteCanceled();
        settings = FileAnalyzer.analyze(settings, fileReaderExec);
    } else {
        // unlimited analysis: use the sub-progress monitor directly
        settings = FileAnalyzer.analyze(settings, analyseExec);
    }
    // fail fast on the first settings error reported by the analyzer
    SettingsStatus status = settings.getStatusOfSettings();
    if (status.getNumOfErrors() > 0) {
        throw new IllegalStateException(status.getErrorMessage(0));
    }
    final DataTableSpec tableSpec = settings.createDataTableSpec();
    if (tableSpec == null) {
        // no spec could be built -- ask for a detailed status to find out why
        final SettingsStatus status2 = settings.getStatusOfSettings(true, null);
        if (status2.getNumOfErrors() > 0) {
            throw new IllegalStateException(status2.getErrorMessage(0));
        } else {
            throw new IllegalStateException("Unknown error during file analysis.");
        }
    }
    exec.setMessage("Buffering file");
    return new FileTable(tableSpec, settings, readExec);
}
Aggregations