Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.
The class TransposeTableNodeModel, method execute.
/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws CanceledExecutionException, Exception {
    // input spec; its column names become the row IDs of the output table
    DataTableSpec spec = inData[0].getDataTableSpec();
    // empty input table: only the column headers are used as row IDs
    if (inData[0].getRowCount() == 0) {
        BufferedDataContainer cont = exec.createDataContainer(new DataTableSpec());
        for (int i = 0; i < spec.getNumColumns(); i++) {
            String colName = spec.getColumnSpec(i).getName();
            cont.addRowToTable(new DefaultRow(colName, new DataCell[0]));
        }
        cont.close();
        return new BufferedDataTable[] { cont.getTable() };
    }
    // new number of columns = number of rows
    CheckUtils.checkState(inData[0].size() <= Integer.MAX_VALUE, "Transpose operation can't handle more rows than " + Integer.MAX_VALUE);
    final int newNrCols = (int) inData[0].size();
    // new column names
    final ArrayList<String> colNames = new ArrayList<String>();
    // new column types
    final ArrayList<DataType> colTypes = new ArrayList<DataType>();
    // over entire table
    // index for unique colNames if row id only contains whitespace
    int idx = 0;
    for (DataRow row : inData[0]) {
        exec.checkCanceled();
        exec.setMessage("Determine most-general column type for row: " + row.getKey().getString());
        DataType type = null;
        // and all cells
        for (int i = 0; i < row.getNumCells(); i++) {
            DataType newType = row.getCell(i).getType();
            if (type == null) {
                type = newType;
            } else {
                type = DataType.getCommonSuperType(type, newType);
            }
        }
        if (type == null) {
            type = DataType.getType(DataCell.class);
        }
        String colName = row.getKey().getString().trim();
        if (colName.isEmpty()) {
            colName = "<empty_" + idx + ">";
            idx++;
        }
        colNames.add(colName);
        colTypes.add(type);
    }
    // new number of rows
    int newNrRows = spec.getNumColumns();
    // create new specs
    final DataColumnSpec[] colSpecs = new DataColumnSpec[newNrCols];
    for (int c = 0; c < newNrCols; c++) {
        colSpecs[c] = new DataColumnSpecCreator(colNames.get(c), colTypes.get(c)).createSpec();
        exec.checkCanceled();
    }
    BufferedDataContainer cont = exec.createDataContainer(new DataTableSpec(colSpecs));
    final int chunkSize = m_chunkSize.getIntValue();
    // total number of chunks
    final double nrChunks = Math.ceil((double) newNrRows / chunkSize);
    // each pass over the input fills at most chunkSize output rows, so the map
    // below never holds more than chunkSize * newNrCols cells at a time
    for (int chunkIdx = 0; chunkIdx < nrChunks; chunkIdx++) {
        // map of new row keys to cell arrays
        Map<String, DataCell[]> map = new LinkedHashMap<String, DataCell[]>(newNrRows);
        int rowIdx = 0;
        for (DataRow row : inData[0]) {
            exec.setProgress(((rowIdx + 1) * (chunkIdx + 1)) / (nrChunks * newNrCols), "Transpose row \"" + row.getKey().getString() + "\" to column.");
            int colIdx = chunkIdx * chunkSize;
            // iterate chunk of columns
            for (int r = colIdx; r < Math.min(newNrRows, colIdx + chunkSize); r++) {
                String newRowKey = spec.getColumnSpec(r).getName();
                DataCell[] cellArray = map.get(newRowKey);
                if (cellArray == null) {
                    cellArray = new DataCell[newNrCols];
                    map.put(newRowKey, cellArray);
                }
                cellArray[rowIdx] = row.getCell(r);
            }
            try {
                exec.checkCanceled();
            } catch (CanceledExecutionException cee) {
                // close the open container before re-throwing the cancellation
                cont.close();
                throw cee;
            }
            rowIdx++;
        }
        // add chunk of rows to buffer
        for (Map.Entry<String, DataCell[]> e : map.entrySet()) {
            exec.setMessage("Adding row \"" + e.getKey() + "\" to table.");
            DataRow row = new DefaultRow(e.getKey(), e.getValue());
            cont.addRowToTable(row);
        }
    }
    exec.setProgress(1.0, "Finished, closing buffer...");
    cont.close();
    return new BufferedDataTable[] { cont.getTable() };
}
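The per-row exec.checkCanceled() calls above are the standard way a KNIME loop stays responsive to a user's cancel request, and the catch block shows the accompanying cleanup duty: close the open container before re-throwing. A minimal sketch of that pattern, reduced to a row-copying helper (the helper itself is illustrative, not part of the KNIME API):

import org.knime.core.data.DataRow;
import org.knime.core.node.BufferedDataContainer;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionContext;

/** Illustrative helper: copies a table row by row while honoring cancellation. */
private static BufferedDataTable copyWithCancelSupport(final BufferedDataTable in, final ExecutionContext exec) throws CanceledExecutionException {
    BufferedDataContainer cont = exec.createDataContainer(in.getDataTableSpec());
    long done = 0;
    final double total = in.size();
    try {
        for (DataRow row : in) {
            // throws CanceledExecutionException as soon as the user hits cancel
            exec.checkCanceled();
            exec.setProgress(++done / total, "Copied row \"" + row.getKey() + "\"");
            cont.addRowToTable(row);
        }
    } finally {
        // close the container on both the normal and the canceled path
        cont.close();
    }
    return cont.getTable();
}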
Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.
The class PieNodeModel, method saveInternals.
/**
 * {@inheritDoc}
 */
@Override
protected void saveInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    try {
        final File dataDir = new File(nodeInternDir, CFG_DATA_DIR_NAME);
        if (!dataDir.mkdir()) {
            throw new Exception("Unable to create internal data directory");
        }
        savePieInternals(dataDir, exec);
    } catch (final CanceledExecutionException e) {
        throw e;
    } catch (final Exception e) {
        LOGGER.warn("Error while saving internals: " + e.getMessage());
        throw new IOException(e);
    }
}
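Note the ordering of the catch clauses: CanceledExecutionException is re-thrown untouched so the framework can distinguish a user cancel from a genuine failure, while everything else is logged and wrapped in the IOException the method is declared to throw. The idiom in isolation (doSave() is a hypothetical stand-in for the actual work):

try {
    doSave(); // may throw CanceledExecutionException or any other exception
} catch (final CanceledExecutionException e) {
    // a cancel is not an error: propagate unchanged
    throw e;
} catch (final Exception e) {
    // everything else becomes the declared IOException
    throw new IOException(e);
}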
Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.
The class AccuracyScorerNodeModel, method execute.
/**
 * Starts the scoring in the scorer.
 *
 * @param data the input data of length one
 * @param exec the execution monitor
 * @return the confusion matrix
 * @throws CanceledExecutionException if the user canceled the execution
 *
 * @see NodeModel#execute(BufferedDataTable[], ExecutionContext)
 */
@SuppressWarnings("unchecked")
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] data, final ExecutionContext exec) throws CanceledExecutionException {
    // check input data
    assert (data != null && data.length == 1 && data[INPORT] != null);
    // blow away result from last execute (should have been reset anyway)
    // first try to figure out what the different class values
    // in the two respective columns are
    BufferedDataTable in = data[INPORT];
    DataTableSpec inSpec = in.getDataTableSpec();
    final int index1 = inSpec.findColumnIndex(m_firstCompareColumn);
    final int index2 = inSpec.findColumnIndex(m_secondCompareColumn);
    // the values serve both as the column names and the row names of the
    // confusion matrix; they are ordered already, i.e. if both columns have
    // cells in common (e.g. both contain Iris-Setosa), the cell gets the same
    // index in the array. Thus, the high numbers should appear on the diagonal.
    DataCell[] values = determineColValues(in, index1, index2, exec.createSubProgress(0.5));
    List<DataCell> valuesList = Arrays.asList(values);
    Set<DataCell> valuesInCol2 = new HashSet<DataCell>();
    // the key store remembers the row key for later hiliting
    List<RowKey>[][] keyStore = new List[values.length][values.length];
    // the scorerCount counts the confusions
    int[][] scorerCount = new int[values.length][values.length];
    // init the matrix
    for (int i = 0; i < keyStore.length; i++) {
        for (int j = 0; j < keyStore[i].length; j++) {
            keyStore[i][j] = new ArrayList<RowKey>();
        }
    }
    long rowCnt = in.size();
    int numberOfRows = 0;
    int correctCount = 0;
    int falseCount = 0;
    int missingCount = 0;
    ExecutionMonitor subExec = exec.createSubProgress(0.5);
    for (Iterator<DataRow> it = in.iterator(); it.hasNext(); numberOfRows++) {
        DataRow row = it.next();
        subExec.setProgress((1.0 + numberOfRows) / rowCnt, "Computing score, row " + numberOfRows + " (\"" + row.getKey() + "\") of " + in.size());
        try {
            subExec.checkCanceled();
        } catch (CanceledExecutionException cee) {
            reset();
            throw cee;
        }
        DataCell cell1 = row.getCell(index1);
        DataCell cell2 = row.getCell(index2);
        valuesInCol2.add(cell2);
        if (cell1.isMissing() || cell2.isMissing()) {
            ++missingCount;
            CheckUtils.checkState(m_ignoreMissingValues, "Missing value in row: " + row.getKey());
            if (m_ignoreMissingValues) {
                continue;
            }
        }
        boolean areEqual = cell1.equals(cell2);
        int i1 = valuesList.indexOf(cell1);
        int i2 = areEqual ? i1 : valuesList.indexOf(cell2);
        assert i1 >= 0 : "column spec lacks possible value " + cell1;
        assert i2 >= 0 : "column spec lacks possible value " + cell2;
        // i2 must be equal to i1 if cells are equal (implication)
        assert (!areEqual || i1 == valuesList.indexOf(cell2));
        keyStore[i1][i2].add(row.getKey());
        scorerCount[i1][i2]++;
        if (areEqual) {
            correctCount++;
        } else {
            falseCount++;
        }
    }
    HashSet<String> valuesAsStringSet = new HashSet<String>();
    HashSet<String> duplicateValuesAsString = new HashSet<String>();
    for (DataCell c : values) {
        valuesAsStringSet.add(c.toString());
    }
    for (DataCell c : values) {
        String cAsString = c.toString();
        if (!valuesAsStringSet.remove(cAsString)) {
            duplicateValuesAsString.add(cAsString);
        }
    }
    boolean hasPrintedWarningOnAmbiguousValues = false;
    String[] targetValues = new String[values.length];
    for (int i = 0; i < targetValues.length; i++) {
        DataCell c = values[i];
        String s = c.toString();
        if (duplicateValuesAsString.contains(s)) {
            boolean isInSecondColumn = valuesInCol2.contains(c);
            int uniquifier = 1;
            if (isInSecondColumn) {
                s = s.concat(" (" + m_secondCompareColumn + ")");
            } else {
                s = s.concat(" (" + m_firstCompareColumn + ")");
            }
            String newName = s;
            while (!valuesAsStringSet.add(newName)) {
                newName = s + "#" + (uniquifier++);
            }
            targetValues[i] = newName;
            if (!hasPrintedWarningOnAmbiguousValues) {
                hasPrintedWarningOnAmbiguousValues = true;
                addWarning("Ambiguous value \"" + c.toString() + "\" encountered. Preserving individual instances;" + " consider to convert input columns to string");
            }
        } else {
            int uniquifier = 1;
            String newName = s;
            while (!valuesAsStringSet.add(newName)) {
                newName = s + "#" + (uniquifier++);
            }
            targetValues[i] = newName;
        }
    }
    if (missingCount > 0) {
        addWarning("There were missing values in the reference or in the prediction class columns.");
    }
    DataType[] colTypes = new DataType[targetValues.length];
    Arrays.fill(colTypes, IntCell.TYPE);
    BufferedDataContainer container = exec.createDataContainer(new DataTableSpec(targetValues, colTypes));
    for (int i = 0; i < targetValues.length; i++) {
        // need to make a datacell for the row key
        container.addRowToTable(new DefaultRow(targetValues[i], scorerCount[i]));
    }
    container.close();
    ScorerViewData viewData = new ScorerViewData(scorerCount, numberOfRows, falseCount, correctCount, m_firstCompareColumn, m_secondCompareColumn, targetValues, keyStore);
    // print info
    int missing = numberOfRows - correctCount - falseCount;
    LOGGER.info("error=" + viewData.getError() + ", #correct=" + viewData.getCorrectCount() + ", #false=" + viewData.getFalseCount() + ", #rows=" + numberOfRows + ", #missing=" + missing);
    // our view displays the table - we must keep a reference in the model.
    BufferedDataTable result = container.getTable();
    // start creating accuracy statistics
    BufferedDataContainer accTable = exec.createDataContainer(new DataTableSpec(QUALITY_MEASURES_SPECS));
    for (int r = 0; r < targetValues.length; r++) {
        // true positives
        int tp = viewData.getTP(r);
        // false positives
        int fp = viewData.getFP(r);
        // true negatives
        int tn = viewData.getTN(r);
        // false negatives
        int fn = viewData.getFN(r);
        // TP / (TP + FN)
        final DataCell sensitivity;
        // TP / (TP + FN)
        DoubleCell recall = null;
        if (tp + fn > 0) {
            recall = new DoubleCell(1.0 * tp / (tp + fn));
            sensitivity = new DoubleCell(1.0 * tp / (tp + fn));
        } else {
            sensitivity = DataType.getMissingCell();
        }
        // TP / (TP + FP)
        DoubleCell prec = null;
        if (tp + fp > 0) {
            prec = new DoubleCell(1.0 * tp / (tp + fp));
        }
        // TN / (TN + FP)
        final DataCell specificity;
        if (tn + fp > 0) {
            specificity = new DoubleCell(1.0 * tn / (tn + fp));
        } else {
            specificity = DataType.getMissingCell();
        }
        // 2 * Prec. * Recall / (Prec. + Recall)
        final DataCell fmeasure;
        if (recall != null && prec != null) {
            fmeasure = new DoubleCell(2.0 * prec.getDoubleValue() * recall.getDoubleValue() / (prec.getDoubleValue() + recall.getDoubleValue()));
        } else {
            fmeasure = DataType.getMissingCell();
        }
        // add complete row for class value to table
        DataRow row = new DefaultRow(new RowKey(targetValues[r]), new DataCell[] {
            new IntCell(tp), new IntCell(fp), new IntCell(tn), new IntCell(fn),
            recall == null ? DataType.getMissingCell() : recall,
            prec == null ? DataType.getMissingCell() : prec,
            sensitivity, specificity, fmeasure,
            DataType.getMissingCell(), DataType.getMissingCell() });
        accTable.addRowToTable(row);
    }
    List<String> classIds = Arrays.asList(targetValues);
    RowKey overallID = new RowKey("Overall");
    int uniquifier = 1;
    while (classIds.contains(overallID.getString())) {
        overallID = new RowKey("Overall (#" + (uniquifier++) + ")");
    }
    // append additional row for overall accuracy
    accTable.addRowToTable(new DefaultRow(overallID, new DataCell[] {
        DataType.getMissingCell(), DataType.getMissingCell(), DataType.getMissingCell(),
        DataType.getMissingCell(), DataType.getMissingCell(), DataType.getMissingCell(),
        DataType.getMissingCell(), DataType.getMissingCell(), DataType.getMissingCell(),
        new DoubleCell(viewData.getAccuracy()), new DoubleCell(viewData.getCohenKappa()) }));
    accTable.close();
    m_viewData = viewData;
    pushFlowVars(false);
    return new BufferedDataTable[] { result, accTable.getTable() };
}
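For reference, the per-class measures assembled above are recall = sensitivity = TP / (TP + FN), precision = TP / (TP + FP), specificity = TN / (TN + FP), and F-measure = 2 * precision * recall / (precision + recall). A standalone sketch without KNIME types, using NaN where the node would emit a missing cell (method names are illustrative):

// Plain-Java versions of the quality measures; NaN plays the role of
// KNIME's missing cell whenever a denominator would be zero.
static double recall(final int tp, final int fn) {
    return tp + fn > 0 ? (double) tp / (tp + fn) : Double.NaN;
}

static double precision(final int tp, final int fp) {
    return tp + fp > 0 ? (double) tp / (tp + fp) : Double.NaN;
}

static double specificity(final int tn, final int fp) {
    return tn + fp > 0 ? (double) tn / (tn + fp) : Double.NaN;
}

static double fMeasure(final int tp, final int fp, final int fn) {
    final double p = precision(tp, fp);
    final double r = recall(tp, fn);
    return Double.isNaN(p) || Double.isNaN(r) ? Double.NaN : 2.0 * p * r / (p + r);
}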
Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.
The class DecisionTreeLearnerNodeModel2, method loadInternals.
/**
 * {@inheritDoc}
 */
@Override
protected void loadInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    File internalsFile = new File(nodeInternDir, SAVE_INTERNALS_FILE_NAME);
    if (!internalsFile.exists()) {
        // file to load internals from not available
        return;
    }
    BufferedInputStream in = new BufferedInputStream(new GZIPInputStream(new FileInputStream(internalsFile)));
    ModelContentRO decisionTreeModel = ModelContent.loadFromXML(in);
    try {
        m_decisionTree = new DecisionTree(decisionTreeModel);
    } catch (Exception e) {
        // continue, but inform the user via a message
        setWarningMessage("Internal model could not be loaded: " + e.getMessage() + ". The view will not display properly.");
    }
}
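The matching saveInternals (not shown on this page) would typically mirror this load path: serialize the tree into a ModelContent and write it as GZIP-compressed XML. A hedged sketch only; the save(...) call on the tree is a placeholder for whatever serialization method the model class actually offers:

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.util.zip.GZIPOutputStream;
import org.knime.core.node.ModelContent;

@Override
protected void saveInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    if (m_decisionTree == null) {
        // nothing to save
        return;
    }
    ModelContent model = new ModelContent(SAVE_INTERNALS_FILE_NAME);
    // placeholder: substitute the decision tree's actual serialization method
    m_decisionTree.save(model);
    File internalsFile = new File(nodeInternDir, SAVE_INTERNALS_FILE_NAME);
    model.saveToXML(new BufferedOutputStream(new GZIPOutputStream(new FileOutputStream(internalsFile))));
}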
Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.
The class WorkflowManager, method checkUpdateMetaNodeLinkWithCache.
/**
 * Implementation of #checkUpdateMetaNodeLink that uses a cache of already checked metanode links.
 *
 * @param id The ID of the node to check
 * @param loadHelper The load helper passed on when loading the template
 * @param loadResult Errors while loading the template are added here
 * @param visitedTemplateMap avoids repeated checks for copies of the same metanode link
 * @param recurseInto whether linked metanodes contained in the metanode should also be checked
 * @return true if any of the checked links has an update available
 * @throws IOException if the template cannot be loaded or loading is canceled
 */
private boolean checkUpdateMetaNodeLinkWithCache(final NodeID id, final WorkflowLoadHelper loadHelper, final LoadResult loadResult, final Map<URI, NodeContainerTemplate> visitedTemplateMap, final boolean recurseInto) throws IOException {
    NodeContainer nc = m_workflow.getNode(id);
    if (!(nc instanceof NodeContainerTemplate)) {
        return false;
    }
    NodeContainerTemplate tnc = (NodeContainerTemplate) nc;
    Map<NodeID, NodeContainerTemplate> idsToCheck = new LinkedHashMap<NodeID, NodeContainerTemplate>();
    if (tnc.getTemplateInformation().getRole().equals(Role.Link)) {
        idsToCheck.put(id, tnc);
    }
    if (recurseInto) {
        idsToCheck = tnc.fillLinkedTemplateNodesList(idsToCheck, true, false);
    }
    boolean hasUpdate = false;
    for (NodeContainerTemplate linkedMeta : idsToCheck.values()) {
        MetaNodeTemplateInformation linkInfo = linkedMeta.getTemplateInformation();
        final URI uri = linkInfo.getSourceURI();
        NodeContainerTemplate tempLink = visitedTemplateMap.get(uri);
        if (tempLink == null) {
            try {
                final LoadResult templateLoadResult = new LoadResult("Template to " + uri);
                tempLink = loadMetaNodeTemplate(linkedMeta, loadHelper, templateLoadResult);
                loadResult.addChildError(templateLoadResult);
                visitedTemplateMap.put(uri, tempLink);
            } catch (Exception e) {
                if (linkInfo.setUpdateStatusInternal(UpdateStatus.Error)) {
                    linkedMeta.notifyTemplateConnectionChangedListener();
                }
                if (e instanceof IOException) {
                    throw new IOException("Could not update metanode '" + tnc + "': " + e.getMessage(), e);
                } else if (e instanceof CanceledExecutionException) {
                    throw new IOException("Canceled while loading from template", e);
                } else if (e instanceof RuntimeException) {
                    throw (RuntimeException) e;
                } else {
                    throw new RuntimeException(e);
                }
            }
        }
        boolean hasThisOneAnUpdate = tempLink.getTemplateInformation().isNewerThan(linkInfo);
        UpdateStatus updateStatus = hasThisOneAnUpdate ? UpdateStatus.HasUpdate : UpdateStatus.UpToDate;
        hasUpdate = hasUpdate || hasThisOneAnUpdate;
        if (linkInfo.setUpdateStatusInternal(updateStatus)) {
            linkedMeta.notifyTemplateConnectionChangedListener();
        }
    }
    return hasUpdate;
}
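Because the method only declares IOException, every failure from the template load is funneled into that contract: IOExceptions are re-wrapped with context, a CanceledExecutionException becomes an IOException, RuntimeExceptions pass through unchanged, and remaining checked exceptions are wrapped unchecked. The translation step in isolation (the method name is illustrative):

import java.io.IOException;
import org.knime.core.node.CanceledExecutionException;

/** Illustrative: maps an arbitrary failure onto an IOException-only contract. */
static void rethrowAsIOException(final Exception e) throws IOException {
    if (e instanceof IOException) {
        throw (IOException) e;
    } else if (e instanceof CanceledExecutionException) {
        // a user cancel during template loading surfaces as an I/O failure here
        throw new IOException("Canceled while loading from template", e);
    } else if (e instanceof RuntimeException) {
        throw (RuntimeException) e;
    } else {
        throw new RuntimeException(e);
    }
}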