Search in sources:

Example 16 with DefaultHiLiteMapper

Example use of org.knime.core.node.property.hilite.DefaultHiLiteMapper in the knime-core project by KNIME.

Source: the saveInternals method of the EndcaseNodeModel class.

/**
 * {@inheritDoc}
 *
 * Persists the hilite translation map as a GZIP-compressed XML file in the
 * node's internals directory, but only when hiliting is enabled.
 *
 * @param nodeInternDir directory to write the internals file into
 * @param exec monitor for progress/cancellation (unused here)
 * @throws IOException if the mapping file cannot be written
 * @throws CanceledExecutionException declared by the framework contract
 */
@Override
protected void saveInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    if (m_enableHiliting) {
        final NodeSettings config = new NodeSettings("hilite_mapping");
        // Guard against a missing mapper (e.g. node not executed yet),
        // mirroring the null check in RowKeyNodeModel2.saveInternals;
        // an empty config is then written instead of throwing an NPE.
        final DefaultHiLiteMapper mapper = (DefaultHiLiteMapper) m_hiliteTranslator.getMapper();
        if (mapper != null) {
            mapper.save(config);
        }
        // try-with-resources releases the underlying file handle even if
        // saveToXML throws midway (closing twice is harmless).
        try (GZIPOutputStream out = new GZIPOutputStream(new FileOutputStream(new File(nodeInternDir, "hilite_mapping.xml.gz")))) {
            config.saveToXML(out);
        }
    }
}
Also used : NodeSettings(org.knime.core.node.NodeSettings) GZIPOutputStream(java.util.zip.GZIPOutputStream) FileOutputStream(java.io.FileOutputStream) DefaultHiLiteMapper(org.knime.core.node.property.hilite.DefaultHiLiteMapper) File(java.io.File)

Example 17 with DefaultHiLiteMapper

Example use of org.knime.core.node.property.hilite.DefaultHiLiteMapper in the knime-core project by KNIME.

Source: the execute method of the EndcaseNodeModel class.

/**
 * {@inheritDoc}
 *
 * Concatenates all connected, active input tables into one output table.
 * If every connected input is inactive, the inactive branch object from
 * port 0 is passed through unchanged. When hiliting is enabled, a
 * translation map from output row keys to original row keys is built.
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    Vector<BufferedDataTable> tables = new Vector<BufferedDataTable>();
    for (int i = 0; i < getNrInPorts(); i++) {
        if (inData[i] != null) {
            // if connected...
            if (!(inData[i] instanceof InactiveBranchPortObject)) {
                // ...and active, add it:
                tables.add((BufferedDataTable) inData[i]);
            }
        }
    }
    if (tables.size() == 0) {
        // No active inputs: pass the inactive branch object through.
        // (NOTE(review): original comment here was truncated — presumably
        // "at least port 0 must be connected".)
        assert inData[0] instanceof InactiveBranchPortObject;
        if (m_enableHiliting) {
            // create empty hilite translation map (so we correctly
            // handle the internals).
            Map<RowKey, Set<RowKey>> map = new HashMap<RowKey, Set<RowKey>>();
            m_hiliteTranslator.setMapper(new DefaultHiLiteMapper(map));
        }
        return new PortObject[] { inData[0] };
    }
    assert tables.size() > 0;
    // check compatibility of specs against first spec in list
    for (int i = 1; i < tables.size(); i++) {
        if (!(tables.get(0).getSpec().equalStructure(tables.get(i).getSpec()))) {
            // incompatible - refuse to execute
            throw new Exception("The data table structures of the active " + "ports are not compatible.");
        }
    }
    // Sum of all input row counts, used below only for progress reporting
    // by the appended-rows iterator.
    int totalRowCount = 0;
    DataTable[] dtables = new DataTable[tables.size()];
    int i = 0;
    for (BufferedDataTable t : tables) {
        totalRowCount += t.getRowCount();
        dtables[i] = t;
        i++;
    }
    // Concatenate the tables; duplicate row IDs are either disambiguated
    // with m_suffix (when m_isAppendSuffix) or skipped (suffix == null).
    AppendedRowsTable out = new AppendedRowsTable((m_isAppendSuffix ? m_suffix : null), dtables);
    // note, this iterator throws runtime exceptions when canceled.
    AppendedRowsIterator it = out.iterator(exec, totalRowCount);
    BufferedDataContainer c = exec.createDataContainer(out.getDataTableSpec());
    try {
        while (it.hasNext()) {
            // may throw exception, also sets progress
            c.addRowToTable(it.next());
        }
    } catch (RuntimeCanceledExecutionException rcee) {
        // unwrap to the checked CanceledExecutionException for the framework
        throw rcee.getCause();
    } finally {
        // always close the container, even on cancellation
        c.close();
    }
    if (it.getNrRowsSkipped() > 0) {
        setWarningMessage("Filtered out " + it.getNrRowsSkipped() + " duplicate row id(s).");
    }
    if (m_enableHiliting) {
        // create hilite translation map
        Map<RowKey, Set<RowKey>> map = new HashMap<RowKey, Set<RowKey>>();
        // map of all RowKeys and duplicate RowKeys in the resulting table
        Map<RowKey, RowKey> dupMap = it.getDuplicateNameMap();
        for (Map.Entry<RowKey, RowKey> e : dupMap.entrySet()) {
            // if a duplicate key
            if (!e.getKey().equals(e.getValue())) {
                Set<RowKey> set = Collections.singleton(e.getValue());
                // put duplicate key and original key into map
                map.put(e.getKey(), set);
            } else {
                // skip duplicate keys
                // NOTE(review): a key is mapped to itself only when no
                // suffixed variant of it exists in the output — presumably
                // to avoid mapping both the original and its renamed
                // duplicate; confirm against AppendedRowsIterator semantics.
                if (!dupMap.containsKey(new RowKey(e.getKey().getString() + m_suffix))) {
                    Set<RowKey> set = Collections.singleton(e.getValue());
                    map.put(e.getKey(), set);
                }
            }
        }
        m_hiliteTranslator.setMapper(new DefaultHiLiteMapper(map));
    }
    return new BufferedDataTable[] { c.getTable() };
}
Also used : DataTable(org.knime.core.data.DataTable) BufferedDataTable(org.knime.core.node.BufferedDataTable) InactiveBranchPortObject(org.knime.core.node.port.inactive.InactiveBranchPortObject) Set(java.util.Set) BufferedDataContainer(org.knime.core.node.BufferedDataContainer) RowKey(org.knime.core.data.RowKey) HashMap(java.util.HashMap) RuntimeCanceledExecutionException(org.knime.base.data.append.row.AppendedRowsIterator.RuntimeCanceledExecutionException) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) IOException(java.io.IOException) AppendedRowsIterator(org.knime.core.data.append.AppendedRowsIterator) AppendedRowsTable(org.knime.core.data.append.AppendedRowsTable) BufferedDataTable(org.knime.core.node.BufferedDataTable) RuntimeCanceledExecutionException(org.knime.base.data.append.row.AppendedRowsIterator.RuntimeCanceledExecutionException) Vector(java.util.Vector) DefaultHiLiteMapper(org.knime.core.node.property.hilite.DefaultHiLiteMapper) InactiveBranchPortObject(org.knime.core.node.port.inactive.InactiveBranchPortObject) PortObject(org.knime.core.node.port.PortObject) HashMap(java.util.HashMap) Map(java.util.Map)

Example 18 with DefaultHiLiteMapper

Example use of org.knime.core.node.property.hilite.DefaultHiLiteMapper in the knime-core project by KNIME.

Source: the saveInternals method of the RowKeyNodeModel2 class.

/**
 * {@inheritDoc}
 *
 * Persists the hilite translation map as an XML file in the node's
 * internals directory when hiliting is enabled. If no mapper is present
 * (e.g. node not executed yet), an empty settings object is written.
 *
 * @param nodeInternDir directory to write the internals file into
 * @param exec monitor for progress/cancellation (unused here)
 * @throws IOException if the mapping file cannot be written
 */
@Override
protected void saveInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException {
    if (m_enableHilite.getBooleanValue()) {
        final NodeSettings config = new NodeSettings("hilite_mapping");
        final DefaultHiLiteMapper mapper = (DefaultHiLiteMapper) m_hilite.getMapper();
        if (mapper != null) {
            mapper.save(config);
        }
        // try-with-resources releases the file handle deterministically,
        // even if saveToXML throws midway (closing twice is harmless).
        try (FileOutputStream out = new FileOutputStream(new File(nodeInternDir, INTERNALS_FILE_NAME))) {
            config.saveToXML(out);
        }
    }
}
Also used : NodeSettings(org.knime.core.node.NodeSettings) FileOutputStream(java.io.FileOutputStream) DefaultHiLiteMapper(org.knime.core.node.property.hilite.DefaultHiLiteMapper) File(java.io.File)

Example 19 with DefaultHiLiteMapper

Example use of org.knime.core.node.property.hilite.DefaultHiLiteMapper in the knime-core project by KNIME.

Source: the execute method of the PivotNodeModel class.

/**
 * {@inheritDoc}
 *
 * Computes a pivot table in two passes: pass one scans the input and
 * aggregates values per (group value, pivot value) pair; pass two emits
 * one output row per group value with one column per pivot value. When
 * hiliting is enabled, a mapping from each group's output row key to the
 * contributing input row keys is recorded.
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    final DataTableSpec inspec = inData[0].getDataTableSpec();
    final int group = inspec.findColumnIndex(m_group.getStringValue());
    final int pivot = inspec.findColumnIndex(m_pivot.getStringValue());
    // -1 when plain counting is selected, i.e. no aggregation column is used
    final int aggre = (m_makeAgg.getStringValue().equals(PivotNodeDialogPane.MAKE_AGGREGATION[1]) ? inspec.findColumnIndex(m_agg.getStringValue()) : -1);
    PivotAggregationMethod aggMethod;
    if (aggre < 0) {
        aggMethod = PivotAggregationMethod.COUNT;
    } else {
        aggMethod = PivotAggregationMethod.METHODS.get(m_aggMethod.getStringValue());
    }
    // pair contains group and pivot plus the aggregation value
    final Map<Pair<String, String>, Double[]> map = new LinkedHashMap<Pair<String, String>, Double[]>();
    // list of pivot values
    final Set<String> pivotList = new LinkedHashSet<String>();
    final DataColumnSpec pivotSpec = inspec.getColumnSpec(pivot);
    // Pre-seed the pivot values from the column's domain (if available) so
    // the output column order follows the domain, not row encounter order.
    if (pivotSpec.getDomain().hasValues()) {
        for (DataCell domValue : pivotSpec.getDomain().getValues()) {
            pivotList.add(domValue.toString());
        }
    }
    // list of group values
    final Set<String> groupList = new LinkedHashSet<String>();
    final LinkedHashMap<RowKey, Set<RowKey>> mapping = new LinkedHashMap<RowKey, Set<RowKey>>();
    final double nrRows = inData[0].getRowCount();
    int rowCnt = 0;
    ExecutionContext subExec = exec.createSubExecutionContext(0.75);
    // final all group, pivot pair and aggregate the values of each group
    for (final DataRow row : inData[0]) {
        subExec.checkCanceled();
        subExec.setProgress(++rowCnt / nrRows, "Aggregating row: \"" + row.getKey().getString() + "\" (" + rowCnt + "\\" + (int) nrRows + ")");
        final String groupString = row.getCell(group).toString();
        groupList.add(groupString);
        final DataCell pivotCell = row.getCell(pivot);
        // if missing values should be ignored
        // NOTE(review): the row's group value has already been added to
        // groupList above, so an ignored row can still create an output
        // row for its group — confirm this is intended.
        if (pivotCell.isMissing()) {
            if (m_ignoreMissValues.getBooleanValue()) {
                continue;
            }
        }
        final String pivotString = pivotCell.toString();
        pivotList.add(pivotString);
        final Pair<String, String> pair = new Pair<String, String>(groupString, pivotString);
        Double[] aggValue = map.get(pair);
        if (aggValue == null) {
            // first row for this (group, pivot) pair: start a fresh accumulator
            aggValue = aggMethod.init();
            map.put(pair, aggValue);
        }
        if (aggre < 0) {
            aggMethod.compute(aggValue, null);
        } else {
            final DataCell value = row.getCell(aggre);
            aggMethod.compute(aggValue, value);
        }
        if (m_hiliting.getBooleanValue()) {
            // remember which input rows contribute to this group's output row
            final RowKey groupKey = new RowKey(groupString);
            Set<RowKey> set = mapping.get(groupKey);
            if (set == null) {
                set = new LinkedHashSet<RowKey>();
                mapping.put(groupKey, set);
            }
            set.add(row.getKey());
        }
    }
    final DataTableSpec outspec = initSpec(pivotList);
    // will contain the final pivoting table
    final BufferedDataContainer buf = exec.createDataContainer(outspec);
    final double nrElements = groupList.size();
    int elementCnt = 0;
    subExec = exec.createSubExecutionContext(0.25);
    for (final String groupString : groupList) {
        subExec.checkCanceled();
        subExec.setProgress(++elementCnt / nrElements, "Computing aggregation of group \"" + groupString + "\" (" + elementCnt + "\\" + (int) nrElements + ")");
        // contains the aggregated values
        final DataCell[] aggValues = new DataCell[pivotList.size()];
        // pivot index
        int idx = 0;
        for (final String pivotString : pivotList) {
            final Pair<String, String> newPair = new Pair<String, String>(groupString, pivotString);
            // aggValue may be null when no row had this (group, pivot)
            // combination; done(...) is expected to handle that case
            final Double[] aggValue = map.get(newPair);
            aggValues[idx] = aggMethod.done(aggValue);
            idx++;
        }
        // create new row with the given group id and aggregation values
        buf.addRowToTable(new DefaultRow(groupString, aggValues));
    }
    buf.close();
    if (m_hiliting.getBooleanValue()) {
        m_translator.setMapper(new DefaultHiLiteMapper(mapping));
    }
    return new BufferedDataTable[] { buf.getTable() };
}
Also used : LinkedHashSet(java.util.LinkedHashSet) DataTableSpec(org.knime.core.data.DataTableSpec) LinkedHashSet(java.util.LinkedHashSet) Set(java.util.Set) RowKey(org.knime.core.data.RowKey) SettingsModelString(org.knime.core.node.defaultnodesettings.SettingsModelString) DataRow(org.knime.core.data.DataRow) LinkedHashMap(java.util.LinkedHashMap) DataColumnSpec(org.knime.core.data.DataColumnSpec) BufferedDataTable(org.knime.core.node.BufferedDataTable) DefaultHiLiteMapper(org.knime.core.node.property.hilite.DefaultHiLiteMapper) Pair(org.knime.core.util.Pair) BufferedDataContainer(org.knime.core.node.BufferedDataContainer) ExecutionContext(org.knime.core.node.ExecutionContext) DataCell(org.knime.core.data.DataCell) DefaultRow(org.knime.core.data.def.DefaultRow)

Example 20 with DefaultHiLiteMapper

Example use of org.knime.core.node.property.hilite.DefaultHiLiteMapper in the knime-core project by KNIME.

Source: the saveInternals method of the UnpivotNodeModel class.

/**
 * {@inheritDoc}
 *
 * Persists the hilite translation map as a GZIP-compressed XML file in the
 * node's internals directory, but only when hiliting is enabled.
 *
 * @param nodeInternDir directory to write the internals file into
 * @param exec monitor for progress/cancellation (unused here)
 * @throws IOException if the mapping file cannot be written
 * @throws CanceledExecutionException declared by the framework contract
 */
@Override
protected void saveInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    if (m_enableHilite.getBooleanValue()) {
        final NodeSettings config = new NodeSettings("hilite_mapping");
        // Guard against a missing mapper (e.g. node not executed yet),
        // mirroring the null check in RowKeyNodeModel2.saveInternals;
        // an empty config is then written instead of throwing an NPE.
        final DefaultHiLiteMapper mapper = (DefaultHiLiteMapper) m_trans.getMapper();
        if (mapper != null) {
            mapper.save(config);
        }
        // try-with-resources releases the underlying file handle even if
        // saveToXML throws midway (closing twice is harmless).
        try (GZIPOutputStream out = new GZIPOutputStream(new FileOutputStream(new File(nodeInternDir, "hilite_mapping.xml.gz")))) {
            config.saveToXML(out);
        }
    }
}
Also used : NodeSettings(org.knime.core.node.NodeSettings) GZIPOutputStream(java.util.zip.GZIPOutputStream) FileOutputStream(java.io.FileOutputStream) DefaultHiLiteMapper(org.knime.core.node.property.hilite.DefaultHiLiteMapper) File(java.io.File)

Aggregations

DefaultHiLiteMapper (org.knime.core.node.property.hilite.DefaultHiLiteMapper)35 File (java.io.File)16 FileOutputStream (java.io.FileOutputStream)16 NodeSettings (org.knime.core.node.NodeSettings)16 Set (java.util.Set)12 RowKey (org.knime.core.data.RowKey)12 BufferedDataTable (org.knime.core.node.BufferedDataTable)12 GZIPOutputStream (java.util.zip.GZIPOutputStream)9 DataRow (org.knime.core.data.DataRow)8 DataTableSpec (org.knime.core.data.DataTableSpec)8 DataCell (org.knime.core.data.DataCell)7 DefaultRow (org.knime.core.data.def.DefaultRow)7 BufferedDataContainer (org.knime.core.node.BufferedDataContainer)7 HashMap (java.util.HashMap)6 LinkedHashSet (java.util.LinkedHashSet)6 InvalidSettingsException (org.knime.core.node.InvalidSettingsException)6 LinkedHashMap (java.util.LinkedHashMap)5 IOException (java.io.IOException)4 HashSet (java.util.HashSet)4 DataColumnSpec (org.knime.core.data.DataColumnSpec)4