Search in sources :

Example 1 with AppendedRowsTable

use of org.knime.core.data.append.AppendedRowsTable in project knime-core by knime.

The following example is taken from the class EndifNodeModel, method execute().

/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] rawInData, final ExecutionContext exec) throws Exception {
    if (m_enableHiliting) {
        // Reset the hilite mapper to an empty translation so the internals stay
        // consistent even if we return early with an inactive-branch object.
        m_hiliteTranslator.setMapper(new DefaultHiLiteMapper(new HashMap<RowKey, Set<RowKey>>()));
    }
    // If one branch is inactive, pass the other input through unchanged.
    if (rawInData[0] instanceof InactiveBranchPortObject) {
        return new PortObject[] { rawInData[1] };
    }
    if (rawInData[1] instanceof InactiveBranchPortObject) {
        return new PortObject[] { rawInData[0] };
    }
    // Both branches are active; their specs must be (structurally equal) table specs.
    final DataTableSpec firstSpec = (DataTableSpec) rawInData[0].getSpec();
    final DataTableSpec secondSpec = (DataTableSpec) rawInData[1].getSpec();
    if (!firstSpec.equalStructure(secondSpec)) {
        throw new Exception("Both input ports have data but the tables " + "have incompatible specs");
    }
    final BufferedDataTable firstTable = (BufferedDataTable) rawInData[0];
    final BufferedDataTable secondTable = (BufferedDataTable) rawInData[1];
    final int totalRowCount = firstTable.getRowCount() + secondTable.getRowCount();
    final AppendedRowsTable concatenated = new AppendedRowsTable((m_isAppendSuffix ? m_suffix : null),
        new BufferedDataTable[] { firstTable, secondTable });
    // Note: this iterator reports progress on exec and throws a runtime
    // wrapper exception when the execution is canceled.
    final AppendedRowsIterator rowIt = concatenated.iterator(exec, totalRowCount);
    final BufferedDataContainer container = exec.createDataContainer(concatenated.getDataTableSpec());
    try {
        while (rowIt.hasNext()) {
            // may throw the cancellation wrapper; also sets progress
            container.addRowToTable(rowIt.next());
        }
    } catch (AppendedRowsIterator.RuntimeCanceledExecutionException rcee) {
        // Unwrap to the checked cancellation exception the framework expects.
        throw rcee.getCause();
    } finally {
        container.close();
    }
    if (rowIt.getNrRowsSkipped() > 0) {
        setWarningMessage("Filtered out " + rowIt.getNrRowsSkipped() + " duplicate row id(s).");
    }
    if (m_enableHiliting) {
        // Build the hilite translation map from the iterator's duplicate-name
        // map (output key -> original key). An entry is added when the key was
        // renamed, or when it is unchanged and no suffixed sibling exists.
        final Map<RowKey, Set<RowKey>> translation = new HashMap<RowKey, Set<RowKey>>();
        final Map<RowKey, RowKey> duplicates = rowIt.getDuplicateNameMap();
        for (Map.Entry<RowKey, RowKey> entry : duplicates.entrySet()) {
            final RowKey outputKey = entry.getKey();
            final RowKey originalKey = entry.getValue();
            if (!outputKey.equals(originalKey)
                    || !duplicates.containsKey(new RowKey(outputKey.getString() + m_suffix))) {
                translation.put(outputKey, Collections.singleton(originalKey));
            }
        }
        m_hiliteTranslator.setMapper(new DefaultHiLiteMapper(translation));
    }
    return new BufferedDataTable[] { container.getTable() };
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) Set(java.util.Set) InactiveBranchPortObject(org.knime.core.node.port.inactive.InactiveBranchPortObject) BufferedDataContainer(org.knime.core.node.BufferedDataContainer) RowKey(org.knime.core.data.RowKey) HashMap(java.util.HashMap) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) IOException(java.io.IOException) AppendedRowsIterator(org.knime.core.data.append.AppendedRowsIterator) AppendedRowsTable(org.knime.core.data.append.AppendedRowsTable) BufferedDataTable(org.knime.core.node.BufferedDataTable) DefaultHiLiteMapper(org.knime.core.node.property.hilite.DefaultHiLiteMapper) InactiveBranchPortObject(org.knime.core.node.port.inactive.InactiveBranchPortObject) PortObject(org.knime.core.node.port.PortObject) HashMap(java.util.HashMap) Map(java.util.Map)

Example 2 with AppendedRowsTable

use of org.knime.core.data.append.AppendedRowsTable in project knime-core by knime.

The following example is taken from the class EndcaseNodeModel, method execute().

/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    // Collect every connected, active input table.
    Vector<BufferedDataTable> activeTables = new Vector<BufferedDataTable>();
    for (int port = 0; port < getNrInPorts(); port++) {
        boolean connected = inData[port] != null;
        if (connected && !(inData[port] instanceof InactiveBranchPortObject)) {
            activeTables.add((BufferedDataTable) inData[port]);
        }
    }
    if (activeTables.isEmpty()) {
        // No active input: port 0 is expected to carry the inactive branch
        // object (NOTE(review): original comment was truncated — presumably
        // "at least port 0 must be connected"; confirm against node docs).
        assert inData[0] instanceof InactiveBranchPortObject;
        if (m_enableHiliting) {
            // Install an empty translation so the hilite internals stay consistent.
            Map<RowKey, Set<RowKey>> emptyMap = new HashMap<RowKey, Set<RowKey>>();
            m_hiliteTranslator.setMapper(new DefaultHiLiteMapper(emptyMap));
        }
        return new PortObject[] { inData[0] };
    }
    assert activeTables.size() > 0;
    // All active specs must be structurally equal to the first one.
    for (int idx = 1; idx < activeTables.size(); idx++) {
        if (!(activeTables.get(0).getSpec().equalStructure(activeTables.get(idx).getSpec()))) {
            // incompatible - refuse to execute
            throw new Exception("The data table structures of the active " + "ports are not compatible.");
        }
    }
    // Sum up the rows and convert to the array form the wrapper table expects.
    int totalRowCount = 0;
    DataTable[] wrapped = new DataTable[activeTables.size()];
    int pos = 0;
    for (BufferedDataTable table : activeTables) {
        totalRowCount += table.getRowCount();
        wrapped[pos++] = table;
    }
    AppendedRowsTable concatenated = new AppendedRowsTable((m_isAppendSuffix ? m_suffix : null), wrapped);
    // Note: this iterator reports progress on exec and throws a runtime
    // wrapper exception when the execution is canceled.
    AppendedRowsIterator rowIt = concatenated.iterator(exec, totalRowCount);
    BufferedDataContainer container = exec.createDataContainer(concatenated.getDataTableSpec());
    try {
        while (rowIt.hasNext()) {
            // may throw the cancellation wrapper; also sets progress
            container.addRowToTable(rowIt.next());
        }
    } catch (RuntimeCanceledExecutionException rcee) {
        // Unwrap to the checked cancellation exception the framework expects.
        throw rcee.getCause();
    } finally {
        container.close();
    }
    if (rowIt.getNrRowsSkipped() > 0) {
        setWarningMessage("Filtered out " + rowIt.getNrRowsSkipped() + " duplicate row id(s).");
    }
    if (m_enableHiliting) {
        // Build the hilite translation map from the iterator's duplicate-name
        // map (output key -> original key). An entry is added when the key was
        // renamed, or when it is unchanged and no suffixed sibling exists.
        Map<RowKey, Set<RowKey>> translation = new HashMap<RowKey, Set<RowKey>>();
        Map<RowKey, RowKey> duplicates = rowIt.getDuplicateNameMap();
        for (Map.Entry<RowKey, RowKey> entry : duplicates.entrySet()) {
            RowKey outputKey = entry.getKey();
            RowKey originalKey = entry.getValue();
            if (!outputKey.equals(originalKey)) {
                // renamed (suffixed) key maps back to its original key
                translation.put(outputKey, Collections.singleton(originalKey));
            } else if (!duplicates.containsKey(new RowKey(outputKey.getString() + m_suffix))) {
                // unchanged key with no suffixed sibling maps to itself
                translation.put(outputKey, Collections.singleton(originalKey));
            }
        }
        m_hiliteTranslator.setMapper(new DefaultHiLiteMapper(translation));
    }
    return new BufferedDataTable[] { container.getTable() };
}
Also used : DataTable(org.knime.core.data.DataTable) BufferedDataTable(org.knime.core.node.BufferedDataTable) InactiveBranchPortObject(org.knime.core.node.port.inactive.InactiveBranchPortObject) Set(java.util.Set) BufferedDataContainer(org.knime.core.node.BufferedDataContainer) RowKey(org.knime.core.data.RowKey) HashMap(java.util.HashMap) RuntimeCanceledExecutionException(org.knime.base.data.append.row.AppendedRowsIterator.RuntimeCanceledExecutionException) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) IOException(java.io.IOException) AppendedRowsIterator(org.knime.core.data.append.AppendedRowsIterator) AppendedRowsTable(org.knime.core.data.append.AppendedRowsTable) BufferedDataTable(org.knime.core.node.BufferedDataTable) RuntimeCanceledExecutionException(org.knime.base.data.append.row.AppendedRowsIterator.RuntimeCanceledExecutionException) Vector(java.util.Vector) DefaultHiLiteMapper(org.knime.core.node.property.hilite.DefaultHiLiteMapper) InactiveBranchPortObject(org.knime.core.node.port.inactive.InactiveBranchPortObject) PortObject(org.knime.core.node.port.PortObject) HashMap(java.util.HashMap) Map(java.util.Map)

Example 3 with AppendedRowsTable

use of org.knime.core.data.append.AppendedRowsTable in project knime-core by knime.

The following example is taken from the class ConcatenateTableFactory, method copyTablesIntoOneTable().

/**
 * Copies all tables, except the last still not-closed table, into an entire new table.
 *
 * @param exec used to create the new container, report progress and check for cancellation
 * @throws CanceledExecutionException if the user canceled while the rows were being copied
 */
private void copyTablesIntoOneTable(final ExecutionContext exec) throws CanceledExecutionException {
    // All already-closed tables, i.e. everything except the last (still open) container.
    final BufferedDataTable[] tables = new BufferedDataTable[m_tables.size() - 1];
    for (int i = 0; i < tables.length; i++) {
        tables[i] = m_tables.get(i).getTable();
    }
    // DuplicatePolicy.Fail: the copied tables are expected to have disjoint row keys.
    // (Simplified the needlessly fully-qualified enum reference.)
    AppendedRowsTable wrapper = new AppendedRowsTable(AppendedRowsTable.DuplicatePolicy.Fail, null, tables);
    BufferedDataContainer con = exec.createDataContainer(wrapper.getDataTableSpec());
    RowIterator rowIt = wrapper.iterator();
    exec.setProgress("Too many tables. Copy tables into one table.");
    try {
        while (rowIt.hasNext()) {
            // respect user cancellation between rows
            exec.checkCanceled();
            con.addRowToTable(rowIt.next());
        }
    } finally {
        // BUG FIX: close the container even when cancellation (or any other
        // exception) aborts the copy loop; previously a canceled execution
        // left an open BufferedDataContainer behind.
        con.close();
    }
    // Replace all copied tables by the single merged container, keeping the
    // still-open last container at the end of the list.
    BufferedDataContainer last = m_tables.get(m_tables.size() - 1);
    m_tables.clear();
    m_tables.add(con);
    m_tables.add(last);
    exec.setProgress("Tables copied into one.");
}
Also used : BufferedDataContainer(org.knime.core.node.BufferedDataContainer) AppendedRowsTable(org.knime.core.data.append.AppendedRowsTable) RowIterator(org.knime.core.data.RowIterator) BufferedDataTable(org.knime.core.node.BufferedDataTable)

Example 4 with AppendedRowsTable

use of org.knime.core.data.append.AppendedRowsTable in project knime-core by knime.

The following example is taken from the class AppendedRowsNodeModel, method execute().

/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] rawInData, final ExecutionContext exec) throws Exception {
    // Drop unconnected (null) optional inputs up front.
    final BufferedDataTable[] activeTables = noNullArray(rawInData);
    final DataTableSpec[] activeSpecs = new DataTableSpec[activeTables.length];
    for (int i = 0; i < activeSpecs.length; i++) {
        activeSpecs[i] = activeTables[i].getDataTableSpec();
    }
    if (!(m_isAppendSuffix || m_isFailOnDuplicate)) {
        // Neither suffixing nor fail-on-duplicate is requested, so the table
        // cannot simply be wrapped: traverse all inputs and copy the rows.
        long rowCount = 0L;
        final RowInput[] rowInputs = new RowInput[activeTables.length];
        for (int i = 0; i < activeTables.length; i++) {
            rowCount += activeTables[i].size();
            rowInputs[i] = new DataTableRowInput(activeTables[i]);
        }
        final DataTableSpec outputSpec = getOutputSpec(activeSpecs);
        final BufferedDataTableRowOutput output =
            new BufferedDataTableRowOutput(exec.createDataContainer(outputSpec));
        run(rowInputs, output, exec, rowCount);
        return new BufferedDataTable[] { output.getDataTable() };
    }
    // A suffix is to be appended, or the node fails on duplicate row IDs:
    // wrap the inputs virtually instead of traversing and copying the rows.
    final Optional<String> suffix = m_isAppendSuffix ? Optional.of(m_suffix) : Optional.<String>empty();
    BufferedDataTable result = exec.createConcatenateTable(exec, suffix, m_isFailOnDuplicate, activeTables);
    if (m_isIntersection) {
        // Restrict the wrapped table to the columns common to all inputs.
        final DataTableSpec wantedSpec = getOutputSpec(activeSpecs);
        final DataTableSpec currentSpec = result.getDataTableSpec();
        final String[] commonColumns = getIntersection(wantedSpec, currentSpec);
        final ColumnRearranger rearranger = new ColumnRearranger(currentSpec);
        rearranger.keepOnly(commonColumns);
        result = exec.createColumnRearrangeTable(result, rearranger, exec);
    }
    if (m_enableHiliting) {
        // Recompute the duplicate-key map over a virtual concatenation and
        // install it as the hilite translation.
        final AppendedRowsTable virtualConcat = new AppendedRowsTable(DuplicatePolicy.Fail, null, activeTables);
        final String effectiveSuffix = m_suffix == null ? "" : m_suffix;
        final Map<RowKey, Set<RowKey>> translation =
            createHiliteTranslationMap(createDuplicateMap(virtualConcat, exec, effectiveSuffix));
        m_hiliteTranslator.setMapper(new DefaultHiLiteMapper(translation));
    }
    return new BufferedDataTable[] { result };
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) Set(java.util.Set) LinkedHashSet(java.util.LinkedHashSet) RowKey(org.knime.core.data.RowKey) FilterColumnRowInput(org.knime.base.data.filter.column.FilterColumnRowInput) DataTableRowInput(org.knime.core.node.streamable.DataTableRowInput) RowInput(org.knime.core.node.streamable.RowInput) AppendedRowsRowInput(org.knime.core.data.append.AppendedRowsRowInput) ColumnRearranger(org.knime.core.data.container.ColumnRearranger) AppendedRowsTable(org.knime.core.data.append.AppendedRowsTable) BufferedDataTable(org.knime.core.node.BufferedDataTable) DataTableRowInput(org.knime.core.node.streamable.DataTableRowInput) DefaultHiLiteMapper(org.knime.core.node.property.hilite.DefaultHiLiteMapper) BufferedDataTableRowOutput(org.knime.core.node.streamable.BufferedDataTableRowOutput)

Aggregations

AppendedRowsTable (org.knime.core.data.append.AppendedRowsTable)4 BufferedDataTable (org.knime.core.node.BufferedDataTable)4 Set (java.util.Set)3 RowKey (org.knime.core.data.RowKey)3 BufferedDataContainer (org.knime.core.node.BufferedDataContainer)3 DefaultHiLiteMapper (org.knime.core.node.property.hilite.DefaultHiLiteMapper)3 IOException (java.io.IOException)2 HashMap (java.util.HashMap)2 Map (java.util.Map)2 DataTableSpec (org.knime.core.data.DataTableSpec)2 AppendedRowsIterator (org.knime.core.data.append.AppendedRowsIterator)2 CanceledExecutionException (org.knime.core.node.CanceledExecutionException)2 InvalidSettingsException (org.knime.core.node.InvalidSettingsException)2 PortObject (org.knime.core.node.port.PortObject)2 InactiveBranchPortObject (org.knime.core.node.port.inactive.InactiveBranchPortObject)2 LinkedHashSet (java.util.LinkedHashSet)1 Vector (java.util.Vector)1 RuntimeCanceledExecutionException (org.knime.base.data.append.row.AppendedRowsIterator.RuntimeCanceledExecutionException)1 FilterColumnRowInput (org.knime.base.data.filter.column.FilterColumnRowInput)1 DataTable (org.knime.core.data.DataTable)1