Example 1 with Normalizer

Use of org.knime.base.data.normalize.Normalizer in project knime-core by knime.

From class NormalizerNodeModel, method calculate:

/**
 * Creates a new normalized {@link org.knime.core.data.DataTable} depending
 * on the mode.
 *
 * @param inData The input data.
 * @param exec For BufferedDataTable creation and progress.
 * @return the result of the calculation
 * @throws Exception If the node calculation fails for any reason.
 */
protected CalculationResult calculate(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    BufferedDataTable inTable = (BufferedDataTable) inData[0];
    DataTableSpec inSpec = inTable.getSpec();
    // extract selected numeric columns
    updateNumericColumnSelection(inSpec);
    Normalizer ntable = new Normalizer(inTable, m_columns);
    long rowcount = inTable.size();
    ExecutionMonitor prepareExec = exec.createSubProgress(0.3);
    AffineTransTable outTable;
    boolean fixDomainBounds = false;
    switch(m_mode) {
        case NONORM_MODE:
            return new CalculationResult(inTable, new DataTableSpec(), new AffineTransConfiguration());
        case MINMAX_MODE:
            fixDomainBounds = true;
            outTable = ntable.doMinMaxNorm(m_max, m_min, prepareExec);
            break;
        case ZSCORE_MODE:
            outTable = ntable.doZScoreNorm(prepareExec);
            break;
        case DECIMALSCALING_MODE:
            outTable = ntable.doDecimalScaling(prepareExec);
            break;
        default:
            throw new Exception("No mode set");
    }
    if (outTable.getErrorMessage() != null) {
        // something went wrong, report and throw an exception
        throw new Exception(outTable.getErrorMessage());
    }
    if (ntable.getErrorMessage() != null) {
        // something went wrong during initialization, report.
        setWarningMessage(ntable.getErrorMessage());
    }
    DataTableSpec modelSpec = FilterColumnTable.createFilterTableSpec(inSpec, m_columns);
    AffineTransConfiguration configuration = outTable.getConfiguration();
    DataTableSpec spec = outTable.getDataTableSpec();
    // fix the domain bounds of the normalized columns to the configured
    // min/max (the affine transformation applies the same mapping to all
    // values and is not guaranteed to snap exactly to min/max)
    if (fixDomainBounds) {
        DataColumnSpec[] newColSpecs = new DataColumnSpec[spec.getNumColumns()];
        for (int i = 0; i < newColSpecs.length; i++) {
            newColSpecs[i] = spec.getColumnSpec(i);
        }
        for (int i = 0; i < m_columns.length; i++) {
            int index = spec.findColumnIndex(m_columns[i]);
            DataColumnSpecCreator creator = new DataColumnSpecCreator(newColSpecs[index]);
            DataColumnDomainCreator domCreator = new DataColumnDomainCreator(newColSpecs[index].getDomain());
            domCreator.setLowerBound(new DoubleCell(m_min));
            domCreator.setUpperBound(new DoubleCell(m_max));
            creator.setDomain(domCreator.createDomain());
            newColSpecs[index] = creator.createSpec();
        }
        spec = new DataTableSpec(spec.getName(), newColSpecs);
    }
    ExecutionMonitor normExec = exec.createSubProgress(0.7);
    BufferedDataContainer container = exec.createDataContainer(spec);
    long count = 1;
    for (DataRow row : outTable) {
        normExec.checkCanceled();
        normExec.setProgress(count / (double) rowcount, "Normalizing row no. " + count + " of " + rowcount + " (\"" + row.getKey() + "\")");
        container.addRowToTable(row);
        count++;
    }
    container.close();
    return new CalculationResult(container.getTable(), modelSpec, configuration);
}
Also used: DataTableSpec (org.knime.core.data.DataTableSpec), DataColumnSpecCreator (org.knime.core.data.DataColumnSpecCreator), BufferedDataContainer (org.knime.core.node.BufferedDataContainer), Normalizer (org.knime.base.data.normalize.Normalizer), DoubleCell (org.knime.core.data.def.DoubleCell), DataColumnDomainCreator (org.knime.core.data.DataColumnDomainCreator), DataRow (org.knime.core.data.DataRow), InvalidSettingsException (org.knime.core.node.InvalidSettingsException), CanceledExecutionException (org.knime.core.node.CanceledExecutionException), IOException (java.io.IOException), DataColumnSpec (org.knime.core.data.DataColumnSpec), BufferedDataTable (org.knime.core.node.BufferedDataTable), AffineTransTable (org.knime.base.data.normalize.AffineTransTable), AffineTransConfiguration (org.knime.base.data.normalize.AffineTransConfiguration), ExecutionMonitor (org.knime.core.node.ExecutionMonitor)
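
For orientation, here is a minimal sketch of the Normalizer calls used in Example 1, pulled out of the node model. The helper name and its parameters are illustrative assumptions, not knime-core API; the Normalizer, AffineTransTable, and ExecutionMonitor calls mirror the MINMAX_MODE branch above.

/**
 * Minimal sketch (assumed helper, not part of knime-core): min-max
 * normalization of the given numeric columns into [0, 1], mirroring
 * the MINMAX_MODE branch of Example 1.
 */
static AffineTransTable minMaxNormalize(final BufferedDataTable table,
        final String[] numericColumns, final ExecutionMonitor mon)
        throws Exception {
    Normalizer normalizer = new Normalizer(table, numericColumns);
    // doMinMaxNorm takes the new maximum first, then the new minimum
    AffineTransTable normalized = normalizer.doMinMaxNorm(1.0, 0.0, mon);
    if (normalized.getErrorMessage() != null) {
        // same error handling as in Example 1
        throw new Exception(normalized.getErrorMessage());
    }
    return normalized;
}

The AffineTransTable's getConfiguration() captures the fitted affine mapping, which Example 1 packs into its CalculationResult so the same transformation can later be applied to other tables.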

Example 2 with Normalizer

Use of org.knime.base.data.normalize.Normalizer in project knime-core by knime.

From class PMCCNodeModel, method execute:

/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    final BufferedDataTable in = (BufferedDataTable) inData[0];
    // row count as a double so that later divisions are floating point
    final double rC = in.getRowCount();
    int[] includes = getIncludes(in.getDataTableSpec());
    String[] includeNames = m_columnIncludesList.getIncludeList().toArray(new String[0]);
    double progNormalize = 0.3;
    double progDetermine = 0.65;
    double progFinish = 1.0 - progNormalize - progDetermine;
    exec.setMessage("Normalizing data");
    final ExecutionMonitor normProg = exec.createSubProgress(progNormalize);
    FilterColumnTable filterTable = new FilterColumnTable(in, includes);
    final int l = includes.length;
    // number of column pairs (upper triangle without the diagonal)
    int nomCount = (l - 1) * l / 2;
    final HalfDoubleMatrix nominatorMatrix =
            new HalfDoubleMatrix(includes.length, /*withDiagonal*/ false);
    nominatorMatrix.fill(Double.NaN);
    @SuppressWarnings("unchecked") final LinkedHashMap<DataCell, Integer>[] possibleValues = new LinkedHashMap[l];
    DataTableSpec filterTableSpec = filterTable.getDataTableSpec();
    for (int i = 0; i < l; i++) {
        DataColumnSpec cs = filterTableSpec.getColumnSpec(i);
        if (cs.getType().isCompatible(NominalValue.class)) {
            possibleValues[i] = new LinkedHashMap<DataCell, Integer>();
        }
    }
    final int possValueUpperBound = m_maxPossValueCountModel.getIntValue();
    // determines possible values. We can't use those from the domain
    // as the domain can also contain values not present in the data
    // but in the contingency table we need rows/columns to have at least
    // one cell with a value >= 1
    StatisticsTable statTable = new StatisticsTable(filterTable) {

        // instance initializer block - serves as the constructor of this
        // anonymous subclass
        {
            calculateAllMoments(in.getRowCount(), normProg);
        }

        @Override
        protected void calculateMomentInSubClass(final DataRow row) {
            for (int i = 0; i < l; i++) {
                if (possibleValues[i] != null) {
                    DataCell c = row.getCell(i);
                    // note: also take missing value as possible value
                    possibleValues[i].put(c, null);
                    if (possibleValues[i].size() > possValueUpperBound) {
                        possibleValues[i] = null;
                    }
                }
            }
        }
    };
    for (LinkedHashMap<DataCell, Integer> map : possibleValues) {
        if (map != null) {
            int index = 0;
            for (Map.Entry<DataCell, Integer> entry : map.entrySet()) {
                entry.setValue(index++);
            }
        }
    }
    // stores all pair-wise contingency tables,
    // contingencyTables[i] == null <--> either column of the corresponding
    // pair is non-categorical.
    // What is a contingency table?
    // http://en.wikipedia.org/wiki/Contingency_table
    int[][][] contingencyTables = new int[nomCount][][];
    // columns which contain only one value - no correlation available
    LinkedHashSet<String> constantColumns = new LinkedHashSet<String>();
    int valIndex = 0;
    for (int i = 0; i < l; i++) {
        for (int j = i + 1; j < l; j++) {
            if (possibleValues[i] != null && possibleValues[j] != null) {
                int iSize = possibleValues[i].size();
                int jSize = possibleValues[j].size();
                contingencyTables[valIndex] = new int[iSize][jSize];
            }
            DataColumnSpec colSpecI = filterTableSpec.getColumnSpec(i);
            DataColumnSpec colSpecJ = filterTableSpec.getColumnSpec(j);
            DataType ti = colSpecI.getType();
            DataType tj = colSpecJ.getType();
            if (ti.isCompatible(DoubleValue.class) && tj.isCompatible(DoubleValue.class)) {
                // one of the two columns contains only one value
                if (statTable.getVariance(i) < PMCCPortObjectAndSpec.ROUND_ERROR_OK) {
                    constantColumns.add(colSpecI.getName());
                    nominatorMatrix.set(i, j, Double.NaN);
                } else if (statTable.getVariance(j) < PMCCPortObjectAndSpec.ROUND_ERROR_OK) {
                    constantColumns.add(colSpecJ.getName());
                    nominatorMatrix.set(i, j, Double.NaN);
                } else {
                    nominatorMatrix.set(i, j, 0.0);
                }
            }
            valIndex++;
        }
    }
    // constant columns have no defined correlation to any other column
    // (the corresponding matrix entries stay missing)
    if (!constantColumns.isEmpty()) {
        String[] constantColumnNames = constantColumns.toArray(new String[constantColumns.size()]);
        NodeLogger.getLogger(getClass()).info("The following numeric " + "columns contain only one distinct value or have " + "otherwise a low standard deviation: " + Arrays.toString(constantColumnNames));
        int maxLength = 4;
        if (constantColumns.size() > maxLength) {
            constantColumnNames = Arrays.copyOf(constantColumnNames, maxLength);
            constantColumnNames[maxLength - 1] = "...";
        }
        setWarningMessage("Some columns contain only one distinct value: " + Arrays.toString(constantColumnNames));
    }
    DataTable att;
    if (statTable.getNrRows() > 0) {
        // no iteration needed, hence the zero-size sub-progress
        att = new Normalizer(statTable, includeNames)
                .doZScoreNorm(exec.createSubProgress(0.0));
    } else {
        att = statTable;
    }
    normProg.setProgress(1.0);
    exec.setMessage("Calculating correlation measure");
    ExecutionMonitor detProg = exec.createSubProgress(progDetermine);
    int rowIndex = 0;
    double[] buf = new double[l];
    DataCell[] catBuf = new DataCell[l];
    boolean containsMissing = false;
    for (DataRow r : att) {
        detProg.checkCanceled();
        for (int i = 0; i < l; i++) {
            catBuf[i] = null;
            buf[i] = Double.NaN;
            DataCell c = r.getCell(i);
            // missing value is also a possible value here
            if (possibleValues[i] != null) {
                catBuf[i] = c;
            } else if (c.isMissing()) {
                containsMissing = true;
            } else if (filterTableSpec.getColumnSpec(i).getType().isCompatible(DoubleValue.class)) {
                buf[i] = ((DoubleValue) c).getDoubleValue();
            }
        }
        valIndex = 0;
        for (int i = 0; i < l; i++) {
            for (int j = i + 1; j < l; j++) {
                double b1 = buf[i];
                double b2 = buf[j];
                if (!Double.isNaN(b1) && !Double.isNaN(b2)) {
                    double old = nominatorMatrix.get(i, j);
                    nominatorMatrix.set(i, j, old + b1 * b2);
                } else if (catBuf[i] != null && catBuf[j] != null) {
                    int iIndex = possibleValues[i].get(catBuf[i]);
                    assert iIndex >= 0 : "Value unknown in value list " + "of column " + includeNames[i] + ": " + catBuf[i];
                    int jIndex = possibleValues[j].get(catBuf[j]);
                    assert jIndex >= 0 : "Value unknown in value list " + "of column " + includeNames[j] + ": " + catBuf[j];
                    contingencyTables[valIndex][iIndex][jIndex]++;
                }
                valIndex++;
            }
        }
        rowIndex++;
        detProg.setProgress(rowIndex / rC, "Processing row " + rowIndex + " (\"" + r.getKey() + "\")");
    }
    if (containsMissing) {
        setWarningMessage("Some row(s) contained missing values.");
    }
    detProg.setProgress(1.0);
    double normalizer = 1.0 / (rC - 1.0);
    valIndex = 0;
    for (int i = 0; i < l; i++) {
        for (int j = i + 1; j < l; j++) {
            if (contingencyTables[valIndex] != null) {
                nominatorMatrix.set(i, j, computeCramersV(contingencyTables[valIndex]));
            } else if (!Double.isNaN(nominatorMatrix.get(i, j))) {
                double old = nominatorMatrix.get(i, j);
                nominatorMatrix.set(i, j, old * normalizer);
            }
            // else the pair of columns mixes types (for instance
            // double and string) and no measure is computed
            valIndex++;
        }
    }
    normProg.setProgress(progDetermine);
    PMCCPortObjectAndSpec pmccModel = new PMCCPortObjectAndSpec(includeNames, nominatorMatrix);
    ExecutionContext subExec = exec.createSubExecutionContext(progFinish);
    BufferedDataTable out = pmccModel.createCorrelationMatrix(subExec);
    m_correlationTable = out;
    return new PortObject[] { out, pmccModel };
}
Also used: LinkedHashSet (java.util.LinkedHashSet), DataTable (org.knime.core.data.DataTable), BufferedDataTable (org.knime.core.node.BufferedDataTable), DataTableSpec (org.knime.core.data.DataTableSpec), FilterColumnTable (org.knime.base.data.filter.column.FilterColumnTable), StatisticsTable (org.knime.base.data.statistics.StatisticsTable), SettingsModelFilterString (org.knime.core.node.defaultnodesettings.SettingsModelFilterString), DataRow (org.knime.core.data.DataRow), LinkedHashMap (java.util.LinkedHashMap), DataColumnSpec (org.knime.core.data.DataColumnSpec), DataType (org.knime.core.data.DataType), ExecutionMonitor (org.knime.core.node.ExecutionMonitor), PortObject (org.knime.core.node.port.PortObject), Normalizer (org.knime.base.data.normalize.Normalizer), ExecutionContext (org.knime.core.node.ExecutionContext), DoubleValue (org.knime.core.data.DoubleValue), HalfDoubleMatrix (org.knime.base.util.HalfDoubleMatrix), DataCell (org.knime.core.data.DataCell), Map (java.util.Map)
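
Two computational details in Example 2 deserve a note. Because the numeric columns are z-score normalized first, accumulating b1 * b2 per pair and finally multiplying by 1 / (rC - 1) yields the Pearson correlation coefficient directly. For categorical pairs the example delegates to computeCramersV, whose body is not shown; the sketch below implements the standard Cramér's V formula (chi-square over n times min(rows, cols) - 1) and is an assumption about what that method computes, not the verbatim knime-core implementation.

/**
 * Sketch of Cramér's V for an r-by-c contingency table. Illustrative
 * only - the actual computeCramersV in PMCCNodeModel is not shown in
 * Example 2 and may differ in details.
 */
static double cramersV(final int[][] table) {
    final int rows = table.length;
    final int cols = table[0].length;
    final long[] rowSum = new long[rows];
    final long[] colSum = new long[cols];
    long n = 0;
    for (int i = 0; i < rows; i++) {
        for (int j = 0; j < cols; j++) {
            rowSum[i] += table[i][j];
            colSum[j] += table[i][j];
            n += table[i][j];
        }
    }
    // chi-square statistic against the independence hypothesis
    double chiSquare = 0.0;
    for (int i = 0; i < rows; i++) {
        for (int j = 0; j < cols; j++) {
            double expected = rowSum[i] * (double) colSum[j] / n;
            if (expected > 0.0) {
                double diff = table[i][j] - expected;
                chiSquare += diff * diff / expected;
            }
        }
    }
    // V = sqrt(chi^2 / (n * (min(r, c) - 1))), normalized into [0, 1]
    int minDim = Math.min(rows, cols) - 1;
    return minDim <= 0 ? Double.NaN
            : Math.sqrt(chiSquare / (n * (double) minDim));
}

Both measures end up in the same HalfDoubleMatrix, which the PMCCPortObjectAndSpec then renders as the node's correlation matrix output.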

Aggregations

Normalizer (org.knime.base.data.normalize.Normalizer) - 2 usages
DataColumnSpec (org.knime.core.data.DataColumnSpec) - 2 usages
DataRow (org.knime.core.data.DataRow) - 2 usages
DataTableSpec (org.knime.core.data.DataTableSpec) - 2 usages
BufferedDataTable (org.knime.core.node.BufferedDataTable) - 2 usages
ExecutionMonitor (org.knime.core.node.ExecutionMonitor) - 2 usages
IOException (java.io.IOException) - 1 usage
LinkedHashMap (java.util.LinkedHashMap) - 1 usage
LinkedHashSet (java.util.LinkedHashSet) - 1 usage
Map (java.util.Map) - 1 usage
FilterColumnTable (org.knime.base.data.filter.column.FilterColumnTable) - 1 usage
AffineTransConfiguration (org.knime.base.data.normalize.AffineTransConfiguration) - 1 usage
AffineTransTable (org.knime.base.data.normalize.AffineTransTable) - 1 usage
StatisticsTable (org.knime.base.data.statistics.StatisticsTable) - 1 usage
HalfDoubleMatrix (org.knime.base.util.HalfDoubleMatrix) - 1 usage
DataCell (org.knime.core.data.DataCell) - 1 usage
DataColumnDomainCreator (org.knime.core.data.DataColumnDomainCreator) - 1 usage
DataColumnSpecCreator (org.knime.core.data.DataColumnSpecCreator) - 1 usage
DataTable (org.knime.core.data.DataTable) - 1 usage
DataType (org.knime.core.data.DataType) - 1 usage