Search in sources :

Example 1 with FeatureIterator

use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.

The following snippet is from the class EagerSagUpdater, method update.

/**
 * {@inheritDoc}
 */
@Override
public void update(final T x, final double[] sig, final WeightMatrix<T> beta, final double stepSize, final int iteration) {
    final int rowId = x.getId();
    // register the row the first time it is encountered
    if (!m_seen.get(rowId)) {
        m_seen.set(rowId);
        m_nCovered++;
    }
    // fold the change in this row's gradient into the running gradient sum,
    // touching only the row's non-zero features
    FeatureIterator featIter = x.getFeatureIterator();
    while (featIter.next()) {
        final int featureIdx = featIter.getFeatureIndex();
        final double featureVal = featIter.getFeatureValue();
        for (int cat = 0; cat < m_nCats; cat++) {
            final double delta = featureVal * (sig[cat] - m_gradientMemory[cat][rowId]);
            assert Double.isFinite(delta);
            m_gradientSum[cat][featureIdx] += delta;
        }
    }
    // remember this row's latest gradient contribution for the next visit
    for (int cat = 0; cat < m_nCats; cat++) {
        m_gradientMemory[cat][rowId] = sig[cat];
    }
    final double scale = beta.getScale();
    // apply the step to every weight via the matrix's own update hook
    beta.update((val, c, i) -> performUpdate(val, stepSize, scale, c, i), true);
}
Also used : FeatureIterator(org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator)

Example 2 with FeatureIterator

use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.

The following snippet is from the class LazySGOptimizer, method prepareIteration.

/**
 * {@inheritDoc}
 */
@Override
protected void prepareIteration(final WeightMatrix<T> beta, final T x, final U updater, final R regUpdater, final int iteration) {
    // catch up on all deferred (lazy) weight updates before this row is processed
    updater.lazyUpdate(beta, x, m_lastVisited, iteration);
    regUpdater.lazyUpdate(beta, x, m_lastVisited, iteration);
    // record that this row's features were last touched in the current iteration
    FeatureIterator features = x.getFeatureIterator();
    while (features.next()) {
        m_lastVisited[features.getFeatureIndex()] = iteration;
    }
}
Also used : FeatureIterator(org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator)

Example 3 with FeatureIterator

use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.

The following snippet is from the class SimpleWeightMatrix, method predict.

/**
 * {@inheritDoc}
 */
@Override
public double[] predict(final T row) {
    // linear prediction per category: dot product of the category's weight
    // vector with the row, iterating only the row's non-zero features
    final double[] result = new double[m_data.length];
    for (FeatureIterator features = row.getFeatureIterator(); features.next(); ) {
        final int index = features.getFeatureIndex();
        final double value = features.getFeatureValue();
        for (int cat = 0; cat < result.length; cat++) {
            result[cat] += m_data[cat][index] * value;
        }
    }
    return result;
}
Also used : FeatureIterator(org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator)

Example 4 with FeatureIterator

use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.

The following snippet is from the class SparseClassificationTrainingRowTest, method testFeatureIterator.

/**
 * Tests the {@link FeatureIterator} returned by {@link ClassificationTrainingRow#getFeatureIterator()}.
 *
 * @throws Exception
 */
@Test
public void testFeatureIterator() throws Exception {
    final SparseClassificationTrainingRow row = createRow();
    final FeatureIterator iter = row.getFeatureIterator();
    for (int pos = 0; pos < INDICES.length; pos++) {
        assertTrue(iter.hasNext());
        assertTrue(iter.next());
        assertEquals(INDICES[pos], iter.getFeatureIndex());
        // exact match required, hence a delta of 0
        assertEquals(VALUES[pos], iter.getFeatureValue(), 0);
        if (pos == 2) {
            // per these assertions, a spawned iterator reports the feature
            // preceding its parent's current position
            final FeatureIterator spawned = iter.spawn();
            assertEquals(INDICES[pos - 1], spawned.getFeatureIndex());
            assertEquals(VALUES[pos - 1], spawned.getFeatureValue(), 0);
        }
    }
    // the iterator must be exhausted after all expected features were seen
    assertFalse(iter.hasNext());
    assertFalse(iter.next());
}
Also used : FeatureIterator(org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator) Test(org.junit.Test)

Example 5 with FeatureIterator

use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.

The following snippet is from the class LazySagUpdaterTest, method lazyVsEager.

/**
 * Drives the lazy and the eager SAG updater with the same random rows and gradients and
 * checks that both produce the same weight matrix, both positionally during training and
 * fully (after {@code resetJITSystem}) at the end of every epoch.
 *
 * @param nRows number of synthetic training rows to generate
 * @param nFeatures number of features (including what appears to be an intercept slot —
 *        feature vectors are created with {@code nFeatures - 1} entries; TODO confirm)
 * @param nCats number of categories; both updaters are built with {@code nCats - 1} classes
 * @param nEpochs number of passes over {@code nRows} random draws
 * @param fractionZeros probability that a generated feature value is forced to zero (sparsity)
 * @throws Exception if the comparison helpers fail
 */
private void lazyVsEager(final int nRows, final int nFeatures, final int nCats, final int nEpochs, final double fractionZeros) throws Exception {
    LazySagUpdater<MockClassificationTrainingRow> lazyUpdater = new LazySagUpdater.LazySagUpdaterFactory<MockClassificationTrainingRow>(nRows, nFeatures, nCats - 1).create();
    EagerSagUpdater<MockClassificationTrainingRow> eagerUpdater = new EagerSagUpdater.EagerSagUpdaterFactory<MockClassificationTrainingRow>(nRows, nFeatures, nCats - 1).create();
    SimpleWeightMatrix<MockClassificationTrainingRow> eagerBeta = new SimpleWeightMatrix<>(nFeatures, nCats - 1, true);
    SimpleWeightMatrix<MockClassificationTrainingRow> lazyBeta = new SimpleWeightMatrix<>(nFeatures, nCats - 1, true);
    MockClassificationTrainingRow[] rows = new MockClassificationTrainingRow[nRows];
    // NOTE(review): columns2Check is never set to true anywhere in this method, so
    // checkPositional presumably skips every column — verify this is intentional.
    boolean[] columns2Check = new boolean[nFeatures];
    for (int i = 0; i < nRows; i++) {
        double[] features = new double[nFeatures - 1];
        for (int j = 0; j < nFeatures - 1; j++) {
            features[j] = Math.random() * 2 - 1;
            // set some features to zero
            if (Math.random() <= fractionZeros) {
                features[j] = 0;
            }
        }
        int cat = (int) (Math.random() * nCats);
        rows[i] = new MockClassificationTrainingRow(features, i, cat);
    }
    double[] gradient = new double[nCats - 1];
    int[] lastVisited = new int[nFeatures];
    double stepSize = 1;
    for (int e = 0; e < nEpochs; e++) {
        for (int k = 0; k < nRows; k++) {
            // draw a row uniformly at random (with replacement)
            MockClassificationTrainingRow row = rows[(int) (Math.random() * nRows)];
            // the lazy updater must first catch up on deferred updates for this row's
            // features before lastVisited is refreshed below — order matters here
            lazyUpdater.lazyUpdate(lazyBeta, row, lastVisited, k);
            // use a fresh random gradient; both updaters receive the identical values
            for (int j = 0; j < nCats - 1; j++) {
                gradient[j] = Math.random() * 4 - 2;
            }
            for (FeatureIterator iter = row.getFeatureIterator(); iter.next(); ) {
                lastVisited[iter.getFeatureIndex()] = k;
            }
            // if feature is present, the lazy update must update beta correctly
            checkPositional(lazyBeta.getWeightVector(), eagerBeta.getWeightVector(), columns2Check, EPSILON);
            // double stepSize = Math.random();
            lazyUpdater.update(row, gradient, lazyBeta, stepSize, k);
            eagerUpdater.update(row, gradient, eagerBeta, stepSize, k);
        }
        // flush all remaining deferred updates so the full matrices can be compared
        lazyUpdater.resetJITSystem(lazyBeta, lastVisited);
        checkEquality(lazyBeta.getWeightVector(), eagerBeta.getWeightVector(), EPSILON, "Epoch " + e);
    }
}
Also used : FeatureIterator(org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator)

Aggregations

FeatureIterator (org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator)10 DecompositionSolver (org.apache.commons.math3.linear.DecompositionSolver)1 RealMatrix (org.apache.commons.math3.linear.RealMatrix)1 SingularValueDecomposition (org.apache.commons.math3.linear.SingularValueDecomposition)1 Test (org.junit.Test)1 ClassificationTrainingRow (org.knime.base.node.mine.regression.logistic.learner4.data.ClassificationTrainingRow)1