Use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.
The class EagerSagUpdater, method update.
/**
 * {@inheritDoc}
 */
@Override
public void update(final T x, final double[] sig, final WeightMatrix<T> beta, final double stepSize, final int iteration) {
    int id = x.getId();
    if (!m_seen.get(id)) {
        // first visit of this row: it now contributes to the gradient sum
        m_seen.set(id);
        m_nCovered++;
    }
    // swap this row's memorized gradient contribution for the current one,
    // touching only the nonzero features the iterator yields
    for (FeatureIterator iter = x.getFeatureIterator(); iter.next();) {
        int idx = iter.getFeatureIndex();
        double val = iter.getFeatureValue();
        for (int c = 0; c < m_nCats; c++) {
            double newD = val * (sig[c] - m_gradientMemory[c][id]);
            assert Double.isFinite(newD);
            m_gradientSum[c][idx] += newD;
        }
    }
    // memorize the current gradient for this row's next visit
    for (int c = 0; c < m_nCats; c++) {
        m_gradientMemory[c][id] = sig[c];
    }
    double scale = beta.getScale();
    beta.update((val, c, i) -> performUpdate(val, stepSize, scale, c, i), true);
}
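To make the SAG bookkeeping above easier to follow, here is a minimal, self-contained sketch of the same update reduced to a single class (m_nCats == 1) and a dense feature array. The names SagSketch, gradientSum, and gradientMemory are illustrative only, not KNIME API; the real updater walks the sparse FeatureIterator instead of a dense array.

public class SagSketch {
    private final double[] gradientMemory; // last gradient memorized per row
    private final double[] gradientSum;    // running per-feature gradient sum
    private final boolean[] seen;          // which rows have been visited
    private int nCovered;                  // number of distinct rows seen so far

    public SagSketch(final int nRows, final int nFeatures) {
        gradientMemory = new double[nRows];
        gradientSum = new double[nFeatures];
        seen = new boolean[nRows];
    }

    public void update(final int rowId, final double[] features, final double sig) {
        if (!seen[rowId]) {
            seen[rowId] = true;
            nCovered++;
        }
        for (int idx = 0; idx < features.length; idx++) {
            double val = features[idx];
            if (val == 0.0) {
                continue; // a FeatureIterator skips zero entries implicitly
            }
            // swap this row's old gradient contribution for the new one
            gradientSum[idx] += val * (sig - gradientMemory[rowId]);
        }
        gradientMemory[rowId] = sig; // memorize the gradient for the next visit
    }
}

Because only the difference sig - gradientMemory[rowId] enters the sum, gradientSum always reflects exactly one memorized gradient per covered row, which is the invariant SAG relies on.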
Use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.
The class LazySGOptimizer, method prepareIteration.
/**
 * {@inheritDoc}
 */
@Override
protected void prepareIteration(final WeightMatrix<T> beta, final T x, final U updater, final R regUpdater, final int iteration) {
    // apply the updates that were deferred for the features present in x
    updater.lazyUpdate(beta, x, m_lastVisited, iteration);
    regUpdater.lazyUpdate(beta, x, m_lastVisited, iteration);
    // record the current iteration as the last visit of those features
    for (FeatureIterator iter = x.getFeatureIterator(); iter.next();) {
        m_lastVisited[iter.getFeatureIndex()] = iteration;
    }
}
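The lazyUpdate calls defer work: weights of absent features are not touched on every iteration; instead m_lastVisited records when each feature was last seen, and the skipped iterations are replayed once the feature reappears. Below is a hedged sketch of such a catch-up step, assuming the per-feature step stays constant between visits (which holds for a gradient-sum step with a fixed step size); catchUp and perIterationStep are illustrative names, not the KNIME implementation.

static void catchUp(final double[] weights, final double[] perIterationStep,
        final int[] lastVisited, final int[] presentFeatures, final int iteration) {
    for (int idx : presentFeatures) {
        int skipped = iteration - lastVisited[idx];
        if (skipped > 0) {
            // all skipped iterations would have applied the same step,
            // so they can be replayed in closed form
            weights[idx] -= skipped * perIterationStep[idx];
        }
    }
}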
Use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.
The class SimpleWeightMatrix, method predict.
/**
 * {@inheritDoc}
 */
@Override
public double[] predict(final T row) {
    double[] prediction = new double[m_data.length];
    FeatureIterator iter = row.getFeatureIterator();
    // sparse dot product of each class's weight vector with the row
    while (iter.next()) {
        int idx = iter.getFeatureIndex();
        double val = iter.getFeatureValue();
        for (int c = 0; c < m_data.length; c++) {
            prediction[c] += m_data[c][idx] * val;
        }
    }
    return prediction;
}
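predict is a per-class sparse dot product: for each class c it accumulates m_data[c][idx] * val over the row's nonzero features. A small stand-alone check of the same arithmetic with made-up numbers (PredictSketch and the values are illustrative only):

public class PredictSketch {
    public static void main(final String[] args) {
        double[][] weights = { {0.5, -1.0, 0.25}, {2.0, 0.0, -0.5} };
        double[] features = {1.0, 0.0, 4.0}; // the zero entry contributes nothing
        double[] prediction = new double[weights.length];
        for (int c = 0; c < weights.length; c++) {
            for (int idx = 0; idx < features.length; idx++) {
                prediction[c] += weights[c][idx] * features[idx];
            }
        }
        // prints [1.5, 0.0]
        System.out.println(java.util.Arrays.toString(prediction));
    }
}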
Use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.
The class SparseClassificationTrainingRowTest, method testFeatureIterator.
/**
 * Tests the {@link FeatureIterator} returned by {@link ClassificationTrainingRow#getFeatureIterator()}.
 *
 * @throws Exception
 */
@Test
public void testFeatureIterator() throws Exception {
    SparseClassificationTrainingRow row = createRow();
    FeatureIterator fi = row.getFeatureIterator();
    for (int i = 0; i < INDICES.length; i++) {
        assertTrue(fi.hasNext());
        assertTrue(fi.next());
        assertEquals(INDICES[i], fi.getFeatureIndex());
        // exact match required, hence a delta of 0
        assertEquals(VALUES[i], fi.getFeatureValue(), 0);
        if (i == 2) {
            // a spawned iterator reports the feature one position behind
            FeatureIterator sfi = fi.spawn();
            assertEquals(INDICES[i - 1], sfi.getFeatureIndex());
            assertEquals(VALUES[i - 1], sfi.getFeatureValue(), 0);
        }
    }
    assertFalse(fi.hasNext());
    assertFalse(fi.next());
}
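The assertions pin down the iterator contract: next() advances through parallel index/value arrays, and a spawn()ed iterator sits one position behind its parent, so its getters report the previous feature. Below is a minimal sketch consistent with that observed behavior; SparseRowSketch and its nested iterator are an illustration of the contract as read from this test, not the KNIME implementation, and the comment on spawn() is one plausible reading of its purpose.

class SparseRowSketch {
    private final int[] indices;   // feature indices, strictly increasing
    private final double[] values; // nonzero values parallel to indices

    SparseRowSketch(final int[] indices, final double[] values) {
        this.indices = indices;
        this.values = values;
    }

    FeatureIteratorSketch getFeatureIterator() {
        return new FeatureIteratorSketch(-1); // positioned before the first feature
    }

    class FeatureIteratorSketch {
        private int m_cursor;

        FeatureIteratorSketch(final int cursor) {
            m_cursor = cursor;
        }

        boolean hasNext() {
            return m_cursor + 1 < indices.length;
        }

        boolean next() {
            if (!hasNext()) {
                return false;
            }
            m_cursor++;
            return true;
        }

        int getFeatureIndex() {
            return indices[m_cursor];
        }

        double getFeatureValue() {
            return values[m_cursor];
        }

        FeatureIteratorSketch spawn() {
            // one step behind, so the spawned iterator's first next()
            // revisits the feature the parent currently points at
            return new FeatureIteratorSketch(m_cursor - 1);
        }
    }
}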
Use of org.knime.base.node.mine.regression.logistic.learner4.data.TrainingRow.FeatureIterator in project knime-core by knime.
The class LazySagUpdaterTest, method lazyVsEager.
private void lazyVsEager(final int nRows, final int nFeatures, final int nCats, final int nEpochs, final double fractionZeros) throws Exception {
    LazySagUpdater<MockClassificationTrainingRow> lazyUpdater = new LazySagUpdater.LazySagUpdaterFactory<MockClassificationTrainingRow>(nRows, nFeatures, nCats - 1).create();
    EagerSagUpdater<MockClassificationTrainingRow> eagerUpdater = new EagerSagUpdater.EagerSagUpdaterFactory<MockClassificationTrainingRow>(nRows, nFeatures, nCats - 1).create();
    SimpleWeightMatrix<MockClassificationTrainingRow> eagerBeta = new SimpleWeightMatrix<>(nFeatures, nCats - 1, true);
    SimpleWeightMatrix<MockClassificationTrainingRow> lazyBeta = new SimpleWeightMatrix<>(nFeatures, nCats - 1, true);
    MockClassificationTrainingRow[] rows = new MockClassificationTrainingRow[nRows];
    boolean[] columns2Check = new boolean[nFeatures];
    for (int i = 0; i < nRows; i++) {
        double[] features = new double[nFeatures - 1];
        for (int j = 0; j < nFeatures - 1; j++) {
            features[j] = Math.random() * 2 - 1;
            // set some features to zero
            if (Math.random() <= fractionZeros) {
                features[j] = 0;
            }
        }
        int cat = (int) (Math.random() * nCats);
        rows[i] = new MockClassificationTrainingRow(features, i, cat);
    }
    double[] gradient = new double[nCats - 1];
    int[] lastVisited = new int[nFeatures];
    double stepSize = 1;
    for (int e = 0; e < nEpochs; e++) {
        for (int k = 0; k < nRows; k++) {
            MockClassificationTrainingRow row = rows[(int) (Math.random() * nRows)];
            lazyUpdater.lazyUpdate(lazyBeta, row, lastVisited, k);
            for (int j = 0; j < nCats - 1; j++) {
                gradient[j] = Math.random() * 4 - 2;
            }
            for (FeatureIterator iter = row.getFeatureIterator(); iter.next();) {
                lastVisited[iter.getFeatureIndex()] = k;
            }
            // if feature is present, the lazy update must update beta correctly
            checkPositional(lazyBeta.getWeightVector(), eagerBeta.getWeightVector(), columns2Check, EPSILON);
            // double stepSize = Math.random();
            lazyUpdater.update(row, gradient, lazyBeta, stepSize, k);
            eagerUpdater.update(row, gradient, eagerBeta, stepSize, k);
        }
        lazyUpdater.resetJITSystem(lazyBeta, lastVisited);
        checkEquality(lazyBeta.getWeightVector(), eagerBeta.getWeightVector(), EPSILON, "Epoch " + e);
    }
}
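lazyVsEager is a parameterized helper rather than a test itself: it drives the lazy and eager updaters with identical random rows and gradients, then asserts the resulting weight matrices agree after each epoch. Hypothetical invocations might look as follows; the test names and parameter values are made up purely to illustrate the knobs (rows, features, classes, epochs, fraction of zeros).

@Test
public void testLazyVsEagerDense() throws Exception {
    lazyVsEager(10, 5, 3, 2, 0.0); // fully dense rows
}

@Test
public void testLazyVsEagerSparse() throws Exception {
    lazyVsEager(10, 5, 3, 2, 0.7); // roughly 70% of the entries zeroed
}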