
Example 46 with DenseVector

Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li.

The class TrustRegionNewtonOptimizer, method trcg.

/**
     * Approximately solves the trust region subproblem by conjugate gradient.
     *
     * @param delta trust region radius
     * @param g gradient at the current iterate
     * @return pair (s, r): the approximate step s and the final conjugate gradient residual r
     */
private Pair<Vector, Vector> trcg(double delta, Vector g) {
    int numColumns = loss.getNumColumns();
    double one = 1;
    Vector d = new DenseVector(numColumns);
    Vector Hd = new DenseVector(numColumns);
    double rTr, rnewTrnew, cgtol;
    Vector s = new DenseVector(numColumns);
    Vector r = new DenseVector(numColumns);
    Pair<Vector, Vector> result = new Pair<>();
    for (int i = 0; i < numColumns; i++) {
        s.set(i, 0);
        r.set(i, -g.get(i));
        d.set(i, r.get(i));
    }
    cgtol = 0.1 * g.norm(2);
    rTr = r.dot(r);
    while (true) {
        if (r.norm(2) <= cgtol) {
            break;
        }
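        // Hessian-vector product: Hd = H * d, supplied by the loss function.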
        loss.Hv(d, Hd);
        double alpha = rTr / d.dot(Hd);
        daxpy(alpha, d, s);
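        // If the trial step left the trust region, back it out and move to the boundary instead.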
        if (s.norm(2) > delta) {
            alpha = -alpha;
            daxpy(alpha, d, s);
            double std = s.dot(d);
            double sts = s.dot(s);
            double dtd = d.dot(d);
            double dsq = delta * delta;
            double rad = Math.sqrt(std * std + dtd * (dsq - sts));
            if (std >= 0)
                alpha = (dsq - sts) / (std + rad);
            else
                alpha = (rad - std) / dtd;
            daxpy(alpha, d, s);
            alpha = -alpha;
            daxpy(alpha, Hd, r);
            break;
        }
        alpha = -alpha;
        daxpy(alpha, Hd, r);
        rnewTrnew = r.dot(r);
        double beta = rnewTrnew / rTr;
        scale(beta, d);
        daxpy(one, r, d);
        rTr = rnewTrnew;
    }
    result.setFirst(s);
    result.setSecond(r);
    return result;
}
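The method relies on two private helpers, daxpy and scale, that the example does not show. A minimal sketch of what they presumably do, namely BLAS-style in-place updates on Mahout vectors; the actual implementations in TrustRegionNewtonOptimizer may differ:

// Assumed helpers (not shown in the example above): BLAS-style in-place updates.
private static void daxpy(double a, Vector x, Vector y) {
    // y <- y + a * x
    for (int i = 0; i < y.size(); i++) {
        y.set(i, y.get(i) + a * x.get(i));
    }
}

private static void scale(double a, Vector x) {
    // x <- a * x
    for (int i = 0; i < x.size(); i++) {
        x.set(i, a * x.get(i));
    }
}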
Also used : DenseVector(org.apache.mahout.math.DenseVector) Vector(org.apache.mahout.math.Vector) Pair(edu.neu.ccs.pyramid.util.Pair)

Example 47 with DenseVector

Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li.

The class TrustRegionNewtonOptimizer, method tron.

void tron(Vector w) {
    int numColumns = loss.getNumColumns();
    double delta, snorm, one = 1.0;
    double alpha, f, fnew, prered, actred, gs;
    int search = 1, iter = 1;
    Vector w_new = new DenseVector(numColumns);
    Vector g = new DenseVector(numColumns);
    for (int i = 0; i < numColumns; i++) w.set(i, 0);
    f = loss.fun(w);
    loss.grad(w, g);
    delta = g.norm(2);
    double gnorm1 = delta;
    double gnorm = gnorm1;
    if (gnorm <= eps * gnorm1)
        search = 0;
    iter = 1;
    while (iter <= maxIter && search != 0) {
        Pair<Vector, Vector> result = trcg(delta, g);
        Vector s = result.getFirst();
        Vector r = result.getSecond();
        for (int j = 0; j < w.size(); j++) {
            w_new.set(j, w.get(j));
        }
        daxpy(one, s, w_new);
        gs = g.dot(s);
        prered = -0.5 * (gs - s.dot(r));
        fnew = loss.fun(w_new);
        // Compute the actual reduction.
        actred = f - fnew;
        // On the first iteration, adjust the initial step bound.
        snorm = s.norm(2);
        if (iter == 1)
            delta = Math.min(delta, snorm);
        // Compute prediction alpha*snorm of the step.
        if (fnew - f - gs <= 0)
            alpha = SIGMA3;
        else
            alpha = Math.max(SIGMA1, -0.5 * (gs / (fnew - f - gs)));
        // Update the trust region bound according to the ratio of actual to predicted reduction.
        if (actred < ETA0 * prered)
            delta = Math.min(Math.max(alpha, SIGMA1) * snorm, SIGMA2 * delta);
        else if (actred < ETA1 * prered)
            delta = Math.max(SIGMA1 * delta, Math.min(alpha * snorm, SIGMA2 * delta));
        else if (actred < ETA2 * prered)
            delta = Math.max(SIGMA1 * delta, Math.min(alpha * snorm, SIGMA3 * delta));
        else
            delta = Math.max(delta, Math.min(alpha * snorm, SIGMA3 * delta));
        System.out.println("f = " + f);
        if (actred > ETA0 * prered) {
            iter++;
            for (int j = 0; j < w.size(); j++) {
                w.set(j, w_new.get(j));
            }
            f = fnew;
            loss.grad(w, g);
            gnorm = g.norm(2);
            if (gnorm <= eps * gnorm1)
                break;
        }
        if (f < -1.0e+32) {
            break;
        }
        if (Math.abs(actred) <= 0 && prered <= 0) {
            System.out.println("WARNING: actred and prered <= 0%n");
            break;
        }
        if (Math.abs(actred) <= 1.0e-12 * Math.abs(f) && Math.abs(prered) <= 1.0e-12 * Math.abs(f)) {
            System.out.println("WARNING: actred and prered too small%n");
            break;
        }
    }
}
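The example does not show how the optimizer or its loss function are constructed, nor how tron is invoked. A hypothetical call site, assuming the caller sits in the same package (tron is package-private) and that the optimizer has already been built around a loss exposing getNumColumns():

// Hypothetical usage sketch only; optimizer construction is assumed, not shown above.
void runOptimizer(TrustRegionNewtonOptimizer optimizer, int numColumns) {
    Vector w = new DenseVector(numColumns); // starting point; tron() re-zeroes it anyway
    optimizer.tron(w);                      // on return, w holds the optimized weights
}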
Also used : DenseVector(org.apache.mahout.math.DenseVector) Vector(org.apache.mahout.math.Vector)

Example 48 with DenseVector

Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li.

The class SupervisedEmbeddingLoss, method getParameters.

public Vector getParameters() {
    int numData = this.updatedEmbeddingMatrix.getNumDataPoints();
    int numFeatures = this.updatedEmbeddingMatrix.getNumFeatures();
    int vecSize = numData * numFeatures;
    Vector pVec = new DenseVector(vecSize);
    for (int i = 0; i < numData; i++) {
        for (int j = 0; j < numFeatures; j++) {
            pVec.set(i * numFeatures + j, this.updatedEmbeddingMatrix.getRow(i).get(j));
        }
    }
    return pVec;
}
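The example only shows the flattening direction. A hypothetical sketch of the inverse operation, scattering a flat parameter vector back into the embedding matrix with the same i * numFeatures + j indexing; it assumes getRow(i) returns a writable vector view, which the snippet does not confirm:

// Hypothetical inverse of getParameters (not part of the example above).
public void setParameters(Vector pVec) {
    int numData = this.updatedEmbeddingMatrix.getNumDataPoints();
    int numFeatures = this.updatedEmbeddingMatrix.getNumFeatures();
    for (int i = 0; i < numData; i++) {
        for (int j = 0; j < numFeatures; j++) {
            // Write the flat entry back into row i, column j (assumes the row is mutable).
            this.updatedEmbeddingMatrix.getRow(i).set(j, pVec.get(i * numFeatures + j));
        }
    }
}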
Also used : DenseVector(org.apache.mahout.math.DenseVector) Vector(org.apache.mahout.math.Vector)

Example 49 with DenseVector

Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li.

The class CBMInspector, method getMean.

public static Weights getMean(CBM bmm, int label) {
    int numClusters = bmm.getNumComponents();
    int length = ((LogisticRegression) bmm.getBinaryClassifiers()[0][0]).getWeights().getAllWeights().size();
    int numFeatures = ((LogisticRegression) bmm.getBinaryClassifiers()[0][0]).getNumFeatures();
    Vector mean = new DenseVector(length);
    for (int k = 0; k < numClusters; k++) {
        mean = mean.plus(((LogisticRegression) bmm.getBinaryClassifiers()[k][label]).getWeights().getAllWeights());
    }
    mean = mean.divide(numClusters);
    return new Weights(2, numFeatures, mean);
}
Also used : Weights(edu.neu.ccs.pyramid.classification.logistic_regression.Weights) LogisticRegression(edu.neu.ccs.pyramid.classification.logistic_regression.LogisticRegression) DenseVector(org.apache.mahout.math.DenseVector) Vector(org.apache.mahout.math.Vector)

Example 50 with DenseVector

Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li.

The class CBMInspector, method distanceFromMean.

public static double distanceFromMean(CBM bmm, int label) {
    Classifier.ProbabilityEstimator[][] logistics = bmm.getBinaryClassifiers();
    int numClusters = bmm.getNumComponents();
    int numFeatures = ((LogisticRegression) logistics[0][0]).getNumFeatures();
    Vector positiveAverageVector = new DenseVector(numFeatures);
    for (int k = 0; k < numClusters; k++) {
        Vector positiveVector = ((LogisticRegression) logistics[k][label]).getWeights().getWeightsWithoutBiasForClass(1);
        positiveAverageVector = positiveAverageVector.plus(positiveVector);
    }
    positiveAverageVector = positiveAverageVector.divide(numClusters);
    double dis = 0;
    for (int k = 0; k < numClusters; k++) {
        Vector positiveVector = ((LogisticRegression) logistics[k][label]).getWeights().getWeightsWithoutBiasForClass(1);
        dis += positiveVector.minus(positiveAverageVector).norm(2);
    }
    return dis / numClusters;
}
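Both CBMInspector examples lean on the same handful of Mahout vector operations: plus, divide, minus, and norm, all of which return a new vector (or a scalar, for norm) rather than mutating their receiver. A small self-contained sketch of those calls:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class VectorOpsDemo {
    public static void main(String[] args) {
        Vector a = new DenseVector(new double[] { 1.0, 2.0, 3.0 });
        Vector b = new DenseVector(new double[] { 4.0, 5.0, 6.0 });
        Vector mean = a.plus(b).divide(2.0);   // element-wise average of the two vectors
        double dist = a.minus(mean).norm(2);   // Euclidean distance of a from the mean
        System.out.println("mean = " + mean + ", distance = " + dist);
    }
}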
Also used : Classifier(edu.neu.ccs.pyramid.classification.Classifier) LogisticRegression(edu.neu.ccs.pyramid.classification.logistic_regression.LogisticRegression) DenseVector(org.apache.mahout.math.DenseVector) Vector(org.apache.mahout.math.Vector)

Aggregations

DenseVector (org.apache.mahout.math.DenseVector): 62
Vector (org.apache.mahout.math.Vector): 56
MultiLabelClfDataSet (edu.neu.ccs.pyramid.dataset.MultiLabelClfDataSet): 7
MultiLabel (edu.neu.ccs.pyramid.dataset.MultiLabel): 5
RandomAccessSparseVector (org.apache.mahout.math.RandomAccessSparseVector): 5
SequentialAccessSparseVector (org.apache.mahout.math.SequentialAccessSparseVector): 4
List (java.util.List): 3
EnumeratedIntegerDistribution (org.apache.commons.math3.distribution.EnumeratedIntegerDistribution): 3
LogisticRegression (edu.neu.ccs.pyramid.classification.logistic_regression.LogisticRegression): 2
DataSet (edu.neu.ccs.pyramid.dataset.DataSet): 2
EmpiricalCDF (edu.neu.ccs.pyramid.util.EmpiricalCDF): 2
IntegerDistribution (org.apache.commons.math3.distribution.IntegerDistribution): 2
MultivariateNormalDistribution (org.apache.commons.math3.distribution.MultivariateNormalDistribution): 2
Classifier (edu.neu.ccs.pyramid.classification.Classifier): 1
Weights (edu.neu.ccs.pyramid.classification.logistic_regression.Weights): 1
RegDataSet (edu.neu.ccs.pyramid.dataset.RegDataSet): 1
ConstantRegressor (edu.neu.ccs.pyramid.regression.ConstantRegressor): 1
BernoulliDistribution (edu.neu.ccs.pyramid.util.BernoulliDistribution): 1
Pair (edu.neu.ccs.pyramid.util.Pair): 1
ArrayList (java.util.ArrayList): 1