Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li: class CBMInspector, method distanceFromMean.
public static double distanceFromMean(CBM bmm, int label) {
    Classifier.ProbabilityEstimator[][] logistics = bmm.getBinaryClassifiers();
    int numClusters = bmm.getNumComponents();
    int numFeatures = ((LogisticRegression) logistics[0][0]).getNumFeatures();
    // average the positive-class weight vectors (bias excluded) over all components
    Vector positiveAverageVector = new DenseVector(numFeatures);
    for (int k = 0; k < numClusters; k++) {
        Vector positiveVector = ((LogisticRegression) logistics[k][label]).getWeights().getWeightsWithoutBiasForClass(1);
        positiveAverageVector = positiveAverageVector.plus(positiveVector);
    }
    positiveAverageVector = positiveAverageVector.divide(numClusters);
    // average L2 distance of each component's weight vector from that mean
    double dis = 0;
    for (int k = 0; k < numClusters; k++) {
        Vector positiveVector = ((LogisticRegression) logistics[k][label]).getWeights().getWeightsWithoutBiasForClass(1);
        dis += positiveVector.minus(positiveAverageVector).norm(2);
    }
    return dis / numClusters;
}
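For readers unfamiliar with the Mahout vector API, here is a minimal standalone sketch of the same plus/divide/minus/norm pattern, with made-up weight vectors standing in for the CBM components (the class name DenseVectorSpreadSketch and the sample values are illustrative, not part of pyramid):

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class DenseVectorSpreadSketch {
    // average L2 distance of a set of vectors from their mean,
    // mirroring the plus/divide/minus/norm pattern in distanceFromMean above
    static double averageDistanceFromMean(Vector[] vectors) {
        Vector mean = new DenseVector(vectors[0].size());
        for (Vector v : vectors) {
            mean = mean.plus(v);          // plus() returns a new vector
        }
        mean = mean.divide(vectors.length);
        double dis = 0;
        for (Vector v : vectors) {
            dis += v.minus(mean).norm(2); // Euclidean distance from the mean
        }
        return dis / vectors.length;
    }

    public static void main(String[] args) {
        Vector[] componentWeights = {
            new DenseVector(new double[]{1.0, 2.0, 3.0}),
            new DenseVector(new double[]{2.0, 2.0, 1.0}),
            new DenseVector(new double[]{3.0, 0.0, 2.0})
        };
        System.out.println(averageDistanceFromMean(componentWeights));
    }
}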
Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li: class CBMInspector, method getMean.
public static Weights getMean(CBM bmm, int label) {
    int numClusters = bmm.getNumComponents();
    int length = ((LogisticRegression) bmm.getBinaryClassifiers()[0][0]).getWeights().getAllWeights().size();
    int numFeatures = ((LogisticRegression) bmm.getBinaryClassifiers()[0][0]).getNumFeatures();
    // accumulate the full weight vectors (bias included) of the label's binary classifiers
    Vector mean = new DenseVector(length);
    for (int k = 0; k < numClusters; k++) {
        mean = mean.plus(((LogisticRegression) bmm.getBinaryClassifiers()[k][label]).getWeights().getAllWeights());
    }
    mean = mean.divide(numClusters);
    // wrap the averaged weights as a binary (2-class) Weights object
    return new Weights(2, numFeatures, mean);
}
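Note that Mahout's plus() and divide() return new vectors rather than mutating the receiver, which is why both methods above reassign their accumulator. A small sketch of that averaging step in isolation (class name and sample weights are made up):

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class VectorAveragingSketch {
    public static void main(String[] args) {
        Vector[] allWeights = {
            new DenseVector(new double[]{0.5, -1.0, 2.0, 0.0}),
            new DenseVector(new double[]{1.5,  1.0, 0.0, 2.0})
        };
        Vector mean = new DenseVector(allWeights[0].size());
        for (Vector w : allWeights) {
            mean = mean.plus(w);       // reassign: plus() does not modify 'mean' in place
        }
        mean = mean.divide(allWeights.length);
        System.out.println(mean);      // prints the element-wise average
    }
}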
Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li: class MLFlatScaling, method predictClassProbs.
@Override
public double[] predictClassProbs(Vector vector) {
    double[] scores = scoreEstimator.predictClassScores(vector);
    double[] probs = new double[scores.length];
    for (int k = 0; k < scores.length; k++) {
        // feed each raw score to the calibrating logistic regression as a one-dimensional feature vector
        Vector scoreFeatureVector = new DenseVector(1);
        scoreFeatureVector.set(0, scores[k]);
        probs[k] = logisticRegression.predictClassProb(scoreFeatureVector, 1);
    }
    return probs;
}
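The one-element feature vector built inside the loop could equally be created from a double array; a minimal sketch, where the score value and class name are made up:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class SingleScoreVectorSketch {
    public static void main(String[] args) {
        double score = 0.37;                                  // hypothetical raw score for one label
        // two equivalent one-dimensional feature vectors
        Vector viaSet = new DenseVector(1);
        viaSet.set(0, score);
        Vector viaArray = new DenseVector(new double[]{score});
        System.out.println(viaSet.get(0) == viaArray.get(0)); // true
    }
}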
Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li: class AugmentedLRLoss, method penaltyGradient.
private Vector penaltyGradient() {
    Vector featureWeights = augmentedLR.featureWeights();
    Vector componentWeights = augmentedLR.componentWeights();
    // gradient of the Gaussian prior: weight / variance, laid out as [feature block | component block]
    Vector penaltyGradient = new DenseVector(augmentedLR.getAllWeights().size());
    for (int d = 0; d < numFeatures; d++) {
        penaltyGradient.set(d, featureWeights.get(d) / featureWeightVariance);
    }
    for (int k = 0; k < numComponents; k++) {
        penaltyGradient.set(numFeatures + k, componentWeights.get(k) / componentWeightVariance);
    }
    return penaltyGradient;
}
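The gradient vector here is block-structured: the first numFeatures entries belong to feature weights and the next numComponents entries to component weights. A standalone sketch of that offset-indexed fill, with made-up sizes, weights, and variances standing in for the AugmentedLR fields:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class BlockGradientSketch {
    public static void main(String[] args) {
        int numFeatures = 3;
        int numComponents = 2;
        Vector featureWeights = new DenseVector(new double[]{0.2, -0.4, 1.0});
        Vector componentWeights = new DenseVector(new double[]{0.5, -0.5});
        double featureWeightVariance = 1.0;
        double componentWeightVariance = 10.0;

        // one flat gradient vector: indices [0, numFeatures) for features,
        // [numFeatures, numFeatures + numComponents) for components
        Vector penaltyGradient = new DenseVector(numFeatures + numComponents);
        for (int d = 0; d < numFeatures; d++) {
            penaltyGradient.set(d, featureWeights.get(d) / featureWeightVariance);
        }
        for (int k = 0; k < numComponents; k++) {
            penaltyGradient.set(numFeatures + k, componentWeights.get(k) / componentWeightVariance);
        }
        System.out.println(penaltyGradient);
    }
}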
Use of org.apache.mahout.math.DenseVector in project pyramid by cheng-li: class MLACPlattScaling, method predictAssignmentProb.
@Override
public double predictAssignmentProb(Vector vector, MultiLabel assignment) {
    double[] scores = scoreEstimator.predictClassScores(vector);
    // copy the per-label scores into a dense feature vector for the calibrating logistic regression
    Vector scoreVector = new DenseVector(scores.length);
    for (int i = 0; i < scores.length; i++) {
        scoreVector.set(i, scores[i]);
    }
    return logisticRegression.predictAssignmentProb(scoreVector, assignment);
}
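Copying a score array into a DenseVector element by element, as above, is equivalent to using the array-based constructor, which copies the array it is given; a small sketch with made-up scores:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class ScoresToVectorSketch {
    public static void main(String[] args) {
        double[] scores = {0.9, -1.2, 0.3};                  // hypothetical per-label scores
        // element-by-element copy, as in predictAssignmentProb above
        Vector byLoop = new DenseVector(scores.length);
        for (int i = 0; i < scores.length; i++) {
            byLoop.set(i, scores[i]);
        }
        // equivalent: the constructor copies the array's contents
        Vector byConstructor = new DenseVector(scores);
        System.out.println(byConstructor.minus(byLoop).norm(1)); // 0.0
    }
}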