Example 1 with LogisticLoss

Use of edu.neu.ccs.pyramid.classification.logistic_regression.LogisticLoss in the pyramid project by cheng-li.

From the class LRCBMOptimizer, method binaryObj:

// todo deal with prior classifier
@Override
protected double binaryObj(int component, int classIndex) {
    // Turn the multi-label assignments into binary labels for this class,
    // then into per-instance target distributions over {0, 1}.
    int[] binaryLabels = DataSetUtil.toBinaryLabels(dataSet.getMultiLabels(), classIndex);
    double[][] targetsDistribution = DataSetUtil.labelsToDistributions(binaryLabels, 2);
    // Weight each data point by its responsibility (gamma) for this mixture component.
    double[] weights = new double[dataSet.getNumDataPoints()];
    for (int i = 0; i < dataSet.getNumDataPoints(); i++) {
        weights[i] = gammas[i][component];
    }
    // Evaluate the weighted, penalized logistic loss of this component's binary classifier.
    LogisticLoss logisticLoss = new LogisticLoss((LogisticRegression) cbm.binaryClassifiers[component][classIndex], dataSet, weights, targetsDistribution, priorVarianceBinary, false);
    return logisticLoss.getValue();
}
Also used: LogisticLoss(edu.neu.ccs.pyramid.classification.logistic_regression.LogisticLoss)
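
Judging from the constructor arguments, the value returned here is presumably the gamma-weighted, penalized negative log-likelihood of the component's binary logistic regression. A sketch of that objective, under the assumption that priorVarianceBinary is the variance sigma^2 of a zero-mean Gaussian prior on the coefficient vector w (the symbols gamma_ik, t_i, and sigma^2 are interpretive, not taken from the source):

L(w) = -\sum_{i=1}^{n} \gamma_{ik} \sum_{y \in \{0,1\}} t_i(y)\, \log p_w(y \mid x_i) \;+\; \frac{\lVert w \rVert_2^2}{2\sigma^2}

Here gamma_ik corresponds to gammas[i][component] (the instance weights) and t_i to row i of targetsDistribution.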

Example 2 with LogisticLoss

Use of edu.neu.ccs.pyramid.classification.logistic_regression.LogisticLoss in the pyramid project by cheng-li.

From the class CBMUtilityOptimizer, method penalty:

// regularization
private double penalty() {
    double sum = 0;
    // Penalty contributed by the multi-class classifier (the component assignment model).
    LogisticLoss logisticLoss = new LogisticLoss((LogisticRegression) cbm.multiClassClassifier, dataSet, gammas, priorVarianceMultiClass, true);
    sum += logisticLoss.penaltyValue();
    // Add the penalty of every binary classifier, one per (component k, label l) pair.
    for (int k = 0; k < cbm.numComponents; k++) {
        for (int l = 0; l < cbm.getNumClasses(); l++) {
            sum += new LogisticLoss((LogisticRegression) cbm.binaryClassifiers[k][l], dataSet, gammasT[k], binaryTargetsDistributions[l], priorVarianceBinary, true).penaltyValue();
        }
    }
    return sum;
}
Also used: LogisticLoss(edu.neu.ccs.pyramid.classification.logistic_regression.LogisticLoss)
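
Since only penaltyValue() is read, this method presumably sums just the regularization terms of all the logistic regressions in the CBM. A sketch under the assumption that each prior variance parameterizes a zero-mean Gaussian prior (theta, w_kl, K, L, and the sigma symbols are interpretive):

\text{penalty} = \frac{\lVert \theta \rVert_2^2}{2\,\sigma_{\text{multi}}^2} \;+\; \sum_{k=1}^{K} \sum_{l=1}^{L} \frac{\lVert w_{kl} \rVert_2^2}{2\,\sigma_{\text{binary}}^2}

where theta denotes the multi-class classifier's coefficients, w_kl those of binary classifier (k, l), K = cbm.numComponents, and L = cbm.getNumClasses().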

Example 3 with LogisticLoss

Use of edu.neu.ccs.pyramid.classification.logistic_regression.LogisticLoss in the pyramid project by cheng-li.

From the class CBMNoiseOptimizerFixed, method penalty (identical in form to the CBMUtilityOptimizer version above):

// regularization
private double penalty() {
    double sum = 0;
    LogisticLoss logisticLoss = new LogisticLoss((LogisticRegression) cbm.multiClassClassifier, dataSet, gammas, priorVarianceMultiClass, true);
    sum += logisticLoss.penaltyValue();
    for (int k = 0; k < cbm.numComponents; k++) {
        for (int l = 0; l < cbm.getNumClasses(); l++) {
            sum += new LogisticLoss((LogisticRegression) cbm.binaryClassifiers[k][l], dataSet, gammasT[k], binaryTargetsDistributions[l], priorVarianceBinary, true).penaltyValue();
        }
    }
    return sum;
}
Also used: LogisticLoss(edu.neu.ccs.pyramid.classification.logistic_regression.LogisticLoss)

Example 4 with LogisticLoss

Use of edu.neu.ccs.pyramid.classification.logistic_regression.LogisticLoss in the pyramid project by cheng-li.

From the class ENCBMOptimizer, method binaryObj:

@Override
protected double binaryObj(int component, int classIndex) {
    // Binary targets for this label, expressed as per-instance distributions over {0, 1}.
    int[] binaryLabels = DataSetUtil.toBinaryLabels(dataSet.getMultiLabels(), classIndex);
    double[][] targetsDistribution = DataSetUtil.labelsToDistributions(binaryLabels, 2);
    // Weight each data point by its responsibility (gamma) for this component.
    double[] weights = new double[dataSet.getNumDataPoints()];
    for (int i = 0; i < dataSet.getNumDataPoints(); i++) {
        weights[i] = gammas[i][component];
    }
    // Unlike LRCBMOptimizer, this variant passes regularizationBinary and l1RatioBinary
    // instead of a prior variance, and reads getValueEL() rather than getValue().
    LogisticLoss logisticLoss = new LogisticLoss((LogisticRegression) cbm.binaryClassifiers[component][classIndex], dataSet, weights, targetsDistribution, regularizationBinary, l1RatioBinary, false);
    return logisticLoss.getValueEL();
}
Also used: LogisticLoss(edu.neu.ccs.pyramid.classification.logistic_regression.LogisticLoss)
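
The l1RatioBinary parameter and the getValueEL() accessor suggest an elastic-net penalty rather than a pure Gaussian prior. A sketch of the objective under that assumption (lambda, alpha, gamma_ik, and t_i are interpretive symbols, with lambda standing in for regularizationBinary and alpha for l1RatioBinary):

L_{\text{EN}}(w) = -\sum_{i=1}^{n} \gamma_{ik} \sum_{y \in \{0,1\}} t_i(y)\, \log p_w(y \mid x_i) \;+\; \lambda \left( \alpha \lVert w \rVert_1 + \frac{1-\alpha}{2} \lVert w \rVert_2^2 \right)

The data term is the same gamma-weighted negative log-likelihood as in Example 1; only the regularizer changes.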

Aggregations

LogisticLoss (edu.neu.ccs.pyramid.classification.logistic_regression.LogisticLoss): 4 usages