
Example 21 with structures._Review

Use of structures._Review in project IR_Base by Linda-sunshine.

The class asyncLinAdapt, method train().

// online training on each individual user's adaptation data
@Override
public double train() {
    double gNorm, gNormOld = Double.MAX_VALUE;
    int predL, trueL;
    _Review doc;
    _PerformanceStat perfStat;
    _LinAdaptStruct user;
    initLBFGS();
    init();
    for (int i = 0; i < m_userList.size(); i++) {
        user = (_LinAdaptStruct) m_userList.get(i);
        while (user.hasNextAdaptationIns()) {
            // test the latest model before model adaptation
            if (m_testmode != TestMode.TM_batch && (doc = user.getLatestTestIns()) != null) {
                perfStat = user.getPerfStat();
                predL = predict(doc, user);
                trueL = doc.getYLabel();
                perfStat.addOnePredResult(predL, trueL);
            }
            // in batch mode we will not accumulate the performance during adaptation
            // prepare to adapt: initialize gradient
            Arrays.fill(m_g, 0);
            calculateGradients(user);
            gNorm = gradientTest();
            if (m_displayLv == 1) {
                if (gNorm < gNormOld)
                    System.out.print("o");
                else
                    System.out.print("x");
            }
            // gradient descent
            asyncRegLR.gradientDescent(user, m_initStepSize, m_g);
            gNormOld = gNorm;
        }
        if (m_displayLv > 0)
            System.out.println();
    }
    setPersonalizedModel();
    // we do not evaluate function value
    return 0;
}
Also used: structures._Review, structures._PerformanceStat
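
The loop above follows a prequential pattern: each adaptation instance is first used to test the latest personalized model and only then to update it with one gradient step. Below is a minimal, self-contained sketch of that predict-then-adapt cycle using plain logistic regression in place of LinAdapt's scaling-and-shifting parameters; the class OnlineLogit, the decaying step size, and all other names here are illustrative assumptions, not part of IR_Base.

public class OnlineLogit {
    private final double[] w;          // weights, w[0] is the bias
    private final double initStepSize; // plays the role of m_initStepSize
    private int updates = 0;

    public OnlineLogit(int dim, double initStepSize) {
        this.w = new double[dim + 1];
        this.initStepSize = initStepSize;
    }

    // predict the label before the model is updated with this instance
    public int predict(double[] x) {
        double s = w[0];
        for (int i = 0; i < x.length; i++) s += w[i + 1] * x[i];
        return s > 0 ? 1 : 0;
    }

    // one stochastic gradient step, analogous to asyncRegLR.gradientDescent
    public void adapt(double[] x, int y) {
        double s = w[0];
        for (int i = 0; i < x.length; i++) s += w[i + 1] * x[i];
        double p = 1.0 / (1.0 + Math.exp(-s));
        double delta = y - p;                          // logistic-loss gradient
        double step = initStepSize / (1.0 + updates);  // assumed decaying schedule
        w[0] += step * delta;
        for (int i = 0; i < x.length; i++) w[i + 1] += step * delta * x[i];
        updates++;
    }
}

The decaying step size is only one plausible schedule; the exact update rule inside asyncRegLR.gradientDescent may differ.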

Example 22 with structures._Review

Use of structures._Review in project IR_Base by Linda-sunshine.

The class asyncMTLinAdapt, method trainByReview().

void trainByReview() {
    LinkedList<_UserReviewPair> reviewlist = new LinkedList<_UserReviewPair>();
    double gNorm, gNormOld = Double.MAX_VALUE;
    int predL, trueL, counter = 0;
    _Review doc;
    _CoLinAdaptStruct user;
    // collect the training/adaptation data
    for (int i = 0; i < m_userList.size(); i++) {
        user = (_CoLinAdaptStruct) m_userList.get(i);
        for (_Review r : user.getReviews()) {
            if (r.getType() == rType.ADAPTATION || r.getType() == rType.TRAIN)
                // we will only collect the training or adaptation reviews
                reviewlist.add(new _UserReviewPair(user, r));
        }
    }
    // sort them by timestamp
    Collections.sort(reviewlist);
    for (_UserReviewPair pair : reviewlist) {
        user = (_CoLinAdaptStruct) pair.getUser();
        // test the latest model before model adaptation
        if (m_testmode != TestMode.TM_batch) {
            doc = pair.getReview();
            predL = predict(doc, user);
            trueL = doc.getYLabel();
            user.getPerfStat().addOnePredResult(predL, trueL);
        }
        // in batch mode we will not accumulate the performance during adaptation
        gradientDescent(user, m_initStepSize, 1.0);
        // test the gradient only when we want to debug
        if (m_displayLv > 0) {
            gNorm = gradientTest();
            if (m_displayLv == 1) {
                if (gNorm < gNormOld)
                    System.out.print("o");
                else
                    System.out.print("x");
            }
            gNormOld = gNorm;
            if (++counter % 120 == 0)
                System.out.println();
        }
    }
}
Also used: structures._Review, structures._UserReviewPair, java.util.LinkedList
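
trainByReview() differs from train() above in that it interleaves reviews from all users and replays them in timestamp order, which is why the pairs are collected first and then sorted with Collections.sort. A small sketch of that sorting step follows; UserReviewPair is a hypothetical stand-in for structures._UserReviewPair, which presumably implements Comparable in the same spirit.

import java.util.Collections;
import java.util.LinkedList;

class UserReviewPair implements Comparable<UserReviewPair> {
    final String userId;
    final long timestamp; // adaptation instances are replayed in time order

    UserReviewPair(String userId, long timestamp) {
        this.userId = userId;
        this.timestamp = timestamp;
    }

    @Override
    public int compareTo(UserReviewPair other) {
        return Long.compare(this.timestamp, other.timestamp);
    }

    public static void main(String[] args) {
        LinkedList<UserReviewPair> list = new LinkedList<>();
        list.add(new UserReviewPair("u2", 1700000200L));
        list.add(new UserReviewPair("u1", 1700000100L));
        Collections.sort(list);                 // oldest review first, regardless of user
        System.out.println(list.get(0).userId); // prints u1
    }
}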

Example 23 with structures._Review

Use of structures._Review in project IR_Base by Linda-sunshine.

The class CLRWithDP, method predict().

public int predict(_AdaptStruct user, _thetaStar theta) {
    double[] As;
    double sum;
    int m, n, predL = 0, count = 0;
    for (_Review r : user.getReviews()) {
        if (r.getType() == rType.TEST) {
            As = theta.getModel();
            // Bias term: w_s0*a0+b0.
            sum = As[0] * MTCLinAdaptWithDP.m_supWeights[0] + As[m_dim];
            for (_SparseFeature fv : r.getSparse()) {
                n = fv.getIndex() + 1;
                m = m_featureGroupMap[n];
                sum += (As[m] * MTCLinAdaptWithDP.m_supWeights[n] + As[m_dim + m]) * fv.getValue();
            }
            // compute the prediction for this review; reset it each iteration
            // so a previous positive prediction does not carry over
            predL = (sum > 0.5) ? 1 : 0;
            if (predL == r.getYLabel())
                count++;
        }
    }
    return count;
}
Also used: structures._Review, structures._SparseFeature
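
The score in predict() combines a shared (super-user) weight vector with per-group scaling and shifting parameters: feature n is mapped to group m, and its global weight is transformed as As[m]*w[n] + As[dim+m] before being multiplied by the feature value. The sketch below isolates that scoring rule; the class, method, and parameter names are illustrative, and only the arithmetic and the 0.5 threshold come from the snippet.

public class GroupedLinearScore {
    // indices/values form one sparse feature vector; slot 0 of w is the bias
    static int scoreAndPredict(int[] indices, double[] values,
                               double[] As, double[] w, int[] featureGroupMap, int dim) {
        double sum = As[0] * w[0] + As[dim];          // bias: w_0 scaled and shifted
        for (int i = 0; i < indices.length; i++) {
            int n = indices[i] + 1;                   // +1 because index 0 is the bias
            int m = featureGroupMap[n];
            sum += (As[m] * w[n] + As[dim + m]) * values[i];
        }
        return sum > 0.5 ? 1 : 0;                     // same decision threshold as predict()
    }
}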

Example 24 with structures._Review

Use of structures._Review in project IR_Base by Linda-sunshine.

The class CLRWithDP, method calcLogLikelihood4Posterior().

// added by Lin: calculate the function value (log-likelihood) of the newly added instance
protected double calcLogLikelihood4Posterior(_AdaptStruct user) {
    // log likelihood.
    double L = 0;
    double Pi = 0;
    for (_Review review : user.getReviews()) {
        // only use the adaptation and test data
        if (review.getType() != rType.ADAPTATION && review.getType() != rType.TEST)
            continue;
        Pi = logit(review.getSparse(), user);
        if (review.getYLabel() == 1) {
            if (Pi > 0.0)
                L += Math.log(Pi);
            else
                L -= Utils.MAX_VALUE;
        } else {
            if (Pi < 1.0)
                L += Math.log(1 - Pi);
            else
                L -= Utils.MAX_VALUE;
        }
    }
    if (m_LNormFlag)
        return L / getAdaptationSize(user);
    else
        return L;
}
Also used: structures._Review
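
The likelihood computation clamps the two saturating cases so that a probability of exactly 0 or 1 contributes a large finite penalty instead of -Infinity. A compact sketch of that clamped log-likelihood is shown below; MAX_PENALTY is an assumed constant standing in for Utils.MAX_VALUE, and the probabilities p[i] play the role of logit(review.getSparse(), user).

public class ClampedLogLikelihood {
    static final double MAX_PENALTY = 1e10; // assumption; stands in for Utils.MAX_VALUE

    static double logLikelihood(double[] p, int[] y) {
        double L = 0;
        for (int i = 0; i < p.length; i++) {
            if (y[i] == 1)
                L += (p[i] > 0.0) ? Math.log(p[i]) : -MAX_PENALTY;     // log p, clamped
            else
                L += (p[i] < 1.0) ? Math.log(1 - p[i]) : -MAX_PENALTY; // log(1-p), clamped
        }
        return L;
    }
}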

Example 25 with structures._Review

Use of structures._Review in project IR_Base by Linda-sunshine.

The class CLRWithHDP, method gradientByFunc().

@Override
protected void gradientByFunc(_AdaptStruct u, _Doc r, double weight, double[] g) {
    _Review review = (_Review) r;
    // feature index
    int n;
    int cIndex = review.getHDPThetaStar().getIndex();
    if (cIndex < 0 || cIndex >= m_kBar)
        System.err.println("Error, cannot find the HDP theta star!");
    int offset = m_dim * cIndex;
    double delta = weight * (review.getYLabel() - logit(review.getSparse(), review));
    // Bias term.
    // x0=1
    g[offset] -= delta;
    // Traverse all the feature dimension to calculate the gradient.
    for (_SparseFeature fv : review.getSparse()) {
        n = fv.getIndex() + 1;
        g[offset + n] -= delta * fv.getValue();
    }
}
Also used: structures._Review, structures._SparseFeature
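
gradientByFunc() writes the logistic-regression gradient of a single review into the block of the shared gradient array owned by the review's HDP cluster, starting at offset m_dim * cIndex. The sketch below shows only that block layout and accumulation; the class name, the parameter list, and the precomputed probability p are illustrative assumptions.

public class ClusterGradient {
    // indices/values: sparse features of one review (slot 0 of each block is the bias)
    static void accumulate(double[] g, int dim, int clusterIndex,
                           int[] indices, double[] values, int yLabel, double p) {
        int offset = dim * clusterIndex;       // start of this cluster's gradient block
        double delta = yLabel - p;             // p = sigmoid score of the review
        g[offset] -= delta;                    // bias term, x0 = 1
        for (int i = 0; i < indices.length; i++) {
            int n = indices[i] + 1;
            g[offset + n] -= delta * values[i];
        }
    }
}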

Aggregations

structures._Review: 44
structures._SparseFeature: 24
structures._HDPThetaStar: 9
ArrayList (java.util.ArrayList): 8
Feature (Classifier.supervised.liblinear.Feature): 6
Classifier.supervised.modelAdaptation._AdaptStruct: 6
structures._PerformanceStat: 6
IOException (java.io.IOException): 5
File (java.io.File): 4
structures._User: 4
FeatureNode (Classifier.supervised.liblinear.FeatureNode): 3
Parameter (Classifier.supervised.liblinear.Parameter): 3
Problem (Classifier.supervised.liblinear.Problem): 3
structures._RankItem: 3
BufferedReader (java.io.BufferedReader): 2
FileInputStream (java.io.FileInputStream): 2
InputStreamReader (java.io.InputStreamReader): 2
PrintWriter (java.io.PrintWriter): 2
MyPriorityQueue (structures.MyPriorityQueue): 2
SolverType (Classifier.supervised.liblinear.SolverType): 1