Search in sources :

Example 41 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

In the class UserAnalyzer, the method allocateReviews:

// [0, train) for training purpose
// [train, adapt) for adaptation purpose
// [adapt, 1] for testing purpose
// Splits a user's reviews chronologically into three consecutive segments:
//   [0, trainEnd)      -> TRAIN
//   [trainEnd, adaptEnd) -> ADAPTATION
//   [adaptEnd, end]    -> TEST
// Also tallies positive-label (yLabel == 1) counts per segment in m_pCount
// and the segment sizes in m_trainSize / m_adaptSize / m_testSize.
void allocateReviews(ArrayList<_Review> reviews) {
    // Order reviews by timestamp so the split respects chronology.
    Collections.sort(reviews);
    int total = reviews.size();
    int trainEnd = (int) (total * m_trainRatio);
    int adaptEnd = (int) (total * (m_trainRatio + m_adaptRatio));
    if (m_enforceAdapt)
        // guarantee at least one adaptation/train slot when enforcement is on
        adaptEnd = Math.max(1, adaptEnd);
    for (int i = 0; i < total; i++) {
        _Review r = reviews.get(i);
        if (i < trainEnd) {
            r.setType(rType.TRAIN);
            if (r.getYLabel() == 1)
                m_pCount[0]++;
            m_trainSize++;
        } else if (i < adaptEnd) {
            r.setType(rType.ADAPTATION);
            if (r.getYLabel() == 1)
                m_pCount[1]++;
            m_adaptSize++;
        } else {
            r.setType(rType.TEST);
            if (r.getYLabel() == 1)
                m_pCount[2]++;
            m_testSize++;
        }
    }
}
Also used : structures._Review(structures._Review)

Example 42 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

In the class GlobalSVM, the method train:

@Override
/**
 * Trains one global SVM over the pooled adaptation reviews of all users.
 * Collects every ADAPTATION-typed review into liblinear feature vectors,
 * builds a single {@code Problem}, and trains it with the configured
 * solver and cost {@code m_C}.
 *
 * @return always 0 (training objective value is not reported here)
 */
@Override
public double train() {
    init();
    // Transfer all user reviews to instances recognized by SVM, indexed by users.
    int trainSize = 0, validUserIndex = 0;
    ArrayList<Feature[]> fvs = new ArrayList<Feature[]>();
    ArrayList<Double> ys = new ArrayList<Double>();
    // Two-level loop: per user, then per review of that user.
    for (_AdaptStruct user : m_userList) {
        boolean validUser = false;
        for (_Review r : user.getReviews()) {
            if (r.getType() == rType.ADAPTATION) {
                // we will only use the adaptation data for this purpose
                fvs.add(createLibLinearFV(r, validUserIndex));
                // autobox via valueOf instead of the deprecated new Double(...)
                ys.add(Double.valueOf(r.getYLabel()));
                trainSize++;
                validUser = true;
            }
        }
        // only users contributing at least one instance advance the index
        if (validUser)
            validUserIndex++;
    }
    // Assemble the liblinear problem from the collected instances.
    Problem libProblem = new Problem();
    libProblem.l = trainSize;
    libProblem.x = new Feature[trainSize][];
    libProblem.y = new double[trainSize];
    for (int i = 0; i < trainSize; i++) {
        libProblem.x[i] = fvs.get(i);
        libProblem.y[i] = ys.get(i);
    }
    if (m_bias) {
        // including bias term; global model + user models
        libProblem.n = m_featureSize + 1;
        // bias term in liblinear.
        libProblem.bias = 1;
    } else {
        libProblem.n = m_featureSize;
        // no bias term in liblinear.
        libProblem.bias = -1;
    }
    m_libModel = Linear.train(libProblem, new Parameter(m_solverType, m_C, SVM.EPS));
    setPersonalizedModel();
    return 0;
}
Also used : structures._Review(structures._Review) Classifier.supervised.modelAdaptation._AdaptStruct(Classifier.supervised.modelAdaptation._AdaptStruct) ArrayList(java.util.ArrayList) Parameter(Classifier.supervised.liblinear.Parameter) Problem(Classifier.supervised.liblinear.Problem) Feature(Classifier.supervised.liblinear.Feature)

Example 43 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

In the class IndividualSVM, the method createLibLinearFV:

// Converts a review's sparse feature vector into a liblinear Feature array,
// appending one extra bias node when m_bias is enabled.
// NOTE(review): userIndex is currently unused here — kept for interface
// compatibility with callers; confirm whether subclasses rely on it.
public Feature[] createLibLinearFV(_Review r, int userIndex) {
    _SparseFeature[] sparse = r.getSparse();
    // Reserve one extra slot for the bias term when enabled.
    Feature[] nodes = new Feature[m_bias ? sparse.length + 1 : sparse.length];
    for (int i = 0; i < sparse.length; i++) {
        // liblinear feature indices are 1-based, hence the +1 shift.
        nodes[i] = new FeatureNode(sparse[i].getIndex() + 1, sparse[i].getValue());
    }
    if (m_bias)
        // user model's bias occupies the index right past the feature space
        nodes[sparse.length] = new FeatureNode(m_featureSize + 1, 1.0);
    return nodes;
}
Also used : FeatureNode(Classifier.supervised.liblinear.FeatureNode) structures._SparseFeature(structures._SparseFeature) Feature(Classifier.supervised.liblinear.Feature) structures._SparseFeature(structures._SparseFeature)

Example 44 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

In the class CoLinAdaptWithDiffFeatureGroups, the method calcLogLikelihood:

// Calculate the function value of the new added instance.
protected double calcLogLikelihood(_AdaptStruct user) {
    // log likelihood.
    double L = 0;
    double Pi = 0;
    for (_Review review : user.getReviews()) {
        if (review.getType() != rType.ADAPTATION)
            // only touch the adaptation data
            continue;
        calcPosterior(review.getSparse(), user);
        Pi = m_cache[review.getYLabel()];
        if (Pi > 0.0)
            L += Math.log(Pi);
        else
            L -= Utils.MAX_VALUE;
    }
    return L / getAdaptationSize(user);
}
Also used : structures._Review(structures._Review)

Example 45 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

In the class asyncMTLinAdapt, the method trainByUser:

// Online adaptation pass over every user: for each pending adaptation
// instance, optionally score the latest test instance first (unless in
// batch test mode), then take one gradient-descent step. At display
// level 1, prints "o"/"x" per step showing whether the gradient norm shrank.
void trainByUser() {
    double prevGradNorm = Double.MAX_VALUE;
    for (int u = 0; u < m_userList.size(); u++) {
        _CoLinAdaptStruct user = (_CoLinAdaptStruct) m_userList.get(u);
        while (user.hasNextAdaptationIns()) {
            // Evaluate the latest model before adapting; batch mode defers
            // all performance accumulation, so skip it there.
            if (m_testmode != TestMode.TM_batch) {
                _Review testDoc = user.getLatestTestIns();
                if (testDoc != null)
                    user.getPerfStat().addOnePredResult(predict(testDoc, user), testDoc.getYLabel());
            }
            gradientDescent(user, m_initStepSize, 1.0);
            // Gradient diagnostics only when debugging output is enabled.
            if (m_displayLv > 0) {
                double gradNorm = gradientTest();
                if (m_displayLv == 1)
                    System.out.print(gradNorm < prevGradNorm ? "o" : "x");
                prevGradNorm = gradNorm;
            }
        }
        if (m_displayLv == 1)
            System.out.println();
    }
}
Also used : structures._Review(structures._Review)

Aggregations

structures._Review (structures._Review)44 structures._SparseFeature (structures._SparseFeature)24 structures._HDPThetaStar (structures._HDPThetaStar)9 ArrayList (java.util.ArrayList)8 Feature (Classifier.supervised.liblinear.Feature)6 Classifier.supervised.modelAdaptation._AdaptStruct (Classifier.supervised.modelAdaptation._AdaptStruct)6 structures._PerformanceStat (structures._PerformanceStat)6 IOException (java.io.IOException)5 File (java.io.File)4 structures._User (structures._User)4 FeatureNode (Classifier.supervised.liblinear.FeatureNode)3 Parameter (Classifier.supervised.liblinear.Parameter)3 Problem (Classifier.supervised.liblinear.Problem)3 structures._RankItem (structures._RankItem)3 BufferedReader (java.io.BufferedReader)2 FileInputStream (java.io.FileInputStream)2 InputStreamReader (java.io.InputStreamReader)2 PrintWriter (java.io.PrintWriter)2 MyPriorityQueue (structures.MyPriorityQueue)2 SolverType (Classifier.supervised.liblinear.SolverType)1