Search in sources :

Example 21 with Classifier.supervised.modelAdaptation._AdaptStruct

use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine.

From the class RegLR, method loadUsers:

/**
 * Populates {@code m_userList} by wrapping every input user in a plain
 * {@code _AdaptStruct} and initializing that user's model with
 * {@code m_featureSize + 1} weights (the extra slot presumably holds the
 * bias term — TODO confirm against {@code _User.initModel}).
 */
@Override
public void loadUsers(ArrayList<_User> userList) {
    m_userList = new ArrayList<_AdaptStruct>();
    for (int i = 0; i < userList.size(); i++) {
        _User u = userList.get(i);
        u.initModel(m_featureSize + 1);
        m_userList.add(new _AdaptStruct(u));
    }
}
Also used : Classifier.supervised.modelAdaptation._AdaptStruct(Classifier.supervised.modelAdaptation._AdaptStruct) structures._User(structures._User)

Example 22 with Classifier.supervised.modelAdaptation._AdaptStruct

use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine.

From the class asyncMTRegLR, method train:

/**
 * Online training loop: for each user, consumes adaptation instances one at a
 * time, optionally tests the latest model before each adaptation step, then
 * performs one gradient-descent update. Per-instance predictions are logged to
 * "train_online_MTRegLR.txt".
 *
 * @return always 0 — the objective value is not evaluated in online mode.
 */
@Override
public double train() {
    double gNorm, gNormOld = Double.MAX_VALUE;
    int predL, trueL;
    _Review doc;
    _AdaptStruct user;
    _PerformanceStat perfStat;
    double val;
    initLBFGS();
    init();
    try {
        // FIX: String.format(...) had no format arguments — use the literal directly.
        m_writer = new PrintWriter(new File("train_online_MTRegLR.txt"));
        for (int i = 0; i < m_userList.size(); i++) {
            user = m_userList.get(i);
            while (user.hasNextAdaptationIns()) {
                // test the latest model before model adaptation
                if (m_testmode != TestMode.TM_batch && (doc = user.getLatestTestIns()) != null) {
                    perfStat = user.getPerfStat();
                    val = logit(doc.getSparse(), user);
                    predL = predict(doc, user);
                    trueL = doc.getYLabel();
                    perfStat.addOnePredResult(predL, trueL);
                    m_writer.format("%s\t%d\t%.4f\t%d\t%d\n", user.getUserID(), doc.getID(), val, predL, trueL);
                }
                // in batch mode we will not accumulate the performance during adaptation
                gradientDescent(user, m_initStepSize, 1.0);
                // test the gradient only when we want to debug
                if (m_displayLv > 0) {
                    gNorm = gradientTest();
                    if (m_displayLv == 1) {
                        // "o" = gradient norm shrank since last step, "x" = it grew
                        if (gNorm < gNormOld)
                            System.out.print("o");
                        else
                            System.out.print("x");
                    }
                    gNormOld = gNorm;
                }
            }
            m_writer.flush();
            if (m_displayLv == 1)
                System.out.println();
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // FIX: the writer was previously only flushed, never closed — resource leak.
        if (m_writer != null)
            m_writer.close();
    }
    setPersonalizedModel();
    // we do not evaluate function value
    return 0;
}
Also used : structures._Review(structures._Review) Classifier.supervised.modelAdaptation._AdaptStruct(Classifier.supervised.modelAdaptation._AdaptStruct) IOException(java.io.IOException) File(java.io.File) structures._PerformanceStat(structures._PerformanceStat) PrintWriter(java.io.PrintWriter)

Example 23 with Classifier.supervised.modelAdaptation._AdaptStruct

use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine.

From the class GlobalSVM, method train:

/**
 * Trains one global liblinear SVM over the adaptation reviews of all users,
 * then derives each user's personalized model from it.
 *
 * @return always 0 — the SVM objective value is not propagated.
 */
@Override
public double train() {
    init();
    // Transfer all user reviews to instances recognized by SVM, indexed by users.
    int trainSize = 0, validUserIndex = 0;
    ArrayList<Feature[]> fvs = new ArrayList<Feature[]>();
    ArrayList<Double> ys = new ArrayList<Double>();
    // Two for loop to access the reviews, indexed by users.
    ArrayList<_Review> reviews;
    for (_AdaptStruct user : m_userList) {
        reviews = user.getReviews();
        boolean validUser = false;
        for (_Review r : reviews) {
            if (r.getType() == rType.ADAPTATION) {
                // we will only use the adaptation data for this purpose
                fvs.add(createLibLinearFV(r, validUserIndex));
                // FIX: replaced deprecated `new Double(...)` boxing constructor with autoboxing.
                ys.add((double) r.getYLabel());
                trainSize++;
                validUser = true;
            }
        }
        // only users that contributed at least one adaptation review advance the index
        if (validUser)
            validUserIndex++;
    }
    // Assemble the liblinear problem from the collected instances.
    Problem libProblem = new Problem();
    libProblem.l = trainSize;
    libProblem.x = new Feature[trainSize][];
    libProblem.y = new double[trainSize];
    for (int i = 0; i < trainSize; i++) {
        libProblem.x[i] = fvs.get(i);
        libProblem.y[i] = ys.get(i);
    }
    if (m_bias) {
        // including bias term; global model + user models
        libProblem.n = m_featureSize + 1;
        // bias term in liblinear.
        libProblem.bias = 1;
    } else {
        libProblem.n = m_featureSize;
        // no bias term in liblinear.
        libProblem.bias = -1;
    }
    m_libModel = Linear.train(libProblem, new Parameter(m_solverType, m_C, SVM.EPS));
    setPersonalizedModel();
    return 0;
}
Also used : structures._Review(structures._Review) Classifier.supervised.modelAdaptation._AdaptStruct(Classifier.supervised.modelAdaptation._AdaptStruct) ArrayList(java.util.ArrayList) Parameter(Classifier.supervised.liblinear.Parameter) Problem(Classifier.supervised.liblinear.Problem) Feature(Classifier.supervised.liblinear.Feature)

Example 24 with Classifier.supervised.modelAdaptation._AdaptStruct

use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine.

From the class IndividualSVM, method loadSuperUsers:

/**
 * Builds the super-user list by wrapping each input user in an
 * {@code _AdaptStruct}, and allocates the personalized-weight buffer
 * ({@code m_featureSize + 1} slots — the extra one presumably for the
 * bias term; verify against the caller).
 */
public void loadSuperUsers(ArrayList<_User> userList) {
    ArrayList<_AdaptStruct> superUsers = new ArrayList<_AdaptStruct>();
    for (int i = 0; i < userList.size(); i++)
        superUsers.add(new _AdaptStruct(userList.get(i)));
    m_supUserList = superUsers;
    m_pWeights = new double[m_featureSize + 1];
}
Also used : Classifier.supervised.modelAdaptation._AdaptStruct(Classifier.supervised.modelAdaptation._AdaptStruct) structures._User(structures._User)

Example 25 with Classifier.supervised.modelAdaptation._AdaptStruct

use of Classifier.supervised.modelAdaptation._AdaptStruct in project IR_Base by Linda-sunshine.

From the class CoLinAdaptWithDiffFeatureGroups, method constructUserList:

/**
 * Creates one {@code _CoLinAdaptDiffFvGroupsStruct} per user, then mirrors
 * every user's A and B transformation blocks into the class-level shared
 * arrays so all users' parameters live in two contiguous buffers.
 */
@Override
void constructUserList(ArrayList<_User> userList) {
    // Each user owns a block of 2*dim entries (scale + shift per group — TODO confirm).
    int aBlock = 2 * m_dimA;
    int bBlock = 2 * m_dimB;
    // step 1: create per-user structs (the index doubles as the user's slot id)
    m_userList = new ArrayList<_AdaptStruct>();
    for (int idx = 0; idx < userList.size(); idx++)
        m_userList.add(new _CoLinAdaptDiffFvGroupsStruct(userList.get(idx), m_dimA, idx, m_topK, m_dimB));
    m_pWeights = new double[m_gWeights.length];
    // huge space consumption: shared flat storage for all users' parameters
    _CoLinAdaptDiffFvGroupsStruct.sharedA = new double[getASize()];
    _CoLinAdaptDiffFvGroupsStruct.sharedB = new double[getBSize()];
    // step 2: copy each user's A and B into the shared buffers at its slot offset
    for (int idx = 0; idx < m_userList.size(); idx++) {
        _CoLinAdaptDiffFvGroupsStruct u = (_CoLinAdaptDiffFvGroupsStruct) m_userList.get(idx);
        System.arraycopy(u.m_A, 0, _CoLinAdaptDiffFvGroupsStruct.sharedA, idx * aBlock, aBlock);
        System.arraycopy(u.m_B, 0, _CoLinAdaptDiffFvGroupsStruct.sharedB, idx * bBlock, bBlock);
    }
}
Also used : Classifier.supervised.modelAdaptation._AdaptStruct(Classifier.supervised.modelAdaptation._AdaptStruct) structures._User(structures._User)

Aggregations

Classifier.supervised.modelAdaptation._AdaptStruct (Classifier.supervised.modelAdaptation._AdaptStruct)34 structures._User (structures._User)15 File (java.io.File)6 PrintWriter (java.io.PrintWriter)6 structures._Review (structures._Review)6 IOException (java.io.IOException)5 ExceptionWithIflag (LBFGS.LBFGS.ExceptionWithIflag)3 structures._SparseFeature (structures._SparseFeature)3 Feature (Classifier.supervised.liblinear.Feature)2 Parameter (Classifier.supervised.liblinear.Parameter)2 Problem (Classifier.supervised.liblinear.Problem)2 ArrayList (java.util.ArrayList)2 structures._HDPThetaStar (structures._HDPThetaStar)2 structures._PerformanceStat (structures._PerformanceStat)2 FileNotFoundException (java.io.FileNotFoundException)1 HashSet (java.util.HashSet)1 structures._thetaStar (structures._thetaStar)1