Example 6 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

the class CLinAdaptWithMMB method logit.

@Override
protected double logit(_SparseFeature[] fvs, _Review r) {
    int k, n;
    double[] Au = r.getHDPThetaStar().getModel();
    // Bias term: w_s0*a0+b0.
    double sum = Au[0] * m_gWeights[0] + Au[m_dim];
    for (_SparseFeature fv : fvs) {
        n = fv.getIndex() + 1;
        k = m_featureGroupMap[n];
        sum += (Au[k] * m_gWeights[n] + Au[m_dim + k]) * fv.getValue();
    }
    return Utils.logistic(sum);
}
Also used : structures._SparseFeature(structures._SparseFeature)
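
To make the transform explicit: for a feature n in group k, the effective weight is Au[k] * w_g[n] + Au[m_dim + k], i.e. a per-cluster scaling and shifting of the shared global weight, and the sum is squashed by a sigmoid. Below is a self-contained sketch of the same computation on plain arrays (the class name, the toy numbers and the sigmoid form of Utils.logistic are assumptions for illustration, not project code):

public class LinAdaptLogitSketch {

    // Assumed to mirror Utils.logistic: the standard sigmoid.
    static double logistic(double x) {
        return 1.0 / (1.0 + Math.exp(-x));
    }

    // A holds [scaling | shifting] blocks of length dim each; group 0 is the bias group.
    static double logit(double[] A, double[] gWeights, int[] groupMap, int dim,
                        int[] featureIndex, double[] featureValue) {
        double sum = A[0] * gWeights[0] + A[dim]; // bias term: a_0 * w_g0 + b_0
        for (int i = 0; i < featureIndex.length; i++) {
            int n = featureIndex[i] + 1;          // +1 skips the bias position
            int k = groupMap[n];                  // feature group of feature n
            sum += (A[k] * gWeights[n] + A[dim + k]) * featureValue[i];
        }
        return logistic(sum);
    }

    public static void main(String[] args) {
        int dim = 3;                                   // bias group + two feature groups
        double[] A = {1.0, 0.5, 2.0, 0.0, -0.1, 0.3};  // first dim entries scale, last dim entries shift
        double[] gWeights = {0.2, 1.5, -0.7, 0.4};     // shared global weights, bias at position 0
        int[] groupMap = {0, 1, 1, 2};                 // feature n -> group id
        int[] idx = {0, 2};                            // sparse feature indices (before the +1 offset)
        double[] val = {1.0, 0.5};
        System.out.println(logit(A, gWeights, groupMap, dim, idx, val)); // ~0.80
    }
}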

Example 7 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

the class CLinAdaptWithMMB method gradientByFunc.

@Override
protected void gradientByFunc(_AdaptStruct u, _Doc review, double weight, double[] g) {
    _Review r = (_Review) review;
    // feature index
    int n, k;
    int cIndex = r.getHDPThetaStar().getIndex();
    if (cIndex < 0 || cIndex >= m_kBar)
        System.err.println("Error, cannot find the theta star!");
    int offset = m_dim * 2 * cIndex;
    double delta = (review.getYLabel() - logit(review.getSparse(), r)) * weight;
    // Bias term for individual user.
    // a[0] = ws0*x0; x0=1
    g[offset] -= delta * m_gWeights[0];
    // b[0]
    g[offset + m_dim] -= delta;
    // Traverse all the feature dimension to calculate the gradient for both individual users and super user.
    for (_SparseFeature fv : review.getSparse()) {
        n = fv.getIndex() + 1;
        k = m_featureGroupMap[n];
        // w_si*x_di
        g[offset + k] -= delta * m_gWeights[n] * fv.getValue();
        // x_di
        g[offset + m_dim + k] -= delta * fv.getValue();
    }
}
Also used : structures._Review(structures._Review) structures._SparseFeature(structures._SparseFeature)
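
The offset arithmetic encodes the layout of the shared gradient vector: each HDP theta star (cluster) owns a contiguous block of 2 * m_dim entries starting at 2 * m_dim * cIndex, with the scaling gradients in the first half of the block and the shifting gradients in the second half. A tiny illustration of that indexing (the sizes below are made up, not taken from IR_Base):

public class GradientLayoutSketch {
    public static void main(String[] args) {
        int dim = 3;      // m_dim: bias group + feature groups
        int cIndex = 2;   // index of the review's theta star
        int offset = 2 * dim * cIndex;
        // Scaling gradient of group k lives at offset + k, shifting gradient at offset + dim + k.
        for (int k = 0; k < dim; k++)
            System.out.printf("group %d -> a at g[%d], b at g[%d]%n", k, offset + k, offset + dim + k);
    }
}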

Example 8 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

the class MTCLinAdaptWithMMB method gradientByFunc.

@Override
protected void gradientByFunc(_AdaptStruct u, _Doc review, double weight, double[] g) {
    _Review r = (_Review) review;
    _HDPThetaStar theta = r.getHDPThetaStar();
    // feature index
    int n, k, s;
    int cIndex = theta.getIndex();
    if (cIndex < 0 || cIndex >= m_kBar)
        System.err.println("Error, cannot find the theta star!");
    int offset = m_dim * 2 * cIndex, offsetSup = m_dim * 2 * m_kBar;
    double[] Au = theta.getModel();
    double delta = (review.getYLabel() - logit(review.getSparse(), r)) * weight;
    // Bias term for individual user.
    // a[0] = ws0*x0; x0=1
    g[offset] -= delta * getSupWeights(0);
    // b[0]
    g[offset + m_dim] -= delta;
    // Bias term for super user.
    // a_s[0] = a_i0*w_g0*x_d0
    g[offsetSup] -= delta * Au[0] * m_gWeights[0];
    // b_s[0] = a_i0*x_d0
    g[offsetSup + m_dimSup] -= delta * Au[0];
    // Traverse all the feature dimension to calculate the gradient for both individual users and super user.
    for (_SparseFeature fv : review.getSparse()) {
        n = fv.getIndex() + 1;
        k = m_featureGroupMap[n];
        // w_si*x_di
        g[offset + k] -= delta * getSupWeights(n) * fv.getValue();
        // x_di
        g[offset + m_dim + k] -= delta * fv.getValue();
        s = m_featureGroupMap4SupUsr[n];
        // a_i*w_gi*x_di
        g[offsetSup + s] -= delta * Au[k] * m_gWeights[n] * fv.getValue();
        // a_i*x_di
        g[offsetSup + m_dimSup + s] -= delta * Au[k] * fv.getValue();
    }
}
Also used : structures._Review(structures._Review) structures._HDPThetaStar(structures._HDPThetaStar) structures._SparseFeature(structures._SparseFeature)
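
Compared with the CLinAdapt version, this gradient maintains two transforms at once: the individual user's block (via getSupWeights, whose body is not shown in this snippet) and the super user's block, which starts at offsetSup = 2 * m_dim * m_kBar, right after all cluster blocks. The sketch below shows how the four per-feature contributions relate; the class is hypothetical, and the supWeight form is only an assumption based on the two-level scaling-and-shifting structure visible in the gradient, not the project's getSupWeights:

public class TwoLevelGradientSketch {

    // Assumed super-user transform: As[s] scales the global weight of super group s, As[dimSup + s] shifts it.
    static double supWeight(double[] As, double[] gWeights, int dimSup, int s, int n) {
        return As[s] * gWeights[n] + As[dimSup + s];
    }

    public static void main(String[] args) {
        int dim = 2, dimSup = 2;               // illustrative group counts
        double[] Au = {0.8, 1.2, 0.1, -0.1};   // individual scaling/shifting, length 2 * dim
        double[] As = {1.1, 0.9, 0.0, 0.2};    // super-user scaling/shifting, length 2 * dimSup
        double[] gWeights = {0.3, -0.5};       // global weights (bias omitted for brevity)
        int n = 1, k = 1, s = 1;               // feature index and its two group ids
        double x = 0.7, delta = 0.25;          // feature value and (y - p) * weight

        // Individual block: gradients w.r.t. a_i[k] and b_i[k].
        double gA = -delta * supWeight(As, gWeights, dimSup, s, n) * x;
        double gB = -delta * x;
        // Super-user block: gradients w.r.t. a_s[s] and b_s[s], scaled by the individual a_i[k].
        double gAs = -delta * Au[k] * gWeights[n] * x;
        double gBs = -delta * Au[k] * x;
        System.out.printf("%.4f %.4f %.4f %.4f%n", gA, gB, gAs, gBs);
    }
}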

Example 9 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

the class ModelAdaptation method printUserPerformance.

// added by Lin for model performance comparison.
// print out each user's test review's performance.
public void printUserPerformance(String filename) {
    PrintWriter writer;
    try {
        writer = new PrintWriter(new File(filename));
        Collections.sort(m_userList, new Comparator<_AdaptStruct>() {

            @Override
            public int compare(_AdaptStruct u1, _AdaptStruct u2) {
                return String.CASE_INSENSITIVE_ORDER.compare(u1.getUserID(), u2.getUserID());
            }
        });
        for (_AdaptStruct u : m_userList) {
            writer.write("-----\n");
            writer.write(String.format("%s\t%d\n", u.getUserID(), u.getReviews().size()));
            for (_Review r : u.getReviews()) {
                if (r.getType() == rType.ADAPTATION)
                    writer.write(String.format("%s\t%d\t%s\n", r.getCategory(), r.getYLabel(), r.getSource()));
                if (r.getType() == rType.TEST) {
                    writer.write(String.format("%s\t%d\t%d\t%s\n", r.getCategory(), r.getYLabel(), r.getPredictLabel(), r.getSource()));
                }
            }
        }
        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Also used : structures._Review(structures._Review) IOException(java.io.IOException) File(java.io.File) PrintWriter(java.io.PrintWriter)
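
Two details of this snippet are worth calling out: the writer is closed manually at the end of the try block, so an exception thrown mid-write would leak the file handle, and the anonymous Comparator predates Comparator.comparing. A self-contained sketch of the same sort-then-write pattern with try-with-resources (the User type and file name here are hypothetical, not IR_Base types):

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class UserReportSketch {

    static class User {
        final String userID;
        final int label;
        User(String userID, int label) { this.userID = userID; this.label = label; }
    }

    public static void main(String[] args) throws IOException {
        List<User> users = new ArrayList<>();
        users.add(new User("bob", 1));
        users.add(new User("Alice", 0));
        // Case-insensitive sort by user ID, matching the ordering used above.
        users.sort(Comparator.comparing((User u) -> u.userID, String.CASE_INSENSITIVE_ORDER));
        // try-with-resources closes the writer even if a write fails.
        try (PrintWriter writer = new PrintWriter(new File("user_report.txt"))) {
            for (User u : users)
                writer.printf("%s\t%d%n", u.userID, u.label);
        }
    }
}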

Example 10 with structures._Review

use of structures._Review in project IR_Base by Linda-sunshine.

the class ModelAdaptation method test.

@Override
public double test() {
    int numberOfCores = Runtime.getRuntime().availableProcessors();
    ArrayList<Thread> threads = new ArrayList<Thread>();
    for (int k = 0; k < numberOfCores; ++k) {
        threads.add((new Thread() {

            int core, numOfCores;

            @Override
            public void run() {
                _AdaptStruct user;
                _PerformanceStat userPerfStat;
                try {
                    for (int i = 0; i + core < m_userList.size(); i += numOfCores) {
                        user = m_userList.get(i + core);
                        if ((m_testmode == TestMode.TM_batch && user.getTestSize() < 1) || // no testing data
                            (m_testmode == TestMode.TM_online && user.getAdaptationSize() < 1) || // no adaptation data
                            (m_testmode == TestMode.TM_hybrid && user.getAdaptationSize() < 1 && user.getTestSize() < 1)) // no testing and adaptation data
                            continue;
                        userPerfStat = user.getPerfStat();
                        if (m_testmode == TestMode.TM_batch || m_testmode == TestMode.TM_hybrid) {
                            // record prediction results
                            for (_Review r : user.getReviews()) {
                                if (r.getType() != rType.TEST)
                                    continue;
                                int trueL = r.getYLabel();
                                // evoke user's own model
                                int predL = user.predict(r);
                                r.setPredictLabel(predL);
                                userPerfStat.addOnePredResult(predL, trueL);
                            }
                        }
                        userPerfStat.calculatePRF();
                    }
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }

            private Thread initialize(int core, int numOfCores) {
                this.core = core;
                this.numOfCores = numOfCores;
                return this;
            }
        }).initialize(k, numberOfCores));
        threads.get(k).start();
    }
    for (int k = 0; k < numberOfCores; ++k) {
        try {
            threads.get(k).join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    int count = 0;
    ArrayList<ArrayList<Double>> macroF1 = new ArrayList<ArrayList<Double>>();
    // init macroF1
    for (int i = 0; i < m_classNo; i++) macroF1.add(new ArrayList<Double>());
    _PerformanceStat userPerfStat;
    m_microStat.clear();
    for (_AdaptStruct user : m_userList) {
        if (// no testing data
        (m_testmode == TestMode.TM_batch && user.getTestSize() < 1) || // no adaptation data
        (m_testmode == TestMode.TM_online && user.getAdaptationSize() < 1) || // no testing and adaptation data
        (m_testmode == TestMode.TM_hybrid && user.getAdaptationSize() < 1) && user.getTestSize() < 1)
            continue;
        userPerfStat = user.getPerfStat();
        for (int i = 0; i < m_classNo; i++) {
            if (userPerfStat.getTrueClassNo(i) > 0)
                macroF1.get(i).add(userPerfStat.getF1(i));
        }
        m_microStat.accumulateConfusionMat(userPerfStat);
        count++;
    }
    System.out.print("neg users: " + macroF1.get(0).size());
    System.out.print("\tpos users: " + macroF1.get(1).size() + "\n");
    System.out.println(toString());
    calcMicroPerfStat();
    // macro average and standard deviation.
    System.out.println("\nMacro F1:");
    for (int i = 0; i < m_classNo; i++) {
        double[] avgStd = calcAvgStd(macroF1.get(i));
        m_perf[i] = avgStd[0];
        System.out.format("Class %d: %.4f+%.4f\t", i, avgStd[0], avgStd[1]);
    }
    // printPerformance();
    return 0;
}
Also used : ArrayList(java.util.ArrayList) IOException(java.io.IOException) structures._Review(structures._Review) structures._PerformanceStat(structures._PerformanceStat)
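
calcAvgStd is used above to turn each class's per-user F1 list into the macro average and spread that get printed, but its body is not part of this snippet. A minimal sketch of what such a helper typically computes (an assumption for illustration; whether IR_Base uses the population or the sample standard deviation is not visible here):

import java.util.ArrayList;
import java.util.List;

public class AvgStdSketch {

    // Returns {mean, population standard deviation} of the given values.
    static double[] calcAvgStd(List<Double> values) {
        double mean = 0;
        for (double v : values) mean += v;
        mean /= values.size();
        double var = 0;
        for (double v : values) var += (v - mean) * (v - mean);
        var /= values.size();
        return new double[] { mean, Math.sqrt(var) };
    }

    public static void main(String[] args) {
        List<Double> f1 = new ArrayList<>(List.of(0.5, 0.7, 0.9));
        double[] avgStd = calcAvgStd(f1);
        System.out.format("%.4f +/- %.4f%n", avgStd[0], avgStd[1]);
    }
}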

Aggregations

structures._Review (structures._Review) 44
structures._SparseFeature (structures._SparseFeature) 24
structures._HDPThetaStar (structures._HDPThetaStar) 9
ArrayList (java.util.ArrayList) 8
Feature (Classifier.supervised.liblinear.Feature) 6
Classifier.supervised.modelAdaptation._AdaptStruct (Classifier.supervised.modelAdaptation._AdaptStruct) 6
structures._PerformanceStat (structures._PerformanceStat) 6
IOException (java.io.IOException) 5
File (java.io.File) 4
structures._User (structures._User) 4
FeatureNode (Classifier.supervised.liblinear.FeatureNode) 3
Parameter (Classifier.supervised.liblinear.Parameter) 3
Problem (Classifier.supervised.liblinear.Problem) 3
structures._RankItem (structures._RankItem) 3
BufferedReader (java.io.BufferedReader) 2
FileInputStream (java.io.FileInputStream) 2
InputStreamReader (java.io.InputStreamReader) 2
PrintWriter (java.io.PrintWriter) 2
MyPriorityQueue (structures.MyPriorityQueue) 2
SolverType (Classifier.supervised.liblinear.SolverType) 1