Example 6 with structures._SparseFeature

Use of structures._SparseFeature in project IR_Base by Linda-sunshine.

In class MTCLinAdaptWithMMB, method gradientByFunc:

@Override
protected void gradientByFunc(_AdaptStruct u, _Doc review, double weight, double[] g) {
    _Review r = (_Review) review;
    _HDPThetaStar theta = r.getHDPThetaStar();
    // n: feature index; k: feature-group index for the individual user; s: feature-group index for the super user
    int n, k, s;
    int cIndex = theta.getIndex();
    if (cIndex < 0 || cIndex >= m_kBar)
        System.err.println("Error,cannot find the theta star!");
    int offset = m_dim * 2 * cIndex, offsetSup = m_dim * 2 * m_kBar;
    double[] Au = theta.getModel();
    double delta = (review.getYLabel() - logit(review.getSparse(), r)) * weight;
    // Bias term for individual user.
    // a[0] = ws0*x0; x0=1
    g[offset] -= delta * getSupWeights(0);
    // b[0]
    g[offset + m_dim] -= delta;
    // Bias term for super user.
    // a_s[0] = a_i0*w_g0*x_d0
    g[offsetSup] -= delta * Au[0] * m_gWeights[0];
    // b_s[0] = a_i0*x_d0
    g[offsetSup + m_dimSup] -= delta * Au[0];
    // Traverse all feature dimensions to compute the gradient for both the individual user and the super user.
    for (_SparseFeature fv : review.getSparse()) {
        n = fv.getIndex() + 1;
        k = m_featureGroupMap[n];
        // w_si*x_di
        g[offset + k] -= delta * getSupWeights(n) * fv.getValue();
        // x_di
        g[offset + m_dim + k] -= delta * fv.getValue();
        s = m_featureGroupMap4SupUsr[n];
        // a_i*w_gi*x_di
        g[offsetSup + s] -= delta * Au[k] * m_gWeights[n] * fv.getValue();
        // a_i*x_di
        g[offsetSup + m_dimSup + s] -= delta * Au[k] * fv.getValue();
    }
}
Also used : structures._Review(structures._Review), structures._HDPThetaStar(structures._HDPThetaStar), structures._SparseFeature(structures._SparseFeature)
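
The method writes into one flat gradient vector g: each of the m_kBar clusters owns a block of 2 * m_dim entries (the scaling part a followed by the shifting part b), and a single super-user block of 2 * m_dimSup entries sits after all cluster blocks. The following minimal sketch (hypothetical names and toy sizes, not part of IR_Base) just reproduces that index arithmetic so the offsets used in gradientByFunc are easier to follow.

final class HdpGradientLayoutSketch {
    // Flat layout assumed above:
    // [ cluster 0: a(0..dim-1), b(0..dim-1) | ... | cluster kBar-1 | super user: a_s(0..dimSup-1), b_s(0..dimSup-1) ]
    static int scaling(int dim, int cluster, int group) {
        return 2 * dim * cluster + group;
    }
    static int shifting(int dim, int cluster, int group) {
        return 2 * dim * cluster + dim + group;
    }
    static int supScaling(int dim, int kBar, int group) {
        return 2 * dim * kBar + group;
    }
    static int supShifting(int dim, int kBar, int dimSup, int group) {
        return 2 * dim * kBar + dimSup + group;
    }

    public static void main(String[] args) {
        int dim = 5, dimSup = 3, kBar = 4;                    // toy sizes
        double[] g = new double[2 * dim * kBar + 2 * dimSup];
        g[scaling(dim, 2, 1)] -= 0.1;                         // a-part of cluster 2, feature group 1
        g[shifting(dim, 2, 1)] -= 0.1;                        // b-part of cluster 2, feature group 1
        g[supScaling(dim, kBar, 0)] -= 0.1;                   // super-user scaling bias
        g[supShifting(dim, kBar, dimSup, 0)] -= 0.1;          // super-user shifting bias
        System.out.println(g.length + " gradient entries in total");
    }
}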

Example 7 with structures._SparseFeature

Use of structures._SparseFeature in project IR_Base by Linda-sunshine.

In class MultiTaskSVM, method createLibLinearFV:

// Create a training instance for the SVM.
// For MT-SVM feature vector construction: user models are placed in front of the global model.
public Feature[] createLibLinearFV(_Review r, int userIndex) {
    int fIndex;
    double fValue;
    _SparseFeature fv;
    _SparseFeature[] fvs = r.getSparse();
    int userOffset, globalOffset;
    // 0-th: x/sqrt(u); t-th: x.
    Feature[] node;
    if (m_bias) {
        userOffset = (m_featureSize + 1) * userIndex;
        globalOffset = (m_featureSize + 1) * m_userSize;
        node = new Feature[(1 + fvs.length) * 2];
    } else {
        userOffset = m_featureSize * userIndex;
        globalOffset = m_featureSize * m_userSize;
        node = new Feature[fvs.length * 2];
    }
    for (int i = 0; i < fvs.length; i++) {
        fv = fvs[i];
        // liblinear's feature index starts from one
        fIndex = fv.getIndex() + 1;
        fValue = fv.getValue();
        // Construct the user part of the training instance.
        node[i] = new FeatureNode(userOffset + fIndex, fValue);
        // Construct the global part of the training instance.
        if (m_bias)
            // global model's bias term has to be moved to the last
            node[i + fvs.length + 1] = new FeatureNode(globalOffset + fIndex, fValue / m_u);
        else
            // global model's bias term has to be moved to the last
            node[i + fvs.length] = new FeatureNode(globalOffset + fIndex, fValue / m_u);
    }
    if (m_bias) {
        // add the bias term
        // user model's bias
        node[fvs.length] = new FeatureNode((m_featureSize + 1) * (userIndex + 1), 1.0);
        // global model's bias
        node[2 * fvs.length + 1] = new FeatureNode((m_featureSize + 1) * (m_userSize + 1), 1.0 / m_u);
    }
    return node;
}
Also used : FeatureNode(Classifier.supervised.liblinear.FeatureNode), structures._SparseFeature(structures._SparseFeature), Feature(Classifier.supervised.liblinear.Feature)
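
To make the offset arithmetic concrete, here is a minimal standalone sketch (toy sizes, hypothetical class name, no liblinear dependency) of the index space this method produces: each user owns a block of m_featureSize + 1 consecutive indices, the global block comes after all user blocks, and the last slot of each block holds its bias term.

public class MtSvmIndexSketch {
    public static void main(String[] args) {
        int featureSize = 3, userSize = 2, userIndex = 1;     // assumed toy sizes
        int blockSize = featureSize + 1;                      // one extra slot per block for the bias
        int userOffset = blockSize * userIndex;               // start of this user's block
        int globalOffset = blockSize * userSize;              // global block follows all user blocks
        int[] rawIndices = { 0, 2 };                          // toy sparse feature indices (0-based)
        for (int raw : rawIndices) {
            int fIndex = raw + 1;                             // liblinear indices start at 1
            System.out.println("feature " + raw + ": user copy -> " + (userOffset + fIndex)
                    + ", global copy -> " + (globalOffset + fIndex));
        }
        System.out.println("user bias   -> " + blockSize * (userIndex + 1));
        System.out.println("global bias -> " + blockSize * (userSize + 1));
    }
}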

Example 8 with structures._SparseFeature

Use of structures._SparseFeature in project IR_Base by Linda-sunshine.

In class MultiTaskSVMWithClusters, method createLibLinearFV:

// Create a training instance for the SVM with cluster information.
// For MT-SVM feature vector construction: user models are placed first, then cluster models, then the global model.
@Override
public Feature[] createLibLinearFV(_Review r, int userIndex) {
    int fIndex, clusterIndex = m_userClusterIndex[userIndex];
    double fValue;
    _SparseFeature fv;
    _SparseFeature[] fvs = r.getSparse();
    int userOffset, clusterOffset, globalOffset;
    // 0-th: x/sqrt(u); t-th: x.
    Feature[] node;
    if (m_bias) {
        userOffset = (m_featureSize + 1) * userIndex;
        clusterOffset = (m_featureSize + 1) * (m_userSize + clusterIndex);
        globalOffset = (m_featureSize + 1) * (m_userSize + m_clusterNo);
        // It consists of three parts.
        node = new Feature[(1 + fvs.length) * 3];
    } else {
        userOffset = m_featureSize * userIndex;
        clusterOffset = m_featureSize * (m_userSize + clusterIndex);
        globalOffset = m_featureSize * (m_userSize + m_clusterNo);
        node = new Feature[fvs.length * 3];
    }
    for (int i = 0; i < fvs.length; i++) {
        fv = fvs[i];
        // liblinear's feature index starts from one
        fIndex = fv.getIndex() + 1;
        fValue = fv.getValue();
        // Construct the user part of the training instance.
        node[i] = new FeatureNode(userOffset + fIndex, fValue * m_i);
        // Construct the cluster and global part of the training instance.
        if (m_bias) {
            // cluster part
            node[i + fvs.length + 1] = new FeatureNode(clusterOffset + fIndex, m_c == 0 ? 0 : fValue / m_c);
            // global part
            node[i + 2 * fvs.length + 2] = new FeatureNode(globalOffset + fIndex, m_u == 0 ? 0 : fValue / m_u);
        } else {
            // cluster part
            node[i + fvs.length] = new FeatureNode(clusterOffset + fIndex, m_c == 0 ? 0 : fValue / m_c);
            // global part
            node[i + 2 * fvs.length] = new FeatureNode(globalOffset + fIndex, m_u == 0 ? 0 : fValue / m_u);
        }
    }
    if (m_bias) {
        // add the bias term
        // user model's bias
        node[fvs.length] = new FeatureNode((m_featureSize + 1) * (userIndex + 1), m_i == 0 ? 0 : 1.0 / m_i);
        // cluster model's bias
        node[2 * fvs.length + 1] = new FeatureNode((m_featureSize + 1) * (m_userSize + clusterIndex + 1), m_c == 0 ? 0 : 1.0 / m_c);
        // global model's bias
        node[3 * fvs.length + 2] = new FeatureNode((m_featureSize + 1) * (m_userSize + m_clusterNo + 1), m_u == 0 ? 0 : 1.0 / m_u);
    }
    return node;
}
Also used : FeatureNode(Classifier.supervised.liblinear.FeatureNode), structures._SparseFeature(structures._SparseFeature), Feature(Classifier.supervised.liblinear.Feature)
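
The clustered variant keeps the same block scheme but inserts a layer of cluster blocks between the user blocks and the global block. A short sketch (toy sizes, hypothetical class name) of where each block starts and ends in the liblinear index space:

public class MtSvmClusterIndexSketch {
    public static void main(String[] args) {
        int featureSize = 3, userSize = 2, clusterNo = 2;     // assumed toy sizes
        int userIndex = 1, clusterIndex = 0;
        int blockSize = featureSize + 1;                      // bias slot included
        int userOffset = blockSize * userIndex;
        int clusterOffset = blockSize * (userSize + clusterIndex);
        int globalOffset = blockSize * (userSize + clusterNo);
        System.out.printf("user block    : %d..%d%n", userOffset + 1, userOffset + blockSize);
        System.out.printf("cluster block : %d..%d%n", clusterOffset + 1, clusterOffset + blockSize);
        System.out.printf("global block  : %d..%d%n", globalOffset + 1, globalOffset + blockSize);
    }
}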

Example 9 with structures._SparseFeature

Use of structures._SparseFeature in project IR_Base by Linda-sunshine.

In class asyncMTRegLR, method gradientByFunc:

@Override
protected void gradientByFunc(_AdaptStruct user, _Doc review, double weight) {
    // n: feature index; offset: start of the global part of the gradient vector.
    int n, offset = m_featureSize + 1;
    double delta = weight * (review.getYLabel() - logit(review.getSparse(), user));
    if (m_LNormFlag)
        delta /= getAdaptationSize(user);
    // Bias term.
    // a[0] = w0*x0; x0=1
    m_g[0] -= delta;
    // Bias term for the global part.
    m_g[offset] -= m_u * delta;
    // Traverse all feature dimensions to compute the gradient.
    for (_SparseFeature fv : review.getSparse()) {
        n = fv.getIndex() + 1;
        // User part.
        m_g[n] -= delta * fv.getValue();
        // Global part.
        m_g[offset + n] -= delta * m_u * fv.getValue();
    }
}
Also used : structures._SparseFeature(structures._SparseFeature)
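
A minimal sketch (hypothetical names, toy numbers, plain Java) of a single gradient step with this layout, where the vector is organized as [ user bias | user weights 1..featureSize | global bias | global weights 1..featureSize ]:

public class AsyncMtRegLrGradientSketch {
    public static void main(String[] args) {
        int featureSize = 3, offset = featureSize + 1;        // start of the global part
        double u = 0.5, weight = 1.0;
        double yLabel = 1.0, predicted = 0.7;                 // predicted stands in for logit(...) above
        double delta = weight * (yLabel - predicted);
        int[] idx = { 0, 2 };                                 // toy sparse feature indices (0-based)
        double[] val = { 1.0, 2.0 };

        double[] g = new double[2 * offset];
        g[0] -= delta;                                        // user bias
        g[offset] -= u * delta;                               // global bias
        for (int i = 0; i < idx.length; i++) {
            int n = idx[i] + 1;                               // +1 because slot 0 is the bias
            g[n] -= delta * val[i];                           // user part
            g[offset + n] -= delta * u * val[i];              // global part
        }
        System.out.println(java.util.Arrays.toString(g));
    }
}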

Example 10 with structures._SparseFeature

Use of structures._SparseFeature in project IR_Base by Linda-sunshine.

In class asyncMTRegLR, method logit:

// Every user is represented by (u*global + individual)
@Override
protected double logit(_SparseFeature[] fvs, _AdaptStruct user) {
    int fid;
    // User bias and Global bias
    double sum = user.getPWeight(0) + m_u * m_glbWeights[0];
    for (_SparseFeature f : fvs) {
        fid = f.getIndex() + 1;
        // User model with Global model.
        sum += (user.getPWeight(fid) + m_u * m_glbWeights[fid]) * f.getValue();
    }
    return Utils.logistic(sum);
}
Also used : structures._SparseFeature(structures._SparseFeature)
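
Restated compactly: the score is (w_user + u * w_global) dotted with [1, x], passed through the standard sigmoid, which Utils.logistic is assumed to compute. A hypothetical plain-Java sketch (not the IR_Base Utils class):

final class LogitSketch {
    static double logit(double[] wUser, double[] wGlobal, double u, int[] idx, double[] val) {
        double sum = wUser[0] + u * wGlobal[0];               // bias slot 0
        for (int i = 0; i < idx.length; i++) {
            int fid = idx[i] + 1;                             // feature ids are shifted by +1
            sum += (wUser[fid] + u * wGlobal[fid]) * val[i];
        }
        return 1.0 / (1.0 + Math.exp(-sum));                  // sigmoid
    }
}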

Aggregations

structures._SparseFeature (structures._SparseFeature) 94
structures._ChildDoc (structures._ChildDoc) 14
structures._Doc (structures._Doc) 14
structures._Review (structures._Review) 14
HashMap (java.util.HashMap) 7
structures._ParentDoc (structures._ParentDoc) 7
structures._Stn (structures._Stn) 7
Feature (Classifier.supervised.liblinear.Feature) 6
FeatureNode (Classifier.supervised.liblinear.FeatureNode) 6
structures._RankItem (structures._RankItem) 5
File (java.io.File) 3
PrintWriter (java.io.PrintWriter) 3
Classifier.supervised.modelAdaptation._AdaptStruct (Classifier.supervised.modelAdaptation._AdaptStruct) 2
FileNotFoundException (java.io.FileNotFoundException) 2
IOException (java.io.IOException) 2
ArrayList (java.util.ArrayList) 2
Map (java.util.Map) 2
Entry (java.util.Map.Entry) 2
structures._ChildDoc4BaseWithPhi (structures._ChildDoc4BaseWithPhi) 2
structures._HDPThetaStar (structures._HDPThetaStar) 2