Search in sources :

Example 1 with FeatureNode

use of Classifier.supervised.liblinear.FeatureNode in project IR_Base by Linda-sunshine.

The class MultiTaskSVM defines the method createLibLinearFV.

/**
 * Creates a liblinear training instance for MT-SVM.
 * <p>
 * The combined feature space is laid out as [user_0 | user_1 | ... | user_{m_userSize-1} | global]:
 * each review's sparse features appear twice — once in the owning user's sub-space (raw value)
 * and once in the shared global sub-space (scaled by 1/m_u). When m_bias is set, every sub-space
 * is one slot wider and carries a trailing bias term.
 *
 * @param r         the review whose sparse features form the instance
 * @param userIndex index of the user owning this review (selects the user sub-space)
 * @return the liblinear feature nodes, user part first, then the global part
 */
public Feature[] createLibLinearFV(_Review r, int userIndex) {
    int fIndex;
    double fValue;
    _SparseFeature fv;
    _SparseFeature[] fvs = r.getSparse();
    int userOffset, globalOffset;
    // 0-th: x//sqrt(u); t-th: x.
    Feature[] node;
    if (m_bias) {
        userOffset = (m_featureSize + 1) * userIndex;
        globalOffset = (m_featureSize + 1) * m_userSize;
        // one extra slot per part for the bias terms
        node = new Feature[(1 + fvs.length) * 2];
    } else {
        userOffset = m_featureSize * userIndex;
        globalOffset = m_featureSize * m_userSize;
        node = new Feature[fvs.length * 2];
    }
    for (int i = 0; i < fvs.length; i++) {
        fv = fvs[i];
        // liblinear's feature index starts from one
        fIndex = fv.getIndex() + 1;
        fValue = fv.getValue();
        // Construct the user part of the training instance.
        node[i] = new FeatureNode(userOffset + fIndex, fValue);
        // Construct the global part of the training instance.
        // Guard m_u == 0 for consistency with MultiTaskSVMWithClusters; an unguarded
        // division would inject Infinity/NaN feature values into the instance.
        double gValue = m_u == 0 ? 0 : fValue / m_u;
        if (m_bias)
            // global model's bias term has to be moved to the last slot, so the
            // global features are shifted by one extra position
            node[i + fvs.length + 1] = new FeatureNode(globalOffset + fIndex, gValue);
        else
            node[i + fvs.length] = new FeatureNode(globalOffset + fIndex, gValue);
    }
    if (m_bias) {
        // add the bias terms
        // user model's bias: last index of the user's sub-space
        node[fvs.length] = new FeatureNode((m_featureSize + 1) * (userIndex + 1), 1.0);
        // global model's bias: last index of the whole feature space
        node[2 * fvs.length + 1] = new FeatureNode((m_featureSize + 1) * (m_userSize + 1), m_u == 0 ? 0 : 1.0 / m_u);
    }
    return node;
}
Also used : FeatureNode(Classifier.supervised.liblinear.FeatureNode) structures._SparseFeature(structures._SparseFeature) Feature(Classifier.supervised.liblinear.Feature) structures._SparseFeature(structures._SparseFeature)

Example 2 with FeatureNode

use of Classifier.supervised.liblinear.FeatureNode in project IR_Base by Linda-sunshine.

The class MultiTaskSVMWithClusters defines the method createLibLinearFV.

/**
 * Builds the liblinear feature vector for a review under the cluster-augmented
 * MT-SVM layout: [users | clusters | global]. Each sparse feature appears three
 * times — in the owning user's sub-space (scaled by m_i), in that user's cluster
 * sub-space (scaled by 1/m_c), and in the shared global sub-space (scaled by 1/m_u).
 * With m_bias set, every sub-space is one slot wider and carries a trailing bias term.
 * Zero scale factors yield zero-valued features rather than a division by zero.
 */
@Override
public Feature[] createLibLinearFV(_Review r, int userIndex) {
    _SparseFeature[] sparse = r.getSparse();
    int len = sparse.length;
    int cluster = m_userClusterIndex[userIndex];
    // width of one sub-space; +1 reserves the bias slot
    int dim = m_bias ? m_featureSize + 1 : m_featureSize;
    int userOff = dim * userIndex;
    int clusterOff = dim * (m_userSize + cluster);
    int globalOff = dim * (m_userSize + m_clusterNo);
    // three segments: user, cluster, global (each widened by one when biased)
    Feature[] nodes = m_bias ? new Feature[(1 + len) * 3] : new Feature[len * 3];
    int shift = m_bias ? 1 : 0;
    for (int i = 0; i < len; i++) {
        // liblinear's feature index starts from one
        int idx = sparse[i].getIndex() + 1;
        double val = sparse[i].getValue();
        // user segment
        nodes[i] = new FeatureNode(userOff + idx, val * m_i);
        // cluster segment
        nodes[i + len + shift] = new FeatureNode(clusterOff + idx, m_c == 0 ? 0 : val / m_c);
        // global segment
        nodes[i + 2 * (len + shift)] = new FeatureNode(globalOff + idx, m_u == 0 ? 0 : val / m_u);
    }
    if (m_bias) {
        // bias terms sit at the last index of each sub-space
        nodes[len] = new FeatureNode(dim * (userIndex + 1), m_i == 0 ? 0 : 1.0 / m_i);
        nodes[2 * len + 1] = new FeatureNode(dim * (m_userSize + cluster + 1), m_c == 0 ? 0 : 1.0 / m_c);
        nodes[3 * len + 2] = new FeatureNode(dim * (m_userSize + m_clusterNo + 1), m_u == 0 ? 0 : 1.0 / m_u);
    }
    return nodes;
}
Also used : FeatureNode(Classifier.supervised.liblinear.FeatureNode) structures._SparseFeature(structures._SparseFeature) structures._SparseFeature(structures._SparseFeature) Feature(Classifier.supervised.liblinear.Feature)

Example 3 with FeatureNode

use of Classifier.supervised.liblinear.FeatureNode in project IR_Base by Linda-sunshine.

The class LinearSVMMetricLearning defines the method createLinearFeature_diff.

/**
 * Maps a pair of documents to a liblinear instance built from the cross-product
 * of their projected feature difference. Since the cross-product matrix is
 * symmetric, only the lower triangle (diagonal included) is materialized;
 * off-diagonal entries are doubled to stand in for their mirrored counterparts.
 *
 * @return the triangular feature array, or null if either document lacks a
 *         projected feature vector
 */
Feature[] createLinearFeature_diff(_Doc d1, _Doc d2) {
    _SparseFeature[] proj1 = d1.getProjectedFv(), proj2 = d2.getProjectedFv();
    if (proj1 == null || proj2 == null)
        return null;
    _SparseFeature[] diff = Utils.diffVector(proj1, proj2);
    int n = diff.length;
    // n*(n+1)/2 entries cover the lower triangle including the diagonal
    Feature[] result = new Feature[n * (n + 1) / 2];
    int pos = 0;
    for (int i = 0; i < n; i++) {
        int idxI = diff[i].getIndex();
        double valI = diff[i].getValue();
        for (int j = 0; j < i; j++) {
            // off-diagonal: doubled because the full V*V matrix is symmetric
            // NOTE(review): original comment warned "this might be too small to
            // count" — presumably numerical underflow of tiny products; confirm
            // upstream feature scaling.
            double cross = 2 * valI * diff[j].getValue();
            result[pos++] = new FeatureNode(getIndex(idxI, diff[j].getIndex()), cross);
        }
        // diagonal term
        result[pos++] = new FeatureNode(getIndex(idxI, idxI), valI * valI);
    }
    return result;
}
Also used : FeatureNode(Classifier.supervised.liblinear.FeatureNode) structures._SparseFeature(structures._SparseFeature) Feature(Classifier.supervised.liblinear.Feature) structures._SparseFeature(structures._SparseFeature)

Example 4 with FeatureNode

use of Classifier.supervised.liblinear.FeatureNode in project IR_Base by Linda-sunshine.

The class ACCTM_CZLR defines the method updateFeatureWeight.

/**
 * Re-estimates the per-feature weights of a parent document by fitting an
 * L2-regularized logistic regression (liblinear L2R_LR, C=1.0, eps=0.01) over
 * every word of the parent's child documents: each word's feature array is one
 * training row and its x indicator is the target. The learned coefficients are
 * copied into pDoc.m_featureWeight, and the trained model is saved (best effort)
 * to "&lt;docName&gt;.txt" inside weightIterFolder.
 *
 * @param pDoc             parent document whose m_featureWeight is updated in place
 * @param iter             current sampling iteration (currently unused here)
 * @param weightIterFolder directory receiving the serialized model file
 */
public void updateFeatureWeight(_ParentDoc pDoc, int iter, File weightIterFolder) {
    int featureLen = 0;
    ArrayList<Double> targets = new ArrayList<Double>();
    ArrayList<Feature[]> rows = new ArrayList<Feature[]>();
    // Collect one training row per child-document word.
    for (_ChildDoc cDoc : pDoc.m_childDocs) {
        for (_Word w : cDoc.getWords()) {
            double[] feats = w.getFeatures();
            featureLen = feats.length;
            Feature[] row = new Feature[featureLen];
            // liblinear's feature index starts from one
            for (int i = 0; i < featureLen; i++)
                row[i] = new FeatureNode(i + 1, feats[i]);
            rows.add(row);
            targets.add(w.getX());
        }
    }
    int sampleCount = rows.size();
    Feature[][] featureMatrix = new Feature[sampleCount][];
    double[] targetVal = new double[sampleCount];
    for (int i = 0; i < sampleCount; i++) {
        featureMatrix[i] = rows.get(i);
        targetVal[i] = targets.get(i);
    }
    Problem problem = new Problem();
    problem.l = sampleCount;
    // featureNum (+1 accounts for liblinear's 1-based indexing)
    problem.n = featureLen + 1;
    problem.x = featureMatrix;
    problem.y = targetVal;
    Parameter param = new Parameter(SolverType.L2R_LR, 1.0, 0.01);
    Model model = Linear.train(problem, param);
    // Copy the learned decision-function coefficients into the parent's weights.
    int featureNum = model.getNrFeature();
    for (int i = 0; i < featureNum; i++)
        pDoc.m_featureWeight[i] = model.getDecfunCoef(i, 0);
    File modelFile = new File(weightIterFolder, pDoc.getName() + ".txt");
    try {
        model.save(modelFile);
    } catch (Exception e) {
        System.out.println(e.getMessage());
    }
}
Also used : ArrayList(java.util.ArrayList) structures._Word(structures._Word) SolverType(Classifier.supervised.liblinear.SolverType) Feature(Classifier.supervised.liblinear.Feature) structures._SparseFeature(structures._SparseFeature) structures._ChildDoc(structures._ChildDoc) FeatureNode(Classifier.supervised.liblinear.FeatureNode) Model(Classifier.supervised.liblinear.Model) Parameter(Classifier.supervised.liblinear.Parameter) Problem(Classifier.supervised.liblinear.Problem) File(java.io.File)

Example 5 with FeatureNode

use of Classifier.supervised.liblinear.FeatureNode in project IR_Base by Linda-sunshine.

The class IndividualSVM defines the method createLibLinearFV.

/**
 * Builds a liblinear feature vector for a single review in the per-user SVM:
 * the review's sparse features are copied directly (with 1-based indices), and
 * when m_bias is set a trailing bias term is appended at index m_featureSize + 1.
 *
 * @param r         the review to convert
 * @param userIndex index of the owning user (unused in this per-user variant)
 * @return the liblinear feature nodes for this review
 */
public Feature[] createLibLinearFV(_Review r, int userIndex) {
    _SparseFeature[] sparse = r.getSparse();
    // reserve one extra slot for the bias term when enabled
    Feature[] nodes = new Feature[m_bias ? sparse.length + 1 : sparse.length];
    for (int i = 0; i < sparse.length; i++) {
        // liblinear's feature index starts from one
        nodes[i] = new FeatureNode(sparse[i].getIndex() + 1, sparse[i].getValue());
    }
    if (m_bias) {
        // user model's bias term occupies the last slot
        nodes[sparse.length] = new FeatureNode(m_featureSize + 1, 1.0);
    }
    return nodes;
}
Also used : FeatureNode(Classifier.supervised.liblinear.FeatureNode) structures._SparseFeature(structures._SparseFeature) Feature(Classifier.supervised.liblinear.Feature) structures._SparseFeature(structures._SparseFeature)

Aggregations

Feature (Classifier.supervised.liblinear.Feature)7 FeatureNode (Classifier.supervised.liblinear.FeatureNode)7 structures._SparseFeature (structures._SparseFeature)7 Model (Classifier.supervised.liblinear.Model)1 Parameter (Classifier.supervised.liblinear.Parameter)1 Problem (Classifier.supervised.liblinear.Problem)1 SolverType (Classifier.supervised.liblinear.SolverType)1 File (java.io.File)1 ArrayList (java.util.ArrayList)1 structures._ChildDoc (structures._ChildDoc)1 structures._Word (structures._Word)1