Use of Classifier.supervised.liblinear.Parameter in project IR_Base by Linda-sunshine.
The class MultiTaskSVM, method train().
@Override
public double train() {
    init();
    // Transfer all user reviews into instances recognized by liblinear, indexed by user.
    int trainSize = 0, validUserIndex = 0;
    ArrayList<Feature[]> fvs = new ArrayList<Feature[]>();
    ArrayList<Double> ys = new ArrayList<Double>();
    // Two nested loops to access the reviews, indexed by user.
    ArrayList<_Review> reviews;
    for (_AdaptStruct user : m_userList) {
        reviews = user.getReviews();
        boolean validUser = false;
        for (_Review r : reviews) {
            if (r.getType() == rType.ADAPTATION) {
                // We only use the adaptation data for training.
                fvs.add(createLibLinearFV(r, validUserIndex));
                ys.add(new Double(r.getYLabel()));
                trainSize++;
                validUser = true;
            }
        }
        if (validUser)
            validUserIndex++;
    }
    // Train a single liblinear model over all reviews.
    Problem libProblem = new Problem();
    libProblem.l = trainSize;
    libProblem.x = new Feature[trainSize][];
    libProblem.y = new double[trainSize];
    for (int i = 0; i < trainSize; i++) {
        libProblem.x[i] = fvs.get(i);
        libProblem.y[i] = ys.get(i);
    }
    if (m_bias) {
        // Include the bias term: global model + one block per user.
        libProblem.n = (m_featureSize + 1) * (m_userSize + 1);
        // Enable the bias term in liblinear.
        libProblem.bias = 1;
    } else {
        libProblem.n = m_featureSize * (m_userSize + 1);
        // No bias term in liblinear.
        libProblem.bias = -1;
    }
    // Solver type: L2-regularized L1-loss SVM (dual).
    SolverType type = SolverType.L2R_L1LOSS_SVC_DUAL;
    m_libModel = Linear.train(libProblem, new Parameter(type, m_C, SVM.EPS));
    setPersonalizedModel();
    return 0;
}
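The multi-task formulation concatenates a shared global weight block with one block per user, so a review has to be encoded with the same per-user offset at prediction time as during training. The following is only a minimal sketch, assuming the bundled liblinear port mirrors the standard liblinear-java API (Linear.predict) and reusing the repository's createLibLinearFV helper; the variables r and uIdx are hypothetical.
    // Hypothetical sketch: classify one review r of a user whose training-time index is uIdx.
    Feature[] x = createLibLinearFV(r, uIdx);    // same per-user feature encoding as in train()
    double yHat = Linear.predict(m_libModel, x); // liblinear-java-style prediction call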
Use of Classifier.supervised.liblinear.Parameter in project IR_Base by Linda-sunshine.
The class IndividualSVM, method train().
@Override
public double train() {
    init();
    // Transfer each user's reviews into instances recognized by liblinear.
    int trainSize = 0, validUserIndex = 0;
    ArrayList<Feature[]> fvs = new ArrayList<Feature[]>();
    ArrayList<Double> ys = new ArrayList<Double>();
    // Two nested loops to access the reviews, indexed by user.
    ArrayList<_Review> reviews;
    for (_AdaptStruct user : m_supFlag ? m_supUserList : m_userList) {
        // Each user gets an independent model, so reset the instance buffers per user.
        trainSize = 0;
        fvs.clear();
        ys.clear();
        reviews = user.getReviews();
        boolean validUser = false;
        for (_Review r : reviews) {
            if (r.getType() == rType.ADAPTATION) {
                // We only use the adaptation data for training.
                fvs.add(createLibLinearFV(r, validUserIndex));
                ys.add(new Double(r.getYLabel()));
                trainSize++;
                validUser = true;
            }
        }
        if (validUser)
            validUserIndex++;
        // Train an individual model for this user.
        Problem libProblem = new Problem();
        libProblem.l = trainSize;
        libProblem.x = new Feature[trainSize][];
        libProblem.y = new double[trainSize];
        for (int i = 0; i < trainSize; i++) {
            libProblem.x[i] = fvs.get(i);
            libProblem.y[i] = ys.get(i);
        }
        if (m_bias) {
            // Include the bias term in the per-user model.
            libProblem.n = m_featureSize + 1;
            // Enable the bias term in liblinear.
            libProblem.bias = 1;
        } else {
            libProblem.n = m_featureSize;
            // No bias term in liblinear.
            libProblem.bias = -1;
        }
        m_libModel = Linear.train(libProblem, new Parameter(m_solverType, m_C, SVM.EPS));
        // Propagate the trained weights to this user, or to all users in the same cluster.
        if (m_supFlag)
            setPersonalizedModelInCluster(user.getUser().getClusterIndex());
        else
            setPersonalizedModel(user);
    }
    return 0;
}
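Because each user's model is trained and consumed inside the loop, a per-user review can be scored directly against m_libModel before the next user overwrites it. A minimal sketch, assuming the bundled port exposes the usual liblinear-java calls (Linear.predictValues, Model.getNrClass); r is a hypothetical testing review of the current user.
    // Hypothetical sketch: score one review of the current user with the per-user model.
    Feature[] x = createLibLinearFV(r, validUserIndex);       // same encoding as training
    double[] decValues = new double[m_libModel.getNrClass()]; // decision values per class
    double yHat = Linear.predictValues(m_libModel, x, decValues);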
Use of Classifier.supervised.liblinear.Parameter in project IR_Base by Linda-sunshine.
The class SVM, method libSVMTrain(Collection).
public static Model libSVMTrain(Collection<_Doc> trainSet, int fSize, SolverType type, double C, double bias) {
    Feature[][] fvs = new Feature[trainSet.size()][];
    double[] y = new double[trainSet.size()];
    // Document index.
    int fid = 0;
    for (_Doc d : trainSet) {
        if (bias > 0)
            fvs[fid] = Utils.createLibLinearFV(d, fSize);
        else
            fvs[fid] = Utils.createLibLinearFV(d, 0);
        y[fid] = d.getYLabel();
        fid++;
    }
    Problem libProblem = new Problem();
    libProblem.l = fid;
    libProblem.n = bias >= 0 ? 1 + fSize : fSize;
    libProblem.x = fvs;
    libProblem.y = y;
    libProblem.bias = bias;
    return Linear.train(libProblem, new Parameter(type, C, SVM.EPS));
}
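A typical call of this overload hands over a labeled document collection and lets the method build the liblinear Problem. A minimal sketch, assuming a prepared Collection<_Doc> trainDocs and a featureSize variable; both names are illustrative, not from the source.
    // Hypothetical usage: train an L2-regularized L1-loss SVM with a bias column.
    Model m = SVM.libSVMTrain(trainDocs,        // Collection<_Doc>, assumed prepared by the caller
            featureSize,                        // dimensionality of the document vectors
            SolverType.L2R_L1LOSS_SVC_DUAL,     // same solver MultiTaskSVM uses above
            1.0,                                // C: regularization trade-off
            1.0);                               // bias > 0 appends the bias feature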
Use of Classifier.supervised.liblinear.Parameter in project IR_Base by Linda-sunshine.
The class SVM, method libSVMTrain(ArrayList).
public static Model libSVMTrain(ArrayList<Feature[]> featureArray, ArrayList<Integer> targetArray, int fSize, SolverType type, double C, double bias) {
    Feature[][] featureMatrix = new Feature[featureArray.size()][];
    double[] targetMatrix = new double[targetArray.size()];
    for (int i = 0; i < featureArray.size(); i++) {
        featureMatrix[i] = featureArray.get(i);
        targetMatrix[i] = targetArray.get(i);
    }
    Problem libProblem = new Problem();
    libProblem.l = featureMatrix.length;
    libProblem.n = fSize;
    libProblem.x = featureMatrix;
    libProblem.y = targetMatrix;
    libProblem.bias = bias;
    return Linear.train(libProblem, new Parameter(type, C, SVM.EPS));
}
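Unlike the Collection overload, this one takes pre-built sparse feature vectors and sets libProblem.n = fSize directly, so any bias handling is left to the caller. A minimal usage sketch with hand-built FeatureNode instances (all values below are illustrative), assuming the bundled port's FeatureNode constructor matches liblinear-java's (index, value) form with 1-based indices.
    // Hypothetical usage: build sparse instances by hand and train with this overload.
    ArrayList<Feature[]> xs = new ArrayList<Feature[]>();
    ArrayList<Integer> ys = new ArrayList<Integer>();
    xs.add(new Feature[] { new FeatureNode(1, 0.5), new FeatureNode(3, 1.2) }); // 1-based indices
    ys.add(1);
    xs.add(new Feature[] { new FeatureNode(2, 0.8) });
    ys.add(-1);
    Model m = SVM.libSVMTrain(xs, ys, 3, SolverType.L2R_L1LOSS_SVC_DUAL, 1.0, -1.0); // bias = -1: none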
Use of Classifier.supervised.liblinear.Parameter in project IR_Base by Linda-sunshine.
The class ACCTM_CZLR, method updateFeatureWeight().
public void updateFeatureWeight(_ParentDoc pDoc, int iter, File weightIterFolder) {
    int totalChildWordNum = 0;
    int featureLen = 0;
    ArrayList<Double> targetValList = new ArrayList<Double>();
    ArrayList<Feature[]> featureList = new ArrayList<Feature[]>();
    // Collect one training instance per word in the child documents.
    for (_ChildDoc cDoc : pDoc.m_childDocs) {
        for (_Word w : cDoc.getWords()) {
            double[] wordFeatures = w.getFeatures();
            double x = w.getX();
            featureLen = wordFeatures.length;
            Feature[] featureVec = new Feature[featureLen];
            for (int i = 0; i < featureLen; i++) {
                // liblinear feature indices are 1-based.
                featureVec[i] = new FeatureNode(i + 1, wordFeatures[i]);
            }
            featureList.add(featureVec);
            targetValList.add(x);
        }
    }
    totalChildWordNum = featureList.size();
    double[] targetVal = new double[totalChildWordNum];
    Feature[][] featureMatrix = new Feature[totalChildWordNum][];
    for (int i = 0; i < totalChildWordNum; i++) {
        featureMatrix[i] = featureList.get(i);
    }
    for (int i = 0; i < totalChildWordNum; i++) {
        targetVal[i] = targetValList.get(i);
    }
    Problem problem = new Problem();
    problem.l = totalChildWordNum;
    // Feature dimensionality.
    problem.n = featureLen + 1;
    problem.x = featureMatrix;
    problem.y = targetVal;
    // L2-regularized logistic regression.
    SolverType solver = SolverType.L2R_LR;
    double C = 1.0;
    double eps = 0.01;
    Parameter param = new Parameter(solver, C, eps);
    Model model = Linear.train(problem, param);
    // Copy the learned coefficients into the parent document's feature weights.
    int featureNum = model.getNrFeature();
    for (int i = 0; i < featureNum; i++)
        pDoc.m_featureWeight[i] = model.getDecfunCoef(i, 0);
    // Save the per-iteration model for inspection.
    String weightFile = pDoc.getName() + ".txt";
    File modelFile = new File(weightIterFolder, weightFile);
    try {
        // if ((iter > 200) && (iter % 100 == 0))
        model.save(modelFile);
    } catch (Exception e) {
        System.out.println(e.getMessage());
    }
}
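The trained L2R_LR coefficients are stored in pDoc.m_featureWeight, which suggests they are later turned into a logistic probability for each child word's x indicator. The following is only a sketch of that use, assuming the weights align one-to-one with a word's feature vector (ignoring any index offset between the liblinear model and the array); it is not taken from the source.
    // Hypothetical sketch: score a child word with the freshly updated weights.
    double dot = 0;
    double[] feats = w.getFeatures();          // same features used to build the training instances
    for (int i = 0; i < feats.length; i++)
        dot += pDoc.m_featureWeight[i] * feats[i];
    double pX = 1.0 / (1.0 + Math.exp(-dot));  // logistic probability for the word's x indicator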