Usage of Classifier.supervised.liblinear.SolverType in project IR_Base by Linda-sunshine: the train() method of class MultiTaskSVM.
@Override
public double train() {
    init();

    // Convert every user's adaptation reviews into liblinear feature vectors.
    // Instances are indexed by the user's *valid* position so that feature
    // offsets for per-user weight segments stay dense.
    int trainSize = 0, validUserIndex = 0;
    ArrayList<Feature[]> fvs = new ArrayList<Feature[]>();
    ArrayList<Double> ys = new ArrayList<Double>();
    ArrayList<_Review> reviews;
    for (_AdaptStruct user : m_userList) {
        reviews = user.getReviews();
        boolean validUser = false;
        for (_Review r : reviews) {
            if (r.getType() == rType.ADAPTATION) {
                // Only adaptation reviews contribute to training.
                fvs.add(createLibLinearFV(r, validUserIndex));
                // Double.valueOf avoids the deprecated new Double(...) constructor.
                ys.add(Double.valueOf(r.getYLabel()));
                trainSize++;
                validUser = true;
            }
        }
        // Advance the index only for users that contributed at least one instance.
        if (validUser)
            validUserIndex++;
    }

    // Assemble the liblinear problem from the collected instances.
    Problem libProblem = new Problem();
    libProblem.l = trainSize;
    libProblem.x = new Feature[trainSize][];
    libProblem.y = new double[trainSize];
    for (int i = 0; i < trainSize; i++) {
        libProblem.x[i] = fvs.get(i);
        libProblem.y[i] = ys.get(i);
    }

    if (m_bias) {
        // Bias term included: one extra dimension per model (global + each user).
        libProblem.n = (m_featureSize + 1) * (m_userSize + 1);
        libProblem.bias = 1; // tells liblinear to append the bias feature
    } else {
        libProblem.n = m_featureSize * (m_userSize + 1);
        libProblem.bias = -1; // no bias term in liblinear
    }

    // Solver: L2-regularized L1-loss SVM, solved in the dual.
    SolverType type = SolverType.L2R_L1LOSS_SVC_DUAL;
    m_libModel = Linear.train(libProblem, new Parameter(type, m_C, SVM.EPS));

    setPersonalizedModel();
    return 0;
}
Usage of Classifier.supervised.liblinear.SolverType in project IR_Base by Linda-sunshine: the updateFeatureWeight() method of class ACCTM_CZLR.
public void updateFeatureWeight(_ParentDoc pDoc, int iter, File weightIterFolder) {
    // Build a logistic-regression training set from all child-document words:
    // each word contributes its feature vector and its sampled x value as target.
    int totalChildWordNum = 0;
    int featureLen = 0;
    ArrayList<Double> targetValList = new ArrayList<Double>();
    ArrayList<Feature[]> featureList = new ArrayList<Feature[]>();
    for (_ChildDoc cDoc : pDoc.m_childDocs) {
        for (_Word w : cDoc.getWords()) {
            double[] wordFeatures = w.getFeatures();
            double x = w.getX();
            featureLen = wordFeatures.length;
            // liblinear feature indices are 1-based.
            Feature[] featureVec = new Feature[featureLen];
            for (int i = 0; i < featureLen; i++) {
                featureVec[i] = new FeatureNode(i + 1, wordFeatures[i]);
            }
            featureList.add(featureVec);
            targetValList.add(x);
        }
    }

    // Copy instances into the dense arrays liblinear expects.
    // (Single pass — the original iterated the same range twice.)
    totalChildWordNum = featureList.size();
    double[] targetVal = new double[totalChildWordNum];
    Feature[][] featureMatrix = new Feature[totalChildWordNum][];
    for (int i = 0; i < totalChildWordNum; i++) {
        featureMatrix[i] = featureList.get(i);
        targetVal[i] = targetValList.get(i);
    }

    Problem problem = new Problem();
    problem.l = totalChildWordNum;
    // featureNum; +1 follows the original convention — presumably reserves a
    // bias slot, though problem.bias is left at its default here (confirm).
    problem.n = featureLen + 1;
    problem.x = featureMatrix;
    problem.y = targetVal;

    // L2-regularized logistic regression with fixed C and epsilon.
    SolverType solver = SolverType.L2R_LR;
    double C = 1.0;
    double eps = 0.01;
    Parameter param = new Parameter(solver, C, eps);
    Model model = Linear.train(problem, param);

    // Copy the learned coefficients back into the parent document's weights.
    int featureNum = model.getNrFeature();
    for (int i = 0; i < featureNum; i++)
        pDoc.m_featureWeight[i] = model.getDecfunCoef(i, 0);

    // Persist this parent document's model (one file per document).
    String weightFile = pDoc.getName() + ".txt";
    File modelFile = new File(weightIterFolder, weightFile);
    try {
        // if((iter>200)&&(iter%100==0))
        model.save(modelFile);
    } catch (Exception e) {
        // Report with context on stderr; the original printed only
        // e.getMessage() (possibly null) to stdout, losing the exception type.
        System.err.println("Failed to save feature-weight model to " + modelFile + ": " + e);
    }
}
Aggregations