Usage example of Classifier.supervised.liblinear.Model in project IR_Base by Linda-sunshine: method updateFeatureWeight of class ACCTM_CZLR.
/**
 * Re-trains the per-parent-document feature weights with an L2-regularized
 * logistic regression (liblinear) and saves the resulting model to disk.
 *
 * One training instance is built per word of every child document of
 * {@code pDoc}: the word's feature vector is the input and its sampled
 * x indicator is the target label.
 *
 * @param pDoc             parent document whose m_featureWeight is updated in place
 * @param iter             current sampling iteration (used only for bookkeeping)
 * @param weightIterFolder folder the trained model file is written into
 */
public void updateFeatureWeight(_ParentDoc pDoc, int iter, File weightIterFolder) {
	int featureLen = 0;
	ArrayList<Double> targetValList = new ArrayList<Double>();
	ArrayList<Feature[]> featureList = new ArrayList<Feature[]>();

	// Collect one (feature vector, x label) pair per child-document word.
	for (_ChildDoc cDoc : pDoc.m_childDocs) {
		for (_Word w : cDoc.getWords()) {
			double[] wordFeatures = w.getFeatures();
			featureLen = wordFeatures.length;
			Feature[] featureVec = new Feature[featureLen];
			for (int i = 0; i < featureLen; i++) {
				// liblinear feature indices are 1-based
				featureVec[i] = new FeatureNode(i + 1, wordFeatures[i]);
			}
			featureList.add(featureVec);
			targetValList.add(w.getX());
		}
	}

	int totalChildWordNum = featureList.size();
	if (totalChildWordNum == 0)
		return; // no child words: liblinear cannot train on an empty problem

	// Bulk-copy the collected instances instead of element-by-element loops.
	Feature[][] featureMatrix = featureList.toArray(new Feature[totalChildWordNum][]);
	double[] targetVal = new double[totalChildWordNum];
	for (int i = 0; i < totalChildWordNum; i++)
		targetVal[i] = targetValList.get(i);

	Problem problem = new Problem();
	problem.l = totalChildWordNum; // number of training instances
	// featureNum: dimensionality is featureLen + 1 because indices are 1-based
	problem.n = featureLen + 1;
	problem.x = featureMatrix;
	problem.y = targetVal;

	// L2-regularized logistic regression, trade-off C = 1.0, tolerance 0.01.
	Parameter param = new Parameter(SolverType.L2R_LR, 1.0, 0.01);
	Model model = Linear.train(problem, param);

	// Copy the learned decision-function coefficients (class 0) back into
	// the parent document's per-feature weight vector.
	int featureNum = model.getNrFeature();
	for (int i = 0; i < featureNum; i++)
		pDoc.m_featureWeight[i] = model.getDecfunCoef(i, 0);

	// Persist the model per parent document; a failed save must not abort
	// the sampling run, but keep the full stack trace for diagnosis.
	File modelFile = new File(weightIterFolder, pDoc.getName() + ".txt");
	try {
		model.save(modelFile);
	} catch (Exception e) {
		e.printStackTrace();
	}
}
Usage example of Classifier.supervised.liblinear.Model in project IR_Base by Linda-sunshine: method L2RModelTraining of class L2RMetricLearning.
/**
 * Trains the learning-to-rank model selected by m_ranker and stores the
 * learned weight vector in m_weights.
 *
 * Ranker ids: 0 = RankSVM (liblinear), 1 = LambdaRank (single- or
 * multi-threaded depending on m_multithread), 2 = RankNet.
 */
protected void L2RModelTraining() {
	// select the training pairs
	createTrainingCorpus();

	switch (m_ranker) {
	case 0: {
		// RankSVM: extract pairwise preferences and fit a linear SVM.
		ArrayList<Feature[]> fvs = new ArrayList<Feature[]>();
		ArrayList<Integer> labels = new ArrayList<Integer>();
		for (_Query q : m_queries)
			q.extractPairs4RankSVM(fvs, labels);
		Model rankSVM = SVM.libSVMTrain(fvs, labels, RankFVSize, SolverType.L2R_L1LOSS_SVC_DUAL, m_tradeoff, -1);
		m_weights = rankSVM.getFeatureWeights();
		System.out.format("RankSVM training performance:\nMAP: %.4f\n", evaluate(OptimizationType.OT_MAP));
		break;
	}
	case 1: {
		// all the rest use LambdaRank with different evaluator
		LambdaRank lambdaRank;
		if (m_multithread) {
			// multi-thread version
			lambdaRank = new LambdaRankParallel(RankFVSize, m_tradeoff, m_queries, OptimizationType.OT_MAP, 10);
			lambdaRank.setSigns(getRankingFVSigns());
			lambdaRank.train(100, 100, 1.0, 0.95); // lambdaRank specific parameters
		} else {
			// single-thread version
			lambdaRank = new LambdaRank(RankFVSize, m_tradeoff, m_queries, OptimizationType.OT_MAP);
			lambdaRank.setSigns(getRankingFVSigns());
			lambdaRank.train(300, 20, 1.0, 0.98); // lambdaRank specific parameters
		}
		m_weights = lambdaRank.getWeights();
		break;
	}
	case 2: {
		// RankNet: pairwise neural ranker trained on raw feature-value pairs.
		RankNet ranknet = new RankNet(RankFVSize, 5.0);
		ArrayList<double[]> rfvs = new ArrayList<double[]>();
		for (_Query q : m_queries)
			q.extractPairs4RankNet(rfvs);
		ranknet.setSigns(getRankingFVSigns());
		double likelihood = ranknet.train(rfvs);
		m_weights = ranknet.getWeights();
		System.out.format("RankNet training performance:\nlog-likelihood: %.4f\t MAP: %.4f\n", likelihood, evaluate(OptimizationType.OT_MAP));
		break;
	}
	default:
		// unrecognized ranker id: leave m_weights as-is (matches original flow)
		break;
	}

	// Dump the learned weight vector for inspection.
	for (int i = 0; i < RankFVSize; i++)
		System.out.format("%.5f ", m_weights[i]);
	System.out.println();
}
Aggregations