Use of edu.cmu.minorthird.classify.ClassifierLearner in project lucida by claritylab.
The class HierarchicalClassifierLearner, method addExample.
/**
 * Adds a training example to each level of the hierarchy of learners.
 * For every prototype level, the example's label is split into a prefix
 * (selecting the sub-learner) and a sublabel (the class the sub-learner
 * should predict), and a corresponding sub-example is forwarded.
 *
 * @param example training example whose label encodes the full hierarchical class
 * @throws IllegalStateException if no sub-learner is registered for a derived
 *         prefix (i.e. setSchema was not called with a schema covering this label)
 */
public void addExample(Example example) {
    // The best class name is loop-invariant; compute it once instead of per level.
    String labelName = example.getLabel().bestClassName();
    for (int i = 0; i < prototypes.length; i++) {
        String prefix = getLabelPrefix(labelName, i);
        String sublabel = getSublabel(labelName, i);
        Example subExample = new Example(example.asInstance(), new ClassLabel(sublabel));
        ClassifierLearner subLearner = classifierLearners.get(prefix);
        if (subLearner == null) {
            // Fail with context rather than a bare NPE from the call below.
            throw new IllegalStateException("No sub-learner registered for prefix: " + prefix);
        }
        subLearner.addExample(subExample);
    }
}
Use of edu.cmu.minorthird.classify.ClassifierLearner in project lucida by claritylab.
The class HierarchicalClassifierLearner, method setSchema.
/**
 * Registers a sub-schema and sub-learner for every label prefix derived from
 * the given schema, one per prototype level. Prefixes that already have a
 * learner are left untouched. Sub-schemas with a single class get a
 * DummyClassifierLearner instead of a copy of the level's prototype.
 *
 * @param schema schema whose valid class names define the hierarchical labels
 */
public void setSchema(ExampleSchema schema) {
    for (String labelName : schema.validClassNames()) {
        for (int level = 0; level < prototypes.length; level++) {
            String prefix = getLabelPrefix(labelName, level);
            // Skip prefixes that already have a learner registered.
            if (classifierLearners.containsKey(prefix)) {
                continue;
            }
            System.out.println("Making new schema and learner for " + prefix);
            ExampleSchema subSchema = createSubSchema(schema, prefix, level);
            ClassifierLearner subLearner;
            if (subSchema.getNumberOfClasses() == 1) {
                // Nothing to discriminate; a dummy learner always predicts the lone class.
                System.out.println("Only 1 class to learn for " + prefix + "; using DummyClassifier and Learner");
                subLearner = new DummyClassifierLearner(subSchema.getClassName(0));
            } else {
                subLearner = prototypes[level].copy();
                subLearner.setSchema(subSchema);
            }
            classifierLearners.put(prefix, subLearner);
        }
    }
}
Use of edu.cmu.minorthird.classify.ClassifierLearner in project lucida by claritylab.
The class HierarchicalClassifierTrainer, method createLearnerByName.
/**
 * Creates a MinorThird classifier learner from a symbolic name.
 * Names are matched case-insensitively. Most learners use the
 * MinorThird-recommended default parameters; "SVM_OVA_CONF1" uses
 * an explicit libsvm test configuration.
 *
 * @param name symbolic learner name (e.g. "KNN", "MAX_ENT", "SVM_OVA")
 * @return the corresponding learner, or {@code null} if the name is unrecognized
 *         (an error message is printed to stderr in that case)
 */
public ClassifierLearner createLearnerByName(String name) {
    // K-Nearest-Neighbor learner, using m3rd recommended parameters
    if (name.equalsIgnoreCase("KNN")) {
        return new KnnLearner();
    }
    // K-Way Mixture learner, using m3rd recommended parameters
    if (name.equalsIgnoreCase("KWAY_MIX")) {
        return new KWayMixtureLearner();
    }
    // Maximum Entropy learner, using m3rd recommended parameters
    if (name.equalsIgnoreCase("MAX_ENT")) {
        return new MaxEntLearner();
    }
    // Balanced Winnow learner with One vs All binary transformer
    if (name.equalsIgnoreCase("BWINNOW_OVA")) {
        return new OneVsAllLearner(new BalancedWinnow());
    }
    // Margin Perceptron learner with One vs All binary transformer
    if (name.equalsIgnoreCase("MPERCEPTRON_OVA")) {
        return new OneVsAllLearner(new MarginPerceptron());
    }
    // Naive Bayes learner with One vs All binary transformer
    if (name.equalsIgnoreCase("NBAYES_OVA")) {
        return new OneVsAllLearner(new NaiveBayes());
    }
    // Voted Perceptron learner with One vs All binary transformer
    if (name.equalsIgnoreCase("VPERCEPTRON_OVA")) {
        return new OneVsAllLearner(new VotedPerceptron());
    }
    // Ada Boost learner with One vs All / Cascading / Most Frequent First transformers
    if (name.equalsIgnoreCase("ADABOOST_OVA")) {
        return new OneVsAllLearner(new AdaBoost());
    }
    if (name.equalsIgnoreCase("ADABOOST_CB")) {
        return new CascadingBinaryLearner(new AdaBoost());
    }
    if (name.equalsIgnoreCase("ADABOOST_MFF")) {
        return new MostFrequentFirstLearner(new AdaBoost());
    }
    // Ada Boost learner (Logistic Regression version) with the same three transformers
    if (name.equalsIgnoreCase("ADABOOSTL_OVA")) {
        return new OneVsAllLearner(new AdaBoost.L());
    }
    if (name.equalsIgnoreCase("ADABOOSTL_CB")) {
        return new CascadingBinaryLearner(new AdaBoost.L());
    }
    if (name.equalsIgnoreCase("ADABOOSTL_MFF")) {
        return new MostFrequentFirstLearner(new AdaBoost.L());
    }
    // Decision Tree learner with the three binary transformers
    if (name.equalsIgnoreCase("DTREE_OVA")) {
        return new OneVsAllLearner(new DecisionTreeLearner());
    }
    if (name.equalsIgnoreCase("DTREE_CB")) {
        return new CascadingBinaryLearner(new DecisionTreeLearner());
    }
    if (name.equalsIgnoreCase("DTREE_MFF")) {
        return new MostFrequentFirstLearner(new DecisionTreeLearner());
    }
    // Negative Binomial learner with the three binary transformers
    if (name.equalsIgnoreCase("NEGBI_OVA")) {
        return new OneVsAllLearner(new NegativeBinomialLearner());
    }
    if (name.equalsIgnoreCase("NEGBI_CB")) {
        return new CascadingBinaryLearner(new NegativeBinomialLearner());
    }
    if (name.equalsIgnoreCase("NEGBI_MFF")) {
        return new MostFrequentFirstLearner(new NegativeBinomialLearner());
    }
    // SVM learner with One vs All binary transformer, using m3rd recommended parameters
    if (name.equalsIgnoreCase("SVM_OVA")) {
        return new OneVsAllLearner(new SVMLearner());
    }
    // SVM learner with One vs All binary transformer, using testing parameters
    if (name.equalsIgnoreCase("SVM_OVA_CONF1")) {
        return new OneVsAllLearner(new SVMLearner(createTestSvmParameters()));
    }
    // SVM learner with Cascading / Most Frequent First binary transformers
    if (name.equalsIgnoreCase("SVM_CB")) {
        return new CascadingBinaryLearner(new SVMLearner());
    }
    if (name.equalsIgnoreCase("SVM_MFF")) {
        return new MostFrequentFirstLearner(new SVMLearner());
    }
    System.err.println("Unrecognized learner name: " + name);
    return null;
}

/**
 * Builds the explicit libsvm parameter set used by the "SVM_OVA_CONF1"
 * configuration: C-SVC with a degree-2 polynomial kernel.
 *
 * @return a fully populated {@code svm_parameter} instance
 */
private svm_parameter createTestSvmParameters() {
    svm_parameter param = new svm_parameter();
    param.svm_type = svm_parameter.C_SVC;
    param.kernel_type = svm_parameter.POLY;
    param.degree = 2;
    // 1/k
    param.gamma = 1;
    param.coef0 = 0;
    param.nu = 0.5;
    param.cache_size = 40;
    param.C = 1;
    param.eps = 1e-3;
    param.p = 0.1;
    param.shrinking = 1;
    param.nr_weight = 0;
    param.weight_label = new int[0];
    param.weight = new double[0];
    return param;
}
Use of edu.cmu.minorthird.classify.ClassifierLearner in project lucida by claritylab.
The class ScoreNormalizationFilter, method evaluate.
/**
 * Cross-validates the selected model on a data set built from serialized
 * results with the selected features, using NUM_FOLDS folds and a
 * time-seeded random split.
 *
 * @param serializedDir directory containing serialized results
 * @param features selected features
 * @param model selected model
 * @return evaluation statistics from the cross-validation
 */
public static Evaluation evaluate(String serializedDir, String[] features, String model) {
    // Build the data set and the learner for the requested model.
    Dataset dataSet = createDataset(features, serializedDir);
    ClassifierLearner learner = createLearner(model);
    // Cross-validate with a randomly seeded splitter.
    Splitter splitter =
            new CrossValSplitter(new RandomElement(System.currentTimeMillis()), NUM_FOLDS);
    CrossValidatedDataset cvDataset =
            new CrossValidatedDataset(learner, dataSet, splitter, true);
    return cvDataset.getEvaluation();
}
Use of edu.cmu.minorthird.classify.ClassifierLearner in project lucida by claritylab.
The class ScoreNormalizationFilter, method train.
/**
 * Trains a classifier on a training set built from serialized results
 * with the given features, using the learner for the given model.
 *
 * @param serializedDir directory containing serialized results
 * @param features selected features
 * @param model selected model
 * @return the trained classifier
 */
public static Classifier train(String serializedDir, String[] features, String model) {
    // Assemble the training set and a learner for the requested model,
    // then hand both to a dataset teacher to produce the classifier.
    Dataset trainingSet = createDataset(features, serializedDir);
    ClassifierLearner learner = createLearner(model);
    DatasetClassifierTeacher teacher = new DatasetClassifierTeacher(trainingSet);
    return teacher.train(learner);
}
Aggregations