use of edu.illinois.cs.cogcomp.ner.LbjFeatures.NETaggerLevel2 in project cogcomp-nlp by CogComp.
the class LearningCurveMultiDataset method getLearningCurve.
/**
 * Use fixedNumIterations=-1 if you want to use the automatic convergence criterion.
 * <p>
 * NB: assumes column format.
 */
public static void getLearningCurve(Vector<Data> trainDataSet, Vector<Data> testDataSet, int fixedNumIterations) throws Exception {
    double bestF1Level1 = -1;
    int bestRoundLevel1 = 0;
    // Get the directory name (<configname>.model is appended in LbjTagger/Parameters.java:139)
    String modelPath = ParametersForLbjCode.currentParameters.pathToModelFile;
    String modelPathDir = modelPath.substring(0, modelPath.lastIndexOf("/"));
    if (IOUtils.exists(modelPathDir)) {
        if (!IOUtils.isDirectory(modelPathDir)) {
            String msg = "ERROR: " + NAME + ".getLearningCurve(): model directory '" + modelPathDir + "' already exists as a (non-directory) file.";
            logger.error(msg);
            throw new IOException(msg);
        } else
            logger.warn(NAME + ".getLearningCurve(): writing to existing model path '" + modelPathDir + "'...");
    } else {
        IOUtils.mkdir(modelPathDir);
    }
    NETaggerLevel1.Parameters paramLevel1 = new NETaggerLevel1.Parameters();
    paramLevel1.baseLTU = new SparseAveragedPerceptron(ParametersForLbjCode.currentParameters.learningRatePredictionsLevel1, 0, ParametersForLbjCode.currentParameters.thicknessPredictionsLevel1);
    logger.info("Level 1 classifier learning rate = " + ParametersForLbjCode.currentParameters.learningRatePredictionsLevel1 + ", thickness = " + ParametersForLbjCode.currentParameters.thicknessPredictionsLevel1);
    NETaggerLevel1 tagger1 = new NETaggerLevel1(paramLevel1, modelPath + ".level1", modelPath + ".level1.lex");
    tagger1.forget();
    for (int dataId = 0; dataId < trainDataSet.size(); dataId++) {
        Data trainData = trainDataSet.elementAt(dataId);
        if (ParametersForLbjCode.currentParameters.featuresToUse.containsKey("PredictionsLevel1")) {
            PredictionsAndEntitiesConfidenceScores.getAndMarkEntities(trainData, NEWord.LabelToLookAt.GoldLabel);
            TwoLayerPredictionAggregationFeatures.setLevel1AggregationFeatures(trainData, true);
        }
    }
    // Pre-extract the L1 train and test data.
    String path = ParametersForLbjCode.currentParameters.pathToModelFile;
    String trainPathL1 = path + ".level1.prefetchedTrainData";
    File deleteme = new File(trainPathL1);
    if (deleteme.exists())
        deleteme.delete();
    String testPathL1 = path + ".level1.prefetchedTestData";
    deleteme = new File(testPathL1);
    if (deleteme.exists())
        deleteme.delete();
    logger.info("Pre-extracting the training data for Level 1 classifier, saving to " + trainPathL1);
    BatchTrainer bt1train = prefetchAndGetBatchTrainer(tagger1, trainDataSet, trainPathL1);
    logger.info("Pre-extracting the testing data for Level 1 classifier, saving to " + testPathL1);
    BatchTrainer bt1test = prefetchAndGetBatchTrainer(tagger1, testDataSet, testPathL1);
    Parser testParser1 = bt1test.getParser();
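    // Automatic convergence: with fixedNumIterations == -1, train for at most 200 rounds and
    // stop once 10 rounds pass without an F1 improvement; otherwise run a fixed number of rounds.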
    for (int i = 0; (fixedNumIterations == -1 && i < 200 && i - bestRoundLevel1 < 10) || (fixedNumIterations > 0 && i <= fixedNumIterations); ++i) {
        bt1train.train(1);
        testParser1.reset();
        TestDiscrete simpleTest = new TestDiscrete();
        simpleTest.addNull("O");
        TestDiscrete.testDiscrete(simpleTest, tagger1, null, testParser1, true, 0);
        double f1Level1 = simpleTest.getOverallStats()[2];
        if (f1Level1 > bestF1Level1) {
            bestF1Level1 = f1Level1;
            bestRoundLevel1 = i;
            tagger1.save();
        }
        logger.info(i + " rounds. Best so far for Level1 : (" + bestRoundLevel1 + ")=" + bestF1Level1);
    }
    logger.info("Level 1; best round : " + bestRoundLevel1 + "\tbest F1 : " + bestF1Level1);
    // Trash any stale L2 prefetch data.
    String trainPathL2 = path + ".level2.prefetchedTrainData";
    deleteme = new File(trainPathL2);
    if (deleteme.exists())
        deleteme.delete();
    String testPathL2 = path + ".level2.prefetchedTestData";
    deleteme = new File(testPathL2);
    if (deleteme.exists())
        deleteme.delete();
    NETaggerLevel2.Parameters paramLevel2 = new NETaggerLevel2.Parameters();
    paramLevel2.baseLTU = new SparseAveragedPerceptron(ParametersForLbjCode.currentParameters.learningRatePredictionsLevel2, 0, ParametersForLbjCode.currentParameters.thicknessPredictionsLevel2);
    NETaggerLevel2 tagger2 = new NETaggerLevel2(paramLevel2, ParametersForLbjCode.currentParameters.pathToModelFile + ".level2", ParametersForLbjCode.currentParameters.pathToModelFile + ".level2.lex");
    tagger2.forget();
    // Previously checked if PatternFeatures was in featuresToUse.
    if (ParametersForLbjCode.currentParameters.featuresToUse.containsKey("PredictionsLevel1")) {
        logger.info("Level 2 classifier learning rate = " + ParametersForLbjCode.currentParameters.learningRatePredictionsLevel2 + ", thickness = " + ParametersForLbjCode.currentParameters.thicknessPredictionsLevel2);
        double bestF1Level2 = -1;
        int bestRoundLevel2 = 0;
        logger.info("Pre-extracting the training data for Level 2 classifier, saving to " + trainPathL2);
        BatchTrainer bt2train = prefetchAndGetBatchTrainer(tagger2, trainDataSet, trainPathL2);
        logger.info("Pre-extracting the testing data for Level 2 classifier, saving to " + testPathL2);
        BatchTrainer bt2test = prefetchAndGetBatchTrainer(tagger2, testDataSet, testPathL2);
        Parser testParser2 = bt2test.getParser();
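        // Same early-stopping criterion as Level 1: at most 200 rounds, stop after 10 rounds without improvement.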
        for (int i = 0; (fixedNumIterations == -1 && i < 200 && i - bestRoundLevel2 < 10) || (fixedNumIterations > 0 && i <= fixedNumIterations); ++i) {
            logger.info("Learning level 2 classifier; round " + i);
            bt2train.train(1);
            logger.info("Testing level 2 classifier; on prefetched data, round: " + i);
            testParser2.reset();
            TestDiscrete simpleTest = new TestDiscrete();
            simpleTest.addNull("O");
            TestDiscrete.testDiscrete(simpleTest, tagger2, null, testParser2, true, 0);
            double f1Level2 = simpleTest.getOverallStats()[2];
            if (f1Level2 > bestF1Level2) {
                bestF1Level2 = f1Level2;
                bestRoundLevel2 = i;
                tagger2.save();
            }
            logger.info(i + " rounds. Best so far for Level2 : (" + bestRoundLevel2 + ") " + bestF1Level2);
        }
        // Trash the L2 prefetch data.
        deleteme = new File(trainPathL2);
        if (deleteme.exists())
            deleteme.delete();
        deleteme = new File(testPathL2);
        if (deleteme.exists())
            deleteme.delete();
        logger.info("Level1: bestround=" + bestRoundLevel1 + "\t F1=" + bestF1Level1 + "\t Level2: bestround=" + bestRoundLevel2 + "\t F1=" + bestF1Level2);
    }
    /*
     * This overwrites the saved models, forcing the save of the final (fixedNumIterations)
     * iteration. Note that both layers are saved for this iteration, so if the best
     * performance for one of the layers came before the final iteration, that layer's
     * performance will decrease.
     */
    if (fixedNumIterations > -1) {
        tagger1.save();
        tagger2.save();
    }
}
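For context, a minimal sketch of how this method might be driven; the data paths and the "-c" (column) format flag below are illustrative assumptions, not taken from the project.

// Hypothetical driver code; paths and the "-c" format flag are placeholders.
Vector<Data> train = new Vector<>();
train.addElement(new Data("data/train", "data/train", "-c", new String[] {}, new String[] {}));
Vector<Data> test = new Vector<>();
test.addElement(new Data("data/test", "data/test", "-c", new String[] {}, new String[] {}));
// -1 selects the automatic convergence criterion documented above.
LearningCurveMultiDataset.getLearningCurve(train, test, -1);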
use of edu.illinois.cs.cogcomp.ner.LbjFeatures.NETaggerLevel2 in project cogcomp-nlp by CogComp.
the class NEDisplayPredictions method test.
/**
 * Display the predictions, the gazetteer matches and the labels.
 *
 * @param testDatapath path to the test data.
 * @param dataFormat the data format.
 * @param verbose if true, report more detail.
 * @throws Exception
 */
public static void test(String testDatapath, String dataFormat, boolean verbose) throws Exception {
    Data testData = new Data(testDatapath, testDatapath, dataFormat, new String[] {}, new String[] {});
    ExpressiveFeaturesAnnotator.annotate(testData);
    Vector<Data> data = new Vector<>();
    data.addElement(testData);
    NETaggerLevel1 taggerLevel1 = new NETaggerLevel1(ParametersForLbjCode.currentParameters.pathToModelFile + ".level1", ParametersForLbjCode.currentParameters.pathToModelFile + ".level1.lex");
    NETaggerLevel2 taggerLevel2 = null;
    if (ParametersForLbjCode.currentParameters.featuresToUse.containsKey("PredictionsLevel1")) {
        taggerLevel2 = new NETaggerLevel2(ParametersForLbjCode.currentParameters.pathToModelFile + ".level2", ParametersForLbjCode.currentParameters.pathToModelFile + ".level2.lex");
    }
    for (int i = 0; i < data.size(); i++)
        Decoder.annotateDataBIO(data.elementAt(i), taggerLevel1, taggerLevel2);
    reportPredictions(data.get(0));
}
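A hedged example invocation; the path is a placeholder and "-c" is assumed to select the column data format.

// Hypothetical call site.
NEDisplayPredictions.test("data/test", "-c", true);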
use of edu.illinois.cs.cogcomp.ner.LbjFeatures.NETaggerLevel2 in project cogcomp-nlp by CogComp.
the class NETagPlain method init.
/**
 * Assumes ParametersForLbjCode has been initialized.
 */
public static void init() {
    String modelFile = ParametersForLbjCode.currentParameters.pathToModelFile;
    logger.info("Initializing tagger level 1...");
    tagger1 = new NETaggerLevel1(modelFile + ".level1", modelFile + ".level1.lex");
    if (ParametersForLbjCode.currentParameters.featuresToUse.containsKey("PredictionsLevel1")) {
        logger.info("Initializing tagger level 2...");
        tagger2 = new NETaggerLevel2(modelFile + ".level2", modelFile + ".level2.lex");
    }
}
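Since init() assumes ParametersForLbjCode has been initialized, a caller would typically load a configuration first. A sketch under that assumption; the use of Parameters.readConfigAndLoadExternalData and the config path are illustrative, not taken from this snippet.

// Assumed initialization sequence; the config path is a placeholder.
Parameters.readConfigAndLoadExternalData("config/ner.config", false);
NETagPlain.init();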
use of edu.illinois.cs.cogcomp.ner.LbjFeatures.NETaggerLevel2 in project cogcomp-nlp by CogComp.
the class Parameters method loadClassifierModels.
public static void loadClassifierModels(ParametersForLbjCode config, ParametersForLbjCode outter) {
    if (outter.debug) {
        logger.debug("Reading the model at: " + config.pathToModelFile + ".level1");
    }
    config.taggerLevel1 = new NETaggerLevel1(config.pathToModelFile + ".level1", config.pathToModelFile + ".level1.lex");
    if (outter.debug) {
        logger.debug("Reading the model at: " + config.pathToModelFile + ".level2");
    }
    config.taggerLevel2 = new NETaggerLevel2(config.pathToModelFile + ".level2", config.pathToModelFile + ".level2.lex");
    logger.debug("## Parameters.loadClassifierModels(): set taggerLevel1 and taggerLevel2 in config passed as argument.");
}
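A minimal sketch of a call site; passing the same object for both parameters is an assumption here (the second argument is consulted only for its debug flag).

// Hypothetical: cfg is a ParametersForLbjCode populated elsewhere.
Parameters.loadClassifierModels(cfg, cfg);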
use of edu.illinois.cs.cogcomp.ner.LbjFeatures.NETaggerLevel2 in project cogcomp-nlp by CogComp.
the class ModelLoader method load.
/**
 * Load the models wherever they are found. Check the file system first, then the classpath,
 * and finally fetch from the Minio datastore.
 * @param rm the resource manager.
 * @param viewName the name of the view that identifies the model.
 * @param training true if we are training.
 * @param cp the parameters for the calling model.
 */
public static void load(ResourceManager rm, String viewName, boolean training, ParametersForLbjCode cp) {
    // The loader built into the model checks the local file system and the jar files in the classpath.
    String modelPath = cp.pathToModelFile;
    String modelFilePath = modelPath + ".level1";
    java.io.File modelFile = new File(modelFilePath);
    NETaggerLevel1 tagger1 = null;
    NETaggerLevel2 tagger2 = null;
    if (modelFile.exists()) {
        tagger1 = new NETaggerLevel1(modelPath + ".level1", modelPath + ".level1.lex");
        logger.info("Reading L1 model from file : " + modelPath + ".level1");
        if (cp.featuresToUse.containsKey("PredictionsLevel1")) {
            tagger2 = new NETaggerLevel2(modelPath + ".level2", modelPath + ".level2.lex");
            logger.info("Reading L2 model from file : " + modelPath + ".level2");
        } else {
            logger.info("L2 model not required.");
        }
    } else if (IOUtilities.existsInClasspath(NETaggerLevel1.class, modelFilePath)) {
        tagger1 = new NETaggerLevel1(modelPath + ".level1", modelPath + ".level1.lex");
        logger.info("Reading L1 model from classpath : " + modelPath + ".level1");
        if (cp.featuresToUse.containsKey("PredictionsLevel1")) {
            tagger2 = new NETaggerLevel2(modelPath + ".level2", modelPath + ".level2.lex");
            logger.info("Reading L2 model from classpath : " + modelPath + ".level2");
        } else {
            logger.info("L2 model not required.");
        }
    } else if (training) {
        // We are training a new model, so if it doesn't exist we don't care; just create a
        // container.
        tagger1 = new NETaggerLevel1(modelPath + ".level1", modelPath + ".level1.lex");
        logger.info("Reading L1 model from file : " + modelPath + ".level1");
        if (cp.featuresToUse.containsKey("PredictionsLevel1")) {
            tagger2 = new NETaggerLevel2(modelPath + ".level2", modelPath + ".level2.lex");
            logger.info("Reading L2 model from file : " + modelPath + ".level2");
        } else {
            logger.info("L2 model not required.");
        }
    } else {
        // All else has failed; load from the datastore, creating artifact ids based on the view
        // name and training data designation.
        String dataset;
        String lowercaseViewName = viewName.toLowerCase();
        if (lowercaseViewName.contains(ViewNames.NER_CONLL.toLowerCase())) {
            dataset = "enron-conll";
        } else if (lowercaseViewName.contains(ViewNames.NER_ONTONOTES.toLowerCase())) {
            dataset = "ontonotes";
        } else {
            // Not a standard model, and we can't find it on the command line.
            throw new IllegalArgumentException("The NER models could not be found at \"" + modelPath + "\", and no default with view name " + viewName);
        }
        String data_split;
        if (!rm.containsKey(NerBaseConfigurator.TRAINED_ON))
            data_split = NerBaseConfigurator.TRAINED_ON_ALL_DATA;
        else
            data_split = rm.getString(NerBaseConfigurator.TRAINED_ON);
        try {
            Datastore ds = new Datastore(new ResourceConfigurator().getConfig(rm));
            String artifact_id = "ner-model-" + dataset + "-" + data_split;
            File modelDir = ds.getDirectory("edu.illinois.cs.cogcomp.ner", artifact_id, 4.0, false);
            String model = "";
            if (modelDir.getPath().contains("conll")) {
                model = modelDir.getPath() + "/model/EnronCoNLL.model";
            } else {
                model = modelDir.getPath() + "/model/OntoNotes.model";
            }
            tagger1 = new NETaggerLevel1(model + ".level1", model + ".level1.lex");
            if (cp.featuresToUse.containsKey("PredictionsLevel1")) {
                tagger2 = new NETaggerLevel2(model + ".level2", model + ".level2.lex");
            }
        } catch (InvalidPortException | DatastoreException | InvalidEndpointException e) {
            e.printStackTrace();
        }
    }
    cp.taggerLevel1 = tagger1;
    cp.taggerLevel2 = tagger2;
}
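A sketch of an inference-time load call; constructing the ResourceManager via getDefaultConfig() and the model path are assumptions for illustration.

// Hypothetical inference-time load; configuration details are assumptions.
ResourceManager rm = new NerBaseConfigurator().getDefaultConfig();
ParametersForLbjCode cp = ParametersForLbjCode.currentParameters;
cp.pathToModelFile = "models/myNerModel"; // placeholder model path
ModelLoader.load(rm, ViewNames.NER_CONLL, false, cp);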