
Example 1 with SparseAveragedPerceptron

Use of edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron in the cogcomp-nlp project by CogComp.

From the class LearningCurveMultiDataset, the method getLearningCurve:

/**
 * Use fixedNumIterations=-1 if you want to use the automatic convergence criterion.
 * <p>
 * NB: assuming column format
 */
public static void getLearningCurve(Vector<Data> trainDataSet, Vector<Data> testDataSet, int fixedNumIterations) throws Exception {
    double bestF1Level1 = -1;
    int bestRoundLevel1 = 0;
    // Get the directory name (<configname>.model is appended in LbjTagger/Parameters.java:139)
    String modelPath = ParametersForLbjCode.currentParameters.pathToModelFile;
    String modelPathDir = modelPath.substring(0, modelPath.lastIndexOf("/"));
    if (IOUtils.exists(modelPathDir)) {
        if (!IOUtils.isDirectory(modelPathDir)) {
            String msg = "ERROR: " + NAME + ".getLearningCurve(): model directory '" + modelPathDir + "' already exists as a (non-directory) file.";
            logger.error(msg);
            throw new IOException(msg);
        } else
            logger.warn(NAME + ".getLearningCurve(): writing to existing model path '" + modelPathDir + "'...");
    } else {
        IOUtils.mkdir(modelPathDir);
    }
    NETaggerLevel1.Parameters paramLevel1 = new NETaggerLevel1.Parameters();
    paramLevel1.baseLTU = new SparseAveragedPerceptron(ParametersForLbjCode.currentParameters.learningRatePredictionsLevel1, 0, ParametersForLbjCode.currentParameters.thicknessPredictionsLevel1);
    logger.info("Level 1 classifier learning rate = " + ParametersForLbjCode.currentParameters.learningRatePredictionsLevel1 + ", thickness = " + ParametersForLbjCode.currentParameters.thicknessPredictionsLevel1);
    NETaggerLevel1 tagger1 = new NETaggerLevel1(paramLevel1, modelPath + ".level1", modelPath + ".level1.lex");
    tagger1.forget();
    for (int dataId = 0; dataId < trainDataSet.size(); dataId++) {
        Data trainData = trainDataSet.elementAt(dataId);
        if (ParametersForLbjCode.currentParameters.featuresToUse.containsKey("PredictionsLevel1")) {
            PredictionsAndEntitiesConfidenceScores.getAndMarkEntities(trainData, NEWord.LabelToLookAt.GoldLabel);
            TwoLayerPredictionAggregationFeatures.setLevel1AggregationFeatures(trainData, true);
        }
    }
    // Pre-extract the L1 train and test data.
    String path = ParametersForLbjCode.currentParameters.pathToModelFile;
    String trainPathL1 = path + ".level1.prefetchedTrainData";
    File deleteme = new File(trainPathL1);
    if (deleteme.exists())
        deleteme.delete();
    String testPathL1 = path + ".level1.prefetchedTestData";
    deleteme = new File(testPathL1);
    if (deleteme.exists())
        deleteme.delete();
    logger.info("Pre-extracting the training data for Level 1 classifier, saving to " + trainPathL1);
    BatchTrainer bt1train = prefetchAndGetBatchTrainer(tagger1, trainDataSet, trainPathL1);
    logger.info("Pre-extracting the testing data for Level 1 classifier, saving to " + testPathL1);
    BatchTrainer bt1test = prefetchAndGetBatchTrainer(tagger1, testDataSet, testPathL1);
    Parser testParser1 = bt1test.getParser();
    for (int i = 0; (fixedNumIterations == -1 && i < 200 && i - bestRoundLevel1 < 10) || (fixedNumIterations > 0 && i <= fixedNumIterations); ++i) {
        bt1train.train(1);
        testParser1.reset();
        TestDiscrete simpleTest = new TestDiscrete();
        simpleTest.addNull("O");
        TestDiscrete.testDiscrete(simpleTest, tagger1, null, testParser1, true, 0);
        double f1Level1 = simpleTest.getOverallStats()[2];
        if (f1Level1 > bestF1Level1) {
            bestF1Level1 = f1Level1;
            bestRoundLevel1 = i;
            tagger1.save();
        }
        logger.info(i + " rounds.  Best so far for Level1 : (" + bestRoundLevel1 + ")=" + bestF1Level1);
    }
    logger.info("Level 1; best round : " + bestRoundLevel1 + "\tbest F1 : " + bestF1Level1);
    // Delete any stale L2 prefetch data before re-extracting.
    String trainPathL2 = path + ".level2.prefetchedTrainData";
    deleteme = new File(trainPathL2);
    if (deleteme.exists())
        deleteme.delete();
    String testPathL2 = path + ".level2.prefetchedTestData";
    deleteme = new File(testPathL2);
    if (deleteme.exists())
        deleteme.delete();
    NETaggerLevel2.Parameters paramLevel2 = new NETaggerLevel2.Parameters();
    paramLevel2.baseLTU = new SparseAveragedPerceptron(ParametersForLbjCode.currentParameters.learningRatePredictionsLevel2, 0, ParametersForLbjCode.currentParameters.thicknessPredictionsLevel2);
    NETaggerLevel2 tagger2 = new NETaggerLevel2(paramLevel2, ParametersForLbjCode.currentParameters.pathToModelFile + ".level2", ParametersForLbjCode.currentParameters.pathToModelFile + ".level2.lex");
    tagger2.forget();
    // Previously checked if PatternFeatures was in featuresToUse.
    if (ParametersForLbjCode.currentParameters.featuresToUse.containsKey("PredictionsLevel1")) {
        logger.info("Level 2 classifier learning rate = " + ParametersForLbjCode.currentParameters.learningRatePredictionsLevel2 + ", thickness = " + ParametersForLbjCode.currentParameters.thicknessPredictionsLevel2);
        double bestF1Level2 = -1;
        int bestRoundLevel2 = 0;
        logger.info("Pre-extracting the training data for Level 2 classifier, saving to " + trainPathL2);
        BatchTrainer bt2train = prefetchAndGetBatchTrainer(tagger2, trainDataSet, trainPathL2);
        logger.info("Pre-extracting the testing data for Level 2 classifier, saving to " + testPathL2);
        BatchTrainer bt2test = prefetchAndGetBatchTrainer(tagger2, testDataSet, testPathL2);
        Parser testParser2 = bt2test.getParser();
        for (int i = 0; (fixedNumIterations == -1 && i < 200 && i - bestRoundLevel2 < 10) || (fixedNumIterations > 0 && i <= fixedNumIterations); ++i) {
            logger.info("Learning level 2 classifier; round " + i);
            bt2train.train(1);
            logger.info("Testing level 2 classifier;  on prefetched data, round: " + i);
            testParser2.reset();
            TestDiscrete simpleTest = new TestDiscrete();
            simpleTest.addNull("O");
            TestDiscrete.testDiscrete(simpleTest, tagger2, null, testParser2, true, 0);
            double f1Level2 = simpleTest.getOverallStats()[2];
            if (f1Level2 > bestF1Level2) {
                bestF1Level2 = f1Level2;
                bestRoundLevel2 = i;
                tagger2.save();
            }
            logger.info(i + " rounds.  Best so far for Level2 : (" + bestRoundLevel2 + ") " + bestF1Level2);
        }
        // trash the l2 prefetch data
        deleteme = new File(trainPathL2);
        if (deleteme.exists())
            deleteme.delete();
        deleteme = new File(testPathL2);
        if (deleteme.exists())
            deleteme.delete();
        logger.info("Level1: bestround=" + bestRoundLevel1 + "\t F1=" + bestF1Level1 + "\t Level2: bestround=" + bestRoundLevel2 + "\t F1=" + bestF1Level2);
    }
    /*
     * This overwrites the saved models, forcing the save of the iteration we're interested
     * in: the fixedNumIterations-th, i.e., the last one. Note that both layers are saved
     * for this iteration, so if the best performance for one of the layers came before the
     * final iteration, performance will decrease slightly.
     */
    if (fixedNumIterations > -1) {
        tagger1.save();
        tagger2.save();
    }
}
Also used : NETaggerLevel2(edu.illinois.cs.cogcomp.ner.LbjFeatures.NETaggerLevel2) NETaggerLevel1(edu.illinois.cs.cogcomp.ner.LbjFeatures.NETaggerLevel1) TestDiscrete(edu.illinois.cs.cogcomp.lbjava.classify.TestDiscrete) IOException(java.io.IOException) Parser(edu.illinois.cs.cogcomp.lbjava.parse.Parser) BatchTrainer(edu.illinois.cs.cogcomp.lbjava.learn.BatchTrainer) File(java.io.File) SparseAveragedPerceptron(edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron)
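
For context, a minimal driver sketch for this method. The corpus paths are placeholders, and the Data constructor arguments mirror the column-format ("-c") usage shown in Example 5 below; in the real pipeline the data is typically annotated with expressive features first, as in Example 5.

// Hypothetical driver for getLearningCurve; paths are placeholders.
Vector<Data> train = new Vector<>();
train.addElement(new Data("/path/to/train", "/path/to/train", "-c", new String[] {}, new String[] {}));
Vector<Data> test = new Vector<>();
test.addElement(new Data("/path/to/test", "/path/to/test", "-c", new String[] {}, new String[] {}));
// fixedNumIterations = -1 selects the automatic convergence criterion from the javadoc.
LearningCurveMultiDataset.getLearningCurve(train, test, -1);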

Example 2 with SparseAveragedPerceptron

Use of edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron in the cogcomp-nlp project by CogComp.

From the class NETesterMultiDataset, the method test:

/**
 * Allows the data format to be specified.
 * @param testDatapath path to the test data
 * @param verbose if true, print verbose output
 * @param dataFormat the input data format (e.g., "-c" for column format)
 * @param labelsToIgnoreInEvaluation labels to exclude from the evaluation, or null
 * @param labelsToAnonymizeInEvaluation labels to anonymize in the evaluation, or null
 * @param params the configured learning parameters
 * @throws Exception
 */
public static Vector<TestDiscrete[]> test(String testDatapath, boolean verbose, String dataFormat, Vector<String> labelsToIgnoreInEvaluation, Vector<String> labelsToAnonymizeInEvaluation, ParametersForLbjCode params) throws Exception {
    Data testData = new Data(testDatapath, testDatapath, dataFormat, new String[] {}, new String[] {}, params);
    ExpressiveFeaturesAnnotator.annotate(testData, params);
    Vector<Data> data = new Vector<>();
    data.addElement(testData);
    if (labelsToIgnoreInEvaluation != null)
        data.elementAt(0).setLabelsToIgnore(labelsToIgnoreInEvaluation);
    if (labelsToAnonymizeInEvaluation != null)
        data.elementAt(0).setLabelsToAnonymize(labelsToAnonymizeInEvaluation);
    NETaggerLevel1 taggerLevel1 = (NETaggerLevel1) params.taggerLevel1;
    NETaggerLevel2 taggerLevel2 = (NETaggerLevel2) params.taggerLevel2;
    SparseAveragedPerceptron sap1 = (SparseAveragedPerceptron) taggerLevel1.getBaseLTU();
    System.out.println("L1 SparseAveragedPerceptron learning rate = " + sap1.getLearningRate() + ", thickness = " + sap1.getPositiveThickness());
    if (params.featuresToUse.containsKey("PredictionsLevel1")) {
        SparseAveragedPerceptron sap2 = (SparseAveragedPerceptron) taggerLevel2.getBaseLTU();
        System.out.println("L2 SparseAveragedPerceptron learning rate = " + sap2.getLearningRate() + ", thickness = " + sap2.getPositiveThickness());
    }
    return printTestResultsByDataset(data, taggerLevel1, taggerLevel2, verbose, params);
}
Also used : LinkedVector(edu.illinois.cs.cogcomp.lbjava.parse.LinkedVector) Vector(java.util.Vector) FeatureVector(edu.illinois.cs.cogcomp.lbjava.classify.FeatureVector) SparseAveragedPerceptron(edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron)
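
A hedged usage sketch for this overload: the test path is a placeholder, and loadParams() is a hypothetical helper standing in for however the ParametersForLbjCode instance is actually configured.

// Hypothetical caller; loadParams() is a stand-in for real configuration loading.
ParametersForLbjCode params = loadParams();
Vector<TestDiscrete[]> results = NETesterMultiDataset.test(
        "/path/to/testData", // placeholder path
        true,                // verbose
        "-c",                // column format, as in Example 5
        null,                // no labels ignored in evaluation
        null,                // no labels anonymized in evaluation
        params);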

Example 3 with SparseAveragedPerceptron

Use of edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron in the cogcomp-nlp project by CogComp.

From the class NERAnnotator, the method getL1FeatureWeights:

/**
 * Return the features and the weight vectors for each SparseAveragedPerceptron
 * in the network learner for the L1 model.
 *
 * @return a map from each lexicon feature to its weight in each perceptron of the network
 */
public HashMap<Feature, double[]> getL1FeatureWeights() {
    if (!isInitialized()) {
        doInitialize();
    }
    SparseNetworkLearner l1 = this.params.taggerLevel1;
    Map lex = l1.getLexicon().getMap();
    OVector ov = l1.getNetwork();
    HashMap<Feature, double[]> weightsPerFeature = new HashMap<>();
    // for each feature, make a map entry keyed on feature name.
    for (Object mapentry : lex.entrySet()) {
        // Get the feature and the feature's weight index within each of
        // the learners in the network.
        Feature feature = (Feature) ((Entry) mapentry).getKey();
        int index = ((Integer) ((Entry) mapentry).getValue()).intValue();
        double[] weights = new double[ov.size()];
        for (int i = 0; i < ov.size(); i++) {
            SparseAveragedPerceptron sap = (SparseAveragedPerceptron) (ov.get(i));
            AveragedWeightVector awv = sap.getAveragedWeightVector();
            weights[i] = awv.getRawWeights().get(index);
        }
        weightsPerFeature.put(feature, weights);
    }
    return weightsPerFeature;
}
Also used : HashMap(java.util.HashMap) OVector(edu.illinois.cs.cogcomp.core.datastructures.vectors.OVector) SparseNetworkLearner(edu.illinois.cs.cogcomp.lbjava.learn.SparseNetworkLearner) Map(java.util.Map) Feature(edu.illinois.cs.cogcomp.lbjava.classify.Feature) SparseAveragedPerceptron(edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron) AveragedWeightVector(edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron.AveragedWeightVector)
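
Given the returned map, a caller can scan per-tag weights directly; a small illustrative sketch, where the annotator instance and the weight threshold are hypothetical:

// 'annotator' is an initialized NERAnnotator (hypothetical instance).
HashMap<Feature, double[]> l1Weights = annotator.getL1FeatureWeights();
for (Map.Entry<Feature, double[]> e : l1Weights.entrySet()) {
    double max = Double.NEGATIVE_INFINITY;
    // One weight per SparseAveragedPerceptron in the network, i.e., per tag value.
    for (double w : e.getValue())
        max = Math.max(max, w);
    if (max > 1.0) // arbitrary threshold, purely illustrative
        System.out.println(e.getKey() + " -> max weight " + max);
}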

Example 4 with SparseAveragedPerceptron

Use of edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron in the cogcomp-nlp project by CogComp.

From the class NERAnnotator, the method getL2FeatureWeights:

/**
 * Return the features and the weight vectors for each SparseAveragedPerceptron
 * in the network learner for the L2 model.
 *
 * @return a map from each lexicon feature to its weight in each perceptron of the network
 */
public HashMap<Feature, double[]> getL2FeatureWeights() {
    if (!isInitialized()) {
        doInitialize();
    }
    SparseNetworkLearner l2 = this.params.taggerLevel2;
    Map lex = l2.getLexicon().getMap();
    OVector ov = l2.getNetwork();
    HashMap<Feature, double[]> weightsPerFeature = new HashMap<>();
    // for each feature, make a map entry keyed on feature name.
    for (Object mapentry : lex.entrySet()) {
        // Get the feature and the feature's weight index within each of
        // the learners in the network.
        Feature feature = (Feature) ((Entry) mapentry).getKey();
        int index = ((Integer) ((Entry) mapentry).getValue()).intValue();
        double[] weights = new double[ov.size()];
        for (int i = 0; i < ov.size(); i++) {
            SparseAveragedPerceptron sap = (SparseAveragedPerceptron) (ov.get(i));
            AveragedWeightVector awv = sap.getAveragedWeightVector();
            weights[i] = awv.getRawWeights().get(index);
        }
        weightsPerFeature.put(feature, weights);
    }
    return weightsPerFeature;
}
Also used : HashMap(java.util.HashMap) OVector(edu.illinois.cs.cogcomp.core.datastructures.vectors.OVector) SparseNetworkLearner(edu.illinois.cs.cogcomp.lbjava.learn.SparseNetworkLearner) Map(java.util.Map) Feature(edu.illinois.cs.cogcomp.lbjava.classify.Feature) SparseAveragedPerceptron(edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron) AveragedWeightVector(edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron.AveragedWeightVector)
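
The L2 variant can be consumed the same way, for instance by dumping the full weight table with one row per feature and one tab-separated column per tag learner (again with a hypothetical annotator instance):

// Hypothetical dump of the L2 weight table to stdout.
HashMap<Feature, double[]> l2Weights = annotator.getL2FeatureWeights();
for (Map.Entry<Feature, double[]> e : l2Weights.entrySet()) {
    StringBuilder row = new StringBuilder(e.getKey().toString());
    for (double w : e.getValue())
        row.append('\t').append(w);
    System.out.println(row);
}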

Example 5 with SparseAveragedPerceptron

Use of edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron in the cogcomp-nlp project by CogComp.

From the class NETesterMultiDataset, an older overload of the method test:

/**
 * NB: assuming column format
 */
public static void test(String testDatapath, boolean verbose, Vector<String> labelsToIgnoreInEvaluation, Vector<String> labelsToAnonymizeInEvaluation) throws Exception {
    Data testData = new Data(testDatapath, testDatapath, "-c", new String[] {}, new String[] {});
    ExpressiveFeaturesAnnotator.annotate(testData);
    Vector<Data> data = new Vector<>();
    data.addElement(testData);
    if (labelsToIgnoreInEvaluation != null)
        data.elementAt(0).setLabelsToIgnore(labelsToIgnoreInEvaluation);
    if (labelsToAnonymizeInEvaluation != null)
        data.elementAt(0).setLabelsToAnonymize(labelsToAnonymizeInEvaluation);
    NETaggerLevel1 taggerLevel1 = new NETaggerLevel1(ParametersForLbjCode.currentParameters.pathToModelFile + ".level1", ParametersForLbjCode.currentParameters.pathToModelFile + ".level1.lex");
    SparseAveragedPerceptron sap1 = (SparseAveragedPerceptron) taggerLevel1.getBaseLTU();
    System.out.println("L1 SparseAveragedPerceptron learning rate = " + sap1.getLearningRate() + ", thickness = " + sap1.getPositiveThickness());
    NETaggerLevel2 taggerLevel2 = null;
    if (ParametersForLbjCode.currentParameters.featuresToUse.containsKey("PredictionsLevel1")) {
        taggerLevel2 = new NETaggerLevel2(ParametersForLbjCode.currentParameters.pathToModelFile + ".level2", ParametersForLbjCode.currentParameters.pathToModelFile + ".level2.lex");
        SparseAveragedPerceptron sap2 = (SparseAveragedPerceptron) taggerLevel2.getBaseLTU();
        System.out.println("L2 SparseAveragedPerceptron learning rate = " + sap2.getLearningRate() + ", thickness = " + sap2.getPositiveThickness());
    }
    printTestResultsByDataset(data, taggerLevel1, taggerLevel2, verbose);
}
Also used : LinkedVector(edu.illinois.cs.cogcomp.lbjava.parse.LinkedVector) Vector(java.util.Vector) FeatureVector(edu.illinois.cs.cogcomp.lbjava.classify.FeatureVector) SparseAveragedPerceptron(edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron)
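
A usage sketch for this legacy overload: it reads everything else from the global ParametersForLbjCode.currentParameters, which is assumed to have been populated by the configuration loader beforehand, so the caller supplies only the data path (a placeholder here) and evaluation options.

// Assumes ParametersForLbjCode.currentParameters is already configured.
NETesterMultiDataset.test(
        "/path/to/columnFormatTestData", // placeholder path; column format assumed
        true,  // verbose
        null,  // labelsToIgnoreInEvaluation
        null); // labelsToAnonymizeInEvaluation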

Aggregations

SparseAveragedPerceptron (edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron): 9 uses
BatchTrainer (edu.illinois.cs.cogcomp.lbjava.learn.BatchTrainer): 4 uses
NETaggerLevel1 (edu.illinois.cs.cogcomp.ner.LbjFeatures.NETaggerLevel1): 3 uses
NETaggerLevel2 (edu.illinois.cs.cogcomp.ner.LbjFeatures.NETaggerLevel2): 3 uses
LocalCommaClassifier (edu.illinois.cs.cogcomp.comma.lbj.LocalCommaClassifier): 2 uses
EvaluateDiscrete (edu.illinois.cs.cogcomp.comma.utils.EvaluateDiscrete): 2 uses
OVector (edu.illinois.cs.cogcomp.core.datastructures.vectors.OVector): 2 uses
Feature (edu.illinois.cs.cogcomp.lbjava.classify.Feature): 2 uses
FeatureVector (edu.illinois.cs.cogcomp.lbjava.classify.FeatureVector): 2 uses
TestDiscrete (edu.illinois.cs.cogcomp.lbjava.classify.TestDiscrete): 2 uses
Lexicon (edu.illinois.cs.cogcomp.lbjava.learn.Lexicon): 2 uses
AveragedWeightVector (edu.illinois.cs.cogcomp.lbjava.learn.SparseAveragedPerceptron.AveragedWeightVector): 2 uses
SparseNetworkLearner (edu.illinois.cs.cogcomp.lbjava.learn.SparseNetworkLearner): 2 uses
FoldParser (edu.illinois.cs.cogcomp.lbjava.parse.FoldParser): 2 uses
LinkedVector (edu.illinois.cs.cogcomp.lbjava.parse.LinkedVector): 2 uses
Parser (edu.illinois.cs.cogcomp.lbjava.parse.Parser): 2 uses
File (java.io.File): 2 uses
IOException (java.io.IOException): 2 uses
HashMap (java.util.HashMap): 2 uses
Map (java.util.Map): 2 uses