Search in sources :

Example 6 with LSBoost

use of edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoost in project pyramid by cheng-li.

In the class GBRegressor, the method train:

/**
 * Trains an LSBoost (least-squares gradient boosting) regression model from the
 * given configuration, optionally reporting train/test RMSE at a fixed interval,
 * then serializes the model and writes predictions on the training set.
 *
 * @param config configuration holding input paths, tree/boosting hyperparameters,
 *               progress-reporting flags, and the output folder
 * @throws Exception if data loading, training, or serialization fails
 */
private static void train(Config config) throws Exception {
    String sparsity = config.getString("input.matrixType");
    DataSetType dataSetType = null;
    switch(sparsity) {
        case "dense":
            dataSetType = DataSetType.REG_DENSE;
            break;
        case "sparse":
            dataSetType = DataSetType.REG_SPARSE;
            break;
        default:
            throw new IllegalArgumentException("input.matrixType should be dense or sparse");
    }
    RegDataSet trainSet = TRECFormat.loadRegDataSet(config.getString("input.trainData"), dataSetType, true);
    // Hoist loop-invariant config lookups out of the training loop: these flags
    // do not change between iterations, so there is no reason to re-read them.
    boolean showTrainProgress = config.getBoolean("train.showTrainProgress");
    boolean showTestProgress = config.getBoolean("train.showTestProgress");
    RegDataSet testSet = null;
    if (showTestProgress) {
        // The test set is only needed (and only loaded) when test progress is reported.
        testSet = TRECFormat.loadRegDataSet(config.getString("input.testData"), dataSetType, true);
    }
    LSBoost lsBoost = new LSBoost();
    RegTreeConfig regTreeConfig = new RegTreeConfig().setMaxNumLeaves(config.getInt("train.numLeaves"));
    RegTreeFactory regTreeFactory = new RegTreeFactory(regTreeConfig);
    LSBoostOptimizer optimizer = new LSBoostOptimizer(lsBoost, trainSet, regTreeFactory);
    optimizer.setShrinkage(config.getDouble("train.shrinkage"));
    optimizer.initialize();
    int progressInterval = config.getInt("train.showProgress.interval");
    int numIterations = config.getInt("train.numIterations");
    for (int i = 1; i <= numIterations; i++) {
        System.out.println("iteration " + i);
        optimizer.iterate();
        // Report on every progressInterval-th iteration and always on the last one.
        boolean reportNow = i % progressInterval == 0 || i == numIterations;
        if (showTrainProgress && reportNow) {
            System.out.println("training RMSE = " + RMSE.rmse(lsBoost, trainSet));
        }
        if (showTestProgress && reportNow) {
            System.out.println("test RMSE = " + RMSE.rmse(lsBoost, testSet));
        }
    }
    System.out.println("training done!");
    String output = config.getString("output.folder");
    new File(output).mkdirs();
    File serializedModel = new File(output, "model");
    Serialization.serialize(lsBoost, serializedModel);
    System.out.println("model saved to " + serializedModel.getAbsolutePath());
    File reportFile = new File(output, "train_predictions.txt");
    report(lsBoost, trainSet, reportFile);
    System.out.println("predictions on the training set are written to " + reportFile.getAbsolutePath());
}
Also used : RegTreeConfig(edu.neu.ccs.pyramid.regression.regression_tree.RegTreeConfig) DataSetType(edu.neu.ccs.pyramid.dataset.DataSetType) LSBoostOptimizer(edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoostOptimizer) RegTreeFactory(edu.neu.ccs.pyramid.regression.regression_tree.RegTreeFactory) RegDataSet(edu.neu.ccs.pyramid.dataset.RegDataSet) File(java.io.File) LSBoost(edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoost)

Example 7 with LSBoost

use of edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoost in project pyramid by cheng-li.

In the class Calibration, the method calibrate:

/**
 * Trains a set calibrator: builds a regression calibration dataset from the
 * calibration split's predictions/scores, fits an LSBoost calibrator with
 * monotonicity constraints (validated on the validation split), and serializes
 * both the calibrator and the prediction-feature extractor to the output dir.
 *
 * @param config configuration holding the train/calib/valid data paths,
 *               prediction files, and the output directory
 * @param logger logger for progress messages
 * @throws Exception if data loading, training, or serialization fails
 */
private static void calibrate(Config config, Logger logger) throws Exception {
    logger.info("start training calibrator");
    MultiLabelClfDataSet train = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.trainData"), DataSetType.ML_CLF_SEQ_SPARSE, true);
    MultiLabelClfDataSet cal = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.calibData"), DataSetType.ML_CLF_SEQ_SPARSE, true);
    MultiLabel[] calPred = loadPredictions(config.getString("input.calibPredictions"));
    int[] calIds = loadIds(config.getString("input.calibPredictions"));
    Vector[] calibScore = loadFeatures(config.getString("input.calibPredictions"));
    Pair<RegDataSet, PredictionFeatureExtractor> pair = createCaliData(cal, calPred, calibScore, calIds, train);
    RegDataSet calibRegData = pair.getFirst();
    PredictionFeatureExtractor predictionFeatureExtractor = pair.getSecond();
    // Reuse the local just extracted instead of calling pair.getSecond() a second time.
    int[] monotonicity = predictionFeatureExtractor.featureMonotonicity();
    MultiLabelClfDataSet valid = TRECFormat.loadMultiLabelClfDataSet(config.getString("input.validData"), DataSetType.ML_CLF_SEQ_SPARSE, true);
    MultiLabel[] validPred = loadPredictions(config.getString("input.validPredictions"));
    int[] validIds = loadIds(config.getString("input.validPredictions"));
    Vector[] validScore = loadFeatures(config.getString("input.validPredictions"));
    // The validation set reuses the extractor built from the calibration split so
    // both regression datasets share the same feature space.
    RegDataSet validRegData = createCaliData(valid, validPred, validScore, validIds, predictionFeatureExtractor).getFirst();
    LSBoost lsBoost = trainCalibrator(calibRegData, validRegData, monotonicity);
    VectorCalibrator vectorCalibrator = new RegressorCalibrator(lsBoost);
    Serialization.serialize(vectorCalibrator, Paths.get(config.getString("output.dir"), "set_calibrator").toFile());
    Serialization.serialize(predictionFeatureExtractor, Paths.get(config.getString("output.dir"), "prediction_feature_extractor").toFile());
    logger.info("finish training calibrator");
}
Also used : DenseVector(org.apache.mahout.math.DenseVector) Vector(org.apache.mahout.math.Vector) LSBoost(edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoost)

Example 8 with LSBoost

use of edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoost in project pyramid by cheng-li.

In the class RerankerTrainer, the method train:

/**
 * Trains an LSBoost reranking model with instance weights and optional
 * monotonicity constraints, using early stopping on validation MSE
 * (evaluated every 10 iterations, patience 5), and wraps the best
 * snapshot in a {@link Reranker}.
 *
 * @param regDataSet                 weighted training regression dataset
 * @param instanceWeights            per-instance training weights
 * @param classProbEstimator         class-probability estimator passed through to the Reranker
 * @param predictionFeatureExtractor supplies feature monotonicity and is passed through to the Reranker
 * @param labelCalibrator            label calibrator passed through to the Reranker
 * @param validation                 validation dataset used for early stopping
 * @return a Reranker built around the best model found during training
 */
public Reranker train(RegDataSet regDataSet, double[] instanceWeights, MultiLabelClassifier.ClassProbEstimator classProbEstimator, PredictionFeatureExtractor predictionFeatureExtractor, LabelCalibrator labelCalibrator, RegDataSet validation) {
    LSBoost lsBoost = new LSBoost();
    RegTreeConfig regTreeConfig = new RegTreeConfig().setMaxNumLeaves(numLeaves).setMinDataPerLeaf(minDataPerLeaf).setMonotonicityType(monotonicityType);
    RegTreeFactory regTreeFactory = new RegTreeFactory(regTreeConfig);
    LSBoostOptimizer optimizer = new LSBoostOptimizer(lsBoost, regDataSet, regTreeFactory, instanceWeights, regDataSet.getLabels());
    if (!monotonicityType.equals("none")) {
        // The optimizer expects one monotonicity row per ensemble; a single row
        // covering all features is filled from the feature extractor.
        int[][] mono = new int[1][regDataSet.getNumFeatures()];
        mono[0] = predictionFeatureExtractor.featureMonotonicity();
        optimizer.setMonotonicity(mono);
    }
    optimizer.setShrinkage(shrinkage);
    optimizer.initialize();
    EarlyStopper earlyStopper = new EarlyStopper(EarlyStopper.Goal.MINIMIZE, 5);
    LSBoost bestModel = null;
    for (int i = 1; i <= maxIter; i++) {
        optimizer.iterate();
        // Evaluate every 10 iterations, and always on the final iteration.
        if (i % 10 == 0 || i == maxIter) {
            double mse = MSE.mse(lsBoost, validation);
            earlyStopper.add(i, mse);
            if (earlyStopper.getBestIteration() == i) {
                try {
                    // Snapshot the current model so later (worse) iterations don't overwrite it.
                    bestModel = (LSBoost) Serialization.deepCopy(lsBoost);
                } catch (IOException | ClassNotFoundException e) {
                    e.printStackTrace();
                }
            }
            if (earlyStopper.shouldStop()) {
                break;
            }
        }
    }
    // If deepCopy never succeeded (or never ran), fall back to the live model so
    // the Reranker is never constructed around a null model.
    if (bestModel == null) {
        bestModel = lsBoost;
    }
    // System.out.println("best iteration = "+earlyStopper.getBestIteration());
    return new Reranker(bestModel, classProbEstimator, numCandidates, predictionFeatureExtractor, labelCalibrator);
}
Also used : RegTreeConfig(edu.neu.ccs.pyramid.regression.regression_tree.RegTreeConfig) LSBoostOptimizer(edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoostOptimizer) RegTreeFactory(edu.neu.ccs.pyramid.regression.regression_tree.RegTreeFactory) EarlyStopper(edu.neu.ccs.pyramid.optimization.EarlyStopper) IOException(java.io.IOException) LSBoost(edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoost)

Example 9 with LSBoost

use of edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoost in project pyramid by cheng-li.

In the class GBRegressor, the method test:

/**
 * Loads a previously trained LSBoost model, evaluates RMSE on the test set,
 * and writes per-instance predictions to the test report folder.
 *
 * @param config configuration holding the output folder, matrix type, and test data path
 * @param logger logger for results and progress messages
 * @throws Exception if deserialization, data loading, or report writing fails
 */
private static void test(Config config, Logger logger) throws Exception {
    String output = config.getString("output.folder");
    File serializedModel = new File(output, "model");
    LSBoost lsBoost = (LSBoost) Serialization.deserialize(serializedModel);
    String sparsity = config.getString("input.matrixType");
    DataSetType dataSetType = null;
    switch(sparsity) {
        case "dense":
            dataSetType = DataSetType.REG_DENSE;
            break;
        case "sparse":
            dataSetType = DataSetType.REG_SPARSE;
            break;
        default:
            throw new IllegalArgumentException("input.matrixType should be dense or sparse");
    }
    RegDataSet testSet = TRECFormat.loadRegDataSet(config.getString("input.testData"), dataSetType, true);
    logger.info("test RMSE = " + RMSE.rmse(lsBoost, testSet));
    String testReportName = config.getString("output.testReportFolderName");
    File reportFile = Paths.get(output, testReportName, "test_predictions.txt").toFile();
    // Ensure the report sub-folder exists before writing, mirroring the mkdirs()
    // call in train(); mkdirs() is a no-op if the directory is already present.
    reportFile.getParentFile().mkdirs();
    report(lsBoost, testSet, reportFile);
    logger.info("predictions on the test set are written to " + reportFile.getAbsolutePath());
}
Also used : DataSetType(edu.neu.ccs.pyramid.dataset.DataSetType) RegDataSet(edu.neu.ccs.pyramid.dataset.RegDataSet) File(java.io.File) LSBoost(edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoost)

Aggregations

LSBoost (edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoost)9 RegDataSet (edu.neu.ccs.pyramid.dataset.RegDataSet)6 LSBoostOptimizer (edu.neu.ccs.pyramid.regression.least_squares_boost.LSBoostOptimizer)6 RegTreeConfig (edu.neu.ccs.pyramid.regression.regression_tree.RegTreeConfig)6 RegTreeFactory (edu.neu.ccs.pyramid.regression.regression_tree.RegTreeFactory)6 DataSetType (edu.neu.ccs.pyramid.dataset.DataSetType)4 File (java.io.File)4 EarlyStopper (edu.neu.ccs.pyramid.optimization.EarlyStopper)2 IOException (java.io.IOException)2 DenseVector (org.apache.mahout.math.DenseVector)1 Vector (org.apache.mahout.math.Vector)1