
Example 6 with SRW

Use of edu.cmu.ml.proppr.learn.SRW in project ProPPR by TeamCohen.

The class TrainerTest, method setup:

@Before
public void setup() {
    super.setup();
    // Configure an SRW learner with an L2 regularization schedule and a ReLU squashing function.
    this.srw = new SRW();
    this.srw.setRegularizer(new RegularizationSchedule(this.srw, new RegularizeL2()));
    this.srw.setSquashingFunction(new ReLU<String>());
    this.initTrainer();
    // Query distribution: start the walk at node r0 with weight 1.0.
    query = new TIntDoubleHashMap();
    query.put(nodes.getId("r0"), 1.0);
    // Build magicNumber^2 serialized training examples over brGraph, pairing each
    // positive node b_k with each negative node r_p.
    examples = new ArrayList<String>();
    for (int k = 0; k < this.magicNumber; k++) {
        for (int p = 0; p < this.magicNumber; p++) {
            examples.add(new PosNegRWExample(brGraph, query, new int[] { nodes.getId("b" + k) }, new int[] { nodes.getId("r" + p) }).serialize());
        }
    }
}
Also used : RegularizationSchedule(edu.cmu.ml.proppr.learn.RegularizationSchedule) TIntDoubleHashMap(gnu.trove.map.hash.TIntDoubleHashMap) RegularizeL2(edu.cmu.ml.proppr.learn.RegularizeL2) PosNegRWExample(edu.cmu.ml.proppr.examples.PosNegRWExample) SRW(edu.cmu.ml.proppr.learn.SRW) Before(org.junit.Before)

Example 7 with SRW

Use of edu.cmu.ml.proppr.learn.SRW in project ProPPR by TeamCohen.

The class Trainer, method train:

public ParamVector<String, ?> train(SymbolTable<String> masterFeatures, Iterable<String> examples, LearningGraphBuilder builder, ParamVector<String, ?> initialParamVec, int numEpochs) {
    ParamVector<String, ?> paramVec = this.masterLearner.setupParams(initialParamVec);
    if (masterFeatures.size() > 0)
        LearningGraphBuilder.setFeatures(masterFeatures);
    NamedThreadFactory workingThreads = new NamedThreadFactory("work-");
    NamedThreadFactory cleaningThreads = new NamedThreadFactory("cleanup-");
    ThreadPoolExecutor workingPool;
    ExecutorService cleanPool;
    TrainingStatistics total = new TrainingStatistics();
    StoppingCriterion stopper = new StoppingCriterion(numEpochs, this.stoppingPercent, this.stoppingEpoch);
    boolean graphSizesStatusLog = true;
    StatusLogger stattime = new StatusLogger();
    // repeat until ready to stop
    while (!stopper.satisified()) {
        // set up current epoch
        this.epoch++;
        for (SRW learner : this.learners.values()) {
            learner.setEpoch(epoch);
            learner.clearLoss();
        }
        log.info("epoch " + epoch + " ...");
        status.tick();
        // reset counters & file pointers
        this.statistics = new TrainingStatistics();
        workingThreads.reset();
        cleaningThreads.reset();
        workingPool = new ThreadPoolExecutor(this.nthreads, Integer.MAX_VALUE, 10, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), workingThreads);
        cleanPool = Executors.newSingleThreadExecutor(cleaningThreads);
        // run examples
        int id = 1;
        stattime.start();
        int countdown = -1;
        Trainer notify = null;
        for (String s : examples) {
            if (log.isDebugEnabled())
                log.debug("Queue size " + (workingPool.getTaskCount() - workingPool.getCompletedTaskCount()));
            statistics.updateReadingStatistics(stattime.sinceLast());
            /*
             * Throttling behavior:
             * Once the number of unfinished tasks exceeds 1.5x the number of threads,
             * we attach a 'notify' object to the next nthreads training tasks. The
             * master thread then gathers 'notify' signals until the number of
             * unfinished tasks is no longer greater than the number of threads,
             * at which point it starts adding tasks again.
             *
             * This works more or less fine, since the master thread stops pulling
             * examples from disk when there are at most 2.5x training examples in the
             * queue: the original 1.5x backlog, plus the nthreads training tasks
             * carrying active 'notify' objects. (There are an additional nthreads
             * parsing tasks in the queue, but those don't take up much memory, so we
             * don't care.) This lets us read in a good-sized buffer without blowing
             * up the heap.
             *
             * Worst case: none of the backlog is cleared before the master thread
             * enters the synchronized block. nthreads-1 threads will be training long
             * jobs, and the one free thread works through the 0.5x backlog and all
             * nthreads countdown examples. The notify() sent by the final countdown
             * example arrives when there are nthreads unfinished tasks in the queue,
             * and the master thread exits the synchronized block and proceeds.
             *
             * Best case: the backlog is already cleared by the time the master thread
             * enters the synchronized block. The while() loop exits immediately, and
             * the notify() signals from the countdown examples have no effect.
             */
            if (countdown > 0) {
                if (log.isDebugEnabled())
                    log.debug("Countdown " + countdown);
                countdown--;
            } else if (countdown == 0) {
                if (log.isDebugEnabled())
                    log.debug("Countdown " + countdown + "; throttling:");
                countdown--;
                notify = null;
                try {
                    synchronized (this) {
                        if (log.isDebugEnabled())
                            log.debug("Clearing training queue...");
                        while (workingPool.getTaskCount() - workingPool.getCompletedTaskCount() > this.nthreads) this.wait();
                        if (log.isDebugEnabled())
                            log.debug("Queue cleared.");
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            } else if (workingPool.getTaskCount() - workingPool.getCompletedTaskCount() > 1.5 * this.nthreads) {
                if (log.isDebugEnabled())
                    log.debug("Starting countdown");
                countdown = this.nthreads;
                notify = this;
            }
            Future<PosNegRWExample> parsed = workingPool.submit(new Parse(s, builder, id));
            Future<ExampleStats> trained = workingPool.submit(new Train(parsed, paramVec, id, notify));
            cleanPool.submit(new TraceLosses(trained, id));
            id++;
            stattime.tick();
            if (log.isInfoEnabled() && status.due(1))
                log.info("parsed: " + id + " trained: " + statistics.exampleSetSize);
        }
        cleanEpoch(workingPool, cleanPool, paramVec, stopper, id, total);
        if (graphSizesStatusLog) {
            log.info("Dataset size stats: " + statistics.totalGraphSize + " total nodes / max " + statistics.maxGraphSize + " / avg " + (statistics.totalGraphSize / id));
            graphSizesStatusLog = false;
        }
    }
    log.info("Reading  statistics: min " + total.minReadTime + " / max " + total.maxReadTime + " / total " + total.readTime);
    log.info("Parsing  statistics: min " + total.minParseTime + " / max " + total.maxParseTime + " / total " + total.parseTime);
    log.info("Training statistics: min " + total.minTrainTime + " / max " + total.maxTrainTime + " / total " + total.trainTime);
    return paramVec;
}
Also used : StatusLogger(edu.cmu.ml.proppr.util.StatusLogger) NamedThreadFactory(edu.cmu.ml.proppr.util.multithreading.NamedThreadFactory) StoppingCriterion(edu.cmu.ml.proppr.learn.tools.StoppingCriterion) PosNegRWExample(edu.cmu.ml.proppr.examples.PosNegRWExample) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) ExecutorService(java.util.concurrent.ExecutorService) SRW(edu.cmu.ml.proppr.learn.SRW) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor)
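The throttling scheme described in the comment inside train() (the master thread pauses in wait() until throttled training tasks call notify()) can be illustrated in isolation. Below is a minimal, self-contained sketch using only the Java standard library; the class ThrottledProducer and all of its names are hypothetical and are not part of ProPPR. It uses a timed wait() as a safety net against a missed notification, where ProPPR's Trainer relies on the worst-case argument spelled out in its comment.

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

// Hypothetical illustration of the Trainer's throttling idea: a single producer
// submits work to a pool with an unbounded queue, but pauses once the backlog
// exceeds 1.5x the worker count and resumes when completions shrink it again.
public class ThrottledProducer {

    private final int nthreads = 4;
    private final ThreadPoolExecutor pool = new ThreadPoolExecutor(
            nthreads, nthreads, 10, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());

    public void run(Iterable<Runnable> work) throws InterruptedException {
        // -1: not throttling; >0: tasks still to submit with a notify attached; 0: time to block
        int countdown = -1;
        boolean attachNotify = false;
        for (Runnable job : work) {
            long backlog = pool.getTaskCount() - pool.getCompletedTaskCount();
            if (countdown > 0) {
                countdown--;
            } else if (countdown == 0) {
                countdown--;
                attachNotify = false;
                synchronized (this) {
                    // Block until the workers drain the backlog down to nthreads.
                    // (The timed wait is a safety net for this sketch; Trainer uses a plain wait().)
                    while (pool.getTaskCount() - pool.getCompletedTaskCount() > nthreads) {
                        this.wait(1000);
                    }
                }
            } else if (backlog > 1.5 * nthreads) {
                // The producer is too far ahead: the next nthreads tasks notify us as they finish.
                countdown = nthreads;
                attachNotify = true;
            }
            final boolean notifyMaster = attachNotify;
            pool.submit(() -> {
                job.run();
                if (notifyMaster) {
                    synchronized (ThrottledProducer.this) {
                        ThrottledProducer.this.notify();
                    }
                }
            });
        }
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.HOURS);
    }
}

In Trainer itself the same roles are played by the countdown counter, the notify reference handed to each Train task, and the synchronized wait() loop shown in the method above.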

Aggregations

SRW (edu.cmu.ml.proppr.learn.SRW): 7 uses
PosNegRWExample (edu.cmu.ml.proppr.examples.PosNegRWExample): 4 uses
ExecutorService (java.util.concurrent.ExecutorService): 3 uses
RegularizationSchedule (edu.cmu.ml.proppr.learn.RegularizationSchedule): 2 uses
RegularizeL2 (edu.cmu.ml.proppr.learn.RegularizeL2): 2 uses
StoppingCriterion (edu.cmu.ml.proppr.learn.tools.StoppingCriterion): 2 uses
StatusLogger (edu.cmu.ml.proppr.util.StatusLogger): 2 uses
NamedThreadFactory (edu.cmu.ml.proppr.util.multithreading.NamedThreadFactory): 2 uses
TIntDoubleHashMap (gnu.trove.map.hash.TIntDoubleHashMap): 2 uses
Before (org.junit.Before): 2 uses
ArrayLearningGraphBuilder (edu.cmu.ml.proppr.graph.ArrayLearningGraphBuilder): 1 use
ZeroGradientData (edu.cmu.ml.proppr.learn.SRW.ZeroGradientData): 1 use
LossData (edu.cmu.ml.proppr.learn.tools.LossData): 1 use
RWExampleParser (edu.cmu.ml.proppr.learn.tools.RWExampleParser): 1 use
ModuleConfiguration (edu.cmu.ml.proppr.util.ModuleConfiguration): 1 use
ParsedFile (edu.cmu.ml.proppr.util.ParsedFile): 1 use
ArrayList (java.util.ArrayList): 1 use
HashSet (java.util.HashSet): 1 use
Callable (java.util.concurrent.Callable): 1 use
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 1 use