Search in sources:

Example 1 with ValidatingTrainer

use of com.simiacryptus.mindseye.opt.ValidatingTrainer in project MindsEye by SimiaCryptus.

From class QQNTest, method train:

@Override
public void train(@Nonnull final NotebookOutput log, @Nonnull final Layer network, @Nonnull final Tensor[][] trainingData, final TrainingMonitor monitor) {
    log.code(() -> {
        // Wrap the candidate network with an entropy loss head to form the training objective.
        @Nonnull final SimpleLossNetwork supervisedNetwork = new SimpleLossNetwork(network, new EntropyLossLayer());
        // Training subject samples 1000 rows (cap 10000); validation subject evaluates the full array.
        @Nonnull final ValidatingTrainer validatingTrainer = new ValidatingTrainer(
            new SampledArrayTrainable(trainingData, supervisedNetwork, 1000, 10000),
            new ArrayTrainable(trainingData, supervisedNetwork))
            .setMonitor(monitor);
        // The single training phase is steered by the QQN orientation strategy.
        validatingTrainer.getRegimen().get(0).setOrientation(new QQN());
        return validatingTrainer.setTimeout(5, TimeUnit.MINUTES).setMaxIterations(500).run();
    });
}
Also used : Nonnull(javax.annotation.Nonnull) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) EntropyLossLayer(com.simiacryptus.mindseye.layers.java.EntropyLossLayer) ValidatingTrainer(com.simiacryptus.mindseye.opt.ValidatingTrainer) ArrayTrainable(com.simiacryptus.mindseye.eval.ArrayTrainable) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) SimpleLossNetwork(com.simiacryptus.mindseye.network.SimpleLossNetwork)

Example 2 with ValidatingTrainer

use of com.simiacryptus.mindseye.opt.ValidatingTrainer in project MindsEye by SimiaCryptus.

From class ClassifyProblem, method run:

@Nonnull
@Override
public ClassifyProblem run(@Nonnull final NotebookOutput log) {
    // Monitor appends step records to `history`, which feeds the plots below.
    @Nonnull final TrainingMonitor monitor = TestUtil.getMonitor(history);
    final Tensor[][] trainingData = getTrainingData(log);
    // Build the image-to-category classifier from the injected forward-network factory.
    @Nonnull final DAGNetwork network = fwdFactory.imageToVector(log, categories);
    log.h3("Network Diagram");
    log.code(() -> {
        // Render the network topology as a PNG via Graphviz.
        return Graphviz.fromGraph(TestUtil.toGraph(network)).height(400).width(600).render(Format.PNG).toImage();
    });
    log.h3("Training");
    // Entropy loss head turns the classifier into a supervised training objective.
    @Nonnull final SimpleLossNetwork supervisedNetwork = new SimpleLossNetwork(network, new EntropyLossLayer());
    TestUtil.instrumentPerformance(supervisedNetwork);
    // Initial sample: ~20% of the data, floored at min(10, half the data).
    int initialSampleSize = Math.max(trainingData.length / 5, Math.min(10, trainingData.length / 2));
    // Sampled subject drives training; the full-array subject is used for validation scoring.
    @Nonnull final ValidatingTrainer trainer = optimizer.train(log, new SampledArrayTrainable(trainingData, supervisedNetwork, initialSampleSize, getBatchSize()), new ArrayTrainable(trainingData, supervisedNetwork, getBatchSize()), monitor);
    log.code(() -> {
        trainer.setTimeout(timeoutMinutes, TimeUnit.MINUTES).setMaxIterations(10000).run();
    });
    if (!history.isEmpty()) {
        // Plot loss vs. iteration and vs. wall-clock time from the recorded step history.
        log.code(() -> {
            return TestUtil.plot(history);
        });
        log.code(() -> {
            return TestUtil.plotTime(history);
        });
    }
    try {
        // NOTE(review): modelNo is incremented here AND again below for the model name,
        // so the plot and model filenames carry different indices — confirm intended.
        @Nonnull String filename = log.getName() + "_" + ClassifyProblem.modelNo++ + "_plot.png";
        ImageIO.write(Util.toImage(TestUtil.plot(history)), "png", log.file(filename));
        @Nonnull File file = new File(log.getResourceDir(), filename);
        log.appendFrontMatterProperty("result_plot", file.toString(), ";");
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    TestUtil.extractPerformance(log, supervisedNetwork);
    // Persist the trained network as JSON and record its name in the log front matter.
    @Nonnull final String modelName = "classification_model_" + ClassifyProblem.modelNo++ + ".json";
    log.appendFrontMatterProperty("result_model", modelName, ";");
    log.p("Saved model as " + log.file(network.getJson().toString(), modelName, modelName));
    log.h3("Validation");
    log.p("If we apply our model against the entire validation dataset, we get this accuracy:");
    log.code(() -> {
        // Accuracy in percent: fraction of validation rows whose top prediction matches the label.
        // NOTE(review): getAsDouble() throws if the validation stream is empty — assumes non-empty data.
        return data.validationData().mapToDouble(labeledObject -> predict(network, labeledObject)[0] == parse(labeledObject.label) ? 1 : 0).average().getAsDouble() * 100;
    });
    log.p("Let's examine some incorrectly predicted results in more detail:");
    log.code(() -> {
        try {
            @Nonnull final TableOutput table = new TableOutput();
            // Evaluate validation data in batches of 100, render each row, keep the first 10
            // rows that toRow() deems reportable (non-null).
            Lists.partition(data.validationData().collect(Collectors.toList()), 100).stream().flatMap(batch -> {
                @Nonnull TensorList batchIn = TensorArray.create(batch.stream().map(x -> x.data).toArray(i -> new Tensor[i]));
                TensorList batchOut = network.eval(new ConstantResult(batchIn)).getData();
                return IntStream.range(0, batchOut.length()).mapToObj(i -> toRow(log, batch.get(i), batchOut.get(i).getData()));
            }).filter(x -> null != x).limit(10).forEach(table::putRow);
            return table;
        } catch (@Nonnull final IOException e) {
            throw new RuntimeException(e);
        }
    });
    return this;
}
Also used : IntStream(java.util.stream.IntStream) Graphviz(guru.nidi.graphviz.engine.Graphviz) EntropyLossLayer(com.simiacryptus.mindseye.layers.java.EntropyLossLayer) Arrays(java.util.Arrays) TableOutput(com.simiacryptus.util.TableOutput) Tensor(com.simiacryptus.mindseye.lang.Tensor) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) Lists(com.google.common.collect.Lists) ConstantResult(com.simiacryptus.mindseye.lang.ConstantResult) Format(guru.nidi.graphviz.engine.Format) LabeledObject(com.simiacryptus.util.test.LabeledObject) TrainingMonitor(com.simiacryptus.mindseye.opt.TrainingMonitor) ImageIO(javax.imageio.ImageIO) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) Layer(com.simiacryptus.mindseye.lang.Layer) ValidatingTrainer(com.simiacryptus.mindseye.opt.ValidatingTrainer) StepRecord(com.simiacryptus.mindseye.test.StepRecord) NotebookOutput(com.simiacryptus.util.io.NotebookOutput) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable) Util(com.simiacryptus.util.Util) SimpleLossNetwork(com.simiacryptus.mindseye.network.SimpleLossNetwork) IOException(java.io.IOException) TestUtil(com.simiacryptus.mindseye.test.TestUtil) Collectors(java.util.stream.Collectors) File(java.io.File) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Stream(java.util.stream.Stream) ArrayTrainable(com.simiacryptus.mindseye.eval.ArrayTrainable) TensorList(com.simiacryptus.mindseye.lang.TensorList) TensorArray(com.simiacryptus.mindseye.lang.TensorArray) DAGNetwork(com.simiacryptus.mindseye.network.DAGNetwork) Comparator(java.util.Comparator) Nonnull(javax.annotation.Nonnull) ConstantResult(com.simiacryptus.mindseye.lang.ConstantResult) DAGNetwork(com.simiacryptus.mindseye.network.DAGNetwork) EntropyLossLayer(com.simiacryptus.mindseye.layers.java.EntropyLossLayer) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) ArrayTrainable(com.simiacryptus.mindseye.eval.ArrayTrainable) 
IOException(java.io.IOException) TensorList(com.simiacryptus.mindseye.lang.TensorList) SimpleLossNetwork(com.simiacryptus.mindseye.network.SimpleLossNetwork) TrainingMonitor(com.simiacryptus.mindseye.opt.TrainingMonitor) TableOutput(com.simiacryptus.util.TableOutput) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) ValidatingTrainer(com.simiacryptus.mindseye.opt.ValidatingTrainer) File(java.io.File) Nonnull(javax.annotation.Nonnull)

Example 3 with ValidatingTrainer

use of com.simiacryptus.mindseye.opt.ValidatingTrainer in project MindsEye by SimiaCryptus.

From class LBFGSTest, method train:

@Override
public void train(@Nonnull final NotebookOutput log, @Nonnull final Layer network, @Nonnull final Tensor[][] trainingData, final TrainingMonitor monitor) {
    log.code(() -> {
        // Supervised objective: candidate network plus an entropy loss head.
        @Nonnull final SimpleLossNetwork supervisedNetwork = new SimpleLossNetwork(network, new EntropyLossLayer());
        // Train on a 1000-row sample (cap 10000); validate against the cached full-array trainable.
        @Nonnull final ValidatingTrainer validatingTrainer = new ValidatingTrainer(
            new SampledArrayTrainable(trainingData, supervisedNetwork, 1000, 10000),
            new ArrayTrainable(trainingData, supervisedNetwork).cached())
            .setMonitor(monitor);
        // Phase 0 uses LBFGS; LBFGS-named cursors get a quadratic search seeded at rate 1.0,
        // all others a default quadratic search.
        validatingTrainer.getRegimen().get(0)
            .setOrientation(new LBFGS())
            .setLineSearchFactory(cursorName -> cursorName.toString().contains("LBFGS")
                ? new QuadraticSearch().setCurrentRate(1.0)
                : new QuadraticSearch());
        return validatingTrainer.setTimeout(5, TimeUnit.MINUTES).setMaxIterations(500).run();
    });
}
Also used : Nonnull(javax.annotation.Nonnull) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) QuadraticSearch(com.simiacryptus.mindseye.opt.line.QuadraticSearch) EntropyLossLayer(com.simiacryptus.mindseye.layers.java.EntropyLossLayer) ValidatingTrainer(com.simiacryptus.mindseye.opt.ValidatingTrainer) ArrayTrainable(com.simiacryptus.mindseye.eval.ArrayTrainable) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) SimpleLossNetwork(com.simiacryptus.mindseye.network.SimpleLossNetwork)

Example 4 with ValidatingTrainer

use of com.simiacryptus.mindseye.opt.ValidatingTrainer in project MindsEye by SimiaCryptus.

From class RecursiveSubspaceTest, method train:

@Override
public void train(@Nonnull final NotebookOutput log, @Nonnull final Layer network, @Nonnull final Tensor[][] trainingData, final TrainingMonitor monitor) {
    log.code(() -> {
        // Attach an entropy loss head to form the supervised training objective.
        @Nonnull final SimpleLossNetwork supervisedNetwork = new SimpleLossNetwork(network, new EntropyLossLayer());
        // Training samples 1000 rows (cap 1000); validation uses a cached batch-1000 array view.
        @Nonnull final ValidatingTrainer validatingTrainer = new ValidatingTrainer(
            new SampledArrayTrainable(trainingData, supervisedNetwork, 1000, 1000),
            new ArrayTrainable(trainingData, supervisedNetwork, 1000).cached())
            .setMonitor(monitor);
        // Phase 0 orientation is supplied by the subclass; LBFGS-named cursors use a fixed
        // learning rate of 1.0, everything else a quadratic line search.
        validatingTrainer.getRegimen().get(0)
            .setOrientation(getOrientation())
            .setLineSearchFactory(cursorName -> cursorName.toString().contains("LBFGS")
                ? new StaticLearningRate(1.0)
                : new QuadraticSearch());
        return validatingTrainer.setTimeout(15, TimeUnit.MINUTES).setMaxIterations(500).run();
    });
}
Also used : Nonnull(javax.annotation.Nonnull) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) QuadraticSearch(com.simiacryptus.mindseye.opt.line.QuadraticSearch) StaticLearningRate(com.simiacryptus.mindseye.opt.line.StaticLearningRate) EntropyLossLayer(com.simiacryptus.mindseye.layers.java.EntropyLossLayer) ValidatingTrainer(com.simiacryptus.mindseye.opt.ValidatingTrainer) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) ArrayTrainable(com.simiacryptus.mindseye.eval.ArrayTrainable) SimpleLossNetwork(com.simiacryptus.mindseye.network.SimpleLossNetwork)

Example 5 with ValidatingTrainer

use of com.simiacryptus.mindseye.opt.ValidatingTrainer in project MindsEye by SimiaCryptus.

From class ImageDecompositionLab, method train:

/**
 * Trains {@code network} against {@code data} under a wall-clock budget, logging
 * progress to the notebook.
 *
 * @param log            the notebook log receiving the training transcript
 * @param monitor        the monitor notified of each training step
 * @param network        the network to optimize
 * @param data           the training tensors
 * @param timeoutMinutes the wall-clock training budget, in minutes
 * @param mask           the data mask applied to the sampled training subject
 */
protected void train(@Nonnull final NotebookOutput log, final TrainingMonitor monitor, final Layer network, @Nonnull final Tensor[][] data, final int timeoutMinutes, final boolean... mask) {
    log.out("Training for %s minutes, mask=%s", timeoutMinutes, Arrays.toString(mask));
    log.code(() -> {
        // Sampled subject covers the entire dataset; the mask marks which inputs are trainable.
        @Nonnull SampledTrainable trainingSubject = new SampledArrayTrainable(data, network, data.length);
        trainingSubject = (SampledTrainable) ((TrainableDataMask) trainingSubject).setMask(mask);
        @Nonnull final ValidatingTrainer validatingTrainer = new ValidatingTrainer(trainingSubject, new ArrayTrainable(data, network))
            .setMaxTrainingSize(data.length)
            .setMinTrainingSize(5)
            .setMonitor(monitor)
            .setTimeout(timeoutMinutes, TimeUnit.MINUTES)
            .setMaxIterations(1000);
        // Fix: the original factory branched on QQN.CURSOR_NAME but returned an identical
        // QuadraticSearch with rate 1.0 in BOTH branches — the conditional was dead code.
        // Behavior is unchanged; the redundant test is simply removed.
        validatingTrainer.getRegimen().get(0)
            .setOrientation(new GradientDescent())
            .setLineSearchFactory(name -> new QuadraticSearch().setCurrentRate(1.0));
        validatingTrainer.run();
    });
}
Also used : TrainableDataMask(com.simiacryptus.mindseye.eval.TrainableDataMask) SampledTrainable(com.simiacryptus.mindseye.eval.SampledTrainable) Nonnull(javax.annotation.Nonnull) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) QuadraticSearch(com.simiacryptus.mindseye.opt.line.QuadraticSearch) GradientDescent(com.simiacryptus.mindseye.opt.orient.GradientDescent) ValidatingTrainer(com.simiacryptus.mindseye.opt.ValidatingTrainer) SampledArrayTrainable(com.simiacryptus.mindseye.eval.SampledArrayTrainable) ArrayTrainable(com.simiacryptus.mindseye.eval.ArrayTrainable)

Aggregations

ArrayTrainable (com.simiacryptus.mindseye.eval.ArrayTrainable)7 SampledArrayTrainable (com.simiacryptus.mindseye.eval.SampledArrayTrainable)7 ValidatingTrainer (com.simiacryptus.mindseye.opt.ValidatingTrainer)7 Nonnull (javax.annotation.Nonnull)7 EntropyLossLayer (com.simiacryptus.mindseye.layers.java.EntropyLossLayer)5 SimpleLossNetwork (com.simiacryptus.mindseye.network.SimpleLossNetwork)4 Tensor (com.simiacryptus.mindseye.lang.Tensor)3 DAGNetwork (com.simiacryptus.mindseye.network.DAGNetwork)3 TrainingMonitor (com.simiacryptus.mindseye.opt.TrainingMonitor)3 QuadraticSearch (com.simiacryptus.mindseye.opt.line.QuadraticSearch)3 StepRecord (com.simiacryptus.mindseye.test.StepRecord)3 TestUtil (com.simiacryptus.mindseye.test.TestUtil)3 TableOutput (com.simiacryptus.util.TableOutput)3 NotebookOutput (com.simiacryptus.util.io.NotebookOutput)3 Format (guru.nidi.graphviz.engine.Format)3 Graphviz (guru.nidi.graphviz.engine.Graphviz)3 IOException (java.io.IOException)3 ArrayList (java.util.ArrayList)3 LinkedHashMap (java.util.LinkedHashMap)3 List (java.util.List)3