
Example 26 with PipelineNetwork

use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

the class DeepDream method processStats.

/**
 * Measures per-layer content targets for the given style setup and returns the resulting neural setup.
 *
 * @param style the style
 * @return the neural setup
 */
public NeuralSetup processStats(final StyleSetup<T> style) {
    NeuralSetup<T> self = new NeuralSetup<>(style);
    Tensor contentInput = Tensor.fromRGB(style.contentImage);
    self.contentTarget = new ContentTarget();
    for (final T layerType : getLayerTypes()) {
        System.gc();
        final PipelineNetwork network = layerType.texture();
        self.contentTarget.content.put(layerType, network.eval(contentInput).getDataAndFree().getAndFree(0));
        logger.info(String.format("target_content_%s=%s", layerType.name(), self.contentTarget.content.get(layerType).prettyPrint()));
    }
    return self;
}
Also used : Tensor(com.simiacryptus.mindseye.lang.Tensor) PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork)
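
The essential pattern in processStats is a one-time measurement pass: each layer type's sub-network is evaluated once on the content image and the resulting activation tensor is cached, keyed by layer, so later optimization steps can reuse the fixed targets. A minimal plain-Java sketch of that caching pattern (the LayerType values and the forwardPass function below are hypothetical stand-ins, not MindsEye API):

import java.util.EnumMap;
import java.util.Map;
import java.util.function.BiFunction;

public class ContentTargetSketch {
    // Hypothetical layer identifiers standing in for the generic type T / getLayerTypes()
    enum LayerType { POOL_1, POOL_2, POOL_3 }

    final Map<LayerType, double[]> contentTargets = new EnumMap<>(LayerType.class);

    // forwardPass stands in for layerType.texture() followed by network.eval(contentInput)
    void measure(double[] contentImage, BiFunction<LayerType, double[], double[]> forwardPass) {
        for (LayerType layer : LayerType.values()) {
            // Evaluate once per layer and cache; the target stays fixed during optimization
            contentTargets.put(layer, forwardPass.apply(layer, contentImage));
        }
    }
}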

Example 27 with PipelineNetwork

use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

the class ImageClassifier method deepDream.

/**
 * Deep dream.
 *
 * @param log                 the log
 * @param image               the image
 * @param targetCategoryIndex the target category index
 * @param totalCategories     the total categories
 * @param config              the config
 * @param network             the network
 * @param lossLayer           the loss layer
 * @param targetValue         the target value
 */
public void deepDream(@Nonnull final NotebookOutput log, final Tensor image, final int targetCategoryIndex, final int totalCategories, Function<IterativeTrainer, IterativeTrainer> config, final Layer network, final Layer lossLayer, final double targetValue) {
    @Nonnull List<Tensor[]> data = Arrays.<Tensor[]>asList(new Tensor[] { image, new Tensor(1, 1, totalCategories).set(targetCategoryIndex, targetValue) });
    log.code(() -> {
        for (Tensor[] tensors : data) {
            ImageClassifier.log.info(log.image(tensors[0].toImage(), "") + tensors[1]);
        }
    });
    log.code(() -> {
        @Nonnull ArrayList<StepRecord> history = new ArrayList<>();
        @Nonnull PipelineNetwork clamp = new PipelineNetwork(1);
        // Two RELU + (scale -1, bias 255) stages clamp the canvas pixels to [0, 255]
        clamp.add(new ActivationLayer(ActivationLayer.Mode.RELU));
        clamp.add(new LinearActivationLayer().setBias(255).setScale(-1).freeze());
        clamp.add(new ActivationLayer(ActivationLayer.Mode.RELU));
        clamp.add(new LinearActivationLayer().setBias(255).setScale(-1).freeze());
        @Nonnull PipelineNetwork supervised = new PipelineNetwork(2);
        supervised.wrap(lossLayer, supervised.add(network.freeze(), supervised.wrap(clamp, supervised.getInput(0))), supervised.getInput(1));
        // TensorList[] gpuInput = data.stream().map(data1 -> {
        // return CudnnHandle.apply(gpu -> {
        // Precision precision = Precision.Float;
        // return CudaTensorList.wrap(gpu.getPtr(TensorArray.wrap(data1), precision, MemoryType.Managed), 1, image.getDimensions(), precision);
        // });
        // }).toArray(i -> new TensorList[i]);
        // @Nonnull Trainable trainable = new TensorListTrainable(supervised, gpuInput).setVerbosity(1).setMask(true);
        @Nonnull Trainable trainable = new ArrayTrainable(supervised, 1).setVerbose(true).setMask(true, false).setData(data);
        config.apply(new IterativeTrainer(trainable).setMonitor(getTrainingMonitor(history, supervised)).setOrientation(new QQN()).setLineSearchFactory(name -> new ArmijoWolfeSearch()).setTimeout(60, TimeUnit.MINUTES)).setTerminateThreshold(Double.NEGATIVE_INFINITY).runAndFree();
        return TestUtil.plot(history);
    });
}
Also used : Tensor(com.simiacryptus.mindseye.lang.Tensor) ActivationLayer(com.simiacryptus.mindseye.layers.cudnn.ActivationLayer) LinearActivationLayer(com.simiacryptus.mindseye.layers.java.LinearActivationLayer) IterativeTrainer(com.simiacryptus.mindseye.opt.IterativeTrainer) Nonnull(javax.annotation.Nonnull) ArrayList(java.util.ArrayList) PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) ArrayTrainable(com.simiacryptus.mindseye.eval.ArrayTrainable) QQN(com.simiacryptus.mindseye.opt.orient.QQN) StepRecord(com.simiacryptus.mindseye.test.StepRecord) ArmijoWolfeSearch(com.simiacryptus.mindseye.opt.line.ArmijoWolfeSearch) Trainable(com.simiacryptus.mindseye.eval.Trainable)
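
The second element of each training row is the supervision target: a 1 x 1 x totalCategories tensor that is zero everywhere except at the category index the dream should amplify. A minimal sketch of that construction in isolation (the category count, index, and value below are illustrative; only the Tensor calls already used above are assumed):

import com.simiacryptus.mindseye.lang.Tensor;

public class DreamTargetSketch {
    public static void main(String[] args) {
        int totalCategories = 1000;      // illustrative: size of the classifier output
        int targetCategoryIndex = 281;   // illustrative: the class to amplify
        double targetValue = 1.0;        // how strongly the objective pushes that class
        // Same construction as in deepDream: a sparse target over the category axis
        Tensor target = new Tensor(1, 1, totalCategories).set(targetCategoryIndex, targetValue);
        System.out.println(target.prettyPrint());
    }
}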

Example 28 with PipelineNetwork

use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

the class StyleTransfer method fitnessNetwork.

/**
 * Builds the fitness (objective) pipeline network for the given setup.
 *
 * @param setup the setup
 * @return the pipeline network
 */
@Nonnull
public PipelineNetwork fitnessNetwork(NeuralSetup setup) {
    PipelineNetwork pipelineNetwork = getInstance().getNetwork();
    Map<T, DAGNode> nodes = new HashMap<>();
    Map<T, UUID> ids = getInstance().getNodes();
    ids.forEach((l, id) -> nodes.put(l, pipelineNetwork.getChildNode(id)));
    PipelineNetwork network = buildNetwork(setup, nodes, pipelineNetwork);
    // network = withClamp(network);
    ArtistryUtil.setPrecision(network, setup.style.precision);
    return network;
}
Also used : HashMap(java.util.HashMap) PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) DAGNode(com.simiacryptus.mindseye.network.DAGNode) UUID(java.util.UUID) Nonnull(javax.annotation.Nonnull)
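
The only non-obvious step above is rebinding the stored node ids to live DAGNode handles on the instance's network before the loss terms are attached. A small generic sketch of that id-to-node resolution pattern (plain Java; the lookup function stands in for pipelineNetwork.getChildNode):

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.function.Function;

public class NodeResolutionSketch {
    // Mirrors ids.forEach((l, id) -> nodes.put(l, pipelineNetwork.getChildNode(id)))
    static <K, N> Map<K, N> resolve(Map<K, UUID> ids, Function<UUID, N> getChildNode) {
        Map<K, N> nodes = new HashMap<>();
        ids.forEach((layer, id) -> nodes.put(layer, getChildNode.apply(id)));
        return nodes;
    }
}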

Example 29 with PipelineNetwork

use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

the class StyleTransfer method getContentComponents.

/**
 * Gets content components.
 *
 * @param setup   the setup
 * @param nodeMap the node map
 * @return the content components
 */
@Nonnull
public ArrayList<Tuple2<Double, DAGNode>> getContentComponents(NeuralSetup<T> setup, final Map<T, DAGNode> nodeMap) {
    ArrayList<Tuple2<Double, DAGNode>> contentComponents = new ArrayList<>();
    for (final T layerType : getLayerTypes()) {
        final DAGNode node = nodeMap.get(layerType);
        final double coeff_content = !setup.style.content.params.containsKey(layerType) ? 0 : setup.style.content.params.get(layerType);
        final PipelineNetwork network1 = (PipelineNetwork) node.getNetwork();
        if (coeff_content != 0) {
            Tensor content = setup.contentTarget.content.get(layerType);
            contentComponents.add(new Tuple2<>(coeff_content, network1.wrap(new MeanSqLossLayer().setAlpha(1.0 / content.rms()), node, network1.wrap(new ValueLayer(content), new DAGNode[] {}))));
        }
    }
    return contentComponents;
}
Also used : Tensor(com.simiacryptus.mindseye.lang.Tensor) Tuple2(com.simiacryptus.util.lang.Tuple2) ArrayList(java.util.ArrayList) ValueLayer(com.simiacryptus.mindseye.layers.cudnn.ValueLayer) PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) DAGNode(com.simiacryptus.mindseye.network.DAGNode) MeanSqLossLayer(com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer) Nonnull(javax.annotation.Nonnull)
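
Each component pairs the layer's live activation with the cached content target through a MeanSqLossLayer whose alpha is 1 / content.rms(), which reads as normalizing the squared error by the target's magnitude so high-activation layers do not dominate; the per-layer coefficient then weights the term. A plain-Java sketch of that reading (arrays stand in for tensors; the assumption that alpha scales the loss output is noted in the comments and is not verified MindsEye behavior):

public class ContentLossSketch {
    // Mean squared error between the live activation and the cached target,
    // scaled by 1 / rms(target) (assumed meaning of setAlpha(1.0 / content.rms()))
    // and weighted by the per-layer content coefficient.
    static double contentLoss(double[] activation, double[] target, double coeff) {
        double sumSq = 0;
        double sumTargetSq = 0;
        for (int i = 0; i < target.length; i++) {
            double d = activation[i] - target[i];
            sumSq += d * d;
            sumTargetSq += target[i] * target[i];
        }
        double mse = sumSq / target.length;
        double rms = Math.sqrt(sumTargetSq / target.length);  // matches Tensor.rms()
        return coeff * mse / rms;
    }
}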

Example 30 with PipelineNetwork

use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

the class ArtistryUtil method getClamp.

/**
 * Builds a pipeline network that clamps its input values to the range [0, max].
 *
 * @param max the maximum output value
 * @return the clamping network
 */
@Nonnull
public static PipelineNetwork getClamp(final int max) {
    @Nonnull PipelineNetwork clamp = new PipelineNetwork(1);
    clamp.add(new ActivationLayer(ActivationLayer.Mode.RELU));
    clamp.add(new LinearActivationLayer().setBias(max).setScale(-1).freeze());
    clamp.add(new ActivationLayer(ActivationLayer.Mode.RELU));
    clamp.add(new LinearActivationLayer().setBias(max).setScale(-1).freeze());
    return clamp;
}
Also used : ActivationLayer(com.simiacryptus.mindseye.layers.cudnn.ActivationLayer) SquareActivationLayer(com.simiacryptus.mindseye.layers.cudnn.SquareActivationLayer) LinearActivationLayer(com.simiacryptus.mindseye.layers.java.LinearActivationLayer) Nonnull(javax.annotation.Nonnull) PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork)
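
The two RELU plus negate-and-shift stages implement a clamp to [0, max] without a dedicated clamp layer: the first pair maps x to max - max(x, 0), and the second maps that back, giving min(max(x, 0), max). A scalar sketch of the same arithmetic (plain Java, not MindsEye API):

public class ClampSketch {
    // Scalar equivalent of getClamp(max): two RELU + (scale -1, bias max) stages
    static double clamp(double x, double max) {
        double y = Math.max(x, 0);   // ActivationLayer RELU
        y = max - y;                 // LinearActivationLayer: scale -1, bias max
        y = Math.max(y, 0);          // ActivationLayer RELU
        y = max - y;                 // LinearActivationLayer: scale -1, bias max
        return y;                    // == min(max(x, 0), max)
    }

    public static void main(String[] args) {
        System.out.println(clamp(-10, 255));   // 0.0
        System.out.println(clamp(128, 255));   // 128.0
        System.out.println(clamp(300, 255));   // 255.0
    }
}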

Aggregations

PipelineNetwork (com.simiacryptus.mindseye.network.PipelineNetwork): 33 uses
Nonnull (javax.annotation.Nonnull): 29 uses
Tensor (com.simiacryptus.mindseye.lang.Tensor): 16 uses
DAGNode (com.simiacryptus.mindseye.network.DAGNode): 13 uses
Nullable (javax.annotation.Nullable): 12 uses
ArrayList (java.util.ArrayList): 11 uses
StepRecord (com.simiacryptus.mindseye.test.StepRecord): 10 uses
Layer (com.simiacryptus.mindseye.lang.Layer): 9 uses
Arrays (java.util.Arrays): 9 uses
List (java.util.List): 9 uses
ArrayTrainable (com.simiacryptus.mindseye.eval.ArrayTrainable): 8 uses
DAGNetwork (com.simiacryptus.mindseye.network.DAGNetwork): 8 uses
IntStream (java.util.stream.IntStream): 8 uses
MeanSqLossLayer (com.simiacryptus.mindseye.layers.java.MeanSqLossLayer): 7 uses
TestUtil (com.simiacryptus.mindseye.test.TestUtil): 7 uses
NotebookOutput (com.simiacryptus.util.io.NotebookOutput): 7 uses
Map (java.util.Map): 7 uses
Trainable (com.simiacryptus.mindseye.eval.Trainable): 6 uses
IterativeTrainer (com.simiacryptus.mindseye.opt.IterativeTrainer): 6 uses
BufferedImage (java.awt.image.BufferedImage): 6 uses