Search in sources :

Example 1 with ConvolutionLayer

Use of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the project MindsEye by SimiaCryptus.

From the class VGG16_HDF5, method phase3a.

/**
 * Phase 3a: the final fully-connected stages of VGG-16, expressed as 1x1 convolutions.
 */
protected void phase3a() {
    add(new ConvolutionLayer(1, 1, 4096, 4096).setPaddingXY(0, 0).setAndFree(hdf5.readDataSet("param_0", "layer_34").permuteDimensionsAndFree(fullyconnectedOrder)));
    add(new ImgBandBiasLayer(4096).setAndFree((hdf5.readDataSet("param_1", "layer_34"))));
    add(new ActivationLayer(ActivationLayer.Mode.RELU));
    add(new ConvolutionLayer(1, 1, 4096, 1000).setPaddingXY(0, 0).setAndFree(hdf5.readDataSet("param_0", "layer_36").permuteDimensionsAndFree(fullyconnectedOrder)));
    add(new ImgBandBiasLayer(1000).setAndFree((hdf5.readDataSet("param_1", "layer_36"))));
}
Also used : ImgBandBiasLayer(com.simiacryptus.mindseye.layers.cudnn.ImgBandBiasLayer) SoftmaxActivationLayer(com.simiacryptus.mindseye.layers.cudnn.SoftmaxActivationLayer) ActivationLayer(com.simiacryptus.mindseye.layers.cudnn.ActivationLayer) ConvolutionLayer(com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer)
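
The calls above repeat one pattern used throughout the VGG16 importer: a 1x1 ConvolutionLayer whose kernel is loaded from an HDF5 dataset, followed by an ImgBandBiasLayer and, where needed, a ReLU ActivationLayer. A minimal sketch of that pattern as a helper method follows; the helper name addDenseBlock is hypothetical, while add, hdf5, and fullyconnectedOrder are the surrounding VGG16_HDF5 members visible in the excerpt.

// Hypothetical helper; not part of VGG16_HDF5. It only restates the
// conv + bias (+ ReLU) pattern used by phase3a above.
protected void addDenseBlock(String hdf5Layer, int inBands, int outBands, boolean relu) {
    add(new ConvolutionLayer(1, 1, inBands, outBands)
        .setPaddingXY(0, 0)
        .setAndFree(hdf5.readDataSet("param_0", hdf5Layer)
            .permuteDimensionsAndFree(fullyconnectedOrder)));
    add(new ImgBandBiasLayer(outBands)
        .setAndFree(hdf5.readDataSet("param_1", hdf5Layer)));
    if (relu) {
        add(new ActivationLayer(ActivationLayer.Mode.RELU));
    }
}
// Equivalent to phase3a:
// addDenseBlock("layer_34", 4096, 4096, true);
// addDenseBlock("layer_36", 4096, 1000, false);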

Example 2 with ConvolutionLayer

Use of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the project MindsEye by SimiaCryptus.

From the class RecursiveSubspaceTest, method buildModel.

@Override
public DAGNetwork buildModel(@Nonnull NotebookOutput log) {
    log.h3("Model");
    log.p("We use a multi-level convolution network");
    return log.code(() -> {
        @Nonnull final PipelineNetwork network = new PipelineNetwork();
        double weight = 1e-3;
        @Nonnull DoubleSupplier init = () -> weight * (Math.random() - 0.5);
        network.add(new ConvolutionLayer(3, 3, 1, 5).set(init));
        network.add(new ImgBandBiasLayer(5));
        network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
        network.add(new ActivationLayer(ActivationLayer.Mode.RELU));
        network.add(newNormalizationLayer());  // newNormalizationLayer() is a factory method defined by the test class
        network.add(new ConvolutionLayer(3, 3, 5, 5).set(init));
        network.add(new ImgBandBiasLayer(5));
        network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
        network.add(new ActivationLayer(ActivationLayer.Mode.RELU));
        network.add(newNormalizationLayer());
        network.add(new BiasLayer(7, 7, 5));
        network.add(new FullyConnectedLayer(new int[] { 7, 7, 5 }, new int[] { 10 }).set(init));
        network.add(new SoftmaxActivationLayer());
        return network;
    });
}
Also used : SoftmaxActivationLayer(com.simiacryptus.mindseye.layers.java.SoftmaxActivationLayer) FullyConnectedLayer(com.simiacryptus.mindseye.layers.java.FullyConnectedLayer) ImgBandBiasLayer(com.simiacryptus.mindseye.layers.cudnn.ImgBandBiasLayer) ActivationLayer(com.simiacryptus.mindseye.layers.cudnn.ActivationLayer) Nonnull(javax.annotation.Nonnull) DoubleSupplier(java.util.function.DoubleSupplier) PoolingLayer(com.simiacryptus.mindseye.layers.cudnn.PoolingLayer) PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) ConvolutionLayer(com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer) BiasLayer(com.simiacryptus.mindseye.layers.java.BiasLayer)
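
The {7, 7, 5} input to the FullyConnectedLayer follows from the spatial bookkeeping of the two pooling steps. The trace below assumes 28x28x1 inputs (MNIST-sized) and the default 2x2 pooling window; both are assumptions for illustration, not stated by the test.

// Assumed input: 28 x 28 x 1.
// ConvolutionLayer(3, 3, 1, 5)           -> 28 x 28 x 5   (3x3 kernel, same padding assumed)
// PoolingLayer Max (default 2x2 window)  -> 14 x 14 x 5
// ConvolutionLayer(3, 3, 5, 5)           -> 14 x 14 x 5
// PoolingLayer Max (default 2x2 window)  ->  7 x  7 x 5
// BiasLayer(7, 7, 5) and FullyConnectedLayer({7, 7, 5} -> {10}) therefore match.
int side = 28;
side /= 2;  // first max pool: 28 -> 14
side /= 2;  // second max pool: 14 -> 7
assert side == 7;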

Example 3 with ConvolutionLayer

Use of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the project MindsEye by SimiaCryptus.

From the class ConvPipelineTest, method buildList_1.

/**
 * Builds layer list 1.
 *
 * @return the layers, as an array
 */
public static Layer[] buildList_1() {
    @Nonnull final ArrayList<Layer> network = new ArrayList<Layer>();
    network.add(new ConvolutionLayer(3, 3, 3, 10).set(i -> 1e-8 * (Math.random() - 0.5)));
    network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
    network.add(new ReLuActivationLayer());
    network.add(new ImgCropLayer(126, 126));
    network.add(new ConvolutionLayer(3, 3, 10, 20).set(i -> 1e-8 * (Math.random() - 0.5)));
    network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
    network.add(new ReLuActivationLayer());
    network.add(new ImgCropLayer(62, 62));
    network.add(new ConvolutionLayer(5, 5, 20, 30).set(i -> 1e-8 * (Math.random() - 0.5)));
    network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
    network.add(new ReLuActivationLayer());
    network.add(new ImgCropLayer(18, 18));
    network.add(new ConvolutionLayer(3, 3, 30, 40).set(i -> 1e-8 * (Math.random() - 0.5)));
    network.add(new PoolingLayer().setWindowX(4).setWindowY(4).setMode(PoolingLayer.PoolingMode.Avg));
    network.add(new ReLuActivationLayer());
    network.add(new ImgCropLayer(4, 4));
    network.add(new ImgBandBiasLayer(40));
    network.add(new FullyConnectedLayer(new int[] { 4, 4, 40 }, new int[] { 100 }).set(() -> 0.001 * (Math.random() - 0.45)));
    network.add(new SoftmaxActivationLayer());
    return network.toArray(new Layer[] {});
}
Also used : ConvolutionLayer(com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer) FullyConnectedLayer(com.simiacryptus.mindseye.layers.java.FullyConnectedLayer) SoftmaxActivationLayer(com.simiacryptus.mindseye.layers.java.SoftmaxActivationLayer) ImgCropLayer(com.simiacryptus.mindseye.layers.java.ImgCropLayer) Layer(com.simiacryptus.mindseye.lang.Layer) ImgBandBiasLayer(com.simiacryptus.mindseye.layers.java.ImgBandBiasLayer) PoolingLayer(com.simiacryptus.mindseye.layers.cudnn.PoolingLayer) Nonnull(javax.annotation.Nonnull) ReLuActivationLayer(com.simiacryptus.mindseye.layers.java.ReLuActivationLayer) ArrayList(java.util.ArrayList)
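
buildList_1 returns a bare Layer[] rather than a network. A minimal sketch of chaining that array into a PipelineNetwork is shown below; the wrapping loop is an illustration added here, not part of ConvPipelineTest.

// Hypothetical wiring of the Layer[] into a single sequential network.
Layer[] layers = ConvPipelineTest.buildList_1();
@Nonnull final PipelineNetwork pipeline = new PipelineNetwork();
for (Layer layer : layers) {
    pipeline.add(layer);  // layers are applied in array order
}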

Example 4 with ConvolutionLayer

Use of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the project MindsEye by SimiaCryptus.

From the class ImgTileSubnetLayerTest, method getLayer.

@Nonnull
@Override
public Layer getLayer(final int[][] inputSize, Random random) {
    ConvolutionLayer subnetwork = new ConvolutionLayer(3, 3, 1, 1).set(() -> this.random());
    ImgTileSubnetLayer tileSubnetLayer = new ImgTileSubnetLayer(subnetwork, 3, 3, 2, 2);
    subnetwork.freeRef();
    return tileSubnetLayer;
}
Also used : ConvolutionLayer(com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer) Nonnull(javax.annotation.Nonnull)
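
The construct-wrap-release pattern above (freeRef() on the inner layer after handing it to ImgTileSubnetLayer) suggests the wrapper retains its own reference to the subnetwork. A hedged variant with a different inner layer is sketched below; the constructor arguments are read as (subnetwork, tile width, tile height, stride X, stride Y), which is an assumption based on the call above.

@Nonnull
@Override
public Layer getLayer(final int[][] inputSize, Random random) {
    // Hypothetical variant: tile a ReLU subnetwork instead of a 3x3 convolution.
    ActivationLayer relu = new ActivationLayer(ActivationLayer.Mode.RELU);
    ImgTileSubnetLayer tiled = new ImgTileSubnetLayer(relu, 3, 3, 2, 2);
    relu.freeRef();  // drop the local reference; the subnet layer keeps its own
    return tiled;
}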

Example 5 with ConvolutionLayer

Use of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the project MindsEye by SimiaCryptus.

From the class ArtistryUtil, method gram.

/**
 * Appends a Gram-matrix (Gramian) feature pipeline to the given network.
 *
 * @param network      the network to extend
 * @param mean         the per-band mean to subtract
 * @param pcaTransform the PCA transform, applied as a convolution kernel
 * @return the same pipeline network, extended
 */
@Nonnull
public static PipelineNetwork gram(final PipelineNetwork network, Tensor mean, Tensor pcaTransform) {
    int[] dimensions = pcaTransform.getDimensions();
    int inputBands = mean.getDimensions()[2];
    int pcaBands = dimensions[2];
    int outputBands = pcaBands / inputBands;
    int width = dimensions[0];
    int height = dimensions[1];
    network.wrap(new ImgBandBiasLayer(mean.scale(-1)));
    network.wrap(new ConvolutionLayer(width, height, inputBands, outputBands).set(pcaTransform));
    network.wrap(new GramianLayer());
    return network;
}
Also used : ImgBandBiasLayer(com.simiacryptus.mindseye.layers.cudnn.ImgBandBiasLayer) ConvolutionLayer(com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer) GramianLayer(com.simiacryptus.mindseye.layers.cudnn.GramianLayer) Nonnull(javax.annotation.Nonnull)
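
Calling gram requires a per-band mean and a PCA transform whose band count is a multiple of the input band count, since outputBands = pcaBands / inputBands in the code above. The sketch below illustrates the expected shapes; the 64-band figures and the empty upstream network are illustrative assumptions, not values taken from ArtistryUtil.

// Hypothetical shapes: 64 input bands projected to 64 output bands with a 1x1 kernel.
Tensor mean = new Tensor(1, 1, 64);               // per-band mean, subtracted via scale(-1) + bias
Tensor pcaTransform = new Tensor(1, 1, 64 * 64);  // 1x1 kernel: 64 in -> 64 out
PipelineNetwork features = new PipelineNetwork(); // would normally end in a feature layer
PipelineNetwork gramNet = ArtistryUtil.gram(features, mean, pcaTransform);
// gramNet now ends in a GramianLayer producing band-covariance (Gram) statistics.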

Aggregations

ConvolutionLayer (com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer): 13
ImgBandBiasLayer (com.simiacryptus.mindseye.layers.cudnn.ImgBandBiasLayer): 10
ActivationLayer (com.simiacryptus.mindseye.layers.cudnn.ActivationLayer): 8
Nonnull (javax.annotation.Nonnull): 7
SoftmaxActivationLayer (com.simiacryptus.mindseye.layers.cudnn.SoftmaxActivationLayer): 6
Layer (com.simiacryptus.mindseye.lang.Layer): 2
ImgModulusPaddingLayer (com.simiacryptus.mindseye.layers.cudnn.ImgModulusPaddingLayer): 2
PoolingLayer (com.simiacryptus.mindseye.layers.cudnn.PoolingLayer): 2
FullyConnectedLayer (com.simiacryptus.mindseye.layers.java.FullyConnectedLayer): 2
ImgReshapeLayer (com.simiacryptus.mindseye.layers.java.ImgReshapeLayer): 2
SoftmaxActivationLayer (com.simiacryptus.mindseye.layers.java.SoftmaxActivationLayer): 2
BandAvgReducerLayer (com.simiacryptus.mindseye.layers.cudnn.BandAvgReducerLayer): 1
GramianLayer (com.simiacryptus.mindseye.layers.cudnn.GramianLayer): 1
SquareActivationLayer (com.simiacryptus.mindseye.layers.cudnn.SquareActivationLayer): 1
BiasLayer (com.simiacryptus.mindseye.layers.java.BiasLayer): 1
ImgBandBiasLayer (com.simiacryptus.mindseye.layers.java.ImgBandBiasLayer): 1
ImgCropLayer (com.simiacryptus.mindseye.layers.java.ImgCropLayer): 1
ReLuActivationLayer (com.simiacryptus.mindseye.layers.java.ReLuActivationLayer): 1
PipelineNetwork (com.simiacryptus.mindseye.network.PipelineNetwork): 1
Util (com.simiacryptus.util.Util): 1