Example usage of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the MindsEye project by SimiaCryptus: class VGG16_HDF5, method phase3a.
/**
 * Phase 3a: builds the tail of the network as two 1x1 convolutions with bias
 * and an intervening ReLU (4096 -> 4096 -> 1000 bands), loading weights and
 * biases from the HDF5 datasets "layer_34" and "layer_36".
 * <p>
 * NOTE(review): setAndFree/permuteDimensionsAndFree appear to transfer
 * ownership of (and release) the tensors read from hdf5 — confirm the
 * reference-counting contract before altering call order.
 * fullyconnectedOrder presumably reorders fully-connected weight dimensions
 * into convolution-kernel order — verify against its declaration.
 */
protected void phase3a() {
// 1x1 convolution acting as a fully-connected layer (4096 -> 4096 bands); weights from layer_34 param_0.
add(new ConvolutionLayer(1, 1, 4096, 4096).setPaddingXY(0, 0).setAndFree(hdf5.readDataSet("param_0", "layer_34").permuteDimensionsAndFree(fullyconnectedOrder)));
// Per-band bias from layer_34 param_1.
add(new ImgBandBiasLayer(4096).setAndFree((hdf5.readDataSet("param_1", "layer_34"))));
add(new ActivationLayer(ActivationLayer.Mode.RELU));
// Final 1x1 convolution projecting to the 1000 output classes; weights from layer_36 param_0.
add(new ConvolutionLayer(1, 1, 4096, 1000).setPaddingXY(0, 0).setAndFree(hdf5.readDataSet("param_0", "layer_36").permuteDimensionsAndFree(fullyconnectedOrder)));
// Per-band bias from layer_36 param_1.
add(new ImgBandBiasLayer(1000).setAndFree((hdf5.readDataSet("param_1", "layer_36"))));
}
Example usage of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the MindsEye project by SimiaCryptus: class RecursiveSubspaceTest, method buildModel.
@Override
public DAGNetwork buildModel(@Nonnull NotebookOutput log) {
  log.h3("Model");
  log.p("We use a multi-level convolution network");
  return log.code(() -> {
    // Initialize kernels with small uniform noise centered on zero.
    final double scale = 1e-3;
    @Nonnull final DoubleSupplier randomInit = () -> scale * (Math.random() - 0.5);
    @Nonnull final PipelineNetwork pipeline = new PipelineNetwork();
    // Stage 1: 1 -> 5 bands, max-pool, ReLU, normalization.
    pipeline.add(new ConvolutionLayer(3, 3, 1, 5).set(randomInit));
    pipeline.add(new ImgBandBiasLayer(5));
    pipeline.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
    pipeline.add(new ActivationLayer(ActivationLayer.Mode.RELU));
    pipeline.add(newNormalizationLayer());
    // Stage 2: 5 -> 5 bands, max-pool, ReLU, normalization.
    pipeline.add(new ConvolutionLayer(3, 3, 5, 5).set(randomInit));
    pipeline.add(new ImgBandBiasLayer(5));
    pipeline.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
    pipeline.add(new ActivationLayer(ActivationLayer.Mode.RELU));
    pipeline.add(newNormalizationLayer());
    // Classifier head: bias, fully-connected projection to 10 classes, softmax.
    pipeline.add(new BiasLayer(7, 7, 5));
    pipeline.add(new FullyConnectedLayer(new int[] { 7, 7, 5 }, new int[] { 10 }).set(randomInit));
    pipeline.add(new SoftmaxActivationLayer());
    return pipeline;
  });
}
Example usage of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the MindsEye project by SimiaCryptus: class ConvPipelineTest, method buildList_1.
/**
 * Builds the layer sequence for pipeline variant 1: four conv/pool/ReLU/crop
 * stages followed by a bias, fully-connected projection, and softmax.
 *
 * @return the layers in execution order
 */
public static Layer[] buildList_1() {
  @Nonnull final ArrayList<Layer> layers = new ArrayList<Layer>();
  // Stage 1: 3 -> 10 bands, max-pool, ReLU, crop to 126x126.
  layers.add(new ConvolutionLayer(3, 3, 3, 10).set(i -> 1e-8 * (Math.random() - 0.5)));
  layers.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
  layers.add(new ReLuActivationLayer());
  layers.add(new ImgCropLayer(126, 126));
  // Stage 2: 10 -> 20 bands, max-pool, ReLU, crop to 62x62.
  layers.add(new ConvolutionLayer(3, 3, 10, 20).set(i -> 1e-8 * (Math.random() - 0.5)));
  layers.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
  layers.add(new ReLuActivationLayer());
  layers.add(new ImgCropLayer(62, 62));
  // Stage 3: 20 -> 30 bands, max-pool, ReLU, crop to 18x18.
  layers.add(new ConvolutionLayer(5, 5, 20, 30).set(i -> 1e-8 * (Math.random() - 0.5)));
  layers.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
  layers.add(new ReLuActivationLayer());
  layers.add(new ImgCropLayer(18, 18));
  // Stage 4: 30 -> 40 bands, 4x4 average-pool, ReLU, crop to 4x4.
  layers.add(new ConvolutionLayer(3, 3, 30, 40).set(i -> 1e-8 * (Math.random() - 0.5)));
  layers.add(new PoolingLayer().setWindowX(4).setWindowY(4).setMode(PoolingLayer.PoolingMode.Avg));
  layers.add(new ReLuActivationLayer());
  layers.add(new ImgCropLayer(4, 4));
  // Classifier head: per-band bias, fully-connected 4x4x40 -> 100, softmax.
  layers.add(new ImgBandBiasLayer(40));
  layers.add(new FullyConnectedLayer(new int[] { 4, 4, 40 }, new int[] { 100 }).set(() -> 0.001 * (Math.random() - 0.45)));
  layers.add(new SoftmaxActivationLayer());
  return layers.toArray(new Layer[0]);
}
Example usage of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the MindsEye project by SimiaCryptus: class ImgTileSubnetLayerTest, method getLayer.
@Nonnull
@Override
public Layer getLayer(final int[][] inputSize, Random random) {
  // Randomly-initialized 3x3 single-band convolution used as the per-tile subnetwork.
  final ConvolutionLayer inner = new ConvolutionLayer(3, 3, 1, 1).set(this::random);
  final ImgTileSubnetLayer tiled = new ImgTileSubnetLayer(inner, 3, 3, 2, 2);
  // The tile-subnet layer presumably retains its own reference to the
  // subnetwork, so release ours — confirm against ImgTileSubnetLayer's ctor.
  inner.freeRef();
  return tiled;
}
Example usage of com.simiacryptus.mindseye.layers.cudnn.ConvolutionLayer in the MindsEye project by SimiaCryptus: class ArtistryUtil, method gram.
/**
 * Appends a Gramian-feature pipeline to the given network: subtracts the
 * mean, applies the PCA transform as a convolution, then computes the
 * Gramian of the resulting bands.
 *
 * @param network      the network to extend (also returned)
 * @param mean         per-band mean tensor; its band count sets the input bands
 * @param pcaTransform PCA kernel tensor (width x height x bands)
 * @return the same pipeline network, extended
 */
@Nonnull
public static PipelineNetwork gram(final PipelineNetwork network, Tensor mean, Tensor pcaTransform) {
  final int[] kernelDims = pcaTransform.getDimensions();
  final int kernelWidth = kernelDims[0];
  final int kernelHeight = kernelDims[1];
  final int inputBands = mean.getDimensions()[2];
  // Output band count is inferred from the PCA kernel's total bands.
  final int outputBands = kernelDims[2] / inputBands;
  // Center the input by subtracting the mean (bias with the negated mean).
  network.wrap(new ImgBandBiasLayer(mean.scale(-1)));
  // Project through the PCA transform expressed as a convolution.
  network.wrap(new ConvolutionLayer(kernelWidth, kernelHeight, inputBands, outputBands).set(pcaTransform));
  network.wrap(new GramianLayer());
  return network;
}
End of aggregated usage examples.