Use of com.simiacryptus.mindseye.layers.java.FullyConnectedLayer in project MindsEye by SimiaCryptus.
From the class MnistTestBase, method buildModel:
/**
 * Build model dag network.
 *
 * @param log the log
 * @return the dag network
 */
public DAGNetwork buildModel(@Nonnull final NotebookOutput log) {
  log.h1("Model");
  log.p("This is a very simple model that performs basic logistic regression. " +
      "It is expected to be trainable to about 91% accuracy on MNIST.");
  return log.code(() -> {
    @Nonnull final PipelineNetwork network = new PipelineNetwork();
    network.add(new BiasLayer(28, 28, 1));
    network.add(new FullyConnectedLayer(new int[] { 28, 28, 1 }, new int[] { 10 })
        .set(() -> 0.001 * (Math.random() - 0.45)));
    network.add(new SoftmaxActivationLayer());
    return network;
  });
}
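The initializer lambda above, () -> 0.001 * (Math.random() - 0.45), draws each weight uniformly from [-0.00045, 0.00055), so weights start small with a slight positive bias. A minimal standalone sketch (plain Java, independent of MindsEye) that samples the same distribution:

// Standalone sketch: sample the weight initializer used in buildModel above
// and print its empirical range and mean.
import java.util.DoubleSummaryStatistics;
import java.util.function.DoubleSupplier;
import java.util.stream.DoubleStream;

public class InitializerSketch {
  public static void main(String[] args) {
    DoubleSupplier init = () -> 0.001 * (Math.random() - 0.45);
    DoubleSummaryStatistics stats =
        DoubleStream.generate(init).limit(1_000_000).summaryStatistics();
    // Expected: min near -0.00045, max near 0.00055, mean near 0.00005.
    System.out.printf("min=%.6f max=%.6f mean=%.6f%n",
        stats.getMin(), stats.getMax(), stats.getAverage());
  }
}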
Use of com.simiacryptus.mindseye.layers.java.FullyConnectedLayer in project MindsEye by SimiaCryptus.
From the class SigmoidTreeNetwork, method nextPhase:
@Override
public void nextPhase() {
  switch (getMode()) {
    case Linear: {
      head = null;
      @Nonnull final FullyConnectedLayer alpha = (FullyConnectedLayer) this.alpha;
      // alpha.weights.scale(2);
      gate = new FullyConnectedLayer(alpha.inputDims, multigate ? alpha.outputDims : new int[] { 1 });
      gateBias = new BiasLayer(alpha.inputDims);
      mode = NodeMode.Fuzzy;
      break;
    }
    case Fuzzy: {
      head = null;
      @Nullable final FullyConnectedLayer alpha = (FullyConnectedLayer) this.alpha;
      @Nonnull final BiasLayer alphaBias = (BiasLayer) this.alphaBias;
      beta = new FullyConnectedLayer(alpha.inputDims, alpha.outputDims)
          .set(() -> initialFuzzyCoeff * (FastRandom.INSTANCE.random() - 0.5));
      betaBias = new BiasLayer(alphaBias.bias.length);
      copyState(alpha, beta);
      copyState(alphaBias, betaBias);
      mode = NodeMode.Bilinear;
      if (isSkipFuzzy()) {
        nextPhase();
      }
      break;
    }
    case Bilinear:
      head = null;
      alpha = new SigmoidTreeNetwork(alpha, alphaBias);
      if (skipChildStage()) {
        ((SigmoidTreeNetwork) alpha).nextPhase();
      }
      beta = new SigmoidTreeNetwork(beta, betaBias);
      if (skipChildStage()) {
        ((SigmoidTreeNetwork) beta).nextPhase();
      }
      mode = NodeMode.Final;
      break;
    case Final:
      @Nonnull final SigmoidTreeNetwork alpha = (SigmoidTreeNetwork) this.alpha;
      @Nonnull final SigmoidTreeNetwork beta = (SigmoidTreeNetwork) this.beta;
      alpha.nextPhase();
      beta.nextPhase();
      break;
  }
}
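nextPhase() walks each node through a fixed sequence of modes: Linear (add a gate) -> Fuzzy (clone alpha into beta) -> Bilinear (wrap alpha and beta in child trees) -> Final (delegate to the children, which recurse). A simplified, hypothetical sketch of just the mode transitions, omitting the layer rebuilding the real method performs:

// Hypothetical sketch of the SigmoidTreeNetwork phase progression only;
// field names and flags mirror the method above but layer work is omitted.
enum NodeMode { Linear, Fuzzy, Bilinear, Final }

class PhaseSketch {
  NodeMode mode = NodeMode.Linear;
  boolean skipFuzzy = false; // stands in for isSkipFuzzy()

  void nextPhase() {
    switch (mode) {
      case Linear:
        mode = NodeMode.Fuzzy;      // gate and gateBias are created here
        break;
      case Fuzzy:
        mode = NodeMode.Bilinear;   // beta/betaBias cloned from alpha/alphaBias
        if (skipFuzzy) nextPhase(); // immediately run the Bilinear transition too
        break;
      case Bilinear:
        mode = NodeMode.Final;      // alpha and beta become child subtrees
        break;
      case Final:
        break;                      // growth continues inside the children
    }
  }
}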
Use of com.simiacryptus.mindseye.layers.java.FullyConnectedLayer in project MindsEye by SimiaCryptus.
From the class RecursiveSubspaceTest, method buildModel:
@Override
public DAGNetwork buildModel(@Nonnull NotebookOutput log) {
  log.h3("Model");
  log.p("We use a multi-level convolution network");
  return log.code(() -> {
    @Nonnull final PipelineNetwork network = new PipelineNetwork();
    double weight = 1e-3;
    @Nonnull DoubleSupplier init = () -> weight * (Math.random() - 0.5);
    network.add(new ConvolutionLayer(3, 3, 1, 5).set(init));
    network.add(new ImgBandBiasLayer(5));
    network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
    network.add(new ActivationLayer(ActivationLayer.Mode.RELU));
    network.add(newNormalizationLayer());
    network.add(new ConvolutionLayer(3, 3, 5, 5).set(init));
    network.add(new ImgBandBiasLayer(5));
    network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
    network.add(new ActivationLayer(ActivationLayer.Mode.RELU));
    network.add(newNormalizationLayer());
    network.add(new BiasLayer(7, 7, 5));
    network.add(new FullyConnectedLayer(new int[] { 7, 7, 5 }, new int[] { 10 }).set(init));
    network.add(new SoftmaxActivationLayer());
    return network;
  });
}
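The tail dimensions follow from the pooling: assuming the standard 28x28x1 MNIST input, size-preserving 3x3 convolutions, and the default 2x2 pooling window, each PoolingLayer halves the spatial size, so two pools reduce 28 to 7 and the final layers see 7x7x5 activations. A small sanity-check sketch of that arithmetic:

// Sanity-check sketch for the 7x7x5 tail of the network above;
// the 28x28 input and 2x2 pooling window are assumptions stated in the text.
public class DimsSketch {
  public static void main(String[] args) {
    int size = 28;  // assumed MNIST input width/height
    size /= 2;      // after first PoolingLayer -> 14
    size /= 2;      // after second PoolingLayer -> 7
    System.out.println(size + "x" + size + "x5 feeds FullyConnectedLayer({7,7,5},{10})");
  }
}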
Use of com.simiacryptus.mindseye.layers.java.FullyConnectedLayer in project MindsEye by SimiaCryptus.
From the class ConvPipelineTest, method buildList_1:
/**
 * Builds layer list 1.
 *
 * @return the layer array
 */
public static Layer[] buildList_1() {
  @Nonnull final ArrayList<Layer> network = new ArrayList<Layer>();
  network.add(new ConvolutionLayer(3, 3, 3, 10).set(i -> 1e-8 * (Math.random() - 0.5)));
  network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
  network.add(new ReLuActivationLayer());
  network.add(new ImgCropLayer(126, 126));
  network.add(new ConvolutionLayer(3, 3, 10, 20).set(i -> 1e-8 * (Math.random() - 0.5)));
  network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
  network.add(new ReLuActivationLayer());
  network.add(new ImgCropLayer(62, 62));
  network.add(new ConvolutionLayer(5, 5, 20, 30).set(i -> 1e-8 * (Math.random() - 0.5)));
  network.add(new PoolingLayer().setMode(PoolingLayer.PoolingMode.Max));
  network.add(new ReLuActivationLayer());
  network.add(new ImgCropLayer(18, 18));
  network.add(new ConvolutionLayer(3, 3, 30, 40).set(i -> 1e-8 * (Math.random() - 0.5)));
  network.add(new PoolingLayer().setWindowX(4).setWindowY(4).setMode(PoolingLayer.PoolingMode.Avg));
  network.add(new ReLuActivationLayer());
  network.add(new ImgCropLayer(4, 4));
  network.add(new ImgBandBiasLayer(40));
  network.add(new FullyConnectedLayer(new int[] { 4, 4, 40 }, new int[] { 100 })
      .set(() -> 0.001 * (Math.random() - 0.45)));
  network.add(new SoftmaxActivationLayer());
  return network.toArray(new Layer[] {});
}
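A hedged usage sketch: the Layer[] returned here can be wired into a PipelineNetwork with the same add(...) call used in the other examples on this page; any assembly step beyond network.add(...) is an assumption:

// Assumed usage of buildList_1(): add each layer to a PipelineNetwork in order.
final PipelineNetwork network = new PipelineNetwork();
for (final Layer layer : ConvPipelineTest.buildList_1()) {
  network.add(layer);
}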
Use of com.simiacryptus.mindseye.layers.java.FullyConnectedLayer in project MindsEye by SimiaCryptus.
From the class DeepLinear, method addLayer:
@Override
public void addLayer(@Nonnull final PipelineNetwork network, @Nonnull final int[] in, @Nonnull final int[] dims) {
  network.add(new FullyConnectedLayer(in, dims).set(this::random));
  network.add(new BiasLayer(dims));
  network.add(getActivation());
}
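A hypothetical caller sketch, stacking two fully-connected blocks via addLayer(...); the no-argument DeepLinear construction and the dimension sequence are illustrative assumptions, not taken from the source:

// Hypothetical caller: each addLayer(...) appends FullyConnected + Bias + activation.
final PipelineNetwork network = new PipelineNetwork();
final DeepLinear test = new DeepLinear(); // assumed no-arg construction
test.addLayer(network, new int[] { 28, 28, 1 }, new int[] { 100 }); // input -> hidden
test.addLayer(network, new int[] { 100 }, new int[] { 10 });        // hidden -> logits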