Use of org.encog.neural.flat.FlatLayer in the shifu project by ShifuML:
the finalizeStruct method of class FloatNeuralStructure.
/**
 * Builds the synapse and layer structure. Call this after you are done adding
 * layers to the network, or after changing the network's logic property.
 *
 * @throws NeuralNetworkError if fewer than two layers have been added
 */
public void finalizeStruct() {
    final int layerCount = this.getLayers().size();
    if (layerCount < 2) {
        throw new NeuralNetworkError("There must be at least two layers before the structure is finalized.");
    }

    // Flatten the layer list into the array consumed by the flat network.
    final FlatLayer[] flatLayers = new FlatLayer[layerCount];
    for (int idx = 0; idx < layerCount; idx++) {
        final BasicLayer basic = (BasicLayer) this.getLayers().get(idx);
        // Layers added without an explicit activation default to linear (identity).
        if (basic.getActivation() == null) {
            basic.setActivation(new ActivationLinear());
        }
        flatLayers[idx] = basic;
    }

    // Hand the flattened layers to a FloatFlatNetwork (dropout-aware variant),
    // then run the standard finalize/limit sequence; the layer list is cleared
    // once the flat representation owns the structure.
    this.setFlat(new FloatFlatNetwork(flatLayers, true));
    finalizeLimit();
    this.getLayers().clear();
    enforceLimit();
}
Use of org.encog.neural.flat.FlatLayer in the shifu project by ShifuML:
the init method of class FloatFlatNetwork.
/**
 * Initializes this flat network from the given layers and records per-layer
 * dropout rates when dropout is enabled.
 *
 * @param layers  the layers to flatten into this network
 * @param dropout whether to allocate and populate the dropout-rate array;
 *                when false an empty array is installed instead
 */
private void init(FlatLayer[] layers, boolean dropout) {
    super.init(layers);

    final int layerCount = layers.length;
    // Without dropout, keep a zero-length array rather than null so callers
    // can read the rates unconditionally.
    this.setLayerDropoutRates(dropout ? new double[layerCount] : new double[0]);

    // NOTE(review): rate slot 0 is filled from the LAST element of `layers` —
    // presumably matching the flat network's reversed (output-first) layer
    // ordering, as in Encog's FlatNetwork; confirm against super.init.
    for (int pos = 0; pos < layerCount; pos++) {
        final FlatLayer source = layers[layerCount - 1 - pos];
        if (dropout && source instanceof BasicDropoutLayer) {
            this.getLayerDropoutRates()[pos] = ((BasicDropoutLayer) source).getDropout();
        }
    }
}
Aggregations