Example usage of org.encog.engine.network.activation.ActivationSIN in the Shifu project (ShifuML): the buildNetwork method of the NNTrainer class.
/**
 * Builds the Encog {@code BasicNetwork} from the model configuration: a linear
 * input layer sized to the training set, {@code NUM_HIDDEN_LAYERS} hidden layers
 * whose activation functions and node counts come from the config lists, and a
 * sigmoid output layer sized to the ideal (target) vector.
 *
 * <p>After the structure is finalized, weights are either randomized
 * ({@code network.reset()}) or loaded from a fixed initial input, depending on
 * {@code modelConfig.isFixInitialInput()}.
 *
 * @throws IllegalArgumentException if the hidden-layer count does not match the
 *         sizes of the activation-function or hidden-node lists (or either list
 *         is missing while hidden layers are requested)
 */
@SuppressWarnings("unchecked")
public void buildNetwork() {
    network = new BasicNetwork();
    // Input layer: linear activation with bias, one neuron per input column.
    network.addLayer(new BasicLayer(new ActivationLinear(), true, trainSet.getInputSize()));
    int numLayers = (Integer) modelConfig.getParams().get(CommonConstants.NUM_HIDDEN_LAYERS);
    List<String> actFunc = (List<String>) modelConfig.getParams().get(CommonConstants.ACTIVATION_FUNC);
    List<Integer> hiddenNodeList = (List<Integer>) modelConfig.getParams().get(CommonConstants.NUM_HIDDEN_NODES);
    // Each hidden layer needs exactly one activation function and one node count;
    // also guard against a missing (null) list, which previously caused an NPE.
    if (numLayers != 0
            && (actFunc == null || hiddenNodeList == null
                || numLayers != actFunc.size() || numLayers != hiddenNodeList.size())) {
        throw new IllegalArgumentException(
                "The number of hidden layers does not match the size of the activation function list or the hidden node list (or a list is missing).");
    }
    if (toLoggingProcess) {
        LOG.info(" - total " + numLayers + " layers, each layers are: " + Arrays.toString(hiddenNodeList.toArray()) + " the activation function are: " + Arrays.toString(actFunc.toArray()));
    }
    for (int i = 0; i < numLayers; i++) {
        addHiddenLayer(actFunc.get(i), hiddenNodeList.get(i));
    }
    // Output layer: sigmoid without bias, sized to the ideal (target) vector.
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, trainSet.getIdealSize()));
    network.getStructure().finalizeStructure();
    if (!modelConfig.isFixInitialInput()) {
        // Random weight initialization.
        network.reset();
    } else {
        // Fixed initialization: count the weights (including bias neurons via
        // getLayerTotalNeuronCount) so the loader knows how many values to read.
        int numWeight = 0;
        for (int i = 0; i < network.getLayerCount() - 1; i++) {
            numWeight = numWeight + network.getLayerTotalNeuronCount(i) * network.getLayerNeuronCount(i + 1);
        }
        LOG.info(" - You have " + numWeight + " weights to be initialize");
        loadWeightsInput(numWeight);
    }
}

/**
 * Appends one biased hidden layer whose activation is selected by {@code func}
 * (case-insensitive: linear, sigmoid, tanh, log, sin). Unknown or null names
 * fall back to sigmoid with a warning. Literal-first {@code equalsIgnoreCase}
 * keeps every comparison null-safe.
 */
private void addHiddenLayer(String func, Integer numHiddenNode) {
    if ("linear".equalsIgnoreCase(func)) {
        network.addLayer(new BasicLayer(new ActivationLinear(), true, numHiddenNode));
    } else if ("sigmoid".equalsIgnoreCase(func)) {
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, numHiddenNode));
    } else if ("tanh".equalsIgnoreCase(func)) {
        network.addLayer(new BasicLayer(new ActivationTANH(), true, numHiddenNode));
    } else if ("log".equalsIgnoreCase(func)) {
        network.addLayer(new BasicLayer(new ActivationLOG(), true, numHiddenNode));
    } else if ("sin".equalsIgnoreCase(func)) {
        network.addLayer(new BasicLayer(new ActivationSIN(), true, numHiddenNode));
    } else {
        // Degraded configuration, not normal operation — log at warn level.
        LOG.warn("Unsupported activation function: " + func + " !! Set this layer activation function to be Sigmoid ");
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, numHiddenNode));
    }
}
Aggregations