Use of org.encog.neural.networks.BasicNetwork in project shifu by ShifuML.
The class AbstractNNWorker, method initGradient.
@SuppressWarnings("unchecked")
private void initGradient(FloatMLDataSet training, FloatMLDataSet testing, double[] weights, boolean isCrossOver) {
    int numLayers = (Integer) this.validParams.get(CommonConstants.NUM_HIDDEN_LAYERS);
    List<String> actFunc = (List<String>) this.validParams.get(CommonConstants.ACTIVATION_FUNC);
    List<Integer> hiddenNodeList = (List<Integer>) this.validParams.get(CommonConstants.NUM_HIDDEN_NODES);
    String outputActivationFunc = (String) validParams.get(CommonConstants.OUTPUT_ACTIVATION_FUNC);
    BasicNetwork network = DTrainUtils.generateNetwork(this.featureInputsCnt, this.outputNodeCount, numLayers,
            actFunc, hiddenNodeList, false, this.dropoutRate, this.wgtInit,
            CommonUtils.isLinearTarget(modelConfig, columnConfigList), outputActivationFunc);
    // use the weights from master
    network.getFlat().setWeights(weights);
    FlatNetwork flat = network.getFlat();
    // copy Propagation from encog, fix flat spot problem
    double[] flatSpot = new double[flat.getActivationFunctions().length];
    for (int i = 0; i < flat.getActivationFunctions().length; i++) {
        flatSpot[i] = flat.getActivationFunctions()[i] instanceof ActivationSigmoid ? 0.1 : 0.0;
    }
    LOG.info("Gradient computing thread count is {}.", modelConfig.getTrain().getWorkerThreadCount());
    this.gradient = new ParallelGradient((FloatFlatNetwork) flat, training, testing, flatSpot,
            new LinearErrorFunction(), isCrossOver, modelConfig.getTrain().getWorkerThreadCount(),
            this.lossStr, this.batchs);
}
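The flat-spot handling above follows Encog's own Propagation trainers: a sigmoid's derivative approaches zero when the unit saturates, so a small constant (0.1) is added per sigmoid layer to keep the gradient from stalling. Below is a minimal standalone sketch of the same inspection on a plain Encog BasicNetwork; the 10-5-1 layout is an illustrative assumption and not part of Shifu.

import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.neural.flat.FlatNetwork;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;

public class FlatSpotSketch {
    public static void main(String[] args) {
        // illustrative three-layer network; any BasicNetwork works the same way
        BasicNetwork network = new BasicNetwork();
        network.addLayer(new BasicLayer(null, true, 10));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 5));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
        network.getStructure().finalizeStructure();
        network.reset();

        FlatNetwork flat = network.getFlat();
        // one flat-spot constant per layer: 0.1 for sigmoid layers, 0 otherwise
        double[] flatSpot = new double[flat.getActivationFunctions().length];
        for (int i = 0; i < flatSpot.length; i++) {
            flatSpot[i] = flat.getActivationFunctions()[i] instanceof ActivationSigmoid ? 0.1 : 0.0;
        }
        System.out.println("flat-spot constants computed for " + flatSpot.length + " layers");
    }
}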
Use of org.encog.neural.networks.BasicNetwork in project shifu by ShifuML.
The class NNModelSpecTest, method testModelTraverse.
// @Test
public void testModelTraverse() {
    BasicML basicML = BasicML.class.cast(
            EncogDirectoryPersistence.loadObject(new File("src/test/resources/model/model0.nn")));
    BasicNetwork basicNetwork = (BasicNetwork) basicML;
    FlatNetwork flatNetwork = basicNetwork.getFlat();
    BasicML extendedBasicML = BasicML.class.cast(
            EncogDirectoryPersistence.loadObject(new File("src/test/resources/model/model1.nn")));
    BasicNetwork extendedBasicNetwork = (BasicNetwork) extendedBasicML;
    FlatNetwork extendedFlatNetwork = extendedBasicNetwork.getFlat();
    for (int layer = flatNetwork.getLayerIndex().length - 1; layer > 0; layer--) {
        int layerOutputCnt = flatNetwork.getLayerFeedCounts()[layer - 1];
        int layerInputCnt = flatNetwork.getLayerCounts()[layer];
        System.out.println("Weight index for layer " + (flatNetwork.getLayerIndex().length - layer));
        int extendedLayerInputCnt = extendedFlatNetwork.getLayerCounts()[layer];
        int indexPos = flatNetwork.getWeightIndex()[layer - 1];
        int extendedIndexPos = extendedFlatNetwork.getWeightIndex()[layer - 1];
        for (int i = 0; i < layerOutputCnt; i++) {
            for (int j = 0; j < layerInputCnt; j++) {
                int weightIndex = indexPos + (i * layerInputCnt) + j;
                int extendedWeightIndex = extendedIndexPos + (i * extendedLayerInputCnt) + j;
                if (j == layerInputCnt - 1) {
                    // move bias to end
                    extendedWeightIndex = extendedIndexPos + (i * extendedLayerInputCnt) + (extendedLayerInputCnt - 1);
                }
                System.out.println(weightIndex + " --> " + extendedWeightIndex);
            }
        }
    }
}
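The traversal depends on how Encog's FlatNetwork lays out its weights: getWeightIndex() gives the starting offset of each layer's weight block in the flat weight array, getLayerCounts() includes the bias neuron while getLayerFeedCounts() does not, and flat layers are indexed from the output layer back toward the input layer. A minimal sketch that prints those arrays for a freshly built network; the 2-3-1 layout is an illustrative assumption.

import java.util.Arrays;

import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.neural.flat.FlatNetwork;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;

public class FlatLayoutSketch {
    public static void main(String[] args) {
        // small 2-3-1 network just to expose the flat weight layout
        BasicNetwork network = new BasicNetwork();
        network.addLayer(new BasicLayer(null, true, 2));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
        network.getStructure().finalizeStructure();
        network.reset();

        FlatNetwork flat = network.getFlat();
        // layerCounts includes bias neurons, layerFeedCounts does not;
        // weightIndex[k] is the offset of layer k's weight block in getWeights()
        System.out.println("layerCounts     = " + Arrays.toString(flat.getLayerCounts()));
        System.out.println("layerFeedCounts = " + Arrays.toString(flat.getLayerFeedCounts()));
        System.out.println("weightIndex     = " + Arrays.toString(flat.getWeightIndex()));
        System.out.println("total weights   = " + flat.getWeights().length);
    }
}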
Use of org.encog.neural.networks.BasicNetwork in project shifu by ShifuML.
The class NNModelSpecTest, method testModelFitIn.
@Test
public void testModelFitIn() {
    PersistorRegistry.getInstance().add(new PersistBasicFloatNetwork());
    BasicML basicML = BasicML.class.cast(
            EncogDirectoryPersistence.loadObject(new File("src/test/resources/model/model5.nn")));
    BasicNetwork basicNetwork = (BasicNetwork) basicML;
    FlatNetwork flatNetwork = basicNetwork.getFlat();
    BasicML extendedBasicML = BasicML.class.cast(
            EncogDirectoryPersistence.loadObject(new File("src/test/resources/model/model6.nn")));
    BasicNetwork extendedBasicNetwork = (BasicNetwork) extendedBasicML;
    FlatNetwork extendedFlatNetwork = extendedBasicNetwork.getFlat();
    NNMaster master = new NNMaster();
    Set<Integer> fixedWeightIndexSet = master.fitExistingModelIn(flatNetwork, extendedFlatNetwork,
            Arrays.asList(new Integer[] { 1, 2, 3 }));
    Assert.assertEquals(fixedWeightIndexSet.size(), 931);
    fixedWeightIndexSet = master.fitExistingModelIn(flatNetwork, extendedFlatNetwork,
            Arrays.asList(new Integer[] { 1, 2, 3 }), false);
    Assert.assertEquals(fixedWeightIndexSet.size(), 910);
}
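Shifu's PersistBasicFloatNetwork must be registered with Encog's PersistorRegistry before its float-network models can be deserialized, and fitExistingModelIn then maps the existing network's weights into the extended network, returning (judging by the variable name) the weight indices to hold fixed. The plain-Encog half of this, loading a persisted network and checking the flat weight array that those indices point into, can be sketched as below; the model.nn path is hypothetical.

import java.io.File;

import org.encog.neural.networks.BasicNetwork;
import org.encog.persist.EncogDirectoryPersistence;

public class LoadAndCountSketch {
    public static void main(String[] args) {
        // hypothetical path; any network saved with EncogDirectoryPersistence.saveObject works
        BasicNetwork network = (BasicNetwork) EncogDirectoryPersistence.loadObject(new File("model.nn"));
        // the flat weight array is what the fixed-weight indices refer to
        System.out.println("weight count = " + network.getFlat().getWeights().length);
    }
}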
Use of org.encog.neural.networks.BasicNetwork in project shifu by ShifuML.
The class NNTrainerTest, method testXorOperation.
// @Test
public void testXorOperation() throws IOException {
    ModelConfig config = ModelConfig.createInitModelConfig(".", ALGORITHM.NN, ".", false);
    config.getTrain().setBaggingSampleRate(1.0);
    config.getTrain().setValidSetRate(0.1);
    config.getTrain().getParams().put("Propagation", "Q");
    config.getTrain().getParams().put("NumHiddenLayers", 1);
    config.getTrain().getParams().put("LearningRate", 1);
    List<Integer> nodes = new ArrayList<Integer>();
    nodes.add(5);
    List<String> func = new ArrayList<String>();
    func.add("tanh");
    config.getTrain().getParams().put("NumHiddenNodes", nodes);
    config.getTrain().getParams().put("ActivationFunc", func);
    config.getTrain().setNumTrainEpochs(100);
    NNTrainer trainer = new NNTrainer(config, 0, false);
    trainer.setTrainSet(xor_Trainset);
    trainer.setValidSet(xor_Validset);
    trainer.train();
    BasicNetwork bn = trainer.getNetwork();
    boolean[] cases = { true, false, false, true };
    int i = 0;
    for (MLDataPair data : xor_Validset) {
        double[] score = bn.compute(data.getInput()).getData();
        Assert.assertEquals(score[0] * 1000 < 500, cases[i]);
        i++;
    }
    Assert.assertEquals(bn.getLayerCount(), (Integer) (config.getTrain().getParams().get("NumHiddenLayers")) + 2);
}
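For comparison, the same XOR sanity check can be reproduced with Encog alone, without Shifu's NNTrainer or ModelConfig. This is a minimal sketch using ResilientPropagation rather than the trainer selected by the "Propagation" parameter above; the 2-5-1 layout, tanh activations, and stopping criteria are arbitrary illustrative choices.

import org.encog.engine.network.activation.ActivationTANH;
import org.encog.ml.data.MLDataPair;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;

public class XorSketch {
    public static void main(String[] args) {
        double[][] input = { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
        double[][] ideal = { { 0 }, { 1 }, { 1 }, { 0 } };
        BasicMLDataSet trainSet = new BasicMLDataSet(input, ideal);

        BasicNetwork network = new BasicNetwork();
        network.addLayer(new BasicLayer(null, true, 2));
        network.addLayer(new BasicLayer(new ActivationTANH(), true, 5));
        network.addLayer(new BasicLayer(new ActivationTANH(), false, 1));
        network.getStructure().finalizeStructure();
        network.reset();

        // iterate until the error drops below 0.01 or an epoch cap is hit
        ResilientPropagation train = new ResilientPropagation(network, trainSet);
        int epoch = 0;
        do {
            train.iteration();
            epoch++;
        } while (train.getError() > 0.01 && epoch < 1000);
        train.finishTraining();

        // scores near 0 or 1 should match the XOR truth table
        for (MLDataPair pair : trainSet) {
            double score = network.compute(pair.getInput()).getData()[0];
            System.out.println(pair.getInput() + " => " + score);
        }
    }
}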
Use of org.encog.neural.networks.BasicNetwork in project shifu by ShifuML.
The class NNTrainerTest, method setUp.
@BeforeClass
public void setUp() {
    trainSet = new BasicMLDataSet();
    network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationLinear(), true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
    network.addLayer(new BasicLayer(new ActivationLOG(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSIN(), true, 3));
    network.addLayer(new BasicLayer(new ActivationTANH(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();
}
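Each BasicLayer is constructed with an activation function, a flag for whether the layer carries a bias neuron, and the neuron count; getStructure().finalizeStructure() must be called after the last addLayer, and reset() randomizes the initial weights. A minimal sketch of pushing one sample through such a freshly built network; the 2-4-1 layout and the input values are illustrative assumptions.

import org.encog.engine.network.activation.ActivationLinear;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLData;
import org.encog.ml.data.basic.BasicMLData;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;

public class ForwardPassSketch {
    public static void main(String[] args) {
        BasicNetwork network = new BasicNetwork();
        // args: activation function, whether the layer has a bias neuron, neuron count
        network.addLayer(new BasicLayer(new ActivationLinear(), true, 2));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
        network.getStructure().finalizeStructure(); // must follow the last addLayer call
        network.reset();                            // random initial weights

        MLData output = network.compute(new BasicMLData(new double[] { 0.5, 0.5 }));
        System.out.println("untrained output = " + output.getData()[0]);
    }
}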