Use of org.encog.neural.error.LinearErrorFunction in project shifu by ShifuML.
From class AbstractNNWorker, method initGradient:
@SuppressWarnings("unchecked")
private void initGradient(FloatMLDataSet training, FloatMLDataSet testing, double[] weights, boolean isCrossOver) {
    int numLayers = (Integer) this.validParams.get(CommonConstants.NUM_HIDDEN_LAYERS);
    List<String> actFunc = (List<String>) this.validParams.get(CommonConstants.ACTIVATION_FUNC);
    List<Integer> hiddenNodeList = (List<Integer>) this.validParams.get(CommonConstants.NUM_HIDDEN_NODES);
    String outputActivationFunc = (String) validParams.get(CommonConstants.OUTPUT_ACTIVATION_FUNC);
    BasicNetwork network = DTrainUtils.generateNetwork(this.featureInputsCnt, this.outputNodeCount, numLayers,
            actFunc, hiddenNodeList, false, this.dropoutRate, this.wgtInit,
            CommonUtils.isLinearTarget(modelConfig, columnConfigList), outputActivationFunc);
    // use the weights received from the master
    network.getFlat().setWeights(weights);
    FlatNetwork flat = network.getFlat();
    // flat-spot fix copied from Encog's Propagation: add a small constant to the
    // sigmoid derivative so saturated units still receive a usable gradient
    double[] flatSpot = new double[flat.getActivationFunctions().length];
    for (int i = 0; i < flat.getActivationFunctions().length; i++) {
        flatSpot[i] = flat.getActivationFunctions()[i] instanceof ActivationSigmoid ? 0.1 : 0.0;
    }
    LOG.info("Gradient computing thread count is {}.", modelConfig.getTrain().getWorkerThreadCount());
    this.gradient = new ParallelGradient((FloatFlatNetwork) flat, training, testing, flatSpot,
            new LinearErrorFunction(), isCrossOver, modelConfig.getTrain().getWorkerThreadCount(),
            this.lossStr, this.batchs);
}
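The flat-spot constant exists because a saturated sigmoid has a near-zero derivative, which stalls backpropagation. Below is a minimal standalone sketch of the effect; the class name and the 6.0 pre-activation value are illustrative (not from shifu), while derivativeFunction(b, a) is Encog's real activation API:

import org.encog.engine.network.activation.ActivationSigmoid;

public class FlatSpotDemo {
    public static void main(String[] args) {
        ActivationSigmoid sigmoid = new ActivationSigmoid();
        // a strongly saturated pre-activation sum: sigmoid output is close to 1
        double sum = 6.0;
        double output = 1.0 / (1.0 + Math.exp(-sum));
        // for sigmoid, derivativeFunction(b, a) returns a * (1 - a),
        // which is nearly zero here, so the weight update would stall
        double derivative = sigmoid.derivativeFunction(sum, output);
        double flatSpot = 0.1; // the same constant chosen in initGradient above
        System.out.printf("plain derivative: %.6f%n", derivative);
        System.out.printf("with flat spot:   %.6f%n", derivative + flatSpot);
    }
}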
A second use of org.encog.neural.error.LinearErrorFunction in project shifu.
From class DTrainTest, method initGradient:
public Gradient initGradient(MLDataSet training) {
    FlatNetwork flat = network.getFlat().clone();
    // flat-spot fix copied from Encog's Propagation: add 0.1 to the sigmoid
    // derivative so saturated units still receive a usable gradient
    double[] flatSpot = new double[flat.getActivationFunctions().length];
    for (int i = 0; i < flat.getActivationFunctions().length; i++) {
        final ActivationFunction af = flat.getActivationFunctions()[i];
        if (af instanceof ActivationSigmoid) {
            flatSpot[i] = 0.1;
        } else {
            flatSpot[i] = 0.0;
        }
    }
    return new Gradient(flat, training.openAdditional(), training, flatSpot, new LinearErrorFunction(), false);
}
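Note the training.openAdditional() argument: Encog's MLDataSet interface exposes openAdditional() to create an extra, independently-cursored view over the same records, which appears to be why the Gradient constructor above receives both the view and the original set. A minimal sketch of the pattern, using Encog's real BasicMLDataSet API with hypothetical sample data:

import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;

public class OpenAdditionalDemo {
    public static void main(String[] args) {
        double[][] input = { { 0.0, 1.0 }, { 1.0, 0.0 } }; // hypothetical inputs
        double[][] ideal = { { 1.0 }, { 0.0 } };           // hypothetical targets
        MLDataSet training = new BasicMLDataSet(input, ideal);
        // the extra view iterates the same records with its own cursor,
        // so worker threads do not interfere with the original set
        MLDataSet extraView = training.openAdditional();
        System.out.println("records visible through extra view: " + extraView.getRecordCount());
    }
}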