Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j, from the class TestDecayPolicies, method testLearningRatePolyDecaySingleLayer.
@Test
public void testLearningRatePolyDecaySingleLayer() {
    int iterations = 2;
    double lr = 1e-2;
    double power = 3;
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .learningRate(lr)
            .learningRateDecayPolicy(LearningRatePolicy.Poly)
            .lrPolicyPower(power)
            .iterations(iterations)
            .layer(new DenseLayer.Builder().nIn(nIn).nOut(nOut)
                    .updater(org.deeplearning4j.nn.conf.Updater.SGD).build())
            .build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    Layer layer = conf.getLayer().instantiate(conf, null, 0, params, true);
    Updater updater = UpdaterCreator.getUpdater(layer);
    Gradient gradientActual = new DefaultGradient();
    // nIn, nOut, weightGradient and biasGradient are fields defined elsewhere in the test class
    gradientActual.setGradientFor(DefaultParamInitializer.WEIGHT_KEY, weightGradient);
    gradientActual.setGradientFor(DefaultParamInitializer.BIAS_KEY, biasGradient);
    for (int i = 0; i < iterations; i++) {
        updater.update(layer, gradientActual, i, 1);
        double expectedLr = calcPolyDecay(lr, i, power, iterations);
        assertEquals(expectedLr, layer.conf().getLearningRateByParam("W"), 1e-4);
        assertEquals(expectedLr, layer.conf().getLearningRateByParam("b"), 1e-4);
    }
}
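The helper calcPolyDecay is defined elsewhere in the test class and is not shown here. A minimal sketch of what it computes, assuming the standard "poly" decay schedule (the learning rate decays polynomially from lr toward 0 as the iteration count approaches the maximum):

private static double calcPolyDecay(double lr, double iteration, double power, double maxIterations) {
    // poly schedule: lr * (1 - iteration / maxIterations)^power
    return lr * Math.pow(1.0 - iteration / maxIterations, power);
}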
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j, from the class TestGradientNormalization, method testL2ClippingPerParamType.
@Test
public void testL2ClippingPerParamType() {
    Nd4j.getRandom().setSeed(12345);
    double threshold = 3;
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .layer(new DenseLayer.Builder().nIn(10).nOut(20)
                    .updater(org.deeplearning4j.nn.conf.Updater.NONE)
                    .gradientNormalization(GradientNormalization.ClipL2PerParamType)
                    .gradientNormalizationThreshold(threshold).build())
            .build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    Layer layer = conf.getLayer().instantiate(conf, null, 0, params, true);
    Updater updater = UpdaterCreator.getUpdater(layer);
    INDArray weightGrad = Nd4j.rand(10, 20).muli(0.05);
    INDArray biasGrad = Nd4j.rand(1, 10).muli(10);
    INDArray weightGradCopy = weightGrad.dup();
    INDArray biasGradCopy = biasGrad.dup();
    Gradient gradient = new DefaultGradient();
    gradient.setGradientFor(DefaultParamInitializer.WEIGHT_KEY, weightGrad);
    gradient.setGradientFor(DefaultParamInitializer.BIAS_KEY, biasGrad);
    double weightL2 = weightGrad.norm2Number().doubleValue();
    double biasL2 = biasGrad.norm2Number().doubleValue();
    assertTrue(weightL2 < threshold);
    assertTrue(biasL2 > threshold);
    updater.update(layer, gradient, 0, 1);
    // weight norm2 < threshold -> no change
    assertEquals(weightGradCopy, weightGrad);
    // bias norm2 > threshold -> rescale
    assertNotEquals(biasGradCopy, biasGrad);
    double biasScalingFactor = threshold / biasL2;
    INDArray expectedBiasGrad = biasGradCopy.mul(biasScalingFactor);
    assertEquals(expectedBiasGrad, gradient.getGradientFor(DefaultParamInitializer.BIAS_KEY));
}
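The rule this test verifies is per-parameter-type L2 clipping: each gradient array is rescaled only when its own L2 norm exceeds the threshold, and is left untouched otherwise. A minimal sketch of that rule, assuming the Gradient interface's gradientForVariable() map (this is a sketch, not the updater's actual implementation):

for (INDArray g : gradient.gradientForVariable().values()) {
    double l2 = g.norm2Number().doubleValue();
    if (l2 > threshold) {
        // rescale in place so the clipped array's L2 norm equals the threshold
        g.muli(threshold / l2);
    }
}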
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j, from the class TestGradientNormalization, method testAbsValueClippingPerElement.
@Test
public void testAbsValueClippingPerElement() {
    Nd4j.getRandom().setSeed(12345);
    double threshold = 3;
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .layer(new DenseLayer.Builder().nIn(10).nOut(20)
                    .updater(org.deeplearning4j.nn.conf.Updater.NONE)
                    .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
                    .gradientNormalizationThreshold(threshold).build())
            .build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    Layer layer = conf.getLayer().instantiate(conf, null, 0, params, true);
    Updater updater = UpdaterCreator.getUpdater(layer);
    // gradients drawn from [-5, 5), so some elements exceed the +/-3 threshold
    INDArray weightGrad = Nd4j.rand(10, 20).muli(10).subi(5);
    INDArray biasGrad = Nd4j.rand(1, 10).muli(10).subi(5);
    INDArray weightGradCopy = weightGrad.dup();
    INDArray biasGradCopy = biasGrad.dup();
    Gradient gradient = new DefaultGradient();
    gradient.setGradientFor(DefaultParamInitializer.WEIGHT_KEY, weightGrad);
    gradient.setGradientFor(DefaultParamInitializer.BIAS_KEY, biasGrad);
    updater.update(layer, gradient, 0, 1);
    assertNotEquals(weightGradCopy, weightGrad);
    assertNotEquals(biasGradCopy, biasGrad);
    // expected result: every element clamped to [-threshold, threshold]
    INDArray expectedWeightGrad = weightGradCopy.dup();
    for (int i = 0; i < expectedWeightGrad.length(); i++) {
        double d = expectedWeightGrad.getDouble(i);
        if (d > threshold)
            expectedWeightGrad.putScalar(i, threshold);
        else if (d < -threshold)
            expectedWeightGrad.putScalar(i, -threshold);
    }
    INDArray expectedBiasGrad = biasGradCopy.dup();
    for (int i = 0; i < expectedBiasGrad.length(); i++) {
        double d = expectedBiasGrad.getDouble(i);
        if (d > threshold)
            expectedBiasGrad.putScalar(i, threshold);
        else if (d < -threshold)
            expectedBiasGrad.putScalar(i, -threshold);
    }
    assertEquals(expectedWeightGrad, gradient.getGradientFor(DefaultParamInitializer.WEIGHT_KEY));
    assertEquals(expectedBiasGrad, gradient.getGradientFor(DefaultParamInitializer.BIAS_KEY));
}
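The expected-value loops above implement an element-wise clamp to [-threshold, threshold]. The same clamp can be written without an explicit loop using ND4J's BooleanIndexing; a minimal sketch, assuming the replaceWhere(INDArray, Number, Condition) overload (not the updater's actual implementation):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.indexing.BooleanIndexing;
import org.nd4j.linalg.indexing.conditions.Conditions;

static void clipElementWise(INDArray grad, double threshold) {
    // replace every element above +threshold, then every element below -threshold, in place
    BooleanIndexing.replaceWhere(grad, threshold, Conditions.greaterThan(threshold));
    BooleanIndexing.replaceWhere(grad, -threshold, Conditions.lessThan(-threshold));
}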
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j, from the class TestGradientNormalization, method testRenormalizatonPerLayer.
@Test
public void testRenormalizatonPerLayer() {
    Nd4j.getRandom().setSeed(12345);
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .layer(new DenseLayer.Builder().nIn(10).nOut(20)
                    .updater(org.deeplearning4j.nn.conf.Updater.NONE)
                    .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).build())
            .build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    Layer layer = conf.getLayer().instantiate(conf, null, 0, params, true);
    Updater updater = UpdaterCreator.getUpdater(layer);
    INDArray weightGrad = Nd4j.rand(10, 20);
    INDArray biasGrad = Nd4j.rand(1, 10);
    INDArray weightGradCopy = weightGrad.dup();
    INDArray biasGradCopy = biasGrad.dup();
    Gradient gradient = new DefaultGradient();
    gradient.setGradientFor(DefaultParamInitializer.WEIGHT_KEY, weightGrad);
    gradient.setGradientFor(DefaultParamInitializer.BIAS_KEY, biasGrad);
    updater.update(layer, gradient, 0, 1);
    assertNotEquals(weightGradCopy, weightGrad);
    assertNotEquals(biasGradCopy, biasGrad);
    double sumSquaresWeight = weightGradCopy.mul(weightGradCopy).sumNumber().doubleValue();
    double sumSquaresBias = biasGradCopy.mul(biasGradCopy).sumNumber().doubleValue();
    double sumSquares = sumSquaresWeight + sumSquaresBias;
    double l2Layer = Math.sqrt(sumSquares);
    INDArray normWeightsExpected = weightGradCopy.div(l2Layer);
    INDArray normBiasExpected = biasGradCopy.div(l2Layer);
    double l2Weight = gradient.getGradientFor(DefaultParamInitializer.WEIGHT_KEY).norm2Number().doubleValue();
    double l2Bias = gradient.getGradientFor(DefaultParamInitializer.BIAS_KEY).norm2Number().doubleValue();
    assertTrue(!Double.isNaN(l2Weight) && l2Weight > 0.0);
    assertTrue(!Double.isNaN(l2Bias) && l2Bias > 0.0);
    assertEquals(normWeightsExpected, gradient.getGradientFor(DefaultParamInitializer.WEIGHT_KEY));
    assertEquals(normBiasExpected, gradient.getGradientFor(DefaultParamInitializer.BIAS_KEY));
}
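Unlike the two clipping strategies above, RenormalizeL2PerLayer rescales unconditionally: every gradient array in the layer is divided by the L2 norm computed over all of the layer's gradients together, which is exactly what the expected values in this test check. A minimal sketch of that rule, again assuming Gradient.gradientForVariable() exposes the per-parameter arrays:

double sumSquares = 0.0;
for (INDArray g : gradient.gradientForVariable().values())
    sumSquares += g.mul(g).sumNumber().doubleValue();
double l2Layer = Math.sqrt(sumSquares);
for (INDArray g : gradient.gradientForVariable().values())
    g.divi(l2Layer); // in-place; relative magnitudes within the layer are preserved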
Use of org.deeplearning4j.nn.conf.NeuralNetConfiguration in project deeplearning4j by deeplearning4j, from the class TestGradientNormalization, method testRenormalizationPerParamType.
@Test
public void testRenormalizationPerParamType() {
    Nd4j.getRandom().setSeed(12345);
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .layer(new DenseLayer.Builder().nIn(10).nOut(20)
                    .updater(org.deeplearning4j.nn.conf.Updater.NONE)
                    .gradientNormalization(GradientNormalization.RenormalizeL2PerParamType).build())
            .build();
    int numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    Layer layer = conf.getLayer().instantiate(conf, null, 0, params, true);
    Updater updater = UpdaterCreator.getUpdater(layer);
    INDArray weightGrad = Nd4j.rand(10, 20);
    INDArray biasGrad = Nd4j.rand(1, 10);
    INDArray weightGradCopy = weightGrad.dup();
    INDArray biasGradCopy = biasGrad.dup();
    Gradient gradient = new DefaultGradient();
    gradient.setGradientFor(DefaultParamInitializer.WEIGHT_KEY, weightGrad);
    gradient.setGradientFor(DefaultParamInitializer.BIAS_KEY, biasGrad);
    updater.update(layer, gradient, 0, 1);
    INDArray normWeightsExpected = weightGradCopy.div(weightGradCopy.norm2Number());
    INDArray normBiasExpected = biasGradCopy.div(biasGradCopy.norm2Number());
    assertEquals(normWeightsExpected, gradient.getGradientFor(DefaultParamInitializer.WEIGHT_KEY));
    assertEquals(normBiasExpected, gradient.getGradientFor(DefaultParamInitializer.BIAS_KEY));
}
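RenormalizeL2PerParamType applies the same unconditional division, but each parameter type is normalized by its own L2 norm rather than the layer-wide one, so every array ends up with unit L2 norm. A minimal sketch under the same gradientForVariable() assumption:

for (INDArray g : gradient.gradientForVariable().values()) {
    // divide each array by its own L2 norm; afterwards norm2Number() returns 1.0
    g.divi(g.norm2Number().doubleValue());
}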