Use of ml.shifu.shifu.core.dtrain.nn.ActivationPTANH in the project shifu by ShifuML.
The following is the method test of the class ActivationPTANHTest.
@Test
public void test() {
    // Exercises ActivationPTANH against plain ActivationTANH. The asserted
    // constants imply: output at 0 is 0, at 1 it equals tanh(1), and at -1 it
    // equals 0.25 * tanh(-1) (0.1903985389889412 == 0.25 * 0.7615941559557649).
    ActivationTANH plainTanh = new ActivationTANH();
    ActivationPTANH penalizedTanh = new ActivationPTANH();

    double[] values = { 0.0d, 1.0d, -1.0d };
    penalizedTanh.activationFunction(values, 0, 3);

    Assert.assertTrue(Math.abs(values[0] - 0.0) < 1e-6);
    Assert.assertTrue(Math.abs(values[1] - 0.7615941559557649d) < 1e-6);
    Assert.assertTrue(Math.abs(values[2] + 0.1903985389889412d) < 1e-6);

    // Derivative at input 0 is expected to be exactly 0.25.
    double derivative = penalizedTanh.derivativeFunction(0.0d, values[0]);
    Assert.assertTrue(Math.abs(derivative - 0.25d) < 1e-6);

    // For the positive input the derivative must match plain tanh's.
    derivative = penalizedTanh.derivativeFunction(1.0d, values[1]);
    Assert.assertTrue(Math.abs(derivative - plainTanh.derivativeFunction(1.0d, values[1])) < 1e-6);

    // For the negative input the expected value is 0.25 * tanh'(-1), where
    // tanh' is evaluated on the un-penalized tanh output (not values[2]).
    double[] tanhOut = { -1.0d };
    plainTanh.activationFunction(tanhOut, 0, 1);
    derivative = penalizedTanh.derivativeFunction(-1.0d, values[2]);
    Assert.assertTrue(Math.abs(derivative - 0.25 * plainTanh.derivativeFunction(-1.0d, tanhOut[0])) < 1e-6);
}
Aggregations