
Example 36 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

the class GradCheckReductions method testReductionGradients2.

@Test
public void testReductionGradients2() {
    // Test reductions: NON-final function
    Nd4j.getRandom().setSeed(12345);
    int d0 = 3;
    int d1 = 4;
    int d2 = 5;
    List<String> allFailed = new ArrayList<>();
    for (int reduceDim : new int[] { 0, 1, 2 }) {
        for (int i = 0; i < 12; i++) {
            int[] outShape;
            switch(reduceDim) {
                case 0:
                    outShape = new int[] { d1, d2 };
                    break;
                case 1:
                    outShape = new int[] { d0, d2 };
                    break;
                case 2:
                    outShape = new int[] { d0, d1 };
                    break;
                default:
                    throw new RuntimeException();
            }
            SameDiff sd = SameDiff.create();
            sd.setLogExecution(false);
            SDVariable in = sd.var("in", new int[] { -1, d1, d2 });
            SDVariable label = sd.var("label", outShape);
            SDVariable second = in.mul(2);
            double maxRelError = 1e-5;
            double minAbsError = 1e-4;
            INDArray inputArr = Nd4j.randn(new int[] { d0, d1, d2 }).muli(1000);
            INDArray labelArr = Nd4j.randn(outShape).muli(1000);
            SDVariable reduced;
            String name;
            switch(i) {
                case 0:
                    reduced = sd.mean("reduced", second, reduceDim);
                    name = "mean";
                    break;
                case 1:
                    reduced = sd.sum("reduced", second, reduceDim);
                    name = "sum";
                    break;
                case 2:
                    reduced = sd.standardDeviation("reduced", second, true, reduceDim);
                    name = "stdev";
                    break;
                case 3:
                    reduced = sd.min("reduced", second, reduceDim);
                    name = "min";
                    break;
                case 4:
                    reduced = sd.max("reduced", second, reduceDim);
                    name = "max";
                    break;
                case 5:
                    // Variance is a bit finicky for gradient checks, due to the huge score/output values...
                    maxRelError = 1e-3;
                    // Most gradients are in the range 1k to >100k
                    minAbsError = 1;
                    inputArr.divi(10);
                    labelArr.divi(100);
                    BooleanIndexing.replaceWhere(inputArr, Nd4j.rand(inputArr.shape()).muli(100).addi(100), Conditions.absLessThan(1.0));
                    reduced = sd.variance("reduced", second, true, reduceDim);
                    name = "variance";
                    break;
                case 6:
                    inputArr.divi(1000);
                    labelArr.divi(1000);
                    reduced = sd.prod("reduced", second, reduceDim);
                    name = "prod";
                    break;
                case 7:
                    reduced = sd.norm1("reduced", second, reduceDim);
                    name = "norm1";
                    break;
                case 8:
                    reduced = sd.norm2("reduced", second, reduceDim);
                    name = "norm2";
                    break;
                case 9:
                    inputArr = Nd4j.rand(new int[] { d0, d1, d2 });
                    labelArr = Nd4j.rand(outShape);
                    reduced = sd.normmax("reduced", second, reduceDim);
                    name = "normmax";
                    break;
                case 10:
                    reduced = sd.argmax("reduced", second, reduceDim);
                    name = "argmax";
                    break;
                case 11:
                    reduced = sd.argmin("reduced", second, reduceDim);
                    name = "argmin";
                    break;
                default:
                    throw new RuntimeException();
            }
            SDVariable add = reduced.add(1.0);
            SDVariable diff = label.sub(add);
            SDVariable sqDiff = diff.mul(diff);
            SDVariable mseLoss = sd.mean("loss", sqDiff);
            String msg = "(test " + i + " - " + name + ", dimension=" + reduceDim + ")";
            log.info("*** Starting test: " + msg);
            sd.associateArrayWithVariable(inputArr, in);
            sd.associateArrayWithVariable(labelArr, label);
            try {
                boolean ok = GradCheckUtil.checkGradients(sd, 1e-5, maxRelError, minAbsError, true, false);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }
    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Also used : SDVariable(org.nd4j.autodiff.samediff.SDVariable) INDArray(org.nd4j.linalg.api.ndarray.INDArray) SameDiff(org.nd4j.autodiff.samediff.SameDiff) ArrayList(java.util.ArrayList) Test(org.junit.Test)
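
GradCheckUtil.checkGradients compares SameDiff's analytic gradients against numeric estimates; the 1e-5 passed above is presumably the perturbation step, with maxRelError/minAbsError as the acceptance tolerances. A minimal sketch of the central-difference estimate such a checker is built on (the helper below is illustrative, not GradCheckUtil's actual code):

import java.util.function.ToDoubleFunction;

public class CentralDifferenceSketch {

    /**
     * Central-difference estimate of df/dx[i] for a scalar-valued function f.
     * A gradient checker applies this per input element, then accepts the
     * analytic gradient g when |g - gNum| / max(|g|, |gNum|) < maxRelError,
     * or when the absolute difference is below minAbsError.
     */
    public static double[] numericGradient(ToDoubleFunction<double[]> f, double[] x, double eps) {
        double[] grad = new double[x.length];
        for (int i = 0; i < x.length; i++) {
            double orig = x[i];
            x[i] = orig + eps;
            double fPlus = f.applyAsDouble(x);
            x[i] = orig - eps;
            double fMinus = f.applyAsDouble(x);
            // Restore the input before moving to the next element
            x[i] = orig;
            grad[i] = (fPlus - fMinus) / (2.0 * eps);
        }
        return grad;
    }
}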

Example 37 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

the class SameDiffOpExecutionerTest method testupdateGraphFromProfiler.

@Test
public void testupdateGraphFromProfiler() {
    SameDiffOpExecutioner sameDiffOpExecutioner = new SameDiffOpExecutioner();
    Nd4j.getExecutioner().setProfilingMode(OpExecutioner.ProfilingMode.ALL);
    Nd4j.getExecutioner().exec(new Sigmoid(Nd4j.scalar(1.0)));
    // Retrieve the SameDiff graph accumulated by the wrapping executioner (no assertions follow).
    SameDiff sameDiff = sameDiffOpExecutioner.getSameDiff();
}
Also used : SameDiff(org.nd4j.autodiff.samediff.SameDiff) SameDiffOpExecutioner(org.nd4j.autodiff.samediff.SameDiffOpExecutioner) Sigmoid(org.nd4j.linalg.api.ops.impl.transforms.Sigmoid) Test(org.junit.Test)
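
One caveat: the test switches the global profiling mode to ALL and never switches it back, which slows down every test that runs afterwards. A minimal cleanup sketch, assuming JUnit 4 (as used above) and that DISABLED is the desired default in OpExecutioner.ProfilingMode:

import org.junit.After;
import org.nd4j.linalg.api.ops.executioner.OpExecutioner;
import org.nd4j.linalg.factory.Nd4j;

public class ProfilingCleanup {

    @After
    public void resetProfilingMode() {
        // Restore the default so subsequent tests do not pay the profiling overhead.
        Nd4j.getExecutioner().setProfilingMode(OpExecutioner.ProfilingMode.DISABLED);
    }
}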

Example 38 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

the class GradCheckMisc method testGradientAutoBroadcast3.

@Test
public void testGradientAutoBroadcast3() {
    // These tests: output size > input sizes
    Nd4j.getRandom().setSeed(12345);
    List<String> allFailed = new ArrayList<>();
    // Test cases: in1Shape, in2Shape, shapeOf(op(in1,in2))
    List<Triple<int[], int[], int[]>> testCases = new ArrayList<>();
    testCases.add(new Triple<>(new int[] { 3, 1 }, new int[] { 1, 4 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 1 }, new int[] { 3, 4 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4 }, new int[] { 1, 4 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 1 }, new int[] { 1, 1, 5 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 1 }, new int[] { 3, 1, 5 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 1, 5 }, new int[] { 1, 4, 1 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 1, 5 }, new int[] { 1, 4, 5 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 1, 5 }, new int[] { 3, 4, 5 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 1, 1, 1 }, new int[] { 1, 4, 5, 6 }, new int[] { 3, 4, 5, 6 }));
    testCases.add(new Triple<>(new int[] { 1, 1, 1, 6 }, new int[] { 3, 4, 5, 6 }, new int[] { 3, 4, 5, 6 }));
    testCases.add(new Triple<>(new int[] { 1, 4, 5, 1 }, new int[] { 3, 1, 1, 6 }, new int[] { 3, 4, 5, 6 }));
    testCases.add(new Triple<>(new int[] { 1, 6 }, new int[] { 3, 4, 5, 1 }, new int[] { 3, 4, 5, 6 }));
    for (Triple<int[], int[], int[]> p : testCases) {
        // Note: only cases 0-5 run here; the floordiv/floormod branches (cases 6 and 7) are skipped by this loop bound.
        for (int i = 0; i < 6; i++) {
            SameDiff sd = SameDiff.create();
            SDVariable in3 = sd.var("in1", p.getFirst());
            SDVariable in2 = sd.var("in2", p.getSecond());
            String name;
            SDVariable bcOp;
            switch(i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = sd.f().floorDiv(in3, in2);
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = sd.f().floorMod(in3, in2);
                    name = "floormod";
                    break;
                default:
                    throw new RuntimeException();
            }
            SDVariable outVar = sd.sum(bcOp);
            String msg = "(test " + i + ": " + name + ", array 1 size =" + Arrays.toString(p.getFirst()) + ", array 2 size = " + Arrays.toString(p.getSecond()) + ")";
            log.info("*** Starting test: " + msg);
            INDArray in3Arr = Nd4j.randn(p.getFirst()).muli(100);
            INDArray in2Arr = Nd4j.randn(p.getSecond()).muli(100);
            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);
            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new int[] { 1, 1 }, out.shape());
                INDArray bcOut = bcOp.getArr();
                assertNotNull(bcOut);
                assertArrayEquals(p.getThird(), bcOut.shape());
                // System.out.println(sd.asFlatPrint());
                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }
    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Also used : SameDiff(org.nd4j.autodiff.samediff.SameDiff) ArrayList(java.util.ArrayList) Triple(org.nd4j.linalg.primitives.Triple) SDVariable(org.nd4j.autodiff.samediff.SDVariable) INDArray(org.nd4j.linalg.api.ndarray.INDArray) Test(org.junit.Test)
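
The expected output shapes in the test cases follow NumPy-style broadcasting: shapes are aligned from the right, and a size-1 dimension stretches to match the other operand. A small illustrative helper (not part of the nd4j API) that reproduces the third element of each Triple from the first two:

import java.util.Arrays;

public class BroadcastShape {

    /** NumPy-style broadcast: align shapes from the right; a size-1 dimension stretches to match. */
    public static int[] broadcastShape(int[] a, int[] b) {
        int rank = Math.max(a.length, b.length);
        int[] out = new int[rank];
        for (int i = 0; i < rank; i++) {
            // Treat the shorter shape as if padded with leading 1s
            int da = i < rank - a.length ? 1 : a[i - (rank - a.length)];
            int db = i < rank - b.length ? 1 : b[i - (rank - b.length)];
            if (da != db && da != 1 && db != 1) {
                throw new IllegalArgumentException(
                        "Incompatible shapes: " + Arrays.toString(a) + " vs " + Arrays.toString(b));
            }
            out[i] = Math.max(da, db);
        }
        return out;
    }

    public static void main(String[] args) {
        // Matches the last test case above: [1, 6] op [3, 4, 5, 1] -> [3, 4, 5, 6]
        System.out.println(Arrays.toString(broadcastShape(new int[] { 1, 6 }, new int[] { 3, 4, 5, 1 })));
    }
}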

Example 39 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

the class GradCheckMisc method testExpandDimsGradient.

@Test
public void testExpandDimsGradient() {
    int[] origShape = new int[] { 3, 4 };
    for (int i = 0; i < 3; i++) {
        int[] expExpandShape;
        switch(i) {
            case 0:
                expExpandShape = new int[] { 1, 3, 4 };
                break;
            case 1:
                expExpandShape = new int[] { 3, 1, 4 };
                break;
            case 2:
                expExpandShape = new int[] { 3, 4, 1 };
                break;
            default:
                throw new RuntimeException();
        }
        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAllTestMatricesWithShape(origShape[0], origShape[1], 12345)) {
            INDArray inArr = p.getFirst().muli(100);
            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable expand = sd.f().expandDims(in, i);
            // Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", expand, true);
            INDArray out = sd.execAndEndResult();
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);
            assertEquals(expOut, out);
            assertArrayEquals(expExpandShape, expand.getArr().shape());
            INDArray expExpand = inArr.dup('c').reshape(expExpandShape);
            assertEquals(expExpand, expand.getArr());
            String msg = "expandDim=" + i + ", source=" + p.getSecond();
            log.info("Starting: " + msg);
            boolean ok = GradCheckUtil.checkGradients(sd);
            assertTrue(msg, ok);
        }
    }
}
Also used : SDVariable(org.nd4j.autodiff.samediff.SDVariable) INDArray(org.nd4j.linalg.api.ndarray.INDArray) SameDiff(org.nd4j.autodiff.samediff.SameDiff) Test(org.junit.Test)
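
The expected shapes in the switch above just insert a size-1 axis at position i, which is also why the test can verify expandDims against a plain c-order reshape. An illustrative helper for the shape transform (hypothetical, not nd4j API):

/** Insert a size-1 axis at the given position: e.g. [3, 4] with axis=1 becomes [3, 1, 4]. */
static int[] expandShape(int[] shape, int axis) {
    int[] out = new int[shape.length + 1];
    for (int i = 0, j = 0; i < out.length; i++) {
        out[i] = (i == axis) ? 1 : shape[j++];
    }
    return out;
}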

Example 40 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

the class GradCheckMisc method testSliceGradient.

@Test
public void testSliceGradient() {
    Nd4j.getRandom().setSeed(12345);
    // Order here: original shape, begin, end (the test cases and log message below treat the third array as an exclusive end index)
    List<Triple<int[], int[], int[]>> testCases = new ArrayList<>();
    testCases.add(new Triple<>(new int[] { 3, 4 }, new int[] { 0, 0 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4 }, new int[] { 1, 1 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4 }, new int[] { 1, 2 }, new int[] { 2, 3 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 5 }, new int[] { 0, 0, 0 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 5 }, new int[] { 1, 1, 1 }, new int[] { 2, 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 5 }, new int[] { 1, 0, 2 }, new int[] { 3, 3, 4 }));
    for (int i = 0; i < testCases.size(); i++) {
        Triple<int[], int[], int[]> t = testCases.get(i);
        int[] os = t.getFirst();
        int[] b = t.getSecond();
        int[] e = t.getThird();
        INDArray arr = Nd4j.rand(os);
        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", arr);
        SDVariable slice = sd.slice(in, b, e);
        SDVariable stdev = sd.standardDeviation(slice, true);
        String msg = "i=" + i + ": inShape=" + Arrays.toString(os) + ", begin=" + Arrays.toString(b) + ", end=" + Arrays.toString(e);
        log.info("Starting test: " + msg);
        boolean ok = GradCheckUtil.checkGradients(sd);
        assertTrue(msg, ok);
    }
}
Also used : Triple(org.nd4j.linalg.primitives.Triple) SDVariable(org.nd4j.autodiff.samediff.SDVariable) INDArray(org.nd4j.linalg.api.ndarray.INDArray) SameDiff(org.nd4j.autodiff.samediff.SameDiff) ArrayList(java.util.ArrayList) Test(org.junit.Test)
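
Reading each triple as (shape, begin, end) with an exclusive end, as the log message suggests, the shape of each slice is simply end - begin per dimension. A hypothetical helper making that explicit (this assumes end-exclusive semantics; it is not part of GradCheckUtil or SameDiff):

/** Shape of the slice [begin[d], end[d]) in each dimension d, assuming an exclusive end index. */
static int[] sliceShape(int[] begin, int[] end) {
    int[] out = new int[begin.length];
    for (int d = 0; d < begin.length; d++) {
        out[d] = end[d] - begin[d]; // e.g. begin={1,0,2}, end={3,3,4} -> {2,3,2}
    }
    return out;
}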

Aggregations

SameDiff (org.nd4j.autodiff.samediff.SameDiff) 50
Test (org.junit.Test) 42
SDVariable (org.nd4j.autodiff.samediff.SDVariable) 41
INDArray (org.nd4j.linalg.api.ndarray.INDArray) 37
ArrayList (java.util.ArrayList) 10
DynamicCustomOp (org.nd4j.linalg.api.ops.DynamicCustomOp) 10
Ignore (org.junit.Ignore) 7
ClassPathResource (org.nd4j.linalg.io.ClassPathResource) 6
lombok.val (lombok.val) 4
LossFunctions (org.nd4j.autodiff.loss.LossFunctions) 4
LossInfo (org.nd4j.autodiff.loss.LossInfo) 4
BernoulliDistribution (org.nd4j.linalg.api.ops.random.impl.BernoulliDistribution) 4
DifferentialFunction (org.nd4j.autodiff.functions.DifferentialFunction) 2
Triple (org.nd4j.linalg.primitives.Triple) 2
ZeroInitScheme (org.nd4j.weightinit.impl.ZeroInitScheme) 2
DataOutputStream (java.io.DataOutputStream) 1
FileOutputStream (java.io.FileOutputStream) 1
ByteBuffer (java.nio.ByteBuffer) 1
HashMap (java.util.HashMap) 1
LinkedHashMap (java.util.LinkedHashMap) 1