Search in sources :

Example 21 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

Source: class GradCheckMisc, method testGradientAutoBroadcast1.

@Test
public void testGradientAutoBroadcast1() {
    // Gradient-check broadcasting for elementwise ops where the second input
    // has a size-1 dimension at each possible position (0, 1, 2).
    Nd4j.getRandom().setSeed(12345);
    List<String> allFailed = new ArrayList<>();
    for (int dim_sz1 : new int[] { 0, 1, 2 }) {
        int[] in2Shape = { 3, 4, 5 };
        // Collapse one dimension to 1 so the op must broadcast along it
        in2Shape[dim_sz1] = 1;
        // Cover every op in the switch below (cases 0..7). The previous loop
        // bounds (i = 2; i < 3) ran only the "mul" case, leaving the other
        // seven ops as dead, untested code.
        for (int i = 0; i < 8; i++) {
            SameDiff sd = SameDiff.create();
            SDVariable in3 = sd.var("in3", Nd4j.rand(new int[] { 3, 4, 5 }));
            SDVariable in2 = sd.var("in2", in2Shape);
            SDVariable bcOp;
            String name;
            switch(i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = sd.f().floorDiv(in3, in2);
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = sd.f().floorMod(in3, in2);
                    name = "floormod";
                    break;
                default:
                    throw new RuntimeException();
            }
            // Reduce to a scalar loss so the gradient check has a single output
            SDVariable outVar = sd.sum(bcOp);
            String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
            log.info("*** Starting test: " + msg);
            INDArray in3Arr = Nd4j.randn(new int[] { 3, 4, 5 }).muli(100);
            INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);
            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);
            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                // Scalar results are represented as [1,1] in nd4j at this version
                assertArrayEquals(new int[] { 1, 1 }, out.shape());
                // System.out.println(sd.asFlatPrint());
                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                // Record the failure but keep running the remaining ops so one
                // broken op doesn't hide results for the others
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }
    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Also used : SDVariable(org.nd4j.autodiff.samediff.SDVariable) INDArray(org.nd4j.linalg.api.ndarray.INDArray) SameDiff(org.nd4j.autodiff.samediff.SameDiff) ArrayList(java.util.ArrayList) Test(org.junit.Test)

Example 22 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

Source: class GradCheckMisc, method testGradientAutoBroadcast2.

@Test
public void testGradientAutoBroadcast2() {
    // Gradient-check broadcasting for elementwise ops where the second input
    // has size-1 dimensions at two (or all three) positions simultaneously.
    Nd4j.getRandom().setSeed(12345);
    List<String> allFailed = new ArrayList<>();
    for (int[] dim_sz1s : new int[][] { { 0, 1 }, { 0, 2 }, { 1, 2 }, { 0, 1, 2 } }) {
        int[] otherShape = { 3, 4, 5 };
        // Collapse each listed dimension to 1 so the op must broadcast along it
        otherShape[dim_sz1s[0]] = 1;
        otherShape[dim_sz1s[1]] = 1;
        if (dim_sz1s.length == 3) {
            otherShape[dim_sz1s[2]] = 1;
        }
        // Cover every op in the switch below (cases 0..7). The previous bound
        // (i < 6) left "floordiv" and "floormod" unreachable, and was also
        // inconsistent with testGradientAutoBroadcast1.
        for (int i = 0; i < 8; i++) {
            SameDiff sd = SameDiff.create();
            SDVariable in3 = sd.var("in3", new int[] { 3, 4, 5 });
            SDVariable in2 = sd.var("inToBc", otherShape);
            String name;
            SDVariable bcOp;
            switch(i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = sd.f().floorDiv(in3, in2);
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = sd.f().floorMod(in3, in2);
                    name = "floormod";
                    break;
                default:
                    throw new RuntimeException();
            }
            // Reduce to a scalar loss so the gradient check has a single output
            SDVariable outVar = sd.sum(bcOp);
            String msg = "(test " + i + ": " + name + ", dimensions=" + Arrays.toString(dim_sz1s) + ")";
            log.info("*** Starting test: " + msg);
            INDArray in3Arr = Nd4j.randn(new int[] { 3, 4, 5 }).muli(100);
            INDArray in2Arr = Nd4j.randn(otherShape).muli(100);
            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);
            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                // Scalar results are represented as [1,1] in nd4j at this version
                assertArrayEquals(new int[] { 1, 1 }, out.shape());
                // System.out.println(sd.asFlatPrint());
                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                // Record the failure but keep running the remaining ops so one
                // broken op doesn't hide results for the others
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }
    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Also used : SDVariable(org.nd4j.autodiff.samediff.SDVariable) INDArray(org.nd4j.linalg.api.ndarray.INDArray) SameDiff(org.nd4j.autodiff.samediff.SameDiff) ArrayList(java.util.ArrayList) Test(org.junit.Test)

Example 23 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

Source: class While, method init.

// Wires this While op into the parent SameDiff graph and builds the sub-graphs
// used to evaluate the loop predicate and body. NOTE(review): registration
// order (putFunctionForId -> addArgsFor -> addOutgoingFor) appears significant
// to graph bookkeeping — confirm before reordering.
private void init(String blockName, SameDiff parent, SDVariable[] inputVars, SameDiff.SameDiffConditional predicate, SameDiff.SameDiffFunctionDefinition condition, SameDiff.SameDiffFunctionDefinition trueBody) {
    this.sameDiff = parent;
    this.inputVars = inputVars;
    this.predicate = predicate;
    this.trueBody = trueBody;
    this.blockName = blockName;
    // Placeholder [1,1] output variable (zero-initialized, 'f' order) so the
    // While node has an outgoing variable in the parent graph
    this.dummyResult = parent.var("dummyresult-" + UUID.randomUUID().toString(), new int[] { 1, 1 }, new ZeroInitScheme('f'));
    parent.putFunctionForId(getOwnName(), this);
    parent.addArgsFor(inputVars, this);
    parent.addOutgoingFor(new SDVariable[] { dummyResult }, this);
    // create a samediff sub graph for running just the execution
    // return a reference to the loop for referencing during actual execution
    SameDiff sameDiff = SameDiff.create();
    // store the reference to the result array and the same diff execution instance
    this.targetBoolean = predicate.eval(sameDiff, condition, inputVars);
    this.predicateExecution = sameDiff;
    // store references to the loop body
    // Unique name so multiple While instances in one graph don't collide
    String trueBodyName = "true-body-" + UUID.randomUUID().toString();
    this.trueBodyName = trueBodyName;
    // running define function will setup a proper same diff instance
    parent.defineFunction(trueBodyName, trueBody, inputVars);
    parent.defineFunction(blockName, condition, inputVars);
    parent.putSubFunction("predicate-eval-body", sameDiff);
    // get a reference to the actual loop body
    this.loopBodyExecution = parent.getFunction(trueBodyName);
}
Also used : ZeroInitScheme(org.nd4j.weightinit.impl.ZeroInitScheme) SameDiff(org.nd4j.autodiff.samediff.SameDiff)

Example 24 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

Source: class GradCheckTransforms, method testDepthToSpace.

@Test
public void testDepthToSpace() {
    // Verifies SameDiff depthToSpace against the raw "depth_to_space" custom op,
    // then runs a numerical gradient check.
    Nd4j.getRandom().setSeed(1337);
    int miniBatch = 128;
    int blockSize = 4;
    String dataFormat = "NHWC";
    int isNHWC = dataFormat.equals("NHWC") ? 1 : 0;
    // NHWC input: channels = blockSize^2 so output channel count is 1
    int[] inputShape = new int[] { miniBatch, 2, 2, blockSize * blockSize };
    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);
    // Expected output computed directly via the custom op executioner
    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("depth_to_space").addInputs(input).addIntegerArguments(blockSize, isNHWC).addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);
    sd.associateArrayWithVariable(input, sdInput);
    SDVariable t = sd.depthToSpace(sdInput, blockSize, dataFormat);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();
    // Previously a forward-pass mismatch was only logged and gradient-check
    // exceptions were swallowed, so this test could never fail. Assert instead.
    assertEquals("depth to space failed on forward", expOut, out);
    assertTrue("depth to space gradient check failed", GradCheckUtil.checkGradients(sd));
}
Also used : SDVariable(org.nd4j.autodiff.samediff.SDVariable) INDArray(org.nd4j.linalg.api.ndarray.INDArray) SameDiff(org.nd4j.autodiff.samediff.SameDiff) DynamicCustomOp(org.nd4j.linalg.api.ops.DynamicCustomOp) Test(org.junit.Test)

Example 25 with SameDiff

use of org.nd4j.autodiff.samediff.SameDiff in project nd4j by deeplearning4j.

Source: class GradCheckTransforms, method testCross.

@Test
public void testCross() {
    // Verifies SameDiff cross product against the raw "cross" custom op,
    // then runs a numerical gradient check.
    INDArray a = Nd4j.create(new float[] { 4, 2, 1 }, new int[] { 1, 3 });
    INDArray b = Nd4j.create(new float[] { 1, 3, 4 }, new int[] { 1, 3 });
    // Expected output computed directly via the custom op executioner
    INDArray expOut = Nd4j.create(1, 3);
    DynamicCustomOp op = DynamicCustomOp.builder("cross").addInputs(a, b).addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);
    SameDiff sd = SameDiff.create();
    SDVariable sdA = sd.var("a", expOut.shape());
    SDVariable sdB = sd.var("b", expOut.shape());
    sd.associateArrayWithVariable(a, sdA);
    sd.associateArrayWithVariable(b, sdB);
    SDVariable t = sd.cross(sdA, sdB);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();
    // Previously a forward-pass mismatch was only logged (with a copy-pasted
    // "batch to space" message) and gradient-check exceptions were swallowed,
    // so this test could never fail. Assert instead.
    assertEquals("cross failed on forward", expOut, out);
    assertTrue("cross gradient check failed", GradCheckUtil.checkGradients(sd));
}
Also used : SDVariable(org.nd4j.autodiff.samediff.SDVariable) INDArray(org.nd4j.linalg.api.ndarray.INDArray) SameDiff(org.nd4j.autodiff.samediff.SameDiff) DynamicCustomOp(org.nd4j.linalg.api.ops.DynamicCustomOp) Test(org.junit.Test)

Aggregations

SameDiff (org.nd4j.autodiff.samediff.SameDiff)50 Test (org.junit.Test)42 SDVariable (org.nd4j.autodiff.samediff.SDVariable)41 INDArray (org.nd4j.linalg.api.ndarray.INDArray)37 ArrayList (java.util.ArrayList)10 DynamicCustomOp (org.nd4j.linalg.api.ops.DynamicCustomOp)10 Ignore (org.junit.Ignore)7 ClassPathResource (org.nd4j.linalg.io.ClassPathResource)6 lombok.val (lombok.val)4 LossFunctions (org.nd4j.autodiff.loss.LossFunctions)4 LossInfo (org.nd4j.autodiff.loss.LossInfo)4 BernoulliDistribution (org.nd4j.linalg.api.ops.random.impl.BernoulliDistribution)4 DifferentialFunction (org.nd4j.autodiff.functions.DifferentialFunction)2 Triple (org.nd4j.linalg.primitives.Triple)2 ZeroInitScheme (org.nd4j.weightinit.impl.ZeroInitScheme)2 DataOutputStream (java.io.DataOutputStream)1 FileOutputStream (java.io.FileOutputStream)1 ByteBuffer (java.nio.ByteBuffer)1 HashMap (java.util.HashMap)1 LinkedHashMap (java.util.LinkedHashMap)1