Example usage of org.nd4j.autodiff.samediff.SameDiff from the nd4j project (deeplearning4j): class GradCheckReductions, method testReductionGradients2.
// Gradient checks for reduction ops applied to a NON-final function: the
// reduction consumes second = in.mul(2) rather than the input variable
// directly, so gradients must also flow back through the elementwise multiply.
// Covers mean, sum, stdev, min, max, variance, prod, norm1, norm2, normmax,
// argmax and argmin over each of the three reduction dimensions; all failures
// are collected and reported together at the end.
@Test
public void testReductionGradients2() {
// Test reductions: NON-final function
Nd4j.getRandom().setSeed(12345);
int d0 = 3;
int d1 = 4;
int d2 = 5;
List<String> allFailed = new ArrayList<>();
for (int reduceDim : new int[] { 0, 1, 2 }) {
for (int i = 0; i < 12; i++) {
// Output shape = input shape [d0,d1,d2] with the reduced dimension removed
int[] outShape;
switch(reduceDim) {
case 0:
outShape = new int[] { d1, d2 };
break;
case 1:
outShape = new int[] { d0, d2 };
break;
case 2:
outShape = new int[] { d0, d1 };
break;
default:
throw new RuntimeException();
}
SameDiff sd = SameDiff.create();
sd.setLogExecution(false);
// -1 in the first dimension: presumably a placeholder/batch dimension standing in for d0 — TODO confirm
SDVariable in = sd.var("in", new int[] { -1, d1, d2 });
SDVariable label = sd.var("label", outShape);
// "second" is the NON-final function the reductions are applied to
SDVariable second = in.mul(2);
// Default gradient-check tolerances; some cases below relax these
double maxRelError = 1e-5;
double minAbsError = 1e-4;
INDArray inputArr = Nd4j.randn(new int[] { d0, d1, d2 }).muli(1000);
INDArray labelArr = Nd4j.randn(outShape).muli(1000);
SDVariable reduced;
String name;
switch(i) {
case 0:
reduced = sd.mean("reduced", second, reduceDim);
name = "mean";
break;
case 1:
reduced = sd.sum("reduced", second, reduceDim);
name = "sum";
break;
case 2:
reduced = sd.standardDeviation("reduced", second, true, reduceDim);
name = "stdev";
break;
case 3:
reduced = sd.min("reduced", second, reduceDim);
name = "min";
break;
case 4:
reduced = sd.max("reduced", second, reduceDim);
name = "max";
break;
case 5:
// Variance is a bit finicky for gradient checks, due to huge score/output...
maxRelError = 1e-3;
// Most gradients are in the range 1k to >100k
minAbsError = 1;
inputArr.divi(10);
labelArr.divi(100);
// Push near-zero input values away from zero to avoid numerical issues in the check
BooleanIndexing.replaceWhere(inputArr, Nd4j.rand(inputArr.shape()).muli(100).addi(100), Conditions.absLessThan(1.0));
reduced = sd.variance("reduced", second, true, reduceDim);
name = "variance";
break;
case 6:
// Scale inputs down: a product of many large values quickly overflows / loses precision
inputArr.divi(1000);
labelArr.divi(1000);
reduced = sd.prod("reduced", second, reduceDim);
name = "prod";
break;
case 7:
reduced = sd.norm1("reduced", second, reduceDim);
name = "norm1";
break;
case 8:
reduced = sd.norm2("reduced", second, reduceDim);
name = "norm2";
break;
case 9:
// normmax uses small uniform inputs instead of the large gaussian ones above
inputArr = Nd4j.rand(new int[] { d0, d1, d2 });
labelArr = Nd4j.rand(outShape);
reduced = sd.normmax("reduced", second, reduceDim);
name = "normmax";
break;
case 10:
reduced = sd.argmax("reduced", second, reduceDim);
name = "argmax";
break;
case 11:
reduced = sd.argmin("reduced", second, reduceDim);
name = "argmin";
break;
default:
throw new RuntimeException();
}
// Scalar MSE-style loss so there is a single value to differentiate
SDVariable add = reduced.add(1.0);
SDVariable diff = label.sub(add);
SDVariable sqDiff = diff.mul(diff);
SDVariable mseLoss = sd.mean("loss", sqDiff);
String msg = "(test " + i + " - " + name + ", dimension=" + reduceDim + ")";
log.info("*** Starting test: " + msg);
sd.associateArrayWithVariable(inputArr, in);
sd.associateArrayWithVariable(labelArr, label);
try {
boolean ok = GradCheckUtil.checkGradients(sd, 1e-5, maxRelError, minAbsError, true, false);
if (!ok) {
allFailed.add(msg);
}
} catch (Exception e) {
// Collect exceptions rather than failing fast, so all cases are reported together
e.printStackTrace();
allFailed.add(msg + " - EXCEPTION");
}
}
}
assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Example usage of org.nd4j.autodiff.samediff.SameDiff from the nd4j project (deeplearning4j): class SameDiffOpExecutionerTest, method testupdateGraphFromProfiler.
/**
 * Verifies that a {@link SameDiffOpExecutioner} exposes a SameDiff instance
 * after an op has been executed with full profiling enabled.
 *
 * <p>Previously the retrieved SameDiff was simply discarded, so this test
 * asserted nothing and would pass even if {@code getSameDiff()} returned null.
 */
@Test
public void testupdateGraphFromProfiler() {
    SameDiffOpExecutioner sameDiffOpExecutioner = new SameDiffOpExecutioner();
    Nd4j.getExecutioner().setProfilingMode(OpExecutioner.ProfilingMode.ALL);
    Nd4j.getExecutioner().exec(new Sigmoid(Nd4j.scalar(1.0)));
    SameDiff sameDiff = sameDiffOpExecutioner.getSameDiff();
    // NOTE(review): assumes the executioner always materializes a SameDiff
    // graph — confirm against SameDiffOpExecutioner's contract.
    assertNotNull("SameDiffOpExecutioner should expose a SameDiff instance", sameDiff);
}
Example usage of org.nd4j.autodiff.samediff.SameDiff from the nd4j project (deeplearning4j): class GradCheckMisc, method testGradientAutoBroadcast3.
/**
 * Gradient checks for automatic broadcasting where the output shape is larger
 * than at least one input shape. Each test case is a triple of
 * (in1Shape, in2Shape, expected shape of op(in1, in2)); for every case the
 * elementwise ops add/sub/mul/div/rsub/rdiv are gradient-checked. Failures are
 * collected and reported together at the end.
 */
@Test
public void testGradientAutoBroadcast3() {
    // These tests: output size > input sizes
    Nd4j.getRandom().setSeed(12345);
    List<String> allFailed = new ArrayList<>();
    // Test cases: in1Shape, in2Shape, shapeOf(op(in1,in2))
    List<Triple<int[], int[], int[]>> testCases = new ArrayList<>();
    testCases.add(new Triple<>(new int[] { 3, 1 }, new int[] { 1, 4 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 1 }, new int[] { 3, 4 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4 }, new int[] { 1, 4 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 1 }, new int[] { 1, 1, 5 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 1 }, new int[] { 3, 1, 5 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 1, 5 }, new int[] { 1, 4, 1 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 1, 5 }, new int[] { 1, 4, 5 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 1, 5 }, new int[] { 3, 4, 5 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 1, 1, 1 }, new int[] { 1, 4, 5, 6 }, new int[] { 3, 4, 5, 6 }));
    testCases.add(new Triple<>(new int[] { 1, 1, 1, 6 }, new int[] { 3, 4, 5, 6 }, new int[] { 3, 4, 5, 6 }));
    testCases.add(new Triple<>(new int[] { 1, 4, 5, 1 }, new int[] { 3, 1, 1, 6 }, new int[] { 3, 4, 5, 6 }));
    testCases.add(new Triple<>(new int[] { 1, 6 }, new int[] { 3, 4, 5, 1 }, new int[] { 3, 4, 5, 6 }));
    for (Triple<int[], int[], int[]> p : testCases) {
        // NOTE(review): cases 6-7 (floordiv/floormod) below are never reached with
        // this loop bound. Likely intentional — their gradients are zero almost
        // everywhere, so a numerical gradient check is not meaningful — but confirm.
        for (int i = 0; i < 6; i++) {
            SameDiff sd = SameDiff.create();
            // Renamed from "in3" to match the SameDiff variable name "in1"
            SDVariable in1 = sd.var("in1", p.getFirst());
            SDVariable in2 = sd.var("in2", p.getSecond());
            String name;
            SDVariable bcOp;
            switch(i) {
                case 0:
                    bcOp = in1.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in1.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in1.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in1.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in1.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in1.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = sd.f().floorDiv(in1, in2);
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = sd.f().floorMod(in1, in2);
                    name = "floormod";
                    break;
                default:
                    throw new RuntimeException();
            }
            // Sum to a scalar so there is a single loss value to differentiate
            SDVariable outVar = sd.sum(bcOp);
            String msg = "(test " + i + ": " + name + ", array 1 size =" + Arrays.toString(p.getFirst()) + ", array 2 size = " + Arrays.toString(p.getSecond()) + ")";
            log.info("*** Starting test: " + msg);
            INDArray in1Arr = Nd4j.randn(p.getFirst()).muli(100);
            INDArray in2Arr = Nd4j.randn(p.getSecond()).muli(100);
            sd.associateArrayWithVariable(in1Arr, in1);
            sd.associateArrayWithVariable(in2Arr, in2);
            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new int[] { 1, 1 }, out.shape());
                INDArray bcOut = bcOp.getArr();
                // Fixed: previously asserted on bcOp (an SDVariable reference that is
                // never null here); the value actually dereferenced below is bcOut.
                assertNotNull(bcOut);
                assertArrayEquals(p.getThird(), bcOut.shape());
                // System.out.println(sd.asFlatPrint());
                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                // Collect exceptions rather than failing fast, so all cases are reported together
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }
    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Example usage of org.nd4j.autodiff.samediff.SameDiff from the nd4j project (deeplearning4j): class GradCheckMisc, method testExpandDimsGradient.
/**
 * Gradient checks for expandDims: for each axis i in {0,1,2}, expanding a
 * [3,4] input must produce the expected rank-3 shape, leave the values
 * unchanged (forward pass), and back-propagate correctly through a stdev loss.
 */
@Test
public void testExpandDimsGradient() {
    int[] origShape = new int[] { 3, 4 };
    // Removed unused local "boolean first = true;" — it was never read.
    for (int i = 0; i < 3; i++) {
        // Expected shape after inserting a size-1 dimension at axis i
        int[] expExpandShape;
        switch(i) {
            case 0:
                expExpandShape = new int[] { 1, 3, 4 };
                break;
            case 1:
                expExpandShape = new int[] { 3, 1, 4 };
                break;
            case 2:
                expExpandShape = new int[] { 3, 4, 1 };
                break;
            default:
                throw new RuntimeException();
        }
        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAllTestMatricesWithShape(origShape[0], origShape[1], 12345)) {
            INDArray inArr = p.getFirst().muli(100);
            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable expand = sd.f().expandDims(in, i);
            // Using stdev here: mean/sum would backprop the same gradient for each input...
            // (registering the op in the graph is all that matters — the binding was unused)
            sd.standardDeviation("out", expand, true);
            INDArray out = sd.execAndEndResult();
            // Forward pass: stdev over all elements of the expanded array must
            // equal the stdev of the original input
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);
            assertEquals(expOut, out);
            assertArrayEquals(expExpandShape, expand.getArr().shape());
            INDArray expExpand = inArr.dup('c').reshape(expExpandShape);
            assertEquals(expExpand, expand.getArr());
            String msg = "expandDim=" + i + ", source=" + p.getSecond();
            log.info("Starting: " + msg);
            boolean ok = GradCheckUtil.checkGradients(sd);
            assertTrue(msg, ok);
        }
    }
}
Example usage of org.nd4j.autodiff.samediff.SameDiff from the nd4j project (deeplearning4j): class GradCheckMisc, method testSliceGradient.
/**
 * Gradient checks for slice(): each test case slices a random array and
 * gradient-checks a stdev loss built on the slice.
 */
@Test
public void testSliceGradient() {
    Nd4j.getRandom().setSeed(12345);
    // Order here: original shape, begin, size
    // NOTE(review): the comment above says "size", but the test data and the msg
    // label below treat the third element as an "end" index — confirm the
    // semantics of sd.slice() and make the two consistent.
    List<Triple<int[], int[], int[]>> testCases = new ArrayList<>();
    testCases.add(new Triple<>(new int[] { 3, 4 }, new int[] { 0, 0 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4 }, new int[] { 1, 1 }, new int[] { 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4 }, new int[] { 1, 2 }, new int[] { 2, 3 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 5 }, new int[] { 0, 0, 0 }, new int[] { 3, 4, 5 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 5 }, new int[] { 1, 1, 1 }, new int[] { 2, 3, 4 }));
    testCases.add(new Triple<>(new int[] { 3, 4, 5 }, new int[] { 1, 0, 2 }, new int[] { 3, 3, 4 }));
    for (int i = 0; i < testCases.size(); i++) {
        Triple<int[], int[], int[]> t = testCases.get(i);
        int[] os = t.getFirst();
        int[] b = t.getSecond();
        int[] e = t.getThird();
        INDArray arr = Nd4j.rand(os);
        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", arr);
        SDVariable slice = sd.slice(in, b, e);
        // stdev loss: mean/sum would backprop an identical gradient to every element
        // (registering the op in the graph is all that matters — the binding was unused)
        sd.standardDeviation(slice, true);
        String msg = "i=" + i + ": inShape=" + Arrays.toString(os) + ", begin=" + Arrays.toString(b) + ", end=" + Arrays.toString(e);
        log.info("Starting test: " + msg);
        // Fixed: the checkGradients result was previously discarded, so a failing
        // gradient check went unnoticed. Assert it, consistent with sibling tests.
        boolean ok = GradCheckUtil.checkGradients(sd);
        assertTrue(msg, ok);
    }
}
Aggregations