Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class GradCheckTransforms, method testPairwiseTransforms.
@Test
public void testPairwiseTransforms() {
    /*
    add, sub, mul, div, rsub, rdiv
    eq, neq, gt, lt, gte, lte, or, and, xor
    min, max
    mmul
    tensormmul
    */
    // Test transforms (pairwise)
    Nd4j.getRandom().setSeed(12345);
    List<String> allSkipped = new ArrayList<>();
    List<String> allFailed = new ArrayList<>();
    for (int i = 0; i < 23; i++) {
        boolean skipBackward = false;
        SameDiff sd = SameDiff.create();
        int nOut = 4;
        int minibatch = 5;
        SDVariable in1 = sd.var("in1", new int[] { -1, nOut });
        SDVariable in2 = sd.var("in2", new int[] { -1, nOut });
        INDArray ia = Nd4j.randn(minibatch, nOut);
        INDArray ib = Nd4j.randn(minibatch, nOut);
        SDVariable t;
        INDArray expOut;
        switch (i) {
            case 0:
                t = in1.add(in2);
                expOut = ia.add(ib);
                break;
            case 1:
                t = in1.sub(in2);
                expOut = ia.sub(ib);
                break;
            case 2:
                t = in1.mul(in2);
                expOut = ia.mul(ib);
                break;
            case 3:
                // The div case is skipped: continue moves straight to the next iteration.
                continue;
            case 4:
                t = in1.rsub(in2);
                expOut = ia.rsub(ib);
                break;
            case 5:
                t = in1.rdiv(in2);
                expOut = ia.rdiv(ib);
                break;
            case 6:
                t = sd.eq(in1, in2);
                expOut = ia.eq(ib);
                break;
            case 7:
                t = sd.neq(in1, in2);
                expOut = ia.neq(ib);
                break;
            case 8:
                t = sd.gt(in1, in2);
                expOut = ia.gt(ib);
                break;
            case 9:
                t = sd.lt(in1, in2);
                expOut = ia.lt(ib);
                break;
            case 10:
                t = sd.gte(in1, in2);
                expOut = ia.dup();
                Nd4j.getExecutioner().exec(new GreaterThanOrEqual(new INDArray[] { ia, ib }, new INDArray[] { expOut }));
                break;
            case 11:
                t = sd.lte(in1, in2);
                expOut = ia.dup();
                Nd4j.getExecutioner().exec(new LessThanOrEqual(new INDArray[] { ia, ib }, new INDArray[] { expOut }));
                break;
            case 12:
                // The boolean ops expect 0/1-valued inputs, hence the Bernoulli resampling here.
                ia = Nd4j.getExecutioner().exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.getExecutioner().exec(new BernoulliDistribution(ib, 0.5));
                t = sd.or(in1, in2);
                expOut = Transforms.or(ia, ib);
                break;
            case 13:
                ib = Nd4j.randn(nOut, nOut);
                t = sd.mmul(in1, in2);
                expOut = ia.mmul(ib);
                break;
            case 14:
                t = sd.max(in1, in2);
                expOut = Nd4j.getExecutioner().execAndReturn(new OldMax(ia, ib, ia.dup(), ia.length()));
                break;
            case 15:
                t = sd.min(in1, in2);
                expOut = Nd4j.getExecutioner().execAndReturn(new OldMin(ia, ib, ia.dup(), ia.length()));
                break;
            case 16:
                ia = Nd4j.getExecutioner().exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.getExecutioner().exec(new BernoulliDistribution(ib, 0.5));
                t = sd.and(in1, in2);
                expOut = Transforms.and(ia, ib);
                break;
            case 17:
                ia = Nd4j.getExecutioner().exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.getExecutioner().exec(new BernoulliDistribution(ib, 0.5));
                t = sd.xor(in1, in2);
                expOut = Transforms.xor(ia, ib);
                break;
            case 18:
                t = sd.assign(in1, in2);
                expOut = ib;
                break;
            case 19:
                t = sd.atan2(in1, in2);
                // Note: y,x order for samediff; x,y order for transforms
                expOut = Transforms.atan2(ib, ia);
                skipBackward = true;
                break;
            case 20:
                t = sd.mergeAdd(in1, in2, in2);
                expOut = ia.add(ib).add(ib);
                break;
            case 21:
                ia = Nd4j.create(new float[] { 2, 4 });
                ib = Nd4j.create(new float[] { 42, 2 });
                in1 = sd.var("in1", new int[] { 1, 2 });
                in2 = sd.var("in2", new int[] { 1, 2 });
                t = in1.truncatedDiv(in2);
                expOut = Nd4j.create(ia.shape(), ia.ordering());
                Nd4j.getExecutioner().exec(new TruncateDivOp(ia, ib, expOut));
                skipBackward = true;
                break;
            case 22:
                t = in1.squaredDifference(in2);
                expOut = Nd4j.create(ia.shape(), ia.ordering());
                DynamicCustomOp squareDiff = DynamicCustomOp.builder("squaredsubtract").addInputs(ia, ib).addOutputs(expOut).build();
                Nd4j.getExecutioner().exec(squareDiff);
                skipBackward = true;
                break;
            default:
                throw new RuntimeException();
        }
        DifferentialFunction[] funcs = sd.functions();
        String name = funcs[0].opName();
        String msg = "test: " + i + " - " + name;
        log.info("*** Starting test: " + msg);
        SDVariable loss = sd.mean("loss", t);
        sd.associateArrayWithVariable(ia, in1);
        sd.associateArrayWithVariable(ib, in2);
        sd.exec();
        INDArray out = t.getArr();
        assertEquals(msg, expOut, out);
        boolean ok;
        if (skipBackward) {
            ok = true;
            msg += " - SKIPPED";
            allSkipped.add(msg);
        } else {
            try {
                ok = GradCheckUtil.checkGradients(sd);
            } catch (Exception e) {
                e.printStackTrace();
                msg += " - EXCEPTION";
                ok = false;
            }
        }
        if (!ok) {
            allFailed.add(msg);
        }
    }
    if (allSkipped.size() > 0) {
        log.info("All backward skipped transforms: " + allSkipped);
        log.info(allSkipped.size() + " backward passes were skipped.");
    }
    if (allFailed.size() > 0) {
        log.error("All failed transforms: " + allFailed);
        fail(allFailed.size() + " transforms failed");
    }
}
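Every iteration follows the same pattern: build the op on SDVariables, compute the expected result with plain INDArray operations, assert that the forward pass matches, then run a numerical gradient check. Below is a minimal standalone sketch of that pattern for a single op (add), using only APIs that already appear above; the shapes, variable names, and seed are illustrative choices, not part of the original test.

// Minimal sketch: forward check plus gradient check for one pairwise op (add).
Nd4j.getRandom().setSeed(12345);
SameDiff sd = SameDiff.create();
SDVariable in1 = sd.var("in1", new int[] { -1, 4 });
SDVariable in2 = sd.var("in2", new int[] { -1, 4 });
SDVariable sum = in1.add(in2);
SDVariable loss = sd.mean("loss", sum);   // scalar loss for the gradient check
INDArray ia = Nd4j.randn(5, 4);
INDArray ib = Nd4j.randn(5, 4);
sd.associateArrayWithVariable(ia, in1);
sd.associateArrayWithVariable(ib, in2);
sd.exec();
assertEquals(ia.add(ib), sum.getArr());        // forward pass matches the INDArray result
assertTrue(GradCheckUtil.checkGradients(sd));  // numerical gradient check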
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class GradCheckTransforms, method testDiag.
@Test
public void testDiag() {
    SameDiff sd = SameDiff.create();
    INDArray ia = Nd4j.create(new float[] { 4, 2 });
    SDVariable in = sd.var("in", new int[] { 1, 2 });
    INDArray expOut = Nd4j.create(new int[] { 2, 2 });
    DynamicCustomOp diag = DynamicCustomOp.builder("diag").addInputs(ia).addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(diag);
    SDVariable t = sd.diag(in);
    SDVariable loss = sd.max("loss", t, 0, 1);
    sd.associateArrayWithVariable(ia, in);
    sd.exec();
    INDArray out = t.getArr();
    // Note: forward and backward failures are only logged here, not asserted.
    if (!expOut.equals(out)) {
        log.info("forward failed");
    }
    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
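For reference, diag places the input vector on the main diagonal of a square matrix, so the expected forward output for the input above is:

// Input:  [4, 2]
// Output: [[4, 0],
//          [0, 2]]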
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class GraphExecutionerTest, method testSums1.
@Test
@Ignore
public void testSums1() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones", ones);
    SDVariable result = sdVariable.addi(1.0);
    SDVariable total = sameDiff.sum(result, Integer.MAX_VALUE);
    val executioner = new NativeGraphExecutioner();
    INDArray[] res = executioner.executeGraph(sameDiff);
    assertEquals(8.0, res[0].getDouble(0), 1e-5);
}
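The expected value follows directly from the graph: passing Integer.MAX_VALUE as the dimension argument is Nd4j's convention for reducing over all dimensions, so the executioner computes sum(ones(4) + 1) = sum([2, 2, 2, 2]) = 8.0.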
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class GraphExecutionerTest, method testConversion.
@Test
@Ignore
public void testConversion() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones", ones);
    SDVariable result = sdVariable.addi(1.0);
    SDVariable total = sameDiff.sum(result, Integer.MAX_VALUE);
    val executioner = new NativeGraphExecutioner();
    ByteBuffer buffer = executioner.convertToFlatBuffers(sameDiff, ExecutorConfiguration.builder()
            .profilingMode(OpExecutioner.ProfilingMode.DISABLED)
            .executionMode(ExecutionMode.SEQUENTIAL)
            .outputMode(OutputMode.IMPLICIT)
            .build());
    val offset = buffer.position();
    val array = buffer.array();
    try (val fos = new FileOutputStream("../../libnd4j/tests/resources/adam_sum.fb");
         val dos = new DataOutputStream(fos)) {
        dos.write(array, offset, array.length - offset);
    }
    // INDArray[] res = executioner.executeGraph(sameDiff);
    // assertEquals(8.0, res[0].getDouble(0), 1e-5);
    /*
    INDArray output = null;
    for (int i = 0; i < 5; i++) {
        output = sameDiff.execAndEndResult(ops);
        System.out.println("Ones " + ones);
        System.out.println(output);
    }
    assertEquals(Nd4j.valueArrayOf(4, 7), ones);
    assertEquals(28, output.getDouble(0), 1e-1);
    */
}
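The offset handling is worth noting: FlatBuffers builds its buffer back to front, so after serialization buffer.position() marks the first byte of valid data within the backing array. Writing array[offset .. array.length) therefore dumps exactly the serialized graph to adam_sum.fb, without the unused leading portion of the buffer.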
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class GraphExecutionerTest, method testEquality1.
/**
 * VarSpace should dump everything. 4 variables in our case
 * @throws Exception
 */
@Test
public void testEquality1() throws Exception {
    GraphExecutioner executionerA = new BasicGraphExecutioner();
    GraphExecutioner executionerB = new NativeGraphExecutioner();
    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones", ones);
    SDVariable scalarOne = sameDiff.var("add1", Nd4j.scalar(1.0));
    SDVariable result = sdVariable.addi(scalarOne);
    SDVariable total = sameDiff.sum(result, Integer.MAX_VALUE);
    log.info("TOTAL: {}; Id: {}", total.getVarName(), total);
    // configVarSpace is a field of the test class, not shown in this snippet.
    INDArray[] resB = executionerB.executeGraph(sameDiff, configVarSpace);
    assertEquals(6, resB.length);
    assertEquals(Nd4j.create(new float[] { 2f, 2f, 2f, 2f }), resB[4]);
    assertEquals(Nd4j.scalar(1), resB[1]);
    assertEquals(Nd4j.scalar(8.0), resB[5]);
}
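For context on the expected values: addi adds the scalar in place, so the "ones" array ends up as [2, 2, 2, 2], and the full reduction over it gives 8.0. The executioner is run with configVarSpace, which, per the name and the javadoc above, appears to dump the graph's entire variable space rather than just the end result; that is why six arrays come back and the intermediate result (resB[4]) and the total (resB[5]) are picked out by index. The exact indices depend on the executioner's internal ordering.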