Usage of org.nd4j.autodiff.samediff.SDVariable in the nd4j project (deeplearning4j): class Norm1, method doDiff.
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v1) {
    // Gradient of the L1 norm: d l1Norm(in)/dx = signum(x), scaled element-wise
    // by the incoming gradient.
    // TODO shape may not always be defined?
    int rank = Shape.rankFromShape(arg().getShape());
    // Expand the reduced gradient back to the input's original rank;
    // relying on automatic broadcasting does not work for all cases.
    SDVariable gradExpanded = sameDiff.f().reductionBroadcastableWithOrigShape(rank, dimensions, i_v1.get(0));
    SDVariable sgn = sameDiff.sign(arg());
    return Arrays.asList(sgn.mul(gradExpanded));
}
Usage of org.nd4j.autodiff.samediff.SDVariable in the nd4j project (deeplearning4j): class Norm2, method doDiff.
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v1) {
    // Gradient of the L2 norm: d norm2(in)/dx = x / norm2(in), scaled element-wise
    // by the incoming gradient. The forward output (the norm itself) is reused here.
    SDVariable norm = outputVariables()[0];
    // TODO shape may not always be defined?
    int rank = Shape.rankFromShape(arg().getShape());
    // Expand both the norm and the incoming gradient back to the input's original
    // rank so the element-wise division/multiplication broadcasts correctly.
    SDVariable normExpanded = f().reductionBroadcastableWithOrigShape(rank, dimensions, norm);
    SDVariable gradExpanded = f().reductionBroadcastableWithOrigShape(rank, dimensions, i_v1.get(0));
    return Arrays.asList(arg().div(normExpanded).mul(gradExpanded));
}
Usage of org.nd4j.autodiff.samediff.SDVariable in the nd4j project (deeplearning4j): class GradCheckTransforms, method testDepthToSpace.
@Test
public void testDepthToSpace() {
    // Verifies SameDiff's depthToSpace forward output against the raw depth_to_space
    // custom op, then gradient-checks the graph.
    Nd4j.getRandom().setSeed(1337);
    int miniBatch = 128;
    int blockSize = 4;
    String dataFormat = "NHWC";
    int isNHWC = dataFormat.equals("NHWC") ? 1 : 0;
    int[] inputShape = new int[] { miniBatch, 2, 2, blockSize * blockSize };
    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);
    // Expected output computed directly via the custom op
    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("depth_to_space").addInputs(input).addIntegerArguments(blockSize, isNHWC).addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);
    sd.associateArrayWithVariable(input, sdInput);
    SDVariable t = sd.depthToSpace(sdInput, blockSize, dataFormat);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();
    // Fail the test on a forward-pass mismatch instead of only logging it
    if (!expOut.equals(out)) {
        log.info("depth to space failed on forward");
        throw new AssertionError("depth to space failed on forward");
    }
    // Propagate gradient-check failures instead of swallowing them with printStackTrace
    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        throw new AssertionError("Gradient check failed for depth_to_space", e);
    }
}
Usage of org.nd4j.autodiff.samediff.SDVariable in the nd4j project (deeplearning4j): class GradCheckTransforms, method testCross.
@Test
public void testCross() {
    // Verifies SameDiff's cross-product op against the raw "cross" custom op,
    // then gradient-checks the graph.
    INDArray a = Nd4j.create(new float[] { 4, 2, 1 }, new int[] { 1, 3 });
    INDArray b = Nd4j.create(new float[] { 1, 3, 4 }, new int[] { 1, 3 });
    // Expected output computed directly via the custom op
    INDArray expOut = Nd4j.create(1, 3);
    DynamicCustomOp op = DynamicCustomOp.builder("cross").addInputs(a, b).addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);
    SameDiff sd = SameDiff.create();
    SDVariable sdA = sd.var("a", expOut.shape());
    SDVariable sdB = sd.var("b", expOut.shape());
    sd.associateArrayWithVariable(a, sdA);
    sd.associateArrayWithVariable(b, sdB);
    SDVariable t = sd.cross(sdA, sdB);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();
    // Fail the test on a forward-pass mismatch instead of only logging it.
    // (Also fixes the copy-pasted "batch to space" message — this is the cross test.)
    if (!expOut.equals(out)) {
        log.info("cross failed on forward");
        throw new AssertionError("cross failed on forward");
    }
    // Propagate gradient-check failures instead of swallowing them with printStackTrace
    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        throw new AssertionError("Gradient check failed for cross", e);
    }
}
Usage of org.nd4j.autodiff.samediff.SDVariable in the nd4j project (deeplearning4j): class GradCheckTransforms, method testSpaceToBatch.
@Test
public void testSpaceToBatch() {
    // Verifies SameDiff's spaceToBatch forward output against the raw space_to_batch
    // custom op, then gradient-checks the graph.
    Nd4j.getRandom().setSeed(7331);
    int miniBatch = 4;
    int[] inputShape = new int[] { 1, 2, 2, 1 };
    int M = 2;
    int[] blockShape = new int[] { M, 1 };
    int[] paddingShape = new int[] { M, 2 };
    INDArray input = Nd4j.randn(inputShape);
    INDArray blocks = Nd4j.create(new float[] { 2, 2 }, blockShape);
    INDArray padding = Nd4j.create(new float[] { 0, 0, 0, 0 }, paddingShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);
    // Expected output computed directly via the custom op
    INDArray expOut = Nd4j.create(miniBatch, 1, 1, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("space_to_batch").addInputs(input, blocks, padding).addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);
    sd.associateArrayWithVariable(input, sdInput);
    SDVariable t = sd.spaceToBatch(sdInput, new int[] { 2, 2 }, new int[][] { { 0, 0 }, { 0, 0 } });
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();
    // Fail the test on a forward-pass mismatch instead of only logging it
    if (!expOut.equals(out)) {
        log.info("space to batch failed on forward");
        throw new AssertionError("space to batch failed on forward");
    }
    // Propagate gradient-check failures instead of swallowing them with printStackTrace
    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        throw new AssertionError("Gradient check failed for space_to_batch", e);
    }
}
Aggregations