Example usage of org.nd4j.linalg.api.ops.DynamicCustomOp in the nd4j project by deeplearning4j.
Taken from class ConvolutionTests, method testPooling7.
@Test
public void testPooling7() {
    // Verify maxpool2d (kernel 2x2, stride 2x2, no padding, SAME mode flag set)
    // on a linspace-filled NCHW input of shape [2, 2, 5, 5], for both output orders.
    for (char outputOrder : new char[] { 'c', 'f' }) {
        // Expected max-pool result, in row-major layout.
        INDArray expected = Nd4j.create(
                new float[] { 7.f, 9.f, 17.f, 19.f, 32.f, 34.f, 42.f, 44.f,
                        57.f, 59.f, 67.f, 69.f, 82.f, 84.f, 92.f, 94.f },
                new int[] { 2, 2, 2, 2 }, 'c');
        int length = 2 * 2 * 5 * 5;
        INDArray input = Nd4j.linspace(1, length, length).reshape('c', 2, 2, 5, 5);
        INDArray result = Nd4j.create(new int[] { 2, 2, 2, 2 }, outputOrder);
        // Integer args: kH, kW, sH, sW, pH, pW, dH, dW, then mode/extra flags.
        DynamicCustomOp maxPool = DynamicCustomOp.builder("maxpool2d")
                .addIntegerArguments(new int[] { 2, 2, 2, 2, 0, 0, 1, 1, 0, 1, 0 })
                .addInputs(input)
                .addOutputs(result)
                .build();
        Nd4j.getExecutioner().exec(maxPool);
        assertEquals("Output order: " + outputOrder, expected, maxPool.getOutputArgument(0));
    }
}
Example usage of org.nd4j.linalg.api.ops.DynamicCustomOp in the nd4j project by deeplearning4j.
Taken from class ConvolutionTests, method testPooling10.
@Test
public void testPooling10() {
    // Verify avgpool2d (kernel 2x2, stride 2x2, no padding) on a linspace-filled
    // NCHW input of shape [2, 2, 5, 5], for both 'c' and 'f' output orders.
    for (char outputOrder : new char[] { 'c', 'f' }) {
        // Expected average-pool result, in row-major layout.
        INDArray expected = Nd4j.create(
                new float[] { 4.f, 6.f, 7.5f, 14.f, 16.f, 17.5f, 21.5f, 23.5f, 25.f,
                        29.f, 31.f, 32.5f, 39.f, 41.f, 42.5f, 46.5f, 48.5f, 50.f,
                        54.f, 56.f, 57.5f, 64.f, 66.f, 67.5f, 71.5f, 73.5f, 75.f,
                        79.f, 81.f, 82.5f, 89.f, 91.f, 92.5f, 96.5f, 98.5f, 100.f },
                new int[] { 2, 2, 3, 3 }, 'c');
        int length = 2 * 2 * 5 * 5;
        INDArray input = Nd4j.linspace(1, length, length).reshape('c', 2, 2, 5, 5);
        INDArray result = Nd4j.create(new int[] { 2, 2, 3, 3 }, outputOrder);
        // Integer args: kH, kW, sH, sW, pH, pW, dH, dW, then mode/extra flags.
        DynamicCustomOp avgPool = DynamicCustomOp.builder("avgpool2d")
                .addIntegerArguments(new int[] { 2, 2, 2, 2, 0, 0, 1, 1, 1, 0, 0 })
                .addInputs(input)
                .addOutputs(result)
                .build();
        Nd4j.getExecutioner().exec(avgPool);
        assertEquals("Output order: " + outputOrder, expected, avgPool.getOutputArgument(0));
    }
}
Example usage of org.nd4j.linalg.api.ops.DynamicCustomOp in the nd4j project by deeplearning4j.
Taken from class Nd4jTestsC, method testMmulOp.
@Test
public void testMmulOp() {
    // A is 2x3; with transposeB enabled the Mmul op computes A * A^T, giving a 2x2 result.
    INDArray a = Nd4j.create(new double[][] { { 1, 2, 3 }, { 4, 5, 6 } });
    INDArray result = Nd4j.create(2, 2);
    INDArray expected = Nd4j.create(new double[][] { { 14, 32 }, { 32, 77 } });
    MMulTranspose transposeConfig = MMulTranspose.builder()
            .transposeB(true)
            .a(a)
            .b(a)
            .build();
    DynamicCustomOp mmul = new Mmul(a, a, result, transposeConfig);
    Nd4j.getExecutioner().exec(mmul);
    assertEquals(getFailureMessage(), expected, result);
}
Example usage of org.nd4j.linalg.api.ops.DynamicCustomOp in the nd4j project by deeplearning4j.
Taken from class GradCheckTransforms, method testDepthToSpace.
@Test
public void testDepthToSpace() {
    // Gradient-check test for SameDiff depthToSpace against the raw
    // "depth_to_space" custom op. The original version could never fail:
    // a forward mismatch was only logged, and gradient-check exceptions
    // were swallowed via printStackTrace(). Both are now hard failures.
    Nd4j.getRandom().setSeed(1337);
    int miniBatch = 128;
    int blockSize = 4;
    String dataFormat = "NHWC";
    int isNHWC = dataFormat.equals("NHWC") ? 1 : 0;
    // NHWC input: depth (last dim) = blockSize^2 so the output depth becomes 1.
    int[] inputShape = new int[] { miniBatch, 2, 2, blockSize * blockSize };
    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    // Expected output computed directly via the raw custom op.
    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("depth_to_space")
            .addInputs(input)
            .addIntegerArguments(blockSize, isNHWC)
            .addOutputs(expOut)
            .build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);
    SDVariable t = sd.depthToSpace(sdInput, blockSize, dataFormat);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();
    // Fail the test on a forward mismatch instead of merely logging it.
    assertEquals("depth to space failed on forward", expOut, out);
    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        // Rethrow so a gradient-check failure actually fails the test,
        // preserving the original exception as the cause.
        throw new RuntimeException("Gradient check failed for depth_to_space", e);
    }
}
Example usage of org.nd4j.linalg.api.ops.DynamicCustomOp in the nd4j project by deeplearning4j.
Taken from class GradCheckTransforms, method testCross.
@Test
public void testCross() {
    // Gradient-check test for SameDiff cross() against the raw "cross" custom op.
    // The original version could never fail: a forward mismatch was only logged
    // (with a copy-pasted "batch to space" message), and gradient-check
    // exceptions were swallowed via printStackTrace(). Both are now hard failures.
    INDArray a = Nd4j.create(new float[] { 4, 2, 1 }, new int[] { 1, 3 });
    INDArray b = Nd4j.create(new float[] { 1, 3, 4 }, new int[] { 1, 3 });

    // Expected cross product computed directly via the raw custom op.
    INDArray expOut = Nd4j.create(1, 3);
    DynamicCustomOp op = DynamicCustomOp.builder("cross")
            .addInputs(a, b)
            .addOutputs(expOut)
            .build();
    Nd4j.getExecutioner().exec(op);

    SameDiff sd = SameDiff.create();
    SDVariable sdA = sd.var("a", expOut.shape());
    SDVariable sdB = sd.var("b", expOut.shape());
    sd.associateArrayWithVariable(a, sdA);
    sd.associateArrayWithVariable(b, sdB);
    SDVariable t = sd.cross(sdA, sdB);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();
    // Fail the test on a forward mismatch instead of merely logging it
    // (the original log message wrongly said "batch to space").
    assertEquals("cross failed on forward", expOut, out);
    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        // Rethrow so a gradient-check failure actually fails the test,
        // preserving the original exception as the cause.
        throw new RuntimeException("Gradient check failed for cross", e);
    }
}
Aggregations