Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (by deeplearning4j).
Source: class Nd4jTestsC, method testDupAndDupWithOrder.
@Test
public void testDupAndDupWithOrder() {
// Verifies that dup() keeps the default ordering, that dup('c')/dup('f')
// honour the requested ordering, and that duplication never changes contents.
List<Pair<INDArray, String>> testInputs = NDArrayCreationUtil.getAllTestMatricesWithShape(ordering(), 4, 5, 123);
for (Pair<INDArray, String> pair : testInputs) {
String msg = pair.getSecond();
INDArray in = pair.getFirst();
INDArray dup = in.dup();
INDArray dupc = in.dup('c');
INDArray dupf = in.dup('f');
// JUnit assertEquals takes (expected, actual); the original had them
// swapped, which produces misleading failure messages.
assertEquals(ordering(), dup.ordering());
assertEquals('c', dupc.ordering());
assertEquals('f', dupf.ordering());
// Contents must be identical to the source regardless of target ordering.
assertEquals(msg, in, dupc);
assertEquals(msg, in, dupf);
}
}
Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (by deeplearning4j).
Source: class Nd4jTestsComparisonC, method testGemmWithOpsCommonsMath.
@Test
public void testGemmWithOpsCommonsMath() {
// Exhaustively checks gemm (C = alpha*op(A)*op(B) + beta*C) against a
// Commons Math reference, over all test matrix layouts, both transpose
// flags, and several alpha/beta combinations.
List<Pair<INDArray, String>> first = NDArrayCreationUtil.getAllTestMatricesWithShape(3, 5, SEED);
List<Pair<INDArray, String>> firstT = NDArrayCreationUtil.getAllTestMatricesWithShape(5, 3, SEED);
List<Pair<INDArray, String>> second = NDArrayCreationUtil.getAllTestMatricesWithShape(5, 4, SEED);
List<Pair<INDArray, String>> secondT = NDArrayCreationUtil.getAllTestMatricesWithShape(4, 5, SEED);
double[] alpha = { 1.0, -0.5, 2.5 };
double[] beta = { 0.0, -0.25, 1.5 };
INDArray cOrig = Nd4j.linspace(1, 12, 12).reshape(3, 4);
for (int i = 0; i < first.size(); i++) {
for (int j = 0; j < second.size(); j++) {
for (int k = 0; k < alpha.length; k++) {
for (int m = 0; m < beta.length; m++) {
// Fresh 'f'-ordered C for each transpose combination, since gemm
// overwrites it in place.
INDArray cff = Nd4j.create(cOrig.shape(), 'f');
cff.assign(cOrig);
INDArray cft = Nd4j.create(cOrig.shape(), 'f');
cft.assign(cOrig);
INDArray ctf = Nd4j.create(cOrig.shape(), 'f');
ctf.assign(cOrig);
INDArray ctt = Nd4j.create(cOrig.shape(), 'f');
ctt.assign(cOrig);
double a = alpha[k];
// BUG FIX: was beta[k] — the 'm' loop iterates over beta but was never
// used to index it, so beta values other than beta[k] were never tested.
double b = beta[m];
Pair<INDArray, String> p1 = first.get(i);
Pair<INDArray, String> p1T = firstT.get(i);
Pair<INDArray, String> p2 = second.get(j);
Pair<INDArray, String> p2T = secondT.get(j);
String errorMsgff = getGemmErrorMsg(i, j, false, false, a, b, p1, p2);
String errorMsgft = getGemmErrorMsg(i, j, false, true, a, b, p1, p2T);
String errorMsgtf = getGemmErrorMsg(i, j, true, false, a, b, p1T, p2);
String errorMsgtt = getGemmErrorMsg(i, j, true, true, a, b, p1T, p2T);
System.out.println((String.format("Running iteration %d %d %d %d", i, j, k, m)));
assertTrue(errorMsgff, CheckUtil.checkGemm(p1.getFirst(), p2.getFirst(), cff, false, false, a, b, 1e-4, 1e-6));
assertTrue(errorMsgft, CheckUtil.checkGemm(p1.getFirst(), p2T.getFirst(), cft, false, true, a, b, 1e-4, 1e-6));
assertTrue(errorMsgtf, CheckUtil.checkGemm(p1T.getFirst(), p2.getFirst(), ctf, true, false, a, b, 1e-4, 1e-6));
assertTrue(errorMsgtt, CheckUtil.checkGemm(p1T.getFirst(), p2T.getFirst(), ctt, true, true, a, b, 1e-4, 1e-6));
// Also: Confirm that if the C array is uninitialized and beta is 0.0, we don't have issues like 0*NaN = NaN
if (b == 0.0) {
cff.assign(Double.NaN);
cft.assign(Double.NaN);
ctf.assign(Double.NaN);
ctt.assign(Double.NaN);
assertTrue(errorMsgff, CheckUtil.checkGemm(p1.getFirst(), p2.getFirst(), cff, false, false, a, b, 1e-4, 1e-6));
assertTrue(errorMsgft, CheckUtil.checkGemm(p1.getFirst(), p2T.getFirst(), cft, false, true, a, b, 1e-4, 1e-6));
assertTrue(errorMsgtf, CheckUtil.checkGemm(p1T.getFirst(), p2.getFirst(), ctf, true, false, a, b, 1e-4, 1e-6));
assertTrue(errorMsgtt, CheckUtil.checkGemm(p1T.getFirst(), p2T.getFirst(), ctt, true, true, a, b, 1e-4, 1e-6));
}
}
}
}
}
}
Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (by deeplearning4j).
Source: class TestTensorAlongDimension, method testTadShapes1d.
@Test
public void testTadShapes1d() {
// Verify that tensorAlongDimension returns the expected shapes and values,
// and that results do not depend on the underlying array layout/ordering.
/**
 * NEED TO WORK ON ELEMENT WISE STRIDE NOW.
 */
// Case 1: 2d input
int rows = 3;
int cols = 4;
INDArray expected = Nd4j.linspace(1, rows * cols, rows * cols).reshape('c', rows, cols);
List<Pair<INDArray, String>> matrices = NDArrayCreationUtil.getAllTestMatricesWithShape('c', rows, cols, 12345);
for (Pair<INDArray, String> pair : matrices) {
INDArray arr = pair.getFirst().assign(expected);
// Dimension 0: one row vector of length 'rows' per column
assertEquals(cols, arr.tensorssAlongDimension(0));
for (int idx = 0; idx < cols; idx++) {
INDArray tad = arr.tensorAlongDimension(idx, 0);
INDArray javaTad = arr.javaTensorAlongDimension(idx, 0);
// Native and pure-Java TAD implementations must agree
assertEquals(javaTad, tad);
assertArrayEquals(new int[] { 1, rows }, tad.shape());
assertEquals(expected.javaTensorAlongDimension(idx, 0), tad);
}
// Dimension 1: one row vector of length 'cols' per row
assertEquals(rows, arr.tensorssAlongDimension(1));
for (int idx = 0; idx < rows; idx++) {
INDArray tad = arr.tensorAlongDimension(idx, 1);
assertArrayEquals(new int[] { 1, cols }, tad.shape());
assertEquals(expected.javaTensorAlongDimension(idx, 1), tad);
}
}
// Case 2: 3d input
int dim2 = 5;
log.info("AF");
expected = Nd4j.linspace(1, rows * cols * dim2, rows * cols * dim2).reshape('c', rows, cols, dim2);
matrices = NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, rows, cols, dim2);
for (Pair<INDArray, String> pair : matrices) {
INDArray arr = pair.getFirst().assign(expected);
INDArray javaTad = arr.javaTensorAlongDimension(0, 0);
INDArray nativeTad = arr.tensorAlongDimension(0, 0);
assertEquals(javaTad, nativeTad);
// Dimension 0: row vectors of length 'rows', cols*dim2 of them
assertEquals("Failed on " + pair.getValue(), cols * dim2, arr.tensorssAlongDimension(0));
for (int idx = 0; idx < cols * dim2; idx++) {
INDArray tad = arr.tensorAlongDimension(idx, 0);
assertArrayEquals(new int[] { 1, rows }, tad.shape());
assertEquals(expected.javaTensorAlongDimension(idx, 0), tad);
}
// Dimension 1: row vectors of length 'cols', rows*dim2 of them
assertEquals(rows * dim2, arr.tensorssAlongDimension(1));
for (int idx = 0; idx < rows * dim2; idx++) {
INDArray tad = arr.tensorAlongDimension(idx, 1);
assertArrayEquals(new int[] { 1, cols }, tad.shape());
assertEquals(expected.javaTensorAlongDimension(idx, 1), tad);
}
// Dimension 2: row vectors of length 'dim2', rows*cols of them
assertEquals(rows * cols, arr.tensorssAlongDimension(2));
for (int idx = 0; idx < rows * cols; idx++) {
INDArray tad = arr.tensorAlongDimension(idx, 2);
assertArrayEquals(new int[] { 1, dim2 }, tad.shape());
assertEquals(expected.javaTensorAlongDimension(idx, 2), tad);
}
}
}
Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (by deeplearning4j).
Source: class ConvolutionTestsC, method testMaxPoolBackprop.
@Test
@Ignore
public void testMaxPoolBackprop() {
// Compare the native maxpool2d_bp op against a reference Java
// implementation (expGradMaxPoolBackPropSame), over all 4d test layouts.
Nd4j.getRandom().setSeed(12345);
for (int iter = 0; iter < 5; iter++) {
int[] inputShape = { 1, 1, 4, 3 };
int[] kernel = { 2, 2 };
int[] strides = { 1, 1 };
int[] pad = { 0, 0 };
// TODO non 1-1 dilation
int[] dilation = { 1, 1 };
boolean same = true;
String fn = "maxpool2d_bp";
// iArgs layout (11 entries): kH, kW, sH, sW, pH, pW, dH, dW,
// sameMode flag, index 9 unused for max pooling, index 10 = 0 for NCHW.
int[] iArgs = { kernel[0], kernel[1], strides[0], strides[1], pad[0], pad[1], dilation[0], dilation[1], same ? 1 : 0, 0, 0 };
List<Pair<INDArray, String>> inputs = NDArrayCreationUtil.getAll4dTestArraysWithShape(12345, inputShape);
for (Pair<INDArray, String> pIn : inputs) {
INDArray input = pIn.getFirst();
int[] outShapeHW = getOutputSize(input, kernel, strides, pad, same);
List<Pair<INDArray, String>> epsList = NDArrayCreationUtil.getAll4dTestArraysWithShape(12345, inputShape[0], inputShape[1], outShapeHW[0], outShapeHW[1]);
for (Pair<INDArray, String> pEps : epsList) {
INDArray epsNext = Nd4j.create(inputShape, 'c');
// Runs fine with dups:
// input = input.dup('c');
INDArray epsilon = pEps.getFirst().dup('c');
DynamicCustomOp op = DynamicCustomOp.builder(fn).addInputs(input, epsilon).addOutputs(epsNext).addIntegerArguments(iArgs).build();
Nd4j.getExecutioner().exec(op);
INDArray expEpsNext = expGradMaxPoolBackPropSame(input, epsilon, kernel, strides, same);
String msg = "input=" + pIn.getSecond() + ", eps=" + pEps.getSecond();
assertEquals(msg, expEpsNext, epsNext);
}
}
}
}
Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (by deeplearning4j).
Source: class ActivationRReLU, method backprop.
@Override
// Backprop through RReLU: the intended gradient is 1 where the
// pre-activation is positive and the sampled slope 'alpha' where it is <= 0,
// chain-ruled with the incoming gradient 'epsilon'. Returns (dL/dz, null):
// RReLU has no trainable parameters, so the second pair element is null.
public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
INDArray dLdz = Nd4j.ones(in.shape());
// NOTE(review): the condition here is applied with 'dLdz' as the target,
// and dLdz is all ones at this point, so lessThanOrEqual(0.0) would never
// match if the condition is evaluated against dLdz's own elements. The
// intent appears to be "replace where 'in' <= 0" — verify the exact
// semantics of BooleanIndexing.replaceWhere(to, from, condition) and
// whether 'in' should drive the selection.
BooleanIndexing.replaceWhere(dLdz, alpha, Conditions.lessThanOrEqual(0.0));
dLdz.muli(epsilon);
return new Pair<>(dLdz, null);
}
End of aggregated usage examples.