Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (deeplearning4j): class ConvolutionTests, method testPoolingDilation.
/**
 * Verifies 2D pooling (max, avg-exclude-padding, avg-include-padding) with dilation 2
 * in SAME-padding mode, against hand-computed expected outputs for a fixed 1x1x4x5 input.
 * Each pooling mode is run over every 4d test-array layout produced by
 * NDArrayCreationUtil, so results must not depend on the underlying array order/strides.
 */
@Test
public void testPoolingDilation() {
// NCHW input: 1 example, 1 channel, height 4, width 5
int[] inputShape = { 1, 1, 4, 5 };
// SAME mode with stride 1 preserves spatial dimensions
int outH = inputShape[2];
int outW = inputShape[3];
int[] kernel = { 2, 2 };
int[] strides = { 1, 1 };
// From same mode
int[] pad = { 1, 1 };
int[] dilation = { 2, 2 };
boolean same = true;
/*
Input:
[ 1, 2, 3, 4, 5
6, 7, 8, 9, 10
11, 12, 13, 14, 15
16, 17, 18, 19, 20 ]
Input with SAME padding:
[ 0, 0, 0, 0, 0, 0, 0
0, 1, 2, 3, 4, 5, 0
0, 6, 7, 8, 9, 10, 0
0, 11, 12, 13, 14, 15, 0
0, 16, 17, 18, 19, 20, 0
0, 0, 0, 0, 0, 0, 0]
4x5 in
Same mode, stride 1, dilation 2, kernel 2
kHEffective = (2 + (2-1)*(2-1)) = 3
oH = ceil(iH/sH) = 4
oW = ceil(iW/sW) = 5
totalPadH = (oH-1)*sH + kH - inH = (4-1)*1 + 3 - 4 = 2
padTop = 1, padBottom = 1
totalPadW = (oW-1)*sW + kW - inW = (5-1)*1 + 3 - 5 = 2
padLeft = 1, padRight = 1
[ 0, 0] [ 0, 0] [ 0, 0] [ 0, 0] [ 0, 0]
[ 0, 7] [ 6, 8] [ 7, 9] [ 8, 10] [ 9, 0]
[ 0 2] [ 1, 3] [ 2, 4] [ 3, 5] [ 4, 0]
[ 0, 12] [11, 13] [12, 14] [13, 15] [14, 0]
[ 0, 7] [ 6, 8] [ 7, 9] [ 8, 10] [ 9, 0]
[ 0, 17] [16, 18] [17, 19] [18, 20] [19, 0]
[ 0, 12] [11, 13] [12, 14] [13, 15] [14, 0]
[ 0, 0], [ 0, 0] [ 0, 0] [ 0, 0] [ 0, 0]
*/
// Fill the canonical input with 1..20 row-major
INDArray origInput = Nd4j.create(inputShape);
origInput.get(point(0), point(0), all(), all()).assign(Nd4j.linspace(1, 20, 20).reshape('c', 4, 5));
// Hand-computed expected max-pool output (see windows sketched above)
INDArray expMax = Nd4j.create(1, 1, 4, 5);
expMax.get(point(0), point(0), all(), all()).assign(Nd4j.create(new double[][] { { 7, 8, 9, 10, 9 }, { 12, 13, 14, 15, 14 }, { 17, 18, 19, 20, 19 }, { 12, 13, 14, 15, 14 } }));
// Sum of the in-bounds values of each pooling window (padding contributes 0)
INDArray sum = Nd4j.create(1, 1, 4, 5);
sum.get(point(0), point(0), all(), all()).assign(Nd4j.create(new double[][] { { 7, (6 + 8), (7 + 9), (8 + 10), 9 }, { (2 + 12), (1 + 3 + 11 + 13), (2 + 4 + 12 + 14), (3 + 5 + 13 + 15), (4 + 14) }, { (7 + 17), (6 + 8 + 16 + 18), (7 + 9 + 17 + 19), (8 + 10 + 18 + 20), (9 + 19) }, { 12, (11 + 13), (12 + 14), (13 + 15), 14 } }));
// Avg with EXCLUDE_PADDING: divide each window sum by the count of in-bounds cells
INDArray expAvgExclude = sum.dup();
expAvgExclude.get(point(0), point(0), all(), all()).divi(Nd4j.create(new double[][] { { 1, 2, 2, 2, 1 }, { 2, 4, 4, 4, 2 }, { 2, 4, 4, 4, 2 }, { 1, 2, 2, 2, 1 } }));
// Avg with INCLUDE_PADDING: always divide by the full kernel size (2*2 = 4)
INDArray expAvgInclude = sum.div(4.0);
int testNum = 0;
// i selects the pooling mode: 0 = max, 1 = avg (exclude padding), 2 = avg (include padding)
for (int i = 0; i < 3; i++) {
// Same logical values in every supported 4d layout/order/stride combination
List<Pair<INDArray, String>> inputs = NDArrayCreationUtil.getAll4dTestArraysWithShape(12345, inputShape);
for (Pair<INDArray, String> pIn : inputs) {
INDArray input = pIn.getFirst().assign(origInput);
INDArray out = Nd4j.create(input.shape(), 'c');
// TODO Test on weird strides also (i.e., remove the dup here)
input = input.dup('c');
INDArray exp;
String mode;
switch(i) {
case // Max
0:
Convolution.pooling2D(input, kernel[0], kernel[1], strides[0], strides[1], pad[0], pad[1], dilation[0], dilation[1], same, Pooling2D.Pooling2DType.MAX, Pooling2D.Divisor.INCLUDE_PADDING, 0.0, outH, outW, out);
exp = expMax;
mode = "max";
break;
case // Avg + mode 0 (exclude padding)
1:
Convolution.pooling2D(input, kernel[0], kernel[1], strides[0], strides[1], pad[0], pad[1], dilation[0], dilation[1], same, Pooling2D.Pooling2DType.AVG, Pooling2D.Divisor.EXCLUDE_PADDING, 0.0, outH, outW, out);
exp = expAvgExclude;
mode = "avg_0";
break;
case // Avg + mode 1 (include padding)
2:
Convolution.pooling2D(input, kernel[0], kernel[1], strides[0], strides[1], pad[0], pad[1], dilation[0], dilation[1], same, Pooling2D.Pooling2DType.AVG, Pooling2D.Divisor.INCLUDE_PADDING, 0.0, outH, outW, out);
exp = expAvgInclude;
// NOTE(review): label reads "avg_2" though the comment above says "mode 1" — label only, no behavioral impact
mode = "avg_2";
break;
default:
throw new RuntimeException();
}
String msg = "TestNum=" + testNum + ", Mode: " + mode + ", " + pIn.getSecond();
assertEquals(msg, exp, out);
testNum++;
}
}
}
Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (deeplearning4j): class TestTensorAlongDimension, method testTadShapes2d.
/**
 * Verifies that 2d tensors-along-dimension (TADs) extracted from 3d and 4d arrays have the
 * expected shapes and values, independent of the source array's layout/order/strides.
 * For each dimension pair it checks: the TAD count, the TAD shape, and that the native
 * tensorAlongDimension result matches both the reference javaTensorAlongDimension result
 * and the TAD of a canonical c-order copy of the same values.
 * Note: tensorssAlongDimension (double "ss") is the actual nd4j API method name.
 */
@Test
public void testTadShapes2d() {
// Ensure TAD returns the correct/expected shapes, and values don't depend on underlying array layout/order etc
// From a 3d array:
int rows = 3;
int cols = 4;
int dim2 = 5;
// Canonical c-order values 1..60; every test array below is assigned these same values
INDArray testValues = Nd4j.linspace(1, rows * cols * dim2, rows * cols * dim2).reshape('c', rows, cols, dim2);
List<Pair<INDArray, String>> list = NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, rows, cols, dim2);
for (Pair<INDArray, String> p : list) {
INDArray arr = p.getFirst().assign(testValues);
// Along dimension 0,1: expect matrix with shape [rows,cols]
assertEquals(dim2, arr.tensorssAlongDimension(0, 1));
for (int i = 0; i < dim2; i++) {
// Reference (pure-Java) TAD vs native TAD must agree in stride, values, and shape
INDArray javaTad = arr.javaTensorAlongDimension(i, 0, 1);
INDArray tad = arr.tensorAlongDimension(i, 0, 1);
int javaEleStride = javaTad.elementWiseStride();
int testTad = tad.elementWiseStride();
assertEquals(javaEleStride, testTad);
assertEquals(javaTad, tad);
assertArrayEquals(new int[] { rows, cols }, tad.shape());
assertEquals(testValues.tensorAlongDimension(i, 0, 1), tad);
}
// Along dimension 0,2: expect matrix with shape [rows,dim2]
assertEquals(cols, arr.tensorssAlongDimension(0, 2));
for (int i = 0; i < cols; i++) {
INDArray javaTad = arr.javaTensorAlongDimension(i, 0, 2);
INDArray tad = arr.tensorAlongDimension(i, 0, 2);
assertEquals(javaTad, tad);
assertArrayEquals(new int[] { rows, dim2 }, tad.shape());
assertEquals(testValues.javaTensorAlongDimension(i, 0, 2), tad);
}
// Along dimension 1,2: expect matrix with shape [cols,dim2]
assertEquals(rows, arr.tensorssAlongDimension(1, 2));
for (int i = 0; i < rows; i++) {
INDArray tad = arr.tensorAlongDimension(i, 1, 2);
assertArrayEquals(new int[] { cols, dim2 }, tad.shape());
assertEquals(testValues.javaTensorAlongDimension(i, 1, 2), tad);
}
}
// From a 4d array:
int dim3 = 6;
// Same idea, now over all six 2d dimension pairs of a 4d array
testValues = Nd4j.linspace(1, rows * cols * dim2 * dim3, rows * cols * dim2 * dim3).reshape('c', rows, cols, dim2, dim3);
list = NDArrayCreationUtil.getAll4dTestArraysWithShape(12345, rows, cols, dim2, dim3);
for (Pair<INDArray, String> p : list) {
INDArray arr = p.getFirst().assign(testValues);
// Along dimension 0,1: expect matrix with shape [rows,cols]
assertEquals(dim2 * dim3, arr.tensorssAlongDimension(0, 1));
for (int i = 0; i < dim2 * dim3; i++) {
INDArray tad = arr.tensorAlongDimension(i, 0, 1);
assertArrayEquals(new int[] { rows, cols }, tad.shape());
assertEquals(testValues.javaTensorAlongDimension(i, 0, 1), tad);
}
// Along dimension 0,2: expect matrix with shape [rows,dim2]
assertEquals(cols * dim3, arr.tensorssAlongDimension(0, 2));
for (int i = 0; i < cols * dim3; i++) {
INDArray tad = arr.tensorAlongDimension(i, 0, 2);
assertArrayEquals(new int[] { rows, dim2 }, tad.shape());
assertEquals(testValues.javaTensorAlongDimension(i, 0, 2), tad);
}
// Along dimension 0,3: expect matrix with shape [rows,dim3]
assertEquals(cols * dim2, arr.tensorssAlongDimension(0, 3));
for (int i = 0; i < cols * dim2; i++) {
INDArray tad = arr.tensorAlongDimension(i, 0, 3);
assertArrayEquals(new int[] { rows, dim3 }, tad.shape());
assertEquals(testValues.javaTensorAlongDimension(i, 0, 3), tad);
}
// Along dimension 1,2: expect matrix with shape [cols,dim2]
assertEquals(rows * dim3, arr.tensorssAlongDimension(1, 2));
for (int i = 0; i < rows * dim3; i++) {
INDArray tad = arr.tensorAlongDimension(i, 1, 2);
assertArrayEquals(new int[] { cols, dim2 }, tad.shape());
assertEquals(testValues.javaTensorAlongDimension(i, 1, 2), tad);
}
// Along dimension 1,3: expect matrix with shape [cols,dim3]
assertEquals(rows * dim2, arr.tensorssAlongDimension(1, 3));
for (int i = 0; i < rows * dim2; i++) {
INDArray tad = arr.tensorAlongDimension(i, 1, 3);
assertArrayEquals(new int[] { cols, dim3 }, tad.shape());
assertEquals(testValues.javaTensorAlongDimension(i, 1, 3), tad);
}
// Along dimension 2,3: expect matrix with shape [dim2,dim3]
assertEquals(rows * cols, arr.tensorssAlongDimension(2, 3));
for (int i = 0; i < rows * cols; i++) {
INDArray tad = arr.tensorAlongDimension(i, 2, 3);
assertArrayEquals(new int[] { dim2, dim3 }, tad.shape());
assertEquals(testValues.javaTensorAlongDimension(i, 2, 3), tad);
}
}
}
Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (deeplearning4j): class Nd4jTestsC, method testGemmStrided.
/**
 * Verifies that Nd4j.gemm produces identical results for c-order, f-order, and
 * arbitrary-layout (strided/view) input matrices. For each inner dimension x, every
 * pairing of a (5 x x) test matrix with an (x x 4) test matrix is multiplied three ways
 * and the results compared.
 */
@Test
public void testGemmStrided() {
    // x is the shared inner dimension; x == 1 exercises the degenerate vector-ish case
    for (int x : new int[] { 5, 1 }) {
        List<Pair<INDArray, String>> la = NDArrayCreationUtil.getAllTestMatricesWithShape(5, x, 12345);
        List<Pair<INDArray, String>> lb = NDArrayCreationUtil.getAllTestMatricesWithShape(x, 4, 12345);
        for (int i = 0; i < la.size(); i++) {
            for (int j = 0; j < lb.size(); j++) {
                String msg = "x=" + x + ", i=" + i + ", j=" + j;
                INDArray a = la.get(i).getFirst();
                // BUG FIX: was lb.get(i) — the j loop never varied b, so most (i, j)
                // combinations were silently untested (and i could exceed lb's bounds).
                INDArray b = lb.get(j).getFirst();
                INDArray result1 = Nd4j.createUninitialized(5, 4);
                INDArray result2 = Nd4j.createUninitialized(5, 4);
                INDArray result3 = Nd4j.createUninitialized(5, 4);
                // Reference results from contiguous c-order and f-order copies...
                Nd4j.gemm(a.dup('c'), b.dup('c'), result1, false, false, 1.0, 0.0);
                Nd4j.gemm(a.dup('f'), b.dup('f'), result2, false, false, 1.0, 0.0);
                // ...must match gemm on the original (possibly strided/view) arrays
                Nd4j.gemm(a, b, result3, false, false, 1.0, 0.0);
                assertEquals(msg, result1, result2);
                // Fails here
                assertEquals(msg, result1, result3);
            }
        }
    }
}
Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (deeplearning4j): class Nd4jTestsComparisonFortran, method testGemmWithOpsCommonsMath.
/**
 * Cross-checks Nd4j gemm (C = alpha*op(A)*op(B) + beta*C) against Apache Commons Math,
 * over all four transpose combinations, every test-matrix layout, and every
 * (alpha, beta) scaling pair. Each transpose combination gets its own fresh copy of the
 * original C matrix, since gemm overwrites C in place.
 */
@Test
public void testGemmWithOpsCommonsMath() {
    // A is 3x5 (or 5x3 transposed); B is 5x4 (or 4x5 transposed); C is 3x4
    List<Pair<INDArray, String>> first = NDArrayCreationUtil.getAllTestMatricesWithShape(3, 5, SEED);
    List<Pair<INDArray, String>> firstT = NDArrayCreationUtil.getAllTestMatricesWithShape(5, 3, SEED);
    List<Pair<INDArray, String>> second = NDArrayCreationUtil.getAllTestMatricesWithShape(5, 4, SEED);
    List<Pair<INDArray, String>> secondT = NDArrayCreationUtil.getAllTestMatricesWithShape(4, 5, SEED);
    double[] alpha = { 1.0, -0.5, 2.5 };
    double[] beta = { 0.0, -0.25, 1.5 };
    // Randomized (but seeded, hence reproducible) initial C
    INDArray cOrig = Nd4j.create(new int[] { 3, 4 });
    Random r = new Random(12345);
    for (int i = 0; i < cOrig.size(0); i++) {
        for (int j = 0; j < cOrig.size(1); j++) {
            cOrig.putScalar(new int[] { i, j }, r.nextDouble());
        }
    }
    for (int i = 0; i < first.size(); i++) {
        for (int j = 0; j < second.size(); j++) {
            for (int k = 0; k < alpha.length; k++) {
                for (int m = 0; m < beta.length; m++) {
                    System.out.println((String.format("Running iteration %d %d %d %d", i, j, k, m)));
                    // Fresh f-order copies of C: gemm mutates C, one per transpose combo
                    INDArray cff = Nd4j.create(cOrig.shape(), 'f');
                    cff.assign(cOrig);
                    INDArray cft = Nd4j.create(cOrig.shape(), 'f');
                    cft.assign(cOrig);
                    INDArray ctf = Nd4j.create(cOrig.shape(), 'f');
                    ctf.assign(cOrig);
                    INDArray ctt = Nd4j.create(cOrig.shape(), 'f');
                    ctt.assign(cOrig);
                    double a = alpha[k];
                    // BUG FIX: was beta[k] — the m loop was a no-op and beta values other
                    // than beta[k] were never exercised.
                    double b = beta[m];
                    Pair<INDArray, String> p1 = first.get(i);
                    Pair<INDArray, String> p1T = firstT.get(i);
                    Pair<INDArray, String> p2 = second.get(j);
                    Pair<INDArray, String> p2T = secondT.get(j);
                    String errorMsgff = getGemmErrorMsg(i, j, false, false, a, b, p1, p2);
                    String errorMsgft = getGemmErrorMsg(i, j, false, true, a, b, p1, p2T);
                    String errorMsgtf = getGemmErrorMsg(i, j, true, false, a, b, p1T, p2);
                    String errorMsgtt = getGemmErrorMsg(i, j, true, true, a, b, p1T, p2T);
                    // Tolerances: 1e-4 relative, 1e-6 absolute (see CheckUtil.checkGemm)
                    assertTrue(errorMsgff, CheckUtil.checkGemm(p1.getFirst(), p2.getFirst(), cff, false, false, a, b, 1e-4, 1e-6));
                    assertTrue(errorMsgft, CheckUtil.checkGemm(p1.getFirst(), p2T.getFirst(), cft, false, true, a, b, 1e-4, 1e-6));
                    assertTrue(errorMsgtf, CheckUtil.checkGemm(p1T.getFirst(), p2.getFirst(), ctf, true, false, a, b, 1e-4, 1e-6));
                    assertTrue(errorMsgtt, CheckUtil.checkGemm(p1T.getFirst(), p2T.getFirst(), ctt, true, true, a, b, 1e-4, 1e-6));
                }
            }
        }
    }
}
Example usage of org.nd4j.linalg.primitives.Pair in the nd4j project (deeplearning4j): class Nd4jTestsComparisonFortran, method testGemvApacheCommons.
/**
 * Cross-checks INDArray matrix-vector multiplication (gemv via mmul) against
 * Apache Commons Math, across several matrix shapes and every test-matrix layout.
 * Each INDArray operand is mirrored entry-by-entry into a BlockRealMatrix, both
 * products are computed, and shapes plus entries are compared with a 1e-5 tolerance.
 */
@Test
public void testGemvApacheCommons() {
    // Paired shape lists: matrix is rows x cols, vector is cols x 1
    int[] rowsArr = new int[] { 4, 4, 4, 8, 8, 8 };
    int[] colsArr = new int[] { 2, 1, 10, 2, 1, 10 };
    for (int x = 0; x < rowsArr.length; x++) {
        int rows = rowsArr[x];
        int cols = colsArr[x];
        List<Pair<INDArray, String>> matrices = NDArrayCreationUtil.getAllTestMatricesWithShape(rows, cols, 12345);
        List<Pair<INDArray, String>> vectors = NDArrayCreationUtil.getAllTestMatricesWithShape(cols, 1, 12345);
        for (int i = 0; i < matrices.size(); i++) {
            for (int j = 0; j < vectors.size(); j++) {
                Pair<INDArray, String> p1 = matrices.get(i);
                Pair<INDArray, String> p2 = vectors.get(j);
                String errorMsg = getTestWithOpsErrorMsg(i, j, "mmul", p1, p2);
                INDArray m = p1.getFirst();
                INDArray v = p2.getFirst();
                // Mirror the INDArray matrix into a Commons Math RealMatrix
                RealMatrix rm = new BlockRealMatrix(m.rows(), m.columns());
                for (int r = 0; r < m.rows(); r++) {
                    for (int c = 0; c < m.columns(); c++) {
                        rm.setEntry(r, c, m.getDouble(r, c));
                    }
                }
                // Mirror the column vector the same way (cols x 1)
                RealMatrix rv = new BlockRealMatrix(cols, 1);
                for (int r = 0; r < v.rows(); r++) {
                    rv.setEntry(r, 0, v.getDouble(r, 0));
                }
                // Compute both products
                INDArray gemv = m.mmul(v);
                RealMatrix gemv2 = rm.multiply(rv);
                // Both results must be [rows, 1]
                assertArrayEquals(new int[] { rows, 1 }, gemv.shape());
                assertArrayEquals(new int[] { rows, 1 }, new int[] { gemv2.getRowDimension(), gemv2.getColumnDimension() });
                // Entry-wise comparison with tolerance
                for (int r = 0; r < rows; r++) {
                    assertEquals(errorMsg, gemv2.getEntry(r, 0), gemv.getDouble(r, 0), 1e-5);
                }
            }
        }
    }
}
Aggregations