Example 41 with Pair

Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.

From the class NDArrayCreationUtil, method get4dTensorAlongDimensionWithShape:

public static List<Pair<INDArray, String>> get4dTensorAlongDimensionWithShape(int seed, int... shape) {
    List<Pair<INDArray, String>> list = new ArrayList<>();
    String baseMsg = "get4dTensorAlongDimensionWithShape(" + seed + "," + Arrays.toString(shape) + ")";
    // Create some 5d arrays and get subsets using 4d TAD on them
    // This is not an exhaustive list of possible 4d arrays obtainable from 5d arrays via TAD
    Nd4j.getRandom().setSeed(seed);
    int[] shape4d1 = { 3, shape[0], shape[1], shape[2], shape[3] };
    int len = ArrayUtil.prod(shape4d1);
    INDArray orig1a = Nd4j.linspace(1, len, len).reshape(shape4d1);
    INDArray tad1a = orig1a.javaTensorAlongDimension(0, 1, 2, 3, 4);
    INDArray orig1b = Nd4j.linspace(1, len, len).reshape(shape4d1);
    INDArray tad1b = orig1b.javaTensorAlongDimension(2, 1, 2, 3, 4);
    list.add(new Pair<>(tad1a, baseMsg + ".get(0)"));
    list.add(new Pair<>(tad1b, baseMsg + ".get(1)"));
    int[] shape4d2 = { 3, shape[0], shape[1], shape[2], shape[3] };
    int len2 = ArrayUtil.prod(shape4d2);
    INDArray orig2 = Nd4j.linspace(1, len2, len2).reshape(shape4d2);
    INDArray tad2 = orig2.javaTensorAlongDimension(1, 3, 4, 2, 1);
    list.add(new Pair<>(tad2, baseMsg + ".get(2)"));
    int[] shape4d3 = { shape[0], shape[1], 3, shape[2], shape[3] };
    int len3 = ArrayUtil.prod(shape4d3);
    INDArray orig3 = Nd4j.linspace(1, len3, len3).reshape(shape4d3);
    INDArray tad3 = orig3.javaTensorAlongDimension(1, 4, 1, 3, 0);
    list.add(new Pair<>(tad3, baseMsg + ".get(3)"));
    int[] shape4d4 = { shape[0], shape[1], shape[2], shape[3], 3 };
    int len4 = ArrayUtil.prod(shape4d4);
    INDArray orig4 = Nd4j.linspace(1, len4, len4).reshape(shape4d4);
    INDArray tad4 = orig4.javaTensorAlongDimension(1, 2, 0, 3, 1);
    list.add(new Pair<>(tad4, baseMsg + ".get(4)"));
    return list;
}
Also used: INDArray (org.nd4j.linalg.api.ndarray.INDArray), Pair (org.nd4j.linalg.primitives.Pair)
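
A minimal usage sketch (not from the nd4j sources): iterate the pairs returned above and check that each 4d TAD has the requested shape, using the Pair's String as the assertion message. The package location of NDArrayCreationUtil (org.nd4j.linalg.checkutil), the seed, and the shape values are assumptions for illustration, as is the shape contract implied by the method name.

import java.util.List;
import org.junit.Test;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.checkutil.NDArrayCreationUtil;
import org.nd4j.linalg.primitives.Pair;
import static org.junit.Assert.assertArrayEquals;

public class TadShapeSketch {
    @Test
    public void tadsHaveRequestedShape() {
        // seed and target shape are arbitrary for illustration
        List<Pair<INDArray, String>> tads =
                NDArrayCreationUtil.get4dTensorAlongDimensionWithShape(12345, 4, 5, 6, 7);
        for (Pair<INDArray, String> p : tads) {
            // the String identifies which variant failed, e.g. "...get(2)"
            assertArrayEquals(p.getSecond(), new int[] { 4, 5, 6, 7 }, p.getFirst().shape());
        }
    }
}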

Example 42 with Pair

Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.

From the class NDArrayCreationUtil, method get5dSubArraysWithShape:

public static List<Pair<INDArray, String>> get5dSubArraysWithShape(int seed, int... shape) {
    List<Pair<INDArray, String>> list = new ArrayList<>();
    String baseMsg = "get5dSubArraysWithShape(" + seed + "," + Arrays.toString(shape) + ")";
    // Create and return various sub arrays:
    Nd4j.getRandom().setSeed(seed);
    int[] newShape1 = Arrays.copyOf(shape, shape.length);
    newShape1[0] += 5;
    INDArray temp1 = Nd4j.rand(newShape1);
    INDArray subset1 = temp1.get(NDArrayIndex.interval(2, shape[0] + 2), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all());
    list.add(new Pair<>(subset1, baseMsg + ".get(0)"));
    int[] newShape2 = Arrays.copyOf(shape, shape.length);
    newShape2[1] += 5;
    INDArray temp2 = Nd4j.rand(newShape2);
    INDArray subset2 = temp2.get(NDArrayIndex.all(), NDArrayIndex.interval(3, shape[1] + 3), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all());
    list.add(new Pair<>(subset2, baseMsg + ".get(1)"));
    int[] newShape3 = Arrays.copyOf(shape, shape.length);
    newShape3[2] += 5;
    INDArray temp3 = Nd4j.rand(newShape3);
    INDArray subset3 = temp3.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(4, shape[2] + 4), NDArrayIndex.all(), NDArrayIndex.all());
    list.add(new Pair<>(subset3, baseMsg + ".get(2)"));
    int[] newShape4 = Arrays.copyOf(shape, shape.length);
    newShape4[3] += 5;
    INDArray temp4 = Nd4j.rand(newShape4);
    INDArray subset4 = temp4.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(3, shape[3] + 3), NDArrayIndex.all());
    list.add(new Pair<>(subset4, baseMsg + ".get(3)"));
    int[] newShape5 = Arrays.copyOf(shape, shape.length);
    newShape5[4] += 5;
    INDArray temp5 = Nd4j.rand(newShape5);
    INDArray subset5 = temp5.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(3, shape[4] + 3));
    list.add(new Pair<>(subset5, baseMsg + ".get(4)"));
    int[] newShape6 = Arrays.copyOf(shape, shape.length);
    newShape6[0] += 5;
    newShape6[1] += 5;
    newShape6[2] += 5;
    newShape6[3] += 5;
    newShape6[4] += 5;
    INDArray temp6 = Nd4j.rand(newShape6);
    INDArray subset6 = temp6.get(NDArrayIndex.interval(4, shape[0] + 4), NDArrayIndex.interval(3, shape[1] + 3), NDArrayIndex.interval(2, shape[2] + 2), NDArrayIndex.interval(1, shape[3] + 1), NDArrayIndex.interval(2, shape[4] + 2));
    list.add(new Pair<>(subset6, baseMsg + ".get(5)"));
    return list;
}
Also used: INDArray (org.nd4j.linalg.api.ndarray.INDArray), Pair (org.nd4j.linalg.primitives.Pair)
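
The method above pads one dimension at a time, fills a larger random array, and then slices a view of the requested shape back out with NDArrayIndex.interval, so the returned sub-arrays carry non-trivial offsets and strides. A minimal sketch of that pattern in isolation (sizes and variable names are illustrative, not from the source):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;

public class IntervalViewSketch {
    public static void main(String[] args) {
        // target shape [4, 5]; pad the first dimension by 5, as the utility does
        INDArray padded = Nd4j.rand(new int[] { 9, 5 });
        // take rows 2..5 (interval end is exclusive) and all columns:
        // a [4, 5] view into the padded buffer rather than a fresh copy
        INDArray view = padded.get(NDArrayIndex.interval(2, 6), NDArrayIndex.all());
        System.out.println(java.util.Arrays.toString(view.shape()));  // [4, 5]
    }
}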

Example 43 with Pair

Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.

From the class NDArrayCreationUtil, method getAll4dTestArraysWithShape:

public static List<Pair<INDArray, String>> getAll4dTestArraysWithShape(int seed, int... shape) {
    if (shape.length != 4)
        throw new IllegalArgumentException("Shape is not length 4");
    List<Pair<INDArray, String>> list = new ArrayList<>();
    String baseMsg = "getAll4dTestArraysWithShape(" + seed + "," + Arrays.toString(shape) + ").get(";
    // Basic 4d in C and F orders:
    Nd4j.getRandom().setSeed(seed);
    int len = ArrayUtil.prod(shape);
    INDArray stdC = Nd4j.linspace(1, len, len).reshape('c', shape);
    INDArray stdF = Nd4j.linspace(1, len, len).reshape('f', shape);
    list.add(new Pair<>(stdC, baseMsg + "0)/Nd4j.rand(" + Arrays.toString(shape) + ",'c')"));
    list.add(new Pair<>(stdF, baseMsg + "1)/Nd4j.rand(" + Arrays.toString(shape) + ",'f')"));
    // Various sub arrays:
    list.addAll(get4dSubArraysWithShape(seed, shape));
    // TAD
    list.addAll(get4dTensorAlongDimensionWithShape(seed, shape));
    // Permuted
    list.addAll(get4dPermutedWithShape(seed, shape));
    // Reshaped
    list.addAll(get4dReshapedWithShape(seed, shape));
    return list;
}
Also used: INDArray (org.nd4j.linalg.api.ndarray.INDArray), Pair (org.nd4j.linalg.primitives.Pair)
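
A hedged sketch of the test pattern these getAll...TestArraysWithShape helpers are built for (the operation, shape, and class name are arbitrary examples, not taken from the source): apply the same operation to every c-order, f-order, sub-array, TAD, permuted, and reshaped variant, and compare against a contiguous duplicate.

import java.util.List;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.checkutil.NDArrayCreationUtil;
import org.nd4j.linalg.primitives.Pair;
import static org.junit.Assert.assertEquals;

public class AllVariantsSketch {
    public static void checkMulOnAllVariants() {
        List<Pair<INDArray, String>> all =
                NDArrayCreationUtil.getAll4dTestArraysWithShape(12345, 2, 3, 4, 5);
        for (Pair<INDArray, String> p : all) {
            INDArray in = p.getFirst();
            INDArray expected = in.dup('c').muli(2.0);  // baseline on a contiguous copy
            INDArray actual = in.mul(2.0);              // same op on the strided/viewed variant
            assertEquals(p.getSecond(), expected, actual);
        }
    }
}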

Example 44 with Pair

Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.

From the class ElementWiseStrideTests, method testEWS1:

@Test
public void testEWS1() throws Exception {
    List<Pair<INDArray, String>> list = NDArrayCreationUtil.getAllTestMatricesWithShape(4, 5, 12345);
    list.addAll(NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 4, 5, 6));
    list.addAll(NDArrayCreationUtil.getAll4dTestArraysWithShape(12345, 4, 5, 6, 7));
    list.addAll(NDArrayCreationUtil.getAll5dTestArraysWithShape(12345, 4, 5, 6, 7, 8));
    list.addAll(NDArrayCreationUtil.getAll6dTestArraysWithShape(12345, 4, 5, 6, 7, 8, 9));
    for (Pair<INDArray, String> p : list) {
        int ewsBefore = Shape.elementWiseStride(p.getFirst().shapeInfo());
        INDArray reshapeAttempt = Shape.newShapeNoCopy(p.getFirst(), new int[] { 1, p.getFirst().length() }, Nd4j.order() == 'f');
        if (reshapeAttempt != null && ewsBefore == -1 && reshapeAttempt.elementWiseStride() != -1) {
            System.out.println("NDArrayCreationUtil." + p.getSecond());
            System.out.println("ews before: " + ewsBefore);
            System.out.println(p.getFirst().shapeInfoToString());
            System.out.println("ews returned by elementWiseStride(): " + p.getFirst().elementWiseStride());
            System.out.println("ews returned by reshape(): " + reshapeAttempt.elementWiseStride());
            System.out.println();
        // assertTrue(false);
        } else {
        // System.out.println("FAILED: " + p.getFirst().shapeInfoToString());
        }
    }
}
Also used: INDArray (org.nd4j.linalg.api.ndarray.INDArray), Pair (org.nd4j.linalg.primitives.Pair), Test (org.junit.Test)
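
The check at the heart of testEWS1 can be wrapped in a small helper. This is a hypothetical sketch around the same Shape.newShapeNoCopy call the test makes, not an existing nd4j method:

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.shape.Shape;
import org.nd4j.linalg.factory.Nd4j;

public class FlattenCheckSketch {
    /**
     * Returns true if the array can be viewed as a [1, length] row vector
     * without copying, i.e. if Shape.newShapeNoCopy finds compatible strides.
     */
    public static boolean canFlattenWithoutCopy(INDArray arr) {
        INDArray attempt = Shape.newShapeNoCopy(arr,
                new int[] { 1, arr.length() }, Nd4j.order() == 'f');
        return attempt != null;
    }
}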

Example 45 with Pair

Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.

From the class AveragingTests, method testMultiDeviceAveraging:

/**
 * This test should be run on a multi-GPU system only; on a single-GPU system it will fail.
 * @throws Exception
 */
@Test
public void testMultiDeviceAveraging() throws Exception {
    final List<Pair<INDArray, INDArray>> pairs = new ArrayList<>();
    int numDevices = Nd4j.getAffinityManager().getNumberOfDevices();
    AtomicAllocator allocator = AtomicAllocator.getInstance();
    for (int i = 0; i < THREADS; i++) {
        final int order = i;
        Thread thread = new Thread(new Runnable() {

            @Override
            public void run() {
                pairs.add(new Pair<INDArray, INDArray>(Nd4j.valueArrayOf(LENGTH, (double) order), null));
                try {
                    Thread.sleep(100);
                } catch (Exception e) {
                // 
                }
            }
        });
        thread.start();
        thread.join();
    }
    assertEquals(THREADS, pairs.size());
    final List<INDArray> arrays = new ArrayList<>();
    AtomicBoolean hasNonZero = new AtomicBoolean(false);
    for (int i = 0; i < THREADS; i++) {
        INDArray array = pairs.get(i).getKey();
        AllocationPoint point = allocator.getAllocationPoint(array.data());
        if (point.getDeviceId() != 0)
            hasNonZero.set(true);
        arrays.add(array);
    }
    assertEquals(true, hasNonZero.get());
    /*
        // old way of averaging, without further propagation
        INDArray z = Nd4j.create(LENGTH);
        long time1 = System.currentTimeMillis();
        for (int i = 0; i < THREADS; i++) {
            z.addi(arrays.get(i));
        }
        z.divi((float) THREADS);
        CudaContext context = (CudaContext) allocator.getDeviceContext().getContext();
        context.syncOldStream();
        long time2 = System.currentTimeMillis();
        System.out.println("Execution time: " + (time2 - time1));

*/
    long time1 = System.currentTimeMillis();
    INDArray z = Nd4j.averageAndPropagate(arrays);
    long time2 = System.currentTimeMillis();
    System.out.println("Execution time: " + (time2 - time1));
    assertEquals(7.5f, z.getFloat(0), 0.01f);
    assertEquals(7.5f, z.getFloat(10), 0.01f);
    for (int i = 0; i < THREADS; i++) {
        for (int x = 0; x < LENGTH; x++) {
            assertEquals("Failed on array [" + i + "], element [" + x + "]", z.getFloat(0), arrays.get(i).getFloat(x), 0.01f);
        }
    }
}
Also used: AtomicAllocator (org.nd4j.jita.allocator.impl.AtomicAllocator), ArrayList (java.util.ArrayList), AllocationPoint (org.nd4j.jita.allocator.impl.AllocationPoint), AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean), INDArray (org.nd4j.linalg.api.ndarray.INDArray), Pair (org.nd4j.linalg.primitives.Pair), Test (org.junit.Test)
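
A minimal, single-device sketch of the Nd4j.averageAndPropagate call the test exercises (array length and values are made up for illustration): the returned array holds the element-wise average, and, as the assertions above rely on, each input array is overwritten with that same average.

import java.util.ArrayList;
import java.util.List;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class AveragePropagateSketch {
    public static void main(String[] args) {
        List<INDArray> inputs = new ArrayList<>();
        inputs.add(Nd4j.valueArrayOf(10, 1.0));  // all elements 1.0
        inputs.add(Nd4j.valueArrayOf(10, 3.0));  // all elements 3.0
        INDArray mean = Nd4j.averageAndPropagate(inputs);
        // mean holds 2.0 everywhere; inputs.get(0) and inputs.get(1) are
        // also overwritten with 2.0 (the "propagate" part)
        System.out.println(mean.getFloat(0));  // 2.0
    }
}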

Aggregations

Pair (org.nd4j.linalg.primitives.Pair): 66
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 63
Test (org.junit.Test): 24
BaseNd4jTest (org.nd4j.linalg.BaseNd4jTest): 9
ND4JIllegalStateException (org.nd4j.linalg.exception.ND4JIllegalStateException): 5
ArrayList (java.util.ArrayList): 4
DataBuffer (org.nd4j.linalg.api.buffer.DataBuffer): 4
Ignore (org.junit.Ignore): 3
List (java.util.List): 2
RealMatrix (org.apache.commons.math3.linear.RealMatrix): 2
IntPointer (org.bytedeco.javacpp.IntPointer): 2
Pointer (org.bytedeco.javacpp.Pointer): 2
OpExecutionerUtil (org.nd4j.linalg.api.ops.executioner.OpExecutionerUtil): 2
LongPointerWrapper (org.nd4j.nativeblas.LongPointerWrapper): 2
IntBuffer (java.nio.IntBuffer): 1
Random (java.util.Random): 1
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 1
Array2DRowRealMatrix (org.apache.commons.math3.linear.Array2DRowRealMatrix): 1
BlockRealMatrix (org.apache.commons.math3.linear.BlockRealMatrix): 1
LUDecomposition (org.apache.commons.math3.linear.LUDecomposition): 1