
Example 1 with NdIndexIterator

Use of org.nd4j.linalg.api.iter.NdIndexIterator in project nd4j by deeplearning4j.

The class BaseNDArray, method put.

@Override
public INDArray put(List<List<Integer>> indices, INDArray element) {
    // Build one SpecifiedIndex per dimension from the supplied index lists
    INDArrayIndex[] indArrayIndices = new INDArrayIndex[indices.size()];
    for (int i = 0; i < indArrayIndices.length; i++) {
        indArrayIndices[i] = new SpecifiedIndex(Ints.toArray(indices.get(i)));
    }
    boolean hasNext = true;
    // Generator over the cartesian product of the per-dimension indices
    Generator<List<List<Long>>> iterate = SpecifiedIndex.iterate(indArrayIndices);
    if (indices.size() == rank()) {
        // One index list per dimension: copy scalars from element, walking its shape in order
        NdIndexIterator ndIndexIterator = new NdIndexIterator(element.shape());
        while (hasNext) {
            try {
                // Flatten the next coordinate (one singleton list per dimension) into an int[]
                List<List<Long>> next = iterate.next();
                int[][] nextArr = new int[next.size()][];
                for (int i = 0; i < next.size(); i++) {
                    nextArr[i] = Ints.toArray(next.get(i));
                }
                int[] curr = Ints.concat(nextArr);
                putScalar(curr, element.getDouble(ndIndexIterator.next()));
            } catch (NoSuchElementException e) {
                // The generator signals exhaustion by throwing
                hasNext = false;
            }
        }
    } else {
        if (indices.size() >= 2) {
            while (hasNext) {
                try {
                    List<List<Long>> next = iterate.next();
                    int[][] nextArr = new int[next.size()][];
                    for (int i = 0; i < next.size(); i++) {
                        nextArr[i] = Ints.toArray(next.get(i));
                    }
                    int[] curr = Ints.concat(nextArr);
                    // Descend to the addressed sub-array, then assign element into it in place
                    INDArray currSlice = this;
                    for (int j = 0; j < curr.length; j++) {
                        currSlice = currSlice.slice(curr[j]);
                    }
                    Nd4j.getExecutioner().exec(new Assign(new INDArray[] { currSlice, element }, new INDArray[] { currSlice }));
                } catch (NoSuchElementException e) {
                    hasNext = false;
                }
            }
        }
    }
    return this;
}
Also used: NdIndexIterator (org.nd4j.linalg.api.iter.NdIndexIterator), Assign (org.nd4j.linalg.api.ops.impl.transforms.Assign)
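
For orientation, a hedged usage sketch of the overload above (the array names and values are illustrative, the exact write order is whatever SpecifiedIndex.iterate yields, and java.util.Arrays is assumed imported): with one index list per dimension, put writes element's values across the cartesian product of the lists.

INDArray arr = Nd4j.zeros(4, 4);
INDArray vals = Nd4j.linspace(1, 4, 4).reshape(2, 2);
// Targets the cartesian product of rows { 0, 2 } and columns { 1, 3 },
// i.e. the four cells (0,1), (0,3), (2,1), (2,3)
arr.put(Arrays.asList(Arrays.asList(0, 2), Arrays.asList(1, 3)), vals);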

Example 2 with NdIndexIterator

Use of org.nd4j.linalg.api.iter.NdIndexIterator in project nd4j by deeplearning4j.

The class BaseSparseNDArray, method put.

@Override
public INDArray put(List<List<Integer>> indices, INDArray element) {
    if (indices.size() == rank()) {
        // One index list per dimension: copy scalars from element, walking its shape in order
        NdIndexIterator ndIndexIterator = new NdIndexIterator(element.shape());
        INDArrayIndex[] indArrayIndices = new INDArrayIndex[indices.size()];
        for (int i = 0; i < indArrayIndices.length; i++) {
            indArrayIndices[i] = new SpecifiedIndex(Ints.toArray(indices.get(i)));
        }
        boolean hasNext = true;
        // Generator over the cartesian product of the per-dimension indices
        Generator<List<List<Long>>> iterate = SpecifiedIndex.iterate(indArrayIndices);
        while (hasNext) {
            try {
                List<List<Long>> next = iterate.next();
                for (int i = 0; i < next.size(); i++) {
                    int[] curr = Ints.toArray(next.get(i));
                    putScalar(curr, element.getDouble(ndIndexIterator.next()));
                }
            } catch (NoSuchElementException e) {
                // The generator signals exhaustion by throwing
                hasNext = false;
            }
        }
    } else {
        List<INDArray> arrList = new ArrayList<>();
        if (indices.size() >= 2) {
            // Fewer index lists than dimensions: assign element into each addressed slice
            for (int i = 0; i < indices.size(); i++) {
                List<Integer> row = indices.get(i);
                for (int j = 0; j < row.size(); j++) {
                    INDArray slice = slice(row.get(j));
                    Nd4j.getExecutioner().exec(new Assign(new INDArray[] { slice, element }, new INDArray[] { slice }));
                    arrList.add(slice(row.get(j)));
                }
            }
        } else if (indices.size() == 1) {
            // Single index list: collect one slice per listed index
            for (int i = 0; i < indices.get(0).size(); i++) {
                arrList.add(slice(indices.get(0).get(i)));
            }
        }
    }
    return this;
}
Also used: NdIndexIterator (org.nd4j.linalg.api.iter.NdIndexIterator), INDArrayIndex (org.nd4j.linalg.indexing.INDArrayIndex), ArrayList (java.util.ArrayList), SpecifiedIndex (org.nd4j.linalg.indexing.SpecifiedIndex), List (java.util.List), Assign (org.nd4j.linalg.api.ops.impl.transforms.Assign), NoSuchElementException (java.util.NoSuchElementException)
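
Both put implementations above drain the SpecifiedIndex.iterate generator with the same try/catch idiom. A minimal sketch of that pattern, with the caveats that the Generator type's package is not shown in these snippets (its import is assumed) and the yield order is taken to be the cartesian product of the per-dimension lists:

INDArrayIndex[] idxs = {
    new SpecifiedIndex(0, 2),   // dimension 0: rows 0 and 2
    new SpecifiedIndex(1, 3)    // dimension 1: columns 1 and 3
};
Generator<List<List<Long>>> gen = SpecifiedIndex.iterate(idxs);
boolean hasNext = true;
while (hasNext) {
    try {
        // Expected to yield the coordinates (0,1), (0,3), (2,1), (2,3), one per call
        List<List<Long>> next = gen.next();
        System.out.println(next);
    } catch (NoSuchElementException e) {
        // The generator signals exhaustion by throwing rather than via hasNext()
        hasNext = false;
    }
}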

Example 3 with NdIndexIterator

Use of org.nd4j.linalg.api.iter.NdIndexIterator in project nd4j by deeplearning4j.

The class OpExecutionerTestsC, method testVarianceSingleVsMultipleDimensions.

@Test
public void testVarianceSingleVsMultipleDimensions() {
    // this test should always run in double
    DataBuffer.Type type = Nd4j.dataType();
    DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
    Nd4j.getRandom().setSeed(12345);
    // Generate C order random numbers. Strides: [500,100,10,1]
    INDArray fourd = Nd4j.rand('c', new int[] { 100, 5, 10, 10 }).muli(10);
    INDArray twod = Shape.newShapeNoCopy(fourd, new int[] { 100, 5 * 10 * 10 }, false);
    // Population variance. These two should be identical
    INDArray var4 = fourd.var(false, 1, 2, 3);
    INDArray var2 = twod.var(false, 1);
    // Manual calculation of population variance, not bias corrected
    // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Na.C3.AFve_algorithm
    double[] sums = new double[100];
    double[] sumSquares = new double[100];
    NdIndexIterator iter = new NdIndexIterator(fourd.shape());
    while (iter.hasNext()) {
        int[] next = iter.next();
        double d = fourd.getDouble(next);
        sums[next[0]] += d;
        sumSquares[next[0]] += d * d;
    }
    double[] manualVariance = new double[100];
    int N = (fourd.length() / sums.length);
    for (int i = 0; i < sums.length; i++) {
        manualVariance[i] = (sumSquares[i] - (sums[i] * sums[i]) / N) / N;
    }
    INDArray var4bias = fourd.var(true, 1, 2, 3);
    INDArray var2bias = twod.var(true, 1);
    assertArrayEquals(var2.data().asDouble(), var4.data().asDouble(), 1e-5);
    assertArrayEquals(manualVariance, var2.data().asDouble(), 1e-5);
    assertArrayEquals(var2bias.data().asDouble(), var4bias.data().asDouble(), 1e-5);
    DataTypeUtil.setDTypeForContext(type);
}
Also used: NdIndexIterator (org.nd4j.linalg.api.iter.NdIndexIterator), INDArray (org.nd4j.linalg.api.ndarray.INDArray), NDArrayIndex.point (org.nd4j.linalg.indexing.NDArrayIndex.point), DataBuffer (org.nd4j.linalg.api.buffer.DataBuffer), BaseNd4jTest (org.nd4j.linalg.BaseNd4jTest), Test (org.junit.Test)
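
The manual check above is the naïve one-pass algorithm from the linked Wikipedia article. Written out, the population (non-bias-corrected) variance of each row of N values is

\sigma^2 = \frac{1}{N} \left( \sum_{i=1}^{N} x_i^2 - \frac{1}{N} \Bigl( \sum_{i=1}^{N} x_i \Bigr)^2 \right)

which is exactly the (sumSquares[i] - sums[i] * sums[i] / N) / N computed per row in the loop.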

Example 4 with NdIndexIterator

Use of org.nd4j.linalg.api.iter.NdIndexIterator in project nd4j by deeplearning4j.

The class Nd4jTestsC, method testIMax2of4d.

@Test
public void testIMax2of4d() {
    Nd4j.getRandom().setSeed(12345);
    int[] s = new int[] { 2, 3, 4, 5 };
    INDArray arr = Nd4j.rand(s);
    // Test 0,1
    INDArray exp = Nd4j.create(new int[] { 4, 5 });
    for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 5; j++) {
            INDArray subset = arr.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(i), NDArrayIndex.point(j));
            assertArrayEquals(new int[] { 2, 3 }, subset.shape());
            NdIndexIterator iter = new NdIndexIterator('c', 2, 3);
            double max = -Double.MAX_VALUE;
            int maxIdxPos = -1;
            int count = 0;
            while (iter.hasNext()) {
                int[] next = iter.next();
                double d = subset.getDouble(next);
                if (d > max) {
                    max = d;
                    maxIdxPos = count;
                }
                count++;
            }
            exp.putScalar(i, j, maxIdxPos);
        }
    }
    INDArray actC = Nd4j.getExecutioner().exec(new IMax(arr.dup('c')), 0, 1);
    INDArray actF = Nd4j.getExecutioner().exec(new IMax(arr.dup('f')), 0, 1);
    assertEquals(exp, actC);
    assertEquals(exp, actF);
    // Test 2,3
    exp = Nd4j.create(new int[] { 2, 3 });
    for (int i = 0; i < 2; i++) {
        for (int j = 0; j < 3; j++) {
            INDArray subset = arr.get(NDArrayIndex.point(i), NDArrayIndex.point(j), NDArrayIndex.all(), NDArrayIndex.all());
            assertArrayEquals(new int[] { 4, 5 }, subset.shape());
            NdIndexIterator iter = new NdIndexIterator('c', 4, 5);
            int maxIdxPos = -1;
            double max = -Double.MAX_VALUE;
            int count = 0;
            while (iter.hasNext()) {
                int[] next = iter.next();
                double d = subset.getDouble(next);
                if (d > max) {
                    max = d;
                    maxIdxPos = count;
                }
                count++;
            }
            exp.putScalar(i, j, maxIdxPos);
        }
    }
    actC = Nd4j.getExecutioner().exec(new IMax(arr.dup('c')), 2, 3);
    actF = Nd4j.getExecutioner().exec(new IMax(arr.dup('f')), 2, 3);
    assertEquals(exp, actC);
    assertEquals(exp, actF);
}
Also used: NdIndexIterator (org.nd4j.linalg.api.iter.NdIndexIterator), INDArray (org.nd4j.linalg.api.ndarray.INDArray), IMax (org.nd4j.linalg.api.ops.impl.indexaccum.IMax), Test (org.junit.Test)
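
As the count variable makes explicit, IMax along a set of dimensions reports the maximum's linear position within each subset, counted in C (row-major) order. A standalone illustration of that convention in plain Java (values are illustrative):

// A 2x3 subset flattened in C (row-major) order: rows [1, 9, 3] and [2, 5, 4]
double[] subset = { 1.0, 9.0, 3.0, 2.0, 5.0, 4.0 };
int maxIdxPos = 0;
for (int k = 1; k < subset.length; k++) {
    if (subset[k] > subset[maxIdxPos])
        maxIdxPos = k;
}
System.out.println(maxIdxPos);   // prints 1: the maximum 9.0 sits at row 0, column 1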

Example 5 with NdIndexIterator

Use of org.nd4j.linalg.api.iter.NdIndexIterator in project deeplearning4j by deeplearning4j.

The class TestDropout, method testDropoutMultiLayer.

@Test
public void testDropoutMultiLayer() throws Exception {
    //Testing dropout with multiple layers
    //Layer input: values should be set to either 0.0 or 2.0x original value
    //However: we don't have access to 'original' activations easily
    //Instead: use sigmoid + weight initialization that saturates
    int nIn = 8;
    int layerSize = 10;
    int nOut = 4;
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(Updater.SGD).iterations(1)
            .regularization(true).dropOut(0.5)
            .learningRate(1e-9).weightInit(WeightInit.DISTRIBUTION)
            //Weight init to cause sigmoid saturation
            .dist(new UniformDistribution(10, 11))
            .list()
            .layer(0, new DenseLayer.Builder().activation(Activation.SIGMOID).nIn(nIn).nOut(layerSize).build())
            .layer(1, new DenseLayer.Builder().activation(Activation.SIGMOID).nIn(layerSize).nOut(layerSize).build())
            .layer(2, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(layerSize).nOut(nOut).weightInit(WeightInit.XAVIER).build())
            .backprop(true).pretrain(false).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    Field dropoutMaskField = BaseLayer.class.getDeclaredField("dropoutMask");
    dropoutMaskField.setAccessible(true);
    int nTests = 15;
    Nd4j.getRandom().setSeed(12345);
    int noDropoutCount = 0;
    for (int i = 0; i < nTests; i++) {
        INDArray in = Nd4j.rand(1, nIn).dup('c');
        INDArray out = Nd4j.rand(1, nOut).dup('c');
        INDArray inCopy = in.dup('c');
        net.fit(new DataSet(in, out));
        INDArray l0Input = net.getLayer(0).input().dup('c');
        //Dropout occurred. Expect inputs to be either scaled 2x original, or set to 0.0 (with dropout = 0.5)
        NdIndexIterator iter = new NdIndexIterator(inCopy.shape());
        boolean anyDropped = false;
        while (iter.hasNext()) {
            int[] idx = iter.next();
            double origValue = inCopy.getDouble(idx);
            double doValue = l0Input.getDouble(idx);
            if (doValue > 0.0) {
                //Input was kept -> should be scaled by factor of (1.0/0.5 = 2)
                assertEquals(origValue * 2.0, doValue, 0.0001);
            } else {
                //Input was dropped (set to 0.0)
                anyDropped = true;
            }
        }
        if (!anyDropped)
            noDropoutCount++;
        //Layer 1 and 2 inputs: sigmoid saturation means they should all be ~1.0 before dropout -> either 0 or ~2.0 after dropout
        for (int j = 1; j < 3; j++) {
            INDArray ljInput = net.getLayer(j).input();
            for (int k = 0; k < ljInput.length(); k++) {
                double doValue = ljInput.getDouble(k);
                if (doValue > 0.0) {
                    //Input was kept -> should be scaled by factor of (1.0/0.5 = 2)
                    //Sigmoid is saturated -> inputs should be ~1.0 -> 2.0 after dropout
                    assertEquals(2.0, doValue, 0.1);
                }
            }
        }
        //Do forward pass
        //(1) ensure dropout ISN'T being applied for forward pass at test time
        //(2) ensure dropout ISN'T being applied for test time scoring
        //If dropout is applied at test time: outputs + score will differ between passes
        INDArray in2 = Nd4j.rand(1, nIn);
        INDArray out2 = Nd4j.rand(1, nOut);
        INDArray outTest1 = net.output(in2, false);
        INDArray outTest2 = net.output(in2, false);
        INDArray outTest3 = net.output(in2, false);
        assertEquals(outTest1, outTest2);
        assertEquals(outTest1, outTest3);
        double score1 = net.score(new DataSet(in2, out2), false);
        double score2 = net.score(new DataSet(in2, out2), false);
        double score3 = net.score(new DataSet(in2, out2), false);
        assertEquals(score1, score2, 0.0);
        assertEquals(score1, score3, 0.0);
    }
    if (noDropoutCount >= nTests / 3) {
        //at 0.5 dropout ratio and more than a few inputs, expect only a very small number of instances where
        //no dropout occurs, just due to random chance
        fail("Too many instances of dropout not being applied");
    }
}
Also used: OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer), NdIndexIterator (org.nd4j.linalg.api.iter.NdIndexIterator), DataSet (org.nd4j.linalg.dataset.DataSet), UniformDistribution (org.deeplearning4j.nn.conf.distribution.UniformDistribution), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), Field (java.lang.reflect.Field), MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration), DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer), INDArray (org.nd4j.linalg.api.ndarray.INDArray), MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork), Test (org.junit.Test)
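
The 0-or-2x expectation in the assertions above comes from inverted dropout: with keep probability p = 0.5, kept activations are scaled by 1/p at training time so the expected activation is unchanged. A minimal standalone sketch of that scheme (an assumption-level illustration, not dl4j's actual implementation):

import java.util.Arrays;
import java.util.Random;

public class InvertedDropoutSketch {
    public static void main(String[] args) {
        double p = 0.5;   // keep probability, matching dropOut(0.5) in the config above
        Random rng = new Random(12345);
        double[] in = { 0.3, 0.7, 1.1, 0.9 };
        double[] out = new double[in.length];
        for (int i = 0; i < in.length; i++) {
            // Each value is either zeroed or scaled by 1/p = 2.0
            out[i] = rng.nextDouble() < p ? in[i] / p : 0.0;
        }
        System.out.println(Arrays.toString(out));
    }
}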

Aggregations

NdIndexIterator (org.nd4j.linalg.api.iter.NdIndexIterator): 16
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 11
Test (org.junit.Test): 7
BaseNd4jTest (org.nd4j.linalg.BaseNd4jTest): 4
Assign (org.nd4j.linalg.api.ops.impl.transforms.Assign): 3
ArrayList (java.util.ArrayList): 2
List (java.util.List): 2
DataBuffer (org.nd4j.linalg.api.buffer.DataBuffer): 2
Field (java.lang.reflect.Field): 1
IntBuffer (java.nio.IntBuffer): 1
NoSuchElementException (java.util.NoSuchElementException): 1
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 1
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 1
UniformDistribution (org.deeplearning4j.nn.conf.distribution.UniformDistribution): 1
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer): 1
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 1
MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 1
DifferentialFunction (org.nd4j.autodiff.functions.DifferentialFunction): 1
SDVariable (org.nd4j.autodiff.samediff.SDVariable): 1
IMax (org.nd4j.linalg.api.ops.impl.indexaccum.IMax): 1