use of org.nd4j.linalg.api.iter.NdIndexIterator in project deeplearning4j by deeplearning4j.
the class BackPropMLPTest method asFloat.
public static float[] asFloat(INDArray arr) {
// Flattens the array into a float[] in row-major ('c') coordinate order,
// independent of the array's own underlying storage ordering.
int n = arr.length();
float[] out = new float[n];
NdIndexIterator coords = new NdIndexIterator('c', arr.shape());
int pos = 0;
while (pos < n) {
out[pos++] = arr.getFloat(coords.next());
}
return out;
}
use of org.nd4j.linalg.api.iter.NdIndexIterator in project nd4j by deeplearning4j.
the class BaseNDArray method put.
@Override
public INDArray put(INDArray indices, INDArray element) {
// Scatter-style put: writes values from {@code element} into this array at
// positions described by {@code indices}. Only vector/matrix index arrays
// are accepted.
if (indices.rank() > 2) {
throw new ND4JIllegalArgumentException("Indices must be a vector or matrix.");
}
if (indices.rows() == rank()) {
// Each column of {@code indices} is one full coordinate into this array;
// source values are drawn from {@code element} in its own iteration order.
NdIndexIterator ndIndexIterator = new NdIndexIterator(element.shape());
for (int i = 0; i < indices.columns(); i++) {
int[] specifiedIndex = indices.getColumn(i).dup().data().asInt();
putScalar(specifiedIndex, element.getDouble(ndIndexIterator.next()));
}
} else {
List<INDArray> arrList = new ArrayList<>();
if (indices.isMatrix() || indices.isColumnVector()) {
for (int i = 0; i < indices.rows(); i++) {
INDArray row = indices.getRow(i);
for (int j = 0; j < row.length(); j++) {
// Assign copies {@code element} into the selected slice in place;
// the slice re-fetched below is only accumulated, never read back.
INDArray slice = slice(row.getInt(j));
Nd4j.getExecutioner().exec(new Assign(new INDArray[] { slice, element }, new INDArray[] { slice }));
arrList.add(slice(row.getInt(j)));
}
}
} else if (indices.isRowVector()) {
// NOTE(review): this branch only collects slices into arrList, which is
// never used afterwards — nothing from {@code element} is written here.
// Looks like dead code or a missing Assign; confirm intended behavior.
for (int i = 0; i < indices.length(); i++) {
arrList.add(slice(indices.getInt(i)));
}
}
}
return this;
}
use of org.nd4j.linalg.api.iter.NdIndexIterator in project nd4j by deeplearning4j.
the class NormalDistribution method sample.
@Override
public INDArray sample(INDArray ret) {
// Fast path: a native RNG state pointer lets the executioner fill the
// whole array with one Gaussian op.
if (random.getStatePointer() != null) {
GaussianDistribution op = (means != null)
? new GaussianDistribution(ret, means, standardDeviation)
: new GaussianDistribution(ret, mean, standardDeviation);
return Nd4j.getExecutioner().exec(op, random);
}
// Fallback: fill element by element. Walking coordinates through an
// NdIndexIterator keeps the generated sequence identical irrespective of
// c vs. fortran ordering of {@code ret}.
NdIndexIterator idxIter = new NdIndexIterator(ret.shape());
int len = ret.length();
for (int i = 0; i < len; i++) {
int[] idx = idxIter.next();
double mu = (means != null) ? means.getDouble(idx) : mean;
ret.putScalar(idx, standardDeviation * random.nextGaussian() + mu);
}
return ret;
}
use of org.nd4j.linalg.api.iter.NdIndexIterator in project nd4j by deeplearning4j.
the class LapackTest method testEv.
/**
 * Checks that LAPACK {@code syev} produces a valid symmetric eigendecomposition:
 * for symmetric A, verifies A * V == V * diag(lambda), where the eigenvectors V
 * are returned in-place in A and the eigenvalues lambda in the vector V.
 *
 * @param N           size of the square test matrix
 * @param matrixOrder storage ordering ('c' or 'f') for the generated matrix
 */
void testEv(int N, char matrixOrder) {
INDArray A = Nd4j.rand(N, N, matrixOrder);
// Symmetrize: mirror the lower triangle into the upper triangle so the
// symmetric-input precondition of syev holds.
for (int r = 1; r < N; r++) {
for (int c = 0; c < r; c++) {
double v = A.getDouble(r, c);
A.putScalar(c, r, v);
}
}
// Keep the original matrix; syev overwrites A with the eigenvectors.
INDArray Aorig = A.dup();
INDArray V = Nd4j.create(N);
// 'V' requests eigenvectors, 'U' tells LAPACK to use the upper triangle.
Nd4j.getBlasWrapper().lapack().syev('V', 'U', A, V);
// Build diag(lambda) from the eigenvalue vector.
INDArray VV = Nd4j.create(N, N);
for (int i = 0; i < N; i++) {
VV.put(i, i, V.getDouble(i));
}
// Compare A_orig * V (L) against V * diag(lambda) (R) element-wise.
INDArray L = Aorig.mmul(A);
INDArray R = A.mmul(VV);
NdIndexIterator iter = new NdIndexIterator(L.shape());
while (iter.hasNext()) {
int[] pos = iter.next();
// Fixed misleading failure message: this test exercises syev
// (eigendecomposition), not gesvd/SVD.
assertEquals("Eigendecomposition did not factorize properly", L.getDouble(pos), R.getDouble(pos), 1e-5);
}
}
use of org.nd4j.linalg.api.iter.NdIndexIterator in project nd4j by deeplearning4j.
the class LapackTest method testSvd.
//
// Helper functions - not direct test cases
//
void testSvd(int M, int N, char matrixOrder) {
// Random M x N input; keep an untouched copy since gesvd overwrites A.
INDArray A = Nd4j.rand(M, N, matrixOrder);
INDArray Aorig = A.dup();
INDArray U = Nd4j.create(M, M, matrixOrder);
INDArray S = Nd4j.create(N, matrixOrder);
INDArray VT = Nd4j.create(N, N, matrixOrder);
Nd4j.getBlasWrapper().lapack().gesvd(A, S, U, VT);
// Expand the singular-value vector into an M x N diagonal matrix.
INDArray SS = Nd4j.create(M, N);
int diagLen = Math.min(M, N);
for (int i = 0; i < diagLen; i++) {
SS.put(i, i, S.getDouble(i));
}
// Reconstruct U * S * V^T and compare against the original element-wise.
INDArray reconstructed = U.mmul(SS).mmul(VT);
NdIndexIterator iter = new NdIndexIterator(reconstructed.shape());
while (iter.hasNext()) {
int[] pos = iter.next();
assertEquals("SVD did not factorize properly", reconstructed.getDouble(pos), Aorig.getDouble(pos), 1e-5);
}
}
Aggregations