Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.
From the class BasicTADManager, method getTADOnlyShapeInfo:
/**
 * Builds (or hand-writes, for the scalar case) the TAD-only shape info buffer and the
 * per-TAD offsets buffer for the given array and dimensions.
 *
 * @param array     array whose tensors-along-dimension are being described
 * @param dimension requested dimensions; null (or the single Integer.MAX_VALUE sentinel)
 *                  means "treat the whole array as one scalar-style TAD"
 * @return pair of (shape-info buffer, offsets buffer)
 */
@Override
public Pair<DataBuffer, DataBuffer> getTADOnlyShapeInfo(INDArray array, int[] dimension) {
    // Canonicalize multi-dimension requests so equivalent requests look identical downstream.
    if (dimension != null && dimension.length > 1)
        Arrays.sort(dimension);

    // Normalize the null request to the scalar sentinel used throughout nd4j.
    if (dimension == null)
        dimension = new int[] { Integer.MAX_VALUE };

    // dimension can no longer be null here, so only the sentinel test is needed
    // (the original also re-checked null, which was always false at this point).
    boolean isScalar = dimension.length == 1 && dimension[0] == Integer.MAX_VALUE;

    // Scalar TADs get a fixed rank-2 descriptor; otherwise the descriptor matches the array rank.
    // FIXME: this is fast triage, remove it later
    // dimensionLength <= 1 ? 2 : dimensionLength;
    int targetRank = isScalar ? 2 : array.rank();

    // tadLength = product of extents along the requested dimensions; each TAD holds that many elements.
    long tadLength = 1;
    if (!isScalar) {
        for (int i = 0; i < dimension.length; i++) {
            tadLength *= array.shape()[dimension[i]];
        }
    }

    // Number of TADs = total elements / elements per TAD (exactly one for the scalar case).
    long offsetLength = isScalar ? 1 : array.lengthLong() / tadLength;

    // logger.info("Original shape info before TAD: {}", array.shapeInfoDataBuffer());
    // logger.info("dimension: {}, tadLength: {}, offsetLength for TAD: {}", Arrays.toString(dimension),tadLength, offsetLength);

    // Shape-info buffer layout is 2*rank + 4 ints: rank, shape[rank], stride[rank], plus trailing fields
    // (see the scalar branch below for the concrete values written).
    DataBuffer outputBuffer = new CudaIntDataBuffer(targetRank * 2 + 4);
    DataBuffer offsetsBuffer = new CudaLongDataBuffer(offsetLength);

    // Mark host-side as the most recent writer so the allocator knows device copies are stale.
    AtomicAllocator.getInstance().getAllocationPoint(outputBuffer).tickHostWrite();
    AtomicAllocator.getInstance().getAllocationPoint(offsetsBuffer).tickHostWrite();

    DataBuffer dimensionBuffer = AtomicAllocator.getInstance().getConstantBuffer(dimension);
    Pointer dimensionPointer = AtomicAllocator.getInstance().getHostPointer(dimensionBuffer);

    Pointer xShapeInfo = AddressRetriever.retrieveHostPointer(array.shapeInfoDataBuffer());
    Pointer targetPointer = AddressRetriever.retrieveHostPointer(outputBuffer);
    Pointer offsetsPointer = AddressRetriever.retrieveHostPointer(offsetsBuffer);

    if (!isScalar) {
        // Native call fills both the shape info and the per-TAD offsets.
        nativeOps.tadOnlyShapeInfo((IntPointer) xShapeInfo, (IntPointer) dimensionPointer, dimension.length,
                        (IntPointer) targetPointer, new LongPointerWrapper(offsetsPointer));
    } else {
        // Hand-written descriptor for the scalar TAD:
        outputBuffer.put(0, 2);   // rank
        outputBuffer.put(1, 1);   // shape[0]
        outputBuffer.put(2, 1);   // shape[1]
        outputBuffer.put(3, 1);   // stride[0]
        outputBuffer.put(4, 1);   // stride[1]
        outputBuffer.put(5, 0);   // offset
        outputBuffer.put(6, 0);   // element-wise stride field — NOTE(review): 0 looks unusual; confirm intended
        outputBuffer.put(7, 99);  // order: 'c'
    }

    // Host side wrote again (native call or manual fill) — tick once more.
    AtomicAllocator.getInstance().getAllocationPoint(outputBuffer).tickHostWrite();
    AtomicAllocator.getInstance().getAllocationPoint(offsetsBuffer).tickHostWrite();

    return new Pair<>(outputBuffer, offsetsBuffer);
}
Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.
From the class TestInvertMatrices, method testInverseComparison:
/**
 * Verifies that ND4J's matrix inversion agrees with the Apache Commons Math
 * LU-decomposition inverse across all test-matrix layouts of shape 10x10.
 */
@Test
public void testInverseComparison() {
    List<Pair<INDArray, String>> testMatrices = NDArrayCreationUtil.getAllTestMatricesWithShape(10, 10, 12345);
    for (Pair<INDArray, String> testCase : testMatrices) {
        INDArray matrix = testCase.getFirst();
        matrix.assign(Nd4j.rand(matrix.shape()));

        // Inverse as computed by ND4J.
        INDArray actualInverse = InvertMatrix.invert(matrix, false);

        // Reference inverse via Apache Commons Math.
        RealMatrix apacheMatrix = CheckUtil.convertToApacheMatrix(matrix);
        RealMatrix apacheInverse = new LUDecomposition(apacheMatrix).getSolver().getInverse();
        INDArray expectedInverse = CheckUtil.convertFromApacheMatrix(apacheInverse);

        // The pair's second element describes the matrix layout; use it as the failure message.
        assertTrue(testCase.getSecond(), CheckUtil.checkEntries(expectedInverse, actualInverse, 1e-3, 1e-4));
    }
}
Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.
From the class ConcatTests, method testConcat3dv2:
/**
 * Tests Nd4j.concat on 3d arrays along each of the three dimensions, across all
 * test-array layouts produced by NDArrayCreationUtil. For each dimension the
 * expected result is assembled via indexed put, then every layout combination of
 * the three inputs must concat to that same expected array.
 */
@Test
@Ignore
public void testConcat3dv2() {
    INDArray first = Nd4j.linspace(1, 24, 24).reshape('c', 2, 3, 4);
    INDArray second = Nd4j.linspace(24, 35, 12).reshape('c', 1, 3, 4);
    INDArray third = Nd4j.linspace(36, 47, 12).reshape('c', 1, 3, 4);

    // ConcatV2, dim 0
    INDArray exp = Nd4j.create(2 + 1 + 1, 3, 4);
    exp.put(new INDArrayIndex[] { NDArrayIndex.interval(0, 2), NDArrayIndex.all(), NDArrayIndex.all() }, first);
    exp.put(new INDArrayIndex[] { NDArrayIndex.point(2), NDArrayIndex.all(), NDArrayIndex.all() }, second);
    exp.put(new INDArrayIndex[] { NDArrayIndex.point(3), NDArrayIndex.all(), NDArrayIndex.all() }, third);
    checkConcatAllLayouts(0, exp, first, second, third,
                    NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 2, 3, 4),
                    NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 1, 3, 4),
                    NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 1, 3, 4));

    // ConcatV2, dim 1
    second = Nd4j.linspace(24, 31, 8).reshape('c', 2, 1, 4);
    third = Nd4j.linspace(32, 47, 16).reshape('c', 2, 2, 4);
    exp = Nd4j.create(2, 3 + 1 + 2, 4);
    exp.put(new INDArrayIndex[] { NDArrayIndex.all(), NDArrayIndex.interval(0, 3), NDArrayIndex.all() }, first);
    exp.put(new INDArrayIndex[] { NDArrayIndex.all(), NDArrayIndex.point(3), NDArrayIndex.all() }, second);
    exp.put(new INDArrayIndex[] { NDArrayIndex.all(), NDArrayIndex.interval(4, 6), NDArrayIndex.all() }, third);
    checkConcatAllLayouts(1, exp, first, second, third,
                    NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 2, 3, 4),
                    NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 2, 1, 4),
                    NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 2, 2, 4));

    // ConcatV2, dim 2
    second = Nd4j.linspace(24, 35, 12).reshape('c', 2, 3, 2);
    third = Nd4j.linspace(36, 41, 6).reshape('c', 2, 3, 1);
    exp = Nd4j.create(2, 3, 4 + 2 + 1);
    exp.put(new INDArrayIndex[] { NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4) }, first);
    exp.put(new INDArrayIndex[] { NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(4, 6) }, second);
    exp.put(new INDArrayIndex[] { NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(6) }, third);
    checkConcatAllLayouts(2, exp, first, second, third,
                    NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 2, 3, 4),
                    NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 2, 3, 2),
                    NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, 2, 3, 1));
}

/**
 * Concats every layout combination of (firsts x seconds x thirds) along {@code dim},
 * after assigning the reference contents into each layout, and asserts each result
 * equals {@code exp}. Extracted from three copy-pasted loop nests; a stray debug
 * println in the first nest was dropped.
 */
private static void checkConcatAllLayouts(int dim, INDArray exp, INDArray first, INDArray second, INDArray third,
                List<Pair<INDArray, String>> firsts, List<Pair<INDArray, String>> seconds,
                List<Pair<INDArray, String>> thirds) {
    for (Pair<INDArray, String> f : firsts) {
        for (Pair<INDArray, String> s : seconds) {
            for (Pair<INDArray, String> t : thirds) {
                INDArray f2 = f.getFirst().assign(first);
                INDArray s2 = s.getFirst().assign(second);
                INDArray t2 = t.getFirst().assign(third);
                assertEquals(exp, Nd4j.concat(dim, f2, s2, t2));
            }
        }
    }
}
Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.
From the class CpuNDArrayFactory, method shuffle:
/**
 * Symmetric in-place shuffle of a set of ndarrays along specified dimensions.
 * All arrays are shuffled with the same permutation of TADs, so corresponding
 * tensors stay aligned across arrays. Each array in the list uses the dimension
 * entry at the same index of {@code dimensions} (or the single shared entry).
 *
 * @param arrays     the ndarrays to shuffle; all must yield the same TAD count
 * @param rnd        randomness source for the permutation
 * @param dimensions the dimensions to shuffle along; size 1 (shared) or arrays.size()
 * @throws RuntimeException         if arrays or dimensions are null/empty
 * @throws IllegalStateException    if dimensions.size() > 1 but doesn't match arrays.size()
 * @throws ND4JIllegalStateException if an array's TAD count or offsets are unusable
 */
@Override
public void shuffle(List<INDArray> arrays, Random rnd, List<int[]> dimensions) {
    if (dimensions == null || dimensions.size() == 0)
        throw new RuntimeException("Dimension can't be null or 0-length");
    if (arrays == null || arrays.size() == 0)
        throw new RuntimeException("No input arrays provided");
    if (dimensions.size() > 1 && arrays.size() != dimensions.size())
        throw new IllegalStateException("Number of dimensions do not match number of arrays to shuffle");

    // TAD length/count are derived from the first array; all others must match its TAD count.
    int tadLength = 1;
    for (int i = 0; i < dimensions.get(0).length; i++) {
        tadLength *= arrays.get(0).shape()[dimensions.get(0)[i]];
    }
    int numTads = arrays.get(0).length() / tadLength;

    // The shared permutation applied to every array's TADs.
    int[] map = ArrayUtil.buildInterleavedVector(rnd, numTads);

    PointerPointer dataPointers = new PointerPointer(arrays.size());
    PointerPointer shapePointers = new PointerPointer(arrays.size());
    PointerPointer tadPointers = new PointerPointer(arrays.size());
    PointerPointer offsetPointers = new PointerPointer(arrays.size());
    PointerPointer dummy = new PointerPointer(new Pointer[] { null });

    // Hold the TAD buffers in a list so they stay reachable for the duration of the native call.
    List<Pair<DataBuffer, DataBuffer>> list = new ArrayList<>();
    TADManager tadManager = Nd4j.getExecutioner().getTADManager();
    IntPointer ptrMap = new IntPointer(map);
    long[] ptrs = new long[arrays.size()];

    for (int i = 0; i < arrays.size(); i++) {
        INDArray array = arrays.get(i);
        Nd4j.getCompressor().autoDecompress(array);

        int[] dimension = dimensions.size() > 1 ? dimensions.get(i) : dimensions.get(0);

        Pair<DataBuffer, DataBuffer> tadBuffers = tadManager.getTADOnlyShapeInfo(array, dimension);
        list.add(tadBuffers);

        Pointer hostTadShapeInfo = tadBuffers.getFirst().addressPointer();
        DataBuffer offsets = tadBuffers.getSecond();

        // BUG FIX: the null check must come BEFORE dereferencing offsets.length();
        // the original order made the null check unreachable in the null case (NPE first).
        if (offsets == null)
            throw new ND4JIllegalStateException("Offsets for shuffle can't be null");
        if (offsets.length() != numTads)
            throw new ND4JIllegalStateException("Can't symmetrically shuffle arrays with non-equal number of TADs");

        dataPointers.put(i, array.data().addressPointer());
        shapePointers.put(i, array.shapeInfoDataBuffer().addressPointer());
        offsetPointers.put(i, offsets.addressPointer());
        tadPointers.put(i, tadBuffers.getFirst().addressPointer());
    }

    if (Nd4j.dataType() == DataBuffer.Type.DOUBLE) {
        nativeOps.shuffleDouble(dummy, dataPointers, shapePointers, dataPointers, shapePointers, arrays.size(),
                        ptrMap, tadPointers, offsetPointers);
    } else if (Nd4j.dataType() == DataBuffer.Type.FLOAT) {
        nativeOps.shuffleFloat(dummy, dataPointers, shapePointers, dataPointers, shapePointers, arrays.size(),
                        ptrMap, tadPointers, offsetPointers);
    } else {
        // HALFs — NOTE(review): silently unsupported here; the call is a no-op for half precision.
    }

    // NOTE(review): these no-op address() calls presumably keep the PointerPointers strongly
    // reachable until after the native call returns — confirm before removing.
    dataPointers.address();
    shapePointers.address();
    tadPointers.address();
    offsetPointers.address();
}
Use of org.nd4j.linalg.primitives.Pair in project nd4j by deeplearning4j.
From the class Nd4jTestsC, method testTensorStats:
/**
 * Checks OpExecutionerUtil.get1DTensorStats against values derived directly from
 * tensorAlongDimension, for dimensions 0 and 1, across all 9x13 test-matrix layouts:
 * tensor count, first-tensor offset, start separation, and per-tensor length,
 * element-wise stride, and computed offsets.
 */
@Test
public void testTensorStats() {
    List<Pair<INDArray, String>> inputs = NDArrayCreationUtil.getAllTestMatricesWithShape(9, 13, 123);
    for (Pair<INDArray, String> input : inputs) {
        INDArray arr = input.getFirst();
        String msg = input.getSecond();

        int numTensorsDim0 = arr.tensorssAlongDimension(0);
        int numTensorsDim1 = arr.tensorssAlongDimension(1);

        OpExecutionerUtil.Tensor1DStats statsDim0 = OpExecutionerUtil.get1DTensorStats(arr, 0);
        OpExecutionerUtil.Tensor1DStats statsDim1 = OpExecutionerUtil.get1DTensorStats(arr, 1);

        // Tensor counts must agree with tensorssAlongDimension.
        assertEquals(numTensorsDim0, statsDim0.getNumTensors());
        assertEquals(numTensorsDim1, statsDim1.getNumTensors());

        INDArray firstTadDim0 = arr.tensorAlongDimension(0, 0);
        INDArray secondTadDim0 = arr.tensorAlongDimension(1, 0);
        INDArray firstTadDim1 = arr.tensorAlongDimension(0, 1);
        INDArray secondTadDim1 = arr.tensorAlongDimension(1, 1);

        // First-tensor offsets must match the actual TAD offsets.
        assertEquals(firstTadDim0.offset(), statsDim0.getFirstTensorOffset());
        assertEquals(firstTadDim1.offset(), statsDim1.getFirstTensorOffset());

        // Separation between consecutive tensors must match the reported start separation.
        assertEquals(secondTadDim0.offset() - firstTadDim0.offset(), statsDim0.getTensorStartSeparation());
        assertEquals(secondTadDim1.offset() - firstTadDim1.offset(), statsDim1.getTensorStartSeparation());

        // Every dim-0 tensor: length, element-wise stride, and computed offset must agree.
        for (int i = 0; i < numTensorsDim0; i++) {
            INDArray tad = arr.tensorAlongDimension(i, 0);
            assertEquals(tad.length(), statsDim0.getTensorLength());
            assertEquals(tad.elementWiseStride(), statsDim0.getElementWiseStride());
            assertEquals(tad.offset(),
                            statsDim0.getFirstTensorOffset() + i * statsDim0.getTensorStartSeparation());
        }

        // Every dim-1 tensor: same checks.
        for (int i = 0; i < numTensorsDim1; i++) {
            INDArray tad = arr.tensorAlongDimension(i, 1);
            assertEquals(tad.length(), statsDim1.getTensorLength());
            assertEquals(tad.elementWiseStride(), statsDim1.getElementWiseStride());
            assertEquals(tad.offset(),
                            statsDim1.getFirstTensorOffset() + i * statsDim1.getTensorStartSeparation());
        }
    }
}
Aggregations