Usage of org.nd4j.linalg.api.ndarray.BaseSparseNDArray in the nd4j project (deeplearning4j):
the axpy method of class SparseBaseLevel1.
/**
 * Computes {@code y := alpha * x + y}, where {@code x} is a compressed sparse vector and
 * {@code y} is a full-storage (dense) vector. Dispatches to the typed sparse BLAS routine
 * (daxpyi / saxpyi / haxpyi) matching the data type of {@code x}, after validating that
 * both operands carry that same data type.
 *
 * @param n the number of elements of the compressed vector {@code x}
 * @param alpha the scalar multiplier applied to {@code x}
 * @param x a sparse vector; must be a {@link BaseSparseNDArray}
 * @param y a dense vector, updated in place
 * @throws UnsupportedOperationException if {@code x} is not sparse or its data type is
 *         not one of DOUBLE, FLOAT, HALF
 */
@Override
public void axpy(int n, double alpha, INDArray x, INDArray y) {
    // Guard before casting, mirroring the instanceof check used by dot(...):
    // a dense x previously surfaced as a raw ClassCastException.
    if (!(x instanceof BaseSparseNDArray)) {
        throw new UnsupportedOperationException();
    }
    BaseSparseNDArray sparseX = (BaseSparseNDArray) x;
    // Indices of the non-zero elements of the sparse vector.
    DataBuffer pointers = sparseX.getVectorCoordinates();
    switch (x.data().dataType()) {
        case DOUBLE:
            DefaultOpExecutioner.validateDataType(DataBuffer.Type.DOUBLE, x);
            DefaultOpExecutioner.validateDataType(DataBuffer.Type.DOUBLE, y);
            daxpyi(n, alpha, x, pointers, y);
            break;
        case FLOAT:
            DefaultOpExecutioner.validateDataType(DataBuffer.Type.FLOAT, x);
            DefaultOpExecutioner.validateDataType(DataBuffer.Type.FLOAT, y);
            saxpyi(n, alpha, x, pointers, y);
            break;
        case HALF:
            DefaultOpExecutioner.validateDataType(DataBuffer.Type.HALF, x);
            DefaultOpExecutioner.validateDataType(DataBuffer.Type.HALF, y);
            haxpyi(n, alpha, x, pointers, y);
            break;
        default:
            throw new UnsupportedOperationException();
    }
}
Usage of org.nd4j.linalg.api.ndarray.BaseSparseNDArray in the nd4j project (deeplearning4j):
the dot method of class SparseBaseLevel1.
/**
 * Computes the vector-vector dot product of a sparse vector {@code X} with {@code Y},
 * dispatching to the typed sparse BLAS routine (ddoti / sdoti / hdoti) that matches
 * the data type of {@code X}.
 *
 * @param n the number of accessed elements
 * @param alpha currently unused by this sparse implementation
 * @param X an INDArray; must be a {@link BaseSparseNDArray} for the call to succeed
 * @param Y an INDArray
 * @return the dot product of X and Y
 * @throws UnsupportedOperationException if X is not sparse or its data type is not
 *         one of DOUBLE, FLOAT, HALF
 */
@Override
public double dot(int n, double alpha, INDArray X, INDArray Y) {
    if (X instanceof BaseSparseNDArray) {
        BaseSparseNDArray sparse = (BaseSparseNDArray) X;
        // Indices of the non-zero elements of the sparse vector.
        DataBuffer indices = sparse.getVectorCoordinates();
        DataBuffer.Type type = X.data().dataType();
        if (type == DataBuffer.Type.DOUBLE) {
            DefaultOpExecutioner.validateDataType(DataBuffer.Type.DOUBLE, X, Y);
            return ddoti(n, X, indices, Y);
        } else if (type == DataBuffer.Type.FLOAT) {
            DefaultOpExecutioner.validateDataType(DataBuffer.Type.FLOAT, X, Y);
            return sdoti(n, X, indices, Y);
        } else if (type == DataBuffer.Type.HALF) {
            DefaultOpExecutioner.validateDataType(DataBuffer.Type.HALF, X, Y);
            return hdoti(n, X, indices, Y);
        }
        // Any other data type falls through to the throw below.
    }
    throw new UnsupportedOperationException();
}
Usage of org.nd4j.linalg.api.ndarray.BaseSparseNDArray in the nd4j project (deeplearning4j):
the shouldComputeRotWithFullVector method of class SparseCSRLevel1Test.
/**
 * Verifies that level1().rot applied to a sparse CSR vector and a dense vector
 * produces the same result as the dense/dense reference run shown below.
 */
@Test
public void shouldComputeRotWithFullVector() {
    // Reference run with two dense vectors, kept for documentation:
    /*
    INDArray temp1 = Nd4j.create( new double[] {1 ,2, 3, 4});
    INDArray temp2 = Nd4j.create( new double[] {1 ,2, 3, 4});
    System.out.println("before: " + temp1.data() + " " + temp2.data());
    Nd4j.getBlasWrapper().level1().rot(temp1.length(), temp1, temp2, 1, 2);
    System.out.println("after: " + temp1.data() + " " + temp2.data());
    */
    // before: [1.0,2.0,3.0,4.0] [1.0,2.0,3.0,4.0]
    // after:  [3.0,6.0,9.0,12.0] [-1.0,-2.0,-3.0,-4.0]
    // (with c=1, s=2: x' = 1*x + 2*y, y' = 1*y - 2*x; the third element is 9.)
    int[] cols = { 0, 1, 2, 3 };
    double[] values = { 1, 2, 3, 4 };
    INDArray sparseVec = Nd4j.createSparseCSR(values, cols, pointerB, pointerE, shape);
    INDArray vec = Nd4j.create(new double[] { 1, 2, 3, 4 });
    Nd4j.getBlasWrapper().level1().rot(vec.length(), sparseVec, vec, 1, 2);
    INDArray expectedSparseVec = Nd4j.createSparseCSR(new double[] { 3, 6, 9, 12 }, new int[] { 0, 1, 2, 3 }, new int[] { 0 }, new int[] { 4 }, new int[] { 1, 4 });
    INDArray expectedVec = Nd4j.create(new double[] { -1, -2, -3, -4 });
    assertEquals(getFailureMessage(), expectedSparseVec.data(), sparseVec.data());
    assertEquals(getFailureMessage(), expectedVec, vec);
    if (expectedSparseVec.isSparse() && sparseVec.isSparse()) {
        BaseSparseNDArray expected = ((BaseSparseNDArray) expectedSparseVec);
        BaseSparseNDArray actual = ((BaseSparseNDArray) sparseVec);
        // Fix: compare the coordinate buffers of both arrays; the original compared
        // a DataBuffer against the whole NDArray, which could never be equal.
        assertEquals(getFailureMessage(), expected.getVectorCoordinates(), actual.getVectorCoordinates());
    }
}
Usage of org.nd4j.linalg.api.ndarray.BaseSparseNDArray in the nd4j project (deeplearning4j):
the shouldComputeRotWithFullVector method of class SparseCOOLevel1Test.
/**
 * Verifies that level1().rot applied to a sparse COO vector and a dense vector
 * produces the same result as the dense/dense reference run shown below.
 */
@Test
public void shouldComputeRotWithFullVector() {
    // Reference run with two dense vectors, kept for documentation:
    /*
    INDArray temp1 = Nd4j.create( new double[] {1 ,2, 3, 4});
    INDArray temp2 = Nd4j.create( new double[] {1 ,2, 3, 4});
    System.out.println("before: " + temp1.data() + " " + temp2.data());
    Nd4j.getBlasWrapper().level1().rot(temp1.length(), temp1, temp2, 1, 2);
    System.out.println("after: " + temp1.data() + " " + temp2.data());
    */
    // before: [1.0,2.0,3.0,4.0] [1.0,2.0,3.0,4.0]
    // after:  [3.0,6.0,9.0,12.0] [-1.0,-2.0,-3.0,-4.0]
    // (with c=1, s=2: x' = 1*x + 2*y, y' = 1*y - 2*x; the third element is 9.)
    INDArray sparseVec = Nd4j.createSparseCOO(data, indexes, shape);
    INDArray vec = Nd4j.create(new double[] { 1, 2, 3, 4 });
    Nd4j.getBlasWrapper().level1().rot(vec.length(), sparseVec, vec, 1, 2);
    // NOTE(review): the expected array is built as CSR inside a COO test; the data()
    // comparison below relies on both formats exposing the same value buffer — confirm.
    INDArray expectedSparseVec = Nd4j.createSparseCSR(new double[] { 3, 6, 9, 12 }, new int[] { 0, 1, 2, 3 }, new int[] { 0 }, new int[] { 4 }, new int[] { 1, 4 });
    INDArray expectedVec = Nd4j.create(new double[] { -1, -2, -3, -4 });
    assertEquals(getFailureMessage(), expectedSparseVec.data(), sparseVec.data());
    assertEquals(getFailureMessage(), expectedVec, vec);
    if (expectedSparseVec.isSparse() && sparseVec.isSparse()) {
        BaseSparseNDArray expected = ((BaseSparseNDArray) expectedSparseVec);
        BaseSparseNDArray actual = ((BaseSparseNDArray) sparseVec);
        // Fix: compare the coordinate buffers of both arrays; the original compared
        // a DataBuffer against the whole NDArray, which could never be equal.
        // Also removed the unused cols/values locals left over from the CSR variant.
        assertEquals(getFailureMessage(), expected.getVectorCoordinates(), actual.getVectorCoordinates());
    }
}
Aggregations