Use of org.nd4j.linalg.api.buffer.DataBuffer in project nd4j by deeplearning4j.
The class BlasBufferUtil, method setData.
/**
 * Set the data for the underlying array.
 * If the underlying buffer's array is equivalent to this array,
 * it returns immediately (avoiding an unnecessary copy).
 *
 * If the underlying storage mechanism isn't heap based (no arrays),
 * it just copies the data over (with strided access and offsets where necessary).
 *
 * This is meant to be used with BLAS operations where the underlying BLAS
 * implementation takes an array but the data buffer being used might not be one.
 *
 * This is also for situations with strided access where it isn't optimal
 * to use the whole data buffer, just the subset of the buffer needed
 * for the calculation.
 *
 * @param data  the data to set
 * @param toSet the array to set the data on
 */
public static void setData(double[] data, INDArray toSet) {
    if (toSet.data().dataType() != DataBuffer.Type.DOUBLE) {
        throw new IllegalArgumentException("Unable to set double data for opType " + toSet.data().dataType());
    }
    if (toSet.data().allocationMode() == DataBuffer.AllocationMode.HEAP) {
        Object array = toSet.data().array();
        // data is assumed to have already been updated
        if (array == data)
            return;
        else {
            // copy the data over directly to the underlying array
            double[] d = (double[]) array;
            if (toSet.offset() == 0 && toSet.length() == data.length)
                System.arraycopy(data, 0, d, 0, d.length);
            else {
                int count = 0;
                // need to do strided access with offset
                for (int i = 0; i < data.length; i++) {
                    // FIXME: LONG
                    int dIndex = (int) toSet.offset() + (i * toSet.majorStride());
                    d[dIndex] = data[count++];
                }
            }
        }
    } else {
        // assumes the underlying data is in the right order
        DataBuffer underlyingData = toSet.data();
        if (data.length == toSet.length() && toSet.offset() == 0) {
            for (int i = 0; i < toSet.length(); i++) {
                underlyingData.put(i, data[i]);
            }
        } else {
            int count = 0;
            // need to do strided access with offset
            for (int i = 0; i < data.length; i++) {
                // FIXME: LONG
                int dIndex = (int) toSet.offset() + (i * toSet.majorStride());
                underlyingData.put(dIndex, data[count++]);
            }
        }
    }
}
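A minimal sketch of the intended usage around a BLAS-style call, assuming the default data type is DOUBLE and that BlasBufferUtil.getDoubleData is the companion getter for setData (both hold for this era of nd4j); the scaling loop stands in for an arbitrary native routine that mutates the array in place.

// assumes: import org.nd4j.linalg.api.blas.BlasBufferUtil; plus the usual INDArray/Nd4j imports
public static void scaleRowInPlace() {
    // a strided, offset view into a larger buffer
    INDArray view = Nd4j.linspace(1, 12, 12).reshape(3, 4).getRow(1);
    // copy the view out into a flat array that a native routine can consume
    double[] work = BlasBufferUtil.getDoubleData(view);
    for (int i = 0; i < work.length; i++)
        work[i] *= 2.0;
    // write the result back; setData skips the copy when the buffer already wraps this array
    BlasBufferUtil.setData(work, view);
}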
Use of org.nd4j.linalg.api.buffer.DataBuffer in project nd4j by deeplearning4j.
The class BinarySerdeTest, method testReadShapeFile.
@Test
public void testReadShapeFile() throws Exception {
    File tmpFile = new File(System.getProperty("java.io.tmpdir"), "ndarraytmp-" + UUID.randomUUID().toString() + ".bin");
    tmpFile.deleteOnExit();
    INDArray rand = Nd4j.randn(5, 5);
    BinarySerde.writeArrayToDisk(rand, tmpFile);
    DataBuffer buffer = BinarySerde.readShapeFromDisk(tmpFile);
    assertArrayEquals(rand.shapeInfoDataBuffer().asInt(), buffer.asInt());
}
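A companion sketch for the array itself rather than just its shape info; it assumes BinarySerde.readFromDisk is the read counterpart of the writeArrayToDisk call above.

@Test
public void testArrayRoundTrip() throws Exception {
    File tmpFile = new File(System.getProperty("java.io.tmpdir"), "ndarraytmp-" + UUID.randomUUID().toString() + ".bin");
    tmpFile.deleteOnExit();
    INDArray rand = Nd4j.randn(5, 5);
    BinarySerde.writeArrayToDisk(rand, tmpFile);
    // read the whole array back and compare it with the original
    INDArray read = BinarySerde.readFromDisk(tmpFile);
    assertEquals(rand, read);
}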
Use of org.nd4j.linalg.api.buffer.DataBuffer in project nd4j by deeplearning4j.
The class ArrowSerde, method fromTensor.
/**
* Convert a {@link Tensor}
* to an {@link INDArray}
* @param tensor the input tensor
* @return the equivalent {@link INDArray}
*/
public static INDArray fromTensor(Tensor tensor) {
    byte b = tensor.typeType();
    int[] shape = new int[tensor.shapeLength()];
    int[] stride = new int[tensor.stridesLength()];
    for (int i = 0; i < shape.length; i++) {
        shape[i] = (int) tensor.shape(i).size();
        stride[i] = (int) tensor.strides(i);
    }
    int length = ArrayUtil.prod(shape);
    Buffer buffer = tensor.data();
    if (buffer == null) {
        throw new ND4JIllegalStateException("Buffer was not serialized properly.");
    }
    // deduce the element size from the buffer length and the number of elements
    int elementSize = (int) buffer.length() / length;
    // Arrow strides are in bytes; nd4j strides are in elements, so divide by the element size
    for (int i = 0; i < stride.length; i++) {
        stride[i] /= elementSize;
    }
    DataBuffer.Type type = typeFromTensorType(b, elementSize);
    DataBuffer dataBuffer = DataBufferStruct.createFromByteBuffer(tensor.getByteBuffer(), (int) tensor.data().offset(), type, length, elementSize);
    INDArray arr = Nd4j.create(dataBuffer, shape);
    arr.setShapeAndStride(shape, stride);
    return arr;
}
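A round-trip sketch, assuming ArrowSerde also exposes a toTensor(INDArray) counterpart to fromTensor:

INDArray original = Nd4j.linspace(1, 6, 6).reshape(2, 3);
// serialize to an Arrow Tensor flatbuffer and rebuild the array from it
Tensor tensor = ArrowSerde.toTensor(original);
INDArray restored = ArrowSerde.fromTensor(tensor);
// shape, strides and contents should survive the round trip
assertEquals(original, restored);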
Use of org.nd4j.linalg.api.buffer.DataBuffer in project nd4j by deeplearning4j.
The class VectorDeSerializer, method deserialize.
@Override
public INDArray deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException {
    JsonNode node = jp.getCodec().readTree(jp);
    JsonNode arr = node.get("dataBuffer");
    int rank = node.get("rankField").asInt();
    int numElements = node.get("numElements").asInt();
    int offset = node.get("offsetField").asInt();
    JsonNode shape = node.get("shapeField");
    JsonNode stride = node.get("strideField");
    String type = node.get("typeField").asText();
    int[] realShape = new int[rank];
    int[] realStride = new int[rank];
    // rebuild the data buffer from the serialized element values
    DataBuffer buff = Nd4j.createBuffer(numElements);
    for (int i = 0; i < numElements; i++) {
        buff.put(i, arr.get(i).asDouble());
    }
    String ordering = node.get("orderingField").asText();
    for (int i = 0; i < rank; i++) {
        realShape[i] = shape.get(i).asInt();
        realStride[i] = stride.get(i).asInt();
    }
    // real and complex arrays are reconstructed through different factory methods
    INDArray ret = type.equals("real") ? Nd4j.create(buff, realShape, realStride, offset, ordering.charAt(0))
                    : Nd4j.createComplex(buff, realShape, realStride, offset, ordering.charAt(0));
    return ret;
}
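A sketch of how such a deserializer is usually registered with Jackson; it assumes VectorDeSerializer has a no-argument constructor and extends Jackson's JsonDeserializer<INDArray>, and the method name is illustrative.

public static INDArray readNdArrayJson(String json) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    SimpleModule module = new SimpleModule();
    // route any INDArray field through the custom deserializer
    module.addDeserializer(INDArray.class, new VectorDeSerializer());
    mapper.registerModule(module);
    return mapper.readValue(json, INDArray.class);
}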
Use of org.nd4j.linalg.api.buffer.DataBuffer in project nd4j by deeplearning4j.
The class NDArrayTestsFortran, method testRowsColumns.
@Test
public void testRowsColumns() {
    DataBuffer data = Nd4j.linspace(1, 6, 6).data();
    INDArray rows = Nd4j.create(data, new int[] { 2, 3 });
    assertEquals(2, rows.rows());
    assertEquals(3, rows.columns());
    INDArray columnVector = Nd4j.create(data, new int[] { 6, 1 });
    assertEquals(6, columnVector.rows());
    assertEquals(1, columnVector.columns());
    INDArray rowVector = Nd4j.create(data, new int[] { 1, 6 });
    assertEquals(1, rowVector.rows());
    assertEquals(6, rowVector.columns());
}
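The same rows/columns bookkeeping can also be exercised without handling the DataBuffer directly; a small sketch using reshape and transpose (the test name is illustrative).

@Test
public void testRowsColumnsViaReshape() {
    // for a contiguous array, reshape typically returns a view over the same underlying buffer
    INDArray reshaped = Nd4j.linspace(1, 6, 6).reshape(2, 3);
    assertEquals(2, reshaped.rows());
    assertEquals(3, reshaped.columns());
    // transpose swaps the row and column counts without moving the data
    assertEquals(3, reshaped.transpose().rows());
    assertEquals(2, reshaped.transpose().columns());
}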