Example usage of org.nd4j.linalg.api.buffer.DataBuffer in the nd4j project (by deeplearning4j): class Nd4jTestsC, method testNullPointerDataBuffer.
@Test
public void testNullPointerDataBuffer() {
    // Verifies that an INDArray created over a direct, native-order ByteBuffer
    // reads the underlying floats correctly (sum of 1..10 == 55).
    DataBuffer.Type initialType = Nd4j.dataType();
    DataTypeUtil.setDTypeForContext(DataBuffer.Type.FLOAT);
    try {
        ByteBuffer allocate = ByteBuffer.allocateDirect(10 * 4).order(ByteOrder.nativeOrder());
        allocate.asFloatBuffer().put(new float[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
        DataBuffer buff = Nd4j.createBuffer(allocate, DataBuffer.Type.FLOAT, 10);
        float sum = Nd4j.create(buff).sumNumber().floatValue();
        assertEquals(55f, sum, 0.001f);
    } finally {
        // Restore the dtype even if the assertion fails, so the global FLOAT
        // setting does not leak into subsequently-run tests.
        DataTypeUtil.setDTypeForContext(initialType);
    }
}
Example usage of org.nd4j.linalg.api.buffer.DataBuffer in the nd4j project (by deeplearning4j): class DataBufferStruct, method createFromByteBuffer.
/**
 * Rebuilds a {@link DataBuffer} of the given element type and length from the raw
 * bytes of {@code bb}, copying element by element into freshly allocated direct memory.
 *
 * @param bb          source buffer holding the serialized elements
 * @param bb_pos      position within {@code bb}; NOTE(review): it is subtracted from
 *                    {@code bb.capacity()}, so it presumably measures a distance from the
 *                    END of the buffer (FlatBuffers builds buffers back-to-front) — confirm
 *                    against the caller before changing this arithmetic
 * @param type        element type of the resulting buffer (DOUBLE/FLOAT/INT/LONG handled)
 * @param length      number of elements to copy
 * @param elementSize size in bytes of one element
 * @return a new DataBuffer containing the decoded elements
 */
public static DataBuffer createFromByteBuffer(ByteBuffer bb, int bb_pos, DataBuffer.Type type, int length, int elementSize) {
// FlatBuffers data is always little-endian, regardless of the platform's native order.
bb.order(ByteOrder.LITTLE_ENDIAN);
// Destination: new direct allocation of length * elementSize bytes, offset 0.
DataBuffer ret = Nd4j.createBuffer(ByteBuffer.allocateDirect(length * elementSize), type, length, 0);
switch(type) {
case DOUBLE:
for (int i = 0; i < ret.length(); i++) {
// Absolute read at (capacity - bb_pos) + i * elementSize; see NOTE above on bb_pos.
double doubleGet = bb.getDouble(bb.capacity() - bb_pos + (i * elementSize));
ret.put(i, doubleGet);
}
break;
case FLOAT:
for (int i = 0; i < ret.length(); i++) {
float floatGet = bb.getFloat(bb.capacity() - bb_pos + (i * elementSize));
ret.put(i, floatGet);
}
break;
case INT:
for (int i = 0; i < ret.length(); i++) {
int intGet = bb.getInt(bb.capacity() - bb_pos + (i * elementSize));
ret.put(i, intGet);
}
break;
case LONG:
for (int i = 0; i < ret.length(); i++) {
long longGet = bb.getLong(bb.capacity() - bb_pos + (i * elementSize));
ret.put(i, longGet);
}
break;
}
// NOTE(review): any type not listed above falls through the switch silently and a
// zero-filled buffer is returned — confirm whether an exception would be preferable.
return ret;
}
Example usage of org.nd4j.linalg.api.buffer.DataBuffer in the nd4j project (by deeplearning4j): class DefaultOpExecutioner, method bitmapEncode.
@Override
public INDArray bitmapEncode(INDArray indArray, double threshold) {
    // Allocate an int buffer sized for the bitmap encoding of the input
    // (length / 16 ints for the payload, plus 5 extra ints of headroom),
    // then delegate to the two-target overload to fill it in.
    DataBuffer encoded = Nd4j.getDataBufferFactory().createInt(indArray.length() / 16 + 5);
    INDArray result = Nd4j.createArrayFromShapeBuffer(encoded, indArray.shapeInfoDataBuffer());
    bitmapEncode(indArray, result, threshold);
    return result;
}
Example usage of org.nd4j.linalg.api.buffer.DataBuffer in the nd4j project (by deeplearning4j): class BaseNDArray, method leverageTo.
/**
 * This method detaches INDArray from current Workspace, and attaches it to Workspace with a given Id.
 * If enforceExistence == true, and no workspace with the specified ID exists, then an {@link Nd4jNoSuchWorkspaceException}
 * is thrown. Otherwise, if enforceExistence == false and no workspace with the specified ID exists, then the current
 * INDArray is returned unmodified (same as {@link #leverage()})
 *
 * @param id ID of the workspace to leverage to
 * @param enforceExistence If true, and the specified workspace does not exist: an {@link Nd4jNoSuchWorkspaceException}
 * will be thrown.
 * @return The INDArray, leveraged to the specified workspace
 * @see #leverageTo(String)
 */
@Override
public INDArray leverageTo(String id, boolean enforceExistence) throws Nd4jNoSuchWorkspaceException {
// A detached array has no workspace to move out of.
if (!isAttached())
return this;
if (!Nd4j.getWorkspaceManager().checkIfWorkspaceExists(id)) {
if (enforceExistence) {
throw new Nd4jNoSuchWorkspaceException(id);
} else {
// Best-effort mode: leave the array where it is.
return this;
}
}
MemoryWorkspace current = Nd4j.getMemoryManager().getCurrentWorkspace();
MemoryWorkspace target = Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(id);
// Already in (or owned by) the requested workspace: nothing to do.
if (current == target)
return this;
if (this.data.getParentWorkspace() == target)
return this;
// Temporarily make the target workspace current so the copy below is allocated inside it.
Nd4j.getMemoryManager().setCurrentWorkspace(target);
INDArray copy = null;
if (!this.isView()) {
// Flush any pending ops before touching the raw buffer, then memcpy the
// contiguous data into a buffer allocated in the target workspace.
Nd4j.getExecutioner().commit();
DataBuffer buffer = Nd4j.createBuffer(this.lengthLong(), false);
Nd4j.getMemoryManager().memcpy(buffer, this.data());
copy = Nd4j.createArrayFromShapeBuffer(buffer, this.shapeInfoDataBuffer());
} else {
// Views are not contiguous, so a bulk memcpy is unsafe; dup() materializes
// a dense copy (allocated in the target workspace, which is current here).
copy = this.dup(this.ordering());
Nd4j.getExecutioner().commit();
}
// Restore the caller's workspace before returning.
Nd4j.getMemoryManager().setCurrentWorkspace(current);
return copy;
}
Example usage of org.nd4j.linalg.api.buffer.DataBuffer in the nd4j project (by deeplearning4j): class BaseNDArray, method leverage.
/**
 * This method detaches INDArray from current Workspace, and attaches it to Workspace above, if any.
 * <p>
 * PLEASE NOTE: If this INDArray instance is NOT attached - it will be returned unmodified.
 * PLEASE NOTE: If current Workspace is the top-tier one, effect will be equal to detach() call - detached copy will be returned
 *
 * @return this array if no move was needed, otherwise a copy allocated in the parent workspace
 */
@Override
public INDArray leverage() {
// Detached arrays are returned as-is.
if (!isAttached())
return this;
MemoryWorkspace workspace = Nd4j.getMemoryManager().getCurrentWorkspace();
if (workspace == null) {
return this.detach();
}
MemoryWorkspace parentWorkspace = workspace.getParentWorkspace();
// Already owned by the parent workspace: nothing to do.
if (this.data.getParentWorkspace() == parentWorkspace)
return this;
// if there's no parent ws - just detach
if (parentWorkspace == null)
return this.detach();
else {
// Flush pending ops before switching workspaces.
Nd4j.getExecutioner().commit();
// temporary set parent ws as current ws, so the copy is allocated inside it
Nd4j.getMemoryManager().setCurrentWorkspace(parentWorkspace);
INDArray copy = null;
if (!this.isView()) {
// Contiguous data: commit, then memcpy into a buffer owned by the parent workspace.
Nd4j.getExecutioner().commit();
DataBuffer buffer = Nd4j.createBuffer(this.lengthLong(), false);
Nd4j.getMemoryManager().memcpy(buffer, this.data());
copy = Nd4j.createArrayFromShapeBuffer(buffer, this.shapeInfoDataBuffer());
} else {
// Views are not contiguous; dup() materializes a dense copy in the parent workspace.
copy = this.dup(this.ordering());
Nd4j.getExecutioner().commit();
}
// restore current ws
Nd4j.getMemoryManager().setCurrentWorkspace(workspace);
return copy;
}
}
Aggregations