Usage example of org.nd4j.linalg.compression.CompressionDescriptor in project nd4j (by deeplearning4j): class Uint8, method compressPointer.
@Override
protected CompressedDataBuffer compressPointer(DataBuffer.TypeEx srcType, Pointer srcPointer, int length, int elementSize) {
// Destination storage: one byte per element, since the target encoding is UINT8.
BytePointer target = new BytePointer(length);
// Record everything a decompressor needs to restore the original buffer.
CompressionDescriptor meta = new CompressionDescriptor();
meta.setNumberOfElements(length);
meta.setOriginalElementSize(elementSize);
meta.setOriginalLength(length * elementSize);
meta.setCompressedLength(length);
meta.setCompressionAlgorithm(getDescriptor());
meta.setCompressionType(getCompressionType());
// Convert the source elements into their UINT8 representation, writing into target.
Nd4j.getNDArrayFactory().convertDataEx(srcType, srcPointer, DataBuffer.TypeEx.UINT8, target, length);
return new CompressedDataBuffer(target, meta);
}
Usage example of org.nd4j.linalg.compression.CompressionDescriptor in project nd4j (by deeplearning4j): class BinarySerde, method toArrayAndByteBuffer.
/**
 * Reads an ndarray out of an existing byte buffer, starting at the given offset.
 * The serialized layout is: rank (int), data-type ordinal (int), shape info
 * (rank-dependent number of ints), then either raw data or a compression
 * descriptor followed by compressed data.
 *
 * @param buffer the buffer to read from; must not be null
 * @param offset the position within the buffer at which the serialized array begins
 * @return a pair of the deserialized array and the (advanced) byte buffer
 * @throws NullPointerException if {@code buffer} is null
 * @throws IllegalStateException if the serialized rank is negative (corrupt data)
 */
public static Pair<INDArray, ByteBuffer> toArrayAndByteBuffer(ByteBuffer buffer, int offset) {
// BUGFIX: the original ternary read "buffer == null ? buffer.array()…", which
// dereferenced buffer in exactly the branch where it was null — a null argument
// could only ever throw a bare NullPointerException from buffer.array().
// (It may have been intended to copy heap buffers into a direct buffer via
// "!buffer.isDirect()" — TODO confirm against callers before widening.)
// Fail fast with a clear message instead; the non-null path is unchanged.
if (buffer == null)
throw new NullPointerException("buffer must not be null");
ByteBuffer byteBuffer = buffer.order(ByteOrder.nativeOrder());
// bump the byte buffer to the proper position
byteBuffer.position(offset);
int rank = byteBuffer.getInt();
if (rank < 0)
throw new IllegalStateException("Found negative integer. Corrupt serialization?");
// get the shape buffer length to create the shape information buffer
int shapeBufferLength = Shape.shapeInfoLength(rank);
// create the ndarray shape information
DataBuffer shapeBuff = Nd4j.createBufferDetached(new int[shapeBufferLength]);
// compute the databuffer opType from the index
DataBuffer.Type type = DataBuffer.Type.values()[byteBuffer.getInt()];
for (int i = 0; i < shapeBufferLength; i++) {
shapeBuff.put(i, byteBuffer.getInt());
}
// after the rank, data opType, shape buffer (of length shape buffer length) * sizeof(int)
if (type != DataBuffer.Type.COMPRESSED) {
ByteBuffer slice = byteBuffer.slice();
// wrap the data buffer for the last bit
DataBuffer buff = Nd4j.createBuffer(slice, type, Shape.length(shapeBuff));
// advance past the data
int position = byteBuffer.position() + (buff.getElementSize() * (int) buff.length());
byteBuffer.position(position);
// create the final array
// TODO: see how to avoid dup here
INDArray arr = Nd4j.createArrayFromShapeBuffer(buff.dup(), shapeBuff.dup());
return Pair.of(arr, byteBuffer);
} else {
CompressionDescriptor compressionDescriptor = CompressionDescriptor.fromByteBuffer(byteBuffer);
ByteBuffer slice = byteBuffer.slice();
// ensure that we only deal with the slice of the buffer that is actually the data
BytePointer byteBufferPointer = new BytePointer(slice);
// create a compressed array based on the rest of the data left in the buffer
CompressedDataBuffer compressedDataBuffer = new CompressedDataBuffer(byteBufferPointer, compressionDescriptor);
// TODO: see how to avoid dup()
INDArray arr = Nd4j.createArrayFromShapeBuffer(compressedDataBuffer.dup(), shapeBuff.dup());
// advance past the data
int compressLength = (int) compressionDescriptor.getCompressedLength();
byteBuffer.position(byteBuffer.position() + compressLength);
return Pair.of(arr, byteBuffer);
}
}
Usage example of org.nd4j.linalg.compression.CompressionDescriptor in project nd4j (by deeplearning4j): class BinarySerde, method byteBufferSizeFor.
/**
 * Computes the byte buffer size required to serialize the given ndarray.
 * This is an auxiliary method for determining how large a buffer to allocate
 * when sending an ndarray via the aeron media driver.
 *
 * Uncompressed layout:
 * 2 ints (rank of the array, ordinal of the data buffer's data opType),
 * followed by the shape information and then the data buffer.
 *
 * Compressed layout:
 * the same 2 ints, followed by the shape information, the codec
 * (compression descriptor) information, and then the data buffer.
 *
 * @param arr the array to compute the size for
 * @return the size of the byte buffer that was allocated
 */
public static int byteBufferSizeFor(INDArray arr) {
// rank (4 bytes) + data-type ordinal (4 bytes) always lead the payload
final int headerBytes = 2 * 4;
ByteBuffer dataBytes = arr.data().pointer().asByteBuffer().order(ByteOrder.nativeOrder());
ByteBuffer shapeBytes = arr.shapeInfoDataBuffer().pointer().asByteBuffer().order(ByteOrder.nativeOrder());
if (arr.isCompressed()) {
// compressed arrays additionally carry their codec descriptor
CompressionDescriptor descriptor = ((CompressedDataBuffer) arr.data()).getCompressionDescriptor();
ByteBuffer codecBytes = descriptor.toByteBuffer();
return headerBytes + dataBytes.limit() + shapeBytes.limit() + codecBytes.limit();
}
return headerBytes + dataBytes.limit() + shapeBytes.limit();
}
Usage example of org.nd4j.linalg.compression.CompressionDescriptor in project nd4j (by deeplearning4j): class JCublasNDArrayFactory, method convertDataEx.
@Override
public DataBuffer convertDataEx(DataBuffer.TypeEx typeSrc, DataBuffer source, DataBuffer.TypeEx typeDst) {
// Element width of the destination type, derived from the TypeEx ordinal:
// 0..2 -> 1 byte, 3..5 -> 2 bytes, 6 -> 4 bytes, 7 -> 8 bytes.
final int dstOrdinal = typeDst.ordinal();
final int elementSize;
if (dstOrdinal <= 2)
elementSize = 1;
else if (dstOrdinal <= 5)
elementSize = 2;
else if (dstOrdinal == 6)
elementSize = 4;
else if (dstOrdinal == 7)
elementSize = 8;
else
throw new UnsupportedOperationException("Unknown target TypeEx: " + typeDst.name());
// flushQueue should be blocking here, because typeConversion happens on cpu side
Nd4j.getExecutioner().commit();
// make sure the host copy of an uncompressed source is up to date before converting
if (!(source instanceof CompressedDataBuffer))
AtomicAllocator.getInstance().synchronizeHostData(source);
final DataBuffer result;
if (CompressionUtils.goingToCompress(typeSrc, typeDst)) {
// compression path: wrap a raw byte area plus a descriptor of the original data
BytePointer raw = new BytePointer(source.length() * elementSize);
CompressionDescriptor meta = new CompressionDescriptor(source, typeDst.name());
meta.setCompressionType(CompressionType.LOSSY);
meta.setCompressedLength(source.length() * elementSize);
result = new CompressedDataBuffer(raw, meta);
} else {
// decompression path: size the target from the stored descriptor
CompressionDescriptor meta = ((CompressedDataBuffer) source).getCompressionDescriptor();
result = Nd4j.createBuffer(meta.getNumberOfElements(), false);
AllocationPoint point = AtomicAllocator.getInstance().getAllocationPoint(result);
point.tickHostWrite();
}
convertDataEx(typeSrc, source, typeDst, result);
return result;
}
Usage example of org.nd4j.linalg.compression.CompressionDescriptor in project nd4j (by deeplearning4j): class CpuNDArrayFactory, method convertDataEx.
@Override
public DataBuffer convertDataEx(DataBuffer.TypeEx typeSrc, DataBuffer source, DataBuffer.TypeEx typeDst) {
int elementSize = destinationElementSize(typeDst);
final DataBuffer target;
if (CompressionUtils.goingToCompress(typeSrc, typeDst)) {
// compression path: allocate raw bytes and describe the original buffer
BytePointer raw = new BytePointer(source.length() * elementSize);
CompressionDescriptor meta = new CompressionDescriptor(source, typeDst.name());
meta.setCompressionType(CompressionType.LOSSY);
meta.setCompressedLength(source.length() * elementSize);
target = new CompressedDataBuffer(raw, meta);
} else {
// decompression path: size the target from the stored descriptor
CompressionDescriptor meta = ((CompressedDataBuffer) source).getCompressionDescriptor();
target = Nd4j.createBuffer(meta.getNumberOfElements(), true);
}
convertDataEx(typeSrc, source, typeDst, target);
return target;
}

/**
 * Maps a destination {@code TypeEx} to its element width in bytes, based on
 * its ordinal: 0..2 -> 1 byte, 3..5 -> 2 bytes, 6 -> 4 bytes, 7 -> 8 bytes.
 *
 * @throws UnsupportedOperationException for any other ordinal
 */
private static int destinationElementSize(DataBuffer.TypeEx typeDst) {
int ord = typeDst.ordinal();
if (ord <= 2)
return 1;
if (ord <= 5)
return 2;
if (ord == 6)
return 4;
if (ord == 7)
return 8;
throw new UnsupportedOperationException("Unknown target TypeEx: " + typeDst.name());
}
Aggregations