Search in sources :

Example 36 with MemoryWorkspace

use of org.nd4j.linalg.api.memory.MemoryWorkspace in project nd4j by deeplearning4j.

The following example is taken from the class BaseCudaDataBuffer, method read.

/**
 * Deserializes this buffer's contents from {@code s}, allocating (or reusing)
 * host memory through the JITA {@code AtomicAllocator}, then marking the host
 * copy as freshly written and synchronizing it to the device.
 *
 * Stream layout (matching the corresponding write): UTF allocation mode
 * (read and discarded), int length, UTF element-type name, then the raw values.
 *
 * Values from the stream are converted to the JVM-global data type when they
 * differ — except INT buffers, which keep their dtype across ser/de.
 */
@Override
public void read(DataInputStream s) {
    try {
        // log.info("Restoring CUDA databuffer");
        // skip allocationMode
        s.readUTF();
        allocationMode = AllocationMode.JAVACPP;
        int locLength = s.readInt();
        // Reuse the current host buffer only when the incoming length matches
        // and an indexer is already attached; otherwise we must allocate anew.
        boolean reallocate = locLength != length || indexer == null;
        length = locLength;
        Type t = Type.valueOf(s.readUTF());
        // log.info("Restoring buffer ["+t+"] of length ["+ length+"]");
        if (globalType == null && Nd4j.dataType() != null) {
            globalType = Nd4j.dataType();
        }
        // Narrowing (e.g. a DOUBLE stream loaded under a FLOAT global type) loses precision.
        if (t != globalType && t != Type.INT && Nd4j.sizeOfDataType(globalType) < Nd4j.sizeOfDataType(t)) {
            log.warn("Loading a data stream with opType different from what is set globally. Expect precision loss");
            if (globalType == Type.INT)
                log.warn("Int to float/double widening UNSUPPORTED!!!");
        }
        if (t == Type.COMPRESSED) {
            // Compressed payloads carry no raw values here; record the type and
            // return early (note: this skips the device sync at the bottom).
            type = t;
            return;
        } else if (t == Type.INT || globalType == Type.INT) {
            this.elementSize = 4;
            // NOTE(review): unlike the DOUBLE branch below, this allocates
            // unconditionally, ignoring the 'reallocate' flag — confirm whether
            // an existing allocation should be reused here as well.
            this.allocationPoint = AtomicAllocator.getInstance().allocateMemory(this, new AllocationShape(length, elementSize, t), false);
            this.trackingPoint = allocationPoint.getObjectId();
            // we keep int buffer's dtype after ser/de
            this.type = t;
            this.pointer = new CudaPointer(allocationPoint.getPointers().getHostPointer(), length).asIntPointer();
            indexer = IntIndexer.create((IntPointer) pointer);
            IntIndexer Iindexer = (IntIndexer) indexer;
            // NOTE(review): 'array' is never written or read below (only the
            // indexer is used); it appears to be dead code left from the
            // commented-out assignment in the loop.
            int[] array = new int[(int) length];
            for (int i = 0; i < length(); i++) {
                if (t == Type.INT)
                    // array[i] = s.readInt();
                    Iindexer.put(i, s.readInt());
                else if (t == Type.DOUBLE)
                    Iindexer.put(i, (int) s.readDouble());
                else if (t == Type.FLOAT)
                    Iindexer.put(i, (int) s.readFloat());
                else if (t == Type.HALF)
                    Iindexer.put(i, (int) toFloat((int) s.readShort()));
            }
            // Mark the host side as the freshest copy of the data.
            allocationPoint.tickHostWrite();
        } else if (globalType == Type.DOUBLE) {
            this.elementSize = 8;
            if (reallocate) {
                MemoryWorkspace workspace = Nd4j.getMemoryManager().getCurrentWorkspace();
                // NOTE(review): attaching only when the current workspace IS a
                // DummyWorkspace looks inverted — DummyWorkspace typically
                // denotes "no active workspace". Confirm intent. Also note the
                // FLOAT/HALF branches below have no equivalent attach logic.
                if (workspace != null && (workspace instanceof DummyWorkspace)) {
                    this.attached = true;
                    this.parentWorkspace = workspace;
                    workspaceGenerationId = workspace.getGenerationId();
                }
                this.allocationPoint = AtomicAllocator.getInstance().allocateMemory(this, new AllocationShape(length, elementSize, globalType), false);
                // allocationPoint.attachBuffer(this);
                this.trackingPoint = allocationPoint.getObjectId();
                this.pointer = new CudaPointer(allocationPoint.getPointers().getHostPointer(), length).asDoublePointer();
                indexer = DoubleIndexer.create((DoublePointer) pointer);
            }
            DoubleIndexer Dindexer = (DoubleIndexer) indexer;
            for (int i = 0; i < length(); i++) {
                if (t == Type.DOUBLE)
                    Dindexer.put(i, s.readDouble());
                else if (t == Type.FLOAT)
                    Dindexer.put(i, (double) s.readFloat());
                else if (t == Type.HALF)
                    Dindexer.put(i, (double) toFloat((int) s.readShort()));
            }
            allocationPoint.tickHostWrite();
        } else if (globalType == Type.FLOAT) {
            this.elementSize = 4;
            if (reallocate) {
                this.allocationPoint = AtomicAllocator.getInstance().allocateMemory(this, new AllocationShape(length, elementSize, dataType()), false);
                this.trackingPoint = allocationPoint.getObjectId();
                this.pointer = new CudaPointer(allocationPoint.getPointers().getHostPointer(), length).asFloatPointer();
                indexer = FloatIndexer.create((FloatPointer) pointer);
            }
            FloatIndexer Findexer = (FloatIndexer) indexer;
            for (int i = 0; i < length; i++) {
                if (t == Type.DOUBLE)
                    Findexer.put(i, (float) s.readDouble());
                else if (t == Type.FLOAT)
                    Findexer.put(i, s.readFloat());
                else if (t == Type.HALF) {
                    Findexer.put(i, toFloat((int) s.readShort()));
                }
            }
            allocationPoint.tickHostWrite();
        } else if (globalType == Type.HALF) {
            this.elementSize = 2;
            if (reallocate) {
                this.allocationPoint = AtomicAllocator.getInstance().allocateMemory(this, new AllocationShape(length, elementSize, dataType()), false);
                this.trackingPoint = allocationPoint.getObjectId();
                this.pointer = new CudaPointer(allocationPoint.getPointers().getHostPointer(), length).asShortPointer();
                indexer = HalfIndexer.create((ShortPointer) this.pointer);
            }
            HalfIndexer Hindexer = (HalfIndexer) indexer;
            for (int i = 0; i < length; i++) {
                if (t == Type.DOUBLE)
                    Hindexer.put(i, (float) s.readDouble());
                else if (t == Type.FLOAT)
                    Hindexer.put(i, s.readFloat());
                else if (t == Type.HALF) {
                    Hindexer.put(i, toFloat((int) s.readShort()));
                }
            }
            // for HALF & HALF2 datatype we just tag data as fresh on host
            allocationPoint.tickHostWrite();
        } else
            throw new IllegalStateException("Unknown dataType: [" + t.toString() + "]");
    /*
            this.wrappedBuffer = this.pointer.asByteBuffer();
            this.wrappedBuffer.order(ByteOrder.nativeOrder());
            */
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    // Push the freshly written host data to the device copy.
    AtomicAllocator.getInstance().getFlowController().synchronizeToDevice(allocationPoint);
// allocator.synchronizeHostData(this);
}
Also used : DummyWorkspace(org.nd4j.linalg.memory.abstracts.DummyWorkspace) AllocationShape(org.nd4j.jita.allocator.impl.AllocationShape) AllocationPoint(org.nd4j.jita.allocator.impl.AllocationPoint) IOException(java.io.IOException) MemoryWorkspace(org.nd4j.linalg.api.memory.MemoryWorkspace) CudaPointer(org.nd4j.jita.allocator.pointers.CudaPointer)

Example 37 with MemoryWorkspace

use of org.nd4j.linalg.api.memory.MemoryWorkspace in project nd4j by deeplearning4j.

The following example is taken from the class CpuWorkspaceManager, method createNewWorkspace.

/**
 * Creates a brand-new CPU workspace using this manager's default configuration,
 * registers it in the per-thread backing map under its own id, and tracks it.
 *
 * @return the newly created workspace
 */
@Override
public MemoryWorkspace createNewWorkspace() {
    ensureThreadExistense();

    // Build the workspace from the manager-wide default configuration.
    final MemoryWorkspace created = new CpuWorkspace(defaultConfiguration);

    // Register it for the current thread and keep a tracked reference to it.
    backingMap.get().put(created.getId(), created);
    pickReference(created);

    return created;
}
Also used : MemoryWorkspace(org.nd4j.linalg.api.memory.MemoryWorkspace)

Example 38 with MemoryWorkspace

use of org.nd4j.linalg.api.memory.MemoryWorkspace in project nd4j by deeplearning4j.

The following example is taken from the class CpuWorkspaceManager, method getWorkspaceForCurrentThread.

/**
 * Returns the workspace registered under {@code id} for the current thread,
 * lazily creating (and tracking) one with the given configuration on first use.
 *
 * @param configuration configuration applied only when the workspace is created
 * @param id identifier of the workspace within the current thread's map
 * @return the existing or newly created workspace
 */
@Override
public MemoryWorkspace getWorkspaceForCurrentThread(@NonNull WorkspaceConfiguration configuration, @NonNull String id) {
    ensureThreadExistense();

    final MemoryWorkspace existing = backingMap.get().get(id);
    if (existing != null)
        return existing;

    // First request for this id on this thread: create, register, and track it.
    final MemoryWorkspace created = new CpuWorkspace(configuration, id);
    backingMap.get().put(id, created);
    pickReference(created);
    return created;
}
Also used : MemoryWorkspace(org.nd4j.linalg.api.memory.MemoryWorkspace)

Example 39 with MemoryWorkspace

use of org.nd4j.linalg.api.memory.MemoryWorkspace in project nd4j by deeplearning4j.

The following example is taken from the class FloatDataBufferTest, method testReallocationWorkspace.

/**
 * Verifies that a float buffer created inside an active workspace is attached
 * to it, and that reallocating the buffer to a larger capacity preserves the
 * original contents.
 */
@Test
public void testReallocationWorkspace() {
    // Strict 10 MB workspace with no learning: sizes are fixed up front.
    WorkspaceConfiguration initialConfig = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L).policyAllocation(AllocationPolicy.STRICT).policyLearning(LearningPolicy.NONE).build();
    MemoryWorkspace workspace = Nd4j.getWorkspaceManager().getAndActivateWorkspace(initialConfig, "SOME_ID");

    DataBuffer buffer = Nd4j.createBuffer(new float[] { 1, 2, 3, 4 });
    assertTrue(buffer.isAttached());

    float[] contentBefore = buffer.asFloat();
    assertEquals(4, buffer.capacity());

    // Growing the buffer must carry the existing values over.
    buffer.reallocate(6);
    assertEquals(6, buffer.capacity());
    float[] contentAfter = buffer.asFloat();
    assertArrayEquals(contentBefore, contentAfter, 1e-4F);

    workspace.close();
}
Also used : WorkspaceConfiguration(org.nd4j.linalg.api.memory.conf.WorkspaceConfiguration) MemoryWorkspace(org.nd4j.linalg.api.memory.MemoryWorkspace) Test(org.junit.Test) BaseNd4jTest(org.nd4j.linalg.BaseNd4jTest)

Example 40 with MemoryWorkspace

use of org.nd4j.linalg.api.memory.MemoryWorkspace in project nd4j by deeplearning4j.

The following example is taken from the class IntDataBufferTests, method testReallocationWorkspace.

/**
 * Verifies that an int buffer created inside an active workspace is attached
 * to it, and that reallocating the buffer to a larger capacity preserves the
 * original contents.
 */
@Test
public void testReallocationWorkspace() {
    // Strict 10 MB workspace with no learning: sizes are fixed up front.
    WorkspaceConfiguration initialConfig = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L).policyAllocation(AllocationPolicy.STRICT).policyLearning(LearningPolicy.NONE).build();
    MemoryWorkspace workspace = Nd4j.getWorkspaceManager().getAndActivateWorkspace(initialConfig, "SOME_ID");

    DataBuffer buffer = Nd4j.createBuffer(new int[] { 1, 2, 3, 4 });
    int[] contentBefore = buffer.asInt();
    assertTrue(buffer.isAttached());
    assertEquals(4, buffer.capacity());

    // Growing the buffer must carry the existing values over.
    buffer.reallocate(6);
    assertEquals(6, buffer.capacity());
    assertArrayEquals(contentBefore, buffer.asInt());

    workspace.close();
}
Also used : WorkspaceConfiguration(org.nd4j.linalg.api.memory.conf.WorkspaceConfiguration) MemoryWorkspace(org.nd4j.linalg.api.memory.MemoryWorkspace) Test(org.junit.Test) BaseNd4jTest(org.nd4j.linalg.BaseNd4jTest)

Aggregations

MemoryWorkspace (org.nd4j.linalg.api.memory.MemoryWorkspace)62 Test (org.junit.Test)39 BaseNd4jTest (org.nd4j.linalg.BaseNd4jTest)35 INDArray (org.nd4j.linalg.api.ndarray.INDArray)35 Nd4jWorkspace (org.nd4j.linalg.memory.abstracts.Nd4jWorkspace)18 WorkspaceConfiguration (org.nd4j.linalg.api.memory.conf.WorkspaceConfiguration)14 DataBuffer (org.nd4j.linalg.api.buffer.DataBuffer)6 AtomicLong (java.util.concurrent.atomic.AtomicLong)5 AllocationPoint (org.nd4j.jita.allocator.impl.AllocationPoint)4 ND4JIllegalStateException (org.nd4j.linalg.exception.ND4JIllegalStateException)3 CudaContext (org.nd4j.linalg.jcublas.context.CudaContext)3 ByteArrayInputStream (java.io.ByteArrayInputStream)2 ByteArrayOutputStream (java.io.ByteArrayOutputStream)2 DataInputStream (java.io.DataInputStream)2 DataOutputStream (java.io.DataOutputStream)2 File (java.io.File)2 ArrayList (java.util.ArrayList)2 Ignore (org.junit.Ignore)2 IOException (java.io.IOException)1 CopyOnWriteArrayList (java.util.concurrent.CopyOnWriteArrayList)1