
Example 1 with SummaryType

use of org.deeplearning4j.ui.stats.api.SummaryType in project deeplearning4j by deeplearning4j.

the class SbeStatsReport method entrySize.

private int entrySize(List<String> entryNames, StatsType... statsTypes) {
    int bufferSize = 0;
    for (String s : entryNames) {
        //For each parameter: MAY also have a number of summary stats (mean, stdev etc), and histograms (both as nested groups)
        int summaryStatsCount = 0;
        for (StatsType statsType : statsTypes) {
            //Parameters, Gradients, updates, activations
            for (SummaryType summaryType : SummaryType.values()) {
                //Mean, stdev, MM
                Map<String, Double> map = mapForTypes(statsType, summaryType);
                if (map == null)
                    continue;
                if (map.containsKey(s))
                    summaryStatsCount++;
            }
        }
        //Each summary stat value: StatsType (uint8), SummaryType (uint8), value (double) -> 1+1+8 = 10 bytes
        bufferSize += summaryStatsCount * 10;
        //Histograms for this parameter
        int nHistogramsThisParam = 0;
        if (histograms != null && histograms.size() > 0) {
            for (Map<String, Histogram> map : histograms.values()) {
                if (map != null && map.containsKey(s))
                    nHistogramsThisParam++;
            }
        }
        //For each histogram: StatsType (uint8) + 2x double + int32 -> 1 + 2*8 + 4 = 21 bytes PLUS counts group header (4 bytes) -> 25 bytes fixed per histogram
        bufferSize += 25 * nHistogramsThisParam;
        //PLUS, the number of count values, given by nBins...
        int nBinCountEntries = 0;
        for (StatsType statsType : statsTypes) {
            if (histograms == null || !histograms.containsKey(statsType))
                continue;
            Map<String, Histogram> map = histograms.get(statsType);
            if (map != null && map.containsKey(s)) {
                //If it doesn't: assume 0 count...
                nBinCountEntries += map.get(s).getNBins();
            }
        }
        //Each entry: uint32 -> 4 bytes
        bufferSize += 4 * nBinCountEntries;
    }
    return bufferSize;
}
Also used : SummaryType(org.deeplearning4j.ui.stats.api.SummaryType) Histogram(org.deeplearning4j.ui.stats.api.Histogram) StatsType(org.deeplearning4j.ui.stats.api.StatsType)
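
To make the sizing arithmetic above concrete, here is a small, self-contained sketch (not part of SbeStatsReport) that reproduces the per-entry byte counts from the comments in entrySize: 10 bytes per summary-stat value, 25 fixed bytes per histogram, and 4 bytes per histogram bin count. The parameter name and the counts used in main are purely illustrative.

public class EntrySizeArithmetic {
    //Per-entry sizing, mirroring the comments in entrySize():
    //  summary stat value: StatsType (uint8) + SummaryType (uint8) + value (double) -> 10 bytes
    //  histogram: StatsType (uint8) + 2x double + int32 + counts group header (4 bytes) -> 25 bytes
    //  each histogram bin count: uint32 -> 4 bytes
    static int sizeForEntry(int summaryStatsCount, int nHistograms, int totalBinCounts) {
        return summaryStatsCount * 10 + nHistograms * 25 + totalBinCounts * 4;
    }

    public static void main(String[] args) {
        //Hypothetical parameter "0_W": mean, stdev and mean magnitude recorded for parameters,
        //gradients and updates (9 summary stats), plus 3 histograms of 20 bins each:
        //9*10 + 3*25 + 60*4 = 405 bytes for this entry
        System.out.println(sizeForEntry(9, 3, 60));
    }
}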

Example 2 with SummaryType

use of org.deeplearning4j.ui.stats.api.SummaryType in project deeplearning4j by deeplearning4j.

the class SbeStatsReport method encode.

@Override
public void encode(MutableDirectBuffer buffer) {
    MessageHeaderEncoder enc = new MessageHeaderEncoder();
    UpdateEncoder ue = new UpdateEncoder();
    enc.wrap(buffer, 0).blockLength(ue.sbeBlockLength()).templateId(ue.sbeTemplateId()).schemaId(ue.sbeSchemaId()).version(ue.sbeSchemaVersion());
    //Expect 8 bytes
    int offset = enc.encodedLength();
    ue.wrap(buffer, offset);
    //Fixed length fields: always encoded
    //TODO
    ue.time(timeStamp).deltaTime(0).iterationCount(iterationCount).fieldsPresent()
                    .score(scorePresent)
                    .memoryUse(memoryUsePresent)
                    .performance(performanceStatsPresent)
                    .garbageCollection(gcStats != null && !gcStats.isEmpty())
                    .histogramParameters(histograms != null && histograms.containsKey(StatsType.Parameters))
                    //Note: histogramActivations is set twice in this chain (first with the StatsType.Gradients check,
                    // then with StatsType.Activations); the later call overwrites the earlier one, so the flag from
                    // the Gradients check is effectively dropped
                    .histogramActivations(histograms != null && histograms.containsKey(StatsType.Gradients))
                    .histogramUpdates(histograms != null && histograms.containsKey(StatsType.Updates))
                    .histogramActivations(histograms != null && histograms.containsKey(StatsType.Activations))
                    .meanParameters(meanValues != null && meanValues.containsKey(StatsType.Parameters))
                    .meanGradients(meanValues != null && meanValues.containsKey(StatsType.Gradients))
                    .meanUpdates(meanValues != null && meanValues.containsKey(StatsType.Updates))
                    .meanActivations(meanValues != null && meanValues.containsKey(StatsType.Activations))
                    .meanMagnitudeParameters(meanMagnitudeValues != null && meanMagnitudeValues.containsKey(StatsType.Parameters))
                    .meanMagnitudeGradients(meanMagnitudeValues != null && meanMagnitudeValues.containsKey(StatsType.Gradients))
                    .meanMagnitudeUpdates(meanMagnitudeValues != null && meanMagnitudeValues.containsKey(StatsType.Updates))
                    .meanMagnitudeActivations(meanMagnitudeValues != null && meanMagnitudeValues.containsKey(StatsType.Activations))
                    .learningRatesPresent(learningRatesByParam != null)
                    .dataSetMetaDataPresent(hasDataSetMetaData());
    ue.statsCollectionDuration(statsCollectionDurationMs).score(score);
    int memoryUseCount;
    if (!memoryUsePresent) {
        memoryUseCount = 0;
    } else {
        memoryUseCount = 4 + (deviceCurrentBytes == null ? 0 : deviceCurrentBytes.length) + (deviceMaxBytes == null ? 0 : deviceMaxBytes.length);
    }
    UpdateEncoder.MemoryUseEncoder mue = ue.memoryUseCount(memoryUseCount);
    if (memoryUsePresent) {
        mue.next().memoryType(MemoryType.JvmCurrent).memoryBytes(jvmCurrentBytes).next().memoryType(MemoryType.JvmMax).memoryBytes(jvmMaxBytes).next().memoryType(MemoryType.OffHeapCurrent).memoryBytes(offHeapCurrentBytes).next().memoryType(MemoryType.OffHeapMax).memoryBytes(offHeapMaxBytes);
        if (deviceCurrentBytes != null) {
            for (int i = 0; i < deviceCurrentBytes.length; i++) {
                mue.next().memoryType(MemoryType.DeviceCurrent).memoryBytes(deviceCurrentBytes[i]);
            }
        }
        if (deviceMaxBytes != null) {
            for (int i = 0; i < deviceMaxBytes.length; i++) {
                mue.next().memoryType(MemoryType.DeviceMax).memoryBytes(deviceMaxBytes[i]);
            }
        }
    }
    UpdateEncoder.PerformanceEncoder pe = ue.performanceCount(performanceStatsPresent ? 1 : 0);
    if (performanceStatsPresent) {
        pe.next().totalRuntimeMs(totalRuntimeMs).totalExamples(totalExamples).totalMinibatches(totalMinibatches).examplesPerSecond((float) examplesPerSecond).minibatchesPerSecond((float) minibatchesPerSecond);
    }
    UpdateEncoder.GcStatsEncoder gce = ue.gcStatsCount(gcStats == null || gcStats.size() == 0 ? 0 : gcStats.size());
    List<byte[]> gcStatsLabelBytes = null;
    if (gcStats != null && gcStats.size() > 0) {
        gcStatsLabelBytes = new ArrayList<>();
        for (GCStats stats : gcStats) {
            byte[] nameAsBytes = SbeUtil.toBytes(true, stats.gcName);
            gcStatsLabelBytes.add(nameAsBytes);
        }
    }
    if (gcStats != null && gcStats.size() > 0) {
        int i = 0;
        for (GCStats g : gcStats) {
            byte[] gcLabelBytes = gcStatsLabelBytes.get(i++);
            gce.next().deltaGCCount(g.deltaGCCount).deltaGCTimeMs(g.deltaGCTime).putGcName(gcLabelBytes, 0, gcLabelBytes.length);
        }
    }
    //Param names
    List<String> paramNames = getParamNames();
    UpdateEncoder.ParamNamesEncoder pne = ue.paramNamesCount(paramNames.size());
    for (String s : paramNames) {
        pne.next().paramName(s);
    }
    //Layer names
    List<String> layerNames = getlayerNames();
    UpdateEncoder.LayerNamesEncoder lne = ue.layerNamesCount(layerNames.size());
    for (String s : layerNames) {
        lne.next().layerName(s);
    }
    // +++++ Per Parameter Stats +++++
    UpdateEncoder.PerParameterStatsEncoder ppe = ue.perParameterStatsCount(paramNames.size() + layerNames.size());
    StatsType[] st = new StatsType[] { StatsType.Parameters, StatsType.Gradients, StatsType.Updates };
    for (String s : paramNames) {
        ppe = ppe.next();
        float lr = 0.0f;
        if (learningRatesByParam != null && learningRatesByParam.containsKey(s)) {
            lr = learningRatesByParam.get(s).floatValue();
        }
        ppe.learningRate(lr);
        int summaryStatsCount = 0;
        for (StatsType statsType : st) {
            //Parameters, gradients, updates
            for (SummaryType summaryType : SummaryType.values()) {
                //Mean, stdev, MM
                Map<String, Double> map = mapForTypes(statsType, summaryType);
                if (map == null || map.size() == 0)
                    continue;
                if (map.containsKey(s))
                    summaryStatsCount++;
            }
        }
        UpdateEncoder.PerParameterStatsEncoder.SummaryStatEncoder sse = ppe.summaryStatCount(summaryStatsCount);
        //Summary stats
        for (StatsType statsType : st) {
            //Parameters, gradients, updates
            for (SummaryType summaryType : SummaryType.values()) {
                //Mean, stdev, MM
                Map<String, Double> map = mapForTypes(statsType, summaryType);
                if (map == null || map.size() == 0)
                    continue;
                appendOrDefault(sse, s, statsType, summaryType, map, Double.NaN);
            }
        }
        int nHistogramsThisParam = 0;
        if (histograms != null && histograms.size() > 0) {
            for (StatsType statsType : st) {
                //Parameters, gradients, updates
                Map<String, Histogram> map = histograms.get(statsType);
                if (map == null)
                    continue;
                if (map.containsKey(s))
                    nHistogramsThisParam++;
            }
        }
        //Histograms
        UpdateEncoder.PerParameterStatsEncoder.HistogramsEncoder sshe = ppe.histogramsCount(nHistogramsThisParam);
        if (nHistogramsThisParam > 0) {
            for (StatsType statsType : st) {
                Map<String, Histogram> map = histograms.get(statsType);
                if (map == null || !map.containsKey(s))
                    continue;
                //Histogram for StatsType for this parameter
                Histogram h = map.get(s);
                double min;
                double max;
                int nBins;
                int[] binCounts;
                if (h == null) {
                    min = 0.0;
                    max = 0.0;
                    nBins = 0;
                    binCounts = null;
                } else {
                    min = h.getMin();
                    max = h.getMax();
                    nBins = h.getNBins();
                    binCounts = h.getBinCounts();
                }
                sshe = sshe.next().statType(translate(statsType)).minValue(min).maxValue(max).nBins(nBins);
                UpdateEncoder.PerParameterStatsEncoder.HistogramsEncoder.HistogramCountsEncoder histCountsEncoder = sshe.histogramCountsCount(nBins);
                for (int i = 0; i < nBins; i++) {
                    int count = (binCounts == null || binCounts.length <= i ? 0 : binCounts[i]);
                    histCountsEncoder.next().binCount(count);
                }
            }
        }
    }
    for (String s : layerNames) {
        ppe = ppe.next();
        //Not applicable
        ppe.learningRate(0.0f);
        int summaryStatsCount = 0;
        for (SummaryType summaryType : SummaryType.values()) {
            //Mean, stdev, MM
            Map<String, Double> map = mapForTypes(StatsType.Activations, summaryType);
            if (map == null || map.size() == 0)
                continue;
            if (map.containsKey(s))
                summaryStatsCount++;
        }
        UpdateEncoder.PerParameterStatsEncoder.SummaryStatEncoder sse = ppe.summaryStatCount(summaryStatsCount);
        //Summary stats
        for (SummaryType summaryType : SummaryType.values()) {
            //Mean, stdev, MM
            Map<String, Double> map = mapForTypes(StatsType.Activations, summaryType);
            if (map == null || map.size() == 0)
                continue;
            appendOrDefault(sse, s, StatsType.Activations, summaryType, map, Double.NaN);
        }
        int nHistogramsThisLayer = 0;
        if (histograms != null && histograms.size() > 0) {
            for (Map<String, Histogram> map : histograms.values()) {
                if (map != null && map.containsKey(s))
                    nHistogramsThisLayer++;
            }
        }
        //Histograms
        UpdateEncoder.PerParameterStatsEncoder.HistogramsEncoder sshe = ppe.histogramsCount(nHistogramsThisLayer);
        if (nHistogramsThisLayer > 0) {
            Map<String, Histogram> map = histograms.get(StatsType.Activations);
            if (map == null || !map.containsKey(s))
                continue;
            //Histogram (activations) for this layer
            Histogram h = map.get(s);
            double min;
            double max;
            int nBins;
            int[] binCounts;
            if (h == null) {
                min = 0.0;
                max = 0.0;
                nBins = 0;
                binCounts = null;
            } else {
                min = h.getMin();
                max = h.getMax();
                nBins = h.getNBins();
                binCounts = h.getBinCounts();
            }
            sshe = sshe.next().statType(translate(StatsType.Activations)).minValue(min).maxValue(max).nBins(nBins);
            UpdateEncoder.PerParameterStatsEncoder.HistogramsEncoder.HistogramCountsEncoder histCountsEncoder = sshe.histogramCountsCount(nBins);
            for (int i = 0; i < nBins; i++) {
                int count = (binCounts == null || binCounts.length <= i ? 0 : binCounts[i]);
                histCountsEncoder.next().binCount(count);
            }
        }
    }
    // +++ DataSet MetaData +++
    UpdateEncoder.DataSetMetaDataBytesEncoder metaEnc = ue.dataSetMetaDataBytesCount(dataSetMetaData != null ? dataSetMetaData.size() : 0);
    if (dataSetMetaData != null && dataSetMetaData.size() > 0) {
        for (byte[] b : dataSetMetaData) {
            metaEnc = metaEnc.next();
            UpdateEncoder.DataSetMetaDataBytesEncoder.MetaDataBytesEncoder mdbe = metaEnc.metaDataBytesCount(b.length);
            for (byte bb : b) {
                mdbe.next().bytes(bb);
            }
        }
    }
    //Session/worker IDs
    byte[] bSessionID = SbeUtil.toBytes(true, sessionID);
    byte[] bTypeID = SbeUtil.toBytes(true, typeID);
    byte[] bWorkerID = SbeUtil.toBytes(true, workerID);
    ue.putSessionID(bSessionID, 0, bSessionID.length);
    ue.putTypeID(bTypeID, 0, bTypeID.length);
    ue.putWorkerID(bWorkerID, 0, bWorkerID.length);
    //Class name for DataSet metadata
    byte[] metaDataClassNameBytes = SbeUtil.toBytes(true, metaDataClassName);
    ue.putDataSetMetaDataClassName(metaDataClassNameBytes, 0, metaDataClassNameBytes.length);
}
Also used : Histogram(org.deeplearning4j.ui.stats.api.Histogram) SummaryType(org.deeplearning4j.ui.stats.api.SummaryType) StatsType(org.deeplearning4j.ui.stats.api.StatsType)
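
For orientation, here is a minimal, hypothetical round-trip sketch showing how encode and decode pair up. It assumes SbeStatsReport has a no-arg constructor and that a populated instance is available from elsewhere; the 1 MB buffer is an arbitrary over-allocation for illustration only (the real class computes the exact length via entrySize and related helpers), and UnsafeBuffer is the Agrona class the SBE-generated codecs are built on (shown here under org.agrona, which may differ by Agrona version).

import org.agrona.concurrent.UnsafeBuffer;

public class EncodeDecodeRoundTrip {
    //Hypothetical helper: serialize a populated report into a fresh buffer and read it back
    static SbeStatsReport roundTrip(SbeStatsReport populated) {
        //Arbitrary over-allocation for illustration; the real code sizes the buffer exactly
        UnsafeBuffer buffer = new UnsafeBuffer(new byte[1024 * 1024]);
        //Writes the SBE message header followed by the Update message
        populated.encode(buffer);

        //Assumes a no-arg constructor; UnsafeBuffer also implements DirectBuffer, so it can be decoded directly
        SbeStatsReport decoded = new SbeStatsReport();
        decoded.decode(buffer);
        return decoded;
    }
}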

Example 3 with SummaryType

use of org.deeplearning4j.ui.stats.api.SummaryType in project deeplearning4j by deeplearning4j.

the class SbeStatsReport method decode.

@Override
public void decode(DirectBuffer buffer) {
    //TODO we could do this more efficiently, with buffer re-use, etc.
    MessageHeaderDecoder dec = new MessageHeaderDecoder();
    UpdateDecoder ud = new UpdateDecoder();
    dec.wrap(buffer, 0);
    final int blockLength = dec.blockLength();
    final int version = dec.version();
    int headerLength = dec.encodedLength();
    //TODO: in general, we'd check the header, version, schema etc.
    ud.wrap(buffer, headerLength, blockLength, version);
    //TODO iteration count
    timeStamp = ud.time();
    //TODO
    long deltaTime = ud.deltaTime();
    iterationCount = ud.iterationCount();
    UpdateFieldsPresentDecoder fpd = ud.fieldsPresent();
    scorePresent = fpd.score();
    memoryUsePresent = fpd.memoryUse();
    performanceStatsPresent = fpd.performance();
    boolean gc = fpd.garbageCollection();
    boolean histogramParameters = fpd.histogramParameters();
    boolean histogramUpdates = fpd.histogramUpdates();
    boolean histogramActivations = fpd.histogramActivations();
    boolean meanParameters = fpd.meanParameters();
    boolean meanUpdates = fpd.meanUpdates();
    boolean meanActivations = fpd.meanActivations();
    boolean meanMagParams = fpd.meanMagnitudeParameters();
    boolean meanMagUpdates = fpd.meanMagnitudeUpdates();
    boolean meanMagAct = fpd.meanMagnitudeActivations();
    boolean learningRatesPresent = fpd.learningRatesPresent();
    boolean metaDataPresent = fpd.dataSetMetaDataPresent();
    statsCollectionDurationMs = ud.statsCollectionDuration();
    score = ud.score();
    //First group: memory use
    UpdateDecoder.MemoryUseDecoder mud = ud.memoryUse();
    //TODO avoid
    List<Long> dcMem = null;
    List<Long> dmMem = null;
    for (UpdateDecoder.MemoryUseDecoder m : mud) {
        MemoryType type = m.memoryType();
        long memBytes = m.memoryBytes();
        switch(type) {
            case JvmCurrent:
                jvmCurrentBytes = memBytes;
                break;
            case JvmMax:
                jvmMaxBytes = memBytes;
                break;
            case OffHeapCurrent:
                offHeapCurrentBytes = memBytes;
                break;
            case OffHeapMax:
                offHeapMaxBytes = memBytes;
                break;
            case DeviceCurrent:
                if (dcMem == null)
                    dcMem = new ArrayList<>();
                dcMem.add(memBytes);
                break;
            case DeviceMax:
                if (dmMem == null)
                    dmMem = new ArrayList<>();
                dmMem.add(memBytes);
                break;
            case NULL_VAL:
                break;
        }
    }
    if (dcMem != null) {
        long[] a = new long[dcMem.size()];
        int i = 0;
        for (Long l : dcMem) {
            a[i++] = l;
        }
        deviceCurrentBytes = a;
    }
    if (dmMem != null) {
        long[] a = new long[dmMem.size()];
        int i = 0;
        for (Long l : dmMem) {
            a[i++] = l;
        }
        deviceMaxBytes = a;
    }
    //Second group: performance stats (0 or 1 entries only)
    for (UpdateDecoder.PerformanceDecoder pd : ud.performance()) {
        totalRuntimeMs = pd.totalRuntimeMs();
        totalExamples = pd.totalExamples();
        totalMinibatches = pd.totalMinibatches();
        examplesPerSecond = pd.examplesPerSecond();
        minibatchesPerSecond = pd.minibatchesPerSecond();
    }
    //Third group: GC stats
    for (UpdateDecoder.GcStatsDecoder gcsd : ud.gcStats()) {
        if (gcStats == null)
            gcStats = new ArrayList<>();
        int deltaGCCount = gcsd.deltaGCCount();
        int deltaGCTimeMs = gcsd.deltaGCTimeMs();
        String gcName = gcsd.gcName();
        //TODO delta time...
        GCStats s = new GCStats(gcName, deltaGCCount, deltaGCTimeMs);
        gcStats.add(s);
    }
    //Fourth group: param names
    UpdateDecoder.ParamNamesDecoder pnd = ud.paramNames();
    int nParams = pnd.count();
    List<String> paramNames = null;
    if (nParams > 0) {
        paramNames = new ArrayList<>(nParams);
    }
    for (UpdateDecoder.ParamNamesDecoder pndec : pnd) {
        paramNames.add(pndec.paramName());
    }
    //Fifth group: layer names
    UpdateDecoder.LayerNamesDecoder lnd = ud.layerNames();
    int nLayers = lnd.count();
    List<String> layerNames = null;
    if (nLayers > 0) {
        layerNames = new ArrayList<>(nLayers);
    }
    for (UpdateDecoder.LayerNamesDecoder l : lnd) {
        layerNames.add(l.layerName());
    }
    //Sixth group: Per parameter stats (and histograms, etc) AND per layer stats
    int entryNum = 0;
    for (UpdateDecoder.PerParameterStatsDecoder ppsd : ud.perParameterStats()) {
        boolean isParam = entryNum < nParams;
        String name = (isParam ? paramNames.get(entryNum) : layerNames.get(entryNum - nParams));
        entryNum++;
        float lr = ppsd.learningRate();
        if (learningRatesPresent && isParam) {
            if (learningRatesByParam == null)
                learningRatesByParam = new HashMap<>();
            learningRatesByParam.put(name, (double) lr);
        }
        //Summary stats (mean/stdev/mean magnitude)
        for (UpdateDecoder.PerParameterStatsDecoder.SummaryStatDecoder ssd : ppsd.summaryStat()) {
            StatsType st = translate(ssd.statType());
            SummaryType summaryType = translate(ssd.summaryType());
            double value = ssd.value();
            switch(summaryType) {
                case Mean:
                    if (meanValues == null)
                        meanValues = new HashMap<>();
                    Map<String, Double> map = meanValues.get(st);
                    if (map == null) {
                        map = new HashMap<>();
                        meanValues.put(st, map);
                    }
                    map.put(name, value);
                    break;
                case Stdev:
                    if (stdevValues == null)
                        stdevValues = new HashMap<>();
                    Map<String, Double> map2 = stdevValues.get(st);
                    if (map2 == null) {
                        map2 = new HashMap<>();
                        stdevValues.put(st, map2);
                    }
                    map2.put(name, value);
                    break;
                case MeanMagnitudes:
                    if (meanMagnitudeValues == null)
                        meanMagnitudeValues = new HashMap<>();
                    Map<String, Double> map3 = meanMagnitudeValues.get(st);
                    if (map3 == null) {
                        map3 = new HashMap<>();
                        meanMagnitudeValues.put(st, map3);
                    }
                    map3.put(name, value);
                    break;
            }
        }
        //Histograms
        for (UpdateDecoder.PerParameterStatsDecoder.HistogramsDecoder hd : ppsd.histograms()) {
            StatsType st = translate(hd.statType());
            double min = hd.minValue();
            double max = hd.maxValue();
            int nBins = hd.nBins();
            int[] binCounts = new int[nBins];
            int i = 0;
            for (UpdateDecoder.PerParameterStatsDecoder.HistogramsDecoder.HistogramCountsDecoder hcd : hd.histogramCounts()) {
                binCounts[i++] = (int) hcd.binCount();
            }
            Histogram h = new Histogram(min, max, nBins, binCounts);
            if (histograms == null)
                histograms = new HashMap<>();
            Map<String, Histogram> map = histograms.get(st);
            if (map == null) {
                map = new HashMap<>();
                histograms.put(st, map);
            }
            map.put(name, h);
        }
    }
    //Final group: DataSet metadata
    for (UpdateDecoder.DataSetMetaDataBytesDecoder metaDec : ud.dataSetMetaDataBytes()) {
        if (this.dataSetMetaData == null)
            this.dataSetMetaData = new ArrayList<>();
        UpdateDecoder.DataSetMetaDataBytesDecoder.MetaDataBytesDecoder mdbd = metaDec.metaDataBytes();
        int length = mdbd.count();
        byte[] b = new byte[length];
        int i = 0;
        for (UpdateDecoder.DataSetMetaDataBytesDecoder.MetaDataBytesDecoder mdbd2 : mdbd) {
            b[i++] = mdbd2.bytes();
        }
        this.dataSetMetaData.add(b);
    }
    //IDs
    this.sessionID = ud.sessionID();
    this.typeID = ud.typeID();
    this.workerID = ud.workerID();
    //Variable length: DataSet metadata class name
    this.metaDataClassName = ud.dataSetMetaDataClassName();
    if (!metaDataPresent) {
        this.metaDataClassName = null;
    }
}
Also used : Histogram(org.deeplearning4j.ui.stats.api.Histogram) SummaryType(org.deeplearning4j.ui.stats.api.SummaryType) StatsType(org.deeplearning4j.ui.stats.api.StatsType)
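
The TODO near the top of decode notes that, in general, the message header, schema and version should be validated before wrapping the body decoder. Below is a sketch of what such a check might look like, using only the standard accessors SBE generates (templateId, schemaId and version on MessageHeaderDecoder; sbeTemplateId, sbeSchemaId and sbeSchemaVersion on UpdateDecoder); the exception type and messages are illustrative, and imports are the same ones SbeStatsReport already relies on.

public class UpdateHeaderCheck {
    //Hypothetical: validate the SBE header before wrapping the Update body decoder
    static UpdateDecoder wrapWithHeaderCheck(DirectBuffer buffer) {
        MessageHeaderDecoder header = new MessageHeaderDecoder();
        UpdateDecoder ud = new UpdateDecoder();
        header.wrap(buffer, 0);
        if (header.schemaId() != ud.sbeSchemaId() || header.templateId() != ud.sbeTemplateId()) {
            throw new IllegalStateException("Not an Update message: schemaId=" + header.schemaId()
                            + ", templateId=" + header.templateId());
        }
        if (header.version() > ud.sbeSchemaVersion()) {
            //Written by a newer schema version than this decoder understands
            throw new IllegalStateException("Unsupported schema version: " + header.version());
        }
        ud.wrap(buffer, header.encodedLength(), header.blockLength(), header.version());
        return ud;
    }
}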

Aggregations

Histogram (org.deeplearning4j.ui.stats.api.Histogram): 3 usages
StatsType (org.deeplearning4j.ui.stats.api.StatsType): 3 usages
SummaryType (org.deeplearning4j.ui.stats.api.SummaryType): 3 usages