Search in sources :

Example 1 with StatsInitializationReport

use of org.deeplearning4j.ui.stats.api.StatsInitializationReport in project deeplearning4j by deeplearning4j.

the class TrainModule method getConfig.

/**
 * Loads the network configuration for the current session from the first static
 * initialization report in stats storage.
 *
 * Exactly one element of the returned triple is non-null, depending on the model class:
 * (MultiLayerConfiguration, null, null) for a MultiLayerNetwork,
 * (null, ComputationGraphConfiguration, null) for a ComputationGraph,
 * (null, null, NeuralNetConfiguration) for anything else whose config JSON parses as a
 * single-layer configuration.
 *
 * @return the parsed configuration triple, or null if there is no session, no static
 *         info is available, or the fallback single-layer JSON parse fails
 */
private Triple<MultiLayerConfiguration, ComputationGraphConfiguration, NeuralNetConfiguration> getConfig() {
    boolean noData = currentSessionID == null;
    StatsStorage ss = (noData ? null : knownSessionIDs.get(currentSessionID));
    // Typed emptyList() instead of the raw Collections.EMPTY_LIST (avoids an unchecked assignment)
    List<Persistable> allStatic = (noData ? Collections.<Persistable>emptyList()
                    : ss.getAllStaticInfos(currentSessionID, StatsListener.TYPE_ID));
    if (allStatic.isEmpty())
        return null;
    StatsInitializationReport p = (StatsInitializationReport) allStatic.get(0);
    String modelClass = p.getModelClassName();
    String config = p.getModelConfigJson();
    // endsWith: the report stores the fully-qualified class name
    if (modelClass.endsWith("MultiLayerNetwork")) {
        MultiLayerConfiguration conf = MultiLayerConfiguration.fromJson(config);
        return new Triple<>(conf, null, null);
    } else if (modelClass.endsWith("ComputationGraph")) {
        ComputationGraphConfiguration conf = ComputationGraphConfiguration.fromJson(config);
        return new Triple<>(null, conf, null);
    } else {
        // Fallback: try to parse the JSON as a single-layer configuration (e.g. pretrain layers)
        try {
            NeuralNetConfiguration layer = NeuralNetConfiguration.mapper().readValue(config, NeuralNetConfiguration.class);
            return new Triple<>(null, null, layer);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return null;
}
Also used : Triple(org.deeplearning4j.berkeley.Triple) StatsInitializationReport(org.deeplearning4j.ui.stats.api.StatsInitializationReport) StatsStorage(org.deeplearning4j.api.storage.StatsStorage) Persistable(org.deeplearning4j.api.storage.Persistable) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration)

Example 2 with StatsInitializationReport

use of org.deeplearning4j.ui.stats.api.StatsInitializationReport in project deeplearning4j by deeplearning4j.

the class TrainModule method sessionInfo.

/**
 * Builds a JSON summary of all known sessions: for each session ID, the number of
 * workers, initialization time, last update time, worker IDs, and basic model info
 * (type, layer count, parameter count).
 *
 * @return HTTP 200 with a JSON map of session ID to per-session info map
 */
private Result sessionInfo() {
    //Display, for each session: session ID, start time, number of workers, last update
    Map<String, Object> dataEachSession = new HashMap<>();
    for (Map.Entry<String, StatsStorage> entry : knownSessionIDs.entrySet()) {
        Map<String, Object> dataThisSession = new HashMap<>();
        String sid = entry.getKey();
        StatsStorage ss = entry.getValue();
        List<String> workerIDs = ss.listWorkerIDsForSessionAndType(sid, StatsListener.TYPE_ID);
        int workerCount = (workerIDs == null ? 0 : workerIDs.size());
        List<Persistable> staticInfo = ss.getAllStaticInfos(sid, StatsListener.TYPE_ID);
        // Init time = earliest static-info timestamp across all workers
        long initTime = Long.MAX_VALUE;
        if (staticInfo != null) {
            for (Persistable p : staticInfo) {
                initTime = Math.min(p.getTimeStamp(), initTime);
            }
        }
        // Last update = latest update timestamp across all workers
        long lastUpdateTime = Long.MIN_VALUE;
        List<Persistable> lastUpdatesAllWorkers = ss.getLatestUpdateAllWorkers(sid, StatsListener.TYPE_ID);
        // Fix: guard against a null return, as is already done for staticInfo above
        if (lastUpdatesAllWorkers != null) {
            for (Persistable p : lastUpdatesAllWorkers) {
                lastUpdateTime = Math.max(lastUpdateTime, p.getTimeStamp());
            }
        }
        dataThisSession.put("numWorkers", workerCount);
        // Sentinel values mean "no data" — render as empty string rather than MAX/MIN_VALUE
        dataThisSession.put("initTime", initTime == Long.MAX_VALUE ? "" : initTime);
        dataThisSession.put("lastUpdate", lastUpdateTime == Long.MIN_VALUE ? "" : lastUpdateTime);
        // add hashmap of workers
        if (workerCount > 0) {
            dataThisSession.put("workers", workerIDs);
        }
        //Model info: type, # layers, # params...
        if (staticInfo != null && staticInfo.size() > 0) {
            StatsInitializationReport sr = (StatsInitializationReport) staticInfo.get(0);
            String modelClassName = sr.getModelClassName();
            // Strip the package prefix for the two well-known model types
            if (modelClassName.endsWith("MultiLayerNetwork")) {
                modelClassName = "MultiLayerNetwork";
            } else if (modelClassName.endsWith("ComputationGraph")) {
                modelClassName = "ComputationGraph";
            }
            int numLayers = sr.getModelNumLayers();
            long numParams = sr.getModelNumParams();
            dataThisSession.put("modelType", modelClassName);
            dataThisSession.put("numLayers", numLayers);
            dataThisSession.put("numParams", numParams);
        } else {
            dataThisSession.put("modelType", "");
            dataThisSession.put("numLayers", "");
            dataThisSession.put("numParams", "");
        }
        dataEachSession.put(sid, dataThisSession);
    }
    return ok(Json.toJson(dataEachSession));
}
Also used : StatsInitializationReport(org.deeplearning4j.ui.stats.api.StatsInitializationReport) StatsStorage(org.deeplearning4j.api.storage.StatsStorage) Persistable(org.deeplearning4j.api.storage.Persistable)

Example 3 with StatsInitializationReport

use of org.deeplearning4j.ui.stats.api.StatsInitializationReport in project deeplearning4j by deeplearning4j.

the class TrainModule method getLayerInfoTable.

/**
 * Builds the rows of the per-layer information table shown in the model view:
 * layer name, layer type, and — when a static initialization report is available —
 * size (nIn/nOut), parameter count, weight init, updater, activation function, and
 * CNN kernel/stride/padding where applicable.
 *
 * @param layerIdx index of the vertex in the graph info (for MultiLayerNetwork,
 *                 index 0 is the input, so layer configs are offset by -1)
 * @param gi       graph structure/metadata for the current model
 * @param i18N     message source for the table's row labels
 * @param noData   if true, only the name row and an empty type row are returned
 * @param ss       stats storage to read the static initialization report from
 * @param wid      worker ID whose static info should be used
 * @return table rows as {label, value} string pairs
 */
private String[][] getLayerInfoTable(int layerIdx, TrainModuleUtils.GraphInfo gi, I18N i18N, boolean noData, StatsStorage ss, String wid) {
    List<String[]> layerInfoRows = new ArrayList<>();
    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerName"), gi.getVertexNames().get(layerIdx) });
    // Row index 1 is a placeholder for the layer type; it is filled in at the very end
    // once the type has been determined (see layerInfoRows.get(1)[1] = layerType below)
    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerType"), "" });
    if (!noData) {
        Persistable p = ss.getStaticInfo(currentSessionID, StatsListener.TYPE_ID, wid);
        if (p != null) {
            StatsInitializationReport initReport = (StatsInitializationReport) p;
            String configJson = initReport.getModelConfigJson();
            String modelClass = initReport.getModelClassName();
            //TODO error handling...
            String layerType = "";
            // layer/nnc stay null for input vertices and non-layer graph vertices;
            // all detail rows below are keyed off layer != null
            Layer layer = null;
            NeuralNetConfiguration nnc = null;
            if (modelClass.endsWith("MultiLayerNetwork")) {
                MultiLayerConfiguration conf = MultiLayerConfiguration.fromJson(configJson);
                //-1 because of input
                int confIdx = layerIdx - 1;
                if (confIdx >= 0) {
                    nnc = conf.getConf(confIdx);
                    layer = nnc.getLayer();
                } else {
                    //Input layer
                    layerType = "Input";
                }
            } else if (modelClass.endsWith("ComputationGraph")) {
                // Graph vertices are looked up by name rather than index
                ComputationGraphConfiguration conf = ComputationGraphConfiguration.fromJson(configJson);
                String vertexName = gi.getVertexNames().get(layerIdx);
                Map<String, GraphVertex> vertices = conf.getVertices();
                if (vertices.containsKey(vertexName) && vertices.get(vertexName) instanceof LayerVertex) {
                    LayerVertex lv = (LayerVertex) vertices.get(vertexName);
                    nnc = lv.getLayerConf();
                    layer = nnc.getLayer();
                } else if (conf.getNetworkInputs().contains(vertexName)) {
                    layerType = "Input";
                } else {
                    // Non-layer vertex (e.g. merge/subset): report its class name as the type
                    GraphVertex gv = conf.getVertices().get(vertexName);
                    if (gv != null) {
                        layerType = gv.getClass().getSimpleName();
                    }
                }
            } else if (modelClass.endsWith("VariationalAutoencoder")) {
                // VAE pretraining: type and key/value info come directly from the graph info
                layerType = gi.getVertexTypes().get(layerIdx);
                Map<String, String> map = gi.getVertexInfo().get(layerIdx);
                for (Map.Entry<String, String> entry : map.entrySet()) {
                    layerInfoRows.add(new String[] { entry.getKey(), entry.getValue() });
                }
            }
            if (layer != null) {
                layerType = getLayerType(layer);
            }
            if (layer != null) {
                String activationFn = null;
                if (layer instanceof FeedForwardLayer) {
                    FeedForwardLayer ffl = (FeedForwardLayer) layer;
                    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerNIn"), String.valueOf(ffl.getNIn()) });
                    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerSize"), String.valueOf(ffl.getNOut()) });
                    activationFn = layer.getActivationFn().toString();
                }
                int nParams = layer.initializer().numParams(nnc);
                layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerNParams"), String.valueOf(nParams) });
                // Weight init / updater rows only make sense for layers that actually have parameters
                if (nParams > 0) {
                    WeightInit wi = layer.getWeightInit();
                    String str = wi.toString();
                    if (wi == WeightInit.DISTRIBUTION) {
                        str += layer.getDist();
                    }
                    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerWeightInit"), str });
                    Updater u = layer.getUpdater();
                    String us = (u == null ? "" : u.toString());
                    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerUpdater"), us });
                //TODO: Maybe L1/L2, dropout, updater-specific values etc
                }
                if (layer instanceof ConvolutionLayer || layer instanceof SubsamplingLayer) {
                    int[] kernel;
                    int[] stride;
                    int[] padding;
                    if (layer instanceof ConvolutionLayer) {
                        ConvolutionLayer cl = (ConvolutionLayer) layer;
                        kernel = cl.getKernelSize();
                        stride = cl.getStride();
                        padding = cl.getPadding();
                    } else {
                        SubsamplingLayer ssl = (SubsamplingLayer) layer;
                        kernel = ssl.getKernelSize();
                        stride = ssl.getStride();
                        padding = ssl.getPadding();
                        // Subsampling layers have no activation function; suppress the row added above
                        activationFn = null;
                        layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerSubsamplingPoolingType"), ssl.getPoolingType().toString() });
                    }
                    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerCnnKernel"), Arrays.toString(kernel) });
                    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerCnnStride"), Arrays.toString(stride) });
                    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerCnnPadding"), Arrays.toString(padding) });
                }
                if (activationFn != null) {
                    layerInfoRows.add(new String[] { i18N.getMessage("train.model.layerinfotable.layerActivationFn"), activationFn });
                }
            }
            // Fill in the layer-type placeholder row created at the top
            layerInfoRows.get(1)[1] = layerType;
        }
    }
    return layerInfoRows.toArray(new String[layerInfoRows.size()][0]);
}
Also used : StatsInitializationReport(org.deeplearning4j.ui.stats.api.StatsInitializationReport) LayerVertex(org.deeplearning4j.nn.conf.graph.LayerVertex) Persistable(org.deeplearning4j.api.storage.Persistable) SubsamplingLayer(org.deeplearning4j.nn.conf.layers.SubsamplingLayer) WeightInit(org.deeplearning4j.nn.weights.WeightInit) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) SubsamplingLayer(org.deeplearning4j.nn.conf.layers.SubsamplingLayer) FeedForwardLayer(org.deeplearning4j.nn.conf.layers.FeedForwardLayer) Layer(org.deeplearning4j.nn.conf.layers.Layer) ConvolutionLayer(org.deeplearning4j.nn.conf.layers.ConvolutionLayer) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) GraphVertex(org.deeplearning4j.nn.conf.graph.GraphVertex) Updater(org.deeplearning4j.nn.conf.Updater) ComputationGraphConfiguration(org.deeplearning4j.nn.conf.ComputationGraphConfiguration) FeedForwardLayer(org.deeplearning4j.nn.conf.layers.FeedForwardLayer)

Example 4 with StatsInitializationReport

use of org.deeplearning4j.ui.stats.api.StatsInitializationReport in project deeplearning4j by deeplearning4j.

the class TrainModule method getHardwareSoftwareInfo.

/**
 * Collects hardware and software information tables, one per unique JVM, from the
 * static initialization reports of all workers.
 *
 * @param staticInfoAllWorkers static info persistables; each is expected to be a
 *                             StatsInitializationReport (TODO: validation)
 * @param i18n                 message source for the table row labels
 * @return pair of (hardware info, software info) maps, keyed by JVM index ("0", "1", ...),
 *         each value a list of {label, value} string rows
 */
private static Pair<Map<String, Object>, Map<String, Object>> getHardwareSoftwareInfo(List<Persistable> staticInfoAllWorkers, I18N i18n) {
    Map<String, Object> retHw = new HashMap<>();
    Map<String, Object> retSw = new HashMap<>();
    //First: map workers to JVMs. Multiple workers may run in one JVM; keep one report per JVM UID.
    Map<String, StatsInitializationReport> staticByJvm = new HashMap<>();
    for (Persistable p : staticInfoAllWorkers) {
        //TODO validation/checks
        StatsInitializationReport init = (StatsInitializationReport) p;
        staticByJvm.put(init.getSwJvmUID(), init);
    }
    //Sort JVM UIDs for a stable display order (the map keyset alone is redundant with a separate set)
    List<String> jvmList = new ArrayList<>(staticByJvm.keySet());
    Collections.sort(jvmList);
    //For each unique JVM, collect hardware and software info
    int count = 0;
    for (String jvm : jvmList) {
        StatsInitializationReport sr = staticByJvm.get(jvm);
        //---- Hardware Info ----
        List<String[]> hwInfo = new ArrayList<>();
        int numDevices = sr.getHwNumDevices();
        String[] deviceDescription = sr.getHwDeviceDescription();
        long[] devTotalMem = sr.getHwDeviceTotalMemory();
        hwInfo.add(new String[] { i18n.getMessage("train.system.hwTable.jvmMax"), String.valueOf(sr.getHwJvmMaxMemory()) });
        hwInfo.add(new String[] { i18n.getMessage("train.system.hwTable.offHeapMax"), String.valueOf(sr.getHwOffHeapMaxMemory()) });
        hwInfo.add(new String[] { i18n.getMessage("train.system.hwTable.jvmProcs"), String.valueOf(sr.getHwJvmAvailableProcessors()) });
        hwInfo.add(new String[] { i18n.getMessage("train.system.hwTable.computeDevices"), String.valueOf(numDevices) });
        for (int i = 0; i < numDevices; i++) {
            String label = i18n.getMessage("train.system.hwTable.deviceName") + " (" + i + ")";
            String name = (deviceDescription == null || i >= deviceDescription.length ? String.valueOf(i) : deviceDescription[i]);
            hwInfo.add(new String[] { label, name });
            String memLabel = i18n.getMessage("train.system.hwTable.deviceMemory") + " (" + i + ")";
            // Fix: must be short-circuit || — the original bitwise | always evaluated
            // devTotalMem.length, throwing an NPE exactly when devTotalMem was null
            String memBytes = (devTotalMem == null || i >= devTotalMem.length ? "-" : String.valueOf(devTotalMem[i]));
            hwInfo.add(new String[] { memLabel, memBytes });
        }
        retHw.put(String.valueOf(count), hwInfo);
        //---- Software Info -----
        //Shorten the ND4J backend class name to a user-friendly label
        String nd4jBackend = sr.getSwNd4jBackendClass();
        if (nd4jBackend != null && nd4jBackend.contains(".")) {
            int idx = nd4jBackend.lastIndexOf('.');
            nd4jBackend = nd4jBackend.substring(idx + 1);
            String temp;
            switch(nd4jBackend) {
                case "CpuNDArrayFactory":
                    temp = "CPU";
                    break;
                case "JCublasNDArrayFactory":
                    temp = "CUDA";
                    break;
                default:
                    temp = nd4jBackend;
            }
            nd4jBackend = temp;
        }
        String datatype = sr.getSwNd4jDataTypeName();
        if (datatype == null)
            datatype = "";
        else
            datatype = datatype.toLowerCase();
        List<String[]> swInfo = new ArrayList<>();
        swInfo.add(new String[] { i18n.getMessage("train.system.swTable.os"), sr.getSwOsName() });
        swInfo.add(new String[] { i18n.getMessage("train.system.swTable.hostname"), sr.getSwHostName() });
        swInfo.add(new String[] { i18n.getMessage("train.system.swTable.osArch"), sr.getSwArch() });
        swInfo.add(new String[] { i18n.getMessage("train.system.swTable.jvmName"), sr.getSwJvmName() });
        swInfo.add(new String[] { i18n.getMessage("train.system.swTable.jvmVersion"), sr.getSwJvmVersion() });
        swInfo.add(new String[] { i18n.getMessage("train.system.swTable.nd4jBackend"), nd4jBackend });
        swInfo.add(new String[] { i18n.getMessage("train.system.swTable.nd4jDataType"), datatype });
        retSw.put(String.valueOf(count), swInfo);
        count++;
    }
    return new Pair<>(retHw, retSw);
}
Also used : StatsInitializationReport(org.deeplearning4j.ui.stats.api.StatsInitializationReport) Persistable(org.deeplearning4j.api.storage.Persistable) Pair(org.deeplearning4j.berkeley.Pair)

Example 5 with StatsInitializationReport

use of org.deeplearning4j.ui.stats.api.StatsInitializationReport in project deeplearning4j by deeplearning4j.

the class TrainModule method getOverviewData.

/**
 * Builds the JSON payload for the overview page of the current session/worker:
 * score vs. iteration chart data, update:parameter ratios, standard deviations
 * (activations, gradients, updates), performance stats, and basic model info.
 *
 * Chart series are subsampled to at most maxChartPoints entries, always keeping
 * the most recent update.
 *
 * @return HTTP 200 with the JSON overview data (empty/placeholder values when no data)
 */
private Result getOverviewData() {
    Long lastUpdate = lastUpdateForSession.get(currentSessionID);
    if (lastUpdate == null)
        lastUpdate = -1L;
    I18N i18N = I18NProvider.getInstance();
    boolean noData = currentSessionID == null;
    //First pass (optimize later): query all data...
    StatsStorage ss = (noData ? null : knownSessionIDs.get(currentSessionID));
    String wid = getWorkerIdForIndex(currentWorkerIdx);
    if (wid == null) {
        noData = true;
    }
    List<Integer> scoresIterCount = new ArrayList<>();
    List<Double> scores = new ArrayList<>();
    // The result map is populated with (initially empty) collections up front so the
    // response always has a consistent shape, even when there is no data
    Map<String, Object> result = new HashMap<>();
    result.put("updateTimestamp", lastUpdate);
    result.put("scores", scores);
    result.put("scoresIter", scoresIterCount);
    //Get scores info
    List<Persistable> updates = (noData ? null : ss.getAllUpdatesAfter(currentSessionID, StatsListener.TYPE_ID, wid, 0));
    if (updates == null || updates.size() == 0) {
        noData = true;
    }
    //Collect update ratios for weights
    //Collect standard deviations: activations, gradients, updates
    //Mean magnitude (updates) / mean magnitude (parameters)
    Map<String, List<Double>> updateRatios = new HashMap<>();
    result.put("updateRatios", updateRatios);
    Map<String, List<Double>> stdevActivations = new HashMap<>();
    Map<String, List<Double>> stdevGradients = new HashMap<>();
    Map<String, List<Double>> stdevUpdates = new HashMap<>();
    result.put("stdevActivations", stdevActivations);
    result.put("stdevGradients", stdevGradients);
    result.put("stdevUpdates", stdevUpdates);
    // Seed the series maps from the first update's keys, so the main loop below only
    // has to append values for keys that were present initially
    if (!noData) {
        Persistable u = updates.get(0);
        if (u instanceof StatsReport) {
            StatsReport sp = (StatsReport) u;
            Map<String, Double> map = sp.getMeanMagnitudes(StatsType.Parameters);
            if (map != null) {
                for (String s : map.keySet()) {
                    if (!s.toLowerCase().endsWith("w"))
                        //TODO: more robust "weights only" approach...
                        continue;
                    updateRatios.put(s, new ArrayList<>());
                }
            }
            Map<String, Double> stdGrad = sp.getStdev(StatsType.Gradients);
            if (stdGrad != null) {
                for (String s : stdGrad.keySet()) {
                    if (!s.toLowerCase().endsWith("w"))
                        //TODO: more robust "weights only" approach...
                        continue;
                    stdevGradients.put(s, new ArrayList<>());
                }
            }
            Map<String, Double> stdUpdate = sp.getStdev(StatsType.Updates);
            if (stdUpdate != null) {
                for (String s : stdUpdate.keySet()) {
                    if (!s.toLowerCase().endsWith("w"))
                        //TODO: more robust "weights only" approach...
                        continue;
                    stdevUpdates.put(s, new ArrayList<>());
                }
            }
            Map<String, Double> stdAct = sp.getStdev(StatsType.Activations);
            if (stdAct != null) {
                for (String s : stdAct.keySet()) {
                    stdevActivations.put(s, new ArrayList<>());
                }
            }
        }
    }
    StatsReport last = null;
    int lastIterCount = -1;
    //Legacy issue - Spark training - iteration counts are used to be reset... which means: could go 0,1,2,0,1,2, etc...
    //Or, it could equally go 4,8,4,8,... or 5,5,5,5 - depending on the collection and averaging frequencies
    //Now, it should use the proper iteration counts
    boolean needToHandleLegacyIterCounts = false;
    if (!noData) {
        double lastScore;
        int totalUpdates = updates.size();
        // Subsample so that at most ~maxChartPoints entries end up in the chart series
        int subsamplingFrequency = 1;
        if (totalUpdates > maxChartPoints) {
            subsamplingFrequency = totalUpdates / maxChartPoints;
        }
        int pCount = -1;
        int lastUpdateIdx = updates.size() - 1;
        for (Persistable u : updates) {
            pCount++;
            if (!(u instanceof StatsReport))
                continue;
            last = (StatsReport) u;
            int iterCount = last.getIterationCount();
            // Non-increasing iteration counts indicate legacy (Spark) data that needs fixing up
            if (iterCount <= lastIterCount) {
                needToHandleLegacyIterCounts = true;
            }
            lastIterCount = iterCount;
            if (pCount > 0 && subsamplingFrequency > 1 && pCount % subsamplingFrequency != 0) {
                //Skip this - subsample the data
                if (pCount != lastUpdateIdx)
                    //Always keep the most recent value
                    continue;
            }
            scoresIterCount.add(iterCount);
            lastScore = last.getScore();
            // NaN/Infinity cannot be serialized to JSON charts; substitute a sentinel value
            if (Double.isFinite(lastScore)) {
                scores.add(lastScore);
            } else {
                scores.add(NAN_REPLACEMENT_VALUE);
            }
            //Update ratios: mean magnitudes(updates) / mean magnitudes (parameters)
            Map<String, Double> updateMM = last.getMeanMagnitudes(StatsType.Updates);
            Map<String, Double> paramMM = last.getMeanMagnitudes(StatsType.Parameters);
            if (updateMM != null && paramMM != null && updateMM.size() > 0 && paramMM.size() > 0) {
                for (String s : updateRatios.keySet()) {
                    List<Double> ratioHistory = updateRatios.get(s);
                    double currUpdate = updateMM.getOrDefault(s, 0.0);
                    double currParam = paramMM.getOrDefault(s, 0.0);
                    // Ratio may be non-finite if currParam is 0 — replace with the sentinel
                    double ratio = currUpdate / currParam;
                    if (Double.isFinite(ratio)) {
                        ratioHistory.add(ratio);
                    } else {
                        ratioHistory.add(NAN_REPLACEMENT_VALUE);
                    }
                }
            }
            //Standard deviations: gradients, updates, activations
            Map<String, Double> stdGrad = last.getStdev(StatsType.Gradients);
            Map<String, Double> stdUpd = last.getStdev(StatsType.Updates);
            Map<String, Double> stdAct = last.getStdev(StatsType.Activations);
            if (stdGrad != null) {
                for (String s : stdevGradients.keySet()) {
                    double d = stdGrad.getOrDefault(s, 0.0);
                    stdevGradients.get(s).add(fixNaN(d));
                }
            }
            if (stdUpd != null) {
                for (String s : stdevUpdates.keySet()) {
                    double d = stdUpd.getOrDefault(s, 0.0);
                    stdevUpdates.get(s).add(fixNaN(d));
                }
            }
            if (stdAct != null) {
                for (String s : stdevActivations.keySet()) {
                    double d = stdAct.getOrDefault(s, 0.0);
                    stdevActivations.get(s).add(fixNaN(d));
                }
            }
        }
    }
    if (needToHandleLegacyIterCounts) {
        cleanLegacyIterationCounts(scoresIterCount);
    }
    //----- Performance Info -----
    String[][] perfInfo = new String[][] { { i18N.getMessage("train.overview.perftable.startTime"), "" }, { i18N.getMessage("train.overview.perftable.totalRuntime"), "" }, { i18N.getMessage("train.overview.perftable.lastUpdate"), "" }, { i18N.getMessage("train.overview.perftable.totalParamUpdates"), "" }, { i18N.getMessage("train.overview.perftable.updatesPerSec"), "" }, { i18N.getMessage("train.overview.perftable.examplesPerSec"), "" } };
    // Rows 0 (start time) and 1 (total runtime) are left blank here; only update-derived rows are filled
    if (last != null) {
        perfInfo[2][1] = String.valueOf(dateFormat.format(new Date(last.getTimeStamp())));
        perfInfo[3][1] = String.valueOf(last.getTotalMinibatches());
        perfInfo[4][1] = String.valueOf(df2.format(last.getMinibatchesPerSecond()));
        perfInfo[5][1] = String.valueOf(df2.format(last.getExamplesPerSecond()));
    }
    result.put("perf", perfInfo);
    // ----- Model Info -----
    String[][] modelInfo = new String[][] { { i18N.getMessage("train.overview.modeltable.modeltype"), "" }, { i18N.getMessage("train.overview.modeltable.nLayers"), "" }, { i18N.getMessage("train.overview.modeltable.nParams"), "" } };
    if (!noData) {
        Persistable p = ss.getStaticInfo(currentSessionID, StatsListener.TYPE_ID, wid);
        if (p != null) {
            StatsInitializationReport initReport = (StatsInitializationReport) p;
            int nLayers = initReport.getModelNumLayers();
            long numParams = initReport.getModelNumParams();
            String className = initReport.getModelClassName();
            // Well-known model types get a short fixed label; anything else uses its simple class name
            String modelType;
            if (className.endsWith("MultiLayerNetwork")) {
                modelType = "MultiLayerNetwork";
            } else if (className.endsWith("ComputationGraph")) {
                modelType = "ComputationGraph";
            } else {
                modelType = className;
                if (modelType.lastIndexOf('.') > 0) {
                    modelType = modelType.substring(modelType.lastIndexOf('.') + 1);
                }
            }
            modelInfo[0][1] = modelType;
            modelInfo[1][1] = String.valueOf(nLayers);
            modelInfo[2][1] = String.valueOf(numParams);
        }
    }
    result.put("model", modelInfo);
    return Results.ok(Json.toJson(result));
}
Also used : StatsInitializationReport(org.deeplearning4j.ui.stats.api.StatsInitializationReport) Persistable(org.deeplearning4j.api.storage.Persistable) StatsReport(org.deeplearning4j.ui.stats.api.StatsReport) StatsStorage(org.deeplearning4j.api.storage.StatsStorage) AtomicInteger(java.util.concurrent.atomic.AtomicInteger)

Aggregations

StatsInitializationReport (org.deeplearning4j.ui.stats.api.StatsInitializationReport)8 Persistable (org.deeplearning4j.api.storage.Persistable)7 StatsStorage (org.deeplearning4j.api.storage.StatsStorage)4 StatsReport (org.deeplearning4j.ui.stats.api.StatsReport)3 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)2 ComputationGraphConfiguration (org.deeplearning4j.nn.conf.ComputationGraphConfiguration)2 MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration)2 NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration)2 HashMap (java.util.HashMap)1 Pair (org.deeplearning4j.berkeley.Pair)1 Triple (org.deeplearning4j.berkeley.Triple)1 Updater (org.deeplearning4j.nn.conf.Updater)1 GraphVertex (org.deeplearning4j.nn.conf.graph.GraphVertex)1 LayerVertex (org.deeplearning4j.nn.conf.graph.LayerVertex)1 ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer)1 FeedForwardLayer (org.deeplearning4j.nn.conf.layers.FeedForwardLayer)1 Layer (org.deeplearning4j.nn.conf.layers.Layer)1 SubsamplingLayer (org.deeplearning4j.nn.conf.layers.SubsamplingLayer)1 WeightInit (org.deeplearning4j.nn.weights.WeightInit)1 SbeStatsInitializationReport (org.deeplearning4j.ui.stats.impl.SbeStatsInitializationReport)1