
Example 1 with ScalarStatistics

Use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.

From the class MonitoringWrapperLayer, method getMetrics:

@Nonnull
@Override
public Map<CharSequence, Object> getMetrics() {
    @Nonnull final HashMap<CharSequence, Object> map = new HashMap<>();
    map.put("class", getInner().getClass().getName());
    map.put("totalBatches", totalBatches);
    map.put("totalItems", totalItems);
    map.put("outputStatistics", forwardSignal.getMetrics());
    map.put("backpropStatistics", backwardSignal.getMetrics());
    if (verbose) {
        map.put("forwardPerformance", forwardPerformance.getMetrics());
        map.put("backwardPerformance", backwardPerformance.getMetrics());
    }
    // Scale per-batch timings to per-item figures; the factor of 1000 assumes the performance monitors record seconds.
    final double batchesPerItem = totalBatches * 1.0 / totalItems;
    map.put("avgMsPerItem", 1000 * batchesPerItem * forwardPerformance.getMean());
    map.put("medianMsPerItem", 1000 * batchesPerItem * forwardPerformance.getPercentile(0.5));
    final double backpropMean = backwardPerformance.getMean();
    final double backpropMedian = backwardPerformance.getPercentile(0.5);
    map.put("avgMsPerItem_Backward", 1000 * batchesPerItem * backpropMean);
    map.put("medianMsPerItem_Backward", 1000 * batchesPerItem * backpropMedian);
    // Aggregate every weight value across all state buffers into a single distribution.
    @Nullable final List<double[]> state = state();
    @Nonnull final ScalarStatistics statistics = new PercentileStatistics();
    for (@Nonnull final double[] s : state) {
        for (final double v : s) {
            statistics.add(v);
        }
    }
    if (statistics.getCount() > 0) {
        @Nonnull final HashMap<CharSequence, Object> weightStats = new HashMap<>();
        weightStats.put("buffers", state.size());
        weightStats.putAll(statistics.getMetrics());
        map.put("weights", weightStats);
    }
    return map;
}
Also used : Nonnull(javax.annotation.Nonnull) HashMap(java.util.HashMap) ScalarStatistics(com.simiacryptus.util.data.ScalarStatistics) JsonObject(com.google.gson.JsonObject) MonitoredObject(com.simiacryptus.util.MonitoredObject) Nullable(javax.annotation.Nullable) PercentileStatistics(com.simiacryptus.util.data.PercentileStatistics)
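The metrics map above mixes raw counters with summary statistics produced by ScalarStatistics. Below is a minimal, hypothetical sketch (not MindsEye code) of the weight-statistics portion in isolation; it relies only on the add, getCount, and getMetrics calls that appear in the method above.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.simiacryptus.util.data.PercentileStatistics;
import com.simiacryptus.util.data.ScalarStatistics;

public class WeightStatisticsSketch {
    // Hypothetical helper mirroring the "weights" block of getMetrics() above.
    public static Map<CharSequence, Object> summarize(List<double[]> state) {
        ScalarStatistics statistics = new PercentileStatistics();
        for (double[] buffer : state) {
            for (double v : buffer) {
                statistics.add(v); // accumulate every weight value
            }
        }
        HashMap<CharSequence, Object> weightStats = new HashMap<>();
        if (statistics.getCount() > 0) {
            weightStats.put("buffers", state.size());
            weightStats.putAll(statistics.getMetrics()); // summary values reported by the statistics object
        }
        return weightStats; // empty map if no weights were seen
    }
}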

Example 2 with ScalarStatistics

Use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.

From the class BatchingTester, method test:

/**
 * Test tolerance statistics.
 *
 * @param reference      the reference
 * @param inputPrototype the input prototype
 * @return the tolerance statistics
 */
@Nonnull
public ToleranceStatistics test(@Nullable final Layer reference, @Nonnull final Tensor[] inputPrototype) {
    if (null == reference)
        return new ToleranceStatistics();
    // Build a batch of randomized copies of each input prototype.
    final TensorList[] inputTensorLists = Arrays.stream(inputPrototype).map(t -> TensorArray.wrap(IntStream.range(0, getBatchSize()).mapToObj(i -> t.map(v -> getRandom())).toArray(i -> new Tensor[i]))).toArray(i -> new TensorList[i]);
    @Nonnull final SimpleResult asABatch;
    final List<SimpleEval> oneAtATime;
    try {
        asABatch = SimpleListEval.run(reference, inputTensorLists);
        oneAtATime = IntStream.range(0, getBatchSize()).mapToObj(batch -> {
            Tensor[] inputTensors = IntStream.range(0, inputTensorLists.length).mapToObj(i -> inputTensorLists[i].get(batch)).toArray(i -> new Tensor[i]);
            @Nonnull SimpleEval eval = SimpleEval.run(reference, inputTensors);
            for (@Nonnull Tensor tensor : inputTensors) {
                tensor.freeRef();
            }
            return eval;
        }).collect(Collectors.toList());
    } finally {
        for (@Nonnull TensorList tensorList : inputTensorLists) {
            tensorList.freeRef();
        }
    }
    try {
        TensorList batchOutput = asABatch.getOutput();
        // Compare each row of the batched output against the corresponding single-item evaluation.
        @Nonnull IntFunction<ToleranceStatistics> toleranceStatisticsIntFunction = batch -> {
            @Nullable Tensor batchTensor = batchOutput.get(batch);
            @Nonnull ToleranceStatistics accumulate = new ToleranceStatistics().accumulate(batchTensor.getData(), oneAtATime.get(batch).getOutput().getData());
            batchTensor.freeRef();
            return accumulate;
        };
        int batchLength = batchOutput.length();
        @Nonnull final ToleranceStatistics outputAgreement = IntStream.range(0, Math.min(getBatchSize(), batchLength)).mapToObj(toleranceStatisticsIntFunction).reduce((a, b) -> a.combine(b)).get();
        if (!(outputAgreement.absoluteTol.getMax() < tolerance)) {
            logger.info("Batch Output: " + batchOutput.stream().map(x -> {
                String str = x.prettyPrint();
                x.freeRef();
                return str;
            }).collect(Collectors.toList()));
            logger.info("Singular Output: " + oneAtATime.stream().map(x -> x.getOutput().prettyPrint()).collect(Collectors.toList()));
            throw new AssertionError("Output Corrupt: " + outputAgreement);
        }
        ToleranceStatistics derivativeAgreement = IntStream.range(0, Math.min(getBatchSize(), batchLength)).mapToObj(batch -> {
            IntFunction<ToleranceStatistics> statisticsFunction = input -> {
                @Nullable Tensor a = asABatch.getInputDerivative()[input].get(batch);
                Tensor b = oneAtATime.get(batch).getDerivative()[input];
                @Nonnull Tensor diff = a.minus(b);
                logger.info("Error: " + diff.prettyPrint());
                logger.info("Scalar Statistics: " + new ScalarStatistics().add(diff.getData()).getMetrics());
                double[][] points = Arrays.stream(diff.getData()).mapToObj(x -> new double[] { x }).toArray(i -> new double[i][]);
                // logger.info("Density: " + new DensityTree("x").setMinSplitFract(1e-8).setSplitSizeThreshold(2).new Node(points));
                diff.freeRef();
                @Nonnull ToleranceStatistics toleranceStatistics = new ToleranceStatistics().accumulate(a.getData(), b.getData());
                a.freeRef();
                return toleranceStatistics;
            };
            return IntStream.range(0, Math.min(inputPrototype.length, batchLength)).mapToObj(statisticsFunction).reduce((a, b) -> a.combine(b)).orElse(null);
        }).filter(x -> x != null).reduce((a, b) -> a.combine(b)).orElse(null);
        if (null != derivativeAgreement && !(derivativeAgreement.absoluteTol.getMax() < tolerance)) {
            throw new AssertionError("Derivatives Corrupt: " + derivativeAgreement);
        }
        return null != derivativeAgreement ? derivativeAgreement.combine(outputAgreement) : outputAgreement;
    } finally {
        asABatch.freeRef();
        oneAtATime.forEach(x -> x.freeRef());
    }
}
Also used : IntStream(java.util.stream.IntStream) SimpleResult(com.simiacryptus.mindseye.test.SimpleResult) Arrays(java.util.Arrays) Logger(org.slf4j.Logger) LoggerFactory(org.slf4j.LoggerFactory) Tensor(com.simiacryptus.mindseye.lang.Tensor) Collectors(java.util.stream.Collectors) List(java.util.List) SimpleListEval(com.simiacryptus.mindseye.test.SimpleListEval) ToleranceStatistics(com.simiacryptus.mindseye.test.ToleranceStatistics) ScalarStatistics(com.simiacryptus.util.data.ScalarStatistics) TensorList(com.simiacryptus.mindseye.lang.TensorList) Layer(com.simiacryptus.mindseye.lang.Layer) TensorArray(com.simiacryptus.mindseye.lang.TensorArray) SimpleEval(com.simiacryptus.mindseye.test.SimpleEval) NotebookOutput(com.simiacryptus.util.io.NotebookOutput) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable) IntFunction(java.util.function.IntFunction)
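The test above fails when the batched and one-at-a-time outputs disagree beyond the configured tolerance. The comparison reduces to ToleranceStatistics.accumulate and combine over paired arrays; the following hypothetical sketch (not MindsEye code) isolates that reduction for plain double[] outputs, using only the accumulate, combine, and absoluteTol.getMax() calls seen above.

import java.util.List;
import java.util.stream.IntStream;

import com.simiacryptus.mindseye.test.ToleranceStatistics;

public class BatchAgreementSketch {
    // Hypothetical helper: compare batched outputs against per-item outputs.
    public static ToleranceStatistics agreement(List<double[]> batched, List<double[]> singular, double tolerance) {
        ToleranceStatistics stats = IntStream.range(0, Math.min(batched.size(), singular.size()))
            .mapToObj(i -> new ToleranceStatistics().accumulate(batched.get(i), singular.get(i)))
            .reduce((a, b) -> a.combine(b))
            .orElse(new ToleranceStatistics());
        if (!(stats.absoluteTol.getMax() < tolerance)) {
            throw new AssertionError("Output Corrupt: " + stats);
        }
        return stats;
    }
}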

Example 3 with ScalarStatistics

Use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.

From the class SingleDerivativeTester, method testFeedback:

/**
 * Test feedback tolerance statistics.
 *
 * @param statistics      the statistics
 * @param component       the component
 * @param inputPrototype  the input prototype
 * @param outputPrototype the output prototype
 * @return the tolerance statistics
 */
@Nonnull
public ToleranceStatistics testFeedback(@Nonnull ToleranceStatistics statistics, @Nonnull Layer component, @Nonnull Tensor[] inputPrototype, @Nonnull Tensor outputPrototype) {
    Optional<ToleranceStatistics> optional = IntStream.range(0, inputPrototype.length).mapToObj(i -> {
        // measuredGradient is null when verify == false; the subtraction below assumes verification is enabled.
        @Nullable final Tensor measuredGradient = !verify ? null : measureFeedbackGradient(component, i, outputPrototype, inputPrototype);
        @Nonnull final Tensor implementedGradient = getFeedbackGradient(component, i, outputPrototype, inputPrototype);
        @Nonnull Tensor difference = measuredGradient.minus(implementedGradient);
        try {
            final ToleranceStatistics result = IntStream.range(0, null == measuredGradient ? 0 : measuredGradient.length()).mapToObj(i1 -> {
                return new ToleranceStatistics().accumulate(measuredGradient.getData()[i1], implementedGradient.getData()[i1]);
            }).reduce((a, b) -> a.combine(b)).orElse(new ToleranceStatistics());
            if (!(result.absoluteTol.getMax() < tolerance))
                throw new AssertionError(result.toString());
            // log.info(String.format("Component: %s", component));
            if (verbose) {
                log.info(String.format("Feedback for input %s", i));
                log.info(String.format("Inputs Values: %s", inputPrototype[i].prettyPrint()));
                log.info(String.format("Value Statistics: %s", new ScalarStatistics().add(inputPrototype[i].getData())));
                log.info(String.format("Implemented Feedback: %s", implementedGradient.prettyPrint()));
                log.info(String.format("Implemented Statistics: %s", new ScalarStatistics().add(implementedGradient.getData())));
                if (null != measuredGradient) {
                    log.info(String.format("Measured Feedback: %s", measuredGradient.prettyPrint()));
                    log.info(String.format("Measured Statistics: %s", new ScalarStatistics().add(measuredGradient.getData())));
                    log.info(String.format("Feedback Error: %s", difference.prettyPrint()));
                    log.info(String.format("Error Statistics: %s", new ScalarStatistics().add(difference.getData())));
                }
            }
            difference.freeRef();
            measuredGradient.freeRef();
            implementedGradient.freeRef();
            return result;
        } catch (@Nonnull final Throwable e) {
            // log.info(String.format("Component: %s", component));
            log.info(String.format("Feedback for input %s", i));
            log.info(String.format("Inputs Values: %s", inputPrototype[i].prettyPrint()));
            log.info(String.format("Value Statistics: %s", new ScalarStatistics().add(inputPrototype[i].getData())));
            log.info(String.format("Implemented Feedback: %s", implementedGradient.prettyPrint()));
            log.info(String.format("Implemented Statistics: %s", new ScalarStatistics().add(implementedGradient.getData())));
            if (null != measuredGradient) {
                log.info(String.format("Measured: %s", measuredGradient.prettyPrint()));
                log.info(String.format("Measured Statistics: %s", new ScalarStatistics().add(measuredGradient.getData())));
                log.info(String.format("Feedback Error: %s", difference.prettyPrint()));
                log.info(String.format("Error Statistics: %s", new ScalarStatistics().add(difference.getData())));
            }
            measuredGradient.freeRef();
            implementedGradient.freeRef();
            difference.freeRef();
            throw e;
        }
    }).reduce((a, b) -> a.combine(b));
    if (!optional.isPresent())
        return statistics;
    return statistics.combine(optional.orElse(null));
}
Also used : IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) Logger(org.slf4j.Logger) LoggerFactory(org.slf4j.LoggerFactory) Tensor(com.simiacryptus.mindseye.lang.Tensor) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) DoubleBuffer(com.simiacryptus.mindseye.lang.DoubleBuffer) Result(com.simiacryptus.mindseye.lang.Result) Collectors(java.util.stream.Collectors) Delta(com.simiacryptus.mindseye.lang.Delta) List(java.util.List) ConstantResult(com.simiacryptus.mindseye.lang.ConstantResult) ToleranceStatistics(com.simiacryptus.mindseye.test.ToleranceStatistics) ScalarStatistics(com.simiacryptus.util.data.ScalarStatistics) TensorList(com.simiacryptus.mindseye.lang.TensorList) PlaceholderLayer(com.simiacryptus.mindseye.layers.java.PlaceholderLayer) Layer(com.simiacryptus.mindseye.lang.Layer) Optional(java.util.Optional) TensorArray(com.simiacryptus.mindseye.lang.TensorArray) DeltaSet(com.simiacryptus.mindseye.lang.DeltaSet) SimpleEval(com.simiacryptus.mindseye.test.SimpleEval) NotebookOutput(com.simiacryptus.util.io.NotebookOutput) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable)
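testFeedback reduces the measured and implemented gradients element by element via ToleranceStatistics.accumulate(double, double) and logs ScalarStatistics of the error when something goes wrong; the same reduction appears in testLearning in Example 4 below. The following is a minimal, hypothetical sketch (not MindsEye code) of that element-wise comparison, assuming both gradients are available as double[].

import java.util.stream.IntStream;

import com.simiacryptus.mindseye.test.ToleranceStatistics;
import com.simiacryptus.util.data.ScalarStatistics;

public class GradientAgreementSketch {
    // Hypothetical helper: element-wise comparison of a measured vs. an implemented gradient.
    public static ToleranceStatistics compare(double[] measured, double[] implemented, double tolerance) {
        ToleranceStatistics result = IntStream.range(0, measured.length)
            .mapToObj(i -> new ToleranceStatistics().accumulate(measured[i], implemented[i]))
            .reduce((a, b) -> a.combine(b))
            .orElse(new ToleranceStatistics());
        if (!(result.absoluteTol.getMax() < tolerance)) {
            // The tester above logs the per-element error and its ScalarStatistics before failing.
            double[] error = new double[measured.length];
            for (int i = 0; i < error.length; i++) error[i] = measured[i] - implemented[i];
            throw new AssertionError(result + " : " + new ScalarStatistics().add(error).getMetrics());
        }
        return result;
    }
}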

Example 4 with ScalarStatistics

Use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.

From the class SingleDerivativeTester, method testLearning:

/**
 * Test learning tolerance statistics.
 *
 * @param prev            the prev
 * @param component       the component
 * @param inputPrototype  the input prototype
 * @param outputPrototype the output prototype
 * @return the tolerance statistics
 */
public ToleranceStatistics testLearning(@Nonnull ToleranceStatistics prev, @Nonnull Layer component, Tensor[] inputPrototype, @Nonnull Tensor outputPrototype) {
    return IntStream.range(0, component.state().size()).mapToObj(i -> {
        // measuredGradient is null when verify == false; the subtraction below assumes verification is enabled.
        @Nullable final Tensor measuredGradient = !verify ? null : measureLearningGradient(component, i, outputPrototype, inputPrototype);
        @Nonnull final Tensor implementedGradient = getLearningGradient(component, i, outputPrototype, inputPrototype);
        @Nonnull Tensor difference = measuredGradient.minus(implementedGradient);
        try {
            final ToleranceStatistics result = IntStream.range(0, null == measuredGradient ? 0 : measuredGradient.length()).mapToObj(i1 -> {
                return new ToleranceStatistics().accumulate(measuredGradient.getData()[i1], implementedGradient.getData()[i1]);
            }).reduce((a, b) -> a.combine(b)).orElse(new ToleranceStatistics());
            if (!(result.absoluteTol.getMax() < tolerance)) {
                throw new AssertionError(result.toString());
            } else {
                // log.info(String.format("Component: %s", component));
                if (verbose) {
                    log.info(String.format("Learning Gradient for weight setByCoord %s", i));
                    log.info(String.format("Weights: %s", Tensor.prettyPrint(component.state().get(i))));
                    log.info(String.format("Implemented Gradient: %s", implementedGradient.prettyPrint()));
                    log.info(String.format("Implemented Statistics: %s", new ScalarStatistics().add(implementedGradient.getData())));
                    if (null != measuredGradient) {
                        log.info(String.format("Measured Gradient: %s", measuredGradient.prettyPrint()));
                        log.info(String.format("Measured Statistics: %s", new ScalarStatistics().add(measuredGradient.getData())));
                        log.info(String.format("Gradient Error: %s", difference.prettyPrint()));
                        log.info(String.format("Error Statistics: %s", new ScalarStatistics().add(difference.getData())));
                    }
                }
                difference.freeRef();
                return result;
            }
        } catch (@Nonnull final Throwable e) {
            // log.info(String.format("Component: %s", component));
            log.info(String.format("Learning Gradient for weight setByCoord %s", i));
            log.info(String.format("Implemented Gradient: %s", implementedGradient.prettyPrint()));
            log.info(String.format("Implemented Statistics: %s", new ScalarStatistics().add(implementedGradient.getData())));
            if (null != measuredGradient) {
                log.info(String.format("Measured Gradient: %s", measuredGradient.prettyPrint()));
                log.info(String.format("Measured Statistics: %s", new ScalarStatistics().add(measuredGradient.getData())));
                log.info(String.format("Gradient Error: %s", difference.prettyPrint()));
                log.info(String.format("Error Statistics: %s", new ScalarStatistics().add(difference.getData())));
            }
            difference.freeRef();
            throw e;
        } finally {
            measuredGradient.freeRef();
            implementedGradient.freeRef();
        }
    }).reduce((a, b) -> a.combine(b)).map(x -> x.combine(prev)).orElseGet(() -> prev);
}
Also used : IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) Logger(org.slf4j.Logger) LoggerFactory(org.slf4j.LoggerFactory) Tensor(com.simiacryptus.mindseye.lang.Tensor) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) DoubleBuffer(com.simiacryptus.mindseye.lang.DoubleBuffer) Result(com.simiacryptus.mindseye.lang.Result) Collectors(java.util.stream.Collectors) Delta(com.simiacryptus.mindseye.lang.Delta) List(java.util.List) ConstantResult(com.simiacryptus.mindseye.lang.ConstantResult) ToleranceStatistics(com.simiacryptus.mindseye.test.ToleranceStatistics) ScalarStatistics(com.simiacryptus.util.data.ScalarStatistics) TensorList(com.simiacryptus.mindseye.lang.TensorList) PlaceholderLayer(com.simiacryptus.mindseye.layers.java.PlaceholderLayer) Layer(com.simiacryptus.mindseye.lang.Layer) Optional(java.util.Optional) TensorArray(com.simiacryptus.mindseye.lang.TensorArray) DeltaSet(com.simiacryptus.mindseye.lang.DeltaSet) SimpleEval(com.simiacryptus.mindseye.test.SimpleEval) NotebookOutput(com.simiacryptus.util.io.NotebookOutput) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable)

Example 5 with ScalarStatistics

Use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.

From the class StyleTransfer, method measureStyle:

/**
 * Measure style neural setup.
 *
 * @param style the style
 * @return the neural setup
 */
public NeuralSetup measureStyle(final StyleSetup<T> style) {
    NeuralSetup<T> self = new NeuralSetup<>(style);
    List<CharSequence> keyList = style.styleImages.keySet().stream().collect(Collectors.toList());
    Tensor contentInput = Tensor.fromRGB(style.contentImage);
    List<Tensor> styleInputs = keyList.stream().map(x -> style.styleImages.get(x)).map(img -> Tensor.fromRGB(img)).collect(Collectors.toList());
    IntStream.range(0, keyList.size()).forEach(i -> {
        self.styleTargets.put(keyList.get(i), new StyleTarget());
    });
    self.contentTarget = new ContentTarget();
    for (final T layerType : getLayerTypes()) {
        System.gc();
        final PipelineNetwork network = layerType.texture();
        ArtistryUtil.setPrecision(network, style.precision);
        Tensor content = network.eval(contentInput).getDataAndFree().getAndFree(0);
        self.contentTarget.content.put(layerType, content);
        logger.info(String.format("%s : target content = %s", layerType.name(), content.prettyPrint()));
        logger.info(String.format("%s : content statistics = %s", layerType.name(), JsonUtil.toJson(new ScalarStatistics().add(content.getData()).getMetrics())));
        for (int i = 0; i < styleInputs.size(); i++) {
            Tensor styleInput = styleInputs.get(i);
            CharSequence key = keyList.get(i);
            StyleTarget<T> styleTarget = self.styleTargets.get(key);
            if (0 == self.style.styles.entrySet().stream().filter(e1 -> e1.getKey().contains(key)).map(x -> (LayerStyleParams) x.getValue().params.get(layerType)).filter(x -> null != x).filter(x -> x.mean != 0 || x.cov != 0).count())
                continue;
            System.gc();
            Tensor mean = ArtistryUtil.wrapTilesAvg(ArtistryUtil.avg(network.copy())).eval(styleInput).getDataAndFree().getAndFree(0);
            styleTarget.mean.put(layerType, mean);
            logger.info(String.format("%s : style mean = %s", layerType.name(), mean.prettyPrint()));
            logger.info(String.format("%s : mean statistics = %s", layerType.name(), JsonUtil.toJson(new ScalarStatistics().add(mean.getData()).getMetrics())));
            if (0 == self.style.styles.entrySet().stream().filter(e1 -> e1.getKey().contains(key)).map(x -> (LayerStyleParams) x.getValue().params.get(layerType)).filter(x -> null != x).filter(x -> x.cov != 0).count())
                continue;
            System.gc();
            // cov0: Gram statistics of the raw features; cov1: Gram statistics computed against the style mean.
            Tensor cov0 = ArtistryUtil.wrapTilesAvg(ArtistryUtil.gram(network.copy())).eval(styleInput).getDataAndFree().getAndFree(0);
            Tensor cov1 = ArtistryUtil.wrapTilesAvg(ArtistryUtil.gram(network.copy(), mean)).eval(styleInput).getDataAndFree().getAndFree(0);
            styleTarget.cov0.put(layerType, cov0);
            styleTarget.cov1.put(layerType, cov1);
            int featureBands = mean.getDimensions()[2];
            int covarianceElements = cov1.getDimensions()[2];
            int selectedBands = covarianceElements / featureBands;
            logger.info(String.format("%s : target cov0 = %s", layerType.name(), cov0.reshapeCast(featureBands, selectedBands, 1).prettyPrint()));
            logger.info(String.format("%s : cov0 statistics = %s", layerType.name(), JsonUtil.toJson(new ScalarStatistics().add(cov0.getData()).getMetrics())));
            logger.info(String.format("%s : target cov1 = %s", layerType.name(), cov1.reshapeCast(featureBands, selectedBands, 1).prettyPrint()));
            logger.info(String.format("%s : cov1 statistics = %s", layerType.name(), JsonUtil.toJson(new ScalarStatistics().add(cov1.getData()).getMetrics())));
        }
    }
    return self;
}
Also used : PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) TrustRegion(com.simiacryptus.mindseye.opt.region.TrustRegion) MeanSqLossLayer(com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer) LoggerFactory(org.slf4j.LoggerFactory) Tensor(com.simiacryptus.mindseye.lang.Tensor) TrustRegionStrategy(com.simiacryptus.mindseye.opt.orient.TrustRegionStrategy) HashMap(java.util.HashMap) NullNotebookOutput(com.simiacryptus.util.io.NullNotebookOutput) MultiLayerImageNetwork(com.simiacryptus.mindseye.models.MultiLayerImageNetwork) ArrayList(java.util.ArrayList) JsonUtil(com.simiacryptus.util.io.JsonUtil) Trainable(com.simiacryptus.mindseye.eval.Trainable) Precision(com.simiacryptus.mindseye.lang.cudnn.Precision) Tuple2(com.simiacryptus.util.lang.Tuple2) Map(java.util.Map) Layer(com.simiacryptus.mindseye.lang.Layer) GateBiasLayer(com.simiacryptus.mindseye.layers.cudnn.GateBiasLayer) StepRecord(com.simiacryptus.mindseye.test.StepRecord) NotebookOutput(com.simiacryptus.util.io.NotebookOutput) IterativeTrainer(com.simiacryptus.mindseye.opt.IterativeTrainer) Nonnull(javax.annotation.Nonnull) Logger(org.slf4j.Logger) BufferedImage(java.awt.image.BufferedImage) ValueLayer(com.simiacryptus.mindseye.layers.cudnn.ValueLayer) TestUtil(com.simiacryptus.mindseye.test.TestUtil) UUID(java.util.UUID) DAGNode(com.simiacryptus.mindseye.network.DAGNode) Collectors(java.util.stream.Collectors) BandAvgReducerLayer(com.simiacryptus.mindseye.layers.cudnn.BandAvgReducerLayer) StreamNanoHTTPD(com.simiacryptus.util.StreamNanoHTTPD) TimeUnit(java.util.concurrent.TimeUnit) BisectionSearch(com.simiacryptus.mindseye.opt.line.BisectionSearch) List(java.util.List) GramianLayer(com.simiacryptus.mindseye.layers.cudnn.GramianLayer) Stream(java.util.stream.Stream) ArrayTrainable(com.simiacryptus.mindseye.eval.ArrayTrainable) BinarySumLayer(com.simiacryptus.mindseye.layers.cudnn.BinarySumLayer) InnerNode(com.simiacryptus.mindseye.network.InnerNode) ScalarStatistics(com.simiacryptus.util.data.ScalarStatistics) MultiLayerVGG16(com.simiacryptus.mindseye.models.MultiLayerVGG16) RangeConstraint(com.simiacryptus.mindseye.opt.region.RangeConstraint) LayerEnum(com.simiacryptus.mindseye.models.LayerEnum) MultiLayerVGG19(com.simiacryptus.mindseye.models.MultiLayerVGG19)
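measureStyle logs every statistics map as JSON through JsonUtil.toJson(new ScalarStatistics().add(data).getMetrics()), the same chain used for the content, mean, and covariance tensors above. Below is a minimal, hypothetical sketch (not MindsEye code) of that logging idiom.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.simiacryptus.util.data.ScalarStatistics;
import com.simiacryptus.util.io.JsonUtil;

public class StyleStatisticsLoggingSketch {
    private static final Logger logger = LoggerFactory.getLogger(StyleStatisticsLoggingSketch.class);

    // Hypothetical helper: log summary statistics of a flattened tensor as JSON.
    public static void logStatistics(String label, double[] data) {
        logger.info(String.format("%s statistics = %s", label,
            JsonUtil.toJson(new ScalarStatistics().add(data).getMetrics())));
    }
}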

Aggregations

ScalarStatistics (com.simiacryptus.util.data.ScalarStatistics)12 Nonnull (javax.annotation.Nonnull)12 Tensor (com.simiacryptus.mindseye.lang.Tensor)11 NotebookOutput (com.simiacryptus.util.io.NotebookOutput)11 Arrays (java.util.Arrays)11 List (java.util.List)11 Nullable (javax.annotation.Nullable)11 Layer (com.simiacryptus.mindseye.lang.Layer)10 Collectors (java.util.stream.Collectors)10 IntStream (java.util.stream.IntStream)10 Logger (org.slf4j.Logger)10 LoggerFactory (org.slf4j.LoggerFactory)10 TensorArray (com.simiacryptus.mindseye.lang.TensorArray)7 TensorList (com.simiacryptus.mindseye.lang.TensorList)7 SimpleEval (com.simiacryptus.mindseye.test.SimpleEval)7 ToleranceStatistics (com.simiacryptus.mindseye.test.ToleranceStatistics)7 ConstantResult (com.simiacryptus.mindseye.lang.ConstantResult)6 Delta (com.simiacryptus.mindseye.lang.Delta)6 DeltaSet (com.simiacryptus.mindseye.lang.DeltaSet)6 DoubleBuffer (com.simiacryptus.mindseye.lang.DoubleBuffer)6