Example 1 with Tuple2

Use of com.simiacryptus.util.lang.Tuple2 in project MindsEye by SimiaCryptus.

From the class MaxPoolingLayer, method eval:

@Nonnull
@Override
public Result eval(@Nonnull final Result... inObj) {
    Arrays.stream(inObj).forEach(nnResult -> nnResult.addRef());
    final Result in = inObj[0];
    in.getData().length();
    @Nonnull final int[] inputDims = in.getData().getDimensions();
    final List<Tuple2<Integer, int[]>> regions = MaxPoolingLayer.calcRegionsCache.apply(new MaxPoolingLayer.CalcRegionsParameter(inputDims, kernelDims));
    final Tensor[] outputA = IntStream.range(0, in.getData().length()).mapToObj(dataIndex -> {
        final int[] newDims = IntStream.range(0, inputDims.length).map(i -> {
            return (int) Math.ceil(inputDims[i] * 1.0 / kernelDims[i]);
        }).toArray();
        @Nonnull final Tensor output = new Tensor(newDims);
        return output;
    }).toArray(i -> new Tensor[i]);
    Arrays.stream(outputA).mapToInt(x -> x.length()).sum();
    @Nonnull final int[][] gradientMapA = new int[in.getData().length()][];
    IntStream.range(0, in.getData().length()).forEach(dataIndex -> {
        @Nullable final Tensor input = in.getData().get(dataIndex);
        final Tensor output = outputA[dataIndex];
        @Nonnull final IntToDoubleFunction keyExtractor = inputCoords -> input.get(inputCoords);
        @Nonnull final int[] gradientMap = new int[input.length()];
        regions.parallelStream().forEach(tuple -> {
            final Integer from = tuple.getFirst();
            final int[] toList = tuple.getSecond();
            int toMax = -1;
            double bestValue = Double.NEGATIVE_INFINITY;
            for (final int c : toList) {
                final double value = keyExtractor.applyAsDouble(c);
                if (-1 == toMax || bestValue < value) {
                    bestValue = value;
                    toMax = c;
                }
            }
            gradientMap[from] = toMax;
            output.set(from, input.get(toMax));
        });
        input.freeRef();
        gradientMapA[dataIndex] = gradientMap;
    });
    return new Result(TensorArray.wrap(outputA), (@Nonnull final DeltaSet<Layer> buffer, @Nonnull final TensorList data) -> {
        if (in.isAlive()) {
            @Nonnull TensorArray tensorArray = TensorArray.wrap(IntStream.range(0, in.getData().length()).parallel().mapToObj(dataIndex -> {
                @Nonnull final Tensor backSignal = new Tensor(inputDims);
                final int[] ints = gradientMapA[dataIndex];
                @Nullable final Tensor datum = data.get(dataIndex);
                for (int i = 0; i < datum.length(); i++) {
                    backSignal.add(ints[i], datum.get(i));
                }
                datum.freeRef();
                return backSignal;
            }).toArray(i -> new Tensor[i]));
            in.accumulate(buffer, tensorArray);
        }
    }) {

        @Override
        protected void _free() {
            Arrays.stream(inObj).forEach(nnResult -> nnResult.freeRef());
        }

        @Override
        public boolean isAlive() {
            return in.isAlive();
        }
    };
}
Also used : IntStream(java.util.stream.IntStream) JsonObject(com.google.gson.JsonObject) Util(com.simiacryptus.util.Util) Arrays(java.util.Arrays) Logger(org.slf4j.Logger) IntToDoubleFunction(java.util.function.IntToDoubleFunction) LoggerFactory(org.slf4j.LoggerFactory) Tensor(com.simiacryptus.mindseye.lang.Tensor) Result(com.simiacryptus.mindseye.lang.Result) Function(java.util.function.Function) Collectors(java.util.stream.Collectors) DataSerializer(com.simiacryptus.mindseye.lang.DataSerializer) JsonUtil(com.simiacryptus.util.io.JsonUtil) Tuple2(com.simiacryptus.util.lang.Tuple2) List(java.util.List) LayerBase(com.simiacryptus.mindseye.lang.LayerBase) TensorList(com.simiacryptus.mindseye.lang.TensorList) Map(java.util.Map) Layer(com.simiacryptus.mindseye.lang.Layer) TensorArray(com.simiacryptus.mindseye.lang.TensorArray) DeltaSet(com.simiacryptus.mindseye.lang.DeltaSet) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable)
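
In this example, each Tuple2<Integer, int[]> pairs an output cell index (getFirst) with the input cell indices of its pooling window (getSecond). The stand-alone sketch below is not part of MindsEye; it assumes only the two-argument Tuple2 constructor used in the later examples and the getFirst()/getSecond() accessors used above, and replays the max-selection loop on a tiny hypothetical input.

import java.util.ArrayList;
import java.util.List;
import com.simiacryptus.util.lang.Tuple2;

public class MaxPoolRegionSketch {

    public static void main(String[] args) {
        // Hypothetical 1D input of four values, pooled with a kernel of size 2.
        double[] input = { 0.3, 0.9, 0.5, 0.1 };
        // Each tuple: (output index, candidate input indices), mirroring calcRegionsCache.
        List<Tuple2<Integer, int[]>> regions = new ArrayList<>();
        regions.add(new Tuple2<>(0, new int[] { 0, 1 }));
        regions.add(new Tuple2<>(1, new int[] { 2, 3 }));
        double[] output = new double[regions.size()];
        for (Tuple2<Integer, int[]> region : regions) {
            int from = region.getFirst();
            double best = Double.NEGATIVE_INFINITY;
            for (int c : region.getSecond()) {
                best = Math.max(best, input[c]);
            }
            // Picks 0.9 for window {0, 1} and 0.5 for window {2, 3}.
            output[from] = best;
        }
        System.out.println(java.util.Arrays.toString(output));
    }
}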

Example 2 with Tuple2

Use of com.simiacryptus.util.lang.Tuple2 in project MindsEye by SimiaCryptus.

From the class DeepDream, method getContentComponents:

/**
 * Gets content components.
 *
 * @param setup   the setup
 * @param nodeMap the node map
 * @return the content components
 */
@Nonnull
public ArrayList<Tuple2<Double, DAGNode>> getContentComponents(NeuralSetup<T> setup, final Map<T, DAGNode> nodeMap) {
    ArrayList<Tuple2<Double, DAGNode>> contentComponents = new ArrayList<>();
    for (final T layerType : getLayerTypes()) {
        final DAGNode node = nodeMap.get(layerType);
        if (setup.style.coefficients.containsKey(layerType)) {
            final double coeff_content = setup.style.coefficients.get(layerType).rms;
            DAGNetwork network = node.getNetwork();
            contentComponents.add(new Tuple2<>(coeff_content, network.wrap(new MeanSqLossLayer(), node, network.wrap(new ValueLayer(setup.contentTarget.content.get(layerType))))));
            final double coeff_gain = setup.style.coefficients.get(layerType).gain;
            contentComponents.add(new Tuple2<>(-coeff_gain, network.wrap(new AvgReducerLayer(), network.wrap(new SquareActivationLayer(), node))));
        }
    }
    return contentComponents;
}
Also used : Tuple2(com.simiacryptus.util.lang.Tuple2) AvgReducerLayer(com.simiacryptus.mindseye.layers.cudnn.AvgReducerLayer) ArrayList(java.util.ArrayList) ValueLayer(com.simiacryptus.mindseye.layers.cudnn.ValueLayer) SquareActivationLayer(com.simiacryptus.mindseye.layers.cudnn.SquareActivationLayer) DAGNetwork(com.simiacryptus.mindseye.network.DAGNetwork) DAGNode(com.simiacryptus.mindseye.network.DAGNode) MeanSqLossLayer(com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer) Nonnull(javax.annotation.Nonnull)
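
Each tuple returned by getContentComponents pairs a scalar weight (coeff_content, or the negated gain) with the DAGNode that computes the corresponding term. A minimal, hypothetical consumer of such a list, continuing from the contentComponents variable above and using only the getFirst()/getSecond() accessors seen in Example 1, might look like this:

for (Tuple2<Double, DAGNode> component : contentComponents) {
    final double weight = component.getFirst();   // coeff_content or -coeff_gain
    final DAGNode term = component.getSecond();   // node computing that loss term
    // combine weight and term into the overall objective here
}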

Example 3 with Tuple2

Use of com.simiacryptus.util.lang.Tuple2 in project MindsEye by SimiaCryptus.

From the class StyleTransfer, method getStyleComponents:

/**
 * Gets style components.
 *
 * @param node          the node
 * @param network       the network
 * @param styleParams   the style params
 * @param mean          the mean
 * @param covariance    the covariance
 * @param centeringMode the centering mode
 * @return the style components
 */
@Nonnull
public ArrayList<Tuple2<Double, DAGNode>> getStyleComponents(final DAGNode node, final PipelineNetwork network, final LayerStyleParams styleParams, final Tensor mean, final Tensor covariance, final CenteringMode centeringMode) {
    ArrayList<Tuple2<Double, DAGNode>> styleComponents = new ArrayList<>();
    if (null != styleParams && (styleParams.cov != 0 || styleParams.mean != 0)) {
        double meanRms = mean.rms();
        double meanScale = 0 == meanRms ? 1 : (1.0 / meanRms);
        InnerNode negTarget = network.wrap(new ValueLayer(mean.scale(-1)), new DAGNode[] {});
        InnerNode negAvg = network.wrap(new BandAvgReducerLayer().setAlpha(-1), node);
        if (styleParams.cov != 0) {
            DAGNode recentered;
            switch(centeringMode) {
                case Origin:
                    recentered = node;
                    break;
                case Dynamic:
                    recentered = network.wrap(new GateBiasLayer(), node, negAvg);
                    break;
                case Static:
                    recentered = network.wrap(new GateBiasLayer(), node, negTarget);
                    break;
                default:
                    throw new RuntimeException();
            }
            int[] covDim = covariance.getDimensions();
            assert 0 < covDim[2] : Arrays.toString(covDim);
            int inputBands = mean.getDimensions()[2];
            assert 0 < inputBands : Arrays.toString(mean.getDimensions());
            int outputBands = covDim[2] / inputBands;
            assert 0 < outputBands : Arrays.toString(covDim) + " / " + inputBands;
            double covRms = covariance.rms();
            double covScale = 0 == covRms ? 1 : (1.0 / covRms);
            styleComponents.add(new Tuple2<>(styleParams.cov, network.wrap(new MeanSqLossLayer().setAlpha(covScale), network.wrap(new ValueLayer(covariance), new DAGNode[] {}), network.wrap(ArtistryUtil.wrapTilesAvg(new GramianLayer()), recentered))));
        }
        if (styleParams.mean != 0) {
            styleComponents.add(new Tuple2<>(styleParams.mean, network.wrap(new MeanSqLossLayer().setAlpha(meanScale), negAvg, negTarget)));
        }
    }
    return styleComponents;
}
Also used : ArrayList(java.util.ArrayList) ValueLayer(com.simiacryptus.mindseye.layers.cudnn.ValueLayer) DAGNode(com.simiacryptus.mindseye.network.DAGNode) RangeConstraint(com.simiacryptus.mindseye.opt.region.RangeConstraint) MeanSqLossLayer(com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer) InnerNode(com.simiacryptus.mindseye.network.InnerNode) GramianLayer(com.simiacryptus.mindseye.layers.cudnn.GramianLayer) Tuple2(com.simiacryptus.util.lang.Tuple2) BandAvgReducerLayer(com.simiacryptus.mindseye.layers.cudnn.BandAvgReducerLayer) GateBiasLayer(com.simiacryptus.mindseye.layers.cudnn.GateBiasLayer) Nonnull(javax.annotation.Nonnull)

Example 4 with Tuple2

Use of com.simiacryptus.util.lang.Tuple2 in project MindsEye by SimiaCryptus.

From the class PerformanceTester, method testPerformance:

/**
 * Tests forward and backward (learning) performance of a layer.
 *
 * @param component      the component
 * @param inputPrototype the input prototype
 * @return a tuple of (forward evaluation time, backpropagation time), both in seconds
 */
@Nonnull
protected Tuple2<Double, Double> testPerformance(@Nonnull final Layer component, final Tensor... inputPrototype) {
    final Tensor[][] data = IntStream.range(0, batches).mapToObj(x -> x).flatMap(x -> Stream.<Tensor[]>of(inputPrototype)).toArray(i -> new Tensor[i][]);
    @Nonnull TimedResult<Result> timedEval = TimedResult.time(() -> {
        Result[] input = ConstantResult.batchResultArray(data);
        @Nullable Result result;
        try {
            result = component.eval(input);
        } finally {
            for (@Nonnull Result nnResult : input) {
                nnResult.freeRef();
                nnResult.getData().freeRef();
            }
        }
        return result;
    });
    final Result result = timedEval.result;
    @Nonnull final DeltaSet<Layer> buffer = new DeltaSet<Layer>();
    try {
        long timedBackprop = TimedResult.time(() -> {
            @Nonnull TensorArray tensorArray = TensorArray.wrap(result.getData().stream().map(x -> {
                return x.mapAndFree(v -> 1.0);
            }).toArray(i -> new Tensor[i]));
            result.accumulate(buffer, tensorArray);
            assert tensorArray.currentRefCount() == 0;
            return buffer;
        }).timeNanos;
        return new Tuple2<>(timedEval.timeNanos / 1e9, timedBackprop / 1e9);
    } finally {
        buffer.freeRef();
        result.freeRef();
        result.getData().freeRef();
    }
}
Also used : IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) Logger(org.slf4j.Logger) DoubleStatistics(com.simiacryptus.util.data.DoubleStatistics) LoggerFactory(org.slf4j.LoggerFactory) Tensor(com.simiacryptus.mindseye.lang.Tensor) TestUtil(com.simiacryptus.mindseye.test.TestUtil) Result(com.simiacryptus.mindseye.lang.Result) Collectors(java.util.stream.Collectors) Tuple2(com.simiacryptus.util.lang.Tuple2) List(java.util.List) Stream(java.util.stream.Stream) ConstantResult(com.simiacryptus.mindseye.lang.ConstantResult) ToleranceStatistics(com.simiacryptus.mindseye.test.ToleranceStatistics) TimedResult(com.simiacryptus.util.lang.TimedResult) Layer(com.simiacryptus.mindseye.lang.Layer) TensorArray(com.simiacryptus.mindseye.lang.TensorArray) DAGNetwork(com.simiacryptus.mindseye.network.DAGNetwork) DeltaSet(com.simiacryptus.mindseye.lang.DeltaSet) NotebookOutput(com.simiacryptus.util.io.NotebookOutput) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable)
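
The Tuple2<Double, Double> returned here carries the forward (eval) time as its first element and the backward (accumulate) time as its second, both converted from nanoseconds to seconds. A hypothetical caller, not taken from MindsEye and assumed to live inside a PerformanceTester subclass because the method is protected, could unpack the result like this:

final Tuple2<Double, Double> timing = testPerformance(component, inputPrototype);
final double evalSeconds = timing.getFirst();       // timedEval.timeNanos / 1e9
final double backpropSeconds = timing.getSecond();  // timedBackprop / 1e9
System.out.printf("eval: %.6f s, backprop: %.6f s%n", evalSeconds, backpropSeconds);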

Example 5 with Tuple2

Use of com.simiacryptus.util.lang.Tuple2 in project MindsEye by SimiaCryptus.

From the class StyleTransfer, method getContentComponents:

/**
 * Gets content components.
 *
 * @param setup   the setup
 * @param nodeMap the node map
 * @return the content components
 */
@Nonnull
public ArrayList<Tuple2<Double, DAGNode>> getContentComponents(NeuralSetup<T> setup, final Map<T, DAGNode> nodeMap) {
    ArrayList<Tuple2<Double, DAGNode>> contentComponents = new ArrayList<>();
    for (final T layerType : getLayerTypes()) {
        final DAGNode node = nodeMap.get(layerType);
        final double coeff_content = !setup.style.content.params.containsKey(layerType) ? 0 : setup.style.content.params.get(layerType);
        final PipelineNetwork network1 = (PipelineNetwork) node.getNetwork();
        if (coeff_content != 0) {
            Tensor content = setup.contentTarget.content.get(layerType);
            contentComponents.add(new Tuple2<>(coeff_content, network1.wrap(new MeanSqLossLayer().setAlpha(1.0 / content.rms()), node, network1.wrap(new ValueLayer(content), new DAGNode[] {}))));
        }
    }
    return contentComponents;
}
Also used : Tensor(com.simiacryptus.mindseye.lang.Tensor) Tuple2(com.simiacryptus.util.lang.Tuple2) ArrayList(java.util.ArrayList) ValueLayer(com.simiacryptus.mindseye.layers.cudnn.ValueLayer) PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) DAGNode(com.simiacryptus.mindseye.network.DAGNode) MeanSqLossLayer(com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer) Nonnull(javax.annotation.Nonnull)

Aggregations

Tuple2 (com.simiacryptus.util.lang.Tuple2): 7
Nonnull (javax.annotation.Nonnull): 7
Tensor (com.simiacryptus.mindseye.lang.Tensor): 5
DeltaSet (com.simiacryptus.mindseye.lang.DeltaSet): 4
Layer (com.simiacryptus.mindseye.lang.Layer): 4
Result (com.simiacryptus.mindseye.lang.Result): 4
TensorArray (com.simiacryptus.mindseye.lang.TensorArray): 4
Arrays (java.util.Arrays): 4
List (java.util.List): 4
Collectors (java.util.stream.Collectors): 4
IntStream (java.util.stream.IntStream): 4
Nullable (javax.annotation.Nullable): 4
Logger (org.slf4j.Logger): 4
LoggerFactory (org.slf4j.LoggerFactory): 4
MeanSqLossLayer (com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer): 3
ValueLayer (com.simiacryptus.mindseye.layers.cudnn.ValueLayer): 3
DAGNetwork (com.simiacryptus.mindseye.network.DAGNetwork): 3
DAGNode (com.simiacryptus.mindseye.network.DAGNode): 3
ArrayList (java.util.ArrayList): 3
JsonObject (com.google.gson.JsonObject): 2