Search in sources :

Example 1 with MeanSqLossLayer

use of com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer in project MindsEye by SimiaCryptus.

From the class DeepDream, method getContentComponents:

/**
 * Builds the weighted loss terms that make up the content objective.
 * <p>
 * For every layer type that has a coefficient configured, two terms are
 * emitted: a mean-squared error against the captured content target
 * (weighted by {@code rms}) and a negated mean-square activation term
 * (weighted by {@code -gain}) that rewards amplifying the layer's response.
 *
 * @param setup   the setup holding style coefficients and content targets
 * @param nodeMap the map from layer type to its node in the network
 * @return the list of (weight, loss-node) pairs for the content objective
 */
@Nonnull
public ArrayList<Tuple2<Double, DAGNode>> getContentComponents(NeuralSetup<T> setup, final Map<T, DAGNode> nodeMap) {
    ArrayList<Tuple2<Double, DAGNode>> components = new ArrayList<>();
    for (final T layerType : getLayerTypes()) {
        final DAGNode node = nodeMap.get(layerType);
        // Layers without a configured coefficient contribute nothing.
        if (!setup.style.coefficients.containsKey(layerType)) {
            continue;
        }
        DAGNetwork network = node.getNetwork();
        // MSE against the captured content target, weighted by the rms coefficient.
        final double rmsWeight = setup.style.coefficients.get(layerType).rms;
        DAGNode targetNode = network.wrap(new ValueLayer(setup.contentTarget.content.get(layerType)));
        components.add(new Tuple2<>(rmsWeight, network.wrap(new MeanSqLossLayer(), node, targetNode)));
        // Mean squared activation, negated so that maximizing it amplifies the layer (DeepDream gain).
        final double gainWeight = setup.style.coefficients.get(layerType).gain;
        DAGNode meanSquare = network.wrap(new AvgReducerLayer(), network.wrap(new SquareActivationLayer(), node));
        components.add(new Tuple2<>(-gainWeight, meanSquare));
    }
    return components;
}
Also used : Tuple2(com.simiacryptus.util.lang.Tuple2) AvgReducerLayer(com.simiacryptus.mindseye.layers.cudnn.AvgReducerLayer) ArrayList(java.util.ArrayList) ValueLayer(com.simiacryptus.mindseye.layers.cudnn.ValueLayer) SquareActivationLayer(com.simiacryptus.mindseye.layers.cudnn.SquareActivationLayer) DAGNetwork(com.simiacryptus.mindseye.network.DAGNetwork) DAGNode(com.simiacryptus.mindseye.network.DAGNode) MeanSqLossLayer(com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer) Nonnull(javax.annotation.Nonnull)

Example 2 with MeanSqLossLayer

use of com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer in project MindsEye by SimiaCryptus.

From the class StyleTransfer, method getStyleComponents:

/**
 * Builds the weighted loss terms that make up the style objective for one layer.
 * <p>
 * Emits up to two terms: a covariance (Gramian) match weighted by
 * {@code styleParams.cov} and a per-band mean match weighted by
 * {@code styleParams.mean}. Each term is scaled by the reciprocal rms of its
 * target tensor so layers with different magnitudes contribute comparably.
 *
 * @param node          the node producing this layer's features
 * @param network       the network to append the loss sub-graph to
 * @param styleParams   the style weights; may be null or all-zero, in which case no terms are emitted
 * @param mean          the target per-band mean tensor
 * @param covariance    the target covariance (Gramian) tensor
 * @param centeringMode how features are re-centered before the covariance match
 * @return the list of (weight, loss-node) pairs for the style objective
 * @throws IllegalArgumentException if {@code centeringMode} is not a recognized mode
 */
@Nonnull
public ArrayList<Tuple2<Double, DAGNode>> getStyleComponents(final DAGNode node, final PipelineNetwork network, final LayerStyleParams styleParams, final Tensor mean, final Tensor covariance, final CenteringMode centeringMode) {
    ArrayList<Tuple2<Double, DAGNode>> styleComponents = new ArrayList<>();
    if (null != styleParams && (styleParams.cov != 0 || styleParams.mean != 0)) {
        double meanRms = mean.rms();
        // Guard against an all-zero target: 0 rms would otherwise give an infinite scale.
        double meanScale = 0 == meanRms ? 1 : (1.0 / meanRms);
        InnerNode negTarget = network.wrap(new ValueLayer(mean.scale(-1)), new DAGNode[] {});
        InnerNode negAvg = network.wrap(new BandAvgReducerLayer().setAlpha(-1), node);
        if (styleParams.cov != 0) {
            DAGNode recentered;
            switch(centeringMode) {
                case Origin:
                    // No re-centering: match the raw Gramian.
                    recentered = node;
                    break;
                case Dynamic:
                    // Subtract the current image's own band averages.
                    recentered = network.wrap(new GateBiasLayer(), node, negAvg);
                    break;
                case Static:
                    // Subtract the style target's band means.
                    recentered = network.wrap(new GateBiasLayer(), node, negTarget);
                    break;
                default:
                    // Defensive: only reachable if a new CenteringMode is added without updating this switch.
                    throw new IllegalArgumentException("Unsupported centering mode: " + centeringMode);
            }
            int[] covDim = covariance.getDimensions();
            assert 0 < covDim[2] : Arrays.toString(covDim);
            int inputBands = mean.getDimensions()[2];
            assert 0 < inputBands : Arrays.toString(mean.getDimensions());
            int outputBands = covDim[2] / inputBands;
            assert 0 < outputBands : Arrays.toString(covDim) + " / " + inputBands;
            double covRms = covariance.rms();
            // Same zero-rms guard as for the mean term.
            double covScale = 0 == covRms ? 1 : (1.0 / covRms);
            styleComponents.add(new Tuple2<>(styleParams.cov, network.wrap(new MeanSqLossLayer().setAlpha(covScale), network.wrap(new ValueLayer(covariance), new DAGNode[] {}), network.wrap(ArtistryUtil.wrapTilesAvg(new GramianLayer()), recentered))));
        }
        if (styleParams.mean != 0) {
            // Compare negated band averages against the negated target mean; signs cancel in the MSE.
            styleComponents.add(new Tuple2<>(styleParams.mean, network.wrap(new MeanSqLossLayer().setAlpha(meanScale), negAvg, negTarget)));
        }
    }
    return styleComponents;
}
Also used : ArrayList(java.util.ArrayList) ValueLayer(com.simiacryptus.mindseye.layers.cudnn.ValueLayer) DAGNode(com.simiacryptus.mindseye.network.DAGNode) RangeConstraint(com.simiacryptus.mindseye.opt.region.RangeConstraint) MeanSqLossLayer(com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer) InnerNode(com.simiacryptus.mindseye.network.InnerNode) GramianLayer(com.simiacryptus.mindseye.layers.cudnn.GramianLayer) Tuple2(com.simiacryptus.util.lang.Tuple2) BandAvgReducerLayer(com.simiacryptus.mindseye.layers.cudnn.BandAvgReducerLayer) GateBiasLayer(com.simiacryptus.mindseye.layers.cudnn.GateBiasLayer) Nonnull(javax.annotation.Nonnull)

Example 3 with MeanSqLossLayer

use of com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer in project MindsEye by SimiaCryptus.

From the class StyleTransfer, method getContentComponents:

/**
 * Builds the weighted loss terms that make up the content objective.
 * <p>
 * For each layer type with a nonzero content coefficient, emits a
 * mean-squared error between the layer's activations and the captured
 * content target, scaled by the reciprocal rms of that target so layers
 * of different magnitudes contribute comparably.
 *
 * @param setup   the setup holding content weights and captured targets
 * @param nodeMap the map from layer type to its node in the network
 * @return the list of (weight, loss-node) pairs for the content objective
 */
@Nonnull
public ArrayList<Tuple2<Double, DAGNode>> getContentComponents(NeuralSetup<T> setup, final Map<T, DAGNode> nodeMap) {
    ArrayList<Tuple2<Double, DAGNode>> contentComponents = new ArrayList<>();
    for (final T layerType : getLayerTypes()) {
        final DAGNode node = nodeMap.get(layerType);
        final double coeff_content = !setup.style.content.params.containsKey(layerType) ? 0 : setup.style.content.params.get(layerType);
        final PipelineNetwork network1 = (PipelineNetwork) node.getNetwork();
        if (coeff_content != 0) {
            Tensor content = setup.contentTarget.content.get(layerType);
            double contentRms = content.rms();
            // Guard against an all-zero content target: an unguarded 1.0/rms would set an
            // infinite alpha (matches the zero-rms handling in getStyleComponents).
            double contentScale = 0 == contentRms ? 1 : (1.0 / contentRms);
            contentComponents.add(new Tuple2<>(coeff_content, network1.wrap(new MeanSqLossLayer().setAlpha(contentScale), node, network1.wrap(new ValueLayer(content), new DAGNode[] {}))));
        }
    }
    return contentComponents;
}
Also used : Tensor(com.simiacryptus.mindseye.lang.Tensor) Tuple2(com.simiacryptus.util.lang.Tuple2) ArrayList(java.util.ArrayList) ValueLayer(com.simiacryptus.mindseye.layers.cudnn.ValueLayer) PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) DAGNode(com.simiacryptus.mindseye.network.DAGNode) MeanSqLossLayer(com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer) Nonnull(javax.annotation.Nonnull)

Aggregations

MeanSqLossLayer (com.simiacryptus.mindseye.layers.cudnn.MeanSqLossLayer)3 ValueLayer (com.simiacryptus.mindseye.layers.cudnn.ValueLayer)3 DAGNode (com.simiacryptus.mindseye.network.DAGNode)3 Tuple2 (com.simiacryptus.util.lang.Tuple2)3 ArrayList (java.util.ArrayList)3 Nonnull (javax.annotation.Nonnull)3 Tensor (com.simiacryptus.mindseye.lang.Tensor)1 AvgReducerLayer (com.simiacryptus.mindseye.layers.cudnn.AvgReducerLayer)1 BandAvgReducerLayer (com.simiacryptus.mindseye.layers.cudnn.BandAvgReducerLayer)1 GateBiasLayer (com.simiacryptus.mindseye.layers.cudnn.GateBiasLayer)1 GramianLayer (com.simiacryptus.mindseye.layers.cudnn.GramianLayer)1 SquareActivationLayer (com.simiacryptus.mindseye.layers.cudnn.SquareActivationLayer)1 DAGNetwork (com.simiacryptus.mindseye.network.DAGNetwork)1 InnerNode (com.simiacryptus.mindseye.network.InnerNode)1 PipelineNetwork (com.simiacryptus.mindseye.network.PipelineNetwork)1 RangeConstraint (com.simiacryptus.mindseye.opt.region.RangeConstraint)1