Example 1 with Not

Use of org.nd4j.linalg.api.ops.impl.transforms.Not in project deeplearning4j by deeplearning4j.

From the class TestMasking, method testPerOutputMaskingMLN:
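
Before the full test, a minimal sketch of the Not op itself (the class name and printed output are illustrative assumptions, not part of the project): applied to a 0/1 mask, Not flips every entry, producing an array that marks exactly the masked-out positions. The execAndReturn call mirrors the one used in the test below.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.transforms.Not;
import org.nd4j.linalg.factory.Nd4j;

public class NotMaskSketch {
    public static void main(String[] args) {
        //A per-output label mask: 1 = entry contributes to score/gradients, 0 = entry is ignored
        INDArray mask = Nd4j.create(new double[] { 1, 0, 0, 1, 0 });
        //Not flips each entry of the duplicated mask (0 -> 1, 1 -> 0),
        //yielding the locations of the masked (excluded) entries
        INDArray maskedLocations = Nd4j.getExecutioner().execAndReturn(new Not(mask.dup()));
        System.out.println(maskedLocations); //expected: [0, 1, 1, 0, 1] (up to print formatting)
    }
}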

@Test
public void testPerOutputMaskingMLN() {
    //Idea: for per-output masking, the contents of the masked label entries should make zero difference to either
    // the score or the gradients
    int nIn = 6;
    int layerSize = 4;
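    //Label masks: mask1 covers one example with 5 outputs; mask3 covers 3 examples x 5 outputs.
    //A 0 entry excludes that label value from the score and gradient calculations.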
    INDArray mask1 = Nd4j.create(new double[] { 1, 0, 0, 1, 0 });
    INDArray mask3 = Nd4j.create(new double[][] { { 1, 1, 1, 1, 1 }, { 0, 1, 0, 1, 0 }, { 1, 0, 0, 1, 1 } });
    INDArray[] labelMasks = new INDArray[] { mask1, mask3 };
    ILossFunction[] lossFunctions = new ILossFunction[] {
            new LossBinaryXENT(),
            //new LossCosineProximity(),   //Doesn't support per-output masking, as it doesn't make sense for cosine proximity
            new LossHinge(),
            new LossKLD(),
            new LossKLD(),
            new LossL1(),
            new LossL2(),
            new LossMAE(),
            new LossMAE(),
            new LossMAPE(),
            new LossMAPE(),
            //new LossMCXENT(),            //Per-output masking on MCXENT+softmax: not yet supported
            new LossMCXENT(),
            new LossMSE(),
            new LossMSE(),
            new LossMSLE(),
            new LossMSLE(),
            new LossNegativeLogLikelihood(),
            new LossPoisson(),
            new LossSquaredHinge() };
    Activation[] act = new Activation[] {
            Activation.SIGMOID, //XENT
            Activation.TANH,    //Hinge
            Activation.SIGMOID, //KLD
            Activation.SOFTMAX, //KLD + softmax
            Activation.TANH,    //L1
            Activation.TANH,    //L2
            Activation.TANH,    //MAE
            Activation.SOFTMAX, //MAE + softmax
            Activation.TANH,    //MAPE
            Activation.SOFTMAX, //MAPE + softmax
            Activation.SIGMOID, //MCXENT + sigmoid
            Activation.TANH,    //MSE
            Activation.SOFTMAX, //MSE + softmax
            Activation.SIGMOID, //MSLE - needs positive labels/activations (due to log)
            Activation.SOFTMAX, //MSLE + softmax
            Activation.SIGMOID, //NLL
            Activation.SIGMOID, //Poisson
            Activation.TANH };  //Squared hinge
    for (INDArray labelMask : labelMasks) {
        int minibatch = labelMask.size(0);
        int nOut = labelMask.size(1);
        for (int i = 0; i < lossFunctions.length; i++) {
            ILossFunction lf = lossFunctions[i];
            Activation a = act[i];
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .updater(Updater.NONE)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .seed(12345)
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(layerSize)
                            .activation(Activation.TANH).build())
                    .layer(1, new OutputLayer.Builder().nIn(layerSize).nOut(nOut)
                            .lossFunction(lf).activation(a).build())
                    .build();
            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
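            //Feature mask is null (no input masking); labelMask enables per-output label masking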
            net.setLayerMaskArrays(null, labelMask);
            INDArray[] fl = LossFunctionGradientCheck.getFeaturesAndLabels(lf, minibatch, nIn, nOut, 12345);
            INDArray features = fl[0];
            INDArray labels = fl[1];
            net.setInput(features);
            net.setLabels(labels);
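            //Baseline: score and gradients computed with the original (unperturbed) labels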
            net.computeGradientAndScore();
            double score1 = net.score();
            INDArray grad1 = net.gradient().gradient();
            //Now: change the label values for the masked steps. The score and gradients should be
            // unchanged, as the masked entries don't contribute to either
            INDArray maskZeroLocations = Nd4j.getExecutioner().execAndReturn(new Not(labelMask.dup()));
            INDArray rand = Nd4j.rand(maskZeroLocations.shape()).muli(0.5);
            //Only the masked values are changed
            INDArray newLabels = labels.add(rand.muli(maskZeroLocations));
            net.setLabels(newLabels);
            net.computeGradientAndScore();
            assertNotEquals(labels, newLabels);
            double score2 = net.score();
            INDArray grad2 = net.gradient().gradient();
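            //Changing only masked label entries must leave the score (to fp tolerance) and gradients unchanged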
            assertEquals(score1, score2, 1e-6);
            assertEquals(grad1, grad2);
            //Do the same for CompGraph
            ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder()
                    .updater(Updater.NONE)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .seed(12345)
                    .graphBuilder()
                    .addInputs("in")
                    .addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(layerSize)
                            .activation(Activation.TANH).build(), "in")
                    .addLayer("1", new OutputLayer.Builder().nIn(layerSize).nOut(nOut)
                            .lossFunction(lf).activation(a).build(), "0")
                    .setOutputs("1")
                    .build();
            ComputationGraph graph = new ComputationGraph(conf2);
            graph.init();
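            //ComputationGraph takes one label mask array per network output; here there is a single output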
            graph.setLayerMaskArrays(null, new INDArray[] { labelMask });
            graph.setInputs(features);
            graph.setLabels(labels);
            graph.computeGradientAndScore();
            double gScore1 = graph.score();
            INDArray gGrad1 = graph.gradient().gradient();
            graph.setLabels(newLabels);
            graph.computeGradientAndScore();
            double gScore2 = graph.score();
            INDArray gGrad2 = graph.gradient().gradient();
            assertEquals(gScore1, gScore2, 1e-6);
            assertEquals(gGrad1, gGrad2);
        }
    }
}
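
For reference, the same per-output label masking can also be supplied at training time rather than via setLayerMaskArrays, using the MultiLayerNetwork.fit overload that accepts feature and label masks. The following is a minimal, hypothetical sketch (the architecture, data, and class name are invented for illustration):

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.impl.LossMSE;

public class PerOutputMaskFitSketch {
    public static void main(String[] args) {
        int nIn = 6;
        int nOut = 5;
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .list()
                .layer(0, new OutputLayer.Builder().nIn(nIn).nOut(nOut)
                        .lossFunction(new LossMSE()).activation(Activation.TANH).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        INDArray features = Nd4j.rand(3, nIn);
        INDArray labels = Nd4j.rand(3, nOut);
        //0 entries exclude the corresponding label values from score and gradient calculations
        INDArray labelMask = Nd4j.create(new double[][] {
                { 1, 1, 1, 1, 1 }, { 0, 1, 0, 1, 0 }, { 1, 0, 0, 1, 1 } });

        //fit with a null feature mask and a per-output label mask
        net.fit(features, labels, null, labelMask);
    }
}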
Also used:
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer)
RnnOutputLayer (org.deeplearning4j.nn.conf.layers.RnnOutputLayer)
Activation (org.nd4j.linalg.activations.Activation)
ComputationGraph (org.deeplearning4j.nn.graph.ComputationGraph)
ILossFunction (org.nd4j.linalg.lossfunctions.ILossFunction)
Not (org.nd4j.linalg.api.ops.impl.transforms.Not)
INDArray (org.nd4j.linalg.api.ndarray.INDArray)
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer)
NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution)
Test (org.junit.Test)
