
Example 6 with ILossFunction

Use of org.nd4j.linalg.lossfunctions.ILossFunction in the deeplearning4j project.

From class BaseOutputLayer, method computeScore.

/** Compute score after labels and input have been set.
     * @param fullNetworkL1 L1 regularization term for the entire network
     * @param fullNetworkL2 L2 regularization term for the entire network
     * @param training whether score should be calculated at train or test time (this affects things like application of
     *                 dropout, etc)
     * @return score (loss function)
     */
@Override
public double computeScore(double fullNetworkL1, double fullNetworkL2, boolean training) {
    if (input == null || labels == null)
        throw new IllegalStateException("Cannot calculate score without input and labels");
    this.fullNetworkL1 = fullNetworkL1;
    this.fullNetworkL2 = fullNetworkL2;
    INDArray preOut = preOutput2d(training);
    ILossFunction lossFunction = layerConf().getLossFn();
    //double score = lossFunction.computeScore(getLabels2d(), preOut, layerConf().getActivationFunction(), maskArray, false);
    double score = lossFunction.computeScore(getLabels2d(), preOut, layerConf().getActivationFn(), maskArray, false);
    score += fullNetworkL1 + fullNetworkL2;
    score /= getInputMiniBatchSize();
    this.score = score;
    return score;
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) ILossFunction(org.nd4j.linalg.lossfunctions.ILossFunction)
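
The score computed above can be reproduced outside a layer by calling the ILossFunction directly. The sketch below is a minimal, standalone illustration of the same arithmetic (summed loss plus L1/L2, averaged over the mini-batch size). The choice of LossMCXENT with a softmax activation, the random data, and the zero regularization terms are assumptions for this example, not part of the original snippet.

import org.nd4j.linalg.activations.IActivation;
import org.nd4j.linalg.activations.impl.ActivationSoftmax;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.lossfunctions.impl.LossMCXENT;

public class ComputeScoreSketch {
    public static void main(String[] args) {
        int miniBatch = 4;
        int nOut = 3;
        // One-hot labels and random pre-activations ("preOut") for a 4-example mini-batch
        INDArray labels = Nd4j.zeros(miniBatch, nOut);
        for (int i = 0; i < miniBatch; i++) {
            labels.putScalar(new int[] {i, i % nOut}, 1.0);
        }
        INDArray preOut = Nd4j.randn(miniBatch, nOut);

        ILossFunction lossFunction = new LossMCXENT();      // assumed loss function
        IActivation activation = new ActivationSoftmax();   // assumed activation

        // average = false: summed loss over the mini-batch, as in computeScore above
        double score = lossFunction.computeScore(labels, preOut, activation, null, false);
        double fullNetworkL1 = 0.0;                          // assumed: no regularization
        double fullNetworkL2 = 0.0;
        score += fullNetworkL1 + fullNetworkL2;
        score /= miniBatch;                                  // average over examples
        System.out.println("score = " + score);
    }
}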

Example 7 with ILossFunction

Use of org.nd4j.linalg.lossfunctions.ILossFunction in the deeplearning4j project.

From class BaseOutputLayer, method computeScoreForExamples.

/**Compute the score for each example individually, after labels and input have been set.
     *
     * @param fullNetworkL1 L1 regularization term for the entire network (or, 0.0 to not include regularization)
     * @param fullNetworkL2 L2 regularization term for the entire network (or, 0.0 to not include regularization)
     * @return A column INDArray of shape [numExamples,1], where entry i is the score of the ith example
     */
@Override
public INDArray computeScoreForExamples(double fullNetworkL1, double fullNetworkL2) {
    if (input == null || labels == null)
        throw new IllegalStateException("Cannot calculate score without input and labels");
    INDArray preOut = preOutput2d(false);
    ILossFunction lossFunction = layerConf().getLossFn();
    INDArray scoreArray = lossFunction.computeScoreArray(getLabels2d(), preOut, layerConf().getActivationFn(), maskArray);
    double l1l2 = fullNetworkL1 + fullNetworkL2;
    if (l1l2 != 0.0) {
        scoreArray.addi(l1l2);
    }
    return scoreArray;
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) ILossFunction(org.nd4j.linalg.lossfunctions.ILossFunction)
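
For a per-example view, the same ILossFunction exposes computeScoreArray, which returns a column of shape [numExamples, 1]. The fragment below is a hedged sketch of the in-place addi of the L1/L2 term used in computeScoreForExamples; the loss and activation choices and the 0.05 regularization value are made up for illustration.

import org.nd4j.linalg.activations.impl.ActivationSoftmax;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.lossfunctions.impl.LossMCXENT;

import java.util.Arrays;

public class ScoreArraySketch {
    public static void main(String[] args) {
        // One-hot labels and random pre-activations for 4 examples, 3 classes
        INDArray labels = Nd4j.zeros(4, 3);
        labels.putScalar(new int[] {0, 0}, 1.0);
        labels.putScalar(new int[] {1, 1}, 1.0);
        labels.putScalar(new int[] {2, 2}, 1.0);
        labels.putScalar(new int[] {3, 0}, 1.0);
        INDArray preOut = Nd4j.randn(4, 3);

        ILossFunction lossFunction = new LossMCXENT();        // assumed loss function
        // Column vector of shape [numExamples, 1]; entry i is the loss for example i
        INDArray scoreArray = lossFunction.computeScoreArray(labels, preOut, new ActivationSoftmax(), null);

        double l1l2 = 0.05;            // assumed fullNetworkL1 + fullNetworkL2
        if (l1l2 != 0.0) {
            scoreArray.addi(l1l2);     // in-place add, mirroring computeScoreForExamples
        }
        System.out.println(Arrays.toString(scoreArray.shape()));
    }
}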

Example 8 with ILossFunction

Use of org.nd4j.linalg.lossfunctions.ILossFunction in the deeplearning4j project.

From class BasePretrainNetwork, method setScoreWithZ.

@Override
protected void setScoreWithZ(INDArray z) {
    if (input == null || z == null)
        throw new IllegalStateException("Cannot calculate score without input and labels");
    ILossFunction lossFunction = layerConf().getLossFunction().getILossFunction();
    //double score = lossFunction.computeScore(input, z, layerConf().getActivationFunction(), maskArray, false);
    double score = lossFunction.computeScore(input, z, layerConf().getActivationFn(), maskArray, false);
    score += calcL1(false) + calcL2(false);
    score /= getInputMiniBatchSize();
    this.score = score;
}
Also used : ILossFunction(org.nd4j.linalg.lossfunctions.ILossFunction)
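
Unlike the output layers above, BasePretrainNetwork stores the loss as a LossFunctions.LossFunction enum and resolves it to an ILossFunction via getILossFunction(); the "labels" are the layer inputs themselves, giving a reconstruction score. The sketch below shows that resolution step in isolation; the sigmoid activation, the random reconstructions, and the omission of the L1/L2 terms are assumptions.

import org.nd4j.linalg.activations.impl.ActivationSigmoid;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class PretrainScoreSketch {
    public static void main(String[] args) {
        int miniBatch = 4;
        int nIn = 5;
        INDArray input = Nd4j.rand(miniBatch, nIn);   // original inputs
        INDArray z = Nd4j.rand(miniBatch, nIn);       // stand-in for the layer's reconstructions

        // Enum-based configuration resolved to an ILossFunction, as in setScoreWithZ
        ILossFunction lossFunction = LossFunctions.LossFunction.MSE.getILossFunction();

        // For a pretrain layer the labels are the inputs; L1/L2 terms omitted in this sketch
        double score = lossFunction.computeScore(input, z, new ActivationSigmoid(), null, false);
        score /= miniBatch;
        System.out.println("reconstruction score = " + score);
    }
}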

Example 9 with ILossFunction

Use of org.nd4j.linalg.lossfunctions.ILossFunction in the deeplearning4j project.

From class LossLayer, method getGradientsAndDelta.

/** Returns a {Gradient, Delta} pair given preOut; the Gradient is empty since a LossLayer has no parameters. */
private Pair<Gradient, INDArray> getGradientsAndDelta(INDArray preOut) {
    // delta calculation
    ILossFunction lossFunction = layerConf().getLossFn();
    INDArray delta = lossFunction.computeGradient(getLabels2d(), preOut, layerConf().getActivationFn(), maskArray);
    // grab the empty gradient
    Gradient gradient = new DefaultGradient();
    return new Pair<>(gradient, delta);
}
Also used : Gradient(org.deeplearning4j.nn.gradient.Gradient) DefaultGradient(org.deeplearning4j.nn.gradient.DefaultGradient) INDArray(org.nd4j.linalg.api.ndarray.INDArray) ILossFunction(org.nd4j.linalg.lossfunctions.ILossFunction) Pair(org.deeplearning4j.berkeley.Pair)
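
The gradient side of the interface works the same way: computeGradient returns the derivative of the loss with respect to preOut (the backprop delta), with the same shape as preOut. A minimal sketch, again assuming LossMCXENT with a softmax activation:

import org.nd4j.linalg.activations.impl.ActivationSoftmax;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.lossfunctions.impl.LossMCXENT;

import java.util.Arrays;

public class LossGradientSketch {
    public static void main(String[] args) {
        INDArray labels = Nd4j.zeros(2, 3);
        labels.putScalar(new int[] {0, 1}, 1.0);
        labels.putScalar(new int[] {1, 2}, 1.0);
        INDArray preOut = Nd4j.randn(2, 3);

        ILossFunction lossFunction = new LossMCXENT();  // assumed loss function
        // Gradient of the loss with respect to preOut: the delta propagated backwards
        INDArray delta = lossFunction.computeGradient(labels, preOut, new ActivationSoftmax(), null);
        System.out.println("delta shape matches preOut: "
                + Arrays.equals(delta.shape(), preOut.shape()));
    }
}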

Example 10 with ILossFunction

Use of org.nd4j.linalg.lossfunctions.ILossFunction in the deeplearning4j project.

From class LossLayer, method computeScoreForExamples.

/**Compute the score for each example individually, after labels and input have been set.
     *
     * @param fullNetworkL1 L1 regularization term for the entire network (or, 0.0 to not include regularization)
     * @param fullNetworkL2 L2 regularization term for the entire network (or, 0.0 to not include regularization)
     * @return A column INDArray of shape [numExamples,1], where entry i is the score of the ith example
     */
@Override
public INDArray computeScoreForExamples(double fullNetworkL1, double fullNetworkL2) {
    if (input == null || labels == null)
        throw new IllegalStateException("Cannot calculate score without input and labels");
    INDArray preOut = input;
    ILossFunction lossFunction = layerConf().getLossFn();
    INDArray scoreArray = lossFunction.computeScoreArray(getLabels2d(), preOut, layerConf().getActivationFn(), maskArray);
    double l1l2 = fullNetworkL1 + fullNetworkL2;
    if (l1l2 != 0.0) {
        scoreArray.addi(l1l2);
    }
    return scoreArray;
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) ILossFunction(org.nd4j.linalg.lossfunctions.ILossFunction)
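
A LossLayer differs from BaseOutputLayer only in that it has no weights, so the layer input is used directly as preOutput (note preOut = input above). A short sketch under that assumption, using MSE with an identity activation as a stand-in configuration:

import org.nd4j.linalg.activations.impl.ActivationIdentity;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.lossfunctions.impl.LossMSE;

public class LossLayerScoreSketch {
    public static void main(String[] args) {
        INDArray labels = Nd4j.rand(4, 3);
        INDArray input = Nd4j.rand(4, 3);   // used directly as preOutput: no weights to apply

        ILossFunction lossFunction = new LossMSE();  // assumed loss function
        // One score per example, shape [4, 1]
        INDArray scoreArray = lossFunction.computeScoreArray(labels, input, new ActivationIdentity(), null);
        System.out.println(scoreArray);
    }
}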

Aggregations

ILossFunction (org.nd4j.linalg.lossfunctions.ILossFunction): 18 uses
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 17 uses
Test (org.junit.Test): 6 uses
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 5 uses
MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 5 uses
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer): 4 uses
ArrayList (java.util.ArrayList): 3 uses
Pair (org.deeplearning4j.berkeley.Pair): 3 uses
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 3 uses
NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution): 3 uses
UniformDistribution (org.deeplearning4j.nn.conf.distribution.UniformDistribution): 3 uses
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 3 uses
DefaultGradient (org.deeplearning4j.nn.gradient.DefaultGradient): 3 uses
Gradient (org.deeplearning4j.nn.gradient.Gradient): 3 uses
Activation (org.nd4j.linalg.activations.Activation): 3 uses
DL4JInvalidInputException (org.deeplearning4j.exception.DL4JInvalidInputException): 2 uses
ComputationGraph (org.deeplearning4j.nn.graph.ComputationGraph): 2 uses
ComputationGraphConfiguration (org.deeplearning4j.nn.conf.ComputationGraphConfiguration): 1 use
LossLayer (org.deeplearning4j.nn.conf.layers.LossLayer): 1 use
RnnOutputLayer (org.deeplearning4j.nn.conf.layers.RnnOutputLayer): 1 use