Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class LossFunctions, method l2.
/**
 * L2 loss function: i.e., sum of squared errors, L = sum_i (actual_i - predicted_i)^2
 *
 * @param outputName  Name of the output variable
 * @param predictions Predictions variable
 * @param label       Label variable
 * @param weights     Weights variable; if null, a scalar weight of 1.0 is used
 * @param reduction   Type of reduction to apply to the per-element loss
 * @param dimensions  Dimension(s) to reduce over
 * @return LossInfo describing the loss variables
 */
public static LossInfo l2(String outputName, SDVariable predictions, SDVariable label, SDVariable weights,
                          Reduction reduction, int... dimensions) {
    LossInfo.Builder b = validate("l2", predictions, label, reduction);
    SameDiff sd = predictions.getSameDiff();
    if (weights == null) {
        // Default: scalar weight of 1.0 (i.e., unweighted loss)
        weights = sd.one("l2_loss_weights", SCALAR);
    }
    SDVariable diff = predictions.sub(label);
    // Name the pre-reduction output directly only if no reduction will be applied
    String name = (reduction == Reduction.NONE ? outputName : null);
    SDVariable preReduceLoss = sd.square(diff).mul(name, weights);
    return doReduce(sd, outputName, false, b, reduction, preReduceLoss, label, weights, dimensions);
}
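A minimal usage sketch (illustrative names and shapes, assuming SameDiff.var(String, int...) for variable creation; Reduction.NONE is the only reduction value confirmed by the snippet above):

SameDiff sd = SameDiff.create();
SDVariable predictions = sd.var("predictions", 3, 4); // hypothetical 3x4 minibatch
SDVariable label = sd.var("label", 3, 4);
// null weights -> the method substitutes a scalar weight of 1.0;
// with Reduction.NONE, the per-element loss variable itself is named "l2Out"
LossInfo l2Loss = LossFunctions.l2("l2Out", predictions, label, null, Reduction.NONE, 1);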
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class LossFunctions, method mcxent.
/**
 * Multi-Class Cross Entropy loss function:<br>
 * L = sum_i actual_i * log(predicted_i)
 *
 * @param outputName  Name of the output variable
 * @param predictions Predictions variable; expected to contain probabilities, as log() is applied directly
 * @param label       Label variable (e.g., one-hot)
 * @param weights     Weights variable; if null, a scalar weight of 1.0 is used
 * @param reduction   Type of reduction to apply to the per-element loss
 * @param dimensions  Dimension(s) to reduce over
 * @return LossInfo describing the loss variables
 */
public static LossInfo mcxent(String outputName, SDVariable predictions, SDVariable label, SDVariable weights,
                              Reduction reduction, int... dimensions) {
    LossInfo.Builder b = validate("mcxent", predictions, label, reduction);
    SameDiff sd = predictions.getSameDiff();
    if (weights == null) {
        // Default: scalar weight of 1.0 (i.e., unweighted loss)
        weights = sd.one("mcxent_loss_weights", SCALAR);
    }
    // Name the pre-reduction output directly only if no reduction will be applied
    String name = (reduction == Reduction.NONE ? outputName : null);
    SDVariable weightedLogProd = sd.log(predictions).mul(label).mul(name, weights);
    return doReduce(sd, outputName, false, b, reduction, weightedLogProd, label, weights, dimensions);
}
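A minimal sketch, assuming sd.softmax(...) is available to produce probabilities (mcxent applies log() directly, so the predictions must already be probabilities); names and shapes are illustrative:

SameDiff sd = SameDiff.create();
SDVariable logits = sd.var("logits", 3, 10);
SDVariable probs = sd.softmax(logits);     // probabilities, since log() is applied directly
SDVariable label = sd.var("label", 3, 10); // e.g., one-hot rows
LossInfo ce = LossFunctions.mcxent("mcxentOut", probs, label, null, Reduction.NONE, 1);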
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class LossFunctions, method l1.
/**
 * L1 loss function: i.e., sum of absolute errors, L = sum_i abs(predicted_i - actual_i)
 *
 * @param outputName  Name of the output variable
 * @param predictions Predictions variable
 * @param label       Label variable
 * @param weights     Weights variable; if null, a scalar weight of 1.0 is used
 * @param reduction   Type of reduction to apply to the per-element loss
 * @param dimensions  Dimension(s) to reduce over
 * @return LossInfo describing the loss variables
 */
public static LossInfo l1(String outputName, SDVariable predictions, SDVariable label, SDVariable weights,
                          Reduction reduction, int... dimensions) {
    LossInfo.Builder b = validate("l1", predictions, label, reduction);
    SameDiff sd = predictions.getSameDiff();
    if (weights == null) {
        // Default: scalar weight of 1.0 (i.e., unweighted loss)
        weights = sd.one("l1_loss_weights", SCALAR);
    }
    // Name the pre-reduction output directly only if no reduction will be applied
    String name = (reduction == Reduction.NONE ? outputName : null);
    SDVariable preReduceLoss = sd.abs(predictions.sub(label)).mul(name, weights);
    return doReduce(sd, outputName, false, b, reduction, preReduceLoss, label, weights, dimensions);
}
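l1 has the same signature as l2 and mcxent above; only the pre-reduction loss differs (absolute rather than squared error). This sketch (illustrative names and shapes, assuming SameDiff.var(String, int...)) passes an explicit weights variable instead of the default scalar 1.0:

SameDiff sd = SameDiff.create();
SDVariable predictions = sd.var("predictions", 3, 4);
SDVariable label = sd.var("label", 3, 4);
// Per-element weights; the shape must broadcast against the pre-reduction loss
SDVariable w = sd.var("l1_weights", 3, 4);
LossInfo l1Loss = LossFunctions.l1("l1Out", predictions, label, w, Reduction.NONE, 1);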
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class DifferentialFunctionFactory, method batchNorm.
/**
 * Batch norm operation: normalizes the input using the supplied mean and variance,
 * optionally applying a scale (gamma) and an offset (beta).
 */
public SDVariable batchNorm(SDVariable input, SDVariable mean, SDVariable variance, SDVariable gamma, SDVariable beta,
                            boolean applyGamma, boolean applyBeta, double epsilon) {
    BatchNorm batchNorm = BatchNorm.builder()
            .inputFunctions(new SDVariable[] { input, mean, variance, gamma, beta })
            .applyGamma(applyGamma).applyBeta(applyBeta)
            .epsilon(epsilon).sameDiff(sameDiff())
            .build();
    // The first output variable is the normalized result
    val outputVars = batchNorm.outputVariables();
    return outputVars[0];
}
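A sketch of invoking the factory method, assuming f is the enclosing DifferentialFunctionFactory instance and sd its SameDiff; per-channel shapes for mean/variance/gamma/beta follow the usual batch-norm convention, and epsilon 1e-5 is an illustrative value:

SDVariable input = sd.var("input", 16, 32);       // hypothetical [minibatch, channels]
SDVariable mean = sd.var("mean", 1, 32);          // per-channel mean
SDVariable variance = sd.var("variance", 1, 32);  // per-channel variance
SDVariable gamma = sd.var("gamma", 1, 32);        // scale; used because applyGamma == true
SDVariable beta = sd.var("beta", 1, 32);          // offset; used because applyBeta == true
SDVariable normalized = f.batchNorm(input, mean, variance, gamma, beta, true, true, 1e-5);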
Use of org.nd4j.autodiff.samediff.SDVariable in project nd4j by deeplearning4j.
The class DifferentialFunctionFactory, method doGradChoose.
/**
 * Gradient helper for "choose"-style reductions (such as max): the gradient is routed to the
 * input locations holding the chosen value, and split evenly when several locations tie.
 *
 * @param func  Variable that is broadcast back over the input's shape to form the per-element gradient
 * @param input Input to the choose operation
 * @return Gradient with respect to the input
 */
public SDVariable doGradChoose(SDVariable func, SDVariable input) {
    validateDifferentialFunctionsameDiff(func);
    validateDifferentialFunctionsameDiff(input);
    // Broadcast the gradient and the chosen value back to the input's shape
    SDVariable repeatedGrad = doRepeat(func, input);
    SDVariable resultRepeated = doRepeat(func.args()[0], input);
    // Mask: 1 where the input holds the chosen value, 0 elsewhere
    SDVariable argMaxLocations = eq(input, resultRepeated);
    // Route the gradient through the mask, splitting it evenly among tied locations
    return div(mul(argMaxLocations, repeatedGrad), sum(argMaxLocations).outputVariables()[0]);
}
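A worked example of the final line's tie-splitting arithmetic, with illustrative values:

// For a max over input = [1, 3, 3], with chosen value 3 and upstream gradient g:
// resultRepeated       = [3, 3, 3]      (chosen value broadcast to the input shape)
// argMaxLocations      = [0, 1, 1]      (1 where input equals the chosen value)
// repeatedGrad         = [g, g, g]      (gradient broadcast to the input shape)
// sum(argMaxLocations) = 2              (number of tied locations)
// returned gradient    = [0, g/2, g/2]  (gradient split evenly among the ties)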