Search in sources :

Example 1 with Op

use of org.nd4j.linalg.api.ops.Op in project nd4j by deeplearning4j.

The following is the opTypeForNode method of the class BaseGraphMapper.

@Override
public Op.Type opTypeForNode(NODE_TYPE nodeDef) {
    // Look up the DifferentialFunction registered under this node's op name.
    final DifferentialFunction mappedOp = getMappedOp(getOpType(nodeDef));
    if (mappedOp == null) {
        throw new NoOpNameFoundException("No op found with name " + getOpType(nodeDef));
    }
    // Delegate to the mapped function to report its op category.
    return mappedOp.opType();
}
Also used : Op(org.nd4j.linalg.api.ops.Op) DifferentialFunction(org.nd4j.autodiff.functions.DifferentialFunction) NoOpNameFoundException(org.nd4j.imports.NoOpNameFoundException)

Example 2 with Op

use of org.nd4j.linalg.api.ops.Op in project deeplearning4j by deeplearning4j.

The following is the eval method of the class ROC.

/**
     * Evaluate (collect statistics for) the given minibatch of data.
     * For time series (3 dimensions) use {@link #evalTimeSeries(INDArray, INDArray)} or {@link #evalTimeSeries(INDArray, INDArray, INDArray)}
     *
     * @param labels      Labels / true outcomes, shape [minibatch, 1] or [minibatch, 2]
     * @param predictions Predictions (probabilities), same shape as labels
     * @throws IllegalArgumentException if the input shapes are not rank 2 with size(1) == 1 or 2
     */
public void eval(INDArray labels, INDArray predictions) {
    if (labels.rank() == 3 && predictions.rank() == 3) {
        //Assume time series input -> reshape to 2d and evaluate.
        //Must return here: evalTimeSeries delegates back to this method with 2d data,
        //so falling through would hit the rank check below and throw even though the
        //data has already been fully evaluated.
        evalTimeSeries(labels, predictions);
        return;
    }
    if (labels.rank() > 2 || predictions.rank() > 2 || labels.size(1) != predictions.size(1) || labels.size(1) > 2) {
        throw new IllegalArgumentException("Invalid input data shape: labels shape = " + Arrays.toString(labels.shape()) + ", predictions shape = " + Arrays.toString(predictions.shape()) + "; require rank 2 array with size(1) == 1 or 2");
    }
    double step = 1.0 / thresholdSteps;
    boolean singleOutput = labels.size(1) == 1;
    INDArray positivePredictedClassColumn;
    INDArray positiveActualClassColumn;
    INDArray negativeActualClassColumn;
    if (singleOutput) {
        //Single binary variable case: label column is P(positive)
        positiveActualClassColumn = labels;
        //1.0 - label
        negativeActualClassColumn = labels.rsub(1.0);
        positivePredictedClassColumn = predictions;
    } else {
        //Standard case - 2 output variables (probability distribution): column 1 = positive class
        positiveActualClassColumn = labels.getColumn(1);
        negativeActualClassColumn = labels.getColumn(0);
        positivePredictedClassColumn = predictions.getColumn(1);
    }
    //Increment global counts - actual positive/negative observed
    countActualPositive += positiveActualClassColumn.sumNumber().intValue();
    countActualNegative += negativeActualClassColumn.sumNumber().intValue();
    for (int i = 0; i <= thresholdSteps; i++) {
        double currThreshold = i * step;
        //Work out true/false positives - do this by replacing probabilities (predictions) with 1 or 0 based on threshold
        Condition condGeq = Conditions.greaterThanOrEqual(currThreshold);
        Condition condLeq = Conditions.lessThanOrEqual(currThreshold);
        //dup() so the caller's predictions array is not modified in place
        Op op = new CompareAndSet(positivePredictedClassColumn.dup(), 1.0, condGeq);
        INDArray predictedClass1 = Nd4j.getExecutioner().execAndReturn(op);
        op = new CompareAndSet(predictedClass1, 0.0, condLeq);
        predictedClass1 = Nd4j.getExecutioner().execAndReturn(op);
        //True positives: occur when positive predicted class and actual positive actual class...
        //False positive occurs when positive predicted class, but negative actual class
        //If predicted == 1 and actual == 1 at this threshold: 1x1 = 1. 0 otherwise
        INDArray isTruePositive = predictedClass1.mul(positiveActualClassColumn);
        //If predicted == 1 and actual == 0 at this threshold: 1x1 = 1. 0 otherwise
        INDArray isFalsePositive = predictedClass1.mul(negativeActualClassColumn);
        //Counts for this batch:
        int truePositiveCount = isTruePositive.sumNumber().intValue();
        int falsePositiveCount = isFalsePositive.sumNumber().intValue();
        //Increment counts for this threshold
        CountsForThreshold thresholdCounts = counts.get(currThreshold);
        thresholdCounts.incrementTruePositive(truePositiveCount);
        thresholdCounts.incrementFalsePositive(falsePositiveCount);
    }
}
Also used : Condition(org.nd4j.linalg.indexing.conditions.Condition) Op(org.nd4j.linalg.api.ops.Op) INDArray(org.nd4j.linalg.api.ndarray.INDArray) CompareAndSet(org.nd4j.linalg.api.ops.impl.transforms.comparison.CompareAndSet)

Example 3 with Op

use of org.nd4j.linalg.api.ops.Op in project deeplearning4j by deeplearning4j.

The following is the eval method of the class ROCMultiClass.

/**
     * Evaluate (collect statistics for) the given minibatch of data.
     * For time series (3 dimensions) use {@link #evalTimeSeries(INDArray, INDArray)} or {@link #evalTimeSeries(INDArray, INDArray, INDArray)}
     *
     * @param labels      Labels / true outcomes, shape [minibatch, numClasses]
     * @param predictions Predictions (probabilities), same shape as labels
     * @throws IllegalArgumentException if shapes are invalid, or if the number of classes
     *                                  differs from a previous call to this method
     */
public void eval(INDArray labels, INDArray predictions) {
    if (labels.rank() == 3 && predictions.rank() == 3) {
        //Assume time series input -> reshape to 2d and evaluate.
        //Must return here: evalTimeSeries delegates back to this method with 2d data,
        //so falling through would hit the rank check below and throw even though the
        //data has already been fully evaluated.
        evalTimeSeries(labels, predictions);
        return;
    }
    if (labels.rank() > 2 || predictions.rank() > 2 || labels.size(1) != predictions.size(1)) {
        throw new IllegalArgumentException("Invalid input data shape: labels shape = " + Arrays.toString(labels.shape()) + ", predictions shape = " + Arrays.toString(predictions.shape()) + "; require rank 2 array with size(1) == 1 or 2");
    }
    double step = 1.0 / thresholdSteps;
    if (countActualPositive == null) {
        //This must be the first time eval has been called: lazily initialize the
        //per-class counters and the per-class, per-threshold count maps
        int size = labels.size(1);
        countActualPositive = new long[size];
        countActualNegative = new long[size];
        for (int i = 0; i < size; i++) {
            Map<Double, ROC.CountsForThreshold> map = new LinkedHashMap<Double, ROC.CountsForThreshold>();
            counts.put(i, map);
            for (int j = 0; j <= thresholdSteps; j++) {
                double currThreshold = j * step;
                map.put(currThreshold, new ROC.CountsForThreshold(currThreshold));
            }
        }
    }
    if (countActualPositive.length != labels.size(1)) {
        throw new IllegalArgumentException("Cannot evaluate data: number of label classes does not match previous call. " + "Got " + labels.size(1) + " labels (from array shape " + Arrays.toString(labels.shape()) + ")" + " vs. expected number of label classes = " + countActualPositive.length);
    }
    for (int i = 0; i < countActualPositive.length; i++) {
        //Iterate over each class (one-vs-all ROC)
        INDArray positiveActualColumn = labels.getColumn(i);
        INDArray positivePredictedColumn = predictions.getColumn(i);
        //1.0 - label; loop-invariant w.r.t. threshold, so compute once per class
        INDArray negativeActualColumn = positiveActualColumn.rsub(1.0);
        //Increment global counts - actual positive/negative observed
        long currBatchPositiveActualCount = positiveActualColumn.sumNumber().intValue();
        countActualPositive[i] += currBatchPositiveActualCount;
        countActualNegative[i] += positiveActualColumn.length() - currBatchPositiveActualCount;
        for (int j = 0; j <= thresholdSteps; j++) {
            double currThreshold = j * step;
            //Work out true/false positives - do this by replacing probabilities (predictions) with 1 or 0 based on threshold
            Condition condGeq = Conditions.greaterThanOrEqual(currThreshold);
            Condition condLeq = Conditions.lessThanOrEqual(currThreshold);
            //dup() so the caller's predictions array is not modified in place
            Op op = new CompareAndSet(positivePredictedColumn.dup(), 1.0, condGeq);
            INDArray predictedClass1 = Nd4j.getExecutioner().execAndReturn(op);
            op = new CompareAndSet(predictedClass1, 0.0, condLeq);
            predictedClass1 = Nd4j.getExecutioner().execAndReturn(op);
            //True positives: occur when positive predicted class and actual positive actual class...
            //False positive occurs when positive predicted class, but negative actual class
            //If predicted == 1 and actual == 1 at this threshold: 1x1 = 1. 0 otherwise
            INDArray isTruePositive = predictedClass1.mul(positiveActualColumn);
            //If predicted == 1 and actual == 0 at this threshold: 1x1 = 1. 0 otherwise
            INDArray isFalsePositive = predictedClass1.mul(negativeActualColumn);
            //Counts for this batch:
            int truePositiveCount = isTruePositive.sumNumber().intValue();
            int falsePositiveCount = isFalsePositive.sumNumber().intValue();
            //Increment counts for this threshold
            ROC.CountsForThreshold thresholdCounts = counts.get(i).get(currThreshold);
            thresholdCounts.incrementTruePositive(truePositiveCount);
            thresholdCounts.incrementFalsePositive(falsePositiveCount);
        }
    }
}
Also used : Condition(org.nd4j.linalg.indexing.conditions.Condition) Op(org.nd4j.linalg.api.ops.Op) INDArray(org.nd4j.linalg.api.ndarray.INDArray) CompareAndSet(org.nd4j.linalg.api.ops.impl.transforms.comparison.CompareAndSet)

Example 4 with Op

use of org.nd4j.linalg.api.ops.Op in project nd4j by deeplearning4j.

The following is the testSigmoidBackwards method of the class SameDiffTests.

@Test
public void testSigmoidBackwards() {
    // Build graph: sum(sigmoid(x)) over a 2x2 linspace input
    SameDiff sd = SameDiff.create();
    INDArray xArr = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    Map<String, INDArray> inputs = new HashMap<>();
    inputs.put("x", xArr);
    SDVariable x = sd.var("x", inputs.get("x"));
    SDVariable sig = sd.sigmoid(x);
    SDVariable total = sd.sum(sig, Integer.MAX_VALUE);
    // Run the backward pass and inspect the last executed op
    List<DifferentialFunction> backwardsOps = sd.execBackwards().getRight();
    Op lastOp = (Op) backwardsOps.get(backwardsOps.size() - 1);
    // Expected gradient of sum(sigmoid(x)) w.r.t. x: sigmoid'(x) = s(x)*(1-s(x))
    INDArray expected = Nd4j.create(new double[][] { { 0.1966, 0.1050 }, { 0.0452, 0.0177 } });
    assertTrue(expected.equalsWithEps(lastOp.z(), 1e-2));
    System.out.println(backwardsOps);
}
Also used : Op(org.nd4j.linalg.api.ops.Op) INDArray(org.nd4j.linalg.api.ndarray.INDArray) DifferentialFunction(org.nd4j.autodiff.functions.DifferentialFunction) Test(org.junit.Test)

Example 5 with Op

use of org.nd4j.linalg.api.ops.Op in project nd4j by deeplearning4j.

The following is the testBroadcast1d method of the class Nd4jTestsC.

@Test
public void testBroadcast1d() {
    // Broadcast-add a 1d vector along each dimension of a {4,3,2} array, for both
    // c-order and f-order targets, and compare against precomputed expected values.
    int[] shape = { 4, 3, 2 };
    int[] toBroadcastDims = new int[] { 0, 1, 2 };
    int[][] toBroadcastShapes = new int[][] { { 1, 4 }, { 1, 3 }, { 1, 2 } };
    // Expected result values in buffer: c order, need to reshape to {4,3,2}. Values taken from 0.4-rc3.8
    double[][] expLinspaced = new double[][] { { 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0 }, { 2.0, 3.0, 5.0, 6.0, 8.0, 9.0, 8.0, 9.0, 11.0, 12.0, 14.0, 15.0, 14.0, 15.0, 17.0, 18.0, 20.0, 21.0, 20.0, 21.0, 23.0, 24.0, 26.0, 27.0 }, { 2.0, 4.0, 4.0, 6.0, 6.0, 8.0, 8.0, 10.0, 10.0, 12.0, 12.0, 14.0, 14.0, 16.0, 16.0, 18.0, 18.0, 20.0, 20.0, 22.0, 22.0, 24.0, 24.0, 26.0 } };
    for (int i = 0; i < toBroadcastDims.length; i++) {
        int dim = toBroadcastDims[i];
        int length = ArrayUtil.prod(toBroadcastShapes[i]);
        //c-order target filled with 1..24
        INDArray zC = Nd4j.create(shape, 'c');
        zC.setData(Nd4j.linspace(1, 24, 24).data());
        //f-order target with the same values
        INDArray zF = Nd4j.create(shape, 'f');
        zF.assign(zC);
        INDArray toBroadcast = Nd4j.linspace(1, length, length);
        //In-place broadcast add (z == x) for each ordering
        Op opc = new BroadcastAddOp(zC, toBroadcast, zC, dim);
        Op opf = new BroadcastAddOp(zF, toBroadcast, zF, dim);
        INDArray exp = Nd4j.create(expLinspaced[i], shape, 'c');
        INDArray expF = Nd4j.create(shape, 'f');
        expF.assign(exp);
        Nd4j.getExecutioner().exec(opc);
        Nd4j.getExecutioner().exec(opf);
        assertEquals(exp, zC);
        //Compare the f-order result against the f-order copy of the expected values
        assertEquals(expF, zF);
    }
}
Also used : Op(org.nd4j.linalg.api.ops.Op) BroadcastOp(org.nd4j.linalg.api.ops.BroadcastOp) DynamicCustomOp(org.nd4j.linalg.api.ops.DynamicCustomOp) INDArray(org.nd4j.linalg.api.ndarray.INDArray) Test(org.junit.Test)

Aggregations

Op (org.nd4j.linalg.api.ops.Op)6 INDArray (org.nd4j.linalg.api.ndarray.INDArray)4 Test (org.junit.Test)3 DifferentialFunction (org.nd4j.autodiff.functions.DifferentialFunction)2 BroadcastOp (org.nd4j.linalg.api.ops.BroadcastOp)2 DynamicCustomOp (org.nd4j.linalg.api.ops.DynamicCustomOp)2 CompareAndSet (org.nd4j.linalg.api.ops.impl.transforms.comparison.CompareAndSet)2 Condition (org.nd4j.linalg.indexing.conditions.Condition)2 lombok.val (lombok.val)1 NoOpNameFoundException (org.nd4j.imports.NoOpNameFoundException)1