
Example 1 with NDArrayStrings

Use of org.nd4j.linalg.string.NDArrayStrings in project nd4j by deeplearning4j.

From class TestFormatting, method testNd4jArrayString.

@Test
public void testNd4jArrayString() {
    INDArray arr = Nd4j.create(new float[] { 1f, 20000000f, 40.838383f, 3f }, new int[] { 2, 2 });
    // separator "," with 3 decimal places of precision
    String serializedData1 = new NDArrayStrings(",", 3).format(arr);
    log.info("\n" + serializedData1);
    String expected1 = "[[1.000,40.838],\n" + " [2e7,3.000]]";
    Assert.assertEquals(expected1.replaceAll(" ", ""), serializedData1.replaceAll(" ", ""));
    // default constructor: "," separator, 4 decimal places
    String serializedData2 = new NDArrayStrings().format(arr);
    log.info("\n" + serializedData2);
    String expected2 = "[[1.0000,40.8384],\n" + " [2e7,3.0000]]";
    Assert.assertEquals(expected2.replaceAll(" ", ""), serializedData2.replaceAll(" ", ""));
    // separator plus an explicit java.text.DecimalFormat pattern
    String serializedData3 = new NDArrayStrings(",", "000.00##E0").format(arr);
    String expected3 = "[[100.00E-2,408.3838E-1],\n" + " [200.00E5,300.00E-2]]";
    log.info("\n" + serializedData3);
    Assert.assertEquals(expected3.replaceAll(" ", ""), serializedData3.replaceAll(" ", ""));
}
Also used: INDArray (org.nd4j.linalg.api.ndarray.INDArray), NDArrayStrings (org.nd4j.linalg.string.NDArrayStrings), Test (org.junit.Test), BaseNd4jTest (org.nd4j.linalg.BaseNd4jTest)
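
The "000.00##E0" pattern passed to the third constructor is an ordinary java.text.DecimalFormat scientific pattern, so its effect can be verified with the JDK alone. A minimal sketch, standard library only; the printed values match the expected3 string above:

import java.text.DecimalFormat;

public class PatternDemo {
    public static void main(String[] args) {
        // three minimum integer digits force the mantissa into [100, 1000),
        // with at least two and at most four fraction digits
        DecimalFormat df = new DecimalFormat("000.00##E0");
        System.out.println(df.format(1));         // 100.00E-2
        System.out.println(df.format(40.838383)); // 408.3838E-1
        System.out.println(df.format(20000000));  // 200.00E5
        System.out.println(df.format(3));         // 300.00E-2
    }
}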

Example 2 with NDArrayStrings

Use of org.nd4j.linalg.string.NDArrayStrings in project nd4j by deeplearning4j.

From class TestFormatting, method testTwoByTwo.

@Test
public void testTwoByTwo() {
    // zero-filled rank-4 array; no assertion, just a formatting smoke test
    INDArray arr = Nd4j.create(2, 2, 2, 2);
    System.out.println(new NDArrayStrings().format(arr));
}
Also used: INDArray (org.nd4j.linalg.api.ndarray.INDArray), NDArrayStrings (org.nd4j.linalg.string.NDArrayStrings), Test (org.junit.Test), BaseNd4jTest (org.nd4j.linalg.BaseNd4jTest)

Example 3 with NDArrayStrings

Use of org.nd4j.linalg.string.NDArrayStrings in project nd4j by deeplearning4j.

From class DerivativeTests, method softmaxsimplelongerlengthLossTest.

@Test
public void softmaxsimplelongerlengthLossTest() {
    /* Read the comments in the earlier test for length = 2. */
    // random array representing preOut
    int someLength = 7;
    INDArray X = Nd4j.rand(1, someLength);
    // preOut transformed to yHat with softmax
    INDArray YHat = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("softmax", X.dup()));
    // hard-code a target Y to build an MSE loss against
    INDArray temp = Nd4j.rand(1, someLength);
    INDArray Y = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("softmax", temp));
    // the MSE loss between Y and YHat
    double lossHere = Transforms.pow(Y.sub(YHat), 2).sumNumber().doubleValue();
    INDArray softmaxDer = Nd4j.getExecutioner().execAndReturn(new SoftMaxDerivative(X.dup()));
    // chain rule: dL/dYHat = -2 * (y - yHat), then multiply by the softmax derivative
    INDArray dLdY = Y.sub(YHat).mul(-2);
    INDArray currentGradient = dLdY.mul(softmaxDer);
    INDArray mysoftmaxDer = correctSoftmax(X);
    // contract the full Jacobian against dL/dYHat: gradient_i = sum_j J_ij * dLdY_j
    INDArray myGradient = mysoftmaxDer.mulRowVector(dLdY).sum(1);
    double epsilon = 0.0001;
    INDArray Xiplus, Ximinus;
    INDArray YHatplus, YHatminus;
    double lossplus, lossminus;
    INDArray numGradient = Nd4j.zeros(1, someLength);
    for (int i = 0; i < someLength; i++) {
        /* perturb one element of X at a time */
        // +epsilon
        double x = X.getDouble(0, i);
        Xiplus = X.dup();
        Xiplus.put(0, i, x + epsilon);
        YHatplus = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("softmax", Xiplus.dup()));
        lossplus = Transforms.pow(Y.sub(YHatplus), 2).sumNumber().doubleValue();
        // -epsilon
        Ximinus = X.dup();
        Ximinus.put(0, i, x - epsilon);
        YHatminus = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("softmax", Ximinus.dup()));
        lossminus = Transforms.pow(Y.sub(YHatminus), 2).sumNumber().doubleValue();
        double gradienti = (lossplus - lossminus) / (2 * epsilon);
        numGradient.put(0, i, gradienti);
    }
    System.out.println("=========================");
    System.out.println("NUMERICAL GRADIENT:");
    System.out.println(new NDArrayStrings(6).format(numGradient));
    System.out.println("\nANALYTIC USING EXISTING SOFTMAX DER:");
    System.out.println(new NDArrayStrings(6).format(currentGradient));
    System.out.println("\nGRADIENT USING MY VERSION OF SOFTMAX DER:");
    System.out.println(new NDArrayStrings(6).format(myGradient));
    System.out.println("=========================");
}
Also used: INDArray (org.nd4j.linalg.api.ndarray.INDArray), SoftMaxDerivative (org.nd4j.linalg.api.ops.impl.transforms.SoftMaxDerivative), NDArrayStrings (org.nd4j.linalg.string.NDArrayStrings), Test (org.junit.Test), BaseNd4jTest (org.nd4j.linalg.BaseNd4jTest)
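
For reference, the two analytic quantities being compared against the numerical gradient, in standard notation. The helper correctSoftmax is defined elsewhere in DerivativeTests; assuming it returns the full softmax Jacobian, which is what mulRowVector(dLdY).sum(1) contracts against:

$$\frac{\partial s_i}{\partial x_j} = s_i(\delta_{ij} - s_j), \qquad \frac{\partial L}{\partial x_i} \approx \frac{L(x + \epsilon e_i) - L(x - \epsilon e_i)}{2\epsilon}$$

where $s = \mathrm{softmax}(x)$, $L = \sum_j (y_j - \hat{y}_j)^2$, and $e_i$ is the i-th standard basis vector. Note that currentGradient multiplies the SoftMaxDerivative output elementwise with dLdY, while myGradient contracts a full Jacobian; printing all three gradients side by side for comparison is the point of the test.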

Example 4 with NDArrayStrings

Use of org.nd4j.linalg.string.NDArrayStrings in project nd4j by deeplearning4j.

From class TestPCA, method testPCA.

/**
 * Test new PCA routines, added by Luke Czapla
 */
@Test
public void testPCA() {
    // 10,000 random samples of 16 features; correlations are introduced below
    INDArray m = Nd4j.randn(10000, 16);
    m.getColumn(0).muli(4.84);
    m.getColumn(1).muli(4.84);
    m.getColumn(2).muli(4.09);
    m.getColumn(1).addi(m.getColumn(2).div(2.0));
    m.getColumn(2).addi(34.286);
    m.getColumn(1).addi(m.getColumn(4));
    m.getColumn(4).subi(m.getColumn(5).div(2.0));
    m.getColumn(5).addi(3.4);
    m.getColumn(6).muli(6.0);
    m.getColumn(7).muli(0.2);
    m.getColumn(8).muli(2.0);
    m.getColumn(9).muli(6.0);
    m.getColumn(9).addi(m.getColumn(6).mul(1.0));
    m.getColumn(10).muli(0.2);
    m.getColumn(11).muli(2.0);
    m.getColumn(12).muli(0.2);
    m.getColumn(13).muli(4.0);
    m.getColumn(14).muli(3.2);
    m.getColumn(14).addi(m.getColumn(2).mul(1.0)).subi(m.getColumn(13).div(2.0));
    m.getColumn(15).muli(1.0);
    m.getColumn(13).subi(12.0);
    m.getColumn(15).addi(30.0);
    PCA myPCA = new PCA(m);
    INDArray reduced70 = myPCA.reducedBasis(0.70);
    INDArray reduced99 = myPCA.reducedBasis(0.99);
    assertTrue("Major variance differences should change number of basis vectors", reduced99.columns() > reduced70.columns());
    INDArray reduced100 = myPCA.reducedBasis(1.0);
    assertTrue("100% variance coverage should include all eigenvectors", reduced100.columns() == m.columns());
    NDArrayStrings ns = new NDArrayStrings(5);
    System.out.println("Eigenvectors:\n" + ns.format(myPCA.getEigenvectors()));
    System.out.println("Eigenvalues:\n" + ns.format(myPCA.getEigenvalues()));
    double variance = 0.0;
    // estimate the captured variance over 1000 of the generated samples, using the reduced basis
    for (int i = 0; i < 1000; i++) {
        variance += myPCA.estimateVariance(m.getRow(i), reduced70.columns());
    }
    variance /= 1000.0;
    System.out.println("Fraction of variance using 70% variance with " + reduced70.columns() + " columns: " + variance);
    assertTrue("Variance does not cover intended 70% variance", variance > 0.70);
    // create "dummy" data with the same exact trends
    INDArray testSample = myPCA.generateGaussianSamples(10000);
    PCA analyzePCA = new PCA(testSample);
    assertTrue("Means do not agree accurately enough", myPCA.getMean().equalsWithEps(analyzePCA.getMean(), 0.2 * myPCA.getMean().columns()));
    assertTrue("Covariance is not reproduced accurately enough", myPCA.getCovarianceMatrix().equalsWithEps(analyzePCA.getCovarianceMatrix(), 1.0 * analyzePCA.getCovarianceMatrix().length()));
    assertTrue("Eigenvalues are not close enough", myPCA.getEigenvalues().equalsWithEps(analyzePCA.getEigenvalues(), 0.5 * myPCA.getEigenvalues().columns()));
    assertTrue("Eigenvectors are not close enough", myPCA.getEigenvectors().equalsWithEps(analyzePCA.getEigenvectors(), 0.1 * analyzePCA.getEigenvectors().length()));
    System.out.println("Original cov:\n" + ns.format(myPCA.getCovarianceMatrix()) + "\nDummy cov:\n" + ns.format(analyzePCA.getCovarianceMatrix()));
    INDArray testSample2 = analyzePCA.convertBackToFeatures(analyzePCA.convertToComponents(testSample));
    assertTrue("Transformation does not work.", testSample.equalsWithEps(testSample2, 1e-5 * testSample.length()));
}
Also used: INDArray (org.nd4j.linalg.api.ndarray.INDArray), NDArrayStrings (org.nd4j.linalg.string.NDArrayStrings), Test (org.junit.Test), BaseNd4jTest (org.nd4j.linalg.BaseNd4jTest)
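
Stripped of the assertions, the PCA round-trip exercised above reduces to the following minimal sketch. It uses only methods that appear in the test; the package org.nd4j.linalg.dimensionalityreduction for PCA is an assumption here, since its import is not shown in the aggregation list:

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dimensionalityreduction.PCA; // assumed package
import org.nd4j.linalg.factory.Nd4j;

public class PcaRoundTrip {
    public static void main(String[] args) {
        // rows are samples, columns are features
        INDArray data = Nd4j.randn(1000, 16);
        PCA pca = new PCA(data);
        // enough eigenvectors to cover roughly 70% of the variance
        INDArray basis70 = pca.reducedBasis(0.70);
        System.out.println("basis vectors for 70% variance: " + basis70.columns());
        // project into component space, then reconstruct the features
        INDArray components = pca.convertToComponents(data);
        INDArray restored = pca.convertBackToFeatures(components);
        System.out.println("round trip ok: " + data.equalsWithEps(restored, 1e-5 * data.length()));
    }
}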

Example 5 with NDArrayStrings

Use of org.nd4j.linalg.string.NDArrayStrings in project nd4j by deeplearning4j.

From class Nd4j, method writeStringForArray.

private static String writeStringForArray(INDArray write, String format) {
    // default to a full-precision scientific-notation pattern when none is supplied
    if (format.isEmpty())
        format = "0.000000000000000000E0";
    String lineOne = "{\n";
    String lineTwo = "\"filefrom\": \"dl4j\",\n";
    String lineThree = "\"ordering\": \"" + write.ordering() + "\",\n";
    String lineFour = "\"shape\":\t" + java.util.Arrays.toString(write.shape()) + ",\n";
    String lineFive = "\"data\":\n";
    String fileData = new NDArrayStrings(",", format).format(write, false);
    String fileEnd = "\n}\n";
    String fileBegin = lineOne + lineTwo + lineThree + lineFour + lineFive;
    String fileContents = fileBegin + fileData + fileEnd;
    return fileContents;
}
Also used: NDArrayStrings (org.nd4j.linalg.string.NDArrayStrings)
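
For a small 2 x 2 c-ordered array, the string this method produces would look roughly as follows. This is an illustrative sketch, not captured output; the exact rendering of the "data" block depends on NDArrayStrings and the DecimalFormat pattern, and there is a literal tab after "shape":

{
"filefrom": "dl4j",
"ordering": "c",
"shape":	[2, 2],
"data":
[[1.000000000000000000E0,2.000000000000000000E0],
 [3.000000000000000000E0,4.000000000000000000E0]]
}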

Aggregations

NDArrayStrings (org.nd4j.linalg.string.NDArrayStrings): 5 uses
Test (org.junit.Test): 4 uses
BaseNd4jTest (org.nd4j.linalg.BaseNd4jTest): 4 uses
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 4 uses
SoftMaxDerivative (org.nd4j.linalg.api.ops.impl.transforms.SoftMaxDerivative): 1 use