use of org.nd4j.linalg.api.ops.impl.accum.distances.ManhattanDistance in project nd4j by deeplearning4j.
the class CrashTest method op.
protected void op(INDArray x, INDArray y, int i) {
    // broadcast along row & column
    INDArray row = Nd4j.ones(64);
    INDArray column = Nd4j.ones(1024, 1);
    x.addiRowVector(row);
    x.addiColumnVector(column);

    // casual scalar
    x.addi(i * 2);

    // reduction along all dimensions
    float sum = x.sumNumber().floatValue();

    // index reduction
    Nd4j.getExecutioner().exec(new IMax(x), Integer.MAX_VALUE);

    // casual transform
    Nd4j.getExecutioner().exec(new Sqrt(x, x));

    // dup
    INDArray x1 = x.dup(x.ordering());
    INDArray x2 = x.dup(x.ordering());
    INDArray x3 = x.dup('c');
    INDArray x4 = x.dup('f');

    // vstack && hstack
    INDArray vstack = Nd4j.vstack(x, x1, x2, x3, x4);
    INDArray hstack = Nd4j.hstack(x, x1, x2, x3, x4);

    // reduce3 call
    Nd4j.getExecutioner().exec(new ManhattanDistance(x, x2));

    // flatten call
    INDArray flat = Nd4j.toFlattened(x, x1, x2, x3, x4);

    // reduction along dimension: row & column
    INDArray max_0 = x.max(0);
    INDArray max_1 = x.max(1);

    // index reduction along dimension: row & column
    INDArray imax_0 = Nd4j.argMax(x, 0);
    INDArray imax_1 = Nd4j.argMax(x, 1);

    // log softmax, softmax & softmax derivative
    Nd4j.getExecutioner().exec(new OldSoftMax(x));
    Nd4j.getExecutioner().exec(new SoftMaxDerivative(x));
    Nd4j.getExecutioner().exec(new LogSoftMax(x));

    // BooleanIndexing
    BooleanIndexing.replaceWhere(x, 5f, Conditions.lessThan(8f));

    // assign on view
    BooleanIndexing.assignIf(x, x1, Conditions.greaterThan(-1000000000f));

    // std var along all dimensions
    float std = x.stdNumber().floatValue();

    // std var along row & col
    INDArray xStd_0 = x.std(0);
    INDArray xStd_1 = x.std(1);

    // blas call
    float dot = (float) Nd4j.getBlasWrapper().dot(x, x1);

    // mmul
    for (boolean tA : paramsA) {
        for (boolean tB : paramsB) {
            INDArray xT = tA ? x.dup() : x.dup().transpose();
            INDArray yT = tB ? y.dup() : y.dup().transpose();
            Nd4j.gemm(xT, yT, tA, tB);
        }
    }

    // specially for views, checking here without dup and rollover
    Nd4j.gemm(x, y, false, false);

    System.out.println("Iteration passed: " + i);
}
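The CrashTest above exercises ManhattanDistance only as one call among many stress operations. For reference, here is a minimal, self-contained sketch of the same reduce3 usage, assuming the pre-1.0 nd4j API shown in these examples; the class name, variable names and sample values are illustrative only.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.accum.distances.ManhattanDistance;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;

public class ManhattanDistanceSketch {
    public static void main(String[] args) {
        INDArray x = Nd4j.create(new float[] { 1f, 2f, 3f, 4f });
        INDArray y = Nd4j.create(new float[] { 2f, 4f, 6f, 8f });

        // full reduction over all elements: |1-2| + |2-4| + |3-6| + |4-8| = 1 + 2 + 3 + 4 = 10
        double viaOp = Nd4j.getExecutioner().execAndReturn(new ManhattanDistance(x, y))
                        .getFinalResult().doubleValue();

        // convenience wrapper performing the same accumulation
        double viaTransforms = Transforms.manhattanDistance(x, y);

        System.out.println(viaOp + " == " + viaTransforms); // both print 10.0
    }
}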
use of org.nd4j.linalg.api.ops.impl.accum.distances.ManhattanDistance in project nd4j by deeplearning4j.
the class LongTests method testLongTadOp1.
@Test
public void testLongTadOp1() {
    double exp = Transforms.manhattanDistance(Nd4j.create(1000).assign(1.0), Nd4j.create(1000).assign(2.0));

    INDArray hugeX = Nd4j.create(2200000, 1000).assign(1.0);
    INDArray hugeY = Nd4j.create(1, 1000).assign(2.0);

    for (int x = 0; x < hugeX.rows(); x++) {
        assertEquals("Failed at row " + x, 1000, hugeX.getRow(x).sumNumber().intValue());
    }

    INDArray result = Nd4j.getExecutioner().exec(new ManhattanDistance(hugeX, hugeY, hugeX.lengthLong()), 1);
    for (int x = 0; x < hugeX.rows(); x++) {
        assertEquals(exp, result.getDouble(x), 1e-5);
    }
}
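The expected value here is easy to check by hand: hugeX is all 1.0 and hugeY is all 2.0, so each of the 1000 columns contributes |1.0 - 2.0| = 1.0, and every per-row distance (and the reference exp) is 1000.0. A smaller, hand-checkable version of the same TAD reduction, assuming the same imports, assertions and broadcasting behavior as the test above, might look like this:

    INDArray smallX = Nd4j.create(3, 4).assign(1.0);
    INDArray smallY = Nd4j.create(1, 4).assign(2.0);

    // reduce3 along dimension 1: one Manhattan distance per row of smallX against smallY
    INDArray perRow = Nd4j.getExecutioner().exec(new ManhattanDistance(smallX, smallY, smallX.lengthLong()), 1);
    for (int r = 0; r < smallX.rows(); r++) {
        // each row differs from smallY by 1.0 in each of 4 columns, so every distance is 4.0
        assertEquals(4.0, perRow.getDouble(r), 1e-5);
    }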
use of org.nd4j.linalg.api.ops.impl.accum.distances.ManhattanDistance in project nd4j by deeplearning4j.
the class CudaReduce3Tests method testPinnedManhattanDistance.
@Test
public void testPinnedManhattanDistance() throws Exception {
    // simple way to stop test if we're not on CUDA backend here
    INDArray array1 = Nd4j.create(new float[] { 0.0f, 1.0f, 2.0f, 3.0f, 4.0f });
    INDArray array2 = Nd4j.create(new float[] { 0.5f, 1.5f, 2.5f, 3.5f, 4.5f });

    double result = Nd4j.getExecutioner().execAndReturn(new ManhattanDistance(array1, array2)).getFinalResult().doubleValue();

    System.out.println("Distance: " + result);
    assertEquals(2.5, result, 0.01);

    System.out.println("Array1: " + array1);
    System.out.println("Array2: " + array2);
}
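The 2.5 expectation is just the hand computation below: each of the five element pairs differs by exactly 0.5. (A sketch for clarity, not part of the test.)

    double expected = 0.0;
    for (int i = 0; i < 5; i++) {
        expected += Math.abs((i + 0.5) - i); // |array2[i] - array1[i]| = 0.5 for every pair
    }
    // expected == 2.5, matching the asserted value above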
use of org.nd4j.linalg.api.ops.impl.accum.distances.ManhattanDistance in project nd4j by deeplearning4j.
the class NativeOpExecutionerTest method testEuclideanManhattanDistanceAlongDimension_Rank4.
@Test
public void testEuclideanManhattanDistanceAlongDimension_Rank4() {
    Nd4j.getRandom().setSeed(12345);
    INDArray firstOneExample = Nd4j.rand('c', new int[] { 1, 2, 2, 2 });
    INDArray secondOneExample = Nd4j.rand('c', new int[] { 1, 2, 2, 2 });

    double[] d1 = firstOneExample.data().asDouble();
    double[] d2 = secondOneExample.data().asDouble();
    double sumSquaredDiff = 0.0;
    double expManhattanDistance = 0.0;
    for (int i = 0; i < d1.length; i++) {
        double diff = d1[i] - d2[i];
        sumSquaredDiff += diff * diff;
        expManhattanDistance += Math.abs(diff);
    }
    double expected = Math.sqrt(sumSquaredDiff);
    System.out.println("Expected, Euclidean: " + expected);
    System.out.println("Expected, Manhattan: " + expManhattanDistance);

    int mb = 2;
    INDArray firstOrig = Nd4j.create(mb, 2, 2, 2);
    INDArray secondOrig = Nd4j.create(mb, 2, 2, 2);
    for (int i = 0; i < mb; i++) {
        firstOrig.put(new INDArrayIndex[] { NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all() }, firstOneExample);
        secondOrig.put(new INDArrayIndex[] { NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.all() }, secondOneExample);
    }

    for (char order : new char[] { 'c', 'f' }) {
        INDArray first = firstOrig.dup(order);
        INDArray second = secondOrig.dup(order);
        assertEquals(firstOrig, first);
        assertEquals(secondOrig, second);

        INDArray out = Nd4j.getExecutioner().exec(new EuclideanDistance(first, second), 1, 2, 3);
        INDArray outManhattan = Nd4j.getExecutioner().exec(new ManhattanDistance(first, second), 1, 2, 3);

        System.out.println("\n\nOrder: " + order);
        System.out.println("Euclidean:");
        System.out.println(Arrays.toString(out.getRow(0).dup().data().asDouble()));
        System.out.println(Arrays.toString(out.getRow(1).dup().data().asDouble()));
        assertEquals(out.getRow(0), out.getRow(1));

        System.out.println("Manhattan:");
        System.out.println(Arrays.toString(outManhattan.getRow(0).dup().data().asDouble()));
        System.out.println(Arrays.toString(outManhattan.getRow(1).dup().data().asDouble()));

        assertEquals(expected, out.getRow(0).getDouble(0), 1e-5);
        assertEquals(expManhattanDistance, outManhattan.getRow(0).getDouble(0), 1e-5);
    }
}
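Reducing over dimensions 1, 2 and 3 treats each index along dimension 0 (each minibatch example) as one tensor-along-dimension of eight elements, so the op returns one distance per example; because both examples are copies of the same data, the two output rows must match. A stripped-down sketch of that call shape, assuming the same exec(op, dims...) API and imports as the test above:

    INDArray a = Nd4j.rand('c', new int[] { 2, 2, 2, 2 });
    INDArray b = Nd4j.rand('c', new int[] { 2, 2, 2, 2 });

    // one Manhattan distance per index along dimension 0 (i.e. per example)
    INDArray perExample = Nd4j.getExecutioner().exec(new ManhattanDistance(a, b), 1, 2, 3);
    System.out.println(perExample); // two values: a[0] vs b[0] and a[1] vs b[1]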
use of org.nd4j.linalg.api.ops.impl.accum.distances.ManhattanDistance in project nd4j by deeplearning4j.
the class NativeOpExecutionerTest method testPinnedManhattanDistance2.
@Test
public void testPinnedManhattanDistance2() throws Exception {
    // simple way to stop test if we're not on CUDA backend here
    INDArray array1 = Nd4j.linspace(1, 1000, 1000);
    INDArray array2 = Nd4j.linspace(1, 900, 1000);

    double result = Nd4j.getExecutioner().execAndReturn(new ManhattanDistance(array1, array2)).getFinalResult().doubleValue();

    assertEquals(50000.0, result, 0.001f);
}
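The 50000.0 expectation can be derived by hand: array1[i] = 1 + i (linspace from 1 to 1000 in 1000 steps), array2[i] = 1 + i * 899 / 999 (linspace from 1 to 900 in 1000 steps), so the absolute difference at index i is i * 100 / 999, and summing over i = 0..999 gives (100 / 999) * (999 * 1000 / 2) = 50000. A quick hand check (a sketch, not part of the test):

    double expected = 0.0;
    for (int i = 0; i < 1000; i++) {
        double a = 1.0 + i;                  // linspace(1, 1000, 1000)
        double b = 1.0 + i * 899.0 / 999.0;  // linspace(1, 900, 1000)
        expected += Math.abs(a - b);
    }
    // expected == 50000.0 (up to floating-point rounding)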