Use of org.nd4j.linalg.api.ops.impl.indexaccum.IMax in project nd4j by deeplearning4j.
Class CrashTest, method op.
protected void op(INDArray x, INDArray y, int i) {
    // broadcast along row & column
    INDArray row = Nd4j.ones(64);
    INDArray column = Nd4j.ones(1024, 1);
    x.addiRowVector(row);
    x.addiColumnVector(column);

    // ordinary scalar operation
    x.addi(i * 2);

    // reduction along all dimensions
    float sum = x.sumNumber().floatValue();

    // index reduction (Integer.MAX_VALUE means "along all dimensions")
    Nd4j.getExecutioner().exec(new IMax(x), Integer.MAX_VALUE);

    // ordinary transform
    Nd4j.getExecutioner().exec(new Sqrt(x, x));

    // dup in various orderings
    INDArray x1 = x.dup(x.ordering());
    INDArray x2 = x.dup(x.ordering());
    INDArray x3 = x.dup('c');
    INDArray x4 = x.dup('f');

    // vstack & hstack
    INDArray vstack = Nd4j.vstack(x, x1, x2, x3, x4);
    INDArray hstack = Nd4j.hstack(x, x1, x2, x3, x4);

    // reduce3 call
    Nd4j.getExecutioner().exec(new ManhattanDistance(x, x2));

    // flatten call
    INDArray flat = Nd4j.toFlattened(x, x1, x2, x3, x4);

    // reduction along dimension: row & column
    INDArray max_0 = x.max(0);
    INDArray max_1 = x.max(1);

    // index reduction along dimension: row & column
    INDArray imax_0 = Nd4j.argMax(x, 0);
    INDArray imax_1 = Nd4j.argMax(x, 1);

    // log-softmax, softmax & softmax derivative
    Nd4j.getExecutioner().exec(new OldSoftMax(x));
    Nd4j.getExecutioner().exec(new SoftMaxDerivative(x));
    Nd4j.getExecutioner().exec(new LogSoftMax(x));

    // BooleanIndexing
    BooleanIndexing.replaceWhere(x, 5f, Conditions.lessThan(8f));

    // assign on view
    BooleanIndexing.assignIf(x, x1, Conditions.greaterThan(-1000000000f));

    // std/var along all dimensions
    float std = x.stdNumber().floatValue();

    // std/var along row & column
    INDArray xStd_0 = x.std(0);
    INDArray xStd_1 = x.std(1);

    // BLAS call
    float dot = (float) Nd4j.getBlasWrapper().dot(x, x1);

    // mmul with all transpose combinations
    for (boolean tA : paramsA) {
        for (boolean tB : paramsB) {
            INDArray xT = tA ? x.dup() : x.dup().transpose();
            INDArray yT = tB ? y.dup() : y.dup().transpose();
            Nd4j.gemm(xT, yT, tA, tB);
        }
    }

    // specifically for views: checking gemm without dup and rollover
    Nd4j.gemm(x, y, false, false);

    System.out.println("Iteration passed: " + i);
}
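For reference, a minimal sketch of the two IMax invocation patterns the stress test above exercises: casting the executed op to IndexAccumulation for a single scalar index, and passing a dimension for a per-dimension index array. The shape and values below are illustrative assumptions, not taken from the test.

INDArray m = Nd4j.create(new float[] { 1f, 9f, 3f, 4f, 8f, 2f }, new int[] { 2, 3 });
// linear (c-order) index of the maximum over the whole array: 1 (the value 9f)
int flatIdx = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(m))).getFinalResult();
// for each column, the row index holding that column's maximum: [1, 0, 0]
INDArray perColumn = Nd4j.getExecutioner().exec(new IMax(m.dup()), 0);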
Use of org.nd4j.linalg.api.ops.impl.indexaccum.IMax in project nd4j by deeplearning4j.
Class CudaIndexReduceTests, method testPinnedIMax.
@Test
public void testPinnedIMax() throws Exception {
    // simple way to stop the test if we're not on the CUDA backend
    assertEquals("JcublasLevel1", Nd4j.getBlasWrapper().level1().getClass().getSimpleName());

    INDArray array1 = Nd4j.create(new float[] { 1.0f, 0.1f, 2.0f, 3.0f, 4.0f, 5.0f });
    int idx = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(array1))).getFinalResult();

    System.out.println("Array1: " + array1);
    assertEquals(5, idx);
}
Use of org.nd4j.linalg.api.ops.impl.indexaccum.IMax in project nd4j by deeplearning4j.
Class CudaIndexReduceTests, method testPinnedIMax3.
@Test
public void testPinnedIMax3() throws Exception {
    // simple way to stop the test if we're not on the CUDA backend
    assertEquals("JcublasLevel1", Nd4j.getBlasWrapper().level1().getClass().getSimpleName());

    INDArray array1 = Nd4j.create(new float[] { 6.0f, 0.1f, 2.0f, 3.0f, 7.0f, 9.0f });
    int idx = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(array1))).getFinalResult();

    System.out.println("Array1: " + array1);
    assertEquals(5, idx);
}
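Both pinned tests guard against non-CUDA backends with a hard assertEquals, which fails the test on a CPU backend rather than skipping it. A possible alternative, assuming plain JUnit 4 is on the classpath, is to use Assume so the test is reported as skipped instead:

import static org.junit.Assume.assumeTrue;

// skips (rather than fails) the test when the level-1 BLAS wrapper is not the CUDA one
assumeTrue("JcublasLevel1".equals(Nd4j.getBlasWrapper().level1().getClass().getSimpleName()));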
Use of org.nd4j.linalg.api.ops.impl.indexaccum.IMax in project nd4j by deeplearning4j.
Class Nd4jTestsC, method testIMax2of4d.
@Test
public void testIMax2of4d() {
    Nd4j.getRandom().setSeed(12345);
    int[] s = new int[] { 2, 3, 4, 5 };
    INDArray arr = Nd4j.rand(s);

    // test reduction along dimensions 0 and 1
    INDArray exp = Nd4j.create(new int[] { 4, 5 });
    for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 5; j++) {
            INDArray subset = arr.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(i), NDArrayIndex.point(j));
            assertArrayEquals(new int[] { 2, 3 }, subset.shape());

            NdIndexIterator iter = new NdIndexIterator('c', 2, 3);
            double max = -Double.MAX_VALUE;
            int maxIdxPos = -1;
            int count = 0;
            while (iter.hasNext()) {
                int[] next = iter.next();
                double d = subset.getDouble(next);
                if (d > max) {
                    max = d;
                    maxIdxPos = count;
                }
                count++;
            }
            exp.putScalar(i, j, maxIdxPos);
        }
    }

    INDArray actC = Nd4j.getExecutioner().exec(new IMax(arr.dup('c')), 0, 1);
    INDArray actF = Nd4j.getExecutioner().exec(new IMax(arr.dup('f')), 0, 1);
    assertEquals(exp, actC);
    assertEquals(exp, actF);

    // test reduction along dimensions 2 and 3
    exp = Nd4j.create(new int[] { 2, 3 });
    for (int i = 0; i < 2; i++) {
        for (int j = 0; j < 3; j++) {
            INDArray subset = arr.get(NDArrayIndex.point(i), NDArrayIndex.point(j), NDArrayIndex.all(), NDArrayIndex.all());
            assertArrayEquals(new int[] { 4, 5 }, subset.shape());

            NdIndexIterator iter = new NdIndexIterator('c', 4, 5);
            int maxIdxPos = -1;
            double max = -Double.MAX_VALUE;
            int count = 0;
            while (iter.hasNext()) {
                int[] next = iter.next();
                double d = subset.getDouble(next);
                if (d > max) {
                    max = d;
                    maxIdxPos = count;
                }
                count++;
            }
            exp.putScalar(i, j, maxIdxPos);
        }
    }

    actC = Nd4j.getExecutioner().exec(new IMax(arr.dup('c')), 2, 3);
    actF = Nd4j.getExecutioner().exec(new IMax(arr.dup('f')), 2, 3);
    assertEquals(exp, actC);
    assertEquals(exp, actF);
}
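The expected indices built above are linear c-order positions within each reduced slice, which is exactly what the NdIndexIterator('c', ...) counting computes. A small hypothetical helper (not part of nd4j) makes the mapping back to coordinates explicit:

// c-order linearization within a 2-d slice: linearIdx = row * cols + col
static int[] linearToCoords(int linearIdx, int cols) {
    return new int[] { linearIdx / cols, linearIdx % cols };
}
// e.g. within a 2x3 slice, linear index 4 maps back to row 1, column 1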
Use of org.nd4j.linalg.api.ops.impl.indexaccum.IMax in project deeplearning4j by deeplearning4j.
Class BagOfWordsVectorizerTest, method testBagOfWordsVectorizer.
@Test
public void testBagOfWordsVectorizer() throws Exception {
    File rootDir = new ClassPathResource("rootdir").getFile();
    LabelAwareSentenceIterator iter = new LabelAwareFileSentenceIterator(rootDir);
    List<String> labels = Arrays.asList("label1", "label2");
    TokenizerFactory tokenizerFactory = new DefaultTokenizerFactory();

    BagOfWordsVectorizer vectorizer = new BagOfWordsVectorizer.Builder()
            .setMinWordFrequency(1)
            .setStopWords(new ArrayList<String>())
            .setTokenizerFactory(tokenizerFactory)
            .setIterator(iter)
            .allowParallelTokenization(false)
            .build();
    vectorizer.fit();

    VocabWord word = vectorizer.getVocabCache().wordFor("file.");
    assumeNotNull(word);
    assertEquals(word, vectorizer.getVocabCache().tokenFor("file."));
    assertEquals(2, vectorizer.getVocabCache().totalNumberOfDocs());
    assertEquals(2, word.getSequencesCount());
    assertEquals(2, word.getElementFrequency(), 0.1);

    VocabWord word1 = vectorizer.getVocabCache().wordFor("1");
    assertEquals(1, word1.getSequencesCount());
    assertEquals(1, word1.getElementFrequency(), 0.1);

    log.info("Labels used: " + vectorizer.getLabelsSource().getLabels());
    assertEquals(2, vectorizer.getLabelsSource().getNumberOfLabelsUsed());

    INDArray array = vectorizer.transform("This is 2 file.");
    log.info("Transformed array: " + array);
    assertEquals(5, array.columns());

    VocabCache<VocabWord> vocabCache = vectorizer.getVocabCache();
    assertEquals(2, array.getDouble(vocabCache.tokenFor("This").getIndex()), 0.1);
    assertEquals(2, array.getDouble(vocabCache.tokenFor("is").getIndex()), 0.1);
    assertEquals(2, array.getDouble(vocabCache.tokenFor("file.").getIndex()), 0.1);
    assertEquals(0, array.getDouble(vocabCache.tokenFor("1").getIndex()), 0.1);
    assertEquals(1, array.getDouble(vocabCache.tokenFor("2").getIndex()), 0.1);

    DataSet dataSet = vectorizer.vectorize("This is 2 file.", "label2");
    assertEquals(array, dataSet.getFeatureMatrix());

    INDArray labelz = dataSet.getLabels();
    log.info("Labels array: " + labelz);
    int idx2 = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(labelz))).getFinalResult();
    // assertEquals(1.0, dataSet.getLabels().getDouble(0), 0.1);
    // assertEquals(0.0, dataSet.getLabels().getDouble(1), 0.1);

    dataSet = vectorizer.vectorize("This is 1 file.", "label1");
    assertEquals(2, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("This").getIndex()), 0.1);
    assertEquals(2, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("is").getIndex()), 0.1);
    assertEquals(2, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("file.").getIndex()), 0.1);
    assertEquals(1, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("1").getIndex()), 0.1);
    assertEquals(0, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("2").getIndex()), 0.1);
    int idx1 = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(dataSet.getLabels()))).getFinalResult();
    // assertEquals(0.0, dataSet.getLabels().getDouble(0), 0.1);
    // assertEquals(1.0, dataSet.getLabels().getDouble(1), 0.1);
    assertNotEquals(idx2, idx1);

    // Serialization check
    File tempFile = File.createTempFile("fdsf", "fdfsdf");
    tempFile.deleteOnExit();
    SerializationUtils.saveObject(vectorizer, tempFile);
    BagOfWordsVectorizer vectorizer2 = SerializationUtils.readObject(tempFile);
    vectorizer2.setTokenizerFactory(tokenizerFactory);
    dataSet = vectorizer2.vectorize("This is 2 file.", "label2");
    assertEquals(array, dataSet.getFeatureMatrix());
}
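Since the label rows produced by vectorize are effectively one-hot, the two IMax calls above simply recover the label indices, which is why the test only asserts idx2 != idx1. A minimal sketch of the same pattern on a hand-built one-hot row (the values are assumptions for illustration):

// recovers the position of the single 1.0 entry, i.e. the label index (1 here)
INDArray oneHot = Nd4j.create(new float[] { 0f, 1f });
int labelIdx = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(oneHot))).getFinalResult();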