Use of org.nd4j.linalg.api.ops.IndexAccumulation in project nd4j by deeplearning4j.
From class OpExecutionerTests, method testIMin:
@Test
public void testIMin() {
    INDArray arr = Nd4j.linspace(1, 10, 10);
    IMin imin = new IMin(arr);
    assertEquals(0, ((IndexAccumulation) Nd4j.getExecutioner().exec(imin)).getFinalResult());

    arr.muli(-1);
    imin = new IMin(arr);
    int minIdx = ((IndexAccumulation) Nd4j.getExecutioner().exec(imin)).getFinalResult();
    assertEquals(9, minIdx);
}
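The pattern in testIMin (wrap the array in an index-reduction op, pass it to the executioner, cast the result to IndexAccumulation, and read getFinalResult()) is the same for every index reduction. Below is a minimal helper-class sketch built on that pattern; the import paths assume the org.nd4j.linalg.api.ops.impl.indexaccum package layout used by these tests, and the class itself is illustrative, not part of the nd4j test suite.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.IndexAccumulation;
import org.nd4j.linalg.api.ops.impl.indexaccum.IMax;
import org.nd4j.linalg.api.ops.impl.indexaccum.IMin;
import org.nd4j.linalg.factory.Nd4j;

/** Illustrative helper: whole-array argmin/argmax via IndexAccumulation ops. */
public class IndexReduceUtil {

    /** Linear index of the smallest element of the (flattened) array. */
    public static int argMin(INDArray arr) {
        return ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMin(arr))).getFinalResult();
    }

    /** Linear index of the largest element of the (flattened) array. */
    public static int argMax(INDArray arr) {
        return ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(arr))).getFinalResult();
    }
}

With such a helper, the assertions in testIMin would reduce to assertEquals(0, IndexReduceUtil.argMin(arr)) before the sign flip and assertEquals(9, IndexReduceUtil.argMin(arr)) after it.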
Use of org.nd4j.linalg.api.ops.IndexAccumulation in project nd4j by deeplearning4j.
From class CudaIndexReduceTests, method testPinnedIMin:
@Test
public void testPinnedIMin() throws Exception {
    // simple way to stop test if we're not on CUDA backend here
    assertEquals("JcublasLevel1", Nd4j.getBlasWrapper().level1().getClass().getSimpleName());

    INDArray array1 = Nd4j.create(new float[] {1.0f, 0.1f, 2.0f, 3.0f, 4.0f, 5.0f});
    int idx = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMin(array1))).getFinalResult();

    System.out.println("Array1: " + array1);
    assertEquals(1, idx);
}
Use of org.nd4j.linalg.api.ops.IndexAccumulation in project nd4j by deeplearning4j.
From class CudaIndexReduceTests, method testPinnedIMax:
@Test
public void testPinnedIMax() throws Exception {
    // simple way to stop test if we're not on CUDA backend here
    assertEquals("JcublasLevel1", Nd4j.getBlasWrapper().level1().getClass().getSimpleName());

    INDArray array1 = Nd4j.create(new float[] {1.0f, 0.1f, 2.0f, 3.0f, 4.0f, 5.0f});
    int idx = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(array1))).getFinalResult();

    System.out.println("Array1: " + array1);
    assertEquals(5, idx);
}
Use of org.nd4j.linalg.api.ops.IndexAccumulation in project nd4j by deeplearning4j.
From class CudaIndexReduceTests, method testPinnedIMax3:
@Test
public void testPinnedIMax3() throws Exception {
    // simple way to stop test if we're not on CUDA backend here
    assertEquals("JcublasLevel1", Nd4j.getBlasWrapper().level1().getClass().getSimpleName());

    INDArray array1 = Nd4j.create(new float[] {6.0f, 0.1f, 2.0f, 3.0f, 7.0f, 9.0f});
    int idx = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(array1))).getFinalResult();

    System.out.println("Array1: " + array1);
    assertEquals(5, idx);
}
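The three CUDA tests above reduce over the whole array and read a single index back. When per-row or per-column indices are needed, the same IMax op can be executed along a dimension. The sketch below is illustrative only: it assumes the exec(IndexAccumulation, int...) overload of the executioner and the Nd4j.argMax convenience method available in the nd4j generation these tests target.

// Illustrative sketch (not from the original tests): per-row argmax on a 2x3 matrix.
INDArray matrix = Nd4j.create(new float[][] {
        {1.0f, 0.1f, 2.0f},
        {3.0f, 9.0f, 4.0f}});

// Executing the index reduction along dimension 1 yields one index per row:
// row 0 -> 2 (value 2.0f), row 1 -> 1 (value 9.0f).
INDArray rowArgMax = Nd4j.getExecutioner().exec(new IMax(matrix), 1);
System.out.println("Per-row argmax: " + rowArgMax);

// Nd4j.argMax wraps the same IMax op, so it should produce the same indices.
INDArray rowArgMaxConvenience = Nd4j.argMax(matrix, 1);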
Use of org.nd4j.linalg.api.ops.IndexAccumulation in project deeplearning4j by deeplearning4j.
From class BagOfWordsVectorizerTest, method testBagOfWordsVectorizer:
@Test
public void testBagOfWordsVectorizer() throws Exception {
    File rootDir = new ClassPathResource("rootdir").getFile();
    LabelAwareSentenceIterator iter = new LabelAwareFileSentenceIterator(rootDir);
    List<String> labels = Arrays.asList("label1", "label2");
    TokenizerFactory tokenizerFactory = new DefaultTokenizerFactory();

    BagOfWordsVectorizer vectorizer = new BagOfWordsVectorizer.Builder()
            .setMinWordFrequency(1)
            .setStopWords(new ArrayList<String>())
            .setTokenizerFactory(tokenizerFactory)
            .setIterator(iter)
            .allowParallelTokenization(false)
            .build();
    vectorizer.fit();

    VocabWord word = vectorizer.getVocabCache().wordFor("file.");
    assumeNotNull(word);
    assertEquals(word, vectorizer.getVocabCache().tokenFor("file."));
    assertEquals(2, vectorizer.getVocabCache().totalNumberOfDocs());
    assertEquals(2, word.getSequencesCount());
    assertEquals(2, word.getElementFrequency(), 0.1);

    VocabWord word1 = vectorizer.getVocabCache().wordFor("1");
    assertEquals(1, word1.getSequencesCount());
    assertEquals(1, word1.getElementFrequency(), 0.1);

    log.info("Labels used: " + vectorizer.getLabelsSource().getLabels());
    assertEquals(2, vectorizer.getLabelsSource().getNumberOfLabelsUsed());

    ///////////////////
    INDArray array = vectorizer.transform("This is 2 file.");
    log.info("Transformed array: " + array);
    assertEquals(5, array.columns());

    VocabCache<VocabWord> vocabCache = vectorizer.getVocabCache();
    assertEquals(2, array.getDouble(vocabCache.tokenFor("This").getIndex()), 0.1);
    assertEquals(2, array.getDouble(vocabCache.tokenFor("is").getIndex()), 0.1);
    assertEquals(2, array.getDouble(vocabCache.tokenFor("file.").getIndex()), 0.1);
    assertEquals(0, array.getDouble(vocabCache.tokenFor("1").getIndex()), 0.1);
    assertEquals(1, array.getDouble(vocabCache.tokenFor("2").getIndex()), 0.1);

    DataSet dataSet = vectorizer.vectorize("This is 2 file.", "label2");
    assertEquals(array, dataSet.getFeatureMatrix());

    INDArray labelz = dataSet.getLabels();
    log.info("Labels array: " + labelz);
    int idx2 = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(labelz))).getFinalResult();
    // assertEquals(1.0, dataSet.getLabels().getDouble(0), 0.1);
    // assertEquals(0.0, dataSet.getLabels().getDouble(1), 0.1);

    dataSet = vectorizer.vectorize("This is 1 file.", "label1");
    assertEquals(2, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("This").getIndex()), 0.1);
    assertEquals(2, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("is").getIndex()), 0.1);
    assertEquals(2, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("file.").getIndex()), 0.1);
    assertEquals(1, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("1").getIndex()), 0.1);
    assertEquals(0, dataSet.getFeatureMatrix().getDouble(vocabCache.tokenFor("2").getIndex()), 0.1);

    int idx1 = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(dataSet.getLabels()))).getFinalResult();
    // assertEquals(0.0, dataSet.getLabels().getDouble(0), 0.1);
    // assertEquals(1.0, dataSet.getLabels().getDouble(1), 0.1);
    assertNotEquals(idx2, idx1);

    // Serialization check
    File tempFile = File.createTempFile("fdsf", "fdfsdf");
    tempFile.deleteOnExit();
    SerializationUtils.saveObject(vectorizer, tempFile);

    BagOfWordsVectorizer vectorizer2 = SerializationUtils.readObject(tempFile);
    vectorizer2.setTokenizerFactory(tokenizerFactory);
    dataSet = vectorizer2.vectorize("This is 2 file.", "label2");
    assertEquals(array, dataSet.getFeatureMatrix());
}
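In testBagOfWordsVectorizer, IMax is used only to find the hot position in each one-hot labels row, and the test asserts that the positions for "label2" and "label1" differ. A hypothetical follow-up step, mapping that index back to a label string, is sketched below; it assumes the one-hot column order matches the ordering returned by getLabelsSource().getLabels(), which the original test does not verify.

// Illustrative sketch: recover the label string behind the hot index of a one-hot labels row.
INDArray labelsRow = dataSet.getLabels();
int hot = ((IndexAccumulation) Nd4j.getExecutioner().exec(new IMax(labelsRow))).getFinalResult();
// Assumes the label column order matches the LabelsSource ordering.
String resolvedLabel = vectorizer.getLabelsSource().getLabels().get(hot);
log.info("Hot index " + hot + " resolves to label: " + resolvedLabel);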