Use of org.apache.lucene.analysis.Tokenizer in project lucene-solr by Apache.
From the class TestKStemmer, method testEmptyTerm:
public void testEmptyTerm() throws IOException {
  Analyzer a = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer tokenizer = new KeywordTokenizer();
      return new TokenStreamComponents(tokenizer, new KStemFilter(tokenizer));
    }
  };
  checkOneTerm(a, "", "");
  a.close();
}
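A chain like this can also be consumed directly through the TokenStream API. A minimal sketch, assuming analyzer is an Analyzer built as above; the field name and input string are made up for illustration and are not part of the original test. CharTermAttribute comes from org.apache.lucene.analysis.tokenattributes.

try (TokenStream ts = analyzer.tokenStream("field", "some input text")) {
  CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
  ts.reset();                     // must be called before the first incrementToken()
  while (ts.incrementToken()) {   // advance to the next token in the stream
    System.out.println(term.toString());
  }
  ts.end();                       // records end-of-stream state (e.g. the final offset)
}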
From the class TestPorterStemFilter, method setUp:
@Override
public void setUp() throws Exception {
  super.setUp();
  a = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer t = new MockTokenizer(MockTokenizer.KEYWORD, false);
      return new TokenStreamComponents(t, new PorterStemFilter(t));
    }
  };
}
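MockTokenizer.KEYWORD emits the whole input as a single token, so each test string reaches PorterStemFilter intact. A hedged sketch of how the field a set up here might be exercised with the BaseTokenStreamTestCase helpers; the test name and the walking/walk pair are illustrative and not taken from the original class.

public void testBasicStemming() throws IOException {
  checkOneTerm(a, "walking", "walk"); // the Porter algorithm strips the "-ing" suffix
  checkOneTerm(a, "walk", "walk");    // an already-minimal term passes through unchanged
}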
From the class TestPorterStemFilter, method testWithKeywordAttribute:
public void testWithKeywordAttribute() throws IOException {
  CharArraySet set = new CharArraySet(1, true);
  set.add("yourselves");
  Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
  tokenizer.setReader(new StringReader("yourselves yours"));
  TokenStream filter = new PorterStemFilter(new SetKeywordMarkerFilter(tokenizer, set));
  assertTokenStreamContents(filter, new String[] { "yourselves", "your" });
}
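Here SetKeywordMarkerFilter sets the KeywordAttribute on any token contained in the CharArraySet, and PorterStemFilter skips keyword-marked tokens, so "yourselves" survives unstemmed while "yours" is reduced to "your".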
From the class TestGalicianStemFilter, method setUp:
@Override
public void setUp() throws Exception {
  super.setUp();
  analyzer = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer source = new MockTokenizer(MockTokenizer.WHITESPACE, false);
      return new TokenStreamComponents(source, new GalicianStemFilter(source));
    }
  };
}
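In contrast to the previous example, MockTokenizer.WHITESPACE splits the input on whitespace, and the second constructor argument (false) disables lowercasing, so GalicianStemFilter sees each whitespace-delimited token exactly as written in the test data.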
From the class EdgeNGramTokenFilterTest, method testEmptyTerm:
public void testEmptyTerm() throws Exception {
  Random random = random();
  Analyzer a = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer tokenizer = new KeywordTokenizer();
      return new TokenStreamComponents(tokenizer, new EdgeNGramTokenFilter(tokenizer, 2, 15));
    }
  };
  checkAnalysisConsistency(random, a, random.nextBoolean(), "");
  a.close();
}
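For non-empty input, the same minGram=2/maxGram=15 configuration expands a keyword token into its leading edge n-grams. A sketch under that assumption; the analyzer variable name and the input "abcde" are made up for illustration, not taken from the original test.

Analyzer edge = new Analyzer() {
  @Override
  protected TokenStreamComponents createComponents(String fieldName) {
    Tokenizer tokenizer = new KeywordTokenizer();
    return new TokenStreamComponents(tokenizer, new EdgeNGramTokenFilter(tokenizer, 2, 15));
  }
};
// "abcde" (length 5) yields grams of length 2 through 5, all anchored at the start of the token.
assertAnalyzesTo(edge, "abcde", new String[] { "ab", "abc", "abcd", "abcde" });
edge.close();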