Use of org.apache.lucene.analysis.MockCharFilter in project lucene-solr by apache.
From the class TestPerFieldAnalyzerWrapper, method testCharFilters:
public void testCharFilters() throws Exception {
  Analyzer a = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      return new TokenStreamComponents(new MockTokenizer());
    }

    @Override
    protected Reader initReader(String fieldName, Reader reader) {
      return new MockCharFilter(reader, 7);
    }
  };
  assertAnalyzesTo(a, "ab", new String[] { "aab" }, new int[] { 0 }, new int[] { 2 });

  // now wrap in PFAW
  PerFieldAnalyzerWrapper p = new PerFieldAnalyzerWrapper(a, Collections.<String, Analyzer>emptyMap());

  assertAnalyzesTo(p, "ab", new String[] { "aab" }, new int[] { 0 }, new int[] { 2 });
  p.close();
  // TODO: fix this about PFAW, its a trap
  a.close();
}
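MockCharFilter extends CharFilter, so its effect can also be observed outside an Analyzer by reading from it like any other Reader. A minimal sketch, assuming lucene-test-framework is on the classpath; the class name MockCharFilterDemo is illustrative only:

import java.io.Reader;
import java.io.StringReader;
import org.apache.lucene.analysis.MockCharFilter;

public class MockCharFilterDemo {
  public static void main(String[] args) throws Exception {
    // Wrap a plain StringReader the same way initReader does in the test above.
    Reader filtered = new MockCharFilter(new StringReader("ab"), 7);
    StringBuilder out = new StringBuilder();
    for (int c = filtered.read(); c != -1; c = filtered.read()) {
      out.append((char) c);
    }
    // Per the assertion in testCharFilters, this should print "aab".
    System.out.println(out);
    filtered.close();
  }
}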
Use of org.apache.lucene.analysis.MockCharFilter in project lucene-solr by apache.
From the class TestBugInSomething, method test:
public void test() throws Exception {
  final CharArraySet cas = new CharArraySet(3, false);
  cas.add("jjp");
  cas.add("wlmwoknt");
  cas.add("tcgyreo");
  final NormalizeCharMap.Builder builder = new NormalizeCharMap.Builder();
  builder.add("mtqlpi", "");
  builder.add("mwoknt", "jjp");
  builder.add("tcgyreo", "zpfpajyws");
  final NormalizeCharMap map = builder.build();
  Analyzer a = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer t = new MockTokenizer(MockTokenFilter.ENGLISH_STOPSET, false, -65);
      TokenFilter f = new CommonGramsFilter(t, cas);
      return new TokenStreamComponents(t, f);
    }

    @Override
    protected Reader initReader(String fieldName, Reader reader) {
      reader = new MockCharFilter(reader, 0);
      reader = new MappingCharFilter(map, reader);
      reader = new TestRandomChains.CheckThatYouDidntReadAnythingReaderWrapper(reader);
      return reader;
    }
  };
  checkAnalysisConsistency(random(), a, false, "wmgddzunizdomqyj");
  a.close();
}
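The initReader above stacks several char filters before tokenization; the same chain (minus the test-only CheckThatYouDidntReadAnythingReaderWrapper) can be composed directly over a Reader to see the char-filtered text the tokenizer would receive. A minimal sketch, assuming lucene-core and lucene-test-framework are on the classpath; the class name CharFilterChainDemo is illustrative only:

import java.io.Reader;
import java.io.StringReader;
import org.apache.lucene.analysis.MockCharFilter;
import org.apache.lucene.analysis.charfilter.MappingCharFilter;
import org.apache.lucene.analysis.charfilter.NormalizeCharMap;

public class CharFilterChainDemo {
  public static void main(String[] args) throws Exception {
    NormalizeCharMap.Builder builder = new NormalizeCharMap.Builder();
    builder.add("mtqlpi", "");
    builder.add("mwoknt", "jjp");
    builder.add("tcgyreo", "zpfpajyws");
    NormalizeCharMap map = builder.build();

    // Same order as initReader in the test: MockCharFilter first, then MappingCharFilter.
    Reader reader = new StringReader("wmgddzunizdomqyj");
    reader = new MockCharFilter(reader, 0);
    reader = new MappingCharFilter(map, reader);

    StringBuilder out = new StringBuilder();
    for (int c = reader.read(); c != -1; c = reader.read()) {
      out.append((char) c);
    }
    System.out.println(out); // the char-filtered text seen by the tokenizer
    reader.close();
  }
}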