Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache: class TestKeepWordFilter, method testRandomStrings.
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
  final Set<String> words = new HashSet<>();
  words.add("a");
  words.add("b");
  Analyzer a = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
      TokenStream stream = new KeepWordFilter(tokenizer, new CharArraySet(words, true));
      return new TokenStreamComponents(tokenizer, stream);
    }
  };
  checkRandomData(random(), a, 1000 * RANDOM_MULTIPLIER);
  a.close();
}
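For reference, a minimal sketch (not part of the test above) of what KeepWordFilter does with a fixed input; the input string and expected output are illustrative assumptions, reusing the same test helpers (whitespaceMockTokenizer, assertTokenStreamContents) as the other snippets on this page and assuming java.util.Arrays is imported:

// Illustrative sketch: only tokens present in the keep set ("a", "b") survive.
CharArraySet keep = new CharArraySet(Arrays.asList("a", "b"), true);
MockTokenizer tokenizer = whitespaceMockTokenizer("a b c a");
TokenStream stream = new KeepWordFilter(tokenizer, keep);
assertTokenStreamContents(stream, new String[] { "a", "b", "a" });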
Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache: class TestLengthFilterFactory, method testPositionIncrements.
public void testPositionIncrements() throws Exception {
  Reader reader = new StringReader("foo foobar super-duper-trooper");
  TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false);
  ((Tokenizer) stream).setReader(reader);
  stream = tokenFilterFactory("Length", LengthFilterFactory.MIN_KEY, "4", LengthFilterFactory.MAX_KEY, "10").create(stream);
  // "foo" and "super-duper-trooper" fall outside the 4-10 length range, so only
  // "foobar" survives, with a position increment of 2 (skipping the removed "foo").
  assertTokenStreamContents(stream, new String[] { "foobar" }, new int[] { 2 });
}
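The same filtering can be expressed without the factory; a minimal sketch, assuming the plain LengthFilter(TokenStream, min, max) constructor from org.apache.lucene.analysis.miscellaneous:

// Illustrative sketch: tokens shorter than 4 or longer than 10 characters are dropped.
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
tokenizer.setReader(new StringReader("foo foobar super-duper-trooper"));
TokenStream stream = new LengthFilter(tokenizer, 4, 10);
assertTokenStreamContents(stream, new String[] { "foobar" }, new int[] { 2 });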
Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache: class TestLengthFilterFactory, method testInvalidArguments.
/** Test that invalid arguments result in exception */
public void testInvalidArguments() throws Exception {
  IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
    Reader reader = new StringReader("foo foobar super-duper-trooper");
    TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false);
    ((Tokenizer) stream).setReader(reader);
    tokenFilterFactory("Length", LengthFilterFactory.MIN_KEY, "5", LengthFilterFactory.MAX_KEY, "4").create(stream);
  });
  assertTrue(expected.getMessage().contains("maximum length must not be greater than minimum length"));
}
Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache: class TestCapitalizationFilter, method testRandomString.
/** blast some random strings through the analyzer */
public void testRandomString() throws Exception {
  Analyzer a = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
      return new TokenStreamComponents(tokenizer, new CapitalizationFilter(tokenizer));
    }
  };
  checkRandomData(random(), a, 1000 * RANDOM_MULTIPLIER);
  a.close();
}
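For a concrete view of what the random-data test exercises, a minimal sketch with a fixed input; the input token and expected output are illustrative assumptions based on the filter's default settings (capitalize the first letter, lowercase the rest):

// Illustrative sketch: with default settings, "kiTTEN" is normalized to "Kitten".
MockTokenizer tokenizer = whitespaceMockTokenizer("kiTTEN");
TokenStream stream = new CapitalizationFilter(tokenizer);
assertTokenStreamContents(stream, new String[] { "Kitten" });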
Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache: class TestFingerprintFilter, method testAllDupValues.
public void testAllDupValues() throws Exception {
  for (final boolean consumeAll : new boolean[] { true, false }) {
    MockTokenizer tokenizer = whitespaceMockTokenizer("B2 B2");
    tokenizer.setEnableChecks(consumeAll);
    TokenStream stream = new FingerprintFilter(tokenizer);
    // The duplicate "B2" tokens collapse into a single fingerprint token.
    assertTokenStreamContents(stream, new String[] { "B2" });
  }
}
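A related sketch for mixed input; the input and expected output are illustrative and assume FingerprintFilter's default behavior of deduplicating the tokens, sorting them, and joining them with a single space:

// Illustrative sketch: duplicates are removed and the remaining tokens are sorted
// and concatenated into one fingerprint token.
MockTokenizer tokenizer = whitespaceMockTokenizer("B2 A1 B2");
TokenStream stream = new FingerprintFilter(tokenizer);
assertTokenStreamContents(stream, new String[] { "A1 B2" });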