Use of org.apache.lucene.analysis.CannedBinaryTokenStream in project lucene-solr by apache.
In class TestTerms, method testTermMinMaxRandom:
public void testTermMinMaxRandom() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  int numDocs = atLeast(100);
  BytesRef minTerm = null;
  BytesRef maxTerm = null;
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    Field field = new TextField("field", "", Field.Store.NO);
    doc.add(field);
    //System.out.println(" doc " + i);
    // Index a pre-canned stream of random binary tokens for this document,
    // tracking the smallest and largest term produced so far.
    CannedBinaryTokenStream.BinaryToken[] tokens = new CannedBinaryTokenStream.BinaryToken[atLeast(10)];
    for (int j = 0; j < tokens.length; j++) {
      byte[] bytes = new byte[TestUtil.nextInt(random(), 1, 20)];
      random().nextBytes(bytes);
      BytesRef tokenBytes = new BytesRef(bytes);
      //System.out.println(" token " + tokenBytes);
      if (minTerm == null || tokenBytes.compareTo(minTerm) < 0) {
        //System.out.println(" ** new min");
        minTerm = tokenBytes;
      }
      if (maxTerm == null || tokenBytes.compareTo(maxTerm) > 0) {
        //System.out.println(" ** new max");
        maxTerm = tokenBytes;
      }
      tokens[j] = new CannedBinaryTokenStream.BinaryToken(tokenBytes);
    }
    field.setTokenStream(new CannedBinaryTokenStream(tokens));
    w.addDocument(doc);
  }
  IndexReader r = w.getReader();
  // The Terms instance must report exactly the smallest and largest indexed terms.
  Terms terms = MultiFields.getTerms(r, "field");
  assertEquals(minTerm, terms.getMin());
  assertEquals(maxTerm, terms.getMax());
  r.close();
  w.close();
  dir.close();
}
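CannedBinaryTokenStream is a test-framework TokenStream that simply replays a fixed array of BinaryToken instances, which is what lets the test above index arbitrary byte[] terms without running a real analyzer. A minimal consumption sketch follows; it assumes the stream's term bytes are reachable through TermToBytesRefAttribute (recent test-framework versions register a BytesTermAttribute, which extends it), so treat that attribute lookup and the sample bytes as illustrative rather than as the test's own code.

// Minimal sketch (not from the test above): build a CannedBinaryTokenStream
// and read back each binary term. The TermToBytesRefAttribute lookup is an
// assumption about the test-framework version in use.
import org.apache.lucene.analysis.CannedBinaryTokenStream;
import org.apache.lucene.analysis.CannedBinaryTokenStream.BinaryToken;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.util.BytesRef;

public class CannedBinaryTokenStreamSketch {
  public static void main(String[] args) throws Exception {
    BinaryToken[] tokens = new BinaryToken[] {
        new BinaryToken(new BytesRef(new byte[] { 0x1, 0x2 })),
        new BinaryToken(new BytesRef(new byte[] { 0x7F }))
    };
    try (CannedBinaryTokenStream ts = new CannedBinaryTokenStream(tokens)) {
      TermToBytesRefAttribute termAtt = ts.getAttribute(TermToBytesRefAttribute.class);
      ts.reset();
      while (ts.incrementToken()) {
        // Prints the raw bytes of each canned token in index order.
        System.out.println(termAtt.getBytesRef());
      }
      ts.end();
    }
  }
}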
Use of org.apache.lucene.analysis.CannedBinaryTokenStream in project lucene-solr by apache.
In class AnalyzingSuggesterTest, method test0ByteKeys:
public void test0ByteKeys() throws Exception {
  // Analyzer that returns a fixed sequence of canned binary token streams,
  // so every analyzed key consists solely of 0x0 bytes.
  final Analyzer a = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
      return new TokenStreamComponents(tokenizer) {
        int tokenStreamCounter = 0;
        // token(...) is a test helper that wraps a BytesRef in a BinaryToken.
        final TokenStream[] tokenStreams = new TokenStream[] {
            new CannedBinaryTokenStream(new BinaryToken[] { token(new BytesRef(new byte[] { 0x0, 0x0, 0x0 })) }),
            new CannedBinaryTokenStream(new BinaryToken[] { token(new BytesRef(new byte[] { 0x0, 0x0 })) }),
            new CannedBinaryTokenStream(new BinaryToken[] { token(new BytesRef(new byte[] { 0x0, 0x0, 0x0 })) }),
            new CannedBinaryTokenStream(new BinaryToken[] { token(new BytesRef(new byte[] { 0x0, 0x0 })) })
        };

        @Override
        public TokenStream getTokenStream() {
          TokenStream result = tokenStreams[tokenStreamCounter];
          tokenStreamCounter++;
          return result;
        }

        @Override
        protected void setReader(final Reader reader) {
        }
      };
    }
  };
  Directory tempDir = getDirectory();
  AnalyzingSuggester suggester = new AnalyzingSuggester(tempDir, "suggest", a, a, 0, 256, -1, true);
  suggester.build(new InputArrayIterator(new Input[] { new Input("a a", 50), new Input("a b", 50) }));
  IOUtils.close(a, tempDir);
}
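The test above only checks that building the suggester succeeds when every analyzed form consists of zero bytes; it closes the analyzer and directory immediately and never queries. Purely as an illustrative sketch (not taken from AnalyzingSuggesterTest), the same constructor arguments can be paired with a lookup() call roughly as below; the StandardAnalyzer, temporary directory, query prefix, and result count are all assumptions.

// Standalone usage sketch: build an AnalyzingSuggester and query it.
// Assumes the lucene-suggest and lucene-analyzers-common modules are on the classpath.
import java.nio.file.Files;
import java.util.List;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.search.suggest.Input;
import org.apache.lucene.search.suggest.InputArrayIterator;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.analyzing.AnalyzingSuggester;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.IOUtils;

public class AnalyzingSuggesterSketch {
  public static void main(String[] args) throws Exception {
    Directory tempDir = FSDirectory.open(Files.createTempDirectory("suggest"));
    StandardAnalyzer analyzer = new StandardAnalyzer();
    // Same constructor arguments as the test above, with an ordinary analyzer.
    AnalyzingSuggester suggester =
        new AnalyzingSuggester(tempDir, "suggest", analyzer, analyzer, 0, 256, -1, true);
    suggester.build(new InputArrayIterator(new Input[] {
        new Input("a a", 50), new Input("a b", 50) }));
    // Ask for up to 5 completions of the prefix "a".
    List<Lookup.LookupResult> results = suggester.lookup("a", false, 5);
    for (Lookup.LookupResult result : results) {
      System.out.println(result.key + " weight=" + result.value);
    }
    IOUtils.close(analyzer, tempDir);
  }
}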