Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache.
From the class TestSnowball, method testEnglish:
public void testEnglish() throws Exception {
  Analyzer a = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer tokenizer = new MockTokenizer();
      return new TokenStreamComponents(tokenizer, new SnowballFilter(tokenizer, "English"));
    }
  };
  assertAnalyzesTo(a, "he abhorred accents", new String[] { "he", "abhor", "accent" });
  a.close();
}
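The same pattern works for any other Snowball stemmer; only the language name passed to SnowballFilter changes. A minimal sketch, assuming the same test-class context as the snippet above (the choice of "German" is illustrative, not from the original test):

Analyzer german = new Analyzer() {
  @Override
  protected TokenStreamComponents createComponents(String fieldName) {
    Tokenizer tokenizer = new MockTokenizer();
    // any language name accepted by the Snowball stemmers works here
    return new TokenStreamComponents(tokenizer, new SnowballFilter(tokenizer, "German"));
  }
};
german.close();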
Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache.
From the class TestSerbianNormalizationFilterFactory, method testStemming:
public void testStemming() throws Exception {
  Reader reader = new StringReader("đura");
  TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false);
  ((Tokenizer) stream).setReader(reader);
  stream = tokenFilterFactory("SerbianNormalization").create(stream);
  assertTokenStreamContents(stream, new String[] { "djura" });
}
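For reference, MockTokenizer's two-argument constructor takes a token-pattern automaton and a lowercasing flag. A short sketch of the common combinations (variable names are illustrative):

// first argument: token pattern; second argument: whether to lowercase tokens
Tokenizer whitespace = new MockTokenizer(MockTokenizer.WHITESPACE, false); // split on whitespace, keep case
Tokenizer keyword = new MockTokenizer(MockTokenizer.KEYWORD, false);       // whole input as a single token
Tokenizer simple = new MockTokenizer(MockTokenizer.SIMPLE, true);          // runs of letters, lowercased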
Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache.
From the class TestSerbianNormalizationRegularFilter, method setUp:
@Override
public void setUp() throws Exception {
  super.setUp();
  analyzer = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      final Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
      final TokenStream stream = new SerbianNormalizationRegularFilter(tokenizer);
      return new TokenStreamComponents(tokenizer, stream);
    }
  };
}
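An analyzer created in setUp like this is normally released in a matching tearDown; a minimal sketch, assuming the usual pattern in these test classes (analyzer is the field assigned above):

@Override
public void tearDown() throws Exception {
  analyzer.close();
  super.tearDown();
}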
Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache.
From the class TestSolrSynonymParser, method testSimple:
/** Tests some simple examples from the solr wiki */
public void testSimple() throws Exception {
  String testFile = "i-pod, ipod, ipoooood\n" + "foo => foo bar\n" + "foo => baz\n" + "this test, that testing";
  Analyzer analyzer = new MockAnalyzer(random());
  SolrSynonymParser parser = new SolrSynonymParser(true, true, analyzer);
  parser.parse(new StringReader(testFile));
  final SynonymMap map = parser.build();
  analyzer.close();
  analyzer = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true);
      return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
    }
  };
  assertAnalyzesTo(analyzer, "ball", new String[] { "ball" }, new int[] { 1 });
  assertAnalyzesTo(analyzer, "i-pod", new String[] { "i-pod", "ipod", "ipoooood" }, new int[] { 1, 0, 0 });
  assertAnalyzesTo(analyzer, "foo", new String[] { "foo", "baz", "bar" }, new int[] { 1, 0, 1 });
  assertAnalyzesTo(analyzer, "this test", new String[] { "this", "that", "test", "testing" }, new int[] { 1, 0, 1, 0 });
  analyzer.close();
}
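SolrSynonymParser's first two constructor arguments are dedup and expand. With expand=true, as above, each comma-separated entry maps to every other; with expand=false, entries map only onto the first term in the list. A minimal sketch of the contracting variant (variable names are illustrative, and the original test does not necessarily exercise this):

SolrSynonymParser contracting = new SolrSynonymParser(true, false, new MockAnalyzer(random()));
contracting.parse(new StringReader("i-pod, ipod, ipoooood"));
SynonymMap contractedMap = contracting.build();
// with expand=false, "ipod" and "ipoooood" map to "i-pod" instead of expanding to all three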
Use of org.apache.lucene.analysis.MockTokenizer in project lucene-solr by apache.
From the class TestIndexWriterOnVMError, method doTest:
// just one thread, serial merge policy, hopefully debuggable
private void doTest(MockDirectoryWrapper.Failure failOn) throws Exception {
  // log all exceptions we hit, in case we fail (for debugging)
  ByteArrayOutputStream exceptionLog = new ByteArrayOutputStream();
  PrintStream exceptionStream = new PrintStream(exceptionLog, true, "UTF-8");
  //PrintStream exceptionStream = System.out;
  final long analyzerSeed = random().nextLong();
  final Analyzer analyzer = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
      // we are gonna make it angry
      tokenizer.setEnableChecks(false);
      TokenStream stream = tokenizer;
      // emit some payloads
      if (fieldName.contains("payloads")) {
        stream = new MockVariableLengthPayloadFilter(new Random(analyzerSeed), stream);
      }
      return new TokenStreamComponents(tokenizer, stream);
    }
  };
  MockDirectoryWrapper dir = null;
  final int numIterations = TEST_NIGHTLY ? atLeast(100) : atLeast(5);
  STARTOVER: for (int iter = 0; iter < numIterations; iter++) {
    try {
      // close from last run
      if (dir != null) {
        dir.close();
      }
      // disable slow things: we don't rely upon sleeps here.
      dir = newMockDirectory();
      dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
      dir.setUseSlowOpenClosers(false);
      IndexWriterConfig conf = newIndexWriterConfig(analyzer);
      // just for now, try to keep this test reproducible
      conf.setMergeScheduler(new SerialMergeScheduler());
      // test never makes it this far...
      int numDocs = atLeast(2000);
      IndexWriter iw = new IndexWriter(dir, conf);
      // ensure there is always a commit
      iw.commit();
      dir.failOn(failOn);
      for (int i = 0; i < numDocs; i++) {
        Document doc = new Document();
        doc.add(newStringField("id", Integer.toString(i), Field.Store.NO));
        doc.add(new NumericDocValuesField("dv", i));
        doc.add(new BinaryDocValuesField("dv2", new BytesRef(Integer.toString(i))));
        doc.add(new SortedDocValuesField("dv3", new BytesRef(Integer.toString(i))));
        doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i))));
        doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i - 1))));
        doc.add(new SortedNumericDocValuesField("dv5", i));
        doc.add(new SortedNumericDocValuesField("dv5", i - 1));
        doc.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
        // ensure we store something
        doc.add(new StoredField("stored1", "foo"));
        doc.add(new StoredField("stored1", "bar"));
        // ensure we get some payloads
        doc.add(newTextField("text_payloads", TestUtil.randomAnalysisString(random(), 6, true), Field.Store.NO));
        // ensure we get some vectors
        FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
        ft.setStoreTermVectors(true);
        doc.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
        doc.add(new IntPoint("point", random().nextInt()));
        doc.add(new IntPoint("point2d", random().nextInt(), random().nextInt()));
        if (random().nextInt(10) > 0) {
          // single doc
          try {
            iw.addDocument(doc);
            // we made it, sometimes delete our doc, or update a dv
            int thingToDo = random().nextInt(4);
            if (thingToDo == 0) {
              iw.deleteDocuments(new Term("id", Integer.toString(i)));
            } else if (thingToDo == 1) {
              iw.updateNumericDocValue(new Term("id", Integer.toString(i)), "dv", i + 1L);
            } else if (thingToDo == 2) {
              iw.updateBinaryDocValue(new Term("id", Integer.toString(i)), "dv2", new BytesRef(Integer.toString(i + 1)));
            }
          } catch (VirtualMachineError | AlreadyClosedException disaster) {
            getTragedy(disaster, iw, exceptionStream);
            continue STARTOVER;
          }
        } else {
          // block docs
          Document doc2 = new Document();
          doc2.add(newStringField("id", Integer.toString(-i), Field.Store.NO));
          doc2.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
          doc2.add(new StoredField("stored1", "foo"));
          doc2.add(new StoredField("stored1", "bar"));
          doc2.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
          try {
            iw.addDocuments(Arrays.asList(doc, doc2));
            // we made it, sometimes delete our docs
            if (random().nextBoolean()) {
              iw.deleteDocuments(new Term("id", Integer.toString(i)), new Term("id", Integer.toString(-i)));
            }
          } catch (VirtualMachineError | AlreadyClosedException disaster) {
            getTragedy(disaster, iw, exceptionStream);
            continue STARTOVER;
          }
        }
        if (random().nextInt(10) == 0) {
          // trigger flush:
          try {
            if (random().nextBoolean()) {
              DirectoryReader ir = null;
              try {
                ir = DirectoryReader.open(iw, random().nextBoolean(), false);
                TestUtil.checkReader(ir);
              } finally {
                IOUtils.closeWhileHandlingException(ir);
              }
            } else {
              iw.commit();
            }
            if (DirectoryReader.indexExists(dir)) {
              TestUtil.checkIndex(dir);
            }
          } catch (VirtualMachineError | AlreadyClosedException disaster) {
            getTragedy(disaster, iw, exceptionStream);
            continue STARTOVER;
          }
        }
      }
      try {
        iw.close();
      } catch (VirtualMachineError | AlreadyClosedException disaster) {
        getTragedy(disaster, iw, exceptionStream);
        continue STARTOVER;
      }
    } catch (Throwable t) {
      System.out.println("Unexpected exception: dumping fake-exception-log:...");
      exceptionStream.flush();
      System.out.println(exceptionLog.toString("UTF-8"));
      System.out.flush();
      Rethrow.rethrow(t);
    }
  }
  dir.close();
  if (VERBOSE) {
    System.out.println("TEST PASSED: dumping fake-exception-log:...");
    System.out.println(exceptionLog.toString("UTF-8"));
  }
}
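doTest is driven by a MockDirectoryWrapper.Failure that injects the disaster at random points while indexing. A minimal sketch of how a caller might wire one up (the failure rate and the choice of OutOfMemoryError are illustrative assumptions, not necessarily what this test class actually uses):

public void testFakeOutOfMemory() throws Exception {
  final Random r = new Random(random().nextLong());
  doTest(new MockDirectoryWrapper.Failure() {
    @Override
    public void eval(MockDirectoryWrapper dir) throws IOException {
      // rarely pretend the VM ran out of memory mid-operation
      if (r.nextInt(3000) == 0) {
        throw new OutOfMemoryError("Fake OutOfMemoryError");
      }
    }
  });
}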