Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class TestFieldCacheSort, method testIntMissing:
/** Tests sorting on type int with a missing value */
public void testIntMissing() throws IOException {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  Document doc = new Document();
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new IntPoint("value", -1));
  doc.add(new StoredField("value", -1));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new IntPoint("value", 4));
  doc.add(new StoredField("value", 4));
  writer.addDocument(doc);
  IndexReader ir = UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", Type.INTEGER_POINT));
  writer.close();
  IndexSearcher searcher = newSearcher(ir, false);
  Sort sort = new Sort(new SortField("value", SortField.Type.INT));
  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
  assertEquals(3, td.totalHits);
  // null is treated as a 0
  assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertNull(searcher.doc(td.scoreDocs[1].doc).get("value"));
  assertEquals("4", searcher.doc(td.scoreDocs[2].doc).get("value"));
  TestUtil.checkReader(ir);
  ir.close();
  dir.close();
}
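As the comment notes, a document with no "value" field sorts as if its value were 0, which is why the empty document lands between -1 and 4. A minimal sketch (not part of the test, reusing the field name and searcher from above) of pushing missing documents to the end instead, via SortField.setMissingValue:

  SortField byValue = new SortField("value", SortField.Type.INT);
  byValue.setMissingValue(Integer.MAX_VALUE); // missing docs now sort last, not as 0
  TopDocs hits = searcher.search(new MatchAllDocsQuery(), 10, new Sort(byValue));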
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class TestFieldCacheSort, method testInt:
/** Tests sorting on type int */
public void testInt() throws IOException {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  Document doc = new Document();
  doc.add(new IntPoint("value", 300000));
  doc.add(new StoredField("value", 300000));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new IntPoint("value", -1));
  doc.add(new StoredField("value", -1));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new IntPoint("value", 4));
  doc.add(new StoredField("value", 4));
  writer.addDocument(doc);
  IndexReader ir = UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", Type.INTEGER_POINT));
  writer.close();
  IndexSearcher searcher = newSearcher(ir, false);
  Sort sort = new Sort(new SortField("value", SortField.Type.INT));
  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
  assertEquals(3, td.totalHits);
  // numeric order
  assertEquals("-1", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4", searcher.doc(td.scoreDocs[1].doc).get("value"));
  assertEquals("300000", searcher.doc(td.scoreDocs[2].doc).get("value"));
  TestUtil.checkReader(ir);
  ir.close();
  dir.close();
}
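The UninvertingReader.wrap call is needed because the documents carry only IntPoint and StoredField entries, neither of which can be sorted on directly. A sketch (not from the test, illustrative value) of indexing the same number with doc values as well, which lets a plain reader sort without any wrapping:

  Document d = new Document();
  d.add(new IntPoint("value", 42));              // indexed for point/range queries
  d.add(new NumericDocValuesField("value", 42)); // column-stride value used for sorting
  d.add(new StoredField("value", 42));           // stored copy returned by searcher.doc(...)
  writer.addDocument(d);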
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class TestFieldCacheSort, method testDouble:
/** Tests sorting on type double */
public void testDouble() throws IOException {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  Document doc = new Document();
  doc.add(new DoublePoint("value", 30.1));
  doc.add(new StoredField("value", 30.1));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new DoublePoint("value", -1.3));
  doc.add(new StoredField("value", -1.3));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new DoublePoint("value", 4.2333333333333));
  doc.add(new StoredField("value", 4.2333333333333));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new DoublePoint("value", 4.2333333333332));
  doc.add(new StoredField("value", 4.2333333333332));
  writer.addDocument(doc);
  IndexReader ir = UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", Type.DOUBLE_POINT));
  writer.close();
  IndexSearcher searcher = newSearcher(ir, false);
  Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE));
  TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
  assertEquals(4, td.totalHits);
  // numeric order
  assertEquals("-1.3", searcher.doc(td.scoreDocs[0].doc).get("value"));
  assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[1].doc).get("value"));
  assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[2].doc).get("value"));
  assertEquals("30.1", searcher.doc(td.scoreDocs[3].doc).get("value"));
  TestUtil.checkReader(ir);
  ir.close();
  dir.close();
}
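Because the values are also indexed as DoublePoint, the same field supports range queries alongside the sort. A small sketch under the assumptions of the test above (same field name and searcher, illustrative bounds):

  Query range = DoublePoint.newRangeQuery("value", 0.0, 10.0);
  TopDocs hits = searcher.search(range, 10, new Sort(new SortField("value", SortField.Type.DOUBLE)));
  // only the two 4.233... documents match, smallest first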
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class TestIndexWriterOnVMError, method doTest:
// just one thread, serial merge policy, hopefully debuggable
private void doTest(MockDirectoryWrapper.Failure failOn) throws Exception {
  // log all exceptions we hit, in case we fail (for debugging)
  ByteArrayOutputStream exceptionLog = new ByteArrayOutputStream();
  PrintStream exceptionStream = new PrintStream(exceptionLog, true, "UTF-8");
  //PrintStream exceptionStream = System.out;
  final long analyzerSeed = random().nextLong();
  final Analyzer analyzer = new Analyzer() {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
      MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
      // we are gonna make it angry
      tokenizer.setEnableChecks(false);
      TokenStream stream = tokenizer;
      // emit some payloads
      if (fieldName.contains("payloads")) {
        stream = new MockVariableLengthPayloadFilter(new Random(analyzerSeed), stream);
      }
      return new TokenStreamComponents(tokenizer, stream);
    }
  };
  MockDirectoryWrapper dir = null;
  final int numIterations = TEST_NIGHTLY ? atLeast(100) : atLeast(5);
  STARTOVER: for (int iter = 0; iter < numIterations; iter++) {
    try {
      // close from last run
      if (dir != null) {
        dir.close();
      }
      // disable slow things: we don't rely upon sleeps here.
      dir = newMockDirectory();
      dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
      dir.setUseSlowOpenClosers(false);
      IndexWriterConfig conf = newIndexWriterConfig(analyzer);
      // just for now, try to keep this test reproducible
      conf.setMergeScheduler(new SerialMergeScheduler());
      // test never makes it this far...
      int numDocs = atLeast(2000);
      IndexWriter iw = new IndexWriter(dir, conf);
      // ensure there is always a commit
      iw.commit();
      dir.failOn(failOn);
      for (int i = 0; i < numDocs; i++) {
        Document doc = new Document();
        doc.add(newStringField("id", Integer.toString(i), Field.Store.NO));
        doc.add(new NumericDocValuesField("dv", i));
        doc.add(new BinaryDocValuesField("dv2", new BytesRef(Integer.toString(i))));
        doc.add(new SortedDocValuesField("dv3", new BytesRef(Integer.toString(i))));
        doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i))));
        doc.add(new SortedSetDocValuesField("dv4", new BytesRef(Integer.toString(i - 1))));
        doc.add(new SortedNumericDocValuesField("dv5", i));
        doc.add(new SortedNumericDocValuesField("dv5", i - 1));
        doc.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
        // ensure we store something
        doc.add(new StoredField("stored1", "foo"));
        doc.add(new StoredField("stored1", "bar"));
        // ensure we get some payloads
        doc.add(newTextField("text_payloads", TestUtil.randomAnalysisString(random(), 6, true), Field.Store.NO));
        // ensure we get some vectors
        FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
        ft.setStoreTermVectors(true);
        doc.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
        doc.add(new IntPoint("point", random().nextInt()));
        doc.add(new IntPoint("point2d", random().nextInt(), random().nextInt()));
        if (random().nextInt(10) > 0) {
          // single doc
          try {
            iw.addDocument(doc);
            // we made it, sometimes delete our doc, or update a dv
            int thingToDo = random().nextInt(4);
            if (thingToDo == 0) {
              iw.deleteDocuments(new Term("id", Integer.toString(i)));
            } else if (thingToDo == 1) {
              iw.updateNumericDocValue(new Term("id", Integer.toString(i)), "dv", i + 1L);
            } else if (thingToDo == 2) {
              iw.updateBinaryDocValue(new Term("id", Integer.toString(i)), "dv2", new BytesRef(Integer.toString(i + 1)));
            }
          } catch (VirtualMachineError | AlreadyClosedException disaster) {
            getTragedy(disaster, iw, exceptionStream);
            continue STARTOVER;
          }
        } else {
          // block docs
          Document doc2 = new Document();
          doc2.add(newStringField("id", Integer.toString(-i), Field.Store.NO));
          doc2.add(newTextField("text1", TestUtil.randomAnalysisString(random(), 20, true), Field.Store.NO));
          doc2.add(new StoredField("stored1", "foo"));
          doc2.add(new StoredField("stored1", "bar"));
          doc2.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
          try {
            iw.addDocuments(Arrays.asList(doc, doc2));
            // we made it, sometimes delete our docs
            if (random().nextBoolean()) {
              iw.deleteDocuments(new Term("id", Integer.toString(i)), new Term("id", Integer.toString(-i)));
            }
          } catch (VirtualMachineError | AlreadyClosedException disaster) {
            getTragedy(disaster, iw, exceptionStream);
            continue STARTOVER;
          }
        }
        if (random().nextInt(10) == 0) {
          // trigger flush:
          try {
            if (random().nextBoolean()) {
              DirectoryReader ir = null;
              try {
                ir = DirectoryReader.open(iw, random().nextBoolean(), false);
                TestUtil.checkReader(ir);
              } finally {
                IOUtils.closeWhileHandlingException(ir);
              }
            } else {
              iw.commit();
            }
            if (DirectoryReader.indexExists(dir)) {
              TestUtil.checkIndex(dir);
            }
          } catch (VirtualMachineError | AlreadyClosedException disaster) {
            getTragedy(disaster, iw, exceptionStream);
            continue STARTOVER;
          }
        }
      }
      try {
        iw.close();
      } catch (VirtualMachineError | AlreadyClosedException disaster) {
        getTragedy(disaster, iw, exceptionStream);
        continue STARTOVER;
      }
    } catch (Throwable t) {
      System.out.println("Unexpected exception: dumping fake-exception-log:...");
      exceptionStream.flush();
      System.out.println(exceptionLog.toString("UTF-8"));
      System.out.flush();
      Rethrow.rethrow(t);
    }
  }
  dir.close();
  if (VERBOSE) {
    System.out.println("TEST PASSED: dumping fake-exception-log:...");
    System.out.println(exceptionLog.toString("UTF-8"));
  }
}
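The getTragedy helper is referenced but not shown in this excerpt. A plausible shape, inferred only from how the loop uses it (the actual implementation in TestIndexWriterOnVMError may differ), would record the failure in the in-memory log that doTest dumps on an unexpected error, and confirm the writer registered a tragic exception before the loop starts over:

  private void getTragedy(Throwable disaster, IndexWriter iw, PrintStream exceptionStream) {
    // keep the stack trace so it can be dumped later if the test fails
    exceptionStream.println("\nTEST: hit expected disaster:");
    disaster.printStackTrace(exceptionStream);
    // the VM error (or the cause of the AlreadyClosedException) should have been
    // recorded as the writer's tragic exception
    assertNotNull(iw.getTragicException());
  }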
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
From the class TestIndexWriterExceptions, method testNullStoredBytesFieldReuse:
/** test a null byte[] value doesn't abort the entire segment */
public void testNullStoredBytesFieldReuse() throws Exception {
  Directory dir = newDirectory();
  Analyzer analyzer = new MockAnalyzer(random());
  IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
  // add good document
  Document doc = new Document();
  Field theField = new StoredField("foo", new BytesRef("hello").bytes);
  doc.add(theField);
  iw.addDocument(doc);
  expectThrows(NullPointerException.class, () -> {
    // set to null value
    byte[] v = null;
    theField.setBytesValue(v);
    iw.addDocument(doc);
  });
  assertNull(iw.getTragicException());
  iw.close();
  // make sure we see our good doc
  DirectoryReader r = DirectoryReader.open(dir);
  assertEquals(1, r.numDocs());
  r.close();
  dir.close();
}
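The test reuses one Field instance and swaps its value between documents, a common Lucene indexing pattern; only the null value inside the expectThrows block fails, and as assertNull(iw.getTragicException()) shows, the failure is non-aborting. An illustrative sketch (values not taken from the test) of the binary StoredField variants and of legitimate reuse:

  Field asBytes = new StoredField("foo", new BytesRef("hello").bytes); // raw byte[]
  Field asRef = new StoredField("foo", new BytesRef("hello"));         // BytesRef view
  // reusing a field instance is fine as long as the new value is non-null
  asBytes.setBytesValue(new BytesRef("world").bytes);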