Use of org.apache.lucene.search.FieldDoc in project lucene-solr by apache.
In the class TestNearest, the method testNearestNeighborWithDeletedDocs:
public void testNearestNeighborWithDeletedDocs() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir, getIndexWriterConfig());
  Document doc = new Document();
  doc.add(new LatLonPoint("point", 40.0, 50.0));
  doc.add(new StringField("id", "0", Field.Store.YES));
  w.addDocument(doc);

  doc = new Document();
  doc.add(new LatLonPoint("point", 45.0, 55.0));
  doc.add(new StringField("id", "1", Field.Store.YES));
  w.addDocument(doc);

  DirectoryReader r = w.getReader();
  // can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
  IndexSearcher s = newSearcher(r, false);
  FieldDoc hit = (FieldDoc) LatLonPoint.nearest(s, "point", 40.0, 50.0, 1).scoreDocs[0];
  assertEquals("0", r.document(hit.doc).getField("id").stringValue());
  r.close();

  // delete the nearest doc; the remaining doc (id=1) should now be the closest hit:
  w.deleteDocuments(new Term("id", "0"));
  r = w.getReader();
  // can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
  s = newSearcher(r, false);
  hit = (FieldDoc) LatLonPoint.nearest(s, "point", 40.0, 50.0, 1).scoreDocs[0];
  assertEquals("1", r.document(hit.doc).getField("id").stringValue());
  r.close();
  w.close();
  dir.close();
}
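The cast to FieldDoc in the snippet above is what exposes the per-hit sort values. As a hedged illustration (not part of the original test), here is a small helper that could sit in the same class for reading the distance back out of a nearest() hit, assuming the single value in fields[0] is the Haversine distance in meters boxed as a Double:

// Hypothetical helper, not in the original test: extracts the distance from a
// hit returned by LatLonPoint.nearest(), assuming fields[0] holds the distance
// in meters as a Double.
static double hitDistanceMeters(ScoreDoc scoreDoc) {
  FieldDoc fieldDoc = (FieldDoc) scoreDoc; // nearest() hits are FieldDocs
  return ((Double) fieldDoc.fields[0]).doubleValue();
}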
Use of org.apache.lucene.search.FieldDoc in project lucene-solr by apache.
In the class TestLatLonPointDistanceSort, the method doRandomTest:
private void doRandomTest(int numDocs, int numQueries) throws IOException {
  Directory dir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig();
  // else seeds may not reproduce:
  iwc.setMergeScheduler(new SerialMergeScheduler());
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    doc.add(new StoredField("id", i));
    doc.add(new NumericDocValuesField("id", i));
    if (random().nextInt(10) > 7) {
      double latRaw = GeoTestUtil.nextLatitude();
      double lonRaw = GeoTestUtil.nextLongitude();
      // pre-normalize up front, so we can just use quantized value for testing and do simple exact comparisons
      double lat = decodeLatitude(encodeLatitude(latRaw));
      double lon = decodeLongitude(encodeLongitude(lonRaw));
      doc.add(new LatLonDocValuesField("field", lat, lon));
      doc.add(new StoredField("lat", lat));
      doc.add(new StoredField("lon", lon));
    }
    // otherwise "missing"
    writer.addDocument(doc);
  }
  IndexReader reader = writer.getReader();
  IndexSearcher searcher = newSearcher(reader);
  for (int i = 0; i < numQueries; i++) {
    double lat = GeoTestUtil.nextLatitude();
    double lon = GeoTestUtil.nextLongitude();
    double missingValue = Double.POSITIVE_INFINITY;

    // compute the expected results by brute force over every document
    Result[] expected = new Result[reader.maxDoc()];
    for (int doc = 0; doc < reader.maxDoc(); doc++) {
      Document targetDoc = reader.document(doc);
      final double distance;
      if (targetDoc.getField("lat") == null) {
        // missing
        distance = missingValue;
      } else {
        double docLatitude = targetDoc.getField("lat").numericValue().doubleValue();
        double docLongitude = targetDoc.getField("lon").numericValue().doubleValue();
        distance = SloppyMath.haversinMeters(lat, lon, docLatitude, docLongitude);
      }
      int id = targetDoc.getField("id").numericValue().intValue();
      expected[doc] = new Result(id, distance);
    }
    Arrays.sort(expected);

    // randomize the topN a bit
    int topN = TestUtil.nextInt(random(), 1, reader.maxDoc());
    // sort by distance, then ID
    SortField distanceSort = LatLonDocValuesField.newDistanceSort("field", lat, lon);
    distanceSort.setMissingValue(missingValue);
    Sort sort = new Sort(distanceSort, new SortField("id", SortField.Type.INT));

    TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), topN, sort);
    for (int resultNumber = 0; resultNumber < topN; resultNumber++) {
      FieldDoc fieldDoc = (FieldDoc) topDocs.scoreDocs[resultNumber];
      Result actual = new Result((Integer) fieldDoc.fields[1], (Double) fieldDoc.fields[0]);
      assertEquals(expected[resultNumber], actual);
    }

    // get page2 with searchAfter()
    if (topN < reader.maxDoc()) {
      int page2 = TestUtil.nextInt(random(), 1, reader.maxDoc() - topN);
      TopDocs topDocs2 = searcher.searchAfter(topDocs.scoreDocs[topN - 1], new MatchAllDocsQuery(), page2, sort);
      for (int resultNumber = 0; resultNumber < page2; resultNumber++) {
        FieldDoc fieldDoc = (FieldDoc) topDocs2.scoreDocs[resultNumber];
        Result actual = new Result((Integer) fieldDoc.fields[1], (Double) fieldDoc.fields[0]);
        assertEquals(expected[topN + resultNumber], actual);
      }
    }
  }
  reader.close();
  writer.close();
  dir.close();
}
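The Result type compared above is a private helper from the same test class that is not included in this snippet. A plausible reconstruction (an assumption, not the original source): it pairs the stored id with the computed distance, orders by distance and then id to mirror the Sort used in the query, and relies on the pre-quantized coordinates so exact double equality is safe:

// Hypothetical reconstruction of the Result helper referenced above.
private static class Result implements Comparable<Result> {
  final int id;
  final double distance;

  Result(int id, double distance) {
    this.id = id;
    this.distance = distance;
  }

  @Override
  public int compareTo(Result o) {
    // order by distance, then by id, matching Sort(distanceSort, id)
    int cmp = Double.compare(distance, o.distance);
    return cmp != 0 ? cmp : Integer.compare(id, o.id);
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof Result)) return false;
    Result other = (Result) obj;
    // exact comparison is fine because coordinates were quantized up front
    return id == other.id && distance == other.distance;
  }

  @Override
  public int hashCode() {
    return 31 * id + Double.hashCode(distance);
  }

  @Override
  public String toString() {
    return "Result(id=" + id + ", distance=" + distance + ")";
  }
}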
Use of org.apache.lucene.search.FieldDoc in project lucene-solr by apache.
In the class TestNearest, the method testNearestNeighborWithAllDeletedDocs:
public void testNearestNeighborWithAllDeletedDocs() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir, getIndexWriterConfig());
  Document doc = new Document();
  doc.add(new LatLonPoint("point", 40.0, 50.0));
  doc.add(new StringField("id", "0", Field.Store.YES));
  w.addDocument(doc);

  doc = new Document();
  doc.add(new LatLonPoint("point", 45.0, 55.0));
  doc.add(new StringField("id", "1", Field.Store.YES));
  w.addDocument(doc);

  DirectoryReader r = w.getReader();
  // can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
  IndexSearcher s = newSearcher(r, false);
  FieldDoc hit = (FieldDoc) LatLonPoint.nearest(s, "point", 40.0, 50.0, 1).scoreDocs[0];
  assertEquals("0", r.document(hit.doc).getField("id").stringValue());
  r.close();

  // delete both documents; the nearest-neighbor search should now return no hits:
  w.deleteDocuments(new Term("id", "0"));
  w.deleteDocuments(new Term("id", "1"));
  r = w.getReader();
  // can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
  s = newSearcher(r, false);
  assertEquals(0, LatLonPoint.nearest(s, "point", 40.0, 50.0, 1).scoreDocs.length);
  r.close();
  w.close();
  dir.close();
}
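Both TestNearest snippets call a getIndexWriterConfig() helper that is not shown here. Judging from the recurring comment about requiring Lucene60PointsFormat directly, a plausible sketch (an assumption, including the codec name) is a config that pins a concrete codec instead of letting the test framework pick a random one:

// Hypothetical sketch of the getIndexWriterConfig() helper used above; the
// codec name is an assumption, the point is to pin a codec whose points
// format the sandbox nearest-neighbor code can read directly.
private IndexWriterConfig getIndexWriterConfig() {
  IndexWriterConfig iwc = newIndexWriterConfig();
  iwc.setCodec(Codec.forName("Lucene70")); // assumption: a fixed, non-random codec
  return iwc;
}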
Use of org.apache.lucene.search.FieldDoc in project lucene-solr by apache.
In the class TestGrouping, the method assertEquals:
private void assertEquals(int[] docIDtoID, TopGroups<BytesRef> expected, TopGroups<BytesRef> actual, boolean verifyGroupValues, boolean verifyTotalGroupCount, boolean verifySortValues, boolean testScores, boolean idvBasedImplsUsed) {
  if (expected == null) {
    assertNull(actual);
    return;
  }
  assertNotNull(actual);
  assertEquals("expected.groups.length != actual.groups.length", expected.groups.length, actual.groups.length);
  assertEquals("expected.totalHitCount != actual.totalHitCount", expected.totalHitCount, actual.totalHitCount);
  assertEquals("expected.totalGroupedHitCount != actual.totalGroupedHitCount", expected.totalGroupedHitCount, actual.totalGroupedHitCount);
  if (expected.totalGroupCount != null && verifyTotalGroupCount) {
    assertEquals("expected.totalGroupCount != actual.totalGroupCount", expected.totalGroupCount, actual.totalGroupCount);
  }
  for (int groupIDX = 0; groupIDX < expected.groups.length; groupIDX++) {
    if (VERBOSE) {
      System.out.println(" check groupIDX=" + groupIDX);
    }
    final GroupDocs<BytesRef> expectedGroup = expected.groups[groupIDX];
    final GroupDocs<BytesRef> actualGroup = actual.groups[groupIDX];
    if (verifyGroupValues) {
      if (idvBasedImplsUsed) {
        if (actualGroup.groupValue.length == 0) {
          assertNull(expectedGroup.groupValue);
        } else {
          assertEquals(expectedGroup.groupValue, actualGroup.groupValue);
        }
      } else {
        assertEquals(expectedGroup.groupValue, actualGroup.groupValue);
      }
    }
    if (verifySortValues) {
      assertArrayEquals(expectedGroup.groupSortValues, actualGroup.groupSortValues);
    }
    // TODO
    // assertEquals(expectedGroup.maxScore, actualGroup.maxScore);
    assertEquals(expectedGroup.totalHits, actualGroup.totalHits);
    final ScoreDoc[] expectedFDs = expectedGroup.scoreDocs;
    final ScoreDoc[] actualFDs = actualGroup.scoreDocs;
    assertEquals(expectedFDs.length, actualFDs.length);
    for (int docIDX = 0; docIDX < expectedFDs.length; docIDX++) {
      final FieldDoc expectedFD = (FieldDoc) expectedFDs[docIDX];
      final FieldDoc actualFD = (FieldDoc) actualFDs[docIDX];
      //System.out.println(" actual doc=" + docIDtoID[actualFD.doc] + " score=" + actualFD.score);
      assertEquals(expectedFD.doc, docIDtoID[actualFD.doc]);
      if (testScores) {
        assertEquals(expectedFD.score, actualFD.score, 0.1);
      } else {
        // TODO: too anal for now
        //assertEquals(Float.NaN, actualFD.score);
      }
      if (verifySortValues) {
        assertArrayEquals(expectedFD.fields, actualFD.fields);
      }
    }
  }
}
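The docIDtoID array maps the searcher's internal doc IDs back to the stable per-document ids that the expected groups were built against. How the test builds it is not shown in this snippet; a minimal sketch under the assumption that each document stores its stable id as a string field named "id" and that the reader has no deletions:

// Hypothetical construction of a docIDtoID mapping like the one consumed above:
// index position = Lucene docID, value = the stable id stored in that document.
static int[] buildDocIdToId(IndexReader reader) throws IOException {
  int[] docIDtoID = new int[reader.maxDoc()];
  for (int docID = 0; docID < reader.maxDoc(); docID++) {
    docIDtoID[docID] = Integer.parseInt(reader.document(docID).get("id"));
  }
  return docIDtoID;
}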
Use of org.apache.lucene.search.FieldDoc in project lucene-solr by apache.
In the class TestUtil, the method assertEquals:
public static void assertEquals(TopDocs expected, TopDocs actual) {
  Assert.assertEquals("wrong total hits", expected.totalHits, actual.totalHits);
  Assert.assertEquals("wrong maxScore", expected.getMaxScore(), actual.getMaxScore(), 0.0);
  Assert.assertEquals("wrong hit count", expected.scoreDocs.length, actual.scoreDocs.length);
  for (int hitIDX = 0; hitIDX < expected.scoreDocs.length; hitIDX++) {
    final ScoreDoc expectedSD = expected.scoreDocs[hitIDX];
    final ScoreDoc actualSD = actual.scoreDocs[hitIDX];
    Assert.assertEquals("wrong hit docID", expectedSD.doc, actualSD.doc);
    Assert.assertEquals("wrong hit score", expectedSD.score, actualSD.score, 0.0);
    if (expectedSD instanceof FieldDoc) {
      Assert.assertTrue(actualSD instanceof FieldDoc);
      Assert.assertArrayEquals("wrong sort field values", ((FieldDoc) expectedSD).fields, ((FieldDoc) actualSD).fields);
    } else {
      Assert.assertFalse(actualSD instanceof FieldDoc);
    }
  }
}
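A typical use of this helper is comparing the hits of two searches that are expected to be identical, for example the same sorted query run against two searchers that should behave the same. A hedged usage sketch (the query, sort, and searcher names are placeholders, not from the original):

// Hypothetical usage: both searches must yield the same docIDs, scores, and,
// for FieldDoc hits, the same sort values. Scores default to NaN when not
// requested, which still compares equal on both sides.
TopDocs expected = searcher.search(query, 10, sort);
TopDocs actual = otherSearcher.search(query, 10, sort);
TestUtil.assertEquals(expected, actual);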