Use of org.apache.lucene.index.NumericDocValues in project lucene-solr by apache.
The class UninvertingReader, method getNumericDocValues.
@Override
public NumericDocValues getNumericDocValues(String field) throws IOException {
  NumericDocValues values = super.getNumericDocValues(field);
  if (values != null) {
    return values;
  }
  Type v = getType(field);
  if (v != null) {
    switch (v) {
      case INTEGER_POINT:
        return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.INT_POINT_PARSER);
      case FLOAT_POINT:
        return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.FLOAT_POINT_PARSER);
      case LONG_POINT:
        return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LONG_POINT_PARSER);
      case DOUBLE_POINT:
        return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.DOUBLE_POINT_PARSER);
      case LEGACY_INTEGER:
        return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_INT_PARSER);
      case LEGACY_FLOAT:
        return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_FLOAT_PARSER);
      case LEGACY_LONG:
        return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_LONG_PARSER);
      case LEGACY_DOUBLE:
        return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_DOUBLE_PARSER);
    }
  }
  return null;
}
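A minimal consumption sketch (not part of UninvertingReader; the reader variable and the "price" field name are hypothetical): the values returned above follow the iterator-style NumericDocValues contract, so a caller steps through matching documents with nextDoc()/advance() and reads the current value with longValue().
NumericDocValues values = uninvertingReader.getNumericDocValues("price"); // hypothetical reader and field
if (values != null) {
  for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
    long raw = values.longValue();
    // for floating-point types the long is an encoded form of the original value;
    // decode according to the field's Type before using it
  }
}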
Use of org.apache.lucene.index.NumericDocValues in project lucene-solr by apache.
The class TestPointQueries, method verifyBinary.
// verify for byte[][] values
private void verifyBinary(byte[][][] docValues, int[] ids, int numBytesPerDim) throws Exception {
  IndexWriterConfig iwc = newIndexWriterConfig();
  int numDims = docValues[0].length;
  int bytesPerDim = docValues[0][0].length;
  // Else we can get O(N^2) merging:
  int mbd = iwc.getMaxBufferedDocs();
  if (mbd != -1 && mbd < docValues.length / 100) {
    iwc.setMaxBufferedDocs(docValues.length / 100);
  }
  iwc.setCodec(getCodec());
  Directory dir;
  if (docValues.length > 100000) {
    dir = newFSDirectory(createTempDir("TestPointQueries"));
  } else {
    dir = newDirectory();
  }
  RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
  int numValues = docValues.length;
  if (VERBOSE) {
    System.out.println("TEST: numValues=" + numValues + " numDims=" + numDims + " numBytesPerDim=" + numBytesPerDim);
  }
  int missingPct = random().nextInt(100);
  int deletedPct = random().nextInt(100);
  if (VERBOSE) {
    System.out.println(" missingPct=" + missingPct);
    System.out.println(" deletedPct=" + deletedPct);
  }
  BitSet missing = new BitSet();
  BitSet deleted = new BitSet();
  Document doc = null;
  int lastID = -1;
  for (int ord = 0; ord < numValues; ord++) {
    int id = ids[ord];
    if (id != lastID) {
      if (random().nextInt(100) < missingPct) {
        missing.set(id);
        if (VERBOSE) {
          System.out.println(" missing id=" + id);
        }
      }
      if (doc != null) {
        w.addDocument(doc);
        if (random().nextInt(100) < deletedPct) {
          int idToDelete = random().nextInt(id);
          w.deleteDocuments(new Term("id", "" + idToDelete));
          deleted.set(idToDelete);
          if (VERBOSE) {
            System.out.println(" delete id=" + idToDelete);
          }
        }
      }
      doc = new Document();
      doc.add(newStringField("id", "" + id, Field.Store.NO));
      doc.add(new NumericDocValuesField("id", id));
      lastID = id;
    }
    if (missing.get(id) == false) {
      doc.add(new BinaryPoint("value", docValues[ord]));
      if (VERBOSE) {
        System.out.println("id=" + id);
        for (int dim = 0; dim < numDims; dim++) {
          System.out.println(" dim=" + dim + " value=" + bytesToString(docValues[ord][dim]));
        }
      }
    }
  }
  w.addDocument(doc);
  if (random().nextBoolean()) {
    if (VERBOSE) {
      System.out.println(" forceMerge(1)");
    }
    w.forceMerge(1);
  }
  final IndexReader r = w.getReader();
  w.close();
  IndexSearcher s = newSearcher(r, false);
  int numThreads = TestUtil.nextInt(random(), 2, 5);
  if (VERBOSE) {
    System.out.println("TEST: use " + numThreads + " query threads; searcher=" + s);
  }
  List<Thread> threads = new ArrayList<>();
  final int iters = atLeast(100);
  final CountDownLatch startingGun = new CountDownLatch(1);
  final AtomicBoolean failed = new AtomicBoolean();
  for (int i = 0; i < numThreads; i++) {
    Thread thread = new Thread() {
      @Override
      public void run() {
        try {
          _run();
        } catch (Exception e) {
          failed.set(true);
          throw new RuntimeException(e);
        }
      }

      private void _run() throws Exception {
        startingGun.await();
        for (int iter = 0; iter < iters && failed.get() == false; iter++) {
          byte[][] lower = new byte[numDims][];
          byte[][] upper = new byte[numDims][];
          for (int dim = 0; dim < numDims; dim++) {
            lower[dim] = new byte[bytesPerDim];
            random().nextBytes(lower[dim]);
            upper[dim] = new byte[bytesPerDim];
            random().nextBytes(upper[dim]);
            if (StringHelper.compare(bytesPerDim, lower[dim], 0, upper[dim], 0) > 0) {
              byte[] x = lower[dim];
              lower[dim] = upper[dim];
              upper[dim] = x;
            }
          }
          if (VERBOSE) {
            System.out.println("\n" + Thread.currentThread().getName() + ": TEST: iter=" + iter);
            for (int dim = 0; dim < numDims; dim++) {
              System.out.println(" dim=" + dim + " " + bytesToString(lower[dim]) + " TO " + bytesToString(upper[dim]));
            }
          }
          Query query = BinaryPoint.newRangeQuery("value", lower, upper);
          if (VERBOSE) {
            System.out.println(Thread.currentThread().getName() + ": using query: " + query);
          }
          final BitSet hits = new BitSet();
          s.search(query, new SimpleCollector() {
            private int docBase;

            @Override
            public boolean needsScores() {
              return false;
            }

            @Override
            protected void doSetNextReader(LeafReaderContext context) throws IOException {
              docBase = context.docBase;
            }

            @Override
            public void collect(int doc) {
              hits.set(docBase + doc);
            }
          });
          if (VERBOSE) {
            System.out.println(Thread.currentThread().getName() + ": hitCount: " + hits.cardinality());
          }
          BitSet expected = new BitSet();
          for (int ord = 0; ord < numValues; ord++) {
            int id = ids[ord];
            if (missing.get(id) == false && deleted.get(id) == false && matches(bytesPerDim, lower, upper, docValues[ord])) {
              expected.set(id);
            }
          }
          NumericDocValues docIDToID = MultiDocValues.getNumericValues(r, "id");
          int failCount = 0;
          for (int docID = 0; docID < r.maxDoc(); docID++) {
            assertEquals(docID, docIDToID.nextDoc());
            int id = (int) docIDToID.longValue();
            if (hits.get(docID) != expected.get(id)) {
              System.out.println("FAIL: iter=" + iter + " id=" + id + " docID=" + docID + " expected=" + expected.get(id) + " but got " + hits.get(docID) + " deleted?=" + deleted.get(id) + " missing?=" + missing.get(id));
              for (int dim = 0; dim < numDims; dim++) {
                System.out.println(" dim=" + dim + " range: " + bytesToString(lower[dim]) + " TO " + bytesToString(upper[dim]));
                failCount++;
              }
            }
          }
          if (failCount != 0) {
            fail(failCount + " hits were wrong");
          }
        }
      }
    };
    thread.setName("T" + i);
    thread.start();
    threads.add(thread);
  }
  startingGun.countDown();
  for (Thread thread : threads) {
    thread.join();
  }
  IOUtils.close(r, dir);
}
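The matches(...) helper the test calls when building the expected BitSet is not shown in this excerpt. A plausible sketch of it (an assumption, not the project's verbatim code) checks that, in every dimension, the document's bytes fall inside [lower, upper] under the same unsigned lexicographic ordering (StringHelper.compare) the test uses to swap the bounds:
private static boolean matches(int bytesPerDim, byte[][] lower, byte[][] upper, byte[][] value) {
  for (int dim = 0; dim < lower.length; dim++) {
    // outside the range in any single dimension means no match
    if (StringHelper.compare(bytesPerDim, value[dim], 0, lower[dim], 0) < 0
        || StringHelper.compare(bytesPerDim, value[dim], 0, upper[dim], 0) > 0) {
      return false;
    }
  }
  return true;
}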
Use of org.apache.lucene.index.NumericDocValues in project lucene-solr by apache.
The class TestSimilarityProvider, method testBasics.
public void testBasics() throws Exception {
  // sanity check of norms writer
  // TODO: generalize
  NumericDocValues fooNorms = MultiDocValues.getNormValues(reader, "foo");
  NumericDocValues barNorms = MultiDocValues.getNormValues(reader, "bar");
  for (int i = 0; i < reader.maxDoc(); i++) {
    assertEquals(i, fooNorms.nextDoc());
    assertEquals(i, barNorms.nextDoc());
    assertFalse(fooNorms.longValue() == barNorms.longValue());
  }
  // sanity check of searching
  TopDocs foodocs = searcher.search(new TermQuery(new Term("foo", "brown")), 10);
  assertTrue(foodocs.totalHits > 0);
  TopDocs bardocs = searcher.search(new TermQuery(new Term("bar", "brown")), 10);
  assertTrue(bardocs.totalHits > 0);
  assertTrue(foodocs.scoreDocs[0].score < bardocs.scoreDocs[0].score);
}
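The assertions only hold because "foo" and "bar" are scored by different Similarity implementations, which also makes their norms diverge. A minimal sketch of such per-field wiring (an assumption; the test's actual provider, analyzer, and setup are not shown in this excerpt) could look like:
Similarity provider = new PerFieldSimilarityWrapper() {
  @Override
  public Similarity get(String field) {
    // different Similarity per field, so index-time norms and query-time scores differ
    return "foo".equals(field) ? new ClassicSimilarity() : new BM25Similarity();
  }
};
IndexWriterConfig iwc = new IndexWriterConfig(analyzer).setSimilarity(provider); // analyzer assumed to exist
// ... index the documents, open a reader/searcher, then:
searcher.setSimilarity(provider);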
Use of org.apache.lucene.index.NumericDocValues in project lucene-solr by apache.
The class DoubleFieldSource, method getValues.
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
  final NumericDocValues values = getNumericDocValues(context, readerContext);
  return new DoubleDocValues(this) {
    int lastDocID;

    private double getValueForDoc(int doc) throws IOException {
      if (doc < lastDocID) {
        throw new IllegalArgumentException("docs were sent out-of-order: lastDocID=" + lastDocID + " vs docID=" + doc);
      }
      lastDocID = doc;
      int curDocID = values.docID();
      if (doc > curDocID) {
        curDocID = values.advance(doc);
      }
      if (doc == curDocID) {
        return Double.longBitsToDouble(values.longValue());
      } else {
        return 0.0;
      }
    }

    @Override
    public double doubleVal(int doc) throws IOException {
      return getValueForDoc(doc);
    }

    @Override
    public boolean exists(int doc) throws IOException {
      getValueForDoc(doc);
      return doc == values.docID();
    }

    @Override
    public ValueFiller getValueFiller() {
      return new ValueFiller() {
        private final MutableValueDouble mval = new MutableValueDouble();

        @Override
        public MutableValue getValue() {
          return mval;
        }

        @Override
        public void fillValue(int doc) throws IOException {
          mval.value = getValueForDoc(doc);
          mval.exists = exists(doc);
        }
      };
    }
  };
}
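The getNumericDocValues(context, readerContext) call above is not shown in this excerpt; presumably it resolves the per-segment iterator from doc values, roughly along these lines (a sketch, assuming the source stores its field name in a field member):
protected NumericDocValues getNumericDocValues(Map context, LeafReaderContext readerContext) throws IOException {
  // returns an empty iterator rather than null when the segment has no values for the field
  return DocValues.getNumeric(readerContext.reader(), field);
}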
Use of org.apache.lucene.index.NumericDocValues in project lucene-solr by apache.
The class FloatFieldSource, method getValues.
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
  final NumericDocValues arr = getNumericDocValues(context, readerContext);
  return new FloatDocValues(this) {
    int lastDocID;

    private float getValueForDoc(int doc) throws IOException {
      if (doc < lastDocID) {
        throw new IllegalArgumentException("docs were sent out-of-order: lastDocID=" + lastDocID + " vs docID=" + doc);
      }
      lastDocID = doc;
      int curDocID = arr.docID();
      if (doc > curDocID) {
        curDocID = arr.advance(doc);
      }
      if (doc == curDocID) {
        return Float.intBitsToFloat((int) arr.longValue());
      } else {
        return 0f;
      }
    }

    @Override
    public float floatVal(int doc) throws IOException {
      return getValueForDoc(doc);
    }

    @Override
    public boolean exists(int doc) throws IOException {
      getValueForDoc(doc);
      return arr.docID() == doc;
    }

    @Override
    public ValueFiller getValueFiller() {
      return new ValueFiller() {
        private final MutableValueFloat mval = new MutableValueFloat();

        @Override
        public MutableValue getValue() {
          return mval;
        }

        @Override
        public void fillValue(int doc) throws IOException {
          mval.value = floatVal(doc);
          mval.exists = arr.docID() == doc;
        }
      };
    }
  };
}
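A hedged usage sketch (hypothetical "price" field and method name, not from the project): FunctionValues are obtained per leaf, and because getValueForDoc() rejects out-of-order docIDs, a caller must ask for documents in increasing order within each segment.
void dumpFloatValues(IndexSearcher searcher) throws IOException {
  FloatFieldSource source = new FloatFieldSource("price"); // hypothetical field
  Map context = ValueSource.newContext(searcher);
  source.createWeight(context, searcher);
  for (LeafReaderContext leaf : searcher.getIndexReader().leaves()) {
    FunctionValues vals = source.getValues(context, leaf);
    for (int doc = 0; doc < leaf.reader().maxDoc(); doc++) {
      if (vals.exists(doc)) {           // true only when the field has a value for this doc
        float v = vals.floatVal(doc);   // missing docs would return 0f from getValueForDoc
        System.out.println("doc=" + doc + " value=" + v);
      }
    }
  }
}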