Use of org.apache.lucene.index.NumericDocValues in the project lucene-solr by Apache.
The class TestFieldCacheWithThreads, method test().
/**
 * Hammers the FieldCache-backed doc-values views (numeric, binary, sorted) for a
 * single-segment index from several threads at once, verifying that every thread
 * reads back exactly the values that were indexed.
 */
public void test() throws Exception {
  Directory dir = newDirectory();
  IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));

  // Expected per-document values, recorded while indexing.
  final List<Long> expectedNumbers = new ArrayList<>();
  final List<BytesRef> expectedBinary = new ArrayList<>();
  final List<BytesRef> expectedSorted = new ArrayList<>();

  final int numDocs = atLeast(100);
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    long number = random().nextLong();
    doc.add(new NumericDocValuesField("number", number));
    BytesRef bytes = new BytesRef(TestUtil.randomRealisticUnicodeString(random()));
    doc.add(new BinaryDocValuesField("bytes", bytes));
    expectedBinary.add(bytes);
    bytes = new BytesRef(TestUtil.randomRealisticUnicodeString(random()));
    doc.add(new SortedDocValuesField("sorted", bytes));
    expectedSorted.add(bytes);
    w.addDocument(doc);
    expectedNumbers.add(number);
  }

  // Collapse to one segment so a single LeafReader covers every doc.
  w.forceMerge(1);
  final IndexReader r = DirectoryReader.open(w);
  w.close();
  assertEquals(1, r.leaves().size());
  final LeafReader ar = r.leaves().get(0).reader();

  int numThreads = TestUtil.nextInt(random(), 2, 5);
  List<Thread> threads = new ArrayList<>();
  final CountDownLatch startingGun = new CountDownLatch(1);
  for (int threadIndex = 0; threadIndex < numThreads; threadIndex++) {
    // Each thread gets its own seeded Random so threads don't contend on random().
    final Random threadRandom = new Random(random().nextLong());
    Thread thread = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          startingGun.await();
          int iters = atLeast(1000);
          for (int iter = 0; iter < iters; iter++) {
            int docID = threadRandom.nextInt(numDocs);

            // Randomly pick one of the four numeric parsers; the stored value
            // is asserted identically regardless of which parser resolved it.
            final FieldCache.Parser parser;
            switch (threadRandom.nextInt(4)) {
              case 0:
                parser = FieldCache.INT_POINT_PARSER;
                break;
              case 1:
                parser = FieldCache.LONG_POINT_PARSER;
                break;
              case 2:
                parser = FieldCache.FLOAT_POINT_PARSER;
                break;
              default:
                parser = FieldCache.DOUBLE_POINT_PARSER;
                break;
            }
            NumericDocValues values = FieldCache.DEFAULT.getNumerics(ar, "number", parser);
            assertEquals(docID, values.advance(docID));
            assertEquals(expectedNumbers.get(docID).longValue(), values.longValue());

            BinaryDocValues bdv = FieldCache.DEFAULT.getTerms(ar, "bytes");
            assertEquals(docID, bdv.advance(docID));
            assertEquals(expectedBinary.get(docID), bdv.binaryValue());

            SortedDocValues sdv = FieldCache.DEFAULT.getTermsIndex(ar, "sorted");
            assertEquals(docID, sdv.advance(docID));
            assertEquals(expectedSorted.get(docID), sdv.binaryValue());
          }
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    });
    thread.start();
    threads.add(thread);
  }

  // Release all reader threads at once, then wait for them to finish.
  startingGun.countDown();
  for (Thread thread : threads) {
    thread.join();
  }
  r.close();
  dir.close();
}
Use of org.apache.lucene.index.NumericDocValues in the project lucene-solr by Apache.
The class TestLegacyFieldCache, method testIntFieldCache().
// Make sure that the use of GrowableWriter doesn't prevent from using the full int range
/**
 * Verifies that the int field cache covers the full int range (including
 * Integer.MIN_VALUE/MAX_VALUE), i.e. that the internal GrowableWriter does not
 * clip extreme values, and that missing docs are skipped by the iterator.
 */
public void testIntFieldCache() throws IOException {
  Directory dir = newDirectory();
  IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
  cfg.setMergePolicy(newLogMergePolicy());
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);

  // A single reusable document/field; setIntValue is called before each add.
  Document doc = new Document();
  LegacyIntField field = new LegacyIntField("f", 0, Store.YES);
  doc.add(field);

  final int[] expected = new int[TestUtil.nextInt(random(), 1, 10)];
  Set<Integer> missingDocs = new HashSet<>();
  for (int i = 0; i < expected.length; ++i) {
    // Bias toward the extremes so range clipping would be caught.
    final int roll = random().nextInt(10);
    final int v;
    if (roll == 0) {
      v = Integer.MIN_VALUE;
    } else if (roll == 1) {
      v = 0;
    } else if (roll == 2) {
      v = Integer.MAX_VALUE;
    } else {
      v = TestUtil.nextInt(random(), -10, 10);
    }
    expected[i] = v;
    if (v == 0 && random().nextBoolean()) {
      // Leave the field out entirely for this doc ("missing").
      iw.addDocument(new Document());
      missingDocs.add(i);
    } else {
      field.setIntValue(v);
      iw.addDocument(doc);
    }
  }
  iw.forceMerge(1);

  final DirectoryReader reader = iw.getReader();
  final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LEGACY_INT_PARSER);
  for (int i = 0; i < expected.length; ++i) {
    if (missingDocs.contains(i) == false) {
      // The iterator must land exactly on each non-missing doc, in order.
      assertEquals(i, ints.nextDoc());
      assertEquals(expected[i], ints.longValue());
    }
  }
  assertEquals(NO_MORE_DOCS, ints.nextDoc());

  reader.close();
  iw.close();
  dir.close();
}
Use of org.apache.lucene.index.NumericDocValues in the project lucene-solr by Apache.
The class TestLegacyFieldCache, method testGetDocsWithFieldThreadSafety().
// Stress-tests thread safety of FieldCache.getDocsWithField/getNumerics by having
// several threads repeatedly read the cache while a barrier action periodically
// purges all caches, forcing concurrent re-population.
// NOTE(review): `reader` is not declared in this snippet — presumably a static
// field of the enclosing test class indexing "sparse" on even doc IDs; confirm.
public void testGetDocsWithFieldThreadSafety() throws Exception {
final FieldCache cache = FieldCache.DEFAULT;
cache.purgeAllCaches();
int NUM_THREADS = 3;
Thread[] threads = new Thread[NUM_THREADS];
final AtomicBoolean failed = new AtomicBoolean();
final AtomicInteger iters = new AtomicInteger();
final int NUM_ITER = 200 * RANDOM_MULTIPLIER;
// Barrier action runs once per generation, when all threads have arrived:
// wipe the cache and count one completed iteration.
final CyclicBarrier restart = new CyclicBarrier(NUM_THREADS, new Runnable() {
@Override
public void run() {
cache.purgeAllCaches();
iters.incrementAndGet();
}
});
for (int threadIDX = 0; threadIDX < NUM_THREADS; threadIDX++) {
threads[threadIDX] = new Thread() {
@Override
public void run() {
try {
while (!failed.get()) {
final int op = random().nextInt(3);
if (op == 0) {
// Purge all caches & resume, once all
// threads get here:
restart.await();
if (iters.get() >= NUM_ITER) {
break;
}
} else if (op == 1) {
// Read the docs-with-field bits; "sparse" is populated on even docs.
Bits docsWithField = cache.getDocsWithField(reader, "sparse", null);
for (int i = 0; i < docsWithField.length(); i++) {
assertEquals(i % 2 == 0, docsWithField.get(i));
}
} else {
// Read the numeric values through the cache; value equals the doc ID.
NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER);
for (int i = 0; i < reader.maxDoc(); i++) {
if (i % 2 == 0) {
assertEquals(i, ints.nextDoc());
assertEquals(i, ints.longValue());
}
}
}
}
} catch (Throwable t) {
// Flag the failure and break the barrier so sibling threads don't hang
// waiting for a thread that will never arrive.
failed.set(true);
restart.reset();
throw new RuntimeException(t);
}
}
};
threads[threadIDX].start();
}
for (int threadIDX = 0; threadIDX < NUM_THREADS; threadIDX++) {
threads[threadIDX].join();
}
assertFalse(failed.get());
}
Use of org.apache.lucene.index.NumericDocValues in the project lucene-solr by Apache.
The class TestLegacyFieldCache, method testLongFieldCache().
// Make sure that the use of GrowableWriter doesn't prevent from using the full long range
/**
 * Verifies that the long field cache covers the full long range (including
 * Long.MIN_VALUE/MAX_VALUE), i.e. that the internal GrowableWriter does not
 * clip extreme values, and that missing docs are skipped by the iterator.
 */
public void testLongFieldCache() throws IOException {
  Directory dir = newDirectory();
  IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
  cfg.setMergePolicy(newLogMergePolicy());
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);

  // A single reusable document/field; setLongValue is called before each add.
  Document doc = new Document();
  LegacyLongField field = new LegacyLongField("f", 0L, Store.YES);
  doc.add(field);

  final long[] expected = new long[TestUtil.nextInt(random(), 1, 10)];
  Set<Integer> missingDocs = new HashSet<>();
  for (int i = 0; i < expected.length; ++i) {
    // Bias toward the extremes so range clipping would be caught.
    final int roll = random().nextInt(10);
    final long v;
    if (roll == 0) {
      v = Long.MIN_VALUE;
    } else if (roll == 1) {
      v = 0;
    } else if (roll == 2) {
      v = Long.MAX_VALUE;
    } else {
      v = TestUtil.nextLong(random(), -10, 10);
    }
    expected[i] = v;
    if (v == 0 && random().nextBoolean()) {
      // Leave the field out entirely for this doc ("missing").
      iw.addDocument(new Document());
      missingDocs.add(i);
    } else {
      field.setLongValue(v);
      iw.addDocument(doc);
    }
  }
  iw.forceMerge(1);

  final DirectoryReader reader = iw.getReader();
  final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LEGACY_LONG_PARSER);
  for (int i = 0; i < expected.length; ++i) {
    if (missingDocs.contains(i) == false) {
      // The iterator must land exactly on each non-missing doc, in order.
      assertEquals(i, longs.nextDoc());
      assertEquals(expected[i], longs.longValue());
    }
  }
  assertEquals(NO_MORE_DOCS, longs.nextDoc());

  reader.close();
  iw.close();
  dir.close();
}
Use of org.apache.lucene.index.NumericDocValues in the project lucene-solr by Apache.
The class TestLegacyFieldCache, method testNonIndexedFields().
/**
 * Verifies that asking the field cache for numerics on stored-only (non-indexed)
 * fields yields empty iterators and — crucially — does not populate the cache.
 */
public void testNonIndexedFields() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir);

  // Stored-only fields: visible to stored-fields APIs, invisible to the cache.
  Document doc = new Document();
  for (String name : new String[] {
      "bogusbytes", "bogusshorts", "bogusints", "boguslongs",
      "bogusfloats", "bogusdoubles", "bogusbits"}) {
    doc.add(new StoredField(name, "bogus"));
  }
  iw.addDocument(doc);
  DirectoryReader ir = iw.getReader();
  iw.close();
  LeafReader ar = getOnlyLeafReader(ir);

  final FieldCache cache = FieldCache.DEFAULT;
  cache.purgeAllCaches();
  assertEquals(0, cache.getCacheEntries().length);

  // Each legacy parser must produce an immediately-exhausted iterator.
  String[] fields = {"bogusints", "boguslongs", "bogusfloats", "bogusdoubles"};
  FieldCache.Parser[] parsers = {
      FieldCache.LEGACY_INT_PARSER,
      FieldCache.LEGACY_LONG_PARSER,
      FieldCache.LEGACY_FLOAT_PARSER,
      FieldCache.LEGACY_DOUBLE_PARSER};
  for (int i = 0; i < fields.length; i++) {
    NumericDocValues dv = cache.getNumerics(ar, fields[i], parsers[i]);
    assertEquals(NO_MORE_DOCS, dv.nextDoc());
  }

  // check that we cached nothing
  assertEquals(0, cache.getCacheEntries().length);
  ir.close();
  dir.close();
}
Aggregations