Use of org.apache.lucene.document.LongPoint in project lucene-solr by Apache.
Class TestPointQueries, method testWrongNumBytes.
public void testWrongNumBytes() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig();
  iwc.setCodec(getCodec());
  RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);

  Document doc = new Document();
  doc.add(new LongPoint("value", Long.MIN_VALUE));
  w.addDocument(doc);

  IndexReader r = w.getReader();

  // no wrapping, else the exc might happen in executor thread:
  IndexSearcher s = new IndexSearcher(r);
  byte[][] point = new byte[1][];
  point[0] = new byte[10];
  IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
    s.count(BinaryPoint.newRangeQuery("value", point, point));
  });
  assertEquals("field=\"value\" was indexed with bytesPerDim=8 but this query has bytesPerDim=10", expected.getMessage());

  IOUtils.close(r, w, dir);
}
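For contrast, a brief hedged sketch of a query whose encoding does match: the "value" field above was indexed from a LongPoint, i.e. with bytesPerDim=8, so a LongPoint.newRangeQuery against the same searcher is accepted. This line is an illustrative addition (it would have to run before the IOUtils.close call), not part of the original test:

// Both bounds are encoded as 8-byte values, matching how the field was indexed, so no exception is thrown.
assertEquals(1, s.count(LongPoint.newRangeQuery("value", Long.MIN_VALUE, Long.MIN_VALUE)));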
Use of org.apache.lucene.document.LongPoint in project lucene-solr by Apache.
Class TestPointQueries, method testPointInSetQueryManyEqualValues.
public void testPointInSetQueryManyEqualValues() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig();
  iwc.setCodec(getCodec());
  IndexWriter w = new IndexWriter(dir, iwc);

  int zeroCount = 0;
  for (int i = 0; i < 10000; i++) {
    int x = random().nextInt(2);
    if (x == 0) {
      zeroCount++;
    }
    Document doc = new Document();
    doc.add(new IntPoint("int", x));
    doc.add(new LongPoint("long", (long) x));
    doc.add(new FloatPoint("float", (float) x));
    doc.add(new DoublePoint("double", (double) x));
    doc.add(new BinaryPoint("bytes", new byte[] { (byte) x }));
    w.addDocument(doc);
  }

  IndexReader r = DirectoryReader.open(w);
  IndexSearcher s = newSearcher(r, false);

  assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 0)));
  assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 0, -7)));
  assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 7, 0)));
  assertEquals(10000 - zeroCount, s.count(IntPoint.newSetQuery("int", 1)));
  assertEquals(0, s.count(IntPoint.newSetQuery("int", 2)));

  assertEquals(zeroCount, s.count(LongPoint.newSetQuery("long", 0)));
  assertEquals(zeroCount, s.count(LongPoint.newSetQuery("long", 0, -7)));
  assertEquals(zeroCount, s.count(LongPoint.newSetQuery("long", 7, 0)));
  assertEquals(10000 - zeroCount, s.count(LongPoint.newSetQuery("long", 1)));
  assertEquals(0, s.count(LongPoint.newSetQuery("long", 2)));

  assertEquals(zeroCount, s.count(FloatPoint.newSetQuery("float", 0)));
  assertEquals(zeroCount, s.count(FloatPoint.newSetQuery("float", 0, -7)));
  assertEquals(zeroCount, s.count(FloatPoint.newSetQuery("float", 7, 0)));
  assertEquals(10000 - zeroCount, s.count(FloatPoint.newSetQuery("float", 1)));
  assertEquals(0, s.count(FloatPoint.newSetQuery("float", 2)));

  assertEquals(zeroCount, s.count(DoublePoint.newSetQuery("double", 0)));
  assertEquals(zeroCount, s.count(DoublePoint.newSetQuery("double", 0, -7)));
  assertEquals(zeroCount, s.count(DoublePoint.newSetQuery("double", 7, 0)));
  assertEquals(10000 - zeroCount, s.count(DoublePoint.newSetQuery("double", 1)));
  assertEquals(0, s.count(DoublePoint.newSetQuery("double", 2)));

  assertEquals(zeroCount, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0 })));
  assertEquals(zeroCount, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0 }, new byte[] { -7 })));
  assertEquals(zeroCount, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 7 }, new byte[] { 0 })));
  assertEquals(10000 - zeroCount, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 1 })));
  assertEquals(0, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 2 })));

  w.close();
  r.close();
  dir.close();
}
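Duplicate values passed to newSetQuery do not change the result, since a document matches when its value is a member of the (deduplicated) set. A small sketch that could be added alongside the assertions above, reusing s and zeroCount; this is an illustrative addition, not part of the original test:

// Duplicates in the query set are harmless: matching is still "document value is in the set".
assertEquals(zeroCount, s.count(LongPoint.newSetQuery("long", 0, 0, 0)));
assertEquals(zeroCount, s.count(IntPoint.newSetQuery("int", 0, 0, -7)));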
Use of org.apache.lucene.document.LongPoint in project lucene-solr by Apache.
Class TestDemoParallelLeafReader, method getReindexerNewDVFields.
/** Schema change by adding a new number_<schemaGen> DV field each time. */
private ReindexingReader getReindexerNewDVFields(Path root, final AtomicLong currentSchemaGen) throws IOException {
  return new ReindexingReader(root) {

    @Override
    protected IndexWriterConfig getIndexWriterConfig() throws IOException {
      IndexWriterConfig iwc = newIndexWriterConfig();
      TieredMergePolicy tmp = new TieredMergePolicy();
      // We write tiny docs, so we need tiny floor to avoid O(N^2) merging:
      tmp.setFloorSegmentMB(.01);
      iwc.setMergePolicy(tmp);
      return iwc;
    }

    @Override
    protected Directory openDirectory(Path path) throws IOException {
      MockDirectoryWrapper dir = newMockFSDirectory(path);
      dir.setUseSlowOpenClosers(false);
      dir.setThrottling(Throttling.NEVER);
      return dir;
    }

    @Override
    protected void reindex(long oldSchemaGen, long newSchemaGen, LeafReader reader, Directory parallelDir) throws IOException {
      IndexWriterConfig iwc = newIndexWriterConfig();
      // The order of our docIDs must precisely match the incoming reader:
      iwc.setMergePolicy(new LogByteSizeMergePolicy());
      IndexWriter w = new IndexWriter(parallelDir, iwc);
      int maxDoc = reader.maxDoc();

      if (oldSchemaGen <= 0) {
        // Must slowly parse the stored field into a new doc values field:
        for (int i = 0; i < maxDoc; i++) {
          // TODO: is this still O(blockSize^2)?
          Document oldDoc = reader.document(i);
          Document newDoc = new Document();
          long value = Long.parseLong(oldDoc.get("text").split(" ")[1]);
          newDoc.add(new NumericDocValuesField("number_" + newSchemaGen, value));
          newDoc.add(new LongPoint("number", value));
          w.addDocument(newDoc);
        }
      } else {
        // Just carry over doc values from previous field:
        NumericDocValues oldValues = reader.getNumericDocValues("number_" + oldSchemaGen);
        assertNotNull("oldSchemaGen=" + oldSchemaGen, oldValues);
        for (int i = 0; i < maxDoc; i++) {
          // TODO: is this still O(blockSize^2)?
          assertEquals(i, oldValues.nextDoc());
          Document oldDoc = reader.document(i);
          Document newDoc = new Document();
          newDoc.add(new NumericDocValuesField("number_" + newSchemaGen, oldValues.longValue()));
          w.addDocument(newDoc);
        }
      }

      w.forceMerge(1);
      w.close();
    }

    @Override
    protected long getCurrentSchemaGen() {
      return currentSchemaGen.get();
    }

    @Override
    protected void checkParallelReader(LeafReader r, LeafReader parR, long schemaGen) throws IOException {
      String fieldName = "number_" + schemaGen;
      if (DEBUG) {
        System.out.println(Thread.currentThread().getName() + ": TEST: now check parallel number DVs field=" + fieldName + " r=" + r + " parR=" + parR);
      }
      NumericDocValues numbers = parR.getNumericDocValues(fieldName);
      if (numbers == null) {
        return;
      }
      int maxDoc = r.maxDoc();
      boolean failed = false;
      for (int i = 0; i < maxDoc; i++) {
        Document oldDoc = r.document(i);
        long value = Long.parseLong(oldDoc.get("text").split(" ")[1]);
        assertEquals(i, numbers.nextDoc());
        if (value != numbers.longValue()) {
          if (DEBUG) {
            System.out.println("FAIL: docID=" + i + " " + oldDoc + " value=" + value + " number=" + numbers.longValue() + " numbers=" + numbers);
          }
          failed = true;
        } else if (failed) {
          if (DEBUG) {
            System.out.println("OK: docID=" + i + " " + oldDoc + " value=" + value + " number=" + numbers.longValue());
          }
        }
      }
      assertFalse("FAILED field=" + fieldName + " r=" + r, failed);
    }
  };
}
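Because the oldSchemaGen <= 0 branch above also indexes a LongPoint("number", value) into the parallel segments, the reindexed data can be searched with a point range query. A minimal hypothetical sketch; the directory handle and range bounds are assumptions for illustration, not part of the original test:

// Hypothetical: open the parallel index written by reindex(...) after it has committed,
// then count documents whose reindexed "number" value falls in an illustrative range.
try (DirectoryReader parallelReader = DirectoryReader.open(parallelDir)) {
  IndexSearcher parallelSearcher = new IndexSearcher(parallelReader);
  int hits = parallelSearcher.count(LongPoint.newRangeQuery("number", 0L, 1000L));
  System.out.println("hits=" + hits);
}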
Use of org.apache.lucene.document.LongPoint in project lucene-solr by Apache.
Class TestPointValues, method testInvalidLongPointUsage.
public void testInvalidLongPointUsage() throws Exception {
  LongPoint field = new LongPoint("field", 17, 42);

  expectThrows(IllegalArgumentException.class, () -> {
    field.setLongValue(14);
  });

  expectThrows(IllegalStateException.class, () -> {
    field.numericValue();
  });
}
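Both calls fail above because the field was created with two dimensions (17, 42). For comparison, a brief sketch of the valid single-dimension case, where both calls are supported; this is an illustrative addition, not part of the original test:

LongPoint singleDim = new LongPoint("field", 17L);        // one dimension only
singleDim.setLongValue(14L);                              // allowed, since the field holds a single long
assertEquals(14L, singleDim.numericValue().longValue());  // numericValue() is defined for one dimension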
Use of org.apache.lucene.document.LongPoint in project lucene-solr by Apache.
Class TestPointQueries, method testBasicPointInSetQuery.
public void testBasicPointInSetQuery() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig();
  iwc.setCodec(getCodec());
  IndexWriter w = new IndexWriter(dir, iwc);

  Document doc = new Document();
  doc.add(new IntPoint("int", 17));
  doc.add(new LongPoint("long", 17L));
  doc.add(new FloatPoint("float", 17.0f));
  doc.add(new DoublePoint("double", 17.0));
  doc.add(new BinaryPoint("bytes", new byte[] { 0, 17 }));
  w.addDocument(doc);

  doc = new Document();
  doc.add(new IntPoint("int", 42));
  doc.add(new LongPoint("long", 42L));
  doc.add(new FloatPoint("float", 42.0f));
  doc.add(new DoublePoint("double", 42.0));
  doc.add(new BinaryPoint("bytes", new byte[] { 0, 42 }));
  w.addDocument(doc);

  doc = new Document();
  doc.add(new IntPoint("int", 97));
  doc.add(new LongPoint("long", 97L));
  doc.add(new FloatPoint("float", 97.0f));
  doc.add(new DoublePoint("double", 97.0));
  doc.add(new BinaryPoint("bytes", new byte[] { 0, 97 }));
  w.addDocument(doc);

  IndexReader r = DirectoryReader.open(w);
  IndexSearcher s = newSearcher(r, false);

  assertEquals(0, s.count(IntPoint.newSetQuery("int", 16)));
  assertEquals(1, s.count(IntPoint.newSetQuery("int", 17)));
  assertEquals(3, s.count(IntPoint.newSetQuery("int", 17, 97, 42)));
  assertEquals(3, s.count(IntPoint.newSetQuery("int", -7, 17, 42, 97)));
  assertEquals(3, s.count(IntPoint.newSetQuery("int", 17, 20, 42, 97)));
  assertEquals(3, s.count(IntPoint.newSetQuery("int", 17, 105, 42, 97)));

  assertEquals(0, s.count(LongPoint.newSetQuery("long", 16)));
  assertEquals(1, s.count(LongPoint.newSetQuery("long", 17)));
  assertEquals(3, s.count(LongPoint.newSetQuery("long", 17, 97, 42)));
  assertEquals(3, s.count(LongPoint.newSetQuery("long", -7, 17, 42, 97)));
  assertEquals(3, s.count(LongPoint.newSetQuery("long", 17, 20, 42, 97)));
  assertEquals(3, s.count(LongPoint.newSetQuery("long", 17, 105, 42, 97)));

  assertEquals(0, s.count(FloatPoint.newSetQuery("float", 16)));
  assertEquals(1, s.count(FloatPoint.newSetQuery("float", 17)));
  assertEquals(3, s.count(FloatPoint.newSetQuery("float", 17, 97, 42)));
  assertEquals(3, s.count(FloatPoint.newSetQuery("float", -7, 17, 42, 97)));
  assertEquals(3, s.count(FloatPoint.newSetQuery("float", 17, 20, 42, 97)));
  assertEquals(3, s.count(FloatPoint.newSetQuery("float", 17, 105, 42, 97)));

  assertEquals(0, s.count(DoublePoint.newSetQuery("double", 16)));
  assertEquals(1, s.count(DoublePoint.newSetQuery("double", 17)));
  assertEquals(3, s.count(DoublePoint.newSetQuery("double", 17, 97, 42)));
  assertEquals(3, s.count(DoublePoint.newSetQuery("double", -7, 17, 42, 97)));
  assertEquals(3, s.count(DoublePoint.newSetQuery("double", 17, 20, 42, 97)));
  assertEquals(3, s.count(DoublePoint.newSetQuery("double", 17, 105, 42, 97)));

  assertEquals(0, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, 16 })));
  assertEquals(1, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, 17 })));
  assertEquals(3, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, 17 }, new byte[] { 0, 97 }, new byte[] { 0, 42 })));
  assertEquals(3, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, -7 }, new byte[] { 0, 17 }, new byte[] { 0, 42 }, new byte[] { 0, 97 })));
  assertEquals(3, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, 17 }, new byte[] { 0, 20 }, new byte[] { 0, 42 }, new byte[] { 0, 97 })));
  assertEquals(3, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, 17 }, new byte[] { 0, 105 }, new byte[] { 0, 42 }, new byte[] { 0, 97 })));

  w.close();
  r.close();
  dir.close();
}
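For a single value, the point classes also offer exact-match factories that behave like one-element set queries. A short sketch reusing the searcher s from the test above; the expected counts are assumptions based on the three indexed documents, and the lines are illustrative additions rather than part of the original test:

assertEquals(1, s.count(LongPoint.newExactQuery("long", 17L)));  // only the first document has long=17
assertEquals(0, s.count(LongPoint.newExactQuery("long", 16L)));  // no document has long=16
assertEquals(1, s.count(IntPoint.newExactQuery("int", 42)));     // only the second document has int=42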