Use of org.apache.lucene.document.LongPoint in project lucene-solr by apache.
In the class TestPointQueries, method testBasicMultiValuedPointInSetQuery:
public void testBasicMultiValuedPointInSetQuery() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig();
  iwc.setCodec(getCodec());
  IndexWriter w = new IndexWriter(dir, iwc);

  // Single document carrying two values per point field:
  Document doc = new Document();
  doc.add(new IntPoint("int", 17));
  doc.add(new IntPoint("int", 42));
  doc.add(new LongPoint("long", 17L));
  doc.add(new LongPoint("long", 42L));
  doc.add(new FloatPoint("float", 17.0f));
  doc.add(new FloatPoint("float", 42.0f));
  doc.add(new DoublePoint("double", 17.0));
  doc.add(new DoublePoint("double", 42.0));
  doc.add(new BinaryPoint("bytes", new byte[] { 0, 17 }));
  doc.add(new BinaryPoint("bytes", new byte[] { 0, 42 }));
  w.addDocument(doc);

  IndexReader r = DirectoryReader.open(w);
  IndexSearcher s = newSearcher(r, false);

  // A set query matches the document if any one of its values is in the set:
  assertEquals(0, s.count(IntPoint.newSetQuery("int", 16)));
  assertEquals(1, s.count(IntPoint.newSetQuery("int", 17)));
  assertEquals(1, s.count(IntPoint.newSetQuery("int", 17, 97, 42)));
  assertEquals(1, s.count(IntPoint.newSetQuery("int", -7, 17, 42, 97)));
  assertEquals(0, s.count(IntPoint.newSetQuery("int", 16, 20, 41, 97)));

  assertEquals(0, s.count(LongPoint.newSetQuery("long", 16)));
  assertEquals(1, s.count(LongPoint.newSetQuery("long", 17)));
  assertEquals(1, s.count(LongPoint.newSetQuery("long", 17, 97, 42)));
  assertEquals(1, s.count(LongPoint.newSetQuery("long", -7, 17, 42, 97)));
  assertEquals(0, s.count(LongPoint.newSetQuery("long", 16, 20, 41, 97)));

  assertEquals(0, s.count(FloatPoint.newSetQuery("float", 16)));
  assertEquals(1, s.count(FloatPoint.newSetQuery("float", 17)));
  assertEquals(1, s.count(FloatPoint.newSetQuery("float", 17, 97, 42)));
  assertEquals(1, s.count(FloatPoint.newSetQuery("float", -7, 17, 42, 97)));
  assertEquals(0, s.count(FloatPoint.newSetQuery("float", 16, 20, 41, 97)));

  assertEquals(0, s.count(DoublePoint.newSetQuery("double", 16)));
  assertEquals(1, s.count(DoublePoint.newSetQuery("double", 17)));
  assertEquals(1, s.count(DoublePoint.newSetQuery("double", 17, 97, 42)));
  assertEquals(1, s.count(DoublePoint.newSetQuery("double", -7, 17, 42, 97)));
  assertEquals(0, s.count(DoublePoint.newSetQuery("double", 16, 20, 41, 97)));

  assertEquals(0, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, 16 })));
  assertEquals(1, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, 17 })));
  assertEquals(1, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, 17 }, new byte[] { 0, 97 }, new byte[] { 0, 42 })));
  assertEquals(1, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, -7 }, new byte[] { 0, 17 }, new byte[] { 0, 42 }, new byte[] { 0, 97 })));
  assertEquals(0, s.count(BinaryPoint.newSetQuery("bytes", new byte[] { 0, 16 }, new byte[] { 0, 20 }, new byte[] { 0, 41 }, new byte[] { 0, 97 })));

  w.close();
  r.close();
  dir.close();
}
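The test above relies on LuceneTestCase helpers (newDirectory, newIndexWriterConfig, getCodec, newSearcher). A minimal standalone sketch of the same multi-valued pattern outside the test framework follows; the class name, the RAMDirectory choice, and the printed counts are illustrative assumptions based on the Lucene 6/7-era API these snippets come from:

import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class MultiValuedLongPointExample {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory(); // illustrative in-memory directory
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig());
    Document doc = new Document();
    // A point field becomes multi-valued simply by adding it more than once.
    doc.add(new LongPoint("long", 17L));
    doc.add(new LongPoint("long", 42L));
    w.addDocument(doc);
    w.close();
    try (DirectoryReader r = DirectoryReader.open(dir)) {
      IndexSearcher s = new IndexSearcher(r);
      // A set query matches the document if any one of its values is in the set.
      System.out.println(s.count(LongPoint.newSetQuery("long", 17L, 97L))); // expected: 1
      System.out.println(s.count(LongPoint.newSetQuery("long", 16L, 20L))); // expected: 0
    }
    dir.close();
  }
}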
Use of org.apache.lucene.document.LongPoint in project lucene-solr by apache.
In the class TestPointQueries, method testAllPointDocsWereDeletedAndThenMergedAgain:
public void testAllPointDocsWereDeletedAndThenMergedAgain() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig();
  iwc.setCodec(getCodec());
  IndexWriter w = new IndexWriter(dir, iwc);

  Document doc = new Document();
  doc.add(new StringField("id", "0", Field.Store.NO));
  doc.add(new LongPoint("value", 0L));
  w.addDocument(doc);
  // Add document that won't be deleted to avoid IW dropping
  // segment below since it's 100% deleted:
  w.addDocument(new Document());
  w.commit();

  // Need another segment so we invoke BKDWriter.merge
  doc = new Document();
  doc.add(new StringField("id", "0", Field.Store.NO));
  doc.add(new LongPoint("value", 0L));
  w.addDocument(doc);
  w.addDocument(new Document());
  w.deleteDocuments(new Term("id", "0"));
  w.forceMerge(1);

  doc = new Document();
  doc.add(new StringField("id", "0", Field.Store.NO));
  doc.add(new LongPoint("value", 0L));
  w.addDocument(doc);
  w.addDocument(new Document());
  w.deleteDocuments(new Term("id", "0"));
  w.forceMerge(1);

  IOUtils.close(w, dir);
}
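The extra empty documents keep each segment partially live: without them IndexWriter would drop a segment outright once all of its documents are deleted, and the forceMerge(1) calls would never push a point field whose documents are 100% deleted through BKDWriter.merge, which is precisely the code path this test exercises.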
Use of org.apache.lucene.document.LongPoint in project lucene-solr by apache.
In the class TestDocValuesQueries, method doTestDuelPointRangeNumericRangeQuery:
private void doTestDuelPointRangeNumericRangeQuery(boolean sortedNumeric, int maxValuesPerDoc) throws IOException {
  final int iters = atLeast(10);
  for (int iter = 0; iter < iters; ++iter) {
    Directory dir = newDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
    final int numDocs = atLeast(100);
    for (int i = 0; i < numDocs; ++i) {
      Document doc = new Document();
      final int numValues = TestUtil.nextInt(random(), 0, maxValuesPerDoc);
      for (int j = 0; j < numValues; ++j) {
        // Index each random value both as doc values and as a point:
        final long value = TestUtil.nextLong(random(), -100, 10000);
        if (sortedNumeric) {
          doc.add(new SortedNumericDocValuesField("dv", value));
        } else {
          doc.add(new NumericDocValuesField("dv", value));
        }
        doc.add(new LongPoint("idx", value));
      }
      iw.addDocument(doc);
    }
    if (random().nextBoolean()) {
      iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, 10L));
    }
    final IndexReader reader = iw.getReader();
    final IndexSearcher searcher = newSearcher(reader, false);
    iw.close();
    for (int i = 0; i < 100; ++i) {
      // Duel a point range query against the equivalent doc-values range query:
      final long min = random().nextBoolean() ? Long.MIN_VALUE : TestUtil.nextLong(random(), -100, 10000);
      final long max = random().nextBoolean() ? Long.MAX_VALUE : TestUtil.nextLong(random(), -100, 10000);
      final Query q1 = LongPoint.newRangeQuery("idx", min, max);
      final Query q2;
      if (sortedNumeric) {
        q2 = SortedNumericDocValuesField.newRangeQuery("dv", min, max);
      } else {
        q2 = NumericDocValuesField.newRangeQuery("dv", min, max);
      }
      assertSameMatches(searcher, q1, q2, false);
    }
    reader.close();
    dir.close();
  }
}
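The assertSameMatches helper is defined elsewhere in the test class and is not shown here. A minimal sketch of what such a duel-style check might look like, with the name taken from the call site and the exact semantics assumed (the real helper presumably also compares scores when its last argument is true):

import java.io.IOException;
import java.util.Arrays;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;

import static org.junit.Assert.assertArrayEquals;

// Hypothetical reconstruction of the duel-style check used above.
final class DuelAssertions {
  static void assertSameMatches(IndexSearcher searcher, Query q1, Query q2, boolean checkScores) throws IOException {
    int n = Math.max(1, searcher.getIndexReader().maxDoc());
    TopDocs td1 = searcher.search(q1, n, Sort.INDEXORDER);
    TopDocs td2 = searcher.search(q2, n, Sort.INDEXORDER);
    // Compare the matched doc ids in index order; score comparison is omitted here.
    assertArrayEquals(
        Arrays.stream(td1.scoreDocs).mapToInt(sd -> sd.doc).toArray(),
        Arrays.stream(td2.scoreDocs).mapToInt(sd -> sd.doc).toArray());
  }
}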
Use of org.apache.lucene.document.LongPoint in project lucene-solr by apache.
In the class TestIndexOrDocValuesQuery, method testUseIndexForSelectiveQueries:
public void testUseIndexForSelectiveQueries() throws IOException {
  Directory dir = newDirectory();
  IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setCodec(TestUtil.getDefaultCodec()));
  for (int i = 0; i < 2000; ++i) {
    Document doc = new Document();
    if (i == 42) {
      doc.add(new StringField("f1", "bar", Store.NO));
      doc.add(new LongPoint("f2", 42L));
      doc.add(new NumericDocValuesField("f2", 42L));
    } else if (i == 100) {
      doc.add(new StringField("f1", "foo", Store.NO));
      doc.add(new LongPoint("f2", 2L));
      doc.add(new NumericDocValuesField("f2", 2L));
    } else {
      doc.add(new StringField("f1", "bar", Store.NO));
      doc.add(new LongPoint("f2", 2L));
      doc.add(new NumericDocValuesField("f2", 2L));
    }
    w.addDocument(doc);
  }
  w.forceMerge(1);
  IndexReader reader = DirectoryReader.open(w);
  IndexSearcher searcher = newSearcher(reader);
  searcher.setQueryCache(null);

  // The term query is more selective, so the IndexOrDocValuesQuery should use doc values
  final Query q1 = new BooleanQuery.Builder()
      .add(new TermQuery(new Term("f1", "foo")), Occur.MUST)
      .add(new IndexOrDocValuesQuery(
          LongPoint.newExactQuery("f2", 2),
          NumericDocValuesField.newRangeQuery("f2", 2L, 2L)), Occur.MUST)
      .build();
  final Weight w1 = searcher.createNormalizedWeight(q1, random().nextBoolean());
  final Scorer s1 = w1.scorer(searcher.getIndexReader().leaves().get(0));
  // means we use doc values
  assertNotNull(s1.twoPhaseIterator());

  // The term query is less selective, so the IndexOrDocValuesQuery should use points
  final Query q2 = new BooleanQuery.Builder()
      .add(new TermQuery(new Term("f1", "bar")), Occur.MUST)
      .add(new IndexOrDocValuesQuery(
          LongPoint.newExactQuery("f2", 42),
          NumericDocValuesField.newRangeQuery("f2", 42L, 42L)), Occur.MUST)
      .build();
  final Weight w2 = searcher.createNormalizedWeight(q2, random().nextBoolean());
  final Scorer s2 = w2.scorer(searcher.getIndexReader().leaves().get(0));
  // means we use points
  assertNull(s2.twoPhaseIterator());

  reader.close();
  w.close();
  dir.close();
}
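The twoPhaseIterator assertions rely on an implementation detail worth spelling out: the doc-values side of an IndexOrDocValuesQuery verifies candidate documents through a two-phase iterator, while the points side builds its matches up front from the BKD tree and exposes no two-phase iterator, so a non-null result signals that the doc-values execution path was chosen. Note also that createNormalizedWeight is the pre-Lucene-8 API; later versions build weights via IndexSearcher.createWeight with an explicit ScoreMode.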
Use of org.apache.lucene.document.LongPoint in project elasticsearch by elastic.
In the class DateFieldMapper, method parseCreateField:
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
  // Resolve the date string: prefer an externally supplied value, otherwise read it
  // from the parser, falling back to the configured null value.
  String dateAsString;
  if (context.externalValueSet()) {
    Object dateAsObject = context.externalValue();
    if (dateAsObject == null) {
      dateAsString = null;
    } else {
      dateAsString = dateAsObject.toString();
    }
  } else {
    dateAsString = context.parser().textOrNull();
  }
  if (dateAsString == null) {
    dateAsString = fieldType().nullValueAsString();
  }
  if (dateAsString == null) {
    return;
  }

  // Parse the date string into an epoch-millisecond timestamp.
  long timestamp;
  try {
    timestamp = fieldType().parse(dateAsString);
  } catch (IllegalArgumentException e) {
    if (ignoreMalformed.value()) {
      return;
    } else {
      throw e;
    }
  }

  if (context.includeInAll(includeInAll, this)) {
    context.allEntries().addText(fieldType().name(), dateAsString, fieldType().boost());
  }
  // Index the timestamp as a LongPoint, and optionally as doc values and a stored field.
  if (fieldType().indexOptions() != IndexOptions.NONE) {
    fields.add(new LongPoint(fieldType().name(), timestamp));
  }
  if (fieldType().hasDocValues()) {
    fields.add(new SortedNumericDocValuesField(fieldType().name(), timestamp));
  }
  if (fieldType().stored()) {
    fields.add(new StoredField(fieldType().name(), timestamp));
  }
}
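Because the mapper indexes the parsed timestamp as a LongPoint under the field's name, date range filters on that field ultimately become numeric point range queries. A minimal sketch of the query side follows; the field name and epoch-millisecond bounds are illustrative and are not part of the mapper code above:

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.search.Query;

public class DateRangeQuerySketch {
  // Field name and bounds are illustrative; DateFieldMapper indexes the parsed
  // timestamp as a LongPoint under fieldType().name().
  public static Query newDateRange(String field, long fromMillis, long toMillis) {
    return LongPoint.newRangeQuery(field, fromMillis, toMillis);
  }

  public static void main(String[] args) {
    // Example: all of January 2017 in UTC, expressed as epoch milliseconds.
    Query january2017 = newDateRange("timestamp", 1483228800000L, 1485907199999L);
    System.out.println(january2017);
  }
}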