Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
In the class BaseStoredFieldsFormatTestCase, the method testBulkMergeWithDeletes:
public void testBulkMergeWithDeletes() throws IOException {
  final int numDocs = atLeast(200);
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
  for (int i = 0; i < numDocs; ++i) {
    Document doc = new Document();
    doc.add(new StringField("id", Integer.toString(i), Store.YES));
    doc.add(new StoredField("f", TestUtil.randomSimpleString(random())));
    w.addDocument(doc);
  }
  final int deleteCount = TestUtil.nextInt(random(), 5, numDocs);
  for (int i = 0; i < deleteCount; ++i) {
    final int id = random().nextInt(numDocs);
    w.deleteDocuments(new Term("id", Integer.toString(id)));
  }
  w.commit();
  w.close();
  w = new RandomIndexWriter(random(), dir);
  w.forceMerge(TestUtil.nextInt(random(), 1, 3));
  w.commit();
  w.close();
  TestUtil.checkIndex(dir);
  dir.close();
}
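A minimal read-back sketch (not part of the original test) showing how the StoredField values can be fetched after the bulk merge; the readStoredFields helper is hypothetical and assumes it is called on the Directory before dir.close():

private void readStoredFields(Directory dir) throws IOException {
  // Hypothetical helper: iterate the merged index and fetch the "f" StoredField of each live document.
  try (DirectoryReader reader = DirectoryReader.open(dir)) {
    for (LeafReaderContext ctx : reader.leaves()) {
      LeafReader leaf = ctx.reader();
      Bits liveDocs = leaf.getLiveDocs();
      for (int docID = 0; docID < leaf.maxDoc(); docID++) {
        if (liveDocs != null && !liveDocs.get(docID)) {
          continue; // skip documents removed by deleteDocuments
        }
        Document stored = leaf.document(docID);
        String f = stored.get("f"); // value written as new StoredField("f", ...) above
      }
    }
  }
}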
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
In the class PointVectorStrategy, the method createIndexableFields:
/** @see #createIndexableFields(org.locationtech.spatial4j.shape.Shape) */
public Field[] createIndexableFields(Point point) {
  Field[] fields = new Field[fieldsLen];
  int idx = -1;
  if (hasStored) {
    fields[++idx] = new StoredField(fieldNameX, point.getX());
    fields[++idx] = new StoredField(fieldNameY, point.getY());
  }
  if (hasDocVals) {
    fields[++idx] = new DoubleDocValuesField(fieldNameX, point.getX());
    fields[++idx] = new DoubleDocValuesField(fieldNameY, point.getY());
  }
  if (hasPointVals) {
    fields[++idx] = new DoublePoint(fieldNameX, point.getX());
    fields[++idx] = new DoublePoint(fieldNameY, point.getY());
  }
  if (legacyNumericFieldType != null) {
    fields[++idx] = new LegacyDoubleField(fieldNameX, point.getX(), legacyNumericFieldType);
    fields[++idx] = new LegacyDoubleField(fieldNameY, point.getY(), legacyNumericFieldType);
  }
  assert idx == fields.length - 1;
  return fields;
}
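A minimal usage sketch of the method above; the indexPoint helper and its parameters are illustrative, assuming the caller already has a configured PointVectorStrategy and a spatial4j Point:

private Document indexPoint(PointVectorStrategy strategy, Point point) {
  Document doc = new Document();
  // createIndexableFields returns StoredField, DoubleDocValuesField and/or DoublePoint
  // instances depending on how the strategy was configured; the caller adds them all.
  for (Field field : strategy.createIndexableFields(point)) {
    doc.add(field);
  }
  return doc;
}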
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
In the class BBoxStrategy, the method createIndexableFields:
private Field[] createIndexableFields(Rectangle bbox) {
  Field[] fields = new Field[fieldsLen];
  int idx = -1;
  if (hasStored) {
    fields[++idx] = new StoredField(field_minX, bbox.getMinX());
    fields[++idx] = new StoredField(field_minY, bbox.getMinY());
    fields[++idx] = new StoredField(field_maxX, bbox.getMaxX());
    fields[++idx] = new StoredField(field_maxY, bbox.getMaxY());
  }
  if (hasDocVals) {
    fields[++idx] = new DoubleDocValuesField(field_minX, bbox.getMinX());
    fields[++idx] = new DoubleDocValuesField(field_minY, bbox.getMinY());
    fields[++idx] = new DoubleDocValuesField(field_maxX, bbox.getMaxX());
    fields[++idx] = new DoubleDocValuesField(field_maxY, bbox.getMaxY());
  }
  if (hasPointVals) {
    fields[++idx] = new DoublePoint(field_minX, bbox.getMinX());
    fields[++idx] = new DoublePoint(field_minY, bbox.getMinY());
    fields[++idx] = new DoublePoint(field_maxX, bbox.getMaxX());
    fields[++idx] = new DoublePoint(field_maxY, bbox.getMaxY());
  }
  if (legacyNumericFieldType != null) {
    fields[++idx] = new LegacyDoubleField(field_minX, bbox.getMinX(), legacyNumericFieldType);
    fields[++idx] = new LegacyDoubleField(field_minY, bbox.getMinY(), legacyNumericFieldType);
    fields[++idx] = new LegacyDoubleField(field_maxX, bbox.getMaxX(), legacyNumericFieldType);
    fields[++idx] = new LegacyDoubleField(field_maxY, bbox.getMaxY(), legacyNumericFieldType);
  }
  if (xdlFieldType != null) {
    fields[++idx] = new Field(field_xdl, bbox.getCrossesDateLine() ? "T" : "F", xdlFieldType);
  }
  assert idx == fields.length - 1;
  return fields;
}
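A small sketch of reading the box back from a stored document; the readStoredBBox helper and its parameters are illustrative and assume the strategy was built with stored fields enabled (hasStored == true):

private double[] readStoredBBox(Document doc, String fieldMinX, String fieldMinY, String fieldMaxX, String fieldMaxY) {
  // StoredField(name, double) keeps the raw numeric value, so numericValue() recovers it.
  return new double[] {
      doc.getField(fieldMinX).numericValue().doubleValue(), // minX
      doc.getField(fieldMinY).numericValue().doubleValue(), // minY
      doc.getField(fieldMaxX).numericValue().doubleValue(), // maxX
      doc.getField(fieldMaxY).numericValue().doubleValue()  // maxY
  };
}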
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
In the class TestLucene70DocValuesFormat, the method doTestSparseNumericBlocksOfVariousBitsPerValue:
private void doTestSparseNumericBlocksOfVariousBitsPerValue(double density) throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
  conf.setMaxBufferedDocs(atLeast(Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE));
  conf.setRAMBufferSizeMB(-1);
  conf.setMergePolicy(newLogMergePolicy(random().nextBoolean()));
  IndexWriter writer = new IndexWriter(dir, conf);
  Document doc = new Document();
  Field storedField = newStringField("stored", "", Field.Store.YES);
  Field dvField = new NumericDocValuesField("dv", 0);
  doc.add(storedField);
  doc.add(dvField);
  final int numDocs = atLeast(Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE * 3);
  final LongSupplier longs = blocksOfVariousBPV();
  for (int i = 0; i < numDocs; i++) {
    if (random().nextDouble() > density) {
      writer.addDocument(new Document());
      continue;
    }
    long value = longs.getAsLong();
    storedField.setStringValue(Long.toString(value));
    dvField.setLongValue(value);
    writer.addDocument(doc);
  }
  writer.forceMerge(1);
  writer.close();
  // compare
  DirectoryReader ir = DirectoryReader.open(dir);
  TestUtil.checkReader(ir);
  for (LeafReaderContext context : ir.leaves()) {
    LeafReader r = context.reader();
    NumericDocValues docValues = DocValues.getNumeric(r, "dv");
    docValues.nextDoc();
    for (int i = 0; i < r.maxDoc(); i++) {
      String storedValue = r.document(i).get("stored");
      if (storedValue == null) {
        assertTrue(docValues.docID() > i);
      } else {
        assertEquals(i, docValues.docID());
        assertEquals(Long.parseLong(storedValue), docValues.longValue());
        docValues.nextDoc();
      }
    }
    assertEquals(DocIdSetIterator.NO_MORE_DOCS, docValues.docID());
  }
  ir.close();
  dir.close();
}
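The same stored-versus-doc-values comparison can also be written with advanceExact instead of tracking docID() by hand; a minimal sketch under the same assumptions (the "stored" and "dv" fields and JUnit asserts), with the compareWithAdvanceExact helper name being illustrative:

private void compareWithAdvanceExact(LeafReader r) throws IOException {
  NumericDocValues docValues = DocValues.getNumeric(r, "dv");
  for (int i = 0; i < r.maxDoc(); i++) {
    String storedValue = r.document(i).get("stored");
    if (docValues.advanceExact(i)) {
      // document i carries a doc value; it must match the stored copy
      assertEquals(Long.parseLong(storedValue), docValues.longValue());
    } else {
      // sparse case: neither a doc value nor a stored value was written for this document
      assertNull(storedValue);
    }
  }
}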
Use of org.apache.lucene.document.StoredField in project lucene-solr by apache.
In the class TestLucene70DocValuesFormat, the method doTestSortedNumericBlocksOfVariousBitsPerValue:
private void doTestSortedNumericBlocksOfVariousBitsPerValue(LongSupplier counts) throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
  conf.setMaxBufferedDocs(atLeast(Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE));
  conf.setRAMBufferSizeMB(-1);
  conf.setMergePolicy(newLogMergePolicy(random().nextBoolean()));
  IndexWriter writer = new IndexWriter(dir, conf);
  final int numDocs = atLeast(Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE * 3);
  final LongSupplier values = blocksOfVariousBPV();
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    int valueCount = (int) counts.getAsLong();
    long[] valueArray = new long[valueCount];
    for (int j = 0; j < valueCount; j++) {
      long value = values.getAsLong();
      valueArray[j] = value;
      doc.add(new SortedNumericDocValuesField("dv", value));
    }
    Arrays.sort(valueArray);
    for (int j = 0; j < valueCount; j++) {
      doc.add(new StoredField("stored", Long.toString(valueArray[j])));
    }
    writer.addDocument(doc);
    if (random().nextInt(31) == 0) {
      writer.commit();
    }
  }
  writer.forceMerge(1);
  writer.close();
  // compare
  DirectoryReader ir = DirectoryReader.open(dir);
  TestUtil.checkReader(ir);
  for (LeafReaderContext context : ir.leaves()) {
    LeafReader r = context.reader();
    SortedNumericDocValues docValues = DocValues.getSortedNumeric(r, "dv");
    for (int i = 0; i < r.maxDoc(); i++) {
      if (i > docValues.docID()) {
        docValues.nextDoc();
      }
      String[] expected = r.document(i).getValues("stored");
      if (i < docValues.docID()) {
        assertEquals(0, expected.length);
      } else {
        String[] actual = new String[docValues.docValueCount()];
        for (int j = 0; j < actual.length; j++) {
          actual[j] = Long.toString(docValues.nextValue());
        }
        assertArrayEquals(expected, actual);
      }
    }
  }
  ir.close();
  dir.close();
}
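As with the sparse test, the verification loop can be expressed with advanceExact; a minimal sketch under the same assumptions ("dv" and "stored" fields written per document as above), with the compareSortedWithAdvanceExact helper name being illustrative:

private void compareSortedWithAdvanceExact(LeafReader r) throws IOException {
  SortedNumericDocValues docValues = DocValues.getSortedNumeric(r, "dv");
  for (int i = 0; i < r.maxDoc(); i++) {
    String[] expected = r.document(i).getValues("stored");
    if (docValues.advanceExact(i)) {
      String[] actual = new String[docValues.docValueCount()];
      for (int j = 0; j < actual.length; j++) {
        actual[j] = Long.toString(docValues.nextValue()); // values come back in sorted order
      }
      assertArrayEquals(expected, actual);
    } else {
      assertEquals(0, expected.length); // no doc values, so no stored copies either
    }
  }
}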