Use of org.apache.lucene.document.BinaryDocValuesField in project lucene-solr by apache: class TestMixedDocValuesUpdates, method testStressMultiThreading.
public void testStressMultiThreading() throws Exception {
  final Directory dir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
  final IndexWriter writer = new IndexWriter(dir, conf);
  // create index
  final int numFields = TestUtil.nextInt(random(), 2, 4);
  final int numThreads = TestUtil.nextInt(random(), 3, 6);
  final int numDocs = atLeast(2000);
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    doc.add(new StringField("id", "doc" + i, Store.NO));
    double group = random().nextDouble();
    String g;
    if (group < 0.1)
      g = "g0";
    else if (group < 0.5)
      g = "g1";
    else if (group < 0.8)
      g = "g2";
    else
      g = "g3";
    doc.add(new StringField("updKey", g, Store.NO));
    for (int j = 0; j < numFields; j++) {
      long value = random().nextInt();
      doc.add(new BinaryDocValuesField("f" + j, TestBinaryDocValuesUpdates.toBytes(value)));
      // control field: always holds twice the value of the matching "f" field
      doc.add(new NumericDocValuesField("cf" + j, value * 2));
    }
    writer.addDocument(doc);
  }
  final CountDownLatch done = new CountDownLatch(numThreads);
  final AtomicInteger numUpdates = new AtomicInteger(atLeast(100));
  // each thread updates fields as well as reopens an NRT reader
  Thread[] threads = new Thread[numThreads];
  for (int i = 0; i < threads.length; i++) {
    threads[i] = new Thread("UpdateThread-" + i) {
      @Override
      public void run() {
        DirectoryReader reader = null;
        boolean success = false;
        try {
          Random random = random();
          while (numUpdates.getAndDecrement() > 0) {
            double group = random.nextDouble();
            Term t;
            if (group < 0.1)
              t = new Term("updKey", "g0");
            else if (group < 0.5)
              t = new Term("updKey", "g1");
            else if (group < 0.8)
              t = new Term("updKey", "g2");
            else
              t = new Term("updKey", "g3");
            // System.out.println("[" + Thread.currentThread().getName() + "] numUpdates=" + numUpdates + " updateTerm=" + t);
            int field = random().nextInt(numFields);
            final String f = "f" + field;
            final String cf = "cf" + field;
            long updValue = random.nextInt();
            // System.err.println("[" + Thread.currentThread().getName() + "] t=" + t + ", f=" + f + ", updValue=" + updValue);
            writer.updateDocValues(t, new BinaryDocValuesField(f, TestBinaryDocValuesUpdates.toBytes(updValue)), new NumericDocValuesField(cf, updValue * 2));
            if (random.nextDouble() < 0.2) {
              // delete a random document
              int doc = random.nextInt(numDocs);
              // System.out.println("[" + Thread.currentThread().getName() + "] deleteDoc=doc" + doc);
              writer.deleteDocuments(new Term("id", "doc" + doc));
            }
            if (random.nextDouble() < 0.05) {
              // commit once every 20 updates on average
              // System.out.println("[" + Thread.currentThread().getName() + "] commit");
              writer.commit();
            }
            if (random.nextDouble() < 0.1) {
              // reopen the NRT reader (applies updates), once every 10 updates on average
              if (reader == null) {
                // System.out.println("[" + Thread.currentThread().getName() + "] open NRT");
                reader = DirectoryReader.open(writer);
              } else {
                // System.out.println("[" + Thread.currentThread().getName() + "] reopen NRT");
                DirectoryReader r2 = DirectoryReader.openIfChanged(reader, writer);
                if (r2 != null) {
                  reader.close();
                  reader = r2;
                }
              }
            }
          }
          // System.out.println("[" + Thread.currentThread().getName() + "] DONE");
          success = true;
        } catch (IOException e) {
          throw new RuntimeException(e);
        } finally {
          if (reader != null) {
            try {
              reader.close();
            } catch (IOException e) {
              // rethrow only if the loop succeeded; otherwise suppress, so the
              // original exception is not masked
              if (success) {
                throw new RuntimeException(e);
              }
            }
          }
          done.countDown();
        }
      }
    };
  }
  for (Thread t : threads) t.start();
  done.await();
  writer.close();
  DirectoryReader reader = DirectoryReader.open(dir);
  BytesRef scratch = new BytesRef(); // only referenced by the commented-out debug output below
  for (LeafReaderContext context : reader.leaves()) {
    LeafReader r = context.reader();
    for (int i = 0; i < numFields; i++) {
      BinaryDocValues bdv = r.getBinaryDocValues("f" + i);
      NumericDocValues control = r.getNumericDocValues("cf" + i);
      Bits liveDocs = r.getLiveDocs();
      for (int j = 0; j < r.maxDoc(); j++) {
        if (liveDocs == null || liveDocs.get(j)) {
          assertEquals(j, control.advance(j));
          long ctrlValue = control.longValue();
          assertEquals(j, bdv.advance(j));
          long bdvValue = TestBinaryDocValuesUpdates.getValue(bdv) * 2;
          // if (ctrlValue != bdvValue) {
          //   System.out.println("seg=" + r + ", f=f" + i + ", doc=" + j + ", group=" + r.document(j).get("updKey") + ", ctrlValue=" + ctrlValue + ", bdvBytes=" + scratch);
          // }
          assertEquals(ctrlValue, bdvValue);
        }
      }
    }
  }
  reader.close();
  dir.close();
}
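The snippet above round-trips long values through BinaryDocValuesField via the TestBinaryDocValuesUpdates.toBytes and getValue helpers, which are not shown here. A minimal sketch of such a pair, assuming a vLong-style variable-length encoding (an assumption; the exact lucene-solr helpers may differ in detail):

// Sketch only: encode a long into a BytesRef and decode it back from a
// BinaryDocValues iterator positioned on the current document.
static BytesRef toBytes(long value) {
  BytesRef bytes = new BytesRef(10); // a negative long needs at most 10 vLong bytes
  while ((value & ~0x7FL) != 0L) {
    bytes.bytes[bytes.length++] = (byte) ((value & 0x7FL) | 0x80L);
    value >>>= 7;
  }
  bytes.bytes[bytes.length++] = (byte) value;
  return bytes;
}

static long getValue(BinaryDocValues bdv) throws IOException {
  BytesRef bytes = bdv.binaryValue(); // value for the doc the iterator is positioned on
  int idx = bytes.offset;
  byte b = bytes.bytes[idx++];
  long value = b & 0x7FL;
  for (int shift = 7; (b & 0x80L) != 0; shift += 7) {
    b = bytes.bytes[idx++];
    value |= (b & 0x7FL) << shift;
  }
  return value;
}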
Use of org.apache.lucene.document.BinaryDocValuesField in project lucene-solr by apache: class TestMultiDocValues, method testBinary.
public void testBinary() throws Exception {
  Directory dir = newDirectory();
  Document doc = new Document();
  Field field = new BinaryDocValuesField("bytes", new BytesRef());
  doc.add(field);
  IndexWriterConfig iwc = newIndexWriterConfig(random(), null);
  iwc.setMergePolicy(newLogMergePolicy());
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
  int numDocs = TEST_NIGHTLY ? atLeast(500) : atLeast(50);
  for (int i = 0; i < numDocs; i++) {
    BytesRef ref = new BytesRef(TestUtil.randomUnicodeString(random()));
    field.setBytesValue(ref);
    iw.addDocument(doc);
    if (random().nextInt(17) == 0) {
      iw.commit();
    }
  }
  DirectoryReader ir = iw.getReader();
  iw.forceMerge(1);
  DirectoryReader ir2 = iw.getReader();
  LeafReader merged = getOnlyLeafReader(ir2);
  iw.close();
  BinaryDocValues multi = MultiDocValues.getBinaryValues(ir, "bytes");
  BinaryDocValues single = merged.getBinaryDocValues("bytes");
  for (int i = 0; i < numDocs; i++) {
    assertEquals(i, multi.nextDoc());
    assertEquals(i, single.nextDoc());
    final BytesRef expected = BytesRef.deepCopyOf(single.binaryValue());
    final BytesRef actual = multi.binaryValue();
    assertEquals(expected, actual);
  }
  testRandomAdvance(merged.getBinaryDocValues("bytes"), MultiDocValues.getBinaryValues(ir, "bytes"));
  testRandomAdvanceExact(merged.getBinaryDocValues("bytes"), MultiDocValues.getBinaryValues(ir, "bytes"), merged.maxDoc());
  ir.close();
  ir2.close();
  dir.close();
}
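Note that testBinary steps both doc-values instances forward with nextDoc() before reading binaryValue(): doc values are consumed through an iterator API (introduced in Lucene 7). A defensive read loop for a field that may be absent on some documents could look like the following sketch (leafReader is an illustrative name, not part of the snippet above):

// Sketch: iterate a BinaryDocValues field with advanceExact, which reports
// whether the current document actually has a value.
BinaryDocValues values = leafReader.getBinaryDocValues("bytes");
if (values != null) { // null when no document in this leaf has the field
  for (int docId = 0; docId < leafReader.maxDoc(); docId++) {
    if (values.advanceExact(docId)) {
      BytesRef value = values.binaryValue();
      // use value; copy it (BytesRef.deepCopyOf) if it must outlive this iteration
    }
  }
}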
Use of org.apache.lucene.document.BinaryDocValuesField in project lucene-solr by apache: class TestMixedDocValuesUpdates, method testManyReopensAndFields.
public void testManyReopensAndFields() throws Exception {
  Directory dir = newDirectory();
  final Random random = random();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
  LogMergePolicy lmp = newLogMergePolicy();
  lmp.setMergeFactor(3); // merge often
  conf.setMergePolicy(lmp);
  IndexWriter writer = new IndexWriter(dir, conf);
  final boolean isNRT = random.nextBoolean();
  DirectoryReader reader;
  if (isNRT) {
    reader = DirectoryReader.open(writer);
  } else {
    writer.commit();
    reader = DirectoryReader.open(dir);
  }
  final int numFields = random.nextInt(4) + 3; // 3-6
  final int numNDVFields = random.nextInt(numFields / 2) + 1; // 1-3
  final long[] fieldValues = new long[numFields];
  for (int i = 0; i < fieldValues.length; i++) {
    fieldValues[i] = 1;
  }
  int numRounds = atLeast(15);
  int docID = 0;
  for (int i = 0; i < numRounds; i++) {
    int numDocs = atLeast(5);
    // System.out.println("TEST: round=" + i + ", numDocs=" + numDocs);
    for (int j = 0; j < numDocs; j++) {
      Document doc = new Document();
      doc.add(new StringField("id", "doc-" + docID, Store.NO));
      // update key
      doc.add(new StringField("key", "all", Store.NO));
      // add all fields with their current value
      for (int f = 0; f < fieldValues.length; f++) {
        if (f < numNDVFields) {
          doc.add(new NumericDocValuesField("f" + f, fieldValues[f]));
        } else {
          doc.add(new BinaryDocValuesField("f" + f, TestBinaryDocValuesUpdates.toBytes(fieldValues[f])));
        }
      }
      writer.addDocument(doc);
      ++docID;
    }
    int fieldIdx = random.nextInt(fieldValues.length);
    String updateField = "f" + fieldIdx;
    if (fieldIdx < numNDVFields) {
      writer.updateNumericDocValue(new Term("key", "all"), updateField, ++fieldValues[fieldIdx]);
    } else {
      writer.updateBinaryDocValue(new Term("key", "all"), updateField, TestBinaryDocValuesUpdates.toBytes(++fieldValues[fieldIdx]));
    }
    if (random.nextDouble() < 0.2) {
      // might also delete an already deleted document, which is fine
      int deleteDoc = random.nextInt(docID);
      writer.deleteDocuments(new Term("id", "doc-" + deleteDoc));
      // System.out.println("[" + Thread.currentThread().getName() + "]: deleted document: doc-" + deleteDoc);
    }
    // verify reader
    if (!isNRT) {
      writer.commit();
    }
    // System.out.println("[" + Thread.currentThread().getName() + "]: reopen reader: " + reader);
    DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
    assertNotNull(newReader);
    reader.close();
    reader = newReader;
    // System.out.println("[" + Thread.currentThread().getName() + "]: reopened reader: " + reader);
    // we delete at most one document per round
    assertTrue(reader.numDocs() > 0);
    for (LeafReaderContext context : reader.leaves()) {
      LeafReader r = context.reader();
      // System.out.println(((SegmentReader) r).getSegmentName());
      Bits liveDocs = r.getLiveDocs();
      for (int field = 0; field < fieldValues.length; field++) {
        String f = "f" + field;
        BinaryDocValues bdv = r.getBinaryDocValues(f);
        NumericDocValues ndv = r.getNumericDocValues(f);
        if (field < numNDVFields) {
          assertNotNull(ndv);
          assertNull(bdv);
        } else {
          assertNull(ndv);
          assertNotNull(bdv);
        }
        int maxDoc = r.maxDoc();
        for (int doc = 0; doc < maxDoc; doc++) {
          if (liveDocs == null || liveDocs.get(doc)) {
            // System.out.println("doc=" + (doc + context.docBase) + " f='" + f + "' value=" + getValue(bdv, doc, scratch));
            if (field < numNDVFields) {
              assertEquals(doc, ndv.advance(doc));
              assertEquals("invalid numeric value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], ndv.longValue());
            } else {
              assertEquals(doc, bdv.advance(doc));
              assertEquals("invalid binary value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], TestBinaryDocValuesUpdates.getValue(bdv));
            }
          }
        }
      }
    }
    // System.out.println();
  }
  writer.close();
  IOUtils.close(reader, dir);
}
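This test exercises both update flavors through one code path: updateNumericDocValue for the first numNDVFields fields and updateBinaryDocValue (fed by toBytes) for the rest. Both rewrite the doc values of every live document matching the term without reindexing, which is why a single expected value per field, fieldValues[field], can be asserted across all live documents in every segment.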
Use of org.apache.lucene.document.BinaryDocValuesField in project lucene-solr by apache: class TestOrdinalMappingLeafReader, method buildIndexWithFacets.
private void buildIndexWithFacets(Directory indexDir, Directory taxoDir, boolean asc) throws IOException {
  IndexWriterConfig config = newIndexWriterConfig(null);
  RandomIndexWriter writer = new RandomIndexWriter(random(), indexDir, config);
  DirectoryTaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
  for (int i = 1; i <= NUM_DOCS; i++) {
    Document doc = new Document();
    for (int j = i; j <= NUM_DOCS; j++) {
      int facetValue = asc ? j : NUM_DOCS - j;
      doc.add(new FacetField("tag", Integer.toString(facetValue)));
    }
    // add a facet under the default dim config
    doc.add(new FacetField("id", Integer.toString(i)));
    // make sure OrdinalMappingLeafReader ignores non-facet BinaryDocValues fields
    doc.add(new BinaryDocValuesField("bdv", new BytesRef(Integer.toString(i))));
    doc.add(new BinaryDocValuesField("cbdv", new BytesRef(Integer.toString(i * 2))));
    writer.addDocument(facetConfig.build(taxonomyWriter, doc));
  }
  taxonomyWriter.commit();
  taxonomyWriter.close();
  writer.commit();
  writer.close();
}
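The facetConfig used in the addDocument call is a field of TestOrdinalMappingLeafReader that this snippet does not show. A plausible setup (an assumption, not the verbatim declaration) must at least mark the "tag" dimension multi-valued, since each document receives several "tag" facets:

// Assumption: a FacetsConfig roughly like this backs the snippet above.
FacetsConfig facetConfig = new FacetsConfig();
facetConfig.setMultiValued("tag", true); // several "tag" values per document
// "id" stays under the default dimension config, as the inline comment notes.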
Use of org.apache.lucene.document.BinaryDocValuesField in project lucene-solr by apache: class SerializedDVStrategy, method createIndexableFields.
@Override
public Field[] createIndexableFields(Shape shape) {
  // 50% headroom over the last buffer size
  int bufSize = Math.max(128, (int) (this.indexLastBufSize * 1.5));
  ByteArrayOutputStream byteStream = new ByteArrayOutputStream(bufSize);
  // receiver of byteStream's bytes
  final BytesRef bytesRef = new BytesRef();
  try {
    ctx.getBinaryCodec().writeShape(new DataOutputStream(byteStream), shape);
    // a hack to avoid the redundant byte[] copy that byteStream.toByteArray() would make
    byteStream.writeTo(new FilterOutputStream(null /* wrapped stream not used */) {
      @Override
      public void write(byte[] b, int off, int len) throws IOException {
        bytesRef.bytes = b;
        bytesRef.offset = off;
        bytesRef.length = len;
      }
    });
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  // cache heuristic for the next call
  this.indexLastBufSize = bytesRef.length;
  return new Field[] { new BinaryDocValuesField(getFieldName(), bytesRef) };
}
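The FilterOutputStream override is a zero-copy trick: ByteArrayOutputStream.writeTo(OutputStream) hands its internal buffer, offset, and length straight to write(byte[], int, int), so the serialized shape bytes never get duplicated the way toByteArray() would duplicate them, and the null constructor argument is safe because the overridden method never touches the wrapped stream. A hedged usage sketch (strategy construction, field name, and the point value are illustrative, assuming spatial4j's SpatialContext):

// Sketch: index a shape's serialized form as a BinaryDocValuesField.
SpatialContext ctx = SpatialContext.GEO;
SerializedDVStrategy strategy = new SerializedDVStrategy(ctx, "shape");
Document doc = new Document();
for (Field f : strategy.createIndexableFields(ctx.makePoint(-79.4, 43.7))) {
  doc.add(f); // one BinaryDocValuesField holding the serialized shape
}
indexWriter.addDocument(doc); // indexWriter: an open IndexWriter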