Use of org.apache.lucene.util.packed.MonotonicBlockPackedReader in the Apache lucene-solr project: class MemoryDocValuesProducer, method getSortedNumeric.
// Returns a SortedNumericDocValues view for the given field, handling both the
// single-valued and multi-valued encodings recorded in the SortedNumericEntry.
@Override
public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
SortedNumericEntry entry = sortedNumerics.get(field.name);
if (entry.singleton) {
// Single-valued case: wrap the plain numeric values together with the
// docs-with-field bits so that documents missing the field are reported correctly.
LegacyNumericDocValues values = getNumericNonIterator(field);
NumericEntry ne = numerics.get(field.name);
Bits docsWithField = getMissingBits(field, ne.missingOffset, ne.missingBytes);
return DocValues.singleton(new LegacyNumericDocValuesWrapper(docsWithField, values));
} else {
final LegacyNumericDocValues values = getNumericNonIterator(field);
final MonotonicBlockPackedReader addr;
// Lazily load the per-document address table, synchronized so concurrent
// readers share a single cached instance per field.
synchronized (this) {
MonotonicBlockPackedReader res = addresses.get(field.name);
if (res == null) {
// Clone the data input so the seek does not disturb other readers.
IndexInput data = this.data.clone();
data.seek(entry.addressOffset);
res = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.valueCount, false);
if (!merging) {
// Cache and account for heap usage only outside of merges;
// merge-time instances are short-lived and not worth retaining.
addresses.put(field.name, res);
ramBytesUsed.addAndGet(res.ramBytesUsed());
}
}
addr = res;
}
// addr maps doc -> start offset into the flat values array; the entry for
// doc+1 is the exclusive end, so count = end - start.
return new LegacySortedNumericDocValuesWrapper(new LegacySortedNumericDocValues() {
int startOffset;
int endOffset;
@Override
public void setDocument(int doc) {
startOffset = (int) addr.get(doc);
endOffset = (int) addr.get(doc + 1);
}
@Override
public long valueAt(int index) {
return values.get(startOffset + index);
}
@Override
public int count() {
return (endOffset - startOffset);
}
}, maxDoc);
}
}
Use of org.apache.lucene.util.packed.MonotonicBlockPackedReader in the Apache lucene-solr project: class Lucene54DocValuesProducer, method getIntervalInstance.
/**
 * Returns an address instance for prefix-compressed binary values, lazily
 * decoding and caching the interval address table on first use for the field.
 */
private synchronized MonotonicBlockPackedReader getIntervalInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader cached = addressInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  // First access for this field: read the addresses from the data file.
  data.seek(bytes.addressesOffset);
  final long numIntervals = (bytes.count + INTERVAL_MASK) >>> INTERVAL_SHIFT;
  MonotonicBlockPackedReader loaded =
      MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
  if (!merging) {
    // Cache and account for heap usage, except for short-lived merge readers.
    addressInstances.put(field.name, loaded);
    ramBytesUsed.addAndGet(loaded.ramBytesUsed() + Integer.BYTES);
  }
  return loaded;
}
Use of org.apache.lucene.util.packed.MonotonicBlockPackedReader in the Apache lucene-solr project: class MemoryDocValuesProducer, method getLegacyBinary.
/**
 * Returns a random-access binary doc-values view for {@code field}, loading and
 * caching the backing paged bytes (and, for variable-length values, the address
 * reader) on first access.
 */
private LegacyBinaryDocValues getLegacyBinary(FieldInfo field) throws IOException {
  BinaryEntry entry = binaries.get(field.name);

  BytesAndAddresses cached;
  synchronized (this) {
    cached = pagedBytesInstances.get(field.name);
    if (cached == null) {
      cached = loadBinary(field);
      if (!merging) {
        // Retain the loaded instance only outside of merges.
        pagedBytesInstances.put(field.name, cached);
      }
    }
  }
  final PagedBytes.Reader reader = cached.reader;
  final MonotonicBlockPackedReader addrs = cached.addresses;

  if (addrs != null) {
    // Variable-length values: addrs.get(doc) is the exclusive end offset of
    // doc's slice; the previous entry (or 0 for doc 0) is its start.
    return new LegacyBinaryDocValues() {
      final BytesRef scratch = new BytesRef();
      @Override
      public BytesRef get(int docID) {
        long start = docID == 0 ? 0 : addrs.get(docID - 1);
        long end = addrs.get(docID);
        reader.fillSlice(scratch, start, (int) (end - start));
        return scratch;
      }
    };
  }

  // Fixed-length values: every document occupies exactly minLength bytes.
  assert entry.minLength == entry.maxLength;
  final int length = entry.minLength;
  return new LegacyBinaryDocValues() {
    final BytesRef scratch = new BytesRef();
    @Override
    public BytesRef get(int docID) {
      reader.fillSlice(scratch, length * (long) docID, length);
      return scratch;
    }
  };
}
Use of org.apache.lucene.util.packed.MonotonicBlockPackedReader in the Apache lucene-solr project: class Lucene54DocValuesProducer, method getCompressedBinary.
/** Returns a doc-values view over the prefix-compressed terms of {@code field}. */
private LegacyBinaryDocValues getCompressedBinary(FieldInfo field, final BinaryEntry bytes) throws IOException {
  final MonotonicBlockPackedReader intervalAddresses = getIntervalInstance(field, bytes);
  final ReverseTermsIndex reverseIndex = getReverseIndexInstance(field, bytes);
  // The empty case never reaches this method, so at least one interval exists.
  assert intervalAddresses.size() > 0;
  final long termsLength = bytes.addressesOffset - bytes.offset;
  IndexInput termsSlice = data.slice("terms", bytes.offset, termsLength);
  return new CompressedBinaryDocValues(bytes, intervalAddresses, reverseIndex, termsSlice);
}
Aggregations