Use of org.apache.lucene.store.IndexInput in project lucene-solr by Apache: the loadBinary method of the class MemoryDocValuesProducer.
private BytesAndAddresses loadBinary(FieldInfo field) throws IOException {
  BytesAndAddresses bytesAndAddresses = new BytesAndAddresses();
  BinaryEntry entry = binaries.get(field.name);
  IndexInput data = this.data.clone();
  data.seek(entry.offset);
  // copy the raw value bytes into 2^16-byte pages and freeze them for random access
  PagedBytes bytes = new PagedBytes(16);
  bytes.copy(data, entry.numBytes);
  bytesAndAddresses.reader = bytes.freeze(true);
  if (!merging) {
    ramBytesUsed.addAndGet(bytesAndAddresses.reader.ramBytesUsed());
  }
  if (entry.minLength != entry.maxLength) {
    // variable-length values: skip past the missing-docs bits and load the per-document address table
    data.seek(data.getFilePointer() + entry.missingBytes);
    bytesAndAddresses.addresses = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, maxDoc, false);
    if (!merging) {
      ramBytesUsed.addAndGet(bytesAndAddresses.addresses.ramBytesUsed());
    }
  }
  return bytesAndAddresses;
}
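The copy-and-freeze pattern above is what makes this producer memory-resident: PagedBytes bulk-copies the on-disk bytes into fixed-size pages, and freeze(true) turns them into an immutable reader whose footprint is then added to ramBytesUsed. Below is a minimal, self-contained sketch of that pattern in isolation; it assumes a Lucene version that provides ByteBuffersDirectory (7.1 or later), and the class name PagedBytesCopyDemo and the file name "blob" are illustrative only.

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.PagedBytes;

public class PagedBytesCopyDemo {
  public static void main(String[] args) throws IOException {
    byte[] payload = "some binary doc value".getBytes(StandardCharsets.UTF_8);
    try (Directory dir = new ByteBuffersDirectory()) {
      // write a small file so there is something to copy from
      try (IndexOutput out = dir.createOutput("blob", IOContext.DEFAULT)) {
        out.writeBytes(payload, payload.length);
      }
      try (IndexInput in = dir.openInput("blob", IOContext.DEFAULT)) {
        PagedBytes bytes = new PagedBytes(16);        // 2^16-byte pages, as in loadBinary
        bytes.copy(in, payload.length);               // bulk-copy straight from the IndexInput
        PagedBytes.Reader reader = bytes.freeze(true);
        BytesRef slice = new BytesRef();
        reader.fillSlice(slice, 0, payload.length);   // random access into the frozen pages
        System.out.println(slice.utf8ToString() + " (" + reader.ramBytesUsed() + " bytes in RAM)");
      }
    }
  }
}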
Use of org.apache.lucene.store.IndexInput in project lucene-solr by Apache: the getSortedNumeric method of the class MemoryDocValuesProducer.
@Override
public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
  SortedNumericEntry entry = sortedNumerics.get(field.name);
  if (entry.singleton) {
    // single-valued field: wrap the plain numeric values together with the missing-docs bits
    LegacyNumericDocValues values = getNumericNonIterator(field);
    NumericEntry ne = numerics.get(field.name);
    Bits docsWithField = getMissingBits(field, ne.missingOffset, ne.missingBytes);
    return DocValues.singleton(new LegacyNumericDocValuesWrapper(docsWithField, values));
  } else {
    final LegacyNumericDocValues values = getNumericNonIterator(field);
    final MonotonicBlockPackedReader addr;
    synchronized (this) {
      // lazily load and cache the per-field address reader; skip the cache and RAM accounting while merging
      MonotonicBlockPackedReader res = addresses.get(field.name);
      if (res == null) {
        IndexInput data = this.data.clone();
        data.seek(entry.addressOffset);
        res = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.valueCount, false);
        if (!merging) {
          addresses.put(field.name, res);
          ramBytesUsed.addAndGet(res.ramBytesUsed());
        }
      }
      addr = res;
    }
    return new LegacySortedNumericDocValuesWrapper(new LegacySortedNumericDocValues() {
      int startOffset;
      int endOffset;

      @Override
      public void setDocument(int doc) {
        // addresses delimit each document's slice in the flat values array
        startOffset = (int) addr.get(doc);
        endOffset = (int) addr.get(doc + 1);
      }

      @Override
      public long valueAt(int index) {
        return values.get(startOffset + index);
      }

      @Override
      public int count() {
        return endOffset - startOffset;
      }
    }, maxDoc);
  }
}
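The anonymous LegacySortedNumericDocValues is adapted by LegacySortedNumericDocValuesWrapper into the iterator-style SortedNumericDocValues that callers actually consume. A hedged consumer-side sketch, assuming the Lucene 7+ iterator API; the class name SortedNumericDump and the field name passed in are illustrative:

import java.io.IOException;

import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;

public class SortedNumericDump {

  // Prints every value of a sorted-numeric doc-values field for one segment.
  static void dump(LeafReader reader, String field) throws IOException {
    SortedNumericDocValues dv = reader.getSortedNumericDocValues(field);
    if (dv == null) {
      // this segment has no sorted-numeric doc values for the field
      return;
    }
    for (int doc = dv.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = dv.nextDoc()) {
      int count = dv.docValueCount();
      for (int i = 0; i < count; i++) {
        System.out.println("doc " + doc + " -> " + dv.nextValue());
      }
    }
  }
}

Note that the producer only caches the address reader (and counts its RAM) when it is not merging, so short-lived merge readers do not inflate the long-lived producer's memory accounting.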
Use of org.apache.lucene.store.IndexInput in project lucene-solr by Apache: the readSegmentInfoID method of the class TestIndexWriterThreadsToSegments.
// TODO: remove this hack and fix this test to be better?
// the whole thing relies on default codec too...
byte[] readSegmentInfoID(Directory dir, String file) throws IOException {
  try (IndexInput in = dir.openInput(file, IOContext.DEFAULT)) {
    // magic
    in.readInt();
    // codec name
    in.readString();
    // version
    in.readInt();
    byte[] id = new byte[StringHelper.ID_LENGTH];
    in.readBytes(id, 0, id.length);
    return id;
  }
}
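The three reads simply walk the standard codec-header prefix (magic, codec name, version) that precedes the 16-byte segment ID. A hypothetical usage sketch that applies the same parse to every .si file in an index directory; the class name SegmentIdDump, the choice of IOContext.READONCE, and the use of StringHelper.idToString for printing are assumptions rather than part of the test:

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.StringHelper;

public class SegmentIdDump {
  public static void main(String[] args) throws IOException {
    // args[0]: path to an index directory, passed on the command line
    try (Directory dir = FSDirectory.open(Paths.get(args[0]))) {
      for (String file : dir.listAll()) {
        if (file.endsWith(".si")) { // segment-info files written by the default codec
          try (IndexInput in = dir.openInput(file, IOContext.READONCE)) {
            in.readInt();    // magic
            in.readString(); // codec name
            in.readInt();    // version
            byte[] id = new byte[StringHelper.ID_LENGTH];
            in.readBytes(id, 0, id.length);
            System.out.println(file + " -> " + StringHelper.idToString(id));
          }
        }
      }
    }
  }
}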
Use of org.apache.lucene.store.IndexInput in project jackrabbit by Apache: the checkStream method of the class IndexOutputStreamTest.
private void checkStream(int size, int buffer) throws IOException {
    Random rand = new Random();
    byte[] data = new byte[size];
    rand.nextBytes(data);
    Directory dir = new RAMDirectory();
    OutputStream out = new IndexOutputStream(dir.createOutput("test"));
    if (buffer != 0) {
        out = new BufferedOutputStream(out, buffer);
    }
    out.write(data);
    out.close();
    byte[] buf = new byte[3];
    int pos = 0;
    IndexInput in = dir.openInput("test");
    for (;;) {
        int len = (int) Math.min(buf.length, in.length() - pos);
        in.readBytes(buf, 0, len);
        for (int i = 0; i < len; i++, pos++) {
            assertEquals(data[pos], buf[i]);
        }
        if (len == 0) {
            // EOF
            break;
        }
    }
    in.close();
    // assert length
    assertEquals(data.length, pos);
}
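The test pushes random bytes through Jackrabbit's IndexOutputStream adapter and then verifies them directly from an IndexInput in 3-byte chunks. For comparison, here is a minimal round-trip sketch using plain Lucene Directory APIs only; it assumes a modern Lucene where createOutput and openInput take an IOContext and ByteBuffersDirectory stands in for the RAMDirectory used above, so the class name DirectoryRoundTrip is illustrative and this is not a drop-in replacement for the Jackrabbit test:

import java.io.IOException;
import java.util.Arrays;
import java.util.Random;

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

public class DirectoryRoundTrip {
  public static void main(String[] args) throws IOException {
    byte[] data = new byte[8192];
    new Random(42).nextBytes(data);
    try (Directory dir = new ByteBuffersDirectory()) {
      // write the random bytes through an IndexOutput
      try (IndexOutput out = dir.createOutput("test", IOContext.DEFAULT)) {
        out.writeBytes(data, data.length);
      }
      // read them back through an IndexInput and compare
      byte[] read = new byte[data.length];
      try (IndexInput in = dir.openInput("test", IOContext.DEFAULT)) {
        in.readBytes(read, 0, read.length);
      }
      System.out.println("round trip ok: " + Arrays.equals(data, read));
    }
  }
}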
Use of org.apache.lucene.store.IndexInput in project jackrabbit by Apache: the copyIndex method of the class PersistentIndex.
/**
 * Copies <code>index</code> into this persistent index. This method should
 * only be called when <code>this</code> index is empty; otherwise the
 * behaviour is undefined.
 *
 * @param index the index to copy from.
 * @throws IOException if an error occurs while copying.
 */
void copyIndex(AbstractIndex index) throws IOException {
    // commit changes to directory on other index.
    index.commit(true);
    // simply copy over the files
    byte[] buffer = new byte[1024];
    Directory dir = index.getDirectory();
    Directory dest = getDirectory();
    String[] files = dir.listAll();
    for (String file : files) {
        IndexInput in = dir.openInput(file);
        try {
            IndexOutput out = dest.createOutput(file);
            try {
                long remaining = in.length();
                while (remaining > 0) {
                    int num = (int) Math.min(remaining, buffer.length);
                    in.readBytes(buffer, 0, num);
                    out.writeBytes(buffer, num);
                    remaining -= num;
                }
            } finally {
                out.close();
            }
        } finally {
            in.close();
        }
    }
    // refresh current generation
    indexDelPolicy.readCurrentGeneration();
}
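Newer Lucene releases expose Directory.copyFrom, which performs the same buffered per-file copy internally; the older Lucene bundled with Jackrabbit does not, which is presumably why copyIndex does the byte-level copy by hand. A sketch of the equivalent loop under that assumption (the helper name copyAll is illustrative):

import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

public class DirectoryCopy {

  // Copies every file from src into dest; like copyIndex, it assumes dest starts out empty.
  static void copyAll(Directory src, Directory dest) throws IOException {
    for (String file : src.listAll()) {
      dest.copyFrom(src, file, file, IOContext.DEFAULT);
    }
  }
}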