use of org.apache.lucene.util.BytesRef in project elasticsearch by elastic.
the class AbstractBytesReferenceTestCase method testSliceIterator.
public void testSliceIterator() throws IOException {
    int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
    BytesReference pbr = newBytesReference(length);
    int sliceOffset = randomIntBetween(0, pbr.length());
    int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset);
    BytesReference slice = pbr.slice(sliceOffset, sliceLength);
    BytesRefIterator iterator = slice.iterator();
    BytesRef ref = null;
    BytesRefBuilder builder = new BytesRefBuilder();
    while ((ref = iterator.next()) != null) {
        builder.append(ref);
    }
    assertArrayEquals(BytesReference.toBytes(slice), BytesRef.deepCopyOf(builder.toBytesRef()).bytes);
}
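Outside the test harness, the same iteration pattern reconstructs the bytes of any BytesReference page by page. A minimal sketch, assuming BytesArray (the simplest BytesReference implementation in the same Elasticsearch codebase) as the backing type; the class and variable names here are illustrative only:

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

public class SliceIteratorSketch {
    public static void main(String[] args) throws IOException {
        // Wrap a plain byte[] in the simplest BytesReference implementation.
        byte[] data = "the quick brown fox".getBytes(StandardCharsets.UTF_8);
        BytesReference ref = new BytesArray(data);

        // Slice out "quick brown" and walk the slice page by page,
        // appending each page into a BytesRefBuilder.
        BytesReference slice = ref.slice(4, 11);
        BytesRefIterator iterator = slice.iterator();
        BytesRefBuilder builder = new BytesRefBuilder();
        BytesRef page;
        while ((page = iterator.next()) != null) {
            builder.append(page);
        }

        // The rebuilt bytes equal the slice's own bytes.
        System.out.println(builder.get().utf8ToString()); // quick brown
    }
}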
use of org.apache.lucene.util.BytesRef in project elasticsearch by elastic.
the class AbstractBytesReferenceTestCase method testSliceToBytesRef.
public void testSliceToBytesRef() throws IOException {
    int length = randomIntBetween(0, PAGE_SIZE);
    BytesReference pbr = newBytesReference(length);
    // get a BytesRef from a slice
    int sliceOffset = randomIntBetween(0, pbr.length());
    int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset);
    BytesRef sliceRef = pbr.slice(sliceOffset, sliceLength).toBytesRef();
    if (sliceLength == 0 && sliceOffset != sliceRef.offset) {
        // some implementations optimize an empty slice to a shared empty instance, in which case the offset is 0
        assertEquals(0, sliceRef.offset);
    } else {
        // note that this only holds if the reference is at most one page long; otherwise offset/length are shifted
        assertEquals(sliceOffset, sliceRef.offset);
    }
    assertEquals(sliceLength, sliceRef.length);
}
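The offset assertion works because, for a single-page reference such as BytesArray, toBytesRef() is expected to view the backing array rather than copy it, so the BytesRef carries the slice offset through. A small sketch under that assumption (as the test's own comments note, the exact offset depends on the implementation and on whether the reference spans more than one page):

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

public class SliceToBytesRefSketch {
    public static void main(String[] args) {
        byte[] data = new byte[64];
        for (int i = 0; i < data.length; i++) {
            data[i] = (byte) i;
        }
        BytesReference ref = new BytesArray(data);

        // A BytesArray slice views the backing array, so the resulting BytesRef
        // should carry the slice offset and length rather than a copy starting at 0.
        BytesRef sliceRef = ref.slice(16, 8).toBytesRef();
        System.out.println(sliceRef.offset + " / " + sliceRef.length); // expected: 16 / 8
    }
}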
use of org.apache.lucene.util.BytesRef in project elasticsearch by elastic.
the class AbstractBytesReferenceTestCase method testToBytesRef.
public void testToBytesRef() throws IOException {
    int length = randomIntBetween(0, PAGE_SIZE);
    BytesReference pbr = newBytesReference(length);
    BytesRef ref = pbr.toBytesRef();
    assertNotNull(ref);
    assertEquals(pbr.length(), ref.length);
}
use of org.apache.lucene.util.BytesRef in project elasticsearch by elastic.
the class AbstractBytesReferenceTestCase method testRandomReads.
public void testRandomReads() throws IOException {
    int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
    BytesReference pbr = newBytesReference(length);
    StreamInput streamInput = pbr.streamInput();
    BytesRefBuilder target = new BytesRefBuilder();
    while (target.length() < pbr.length()) {
        switch (randomIntBetween(0, 10)) {
            case 6:
            case 5:
                target.append(new BytesRef(new byte[] { streamInput.readByte() }));
                break;
            case 4:
            case 3:
                BytesRef bytesRef = streamInput.readBytesRef(scaledRandomIntBetween(1, pbr.length() - target.length()));
                target.append(bytesRef);
                break;
            default:
                byte[] buffer = new byte[scaledRandomIntBetween(1, pbr.length() - target.length())];
                int offset = scaledRandomIntBetween(0, buffer.length - 1);
                int read = streamInput.read(buffer, offset, buffer.length - offset);
                target.append(new BytesRef(buffer, offset, read));
                break;
        }
    }
    assertEquals(pbr.length(), target.length());
    BytesRef targetBytes = target.get();
    assertArrayEquals(BytesReference.toBytes(pbr), Arrays.copyOfRange(targetBytes.bytes, targetBytes.offset, targetBytes.length));
}
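The test mixes single-byte, BytesRef, and bulk array reads from the same StreamInput. A minimal sketch of the same stream API on a small BytesArray; the class and variable names are illustrative, and a single read() call may in general return fewer bytes than requested:

import java.io.IOException;

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;

public class StreamInputReadSketch {
    public static void main(String[] args) throws IOException {
        BytesReference ref = new BytesArray(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 });

        // streamInput() exposes the reference as a stream; bytes can be consumed
        // one at a time or in bulk into an offset of a caller-supplied buffer.
        StreamInput in = ref.streamInput();
        byte first = in.readByte();               // reads 1
        byte[] rest = new byte[7];
        int read = in.read(rest, 0, rest.length); // attempts to read the remaining bytes 2..8

        System.out.println(first + " then " + read + " more bytes");
    }
}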
use of org.apache.lucene.util.BytesRef in project elasticsearch by elastic.
the class AbstractBytesReferenceTestCase method testSlice.
public void testSlice() throws IOException {
    int length = randomInt(PAGE_SIZE * 3);
    BytesReference pbr = newBytesReference(length);
    int sliceOffset = randomIntBetween(0, length / 2);
    int sliceLength = Math.max(0, length - sliceOffset - 1);
    BytesReference slice = pbr.slice(sliceOffset, sliceLength);
    assertEquals(sliceLength, slice.length());
    for (int i = 0; i < sliceLength; i++) {
        assertEquals(pbr.get(i + sliceOffset), slice.get(i));
    }
    BytesRef singlePageOrNull = getSinglePageOrNull(slice);
    if (singlePageOrNull != null) {
        // we can't assert the offset: if the slice is shorter than the reference,
        // the offset can be anywhere
        assertEquals(sliceLength, singlePageOrNull.length);
    }
}