Use of org.apache.lucene.store.DataInput in the elasticsearch project (by elastic).
From class XAnalyzingSuggester, method load:
@Override
public boolean load(InputStream input) throws IOException {
// Restores suggester state previously persisted by store(): the FST plus
// two small pieces of metadata.
try {
final DataInput in = new InputStreamDataInput(input);
// The FST maps analyzed forms to (weight, surface form) pairs, hence the
// PairOutputs of PositiveIntOutputs and ByteSequenceOutputs.
this.fst = new FST<>(in, new PairOutputs<>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton()));
maxAnalyzedPathsForOneInput = in.readVInt();
// A single byte flag (1/0) records whether payloads were stored.
hasPayloads = in.readByte() == 1;
} finally {
// Always release the caller-supplied stream, even if deserialization fails.
IOUtils.close(input);
}
return true;
}
Use of org.apache.lucene.store.DataInput in the lucene-solr project (by apache).
From class TestPagedBytes, method testDataInputOutput2:
// Writes random byte/s into PagedBytes via
// .getDataOutput(), then verifies with
// PagedBytes.getDataInput():
// Writes random byte/s into PagedBytes via
// .getDataOutput(), then verifies with
// PagedBytes.getDataInput():
public void testDataInputOutput2() throws Exception {
Random random = random();
for (int iter = 0; iter < 5 * RANDOM_MULTIPLIER; iter++) {
final int blockBits = TestUtil.nextInt(random, 1, 20);
final int blockSize = 1 << blockBits;
final PagedBytes p = new PagedBytes(blockBits);
final DataOutput out = p.getDataOutput();
// Pick at least 2 bytes: the slice loop below calls
// random.nextInt(numBytes - 1), which throws IllegalArgumentException for
// a non-positive bound (numBytes < 2). This also matches the lower bound
// used by testDataInputOutput().
final int numBytes = TestUtil.nextInt(random, 2, 10000000);
final byte[] answer = new byte[numBytes];
random.nextBytes(answer);
// Fill the PagedBytes output with the expected data, mixing single-byte
// and bulk writes to exercise both code paths.
int written = 0;
while (written < numBytes) {
if (random.nextInt(10) == 7) {
out.writeByte(answer[written++]);
} else {
int chunk = Math.min(random.nextInt(1000), numBytes - written);
out.writeBytes(answer, written, chunk);
written += chunk;
}
}
final PagedBytes.Reader reader = p.freeze(random.nextBoolean());
final DataInput in = p.getDataInput();
// Read everything back through getDataInput(), again mixing single-byte
// and bulk reads, and compare against what was written.
final byte[] verify = new byte[numBytes];
int read = 0;
while (read < numBytes) {
if (random.nextInt(10) == 7) {
verify[read++] = in.readByte();
} else {
int chunk = Math.min(random.nextInt(1000), numBytes - read);
in.readBytes(verify, read, chunk);
read += chunk;
}
}
assertTrue(Arrays.equals(answer, verify));
// Spot-check random slices served by the frozen reader.
final BytesRef slice = new BytesRef();
for (int iter2 = 0; iter2 < 100; iter2++) {
final int pos = random.nextInt(numBytes - 1);
final int len = random.nextInt(Math.min(blockSize + 1, numBytes - pos));
reader.fillSlice(slice, pos, len);
for (int byteUpto = 0; byteUpto < len; byteUpto++) {
assertEquals(answer[pos + byteUpto], slice.bytes[slice.offset + byteUpto]);
}
}
}
}
Use of org.apache.lucene.store.DataInput in the lucene-solr project (by apache).
From class TestPagedBytes, method testDataInputOutput:
// Writes random byte/s to "normal" file in dir, then
// copies into PagedBytes and verifies with
// PagedBytes.Reader:
// Writes random byte/s to "normal" file in dir, then
// copies into PagedBytes and verifies with
// PagedBytes.Reader:
public void testDataInputOutput() throws Exception {
Random random = random();
for (int iter = 0; iter < 5 * RANDOM_MULTIPLIER; iter++) {
BaseDirectoryWrapper dir = newFSDirectory(createTempDir("testOverflow"));
if (dir instanceof MockDirectoryWrapper) {
// Throttling would only slow this test down; disable it.
((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
final int blockBits = TestUtil.nextInt(random, 1, 20);
final int blockSize = 1 << blockBits;
final PagedBytes pagedBytes = new PagedBytes(blockBits);
final IndexOutput fileOut = dir.createOutput("foo", IOContext.DEFAULT);
final int numBytes = TestUtil.nextInt(random(), 2, 10000000);
final byte[] expected = new byte[numBytes];
random().nextBytes(expected);
// Write the expected bytes to the file, mixing single-byte and bulk
// writes to exercise both output code paths.
int written = 0;
while (written < numBytes) {
if (random().nextInt(10) == 7) {
fileOut.writeByte(expected[written++]);
} else {
int chunk = Math.min(random().nextInt(1000), numBytes - written);
fileOut.writeBytes(expected, written, chunk);
written += chunk;
}
}
fileOut.close();
final IndexInput fileIn = dir.openInput("foo", IOContext.DEFAULT);
// Clone keeps an independent read position at 0 while the original input
// is consumed by the copy below.
final DataInput cloneIn = fileIn.clone();
pagedBytes.copy(fileIn, fileIn.length());
final PagedBytes.Reader reader = pagedBytes.freeze(random.nextBoolean());
// Read everything back through the clone, again mixing single-byte and
// bulk reads, and compare against what was written.
final byte[] actual = new byte[numBytes];
int read = 0;
while (read < numBytes) {
if (random().nextInt(10) == 7) {
actual[read++] = cloneIn.readByte();
} else {
int chunk = Math.min(random().nextInt(1000), numBytes - read);
cloneIn.readBytes(actual, read, chunk);
read += chunk;
}
}
assertTrue(Arrays.equals(expected, actual));
// Spot-check random slices served by the frozen reader.
final BytesRef slice = new BytesRef();
for (int iter2 = 0; iter2 < 100; iter2++) {
final int pos = random.nextInt(numBytes - 1);
final int len = random.nextInt(Math.min(blockSize + 1, numBytes - pos));
reader.fillSlice(slice, pos, len);
for (int byteUpto = 0; byteUpto < len; byteUpto++) {
assertEquals(expected[pos + byteUpto], slice.bytes[slice.offset + byteUpto]);
}
}
fileIn.close();
dir.close();
}
}
Use of org.apache.lucene.store.DataInput in the lucene-solr project (by apache).
From class TestPackedInts, method testBlockPackedReaderWriter:
public void testBlockPackedReaderWriter() throws IOException {
// Round-trips random long values through BlockPackedWriter, then verifies
// the data via the sequential iterator (single next(), bulk next(count),
// skip()) and finally via the random-access BlockPackedReader.
final int iters = atLeast(2);
for (int iter = 0; iter < iters; ++iter) {
final int blockSize = 1 << TestUtil.nextInt(random(), 6, 18);
final int valueCount = random().nextInt(1 << 18);
final long[] values = new long[valueCount];
long minValue = 0;
int bpv = 0;
// Generate values block by block: each block picks a fresh minimum and a
// fresh bits-per-value in 0..64 so all encoder paths get exercised.
for (int i = 0; i < valueCount; ++i) {
if (i % blockSize == 0) {
minValue = rarely() ? random().nextInt(256) : rarely() ? -5 : random().nextLong();
bpv = random().nextInt(65);
}
if (bpv == 0) {
// 0 bits per value: every value in the block equals the minimum.
values[i] = minValue;
} else if (bpv == 64) {
// Full 64 bits: any long is representable.
values[i] = random().nextLong();
} else {
values[i] = minValue + TestUtil.nextLong(random(), 0, (1L << bpv) - 1);
}
}
final Directory dir = newDirectory();
final IndexOutput out = dir.createOutput("out.bin", IOContext.DEFAULT);
final BlockPackedWriter writer = new BlockPackedWriter(out, blockSize);
for (int i = 0; i < valueCount; ++i) {
// ord() reports how many values have been added so far.
assertEquals(i, writer.ord());
writer.add(values[i]);
}
assertEquals(valueCount, writer.ord());
writer.finish();
assertEquals(valueCount, writer.ord());
final long fp = out.getFilePointer();
out.close();
// Prepare two equivalent inputs over the written bytes — the index file
// itself and an in-memory copy — and randomly pick one, so both DataInput
// implementations are covered.
IndexInput in1 = dir.openInput("out.bin", IOContext.DEFAULT);
byte[] buf = new byte[(int) fp];
in1.readBytes(buf, 0, (int) fp);
in1.seek(0L);
ByteArrayDataInput in2 = new ByteArrayDataInput(buf);
final DataInput in = random().nextBoolean() ? in1 : in2;
final BlockPackedReaderIterator it = new BlockPackedReaderIterator(in, PackedInts.VERSION_CURRENT, blockSize, valueCount);
for (int i = 0; i < valueCount; ) {
if (random().nextBoolean()) {
assertEquals("" + i, values[i], it.next());
++i;
} else {
// Bulk read: next(count) may legally return fewer than requested.
final LongsRef nextValues = it.next(TestUtil.nextInt(random(), 1, 1024));
for (int j = 0; j < nextValues.length; ++j) {
assertEquals("" + (i + j), values[i + j], nextValues.longs[nextValues.offset + j]);
}
i += nextValues.length;
}
assertEquals(i, it.ord());
}
// All bytes must be consumed, and reading past the end must throw.
assertEquals(fp, in instanceof ByteArrayDataInput ? ((ByteArrayDataInput) in).getPosition() : ((IndexInput) in).getFilePointer());
expectThrows(IOException.class, () -> {
it.next();
});
// Rewind and verify skip(): alternate between skipping a random distance
// and checking the value immediately after the skip.
if (in instanceof ByteArrayDataInput) {
((ByteArrayDataInput) in).setPosition(0);
} else {
((IndexInput) in).seek(0L);
}
final BlockPackedReaderIterator it2 = new BlockPackedReaderIterator(in, PackedInts.VERSION_CURRENT, blockSize, valueCount);
int i = 0;
while (true) {
final int skip = TestUtil.nextInt(random(), 0, valueCount - i);
it2.skip(skip);
i += skip;
assertEquals(i, it2.ord());
if (i == valueCount) {
break;
} else {
assertEquals(values[i], it2.next());
++i;
}
}
assertEquals(fp, in instanceof ByteArrayDataInput ? ((ByteArrayDataInput) in).getPosition() : ((IndexInput) in).getFilePointer());
expectThrows(IOException.class, () -> {
it2.skip(1);
});
// Finally verify random access: BlockPackedReader must consume the whole
// stream on construction and return every value by index.
in1.seek(0L);
final BlockPackedReader reader = new BlockPackedReader(in1, PackedInts.VERSION_CURRENT, blockSize, valueCount, random().nextBoolean());
assertEquals(in1.getFilePointer(), in1.length());
for (i = 0; i < valueCount; ++i) {
assertEquals("i=" + i, values[i], reader.get(i));
}
in1.close();
dir.close();
}
}
Aggregations