Use of org.apache.lucene.store.DataOutput in the project lucene-solr by apache.
Class BinaryDictionaryWriter, method writeTargetMap.
// TODO: maybe this int[] should instead be the output to the FST...
/**
 * Serializes the targetMap (the flat array of word ids plus the per-source-id
 * offset table) to {@code filename}, creating parent directories as needed.
 * <p>
 * Encoding: header, then the main-array length and the offset-array length as
 * vInts, then each entry delta-encoded against the previous value; the low bit
 * of each written vInt flags whether that entry begins a new source id's run.
 * <p>
 * NOTE(review): assumes {@code lastSourceId}, {@code targetMap} and
 * {@code targetMapOffsets} were fully populated by prior add calls — confirm
 * against the caller.
 *
 * @param filename path of the output file
 * @throws IOException if the file cannot be created or written
 */
protected void writeTargetMap(String filename) throws IOException {
  new File(filename).getParentFile().mkdirs();
  // try-with-resources: a close() failure no longer masks the original write
  // exception (it is attached as suppressed instead).
  try (OutputStream os = new BufferedOutputStream(new FileOutputStream(filename))) {
    final DataOutput out = new OutputStreamDataOutput(os);
    CodecUtil.writeHeader(out, BinaryDictionary.TARGETMAP_HEADER, BinaryDictionary.VERSION);
    final int numSourceIds = lastSourceId + 1;
    out.writeVInt(targetMapEndOffset); // <-- size of main array
    out.writeVInt(numSourceIds + 1);   // <-- size of offset array (+ 1 more entry)
    int prev = 0, sourceId = 0;
    for (int ofs = 0; ofs < targetMapEndOffset; ofs++) {
      final int val = targetMap[ofs], delta = val - prev;
      assert delta >= 0;
      if (ofs == targetMapOffsets[sourceId]) {
        // low bit set: this entry starts the block belonging to sourceId
        out.writeVInt((delta << 1) | 0x01);
        sourceId++;
      } else {
        out.writeVInt((delta << 1));
      }
      prev += delta;
    }
    assert sourceId == numSourceIds : "sourceId:" + sourceId + " != numSourceIds:" + numSourceIds;
  }
}
Use of org.apache.lucene.store.DataOutput in the project lucene-solr by apache.
Class CharacterDefinitionWriter, method write.
/**
 * Writes the compiled character-definition data (category map plus per-class
 * invoke/group flags) to the conventional file location under {@code baseDir},
 * creating parent directories as needed.
 *
 * @param baseDir root directory under which the resource path is derived from
 *                the {@link CharacterDefinition} class name
 * @throws IOException if the file cannot be created or written
 */
public void write(String baseDir) throws IOException {
  String filename = baseDir + File.separator
      + CharacterDefinition.class.getName().replace('.', File.separatorChar)
      + CharacterDefinition.FILENAME_SUFFIX;
  new File(filename).getParentFile().mkdirs();
  // try-with-resources: a close() failure no longer masks the original write
  // exception (it is attached as suppressed instead).
  try (OutputStream os = new BufferedOutputStream(new FileOutputStream(filename))) {
    final DataOutput out = new OutputStreamDataOutput(os);
    CodecUtil.writeHeader(out, CharacterDefinition.HEADER, CharacterDefinition.VERSION);
    out.writeBytes(characterCategoryMap, 0, characterCategoryMap.length);
    for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++) {
      // pack two booleans per class into one byte: bit 0 = invoke, bit 1 = group
      final byte b = (byte) ((invokeMap[i] ? 0x01 : 0x00) | (groupMap[i] ? 0x02 : 0x00));
      out.writeByte(b);
    }
  }
}
Use of org.apache.lucene.store.DataOutput in the project lucene-solr by apache.
Class TestPagedBytes, method testDataInputOutput2.
// Writes random byte/s into PagedBytes via
// .getDataOutput(), then verifies with
// PagedBytes.getDataInput():
public void testDataInputOutput2() throws Exception {
  Random random = random();
  for (int iter = 0; iter < 5 * RANDOM_MULTIPLIER; iter++) {
    final int blockBits = TestUtil.nextInt(random, 1, 20);
    final int blockSize = 1 << blockBits;
    final PagedBytes p = new PagedBytes(blockBits);
    final DataOutput out = p.getDataOutput();
    // Lower bound of 2: the slice loop below calls random.nextInt(numBytes - 1),
    // which throws IllegalArgumentException for numBytes <= 1.
    final int numBytes = TestUtil.nextInt(random, 2, 10000000);
    final byte[] answer = new byte[numBytes];
    random.nextBytes(answer);
    // Write the answer bytes through the DataOutput, mixing single-byte and
    // bulk writes to exercise both code paths.
    int written = 0;
    while (written < numBytes) {
      if (random.nextInt(10) == 7) {
        out.writeByte(answer[written++]);
      } else {
        int chunk = Math.min(random.nextInt(1000), numBytes - written);
        out.writeBytes(answer, written, chunk);
        written += chunk;
      }
    }
    final PagedBytes.Reader reader = p.freeze(random.nextBoolean());
    // Read everything back through the DataInput, again mixing single-byte
    // and bulk reads, and verify it matches.
    final DataInput in = p.getDataInput();
    final byte[] verify = new byte[numBytes];
    int read = 0;
    while (read < numBytes) {
      if (random.nextInt(10) == 7) {
        verify[read++] = in.readByte();
      } else {
        int chunk = Math.min(random.nextInt(1000), numBytes - read);
        in.readBytes(verify, read, chunk);
        read += chunk;
      }
    }
    assertTrue(Arrays.equals(answer, verify));
    // Spot-check random slices via the frozen reader.
    final BytesRef slice = new BytesRef();
    for (int iter2 = 0; iter2 < 100; iter2++) {
      final int pos = random.nextInt(numBytes - 1);
      final int len = random.nextInt(Math.min(blockSize + 1, numBytes - pos));
      reader.fillSlice(slice, pos, len);
      for (int byteUpto = 0; byteUpto < len; byteUpto++) {
        assertEquals(answer[pos + byteUpto], slice.bytes[slice.offset + byteUpto]);
      }
    }
  }
}
Use of org.apache.lucene.store.DataOutput in the project elasticsearch by elastic.
Class XAnalyzingSuggester, method store.
/**
 * Persists this suggester's FST plus its two scalar settings to the given
 * stream. The stream is always closed, even when there is nothing to store.
 *
 * @param output destination stream; closed by this method
 * @return {@code true} if data was written, {@code false} when no FST exists
 * @throws IOException if writing fails
 */
@Override
public boolean store(OutputStream output) throws IOException {
  try {
    if (fst == null) {
      // Nothing built yet; caller's stream is still closed by the finally.
      return false;
    }
    final DataOutput dataOut = new OutputStreamDataOutput(output);
    fst.save(dataOut);
    dataOut.writeVInt(maxAnalyzedPathsForOneInput);
    dataOut.writeByte(hasPayloads ? (byte) 1 : (byte) 0);
    return true;
  } finally {
    IOUtils.close(output);
  }
}
Use of org.apache.lucene.store.DataOutput in the project lucene-solr by apache.
Class BinaryDictionaryWriter, method writePosDict.
/**
 * Serializes the part-of-speech dictionary to {@code filename}, creating
 * parent directories as needed.
 * <p>
 * Each entry is stored as three strings (the CSV-parsed fields); a null entry
 * is stored as three zero bytes — presumably equivalent to three empty
 * strings under {@code DataOutput.writeString}'s vInt-length encoding (TODO
 * confirm against the reader side).
 *
 * @param filename path of the output file
 * @throws IOException if the file cannot be created or written
 */
protected void writePosDict(String filename) throws IOException {
  new File(filename).getParentFile().mkdirs();
  // try-with-resources: a close() failure no longer masks the original write
  // exception (it is attached as suppressed instead).
  try (OutputStream os = new BufferedOutputStream(new FileOutputStream(filename))) {
    final DataOutput out = new OutputStreamDataOutput(os);
    CodecUtil.writeHeader(out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION);
    out.writeVInt(posDict.size());
    for (String s : posDict) {
      if (s == null) {
        out.writeByte((byte) 0);
        out.writeByte((byte) 0);
        out.writeByte((byte) 0);
      } else {
        String[] data = CSVUtil.parse(s);
        assert data.length == 3 : "malformed pos/inflection: " + s;
        out.writeString(data[0]);
        out.writeString(data[1]);
        out.writeString(data[2]);
      }
    }
  }
}
Aggregations