Use of org.apache.lucene.store.IndexOutput in project elasticsearch by elastic.
From the class InputStreamIndexInputTests, the method testReadMultiFourBytesLimit:
public void testReadMultiFourBytesLimit() throws IOException {
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
    for (int i = 0; i < 3; i++) {
        output.writeByte((byte) 1);
    }
    for (int i = 0; i < 3; i++) {
        output.writeByte((byte) 2);
    }
    output.close();
    IndexInput input = dir.openInput("test", IOContext.DEFAULT);
    byte[] read = new byte[4];
    assertThat(input.getFilePointer(), lessThan(input.length()));
    InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
    assertThat(is.actualSizeToRead(), equalTo(4L));
    assertThat(is.read(read), equalTo(4));
    assertThat(read[0], equalTo((byte) 1));
    assertThat(read[1], equalTo((byte) 1));
    assertThat(read[2], equalTo((byte) 1));
    assertThat(read[3], equalTo((byte) 2));
    assertThat(input.getFilePointer(), lessThan(input.length()));
    is = new InputStreamIndexInput(input, 4);
    assertThat(is.actualSizeToRead(), equalTo(2L));
    assertThat(is.read(read), equalTo(2));
    assertThat(read[0], equalTo((byte) 2));
    assertThat(read[1], equalTo((byte) 2));
    assertThat(input.getFilePointer(), equalTo(input.length()));
    is = new InputStreamIndexInput(input, 4);
    assertThat(is.actualSizeToRead(), equalTo(0L));
    assertThat(is.read(read), equalTo(-1));
}
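For context, the plain Lucene write/read roundtrip that this test wraps can be sketched as below: bytes written through an IndexOutput come back in the same order through an IndexInput. This is only an illustrative sketch; the file name "demo" and the method name are made up here, and it assumes the same Lucene version as the test above.

import java.io.IOException;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;

public void writeThenReadSketch() throws IOException {
    try (RAMDirectory dir = new RAMDirectory()) {
        // Write three 1s followed by three 2s, mirroring the test fixture.
        try (IndexOutput output = dir.createOutput("demo", IOContext.DEFAULT)) {
            for (int i = 0; i < 3; i++) {
                output.writeByte((byte) 1);
            }
            for (int i = 0; i < 3; i++) {
                output.writeByte((byte) 2);
            }
        }
        // Read everything back; the IndexInput reports its length and file pointer.
        try (IndexInput input = dir.openInput("demo", IOContext.DEFAULT)) {
            byte[] bytes = new byte[(int) input.length()];
            input.readBytes(bytes, 0, bytes.length); // {1, 1, 1, 2, 2, 2}
        }
    }
}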
Use of org.apache.lucene.store.IndexOutput in project elasticsearch by elastic.
From the class InputStreamIndexInputTests, the method testMarkRest:
public void testMarkRest() throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
    for (int i = 0; i < 3; i++) {
        output.writeByte((byte) 1);
    }
    for (int i = 0; i < 3; i++) {
        output.writeByte((byte) 2);
    }
    output.close();
    IndexInput input = dir.openInput("test", IOContext.DEFAULT);
    InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
    assertThat(is.markSupported(), equalTo(true));
    assertThat(is.read(), equalTo(1));
    assertThat(is.read(), equalTo(1));
    is.mark(0);
    assertThat(is.read(), equalTo(1));
    assertThat(is.read(), equalTo(2));
    is.reset();
    assertThat(is.read(), equalTo(1));
    assertThat(is.read(), equalTo(2));
}
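The mark/reset behaviour exercised here maps naturally onto IndexInput's getFilePointer() and seek(). The class below is only a sketch of that idea (the name MarkableIndexInputStream is invented for illustration); it is not the Elasticsearch InputStreamIndexInput, which additionally enforces a byte limit per stream.

import java.io.IOException;
import java.io.InputStream;
import org.apache.lucene.store.IndexInput;

class MarkableIndexInputStream extends InputStream {
    private final IndexInput input;
    private long markedPointer = -1;

    MarkableIndexInputStream(IndexInput input) {
        this.input = input;
    }

    @Override
    public int read() throws IOException {
        if (input.getFilePointer() >= input.length()) {
            return -1; // end of the underlying input
        }
        return input.readByte() & 0xFF;
    }

    @Override
    public boolean markSupported() {
        return true;
    }

    @Override
    public synchronized void mark(int readlimit) {
        // readlimit is ignored: the underlying IndexInput can always seek back.
        markedPointer = input.getFilePointer();
    }

    @Override
    public synchronized void reset() throws IOException {
        if (markedPointer < 0) {
            throw new IOException("mark() was not called");
        }
        input.seek(markedPointer); // rewind to the marked position
    }
}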
Use of org.apache.lucene.store.IndexOutput in project lucene-solr by apache.
From the class FSTTester, the method doTest:
FST<T> doTest(int prune1, int prune2, boolean allowRandomSuffixSharing) throws IOException {
    if (LuceneTestCase.VERBOSE) {
        System.out.println("\nTEST: prune1=" + prune1 + " prune2=" + prune2);
    }
    final Builder<T> builder = new Builder<>(inputMode == 0 ? FST.INPUT_TYPE.BYTE1 : FST.INPUT_TYPE.BYTE4,
        prune1, prune2,
        prune1 == 0 && prune2 == 0,
        allowRandomSuffixSharing ? random.nextBoolean() : true,
        allowRandomSuffixSharing ? TestUtil.nextInt(random, 1, 10) : Integer.MAX_VALUE,
        outputs, true, 15);
    for (InputOutput<T> pair : pairs) {
        if (pair.output instanceof List) {
            @SuppressWarnings("unchecked") List<Long> longValues = (List<Long>) pair.output;
            @SuppressWarnings("unchecked") final Builder<Object> builderObject = (Builder<Object>) builder;
            for (Long value : longValues) {
                builderObject.add(pair.input, value);
            }
        } else {
            builder.add(pair.input, pair.output);
        }
    }
    FST<T> fst = builder.finish();
    if (random.nextBoolean() && fst != null) {
        IOContext context = LuceneTestCase.newIOContext(random);
        IndexOutput out = dir.createOutput("fst.bin", context);
        fst.save(out);
        out.close();
        IndexInput in = dir.openInput("fst.bin", context);
        try {
            fst = new FST<>(in, outputs);
        } finally {
            in.close();
            dir.deleteFile("fst.bin");
        }
    }
    if (LuceneTestCase.VERBOSE && pairs.size() <= 20 && fst != null) {
        System.out.println("Printing FST as dot file to stdout:");
        final Writer w = new OutputStreamWriter(System.out, Charset.defaultCharset());
        Util.toDot(fst, w, false, false);
        w.flush();
        System.out.println("END dot file");
    }
    if (LuceneTestCase.VERBOSE) {
        if (fst == null) {
            System.out.println(" fst has 0 nodes (fully pruned)");
        } else {
            System.out.println(" fst has " + builder.getNodeCount() + " nodes and " + builder.getArcCount() + " arcs");
        }
    }
    if (prune1 == 0 && prune2 == 0) {
        verifyUnPruned(inputMode, fst);
    } else {
        verifyPruned(inputMode, fst, prune1, prune2);
    }
    nodeCount = builder.getNodeCount();
    arcCount = builder.getArcCount();
    return fst;
}
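Outside the randomized harness, the save/load roundtrip in the middle of doTest looks roughly like this. The sketch assumes the Builder-based FST API used above (later Lucene versions rename Builder to FSTCompiler); the keys, values, and file name are arbitrary.

import java.io.IOException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.fst.Builder;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.PositiveIntOutputs;
import org.apache.lucene.util.fst.Util;

static void fstSaveLoadSketch() throws IOException {
    PositiveIntOutputs outputs = PositiveIntOutputs.getSingleton();
    Builder<Long> builder = new Builder<>(FST.INPUT_TYPE.BYTE1, outputs);
    IntsRefBuilder scratch = new IntsRefBuilder();
    // Keys must be added in sorted order.
    builder.add(Util.toIntsRef(new BytesRef("cat"), scratch), 5L);
    builder.add(Util.toIntsRef(new BytesRef("dog"), scratch), 7L);
    FST<Long> fst = builder.finish();

    try (Directory dir = new RAMDirectory()) {
        // Serialize the FST through an IndexOutput.
        try (IndexOutput out = dir.createOutput("fst.bin", IOContext.DEFAULT)) {
            fst.save(out);
        }
        // Reload it through an IndexInput and look a key up again.
        try (IndexInput in = dir.openInput("fst.bin", IOContext.DEFAULT)) {
            FST<Long> loaded = new FST<>(in, outputs);
            Long value = Util.get(loaded, new BytesRef("cat")); // 5L
        }
    }
}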
Use of org.apache.lucene.store.IndexOutput in project lucene-solr by apache.
From the class SolrCore, the method writeNewIndexProps:
/**
 * Write the index.properties file with the new index sub directory name
 * @param dir a data directory (containing an index.properties file)
 * @param tmpFileName the file name to write the new index.properties to
 * @param tmpIdxDirName new index directory name
 */
private static void writeNewIndexProps(Directory dir, String tmpFileName, String tmpIdxDirName) {
    if (tmpFileName == null) {
        tmpFileName = IndexFetcher.INDEX_PROPERTIES;
    }
    final Properties p = new Properties();
    // Read existing properties
    try {
        final IndexInput input = dir.openInput(IndexFetcher.INDEX_PROPERTIES, DirectoryFactory.IOCONTEXT_NO_CACHE);
        final InputStream is = new PropertiesInputStream(input);
        try {
            p.load(new InputStreamReader(is, StandardCharsets.UTF_8));
        } catch (Exception e) {
            log.error("Unable to load " + IndexFetcher.INDEX_PROPERTIES, e);
        } finally {
            IOUtils.closeQuietly(is);
        }
    } catch (IOException e) {
        // ignore; file does not exist
    }
    p.put("index", tmpIdxDirName);
    // Write new properties
    Writer os = null;
    try {
        IndexOutput out = dir.createOutput(tmpFileName, DirectoryFactory.IOCONTEXT_NO_CACHE);
        os = new OutputStreamWriter(new PropertiesOutputStream(out), StandardCharsets.UTF_8);
        p.store(os, IndexFetcher.INDEX_PROPERTIES);
        dir.sync(Collections.singleton(tmpFileName));
    } catch (Exception e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to write " + IndexFetcher.INDEX_PROPERTIES, e);
    } finally {
        IOUtils.closeQuietly(os);
    }
}
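PropertiesInputStream and PropertiesOutputStream are small Solr adapters; the idea they implement is simply to expose an IndexInput/IndexOutput as java.io streams so java.util.Properties can load from and store into the Lucene Directory. A rough sketch of the output-side adapter (the class name IndexOutputStream is made up here, and this is not the actual Solr class):

import java.io.IOException;
import java.io.OutputStream;
import org.apache.lucene.store.IndexOutput;

class IndexOutputStream extends OutputStream {
    private final IndexOutput out;

    IndexOutputStream(IndexOutput out) {
        this.out = out;
    }

    @Override
    public void write(int b) throws IOException {
        out.writeByte((byte) b); // forward each byte to the IndexOutput
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        out.writeBytes(b, off, len); // bulk writes avoid per-byte calls
    }

    @Override
    public void close() throws IOException {
        out.close(); // closing the stream closes the underlying IndexOutput
    }
}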
Use of org.apache.lucene.store.IndexOutput in project lucene-solr by apache.
From the class SolrSnapshotMetaDataManager, the method persist:
private synchronized void persist() throws IOException {
    String fileName = SNAPSHOTS_PREFIX + nextWriteGen;
    IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT);
    boolean success = false;
    try {
        CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
        out.writeVInt(nameToDetailsMapping.size());
        for (Entry<String, SnapshotMetaData> ent : nameToDetailsMapping.entrySet()) {
            out.writeString(ent.getKey());
            out.writeString(ent.getValue().getIndexDirPath());
            out.writeVLong(ent.getValue().getGenerationNumber());
        }
        success = true;
    } finally {
        if (!success) {
            IOUtils.closeWhileHandlingException(out);
            IOUtils.deleteFilesIgnoringExceptions(dir, fileName);
        } else {
            IOUtils.close(out);
        }
    }
    dir.sync(Collections.singletonList(fileName));
    if (nextWriteGen > 0) {
        String lastSaveFile = SNAPSHOTS_PREFIX + (nextWriteGen - 1);
        // exception OK: likely it didn't exist
        IOUtils.deleteFilesIgnoringExceptions(dir, lastSaveFile);
    }
    nextWriteGen++;
}
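The read path mirrors the write order exactly: check the codec header, read the entry count, then read each (name, index directory path, generation) triple. The helper below is a hypothetical sketch of that read-back, not the actual loading code in SolrSnapshotMetaDataManager.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

static Map<String, String> readSnapshotsSketch(Directory dir, String fileName,
                                               String codecName, int version) throws IOException {
    Map<String, String> nameToIndexDirPath = new HashMap<>();
    try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) {
        CodecUtil.checkHeader(in, codecName, version, version); // mirrors writeHeader above
        int count = in.readVInt();
        for (int i = 0; i < count; i++) {
            String snapshotName = in.readString();
            String indexDirPath = in.readString();
            long generation = in.readVLong(); // read to keep the stream aligned
            nameToIndexDirPath.put(snapshotName, indexDirPath);
        }
    }
    return nameToIndexDirPath;
}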