Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.
Class TestPagedBytes, method testOverflow.
// memory hole: copies more than 2 GB into heap, so the test is disabled by default
@Ignore
public void testOverflow() throws IOException {
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("testOverflow"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  final int blockBits = TestUtil.nextInt(random(), 14, 28);
  final int blockSize = 1 << blockBits;
  byte[] arr = new byte[TestUtil.nextInt(random(), blockSize / 2, blockSize * 2)];
  for (int i = 0; i < arr.length; ++i) {
    arr[i] = (byte) i;
  }
  final long numBytes = (1L << 31) + TestUtil.nextInt(random(), 1, blockSize * 3);
  final PagedBytes p = new PagedBytes(blockBits);
  final IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT);
  for (long i = 0; i < numBytes; ) {
    assertEquals(i, out.getFilePointer());
    final int len = (int) Math.min(arr.length, numBytes - i);
    out.writeBytes(arr, len);
    i += len;
  }
  assertEquals(numBytes, out.getFilePointer());
  out.close();
  final IndexInput in = dir.openInput("foo", IOContext.DEFAULT);
  p.copy(in, numBytes);
  final PagedBytes.Reader reader = p.freeze(random().nextBoolean());
  for (long offset : new long[] { 0L, Integer.MAX_VALUE, numBytes - 1, TestUtil.nextLong(random(), 1, numBytes - 2) }) {
    BytesRef b = new BytesRef();
    reader.fillSlice(b, offset, 1);
    assertEquals(arr[(int) (offset % arr.length)], b.bytes[b.offset]);
  }
  in.close();
  dir.close();
}
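The test above exercises the whole PagedBytes flow: copy bytes in from an IndexInput, freeze, then read slices at arbitrary long offsets. A minimal sketch of the same flow outside a test, assuming an already-open Directory named dir containing a file "data" (both names are placeholders, not from the test):

// minimal sketch; "dir" and "data" are illustrative placeholders
long dataLen = dir.fileLength("data");
PagedBytes pages = new PagedBytes(15); // 1 << 15 = 32 KB blocks
try (IndexInput in = dir.openInput("data", IOContext.DEFAULT)) {
  pages.copy(in, dataLen); // buffer the whole file into paged blocks
}
PagedBytes.Reader reader = pages.freeze(true); // trim the final block; the Reader is read-only
BytesRef slice = new BytesRef();
reader.fillSlice(slice, 0L, 1); // fill one byte starting at offset 0
byte first = slice.bytes[slice.offset];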
Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.
Class TestIndexSplitter, method test.
public void test() throws Exception {
  Path dir = createTempDir(LuceneTestCase.getTestClass().getSimpleName());
  Path destDir = createTempDir(LuceneTestCase.getTestClass().getSimpleName());
  Directory fsDir = newFSDirectory(dir);
  // IndexSplitter makes its own commit directly, so unreferenced files are expected
  if (fsDir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) fsDir).setAssertNoUnrefencedFilesOnClose(false);
  }
  MergePolicy mergePolicy = new LogByteSizeMergePolicy();
  mergePolicy.setNoCFSRatio(1.0);
  mergePolicy.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
  IndexWriter iw = new IndexWriter(fsDir, new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setMergePolicy(mergePolicy));
  for (int x = 0; x < 100; x++) {
    Document doc = DocHelper.createDocument(x, "index", 5);
    iw.addDocument(doc);
  }
  iw.commit();
  for (int x = 100; x < 150; x++) {
    Document doc = DocHelper.createDocument(x, "index2", 5);
    iw.addDocument(doc);
  }
  iw.commit();
  for (int x = 150; x < 200; x++) {
    Document doc = DocHelper.createDocument(x, "index3", 5);
    iw.addDocument(doc);
  }
  iw.commit();
  DirectoryReader iwReader = iw.getReader();
  assertEquals(3, iwReader.leaves().size());
  iwReader.close();
  iw.close();
  // the index now has 3 segments; copy the middle one (50 docs) into destDir
  IndexSplitter is = new IndexSplitter(dir);
  String splitSegName = is.infos.info(1).info.name;
  is.split(destDir, new String[] { splitSegName });
  Directory fsDirDest = newFSDirectory(destDir);
  DirectoryReader r = DirectoryReader.open(fsDirDest);
  assertEquals(50, r.maxDoc());
  r.close();
  fsDirDest.close();
  // now test the command-line interface
  Path destDir2 = createTempDir(LuceneTestCase.getTestClass().getSimpleName());
  IndexSplitter.main(new String[] { dir.toAbsolutePath().toString(), destDir2.toAbsolutePath().toString(), splitSegName });
  Directory fsDirDest2 = newFSDirectory(destDir2);
  SegmentInfos sis = SegmentInfos.readLatestCommit(fsDirDest2);
  assertEquals(1, sis.size());
  r = DirectoryReader.open(fsDirDest2);
  assertEquals(50, r.maxDoc());
  r.close();
  fsDirDest2.close();
  // now remove the copied segment from the source index
  IndexSplitter.main(new String[] { dir.toAbsolutePath().toString(), "-d", splitSegName });
  r = DirectoryReader.open(fsDir);
  assertEquals(2, r.leaves().size());
  r.close();
  fsDir.close();
}
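As the two main calls show, IndexSplitter doubles as a command-line tool. A hedged sketch of its invocation modes, using placeholder paths and segment name; the copy and delete modes are demonstrated in the test above, and the -l listing mode comes from the tool's printed usage string (an assumption worth verifying against your Lucene version):

// placeholder paths and segment name; requires lucene-misc on the classpath
IndexSplitter.main(new String[] { "/path/to/srcIndex", "-l" });                       // list segments
IndexSplitter.main(new String[] { "/path/to/srcIndex", "/path/to/destIndex", "_1" }); // copy segment _1 into destIndex
IndexSplitter.main(new String[] { "/path/to/srcIndex", "-d", "_1" });                 // delete segment _1 from srcIndex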
Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.
Class BaseSegmentInfoFormatTestCase, method testExceptionOnOpenInput.
/**
 * Tests a segment infos read that hits an exception immediately on open.
 * Make sure we get our exception back, no file handle leaks, etc.
 */
public void testExceptionOnOpenInput() throws Exception {
  Failure fail = new Failure() {
    @Override
    public void eval(MockDirectoryWrapper dir) throws IOException {
      for (StackTraceElement e : Thread.currentThread().getStackTrace()) {
        if (doFail && "openInput".equals(e.getMethodName())) {
          throw new FakeIOException();
        }
      }
    }
  };
  MockDirectoryWrapper dir = newMockDirectory();
  dir.failOn(fail);
  Codec codec = getCodec();
  byte[] id = StringHelper.randomId();
  SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, Collections.<String, String>emptyMap(), id, new HashMap<>(), null);
  info.setFiles(Collections.<String>emptySet());
  codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
  fail.setDoFail();
  expectThrows(FakeIOException.class, () -> {
    codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
  });
  fail.clearDoFail();
  dir.close();
}
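The fault-injection recipe here is reusable for any directory operation: subclass Failure, scan the call stack in eval, arm with setDoFail(), disarm with clearDoFail(). A minimal sketch of the same pattern targeting writes instead of opens; the method name "writeByte" is an illustrative choice, not taken from the test above:

MockDirectoryWrapper dir = newMockDirectory();
MockDirectoryWrapper.Failure fail = new MockDirectoryWrapper.Failure() {
  @Override
  public void eval(MockDirectoryWrapper d) throws IOException {
    // only fire while armed, and only when a writeByte call is on the stack
    for (StackTraceElement e : Thread.currentThread().getStackTrace()) {
      if (doFail && "writeByte".equals(e.getMethodName())) {
        throw new FakeIOException();
      }
    }
  }
};
dir.failOn(fail);
fail.setDoFail();   // arm: matching operations now throw
// ... exercise the code under test, expecting FakeIOException ...
fail.clearDoFail(); // disarm so cleanup can proceed
dir.close();        // also verifies no file handles leaked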
Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.
Class BaseStoredFieldsFormatTestCase, method testBigDocuments.
@Nightly
public void testBigDocuments() throws IOException {
  assumeWorkingMMapOnWindows();
  // "big" means "much bigger than the chunk size"
  // for this test we force an FS dir: we can't just use newFSDirectory, because this
  // test doesn't really index anything, so if we got NRTCachingDir+SimpleText we would
  // make massive stored fields and OOM (LUCENE-4484)
  Directory dir = new MockDirectoryWrapper(random(), new MMapDirectory(createTempDir("testBigDocuments")));
  IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
  iwConf.setMaxBufferedDocs(RandomNumbers.randomIntBetween(random(), 2, 30));
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) dir).setThrottling(Throttling.NEVER);
  }
  // a document with no "fld" values
  final Document emptyDoc = new Document();
  // a document with a lot of small fields
  final Document bigDoc1 = new Document();
  // a document with one very big field
  final Document bigDoc2 = new Document();
  final Field idField = new StringField("id", "", Store.NO);
  emptyDoc.add(idField);
  bigDoc1.add(idField);
  bigDoc2.add(idField);
  final FieldType onlyStored = new FieldType(StringField.TYPE_STORED);
  onlyStored.setIndexOptions(IndexOptions.NONE);
  final Field smallField = new Field("fld", randomByteArray(random().nextInt(10), 256), onlyStored);
  final int numFields = RandomNumbers.randomIntBetween(random(), 500000, 1000000);
  for (int i = 0; i < numFields; ++i) {
    bigDoc1.add(smallField);
  }
  final Field bigField = new Field("fld", randomByteArray(RandomNumbers.randomIntBetween(random(), 1000000, 5000000), 2), onlyStored);
  bigDoc2.add(bigField);
  final int numDocs = atLeast(5);
  final Document[] docs = new Document[numDocs];
  for (int i = 0; i < numDocs; ++i) {
    docs[i] = RandomPicks.randomFrom(random(), Arrays.asList(emptyDoc, bigDoc1, bigDoc2));
  }
  for (int i = 0; i < numDocs; ++i) {
    idField.setStringValue("" + i);
    iw.addDocument(docs[i]);
    if (random().nextInt(numDocs) == 0) {
      iw.commit();
    }
  }
  iw.commit();
  // look at what happens when big docs are merged
  iw.forceMerge(1);
  final DirectoryReader rd = DirectoryReader.open(dir);
  final IndexSearcher searcher = new IndexSearcher(rd);
  for (int i = 0; i < numDocs; ++i) {
    final Query query = new TermQuery(new Term("id", "" + i));
    final TopDocs topDocs = searcher.search(query, 1);
    assertEquals("" + i, 1, topDocs.totalHits);
    final Document doc = rd.document(topDocs.scoreDocs[0].doc);
    assertNotNull(doc);
    final IndexableField[] fieldValues = doc.getFields("fld");
    assertEquals(docs[i].getFields("fld").length, fieldValues.length);
    if (fieldValues.length > 0) {
      assertEquals(docs[i].getFields("fld")[0].binaryValue(), fieldValues[0].binaryValue());
    }
  }
  rd.close();
  iw.close();
  dir.close();
}
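randomByteArray is a private helper of BaseStoredFieldsFormatTestCase and is not shown in this snippet. A plausible sketch of it, under the assumption that it simply fills a fresh array with random byte values below max:

// assumed shape of the helper: length random bytes, each in [0, max)
private static byte[] randomByteArray(int length, int max) {
  final byte[] result = new byte[length];
  for (int i = 0; i < length; ++i) {
    result[i] = (byte) random().nextInt(max);
  }
  return result;
}

Note that the multi-megabyte bigField is built with max = 2, which presumably keeps the value highly compressible so the test can store it without ballooning the index.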
Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.
Class BaseSegmentInfoFormatTestCase, method testExceptionOnCloseInput.
/**
 * Tests a segment infos read that hits an exception on close.
 * Make sure we get our exception back, no file handle leaks, etc.
 */
public void testExceptionOnCloseInput() throws Exception {
  Failure fail = new Failure() {
    @Override
    public void eval(MockDirectoryWrapper dir) throws IOException {
      for (StackTraceElement e : Thread.currentThread().getStackTrace()) {
        if (doFail && "close".equals(e.getMethodName())) {
          throw new FakeIOException();
        }
      }
    }
  };
  MockDirectoryWrapper dir = newMockDirectory();
  dir.failOn(fail);
  Codec codec = getCodec();
  byte[] id = StringHelper.randomId();
  SegmentInfo info = new SegmentInfo(dir, getVersions()[0], getVersions()[0], "_123", 1, false, codec, Collections.<String, String>emptyMap(), id, new HashMap<>(), null);
  info.setFiles(Collections.<String>emptySet());
  codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
  fail.setDoFail();
  expectThrows(FakeIOException.class, () -> {
    codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
  });
  fail.clearDoFail();
  dir.close();
}
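Both exception tests end by closing the MockDirectoryWrapper, and that close is itself an assertion: the wrapper checks that no file handles leaked (and, unless disabled, that no unreferenced files remain). A minimal sketch of that safety net in isolation, with an illustrative file name:

MockDirectoryWrapper dir = newMockDirectory();
IndexOutput out = dir.createOutput("leaky", IOContext.DEFAULT);
out.writeByte((byte) 42);
out.close(); // if this line were missing, dir.close() below would fail the test
dir.close(); // verifies nothing is still open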