Use of org.apache.lucene.util.Version in project lucene-solr by apache.
From class BaseSegmentInfoFormatTestCase, method testRandom.
/**
 * Sets some otherwise hard-to-test properties (random segment names, ID values,
 * document counts, etc.) and round-trips them through the codec.
 */
public void testRandom() throws Exception {
  Codec codec = getCodec();
  Version[] versions = getVersions();
  for (int i = 0; i < 10; i++) {
    Directory dir = newDirectory();
    Version version = versions[random().nextInt(versions.length)];
    String name = "_" + Integer.toString(random().nextInt(Integer.MAX_VALUE), Character.MAX_RADIX);
    int docCount = TestUtil.nextInt(random(), 1, IndexWriter.MAX_DOCS);
    boolean isCompoundFile = random().nextBoolean();
    Set<String> files = new HashSet<>();
    int numFiles = random().nextInt(10);
    for (int j = 0; j < numFiles; j++) {
      String file = IndexFileNames.segmentFileName(name, "", Integer.toString(j));
      files.add(file);
      dir.createOutput(file, IOContext.DEFAULT).close();
    }
    Map<String, String> diagnostics = new HashMap<>();
    int numDiags = random().nextInt(10);
    for (int j = 0; j < numDiags; j++) {
      diagnostics.put(TestUtil.randomUnicodeString(random()), TestUtil.randomUnicodeString(random()));
    }
    byte[] id = new byte[StringHelper.ID_LENGTH];
    random().nextBytes(id);
    Map<String, String> attributes = new HashMap<>();
    int numAttributes = random().nextInt(10);
    for (int j = 0; j < numAttributes; j++) {
      attributes.put(TestUtil.randomUnicodeString(random()), TestUtil.randomUnicodeString(random()));
    }
    SegmentInfo info = new SegmentInfo(dir, version, null, name, docCount, isCompoundFile, codec, diagnostics, id, attributes, null);
    info.setFiles(files);
    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
    SegmentInfo info2 = codec.segmentInfoFormat().read(dir, name, id, IOContext.DEFAULT);
    assertEquals(info, info2);
    dir.close();
  }
}
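As a quick illustration of the naming scheme used above, the following sketch shows how the test builds a random segment name in base 36 and how IndexFileNames.segmentFileName turns it into a per-segment file name. The class name SegmentNameDemo and the "si" extension here are illustrative assumptions, not part of the test.

import java.util.Random;

import org.apache.lucene.index.IndexFileNames;

public class SegmentNameDemo {
  public static void main(String[] args) {
    Random random = new Random();
    // Same scheme as the test: "_" plus a random non-negative int rendered in base 36.
    String name = "_" + Integer.toString(random.nextInt(Integer.MAX_VALUE), Character.MAX_RADIX);
    // With an empty segment suffix, segmentFileName appends only ".<extension>".
    String file = IndexFileNames.segmentFileName(name, "", "si");
    System.out.println(name + " -> " + file); // e.g. "_k3f9z -> _k3f9z.si"
  }
}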
Use of org.apache.lucene.util.Version in project lucene-solr by apache.
From class BaseSegmentInfoFormatTestCase, method testVersions.
/** Tests that segment versions (and min versions) round-trip through the format. */
public void testVersions() throws Exception {
  Codec codec = getCodec();
  for (Version v : getVersions()) {
    for (Version minV : new Version[] { v, null }) {
      Directory dir = newDirectory();
      byte[] id = StringHelper.randomId();
      SegmentInfo info = new SegmentInfo(dir, v, minV, "_123", 1, false, codec, Collections.<String, String>emptyMap(), id, new HashMap<>(), null);
      info.setFiles(Collections.<String>emptySet());
      codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
      SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
      assertEquals(info2.getVersion(), v);
      if (supportsMinVersion()) {
        assertEquals(info2.getMinVersion(), minV);
      } else {
        assertEquals(info2.getMinVersion(), null);
      }
      dir.close();
    }
  }
}
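For context, here is a minimal sketch of the org.apache.lucene.util.Version API these tests exercise: a Version carries major/minor/bugfix components, can be rebuilt from those components with Version.fromBits (as the .si reader below does), and can be parsed from its dotted string form. The class name VersionBasicsDemo and the printed values are illustrative assumptions.

import org.apache.lucene.util.Version;

public class VersionBasicsDemo {
  public static void main(String[] args) throws Exception {
    Version latest = Version.LATEST;
    // Rebuild a Version from its numeric components, as the segment-info reader does.
    Version rebuilt = Version.fromBits(latest.major, latest.minor, latest.bugfix);
    System.out.println(rebuilt); // prints the dotted form, e.g. "7.0.0"
    // Versions can also be parsed back from their dotted string representation.
    Version parsed = Version.parse(latest.toString());
    System.out.println(parsed.equals(latest)); // true
  }
}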
Use of org.apache.lucene.util.Version in project lucene-solr by apache.
From class Lucene50RWSegmentInfoFormat, method write.
@Override
public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene50SegmentInfoFormat.SI_EXTENSION);
  assert si.getIndexSort() == null;
  try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
    // Only add the file once we've successfully created it, else IFD assert can trip:
    si.addFile(fileName);
    CodecUtil.writeIndexHeader(output, Lucene50SegmentInfoFormat.CODEC_NAME, Lucene50SegmentInfoFormat.VERSION_CURRENT, si.getId(), "");
    Version version = si.getVersion();
    if (version.major < 5) {
      throw new IllegalArgumentException("invalid major version: should be >= 5 but got: " + version.major + " segment=" + si);
    }
    // Write the Lucene version that created this segment, since 3.1
    output.writeInt(version.major);
    output.writeInt(version.minor);
    output.writeInt(version.bugfix);
    assert version.prerelease == 0;
    output.writeInt(si.maxDoc());
    output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
    output.writeMapOfStrings(si.getDiagnostics());
    Set<String> files = si.files();
    for (String file : files) {
      if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
        throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
      }
    }
    output.writeSetOfStrings(files);
    output.writeMapOfStrings(si.getAttributes());
    CodecUtil.writeFooter(output);
  }
}
Use of org.apache.lucene.util.Version in project lucene-solr by apache.
From class Lucene50RWSegmentInfoFormat, method read.
@Override
public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene50SegmentInfoFormat.SI_EXTENSION);
  try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
    Throwable priorE = null;
    SegmentInfo si = null;
    try {
      CodecUtil.checkIndexHeader(input, Lucene50SegmentInfoFormat.CODEC_NAME, Lucene50SegmentInfoFormat.VERSION_START, Lucene50SegmentInfoFormat.VERSION_CURRENT, segmentID, "");
      final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
      final int docCount = input.readInt();
      if (docCount < 0) {
        throw new CorruptIndexException("invalid docCount: " + docCount, input);
      }
      final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
      final Map<String, String> diagnostics = input.readMapOfStrings();
      final Set<String> files = input.readSetOfStrings();
      final Map<String, String> attributes = input.readMapOfStrings();
      si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null);
      si.setFiles(files);
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      CodecUtil.checkFooter(input, priorE);
    }
    return si;
  }
}
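To see a write/read pair in action outside the test framework, a minimal round-trip sketch follows. Note the assumptions: it uses the current default codec's SegmentInfoFormat rather than the Lucene50 read-write format shown above, and the class name SegmentInfoRoundTrip and the choice of RAMDirectory are for illustration only.

import java.util.Collections;
import java.util.HashMap;

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.Version;

public class SegmentInfoRoundTrip {
  public static void main(String[] args) throws Exception {
    try (Directory dir = new RAMDirectory()) {
      Codec codec = Codec.getDefault();
      byte[] id = StringHelper.randomId();
      // A one-document, non-compound segment named "_0" with no diagnostics or attributes.
      SegmentInfo info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", 1, false,
          codec, Collections.<String, String>emptyMap(), id, new HashMap<>(), null);
      info.setFiles(Collections.<String>emptySet());
      codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
      SegmentInfo read = codec.segmentInfoFormat().read(dir, "_0", id, IOContext.DEFAULT);
      System.out.println(read.getVersion()); // same version that was written
    }
  }
}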
Use of org.apache.lucene.util.Version in project lucene-solr by apache.
From class TestBackwardsCompatibility, method testOldVersions.
/**
 * Test that we didn't forget to bump the current Constants.LUCENE_MAIN_VERSION.
 * This is important so that we can determine which version of Lucene wrote the segment.
 */
public void testOldVersions() throws Exception {
  // first create a little index with the current code and get the version
  Directory currentDir = newDirectory();
  RandomIndexWriter riw = new RandomIndexWriter(random(), currentDir);
  riw.addDocument(new Document());
  riw.close();
  DirectoryReader ir = DirectoryReader.open(currentDir);
  SegmentReader air = (SegmentReader) ir.leaves().get(0).reader();
  Version currentVersion = air.getSegmentInfo().info.getVersion();
  // only 3.0 segments can have a null version
  assertNotNull(currentVersion);
  ir.close();
  currentDir.close();
  // now check all the old indexes, their version should be < the current version
  for (String name : oldNames) {
    Directory dir = oldIndexDirs.get(name);
    DirectoryReader r = DirectoryReader.open(dir);
    for (LeafReaderContext context : r.leaves()) {
      air = (SegmentReader) context.reader();
      Version oldVersion = air.getSegmentInfo().info.getVersion();
      // only 3.0 segments can have a null version
      assertNotNull(oldVersion);
      assertTrue("current Version.LATEST is <= an old index: did you forget to bump it?!", currentVersion.onOrAfter(oldVersion));
    }
    r.close();
  }
}
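The assertion above hinges on Version.onOrAfter, which returns true when the receiver is the same as or newer than its argument. A tiny sketch follows; the class name OnOrAfterDemo and the derived "older" version are illustrative assumptions.

import org.apache.lucene.util.Version;

public class OnOrAfterDemo {
  public static void main(String[] args) {
    Version current = Version.LATEST;
    // An older version built from the previous major release, purely for illustration.
    Version older = Version.fromBits(current.major - 1, 0, 0);
    System.out.println(current.onOrAfter(older)); // true: current is the same or newer
    System.out.println(older.onOrAfter(current)); // false: older predates current
    // testOldVersions asserts currentVersion.onOrAfter(oldVersion) for every back-compat index.
  }
}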