Use of org.apache.lucene.store.ChecksumIndexInput in project lucene-solr by apache.
From the class TestCodecUtil, the method testCheckFooterValidAtFooter:
public void testCheckFooterValidAtFooter() throws Exception {
  // write header + payload + footer to an in-memory file
  RAMFile file = new RAMFile();
  IndexOutput output = new RAMOutputStream(file, true);
  CodecUtil.writeHeader(output, "FooBar", 5);
  output.writeString("this is the data");
  CodecUtil.writeFooter(output);
  output.close();
  // read it back through a checksumming input
  ChecksumIndexInput input = new BufferedChecksumIndexInput(new RAMInputStream("file", file));
  CodecUtil.checkHeader(input, "FooBar", 5, 5);
  assertEquals("this is the data", input.readString());
  // checkFooter rethrows the caller-supplied exception and, since the checksum
  // itself is fine, attaches a "checksum passed" note as a suppressed exception
  Exception mine = new RuntimeException("fake exception");
  RuntimeException expected = expectThrows(RuntimeException.class, () -> {
    CodecUtil.checkFooter(input, mine);
  });
  assertEquals("fake exception", expected.getMessage());
  Throwable[] suppressed = expected.getSuppressed();
  assertEquals(1, suppressed.length);
  assertTrue(suppressed[0].getMessage().contains("checksum passed"));
  input.close();
}
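This test exercises CodecUtil.checkFooter when a prior exception is passed in: the fake exception is rethrown and, because the checksum itself is valid, a "checksum passed" note is attached as a suppressed exception. For orientation, below is a minimal sketch, not taken from lucene-solr, of the read pattern the remaining examples on this page follow: open a checksum-verifying input, remember any exception thrown while reading the body, and always call checkFooter so checksum problems are reported even if an earlier read failed. The helper class, method name, codec name "MyCodec", and version bounds are placeholders.

import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

// Hypothetical helper, not part of lucene-solr: reads a single long from a
// file written with CodecUtil.writeIndexHeader / writeFooter.
final class ChecksumReadSketch {
  static long readSingleLong(Directory dir, String fileName, byte[] segmentID, IOContext context) throws IOException {
    try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
      Throwable priorE = null;
      long value = 0;
      try {
        // codec name "MyCodec" and version bounds 0/0 are placeholders
        CodecUtil.checkIndexHeader(input, "MyCodec", 0, 0, segmentID, "");
        value = input.readLong();
      } catch (Throwable t) {
        priorE = t;                            // remember the failure ...
      } finally {
        CodecUtil.checkFooter(input, priorE);  // ... but still verify the checksum
      }
      return value;
    }
  }
}

Handing the prior exception to checkFooter, rather than letting it propagate directly, is what allows a corruption message to be attached when both the body read and the checksum verification fail.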
Use of org.apache.lucene.store.ChecksumIndexInput in project lucene-solr by apache.
From the class MockRandomPostingsFormat, the method fieldsProducer:
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
  final String seedFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, SEED_EXT);
  final ChecksumIndexInput in = state.directory.openChecksumInput(seedFileName, state.context);
  CodecUtil.checkIndexHeader(in, "MockRandomSeed", 0, 0, state.segmentInfo.getId(), state.segmentSuffix);
  final long seed = in.readLong();
  CodecUtil.checkFooter(in);
  if (LuceneTestCase.VERBOSE) {
    System.out.println("MockRandomCodec: reading from seg=" + state.segmentInfo.name + " formatID=" + state.segmentSuffix + " seed=" + seed);
  }
  in.close();
  final Random random = new Random(seed);
  int readBufferSize = TestUtil.nextInt(random, 1, 4096);
  if (LuceneTestCase.VERBOSE) {
    System.out.println("MockRandomCodec: readBufferSize=" + readBufferSize);
  }
  PostingsReaderBase postingsReader = new Lucene50PostingsReader(state);
  final FieldsProducer fields;
  final int t1 = random.nextInt(5);
  if (t1 == 0) {
    boolean success = false;
    try {
      fields = new FSTTermsReader(state, postingsReader);
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
  } else if (t1 == 1) {
    boolean success = false;
    try {
      fields = new FSTOrdTermsReader(state, postingsReader);
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
  } else if (t1 == 2) {
    // Use BlockTree terms dict
    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: reading BlockTree terms dict");
    }
    boolean success = false;
    try {
      fields = new BlockTreeTermsReader(postingsReader, state);
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
  } else if (t1 == 3) {
    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: reading Block terms dict");
    }
    final TermsIndexReaderBase indexReader;
    boolean success = false;
    try {
      final boolean doFixedGap = random.nextBoolean();
      if (doFixedGap) {
        if (LuceneTestCase.VERBOSE) {
          System.out.println("MockRandomCodec: fixed-gap terms index");
        }
        indexReader = new FixedGapTermsIndexReader(state);
      } else {
        final int n2 = random.nextInt(3);
        if (n2 == 1) {
          random.nextInt();
        } else if (n2 == 2) {
          random.nextLong();
        }
        if (LuceneTestCase.VERBOSE) {
          System.out.println("MockRandomCodec: variable-gap terms index");
        }
        indexReader = new VariableGapTermsIndexReader(state);
      }
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
    success = false;
    try {
      fields = new BlockTermsReader(indexReader, postingsReader, state);
      success = true;
    } finally {
      if (!success) {
        try {
          postingsReader.close();
        } finally {
          indexReader.close();
        }
      }
    }
  } else if (t1 == 4) {
    // Use OrdsBlockTree terms dict
    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: reading OrdsBlockTree terms dict");
    }
    boolean success = false;
    try {
      fields = new OrdsBlockTreeTermsReader(postingsReader, state);
      success = true;
    } finally {
      if (!success) {
        postingsReader.close();
      }
    }
  } else {
    // BUG!
    throw new AssertionError();
  }
  return fields;
}
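The seed file consumed here is produced by the format's fieldsConsumer, which is not reproduced on this page. As a hedged sketch of what that write side presumably looks like, mirroring the reads above, the fragment below assumes a SegmentWriteState named state, the class's SEED_EXT constant, and a previously chosen long seed:

// Presumed write side of the seed file read in fieldsProducer above; the
// actual MockRandomPostingsFormat.fieldsConsumer is not shown on this page.
final String seedFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, SEED_EXT);
try (IndexOutput out = state.directory.createOutput(seedFileName, state.context)) {
  CodecUtil.writeIndexHeader(out, "MockRandomSeed", 0, state.segmentInfo.getId(), state.segmentSuffix);
  out.writeLong(seed);         // the per-segment random seed
  CodecUtil.writeFooter(out);  // footer carries the checksum later verified by checkFooter
}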
Use of org.apache.lucene.store.ChecksumIndexInput in project lucene-solr by apache.
From the class Lucene62SegmentInfoFormat, the method read:
@Override
public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene62SegmentInfoFormat.SI_EXTENSION);
  try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
    Throwable priorE = null;
    SegmentInfo si = null;
    try {
      int format = CodecUtil.checkIndexHeader(input, Lucene62SegmentInfoFormat.CODEC_NAME, Lucene62SegmentInfoFormat.VERSION_START, Lucene62SegmentInfoFormat.VERSION_CURRENT, segmentID, "");
      final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
      final int docCount = input.readInt();
      if (docCount < 0) {
        throw new CorruptIndexException("invalid docCount: " + docCount, input);
      }
      final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
      final Map<String, String> diagnostics = input.readMapOfStrings();
      final Set<String> files = input.readSetOfStrings();
      final Map<String, String> attributes = input.readMapOfStrings();
      int numSortFields = input.readVInt();
      Sort indexSort;
      if (numSortFields > 0) {
        SortField[] sortFields = new SortField[numSortFields];
        for (int i = 0; i < numSortFields; i++) {
          String fieldName = input.readString();
          int sortTypeID = input.readVInt();
          SortField.Type sortType;
          SortedSetSelector.Type sortedSetSelector = null;
          SortedNumericSelector.Type sortedNumericSelector = null;
          switch (sortTypeID) {
            case 0:
              sortType = SortField.Type.STRING;
              break;
            case 1:
              sortType = SortField.Type.LONG;
              break;
            case 2:
              sortType = SortField.Type.INT;
              break;
            case 3:
              sortType = SortField.Type.DOUBLE;
              break;
            case 4:
              sortType = SortField.Type.FLOAT;
              break;
            case 5:
              sortType = SortField.Type.STRING;
              byte selector = input.readByte();
              if (selector == 0) {
                sortedSetSelector = SortedSetSelector.Type.MIN;
              } else if (selector == 1) {
                sortedSetSelector = SortedSetSelector.Type.MAX;
              } else if (selector == 2) {
                sortedSetSelector = SortedSetSelector.Type.MIDDLE_MIN;
              } else if (selector == 3) {
                sortedSetSelector = SortedSetSelector.Type.MIDDLE_MAX;
              } else {
                throw new CorruptIndexException("invalid index SortedSetSelector ID: " + selector, input);
              }
              break;
            case 6:
              byte type = input.readByte();
              if (type == 0) {
                sortType = SortField.Type.LONG;
              } else if (type == 1) {
                sortType = SortField.Type.INT;
              } else if (type == 2) {
                sortType = SortField.Type.DOUBLE;
              } else if (type == 3) {
                sortType = SortField.Type.FLOAT;
              } else {
                throw new CorruptIndexException("invalid index SortedNumericSortField type ID: " + type, input);
              }
              byte numericSelector = input.readByte();
              if (numericSelector == 0) {
                sortedNumericSelector = SortedNumericSelector.Type.MIN;
              } else if (numericSelector == 1) {
                sortedNumericSelector = SortedNumericSelector.Type.MAX;
              } else {
                throw new CorruptIndexException("invalid index SortedNumericSelector ID: " + numericSelector, input);
              }
              break;
            default:
              throw new CorruptIndexException("invalid index sort field type ID: " + sortTypeID, input);
          }
          byte b = input.readByte();
          boolean reverse;
          if (b == 0) {
            reverse = true;
          } else if (b == 1) {
            reverse = false;
          } else {
            throw new CorruptIndexException("invalid index sort reverse: " + b, input);
          }
          if (sortedSetSelector != null) {
            sortFields[i] = new SortedSetSortField(fieldName, reverse, sortedSetSelector);
          } else if (sortedNumericSelector != null) {
            sortFields[i] = new SortedNumericSortField(fieldName, sortType, reverse, sortedNumericSelector);
          } else {
            sortFields[i] = new SortField(fieldName, sortType, reverse);
          }
          Object missingValue;
          b = input.readByte();
          if (b == 0) {
            missingValue = null;
          } else {
            switch (sortType) {
              case STRING:
                if (b == 1) {
                  missingValue = SortField.STRING_LAST;
                } else if (b == 2) {
                  missingValue = SortField.STRING_FIRST;
                } else {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                break;
              case LONG:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = input.readLong();
                break;
              case INT:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = input.readInt();
                break;
              case DOUBLE:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = Double.longBitsToDouble(input.readLong());
                break;
              case FLOAT:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = Float.intBitsToFloat(input.readInt());
                break;
              default:
                throw new AssertionError("unhandled sortType=" + sortType);
            }
          }
          if (missingValue != null) {
            sortFields[i].setMissingValue(missingValue);
          }
        }
        indexSort = new Sort(sortFields);
      } else if (numSortFields < 0) {
        throw new CorruptIndexException("invalid index sort field count: " + numSortFields, input);
      } else {
        indexSort = null;
      }
      si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
      si.setFiles(files);
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      CodecUtil.checkFooter(input, priorE);
    }
    return si;
  }
}
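A brief, hypothetical caller for the method above; "_0" and segmentID are placeholders for a real segment name and its 16-byte id, and dir is assumed to be an open Directory containing that segment:

// Hypothetical usage: read a segment's metadata back and inspect it.
SegmentInfo si = new Lucene62SegmentInfoFormat().read(dir, "_0", segmentID, IOContext.READ);
System.out.println("maxDoc=" + si.maxDoc() + " sort=" + si.getIndexSort());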
Use of org.apache.lucene.store.ChecksumIndexInput in project lucene-solr by apache.
From the class Lucene60FieldInfosFormat, the method read:
@Override
public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
  try (ChecksumIndexInput input = directory.openChecksumInput(fileName, context)) {
    Throwable priorE = null;
    FieldInfo[] infos = null;
    try {
      CodecUtil.checkIndexHeader(input, Lucene60FieldInfosFormat.CODEC_NAME, Lucene60FieldInfosFormat.FORMAT_START, Lucene60FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
      // read in the size
      final int size = input.readVInt();
      infos = new FieldInfo[size];
      // previous field's attribute map, we share when possible:
      Map<String, String> lastAttributes = Collections.emptyMap();
      for (int i = 0; i < size; i++) {
        String name = input.readString();
        final int fieldNumber = input.readVInt();
        if (fieldNumber < 0) {
          throw new CorruptIndexException("invalid field number for field: " + name + ", fieldNumber=" + fieldNumber, input);
        }
        byte bits = input.readByte();
        boolean storeTermVector = (bits & STORE_TERMVECTOR) != 0;
        boolean omitNorms = (bits & OMIT_NORMS) != 0;
        boolean storePayloads = (bits & STORE_PAYLOADS) != 0;
        final IndexOptions indexOptions = getIndexOptions(input, input.readByte());
        // DV Types are packed in one byte
        final DocValuesType docValuesType = getDocValuesType(input, input.readByte());
        final long dvGen = input.readLong();
        Map<String, String> attributes = input.readMapOfStrings();
        // just use the last field's map if it's the same
        if (attributes.equals(lastAttributes)) {
          attributes = lastAttributes;
        }
        lastAttributes = attributes;
        int pointDimensionCount = input.readVInt();
        int pointNumBytes;
        if (pointDimensionCount != 0) {
          pointNumBytes = input.readVInt();
        } else {
          pointNumBytes = 0;
        }
        try {
          infos[i] = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValuesType, dvGen, attributes, pointDimensionCount, pointNumBytes);
          infos[i].checkConsistency();
        } catch (IllegalStateException e) {
          throw new CorruptIndexException("invalid fieldinfo for field: " + name + ", fieldNumber=" + fieldNumber, input, e);
        }
      }
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      CodecUtil.checkFooter(input, priorE);
    }
    return new FieldInfos(infos);
  }
}
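Similarly, a hypothetical caller for this FieldInfos read; segmentInfo is assumed to describe an existing segment in dir:

// Hypothetical usage: list the fields recorded for a segment.
FieldInfos fieldInfos = new Lucene60FieldInfosFormat().read(dir, segmentInfo, "", IOContext.READ);
for (FieldInfo fi : fieldInfos) {
  System.out.println(fi.name + " indexOptions=" + fi.getIndexOptions() + " docValues=" + fi.getDocValuesType());
}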
Use of org.apache.lucene.store.ChecksumIndexInput in project lucene-solr by apache.
From the class Lucene70SegmentInfoFormat, the method read:
@Override
public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene70SegmentInfoFormat.SI_EXTENSION);
  try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
    Throwable priorE = null;
    SegmentInfo si = null;
    try {
      int format = CodecUtil.checkIndexHeader(input, Lucene70SegmentInfoFormat.CODEC_NAME, Lucene70SegmentInfoFormat.VERSION_START, Lucene70SegmentInfoFormat.VERSION_CURRENT, segmentID, "");
      final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
      byte hasMinVersion = input.readByte();
      final Version minVersion;
      switch (hasMinVersion) {
        case 0:
          minVersion = null;
          break;
        case 1:
          minVersion = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
          break;
        default:
          throw new CorruptIndexException("Illegal boolean value " + hasMinVersion, input);
      }
      final int docCount = input.readInt();
      if (docCount < 0) {
        throw new CorruptIndexException("invalid docCount: " + docCount, input);
      }
      final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
      final Map<String, String> diagnostics = input.readMapOfStrings();
      final Set<String> files = input.readSetOfStrings();
      final Map<String, String> attributes = input.readMapOfStrings();
      int numSortFields = input.readVInt();
      Sort indexSort;
      if (numSortFields > 0) {
        SortField[] sortFields = new SortField[numSortFields];
        for (int i = 0; i < numSortFields; i++) {
          String fieldName = input.readString();
          int sortTypeID = input.readVInt();
          SortField.Type sortType;
          SortedSetSelector.Type sortedSetSelector = null;
          SortedNumericSelector.Type sortedNumericSelector = null;
          switch (sortTypeID) {
            case 0:
              sortType = SortField.Type.STRING;
              break;
            case 1:
              sortType = SortField.Type.LONG;
              break;
            case 2:
              sortType = SortField.Type.INT;
              break;
            case 3:
              sortType = SortField.Type.DOUBLE;
              break;
            case 4:
              sortType = SortField.Type.FLOAT;
              break;
            case 5:
              sortType = SortField.Type.STRING;
              byte selector = input.readByte();
              if (selector == 0) {
                sortedSetSelector = SortedSetSelector.Type.MIN;
              } else if (selector == 1) {
                sortedSetSelector = SortedSetSelector.Type.MAX;
              } else if (selector == 2) {
                sortedSetSelector = SortedSetSelector.Type.MIDDLE_MIN;
              } else if (selector == 3) {
                sortedSetSelector = SortedSetSelector.Type.MIDDLE_MAX;
              } else {
                throw new CorruptIndexException("invalid index SortedSetSelector ID: " + selector, input);
              }
              break;
            case 6:
              byte type = input.readByte();
              if (type == 0) {
                sortType = SortField.Type.LONG;
              } else if (type == 1) {
                sortType = SortField.Type.INT;
              } else if (type == 2) {
                sortType = SortField.Type.DOUBLE;
              } else if (type == 3) {
                sortType = SortField.Type.FLOAT;
              } else {
                throw new CorruptIndexException("invalid index SortedNumericSortField type ID: " + type, input);
              }
              byte numericSelector = input.readByte();
              if (numericSelector == 0) {
                sortedNumericSelector = SortedNumericSelector.Type.MIN;
              } else if (numericSelector == 1) {
                sortedNumericSelector = SortedNumericSelector.Type.MAX;
              } else {
                throw new CorruptIndexException("invalid index SortedNumericSelector ID: " + numericSelector, input);
              }
              break;
            default:
              throw new CorruptIndexException("invalid index sort field type ID: " + sortTypeID, input);
          }
          byte b = input.readByte();
          boolean reverse;
          if (b == 0) {
            reverse = true;
          } else if (b == 1) {
            reverse = false;
          } else {
            throw new CorruptIndexException("invalid index sort reverse: " + b, input);
          }
          if (sortedSetSelector != null) {
            sortFields[i] = new SortedSetSortField(fieldName, reverse, sortedSetSelector);
          } else if (sortedNumericSelector != null) {
            sortFields[i] = new SortedNumericSortField(fieldName, sortType, reverse, sortedNumericSelector);
          } else {
            sortFields[i] = new SortField(fieldName, sortType, reverse);
          }
          Object missingValue;
          b = input.readByte();
          if (b == 0) {
            missingValue = null;
          } else {
            switch (sortType) {
              case STRING:
                if (b == 1) {
                  missingValue = SortField.STRING_LAST;
                } else if (b == 2) {
                  missingValue = SortField.STRING_FIRST;
                } else {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                break;
              case LONG:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = input.readLong();
                break;
              case INT:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = input.readInt();
                break;
              case DOUBLE:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = Double.longBitsToDouble(input.readLong());
                break;
              case FLOAT:
                if (b != 1) {
                  throw new CorruptIndexException("invalid missing value flag: " + b, input);
                }
                missingValue = Float.intBitsToFloat(input.readInt());
                break;
              default:
                throw new AssertionError("unhandled sortType=" + sortType);
            }
          }
          if (missingValue != null) {
            sortFields[i].setMissingValue(missingValue);
          }
        }
        indexSort = new Sort(sortFields);
      } else if (numSortFields < 0) {
        throw new CorruptIndexException("invalid index sort field count: " + numSortFields, input);
      } else {
        indexSort = null;
      }
      si = new SegmentInfo(dir, version, minVersion, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
      si.setFiles(files);
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      CodecUtil.checkFooter(input, priorE);
    }
    return si;
  }
}