Use of com.hedera.mirror.common.domain.transaction.RecordFile in the project hedera-mirror-node by hashgraph: class BlockNumberMigrationTest, method ifBlockNumberIsAlreadyCorrectDoNothing.
@Test
void ifBlockNumberIsAlreadyCorrectDoNothing() {
    // Seed the default record files, omitting the one at CORRECT_CONSENSUS_END,
    // and capture the (consensusEnd, index) pairs that must remain untouched.
    List<Tuple> expected = insertDefaultRecordFiles(Set.of(CORRECT_CONSENSUS_END))
            .stream()
            .map(rf -> Tuple.tuple(rf.getConsensusEnd(), rf.getIndex()))
            .collect(Collectors.toList());

    // Persist the target record file already carrying the correct block number.
    final RecordFile target = domainBuilder.recordFile()
            .customize(b -> b.consensusEnd(CORRECT_CONSENSUS_END).index(CORRECT_BLOCK_NUMBER))
            .persist();
    expected.add(Tuple.tuple(target.getConsensusEnd(), target.getIndex()));

    blockNumberMigration.doMigrate();

    // The migration must be a no-op: every pair stays exactly as persisted.
    assertConsensusEndAndBlockNumber(expected);
}
Use of com.hedera.mirror.common.domain.transaction.RecordFile in the project hedera-mirror-node by hashgraph: class BlockNumberMigrationTest, method insertDefaultRecordFiles.
/**
 * Persists the default set of record files, skipping any whose consensus end
 * timestamp appears in the given set.
 *
 * @param skipRecordFileWithConsensusEnd consensus end values to exclude from insertion
 * @return the record files that were actually persisted, in insertion order
 */
private List<RecordFile> insertDefaultRecordFiles(Set<Long> skipRecordFileWithConsensusEnd) {
    long[] consensusEnds = {1570800761443132000L, CORRECT_CONSENSUS_END, 1570801906238879002L};
    long[] blockNumbers = {0L, 8L, 9L};
    List<RecordFile> persisted = new ArrayList<>(consensusEnds.length);
    for (int idx = 0; idx < consensusEnds.length; idx++) {
        long end = consensusEnds[idx];
        if (!skipRecordFileWithConsensusEnd.contains(end)) {
            long number = blockNumbers[idx];
            persisted.add(domainBuilder.recordFile()
                    .customize(b -> b.consensusEnd(end).index(number))
                    .persist());
        }
    }
    return persisted;
}
Use of com.hedera.mirror.common.domain.transaction.RecordFile in the project hedera-mirror-node by hashgraph: class AbstractDownloaderTest, method differentFilenames.
/**
 * Verifies that node 0.0.3's files renamed to a timestamp shifted by the close
 * interval plus the given offset are not treated as the expected stream files.
 *
 * @param offset duration added to the close interval when building the shifted filename
 * @throws Exception if copying or renaming the fixture files fails
 */
private void differentFilenames(Duration offset) throws Exception {
    mirrorProperties.setStartBlockNumber(null);

    // Copy all files and modify only node 0.0.3's files to have a different timestamp
    fileCopier.filterFiles(file2 + "*").copy();
    Path basePath = fileCopier.getTo().resolve(streamType.getNodePrefix() + "0.0.3");

    // Construct a new filename with the offset added to the last valid file
    long nanoOffset = getCloseInterval().plus(offset).toNanos();
    Instant instant = file1Instant.plusNanos(nanoOffset);

    // Rename the good files to have a bad timestamp
    String data = StreamFilename.getFilename(streamType, DATA, instant);
    String signature = StreamFilename.getFilename(streamType, SIGNATURE, instant);
    Files.move(basePath.resolve(file2), basePath.resolve(data));
    Files.move(basePath.resolve(file2 + "_sig"), basePath.resolve(signature));

    // Fix: removed a dead local (a RecordFile was constructed, named file1, and
    // never used) — it had no effect on the test and only obscured its intent.
    expectLastStreamFile(Instant.EPOCH);
    downloader.download();
    verifyStreamFiles(List.of(file2));
}
Use of com.hedera.mirror.common.domain.transaction.RecordFile in the project hedera-mirror-node by hashgraph: class RecordFileV5DownloaderTest, method getRecordFileMap.
/**
 * Supplies the expected V5 record files for this downloader test, keyed by filename.
 *
 * @return an immutable map of record file name to its expected {@link RecordFile}
 */
@Override
protected Map<String, RecordFile> getRecordFileMap() {
    var all = TestRecordFiles.getAll();
    var first = all.get("2021-01-11T22_09_24.063739000Z.rcd");
    var second = all.get("2021-01-11T22_09_34.097416003Z.rcd");
    return Map.of(first.getName(), first, second.getName(), second);
}
Use of com.hedera.mirror.common.domain.transaction.RecordFile in the project hedera-mirror-node by hashgraph: class RecordFileReaderImplV5, method read.
/**
 * Reads a V5 record stream file, computing the full-file hash and the metadata
 * hash in a single pass over the input.
 *
 * @param streamFileData the raw stream file bytes plus its filename
 * @return the parsed {@link RecordFile} with name, bytes, hashes, header and body populated
 * @throws StreamFileReaderException if an I/O error occurs while reading the file
 */
@Override
public RecordFile read(StreamFileData streamFileData) {
// Two independent digests over the same stream: one for the whole file, one for metadata.
MessageDigest messageDigestFile = createMessageDigest(DIGEST_ALGORITHM);
MessageDigest messageDigestMetadata = createMessageDigest(DIGEST_ALGORITHM);
String filename = streamFileData.getFilename();
// first DigestInputStream is needed to avoid digesting some class ID fields twice.
// Layering: the inner DigestInputStream (messageDigestFile) sees every raw byte read;
// the outer one (messageDigestMetadata) sits above the BufferedInputStream.
// NOTE(review): readBody is handed the outer digestInputStream — presumably so it can
// toggle the metadata digest on/off around non-metadata sections; confirm in readBody.
try (DigestInputStream digestInputStream = new DigestInputStream(new BufferedInputStream(new DigestInputStream(streamFileData.getInputStream(), messageDigestFile)), messageDigestMetadata);
ValidatedDataInputStream vdis = new ValidatedDataInputStream(digestInputStream, filename)) {
RecordFile recordFile = new RecordFile();
recordFile.setBytes(streamFileData.getBytes());
recordFile.setDigestAlgorithm(DIGEST_ALGORITHM);
// Load start is recorded in epoch seconds.
recordFile.setLoadStart(Instant.now().getEpochSecond());
recordFile.setName(filename);
readHeader(vdis, recordFile);
readBody(vdis, digestInputStream, recordFile);
// Finalize both digests only after the header and body have been fully consumed.
recordFile.setFileHash(Hex.encodeHexString(messageDigestFile.digest()));
recordFile.setMetadataHash(Hex.encodeHexString(messageDigestMetadata.digest()));
return recordFile;
} catch (IOException e) {
// Wrap as the reader's domain exception, preserving the cause and filename context.
throw new StreamFileReaderException("Error reading record file " + filename, e);
}
}
Aggregations