Use of com.hedera.mirror.importer.exception.HashMismatchException in the hedera-mirror-node project by hashgraph.
The following example is from the Downloader class, method verifySigsAndDownloadDataFiles.
/**
 * For each group of signature files with the same file name: (1) verify that the signature files are signed by
 * the corresponding node's public key; (2) for valid signature files, compare their hashes to see if at least 1/3
 * match. If they do, download the corresponding data file from a node folder that has a valid signature file;
 * (3) compare the hash of the data file with the hash agreed on by the valid signatures. If they match, move the
 * data file into the `valid` directory; otherwise download the data file from another valid node folder and
 * compare the hash until a match is found.
 *
 * @param sigFilesMap signature files grouped by filename
 */
private void verifySigsAndDownloadDataFiles(Multimap<String, FileStreamSignature> sigFilesMap) {
    Instant endDate = mirrorProperties.getEndDate();

    for (var sigFilenameIter = sigFilesMap.keySet().iterator(); sigFilenameIter.hasNext(); ) {
        if (ShutdownHelper.isStopping()) {
            return;
        }

        Instant startTime = Instant.now();
        String sigFilename = sigFilenameIter.next();
        Collection<FileStreamSignature> signatures = sigFilesMap.get(sigFilename);
        boolean valid = false;

        try {
            nodeSignatureVerifier.verify(signatures);
        } catch (SignatureVerificationException ex) {
            if (sigFilenameIter.hasNext()) {
                log.warn("Signature verification failed but still have files in the batch, try to process the next group: {}",
                        ex.getMessage());
                continue;
            }
            throw ex;
        }

        for (FileStreamSignature signature : signatures) {
            if (ShutdownHelper.isStopping()) {
                return;
            }

            // Ignore signatures that didn't validate or weren't in the majority
            if (signature.getStatus() != FileStreamSignature.SignatureStatus.CONSENSUS_REACHED) {
                continue;
            }

            try {
                PendingDownload pendingDownload = downloadSignedDataFile(signature);
                if (!pendingDownload.waitForCompletion()) {
                    continue;
                }

                StreamFilename dataFilename = pendingDownload.getStreamFilename();
                StreamFileData streamFileData = new StreamFileData(dataFilename, pendingDownload.getBytes());
                T streamFile = streamFileReader.read(streamFileData);
                streamFile.setNodeAccountId(signature.getNodeAccountId());
                verify(streamFile, signature);

                if (downloaderProperties.isWriteFiles()) {
                    Utility.archiveFile(streamFile.getName(), streamFile.getBytes(),
                            downloaderProperties.getNodeStreamPath(signature.getNodeAccountIdString()));
                }

                if (downloaderProperties.isWriteSignatures()) {
                    signatures.forEach(s -> {
                        Path destination = downloaderProperties.getNodeStreamPath(s.getNodeAccountIdString());
                        Utility.archiveFile(s.getFilename(), s.getBytes(), destination);
                    });
                }

                if (!downloaderProperties.isPersistBytes()) {
                    streamFile.setBytes(null);
                }

                if (dataFilename.getInstant().isAfter(endDate)) {
                    downloaderProperties.setEnabled(false);
                    log.warn("Disabled polling after downloading all files <= endDate ({})", endDate);
                    return;
                }

                onVerified(pendingDownload, streamFile);
                valid = true;
                break;
            } catch (HashMismatchException e) {
                log.warn("Failed to verify data file from node {} corresponding to {}. Will retry another node",
                        signature.getNodeAccountIdString(), sigFilename, e);
            } catch (InterruptedException e) {
                log.warn("Failed to download data file from node {} corresponding to {}",
                        signature.getNodeAccountIdString(), sigFilename, e);
                Thread.currentThread().interrupt();
            } catch (Exception e) {
                log.error("Error downloading data file from node {} corresponding to {}. Will retry another node",
                        signature.getNodeAccountIdString(), sigFilename, e);
            }
        }

        if (!valid) {
            log.error("None of the data files could be verified, signatures: {}", signatures);
        }

        streamVerificationMetric.tag("success", String.valueOf(valid))
                .register(meterRegistry)
                .record(Duration.between(startTime, Instant.now()));
    }
}
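
The HashMismatchException caught above is raised by the verify(streamFile, signature) call when the hash recomputed from the downloaded data file does not equal the hash agreed on by the signature consensus; catching it lets the loop retry the download from another node's folder, as described in the Javadoc. Below is a minimal sketch of such a check. The accessor names (getFileHashAsHex, getFileHash) and the exception constructor arguments are assumptions for illustration and may differ from the actual project code.

// Sketch only: shows how a hash comparison could raise HashMismatchException.
// Accessor names and the exception constructor here are assumed, not taken from the project.
private void verify(T streamFile, FileStreamSignature signature) {
    // Hash agreed on by the signature consensus (assumed accessor)
    String expectedHash = signature.getFileHashAsHex();
    // Hash computed from the downloaded data file (assumed accessor)
    String actualHash = streamFile.getFileHash();

    if (expectedHash == null || !expectedHash.equals(actualHash)) {
        // The caller catches this exception and retries the download from another valid node folder
        throw new HashMismatchException(streamFile.getName(), expectedHash, actualHash);
    }
}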