Use of com.hedera.mirror.importer.reader.ValidatedDataInputStream in the hedera-mirror-node project by hashgraph.
From the class ErrataMigration, method missingTransactions.
// Adds the transactions and records that are missing due to the insufficient fee funding issue in services.
private void missingTransactions() throws IOException {
    Set<Long> consensusTimestamps = new HashSet<>();
    var resourceResolver = new PathMatchingResourcePatternResolver();
    Resource[] resources = resourceResolver.getResources("classpath*:errata/mainnet/missingtransactions/*.bin");
    recordStreamFileListener.onStart();
    var dateRangeFilter = new DateRangeFilter(mirrorProperties.getStartDate(), mirrorProperties.getEndDate());

    for (Resource resource : resources) {
        String name = resource.getFilename();
        log.info("Loading file: {}", name);

        try (var in = new ValidatedDataInputStream(resource.getInputStream(), name)) {
            byte[] recordBytes = in.readLengthAndBytes(1, MAX_TRANSACTION_LENGTH, false, "record");
            byte[] transactionBytes = in.readLengthAndBytes(1, MAX_TRANSACTION_LENGTH, false, "transaction");
            var transactionRecord = TransactionRecord.parseFrom(recordBytes);
            var transaction = Transaction.parseFrom(transactionBytes);
            var recordItem = new RecordItem(transaction, transactionRecord);
            long timestamp = recordItem.getConsensusTimestamp();

            if (transactionRepository.findById(timestamp).isEmpty() && dateRangeFilter.filter(timestamp)) {
                entityRecordItemListener.onItem(recordItem);
                consensusTimestamps.add(timestamp);
            }
        } catch (IOException e) {
            recordStreamFileListener.onError();
            throw new FileOperationException("Error parsing errata file " + name, e);
        }
    }

    if (consensusTimestamps.isEmpty()) {
        log.info("Previously inserted all missing transactions");
        return;
    }

    recordStreamFileListener.onEnd(null);
    var ids = new MapSqlParameterSource("ids", consensusTimestamps);
    jdbcOperations.update("update crypto_transfer set errata = 'INSERT' where consensus_timestamp in (:ids)", ids);
    jdbcOperations.update("update transaction set errata = 'INSERT' where consensus_timestamp in (:ids)", ids);

    Long min = consensusTimestamps.stream().min(Long::compareTo).orElse(null);
    Long max = consensusTimestamps.stream().max(Long::compareTo).orElse(null);
    log.info("Inserted {} missing transactions between {} and {}", consensusTimestamps.size(), min, max);
}
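The two update statements above rely on Spring's named-parameter JDBC support: binding a collection to :ids expands the in (:ids) clause into one placeholder per consensus timestamp. A minimal, hedged sketch of that behavior, assuming a plain NamedParameterJdbcTemplate over a DataSource (the actual wiring of jdbcOperations in the project is not shown on this page):

import java.util.Set;
import javax.sql.DataSource;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;

public class ErrataFlagSketch {

    // Flags the given consensus timestamps as errata inserts. Spring expands the collection
    // bound to "ids" into "in (?, ?, ...)" with one placeholder per element.
    static int flagErrataInserts(DataSource dataSource, Set<Long> consensusTimestamps) {
        var jdbcOperations = new NamedParameterJdbcTemplate(dataSource);
        var ids = new MapSqlParameterSource("ids", consensusTimestamps);
        return jdbcOperations.update(
                "update transaction set errata = 'INSERT' where consensus_timestamp in (:ids)", ids);
    }
}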
Use of com.hedera.mirror.importer.reader.ValidatedDataInputStream in the hedera-mirror-node project by hashgraph.
From the class RecordFileReaderImplV5, method read.
@Override
public RecordFile read(StreamFileData streamFileData) {
    MessageDigest messageDigestFile = createMessageDigest(DIGEST_ALGORITHM);
    MessageDigest messageDigestMetadata = createMessageDigest(DIGEST_ALGORITHM);
    String filename = streamFileData.getFilename();

    // Two DigestInputStreams are nested so the full-file digest and the metadata digest are computed in
    // one pass; keeping the metadata digest on its own (outer) stream avoids digesting some class ID
    // fields twice.
    try (DigestInputStream digestInputStream = new DigestInputStream(
                 new BufferedInputStream(new DigestInputStream(streamFileData.getInputStream(), messageDigestFile)),
                 messageDigestMetadata);
         ValidatedDataInputStream vdis = new ValidatedDataInputStream(digestInputStream, filename)) {
        RecordFile recordFile = new RecordFile();
        recordFile.setBytes(streamFileData.getBytes());
        recordFile.setDigestAlgorithm(DIGEST_ALGORITHM);
        recordFile.setLoadStart(Instant.now().getEpochSecond());
        recordFile.setName(filename);

        readHeader(vdis, recordFile);
        readBody(vdis, digestInputStream, recordFile);

        recordFile.setFileHash(Hex.encodeHexString(messageDigestFile.digest()));
        recordFile.setMetadataHash(Hex.encodeHexString(messageDigestMetadata.digest()));
        return recordFile;
    } catch (IOException e) {
        throw new StreamFileReaderException("Error reading record file " + filename, e);
    }
}
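The nested DigestInputStream construction lets a single pass over the record file produce both hashes: the inner stream feeds every byte into the file digest, while the outer stream feeds the metadata digest and can be switched off for byte ranges that belong only to the file hash. A small self-contained sketch of that pattern using plain JDK classes; the on(false)/on(true) toggling is an assumption about how readBody likely skips non-metadata bytes, not code taken from the project:

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.commons.codec.binary.Hex;

public class TwoDigestSketch {

    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        MessageDigest fileDigest = MessageDigest.getInstance("SHA-384");
        MessageDigest metadataDigest = MessageDigest.getInstance("SHA-384");
        InputStream data = new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});

        // Inner stream hashes everything that is read; the outer stream hashes only while it is "on".
        try (DigestInputStream metadataStream = new DigestInputStream(
                new BufferedInputStream(new DigestInputStream(data, fileDigest)), metadataDigest)) {
            metadataStream.readNBytes(4);   // counted in both digests

            metadataStream.on(false);       // exclude the next bytes from the metadata digest
            metadataStream.readNBytes(2);   // counted only in the file digest
            metadataStream.on(true);

            metadataStream.readNBytes(2);   // counted in both digests again
        }

        System.out.println("file hash:     " + Hex.encodeHexString(fileDigest.digest()));
        System.out.println("metadata hash: " + Hex.encodeHexString(metadataDigest.digest()));
    }
}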
Use of com.hedera.mirror.importer.reader.ValidatedDataInputStream in the hedera-mirror-node project by hashgraph.
From the class SignatureFileReaderV2, method read.
@Override
public FileStreamSignature read(StreamFileData signatureFileData) {
    String filename = signatureFileData.getFilename();

    try (ValidatedDataInputStream vdis = new ValidatedDataInputStream(signatureFileData.getInputStream(), filename)) {
        vdis.readByte(SIGNATURE_TYPE_FILE_HASH, "hash delimiter");
        byte[] fileHash = vdis.readNBytes(DigestAlgorithm.SHA384.getSize(), "hash");
        vdis.readByte(SIGNATURE_TYPE_SIGNATURE, "signature delimiter");
        byte[] signature = vdis.readLengthAndBytes(1, SignatureType.SHA_384_WITH_RSA.getMaxLength(), false, "signature");

        if (vdis.available() != 0) {
            throw new SignatureFileParsingException("Extra data discovered in signature file " + filename);
        }

        FileStreamSignature fileStreamSignature = new FileStreamSignature();
        fileStreamSignature.setBytes(signatureFileData.getBytes());
        fileStreamSignature.setFileHash(fileHash);
        fileStreamSignature.setFileHashSignature(signature);
        fileStreamSignature.setFilename(filename);
        fileStreamSignature.setSignatureType(SignatureType.SHA_384_WITH_RSA);
        return fileStreamSignature;
    } catch (InvalidStreamFileException | IOException e) {
        throw new SignatureFileParsingException(e);
    }
}
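This reader only parses the signature file; checking the SHA_384_WITH_RSA signature against a node's public key happens elsewhere in the importer. As a hedged sketch of what that verification could look like with the standard JCA API, assuming the signature covers the 48-byte file hash read above and the key comes from the network address book:

import java.security.PublicKey;
import java.security.Signature;

public class SignatureCheckSketch {

    // Returns true if fileHashSignature is a valid SHA384withRSA signature over fileHash.
    static boolean verifyFileHashSignature(PublicKey nodePublicKey, byte[] fileHash, byte[] fileHashSignature)
            throws Exception {
        Signature verifier = Signature.getInstance("SHA384withRSA");
        verifier.initVerify(nodePublicKey);
        verifier.update(fileHash);
        return verifier.verify(fileHashSignature);
    }
}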
Use of com.hedera.mirror.importer.reader.ValidatedDataInputStream in the hedera-mirror-node project by hashgraph.
From the class SignatureFileReaderV5, method read.
@Override
public FileStreamSignature read(StreamFileData signatureFileData) {
    String filename = signatureFileData.getFilename();

    try (ValidatedDataInputStream vdis = new ValidatedDataInputStream(signatureFileData.getInputStream(), filename)) {
        vdis.readByte(SIGNATURE_FILE_FORMAT_VERSION, "fileVersion");
        // Read the objectStreamSignatureVersion, which is not used
        vdis.readInt();

        HashObject fileHashObject = new HashObject(vdis, "entireFile", SHA384);
        SignatureObject fileHashSignatureObject = new SignatureObject(vdis, "entireFile");
        HashObject metadataHashObject = new HashObject(vdis, "metadata", SHA384);
        SignatureObject metadataHashSignatureObject = new SignatureObject(vdis, "metadata");

        if (vdis.available() != 0) {
            throw new SignatureFileParsingException("Extra data discovered in signature file " + filename);
        }

        FileStreamSignature fileStreamSignature = new FileStreamSignature();
        fileStreamSignature.setBytes(signatureFileData.getBytes());
        fileStreamSignature.setFileHash(fileHashObject.getHash());
        fileStreamSignature.setFileHashSignature(fileHashSignatureObject.getSignature());
        fileStreamSignature.setFilename(filename);
        fileStreamSignature.setMetadataHash(metadataHashObject.getHash());
        fileStreamSignature.setMetadataHashSignature(metadataHashSignatureObject.getSignature());
        fileStreamSignature.setSignatureType(fileHashSignatureObject.getSignatureType());
        return fileStreamSignature;
    } catch (InvalidStreamFileException | IOException e) {
        throw new SignatureFileParsingException(e);
    }
}
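A V5 signature file asserts both an entire-file hash and a metadata hash, which line up with the two digests computed by RecordFileReaderImplV5 above. Note the encoding difference: the record file reader stores hashes as hex strings while the signature reader keeps raw bytes. A hedged sketch of cross-checking the two, with plain parameters standing in for the RecordFile and FileStreamSignature fields (the importer's real verification logic lives elsewhere and is not reproduced here):

import org.apache.commons.codec.binary.Hex;

public class HashConsistencySketch {

    // Compares the hex-encoded hashes from a parsed record file with the raw hash bytes
    // asserted by a node's signature file for the same record file.
    static boolean hashesMatch(String recordFileHashHex, String recordMetadataHashHex,
                               byte[] signatureFileHash, byte[] signatureMetadataHash) {
        return recordFileHashHex.equals(Hex.encodeHexString(signatureFileHash))
                && recordMetadataHashHex.equals(Hex.encodeHexString(signatureMetadataHash));
    }
}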
Use of com.hedera.mirror.importer.reader.ValidatedDataInputStream in the hedera-mirror-node project by hashgraph.
From the class AbstractPreV5RecordFileReader, method read.
@Override
public RecordFile read(@NonNull StreamFileData streamFileData) {
    String filename = streamFileData.getFilename();

    try (RecordFileDigest digest = getRecordFileDigest(streamFileData.getInputStream());
         ValidatedDataInputStream vdis = new ValidatedDataInputStream(digest.getDigestInputStream(), filename)) {
        RecordFile recordFile = new RecordFile();
        recordFile.setBytes(streamFileData.getBytes());
        recordFile.setLoadStart(Instant.now().getEpochSecond());
        recordFile.setName(filename);
        recordFile.setDigestAlgorithm(DIGEST_ALGORITHM);

        readHeader(vdis, recordFile);
        readBody(vdis, digest, recordFile);
        return recordFile;
    } catch (ImporterException e) {
        throw e;
    } catch (Exception e) {
        throw new StreamFileReaderException("Error reading record file " + filename, e);
    }
}
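Across all of these readers, ValidatedDataInputStream plays the same role: read a value, validate it against an expected marker or length range, and fail with a descriptive error naming the file and field. The class itself is not shown on this page, so the following is a hypothetical, much-simplified stand-in that only illustrates that style of read; the real class differs in naming, exception types, and extra flags such as the boolean passed to readLengthAndBytes above:

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

// Hypothetical sketch only; not the hedera-mirror-node implementation.
public class ValidatedReadsSketch extends DataInputStream {

    private final String filename;

    public ValidatedReadsSketch(InputStream in, String filename) {
        super(in);
        this.filename = filename;
    }

    // Reads one byte and fails fast if it is not the expected marker.
    public byte readByte(byte expected, String fieldName) throws IOException {
        byte actual = readByte();
        if (actual != expected) {
            throw new IOException(String.format("%s: unexpected %s, wanted %d but got %d",
                    filename, fieldName, expected, actual));
        }
        return actual;
    }

    // Reads a 4-byte length, checks it against the allowed range, then reads exactly that many bytes.
    public byte[] readLengthAndBytes(int minLength, int maxLength, String fieldName) throws IOException {
        int length = readInt();
        if (length < minLength || length > maxLength) {
            throw new IOException(String.format("%s: %s length %d outside [%d, %d]",
                    filename, fieldName, length, minLength, maxLength));
        }
        byte[] bytes = readNBytes(length);
        if (bytes.length != length) {
            throw new IOException(String.format("%s: %s truncated", filename, fieldName));
        }
        return bytes;
    }
}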