Usage of com.hedera.mirror.importer.exception.InvalidStreamFileException in the hedera-mirror-node project by hashgraph: class ProtoBalanceFileReader, method toFlux.
/**
 * Lazily parses a protobuf-encoded account balance stream file into a flux of
 * {@link AccountBalance} domain objects.
 *
 * <p>The file is read tag-by-tag with {@link CodedInputStream}: a timestamp message must
 * appear before any balance entry; balance entries are emitted one at a time via
 * {@code Flux.generate}; unknown tags are logged and skipped. The underlying input stream
 * is closed when the flux terminates for any reason.
 *
 * @param streamFileData the in-memory stream file to parse
 * @return a cold flux of account balances; errors are signaled as
 *         {@code StreamFileReaderException} (I/O) or {@code InvalidStreamFileException}
 *         (structural problems such as a missing timestamp)
 */
private Flux<AccountBalance> toFlux(StreamFileData streamFileData) {
    return Flux.defer(() -> {
        InputStream inputStream = streamFileData.getInputStream();
        ExtensionRegistryLite extensionRegistry = ExtensionRegistryLite.getEmptyRegistry();
        CodedInputStream input = CodedInputStream.newInstance(inputStream);
        AtomicLong consensusTimestamp = new AtomicLong(0L);
        UnknownFieldSet.Builder unknownFieldSet = UnknownFieldSet.newBuilder();

        return Flux.<AccountBalance>generate(sink -> {
            try {
                boolean done = false;
                while (!done) {
                    int tag = input.readTag();
                    switch (tag) {
                        case TAG_EOF:
                            done = true;
                            break;
                        case TAG_TIMESTAMP:
                            Timestamp timestamp = input.readMessage(Timestamp.parser(), extensionRegistry);
                            consensusTimestamp.set(DomainUtils.timestampInNanosMax(timestamp));
                            break;
                        case TAG_BALANCE:
                            // Fixed: message previously contained a stray trailing ')'
                            Assert.state(consensusTimestamp.get() > 0, "Missing consensus timestamp");
                            var ab = input.readMessage(SingleAccountBalances.parser(), extensionRegistry);
                            sink.next(toAccountBalance(consensusTimestamp.get(), ab));
                            // Flux.generate allows at most one next() per invocation, so
                            // return and let the next invocation resume parsing.
                            return;
                        default:
                            log.warn("Unsupported tag: {}", tag);
                            // mergeFieldFrom returns false at end of input/group; stop then.
                            done = !unknownFieldSet.mergeFieldFrom(tag, input);
                    }
                }
                // A file with no timestamp at all is structurally invalid.
                // Fixed: message previously contained a stray trailing ')'
                Assert.state(consensusTimestamp.get() > 0, "Missing consensus timestamp");
                sink.complete();
            } catch (IOException e) {
                sink.error(new StreamFileReaderException(e));
            } catch (IllegalStateException e) {
                // Assert.state failures surface as IllegalStateException
                sink.error(new InvalidStreamFileException(e));
            }
        }).doFinally(s -> IOUtils.closeQuietly(inputStream));
    });
}
Usage of com.hedera.mirror.importer.exception.InvalidStreamFileException in the hedera-mirror-node project by hashgraph: class StreamFileData, method getInputStream.
/**
 * Opens an input stream over the in-memory file bytes, wrapping it in a
 * decompressor stream when the filename carries a compression extension.
 *
 * @return a readable stream over the (decompressed) file contents
 * @throws InvalidStreamFileException if the compressed data cannot be opened
 */
public InputStream getInputStream() {
    InputStream stream = new ByteArrayInputStream(bytes);
    var compressor = streamFilename.getCompressor();
    if (StringUtils.isNotBlank(compressor)) {
        try {
            // Only this factory call can throw CompressorException
            stream = compressorStreamFactory.createCompressorInputStream(compressor, stream);
        } catch (CompressorException ex) {
            throw new InvalidStreamFileException("Unable to open compressed file " + streamFilename, ex);
        }
    }
    return stream;
}
Usage of com.hedera.mirror.importer.exception.InvalidStreamFileException in the hedera-mirror-node project by hashgraph: class StreamFilename, method extractTypeInfo.
/**
 * Derives the stream type, file type, extension, and optional compression suffix
 * from a stream filename by inspecting its last two dot-separated tokens.
 *
 * @param filename the stream filename to classify
 * @return the resolved type information
 * @throws InvalidStreamFileException if no known stream type matches the filename
 */
private static TypeInfo extractTypeInfo(String filename) {
    List<String> tokens = FILENAME_SPLITTER.splitToList(filename);
    if (tokens.size() < 2) {
        throw new InvalidStreamFileException("Failed to determine StreamType for filename: " + filename);
    }

    String lastToken = tokens.get(tokens.size() - 1);
    String penultimateToken = tokens.get(tokens.size() - 2);

    for (StreamType candidate : StreamType.values()) {
        // A non-empty suffix must appear somewhere in the filename for this type to apply
        String suffix = candidate.getSuffix();
        if (!StringUtils.isEmpty(suffix) && !filename.contains(suffix)) {
            continue;
        }

        Map<String, StreamType.Extension> extensions = STREAM_TYPE_EXTENSION_MAP.get(candidate);

        // Uncompressed file: the final token is the stream extension itself
        if (extensions.containsKey(lastToken)) {
            FileType fileType = lastToken.endsWith(StreamType.SIGNATURE_SUFFIX) ? SIGNATURE : DATA;
            return TypeInfo.of(null, extensions.get(lastToken), fileType, candidate);
        }

        // Compressed file: the stream extension is second-to-last; the final token
        // names the compression algorithm
        if (extensions.containsKey(penultimateToken)) {
            FileType fileType = penultimateToken.endsWith(StreamType.SIGNATURE_SUFFIX) ? SIGNATURE : DATA;
            return TypeInfo.of(lastToken, extensions.get(penultimateToken), fileType, candidate);
        }
    }

    throw new InvalidStreamFileException("Failed to determine StreamType for filename: " + filename);
}
Usage of com.hedera.mirror.importer.exception.InvalidStreamFileException in the hedera-mirror-node project by hashgraph: class RecordFileReaderImplV5, method readBody.
/**
 * Reads the body of a v5 record file: the start running hash, the sequence of record
 * stream objects, and the end running hash, populating the given {@code recordFile}.
 *
 * <p>The read order is significant — each call consumes bytes from {@code vdis} in the
 * exact on-disk layout. The metadata digest is suspended while record stream objects are
 * read because the metadata hash covers only the header and the two hash objects.
 *
 * @param vdis validated input over the record file body, positioned after the header
 * @param metadataDigestInputStream digest stream used to compute the metadata hash
 * @param recordFile target entity; count, consensus range, hashes, and items are set
 * @throws IOException if the underlying stream cannot be read
 * @throws InvalidStreamFileException if the file has no record stream objects or trailing data
 */
private void readBody(ValidatedDataInputStream vdis, DigestInputStream metadataDigestInputStream, RecordFile recordFile) throws IOException {
    String filename = recordFile.getName();
    // object stream version
    vdis.readInt();
    // start object running hash
    HashObject startHashObject = new HashObject(vdis, DIGEST_ALGORITHM);
    // metadata hash is not calculated on record stream objects
    metadataDigestInputStream.on(false);
    long hashObjectClassId = startHashObject.getClassId();
    int count = 0;
    long consensusStart = 0;
    List<RecordItem> items = new ArrayList<>();
    RecordItem lastRecordItem = null;
    // read record stream objects until the next object on the stream is a hash object,
    // which marks the end of the record list
    while (!isHashObject(vdis, hashObjectClassId)) {
        RecordStreamObject recordStreamObject = new RecordStreamObject(vdis, recordFile.getHapiVersion(), count);
        // link each item to its predecessor so downstream processing can walk the chain
        var recordItem = RecordItem.builder().hapiVersion(recordFile.getHapiVersion()).previous(lastRecordItem).recordBytes(recordStreamObject.recordBytes).transactionIndex(count).transactionBytes(recordStreamObject.transactionBytes).build();
        items.add(recordItem);
        if (count == 0) {
            // the first item's timestamp is the file's consensus start
            consensusStart = recordItem.getConsensusTimestamp();
        }
        lastRecordItem = recordItem;
        count++;
    }
    if (count == 0) {
        throw new InvalidStreamFileException("No record stream objects in record file " + filename);
    }
    long consensusEnd = lastRecordItem.getConsensusTimestamp();
    // end object running hash, metadata hash is calculated on it
    metadataDigestInputStream.on(true);
    HashObject endHashObject = new HashObject(vdis, DIGEST_ALGORITHM);
    if (vdis.available() != 0) {
        // any bytes after the end hash object indicate a malformed file
        throw new InvalidStreamFileException("Extra data discovered in record file " + filename);
    }
    recordFile.setCount((long) count);
    recordFile.setConsensusEnd(consensusEnd);
    recordFile.setConsensusStart(consensusStart);
    // the end running hash is this file's hash; the start running hash chains to the previous file
    recordFile.setHash(Hex.encodeHexString(endHashObject.getHash()));
    recordFile.setItems(Flux.fromIterable(items));
    recordFile.setPreviousHash(Hex.encodeHexString(startHashObject.getHash()));
}
Usage of com.hedera.mirror.importer.exception.InvalidStreamFileException in the hedera-mirror-node project by hashgraph: class SignatureFileReaderV2, method read.
/**
 * Parses a v2 signature file, whose layout is: a hash delimiter byte, the SHA-384 file
 * hash, a signature delimiter byte, and an RSA signature over that hash.
 *
 * @param signatureFileData the raw signature file contents
 * @return the populated signature object
 * @throws SignatureFileParsingException if the file is malformed, truncated, or has trailing data
 */
@Override
public FileStreamSignature read(StreamFileData signatureFileData) {
    var name = signatureFileData.getFilename();
    try (var in = new ValidatedDataInputStream(signatureFileData.getInputStream(), name)) {
        // Reads must follow the on-disk order exactly
        in.readByte(SIGNATURE_TYPE_FILE_HASH, "hash delimiter");
        var hash = in.readNBytes(DigestAlgorithm.SHA384.getSize(), "hash");
        in.readByte(SIGNATURE_TYPE_SIGNATURE, "signature delimiter");
        var signatureBytes = in.readLengthAndBytes(1, SignatureType.SHA_384_WITH_RSA.getMaxLength(), false, "signature");

        if (in.available() != 0) {
            throw new SignatureFileParsingException("Extra data discovered in signature file " + name);
        }

        var result = new FileStreamSignature();
        result.setBytes(signatureFileData.getBytes());
        result.setFileHash(hash);
        result.setFileHashSignature(signatureBytes);
        result.setFilename(name);
        result.setSignatureType(SignatureType.SHA_384_WITH_RSA);
        return result;
    } catch (InvalidStreamFileException | IOException e) {
        throw new SignatureFileParsingException(e);
    }
}
Aggregations