Use of com.hedera.mirror.common.domain.transaction.RecordFile in project hedera-mirror-node by hashgraph.
The class RecordFileParserTest, method totalGasUsedMustBeCorrect:
@Test
void totalGasUsedMustBeCorrect() {
    when(mirrorDateRangePropertiesProcessor.getDateRangeFilter(parserProperties.getStreamType()))
            .thenReturn(DateRangeFilter.all());
    long timestamp = ++count;
    ContractFunctionResult contractFunctionResult1 =
            contractFunctionResult(10000000000L, new byte[] {0, 6, 4, 0, 5, 7, 2});
    RecordItem recordItem1 = contractCreate(contractFunctionResult1, timestamp, 0);
    ContractFunctionResult contractFunctionResult2 =
            contractFunctionResult(100000000000L, new byte[] {3, 5, 1, 7, 4, 4, 0});
    RecordItem recordItem2 = contractCall(contractFunctionResult2, timestamp, 0);
    ContractFunctionResult contractFunctionResult3 =
            contractFunctionResult(1000000000000L, new byte[] {0, 1, 1, 2, 2, 6, 0});
    RecordItem recordItem3 = ethereumTransaction(contractFunctionResult3, timestamp, 0);
    ContractFunctionResult contractFunctionResult4 =
            contractFunctionResult(1000000000000L, new byte[] {0, 1, 1, 2, 2, 6, 0});
    RecordItem recordItem4 = ethereumTransaction(contractFunctionResult4, timestamp, 1);
    RecordFile recordFile = getStreamFile(Flux.just(recordItem1, recordItem2, recordItem3, recordItem4), timestamp);

    parser.parse(recordFile);
    byte[] expectedLogBloom = new byte[] {
            3, 7, 5, 7, 7, 7, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
    };
    assertAll(
            // recordItem4 duplicates recordItem3's result, and its gas is deliberately absent from the expected total
            () -> assertEquals(10000000000L + 100000000000L + 1000000000000L, recordFile.getGasUsed()),
            () -> assertArrayEquals(expectedLogBloom, recordFile.getLogsBloom()),
            () -> verify(recordStreamFileListener, times(1)).onStart(),
            () -> verify(recordStreamFileListener, times(1)).onEnd(recordFile),
            () -> verify(recordItemListener, times(1)).onItem(recordItem1),
            () -> verify(recordItemListener, times(1)).onItem(recordItem2),
            () -> verify(recordItemListener, times(1)).onItem(recordItem3));
}
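The contractFunctionResult, contractCreate, contractCall, and ethereumTransaction helpers are defined elsewhere in the test class. Only the gas figure and the logs bloom feed the assertions above, so a minimal sketch of the first helper might look like the following, assuming the HAPI protobuf ContractFunctionResult builder (the field selection is an assumption, not the project's actual helper):

// Hypothetical sketch: build a ContractFunctionResult carrying only the two
// fields the aggregation assertions depend on (gas used and logs bloom).
private ContractFunctionResult contractFunctionResult(long gasUsed, byte[] logBloom) {
    return ContractFunctionResult.newBuilder()
            .setGasUsed(gasUsed)
            .setBloom(ByteString.copyFrom(logBloom))
            .build();
}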
Use of com.hedera.mirror.common.domain.transaction.RecordFile in project hedera-mirror-node by hashgraph.
The class AbstractEntityRecordItemListenerTest, method parseRecordItemsAndCommit:
protected void parseRecordItemsAndCommit(List<RecordItem> recordItems) {
    transactionTemplate.executeWithoutResult(status -> {
        Instant instant = Instant.ofEpochSecond(0, recordItems.get(0).getConsensusTimestamp());
        String filename = StreamFilename.getFilename(StreamType.RECORD, DATA, instant);
        long consensusStart = recordItems.get(0).getConsensusTimestamp();
        long consensusEnd = recordItems.get(recordItems.size() - 1).getConsensusTimestamp();
        RecordFile recordFile = recordFile(consensusStart, consensusEnd, filename);

        recordStreamFileListener.onStart();

        // process each record item
        for (RecordItem recordItem : recordItems) {
            entityRecordItemListener.onItem(recordItem);
        }

        // commit, close connection
        recordStreamFileListener.onEnd(recordFile);
    });
}
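The recordFile(consensusStart, consensusEnd, filename) factory is not shown here. A minimal sketch under the assumption that only the consensus range and name matter before the listener persists the file (RecordFile's builder is shown in a later snippet; the field selection is a guess):

// Hypothetical factory: the fields below are a guess at the minimum a
// RecordFile needs before recordStreamFileListener.onEnd() persists it.
private RecordFile recordFile(long consensusStart, long consensusEnd, String filename) {
    return RecordFile.builder()
            .consensusStart(consensusStart)
            .consensusEnd(consensusEnd)
            .name(filename)
            .build();
}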
Use of com.hedera.mirror.common.domain.transaction.RecordFile in project hedera-mirror-node by hashgraph.
The class PubSubRecordParserTest, method testPubSubExporter:
@Test
public void testPubSubExporter() throws Exception {
    for (int index = 0; index < testFiles.length; index++) {
        RecordFile recordFile = recordFileReader.read(StreamFileData.from(testFiles[index].getFile()));
        recordFile.setIndex((long) index);
        recordFile.setNodeAccountId(EntityId.of(0, 0, 3, EntityType.ACCOUNT));
        recordFileParser.parse(recordFile);
    }

    // then
    List<String> expectedMessages = Files.readAllLines(pubSubMessages);
    List<String> actualMessages = getAllMessages(NUM_TXNS).stream()
            .map(PubsubMessage::getData)
            .map(ByteString::toStringUtf8)
            .collect(Collectors.toList());

    // map timestamps to messages and compare individual message JSON strings
    Map<Long, String> expectedMessageMap = mapMessages(expectedMessages);
    Map<Long, String> actualMessageMap = mapMessages(actualMessages);
    assertThat(actualMessageMap.size()).isEqualTo(actualMessages.size());
    assertThat(expectedMessageMap.size()).isEqualTo(expectedMessages.size());

    for (Map.Entry<Long, String> messageEntry : expectedMessageMap.entrySet()) {
        long consensusTimestamp = messageEntry.getKey();
        String expectedMessage = messageEntry.getValue();
        String actualMessage = actualMessageMap.get(consensusTimestamp);
        JSONAssert.assertEquals(String.format("%d", consensusTimestamp), expectedMessage, actualMessage,
                JSONCompareMode.STRICT);
    }
}
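mapMessages keys each JSON message by its consensus timestamp so the expected and actual lists can be compared pairwise regardless of order. A sketch of such a helper, assuming Jackson and a top-level consensusTimestamp field in every message (the field name is an assumption):

// Hypothetical sketch: index each JSON payload by its consensusTimestamp field.
private Map<Long, String> mapMessages(List<String> messages) throws IOException {
    ObjectMapper objectMapper = new ObjectMapper();
    Map<Long, String> messageMap = new HashMap<>();
    for (String message : messages) {
        long consensusTimestamp = objectMapper.readTree(message).get("consensusTimestamp").asLong();
        messageMap.put(consensusTimestamp, message);
    }
    return messageMap;
}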
Use of com.hedera.mirror.common.domain.transaction.RecordFile in project hedera-mirror-node by hashgraph.
The class RecordFileRepositoryTest, method recordFile:
private RecordFile recordFile() {
    RecordFile recordFile = new RecordFile();
    recordFile.setConsensusStart(timestamp);
    recordFile.setConsensusEnd(timestamp + 1);
    recordFile.setCount(1L);
    recordFile.setDigestAlgorithm(DigestAlgorithm.SHA384);
    recordFile.setFileHash(String.valueOf(timestamp));
    recordFile.setHash(String.valueOf(timestamp));
    recordFile.setIndex(timestamp);
    recordFile.setLoadEnd(timestamp + 1);
    recordFile.setLoadStart(timestamp);
    recordFile.setName(timestamp + ".rcd");
    recordFile.setNodeAccountId(EntityId.of(0, 0, 3, ACCOUNT));
    recordFile.setPreviousHash(String.valueOf(timestamp - 1));
    // advance the shared counter so each call yields a unique, strictly ordered file
    ++timestamp;
    return recordFile;
}
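Because the factory advances the shared timestamp, the files it produces chain together: each one starts one timestamp unit after its predecessor and references the predecessor's hash. A hypothetical use in a repository test (the recordFileRepository field is assumed):

// Hypothetical usage: the second file starts one timestamp unit later and
// chains to the first file's hash, both guaranteed by the factory above.
RecordFile first = recordFileRepository.save(recordFile());
RecordFile second = recordFileRepository.save(recordFile());
assertThat(second.getConsensusStart()).isEqualTo(first.getConsensusStart() + 1);
assertThat(second.getPreviousHash()).isEqualTo(first.getHash());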
Use of com.hedera.mirror.common.domain.transaction.RecordFile in project hedera-mirror-node by hashgraph.
The class TestRecordFiles, method getAll:
public Map<String, RecordFile> getAll() {
    DigestAlgorithm digestAlgorithm = DigestAlgorithm.SHA384;
    RecordFile recordFileV1_1 = RecordFile.builder()
            .consensusStart(1561990380317763000L).consensusEnd(1561990399074934000L)
            .count(15L).digestAlgorithm(digestAlgorithm)
            .fileHash("333d6940254659533fd6b939033e59c57fe8f4ff78375d1e687c032918aa0b7b8179c7fd403754274a8c91e0b6c0195a")
            .hash("333d6940254659533fd6b939033e59c57fe8f4ff78375d1e687c032918aa0b7b8179c7fd403754274a8c91e0b6c0195a")
            .name("2019-07-01T14_13_00.317763Z.rcd")
            .previousHash("f423447a3d5a531a07426070e511555283daae063706242590949116f717a0524e4dd18f9d64e66c73982d475401db04")
            .version(1).build();
    RecordFile recordFileV1_2 = RecordFile.builder()
            .consensusStart(1561991340302068000L).consensusEnd(1561991353226225001L)
            .count(69L).digestAlgorithm(digestAlgorithm)
            .fileHash("1faf198f8fdbefa59bde191f214d73acdc4f5c0f434677a7edf9591b129e21aea90a5b3119d2802cee522e7be6bc8830")
            .hash("1faf198f8fdbefa59bde191f214d73acdc4f5c0f434677a7edf9591b129e21aea90a5b3119d2802cee522e7be6bc8830")
            .name("2019-07-01T14_29_00.302068Z.rcd")
            .previousHash(recordFileV1_1.getFileHash())
            .version(1).build();
    RecordFile recordFileV2_1 = RecordFile.builder()
            .consensusStart(1567188600419072000L).consensusEnd(1567188604906443001L)
            .count(19L).digestAlgorithm(digestAlgorithm)
            .fileHash("591558e059bd1629ee386c4e35a6875b4c67a096718f5d225772a651042715189414df7db5588495efb2a85dc4a0ffda")
            .hash("591558e059bd1629ee386c4e35a6875b4c67a096718f5d225772a651042715189414df7db5588495efb2a85dc4a0ffda")
            .name("2019-08-30T18_10_00.419072Z.rcd")
            .previousHash(digestAlgorithm.getEmptyHash())
            .version(2).build();
    RecordFile recordFileV2_2 = RecordFile.builder()
            .consensusStart(1567188605249678000L).consensusEnd(1567188609705382001L)
            .count(15L).digestAlgorithm(digestAlgorithm)
            .fileHash("5ed51baeff204eb6a2a68b76bbaadcb9b6e7074676c1746b99681d075bef009e8d57699baaa6342feec4e83726582d36")
            .hash("5ed51baeff204eb6a2a68b76bbaadcb9b6e7074676c1746b99681d075bef009e8d57699baaa6342feec4e83726582d36")
            .name("2019-08-30T18_10_05.249678Z.rcd")
            .previousHash(recordFileV2_1.getFileHash())
            .version(2).build();
    RecordFile recordFileV5_1 = RecordFile.builder()
            .consensusStart(1610402964063739000L).consensusEnd(1610402964063739000L)
            .count(1L).digestAlgorithm(digestAlgorithm)
            .fileHash("e8adaac05a62a655a3c476b43f1383f6c5f5bba4bfa6c7b087dc4ee3a9089e232b5d5977bde7fba858fd56987792ece3")
            .hapiVersionMajor(0).hapiVersionMinor(9).hapiVersionPatch(0)
            .hash("151bd3358db59fc7936eff15f1cb6734354e444cf85549a5643e55c9c929cb500be712abccd588cd8d20eb92ca55ff49")
            .metadataHash("ffe56840b99145f7b3370367fa5784cbe225278afd1c4c078dfe5b950fee22e2b9e9a04bde32023c3ba07c057cb54406")
            .name("2021-01-11T22_09_24.063739000Z.rcd")
            .previousHash(digestAlgorithm.getEmptyHash())
            .version(5).build();
    RecordFile recordFileV5_2 = RecordFile.builder()
            .consensusStart(1610402974097416003L).consensusEnd(1610402974097416003L)
            .count(1L).digestAlgorithm(digestAlgorithm)
            .fileHash("06fb76873dcdc3a4fdb67202e64ed735feaf6a6bb80d4f57fd3511df49ef61fc69d7a2414315028b7d77e168169fad22")
            .hapiVersionMajor(0).hapiVersionMinor(9).hapiVersionPatch(0)
            .hash("514e361089074cb06f984e5a943a20fba2a0d601b766f8adb432d03214c48c3ff14898e6b78292520340f484e820ea84")
            .metadataHash("912869b5204ffbb7e437aaa6e7a09e9d53da98ead27942fdf7017e850827e857fadb1167e8877cfb8175883adcd74f7d")
            .name("2021-01-11T22_09_34.097416003Z.rcd")
            .previousHash(recordFileV5_1.getHash())
            .version(5).build();
    List<RecordFile> allFiles = List.of(recordFileV1_1, recordFileV1_2, recordFileV2_1, recordFileV2_2,
            recordFileV5_1, recordFileV5_2);
    return Collections.unmodifiableMap(allFiles.stream().collect(Collectors.toMap(RecordFile::getName, rf -> rf)));
}
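A hypothetical lookup against this fixture map, assuming getAll() is statically accessible (for example via Lombok's @UtilityClass, which the snippet does not show):

// Hypothetical usage: fetch a known v2 fixture by file name and sanity-check it.
RecordFile recordFile = TestRecordFiles.getAll().get("2019-08-30T18_10_00.419072Z.rcd");
assertThat(recordFile.getVersion()).isEqualTo(2);
assertThat(recordFile.getCount()).isEqualTo(19L);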