Use of com.hederahashgraph.api.proto.java.TransactionRecord in project hedera-services by hashgraph.
The class ContractSysDelTransitionLogicTest, method followsHappyPathWithOverrides.
@Test
void followsHappyPathWithOverrides() {
    // setup:
    TransactionRecord updateRec = TransactionRecord.newBuilder()
            .setReceipt(TransactionReceipt.newBuilder().setStatus(SUCCESS).build())
            .build();
    givenValidTxnCtx();
    // and:
    given(delegate.perform(contractSysDelTxn, consensusTime)).willReturn(updateRec);

    // when:
    subject.doStateTransition();

    // then:
    verify(sigImpactHistorian).markEntityChanged(target.getContractNum());
    verify(txnCtx).setStatus(SUCCESS);
}
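The test follows Mockito's BDD style: stub the collaborator with given(...).willReturn(...), run the transition, then verify(...) the expected interactions. Here is a minimal, self-contained sketch of that pattern; the Delegate interface and the literal values are stand-ins for illustration, not the project's actual types.

import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

class BddMockitoSketch {
    // Stand-in collaborator; in the real test this is the system-delete delegate.
    interface Delegate {
        String perform(String txn);
    }

    void exampleInteraction() {
        Delegate delegate = mock(Delegate.class);
        // "and:" step - stub the collaborator's answer
        given(delegate.perform("someTxn")).willReturn("someRecord");
        // "when:" step - exercise the code under test (elided here)
        delegate.perform("someTxn");
        // "then:" step - check the interaction happened
        verify(delegate).perform("someTxn");
    }
}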
Use of com.hederahashgraph.api.proto.java.TransactionRecord in project hedera-services by hashgraph.
The class ContractSysUndelTransitionLogicTest, method capturesBadUndelete.
@Test
void capturesBadUndelete() {
    // setup:
    TransactionRecord sysUndelRec = TransactionRecord.newBuilder()
            .setReceipt(TransactionReceipt.newBuilder().setStatus(INVALID_CONTRACT_ID).build())
            .build();
    givenValidTxnCtx();
    // and:
    given(delegate.perform(contractSysUndelTxn, consensusTime)).willReturn(sysUndelRec);

    // when:
    subject.doStateTransition();

    // then:
    verify(txnCtx).setStatus(INVALID_CONTRACT_ID);
}
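Both tests above build their stubbed record the same way: a TransactionRecord whose receipt carries the status the delegate is expected to report. A small helper capturing that pattern might look like the following; the RecordFactory class is illustrative, not part of the project.

import com.hederahashgraph.api.proto.java.ResponseCodeEnum;
import com.hederahashgraph.api.proto.java.TransactionReceipt;
import com.hederahashgraph.api.proto.java.TransactionRecord;

class RecordFactory {
    // Builds a TransactionRecord whose receipt carries the given status,
    // mirroring the inline builder chain used in the tests above.
    static TransactionRecord recordWithStatus(ResponseCodeEnum status) {
        return TransactionRecord.newBuilder()
                .setReceipt(TransactionReceipt.newBuilder().setStatus(status).build())
                .build();
    }
}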
Use of com.hederahashgraph.api.proto.java.TransactionRecord in project hedera-services by hashgraph.
The class HapiGetAccountRecords, method checkExpectations.
private void checkExpectations(HapiApiSpec spec, List<TransactionRecord> records) {
    String specExpectationsDir = specScopedDir(spec, expectationsDirPath);
    try {
        String expectationsDir = specExpectationsDir + "/" + account;
        File countFile = new File(expectationsDir + "/n.txt");
        CharSource charSource = Files.asCharSource(countFile, Charset.forName("UTF-8"));
        int n = Integer.parseInt(charSource.readFirstLine());
        Assertions.assertEquals(n, records.size(), "Bad number of records!");
        for (int i = 0; i < n; i++) {
            File recordFile = new File(expectationsDir + "/record" + i + ".bin");
            ByteSource byteSource = Files.asByteSource(recordFile);
            TransactionRecord expected = TransactionRecord.parseFrom(byteSource.read());
            Assertions.assertEquals(expected, records.get(i), "Wrong record #" + i);
        }
    } catch (Exception e) {
        log.error("Something amiss with the expected records...", e);
        Assertions.fail("Impossible to meet expectations (on records)!");
    }
}
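checkExpectations only reads the snapshot directory; the write side is not shown here. A sketch of a companion writer that produces the same layout (an n.txt count file plus record{i}.bin files of serialized protobufs) could look like this, using Guava's file sinks; the RecordSnapshots class is hypothetical.

import com.google.common.io.Files;
import com.hederahashgraph.api.proto.java.TransactionRecord;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

class RecordSnapshots {
    // Writes n.txt and record{i}.bin in the layout that checkExpectations() reads back.
    static void write(String expectationsDir, List<TransactionRecord> records) throws IOException {
        new File(expectationsDir).mkdirs();
        Files.asCharSink(new File(expectationsDir + "/n.txt"), StandardCharsets.UTF_8)
                .write(String.valueOf(records.size()));
        for (int i = 0; i < records.size(); i++) {
            Files.asByteSink(new File(expectationsDir + "/record" + i + ".bin"))
                    .write(records.get(i).toByteArray());
        }
    }
}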
Use of com.hederahashgraph.api.proto.java.TransactionRecord in project hedera-services by hashgraph.
The class HapiGetTxnRecord, method submitWith.
@Override
protected void submitWith(HapiApiSpec spec, Transaction payment) throws InvalidProtocolBufferException {
    Query query = getRecordQuery(spec, payment, false);
    response = spec.clients().getCryptoSvcStub(targetNodeFor(spec), useTls).getTxRecordByTxID(query);
    final TransactionRecord record = response.getTransactionGetRecord().getTransactionRecord();
    if (contractResultAbi != null) {
        exposeRequestedEventsFrom(record);
    }
    observer.ifPresent(obs -> obs.accept(record));
    childRecords = response.getTransactionGetRecord().getChildTransactionRecordsList();
    childRecordsCount.ifPresent(count -> assertEquals(count, childRecords.size()));
    for (var rec : childRecords) {
        spec.registry().saveAccountId(rec.getAlias().toStringUtf8(), rec.getReceipt().getAccountID());
        spec.registry().saveKey(rec.getAlias().toStringUtf8(), Key.parseFrom(rec.getAlias()));
        log.info(spec.logPrefix() + " Saving alias {} to registry for Account ID {}",
                rec.getAlias().toStringUtf8(), rec.getReceipt().getAccountID());
    }
    if (verboseLoggingOn) {
        if (format.isPresent()) {
            format.get().accept(record, log);
        } else {
            var fee = record.getTransactionFee();
            var rates = spec.ratesProvider();
            var priceInUsd = sdec(rates.toUsdWithActiveRates(fee), 5);
            log.info(spec.logPrefix() + "Record (charged ${}): {}", priceInUsd, record);
            log.info(spec.logPrefix() + " And {} child record{}: {}",
                    childRecords.size(), childRecords.size() > 1 ? "s" : "", childRecords);
            log.info("Duplicates: {}", response.getTransactionGetRecord().getDuplicateTransactionRecordsList());
        }
    }
    if (response.getTransactionGetRecord().getHeader().getNodeTransactionPrecheckCode() == OK) {
        priceConsumer.ifPresent(pc -> pc.accept(record.getTransactionFee()));
        debitsConsumer.ifPresent(dc -> dc.accept(asDebits(record.getTransferList())));
    }
    if (registryEntry.isPresent()) {
        spec.registry().saveContractList(registryEntry.get() + "CreateResult",
                record.getContractCreateResult().getCreatedContractIDsList());
        spec.registry().saveContractList(registryEntry.get() + "CallResult",
                record.getContractCallResult().getCreatedContractIDsList());
    }
    if (saveTxnRecordToRegistry.isPresent()) {
        spec.registry().saveTransactionRecord(saveTxnRecordToRegistry.get(), record);
    }
}
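For reference, the fields submitWith consumes are plain protobuf accessors on TransactionRecord. Below is a small, self-contained sketch that prints the receipt status, the charged fee (in tinybars), and the transfer list; the RecordInspector class is illustrative only.

import com.hederahashgraph.api.proto.java.AccountAmount;
import com.hederahashgraph.api.proto.java.TransactionRecord;

class RecordInspector {
    // Prints the record fields touched by submitWith(): receipt status, fee, and transfers.
    static void summarize(TransactionRecord record) {
        System.out.println("Status : " + record.getReceipt().getStatus());
        System.out.println("Fee    : " + record.getTransactionFee() + " tinybars");
        for (AccountAmount adjustment : record.getTransferList().getAccountAmountsList()) {
            System.out.println("  0.0." + adjustment.getAccountID().getAccountNum()
                    + " -> " + adjustment.getAmount());
        }
    }
}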
Use of com.hederahashgraph.api.proto.java.TransactionRecord in project hedera-services by hashgraph.
The class RecordParser, method parseFrom.
public static RecordFile parseFrom(File file) {
    FileInputStream stream = null;
    List<TxnHistory> histories = new LinkedList<>();
    byte[] prevHash = null;
    if (!file.exists()) {
        throw new IllegalArgumentException("No such file - " + file);
    }
    try {
        stream = new FileInputStream(file);
        DataInputStream dis = new DataInputStream(stream);
        prevHash = new byte[48];
        int record_format_version = dis.readInt();
        int version = dis.readInt();
        log.debug("File '{}' is: ", file);
        log.debug(" -> Record format v{}", record_format_version);
        log.debug(" -> HAPI protocol v{}", version);
        while (dis.available() != 0) {
            try {
                byte typeDelimiter = dis.readByte();
                switch (typeDelimiter) {
                    case TYPE_PREV_HASH:
                        dis.read(prevHash);
                        break;
                    case TYPE_RECORD:
                        int n = dis.readInt();
                        byte[] buffer = new byte[n];
                        dis.readFully(buffer);
                        Transaction signedTxn = Transaction.parseFrom(buffer);
                        n = dis.readInt();
                        buffer = new byte[n];
                        dis.readFully(buffer);
                        TransactionRecord record = TransactionRecord.parseFrom(buffer);
                        histories.add(new TxnHistory(signedTxn, record));
                        break;
                    default:
                        log.warn("Record file '{}' contained unrecognized delimiter |{}|", file, typeDelimiter);
                }
            } catch (Exception e) {
                log.warn("Problem parsing record file '{}'", file);
                break;
            }
        }
        metaDigest.reset();
        contentDigest.reset();
        byte[] everything = Files.readAllBytes(file.toPath());
        byte[] preface = Arrays.copyOfRange(everything, 0, 57);
        byte[] bodyHash = contentDigest.digest(Arrays.copyOfRange(everything, 57, everything.length));
        metaDigest.update(ArrayUtils.addAll(preface, bodyHash));
    } catch (FileNotFoundException e) {
        throw new IllegalStateException();
    } catch (IOException e) {
        log.error("Problem reading record file '{}'!", file, e);
    } catch (Exception e) {
        log.error("Problem parsing record file '{}'!", file, e);
    } finally {
        try {
            if (stream != null) {
                stream.close();
            }
        } catch (IOException ex) {
            log.error("Exception in closing stream for '{}'!", file, ex);
        }
    }
    return new RecordFile(prevHash, metaDigest.digest(), histories);
}
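A hedged usage sketch for the parser follows: read one record stream file and walk its (transaction, record) pairs. The accessor names getTxnHistories() and getRecord() are assumptions for illustration; the actual RecordFile and TxnHistory APIs may expose the parsed data differently.

import java.io.File;

class RecordStreamReader {
    public static void main(String[] args) {
        // Parse a single record stream file passed on the command line.
        RecordFile recordFile = RecordParser.parseFrom(new File(args[0]));
        // NOTE: getTxnHistories() and getRecord() are assumed accessor names, not confirmed API.
        for (var history : recordFile.getTxnHistories()) {
            System.out.println(history.getRecord().getReceipt().getStatus());
        }
    }
}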