use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.
the class GetUTXOUtil method getNodesByPrefix.
private static List<TrieNode> getNodesByPrefix(ByteString prefix, UserServiceBlockingStub stub, boolean proof, ByteString utxo_root) throws ValidationException {
  try (TimeRecordAuto tra = TimeRecord.openAuto("GetUTXOUtil.getNodesByPrefix")) {
    LinkedList<TrieNode> lst = new LinkedList<>();
    GetUTXONodeReply reply = stub.getUTXONode(GetUTXONodeRequest.newBuilder()
      .setPrefix(prefix)
      .setIncludeProof(proof)
      .setUtxoRootHash(utxo_root)
      .setMaxResults(10000)
      .build());
    for (TrieNode node : reply.getAnswerList()) {
      if (!HashUtils.validateNodeHash(node))
        throw new ValidationException("Validation failure in node: " + HexUtil.getHexString(node.getPrefix()));
      lst.add(node);
    }
    for (TrieNode node : reply.getProofList()) {
      if (!HashUtils.validateNodeHash(node))
        throw new ValidationException("Validation failure in node: " + HexUtil.getHexString(node.getPrefix()));
      lst.add(node);
    }
    return lst;
  }
}
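The method above shows duckutil's scoped-timer idiom: TimeRecord.openAuto(label) returns an AutoCloseable, so the try-with-resources block guarantees the elapsed time for the labeled span is recorded even when a ValidationException is thrown mid-loop. Below is a minimal sketch of how such an auto-closing timer can be built; the class and method names (ScopedTimer, report) are hypothetical, and duckutil's actual implementation may differ.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;

// Hypothetical illustration of the TimeRecordAuto idiom: an AutoCloseable
// that accumulates elapsed nanoseconds per label when the try block exits.
public class ScopedTimer implements AutoCloseable {
  private static final Map<String, LongAdder> totals = new ConcurrentHashMap<>();

  private final String label;
  private final long start_ns;

  private ScopedTimer(String label) {
    this.label = label;
    this.start_ns = System.nanoTime();
  }

  // Mirrors the shape of TimeRecord.openAuto(label) seen in the source
  public static ScopedTimer openAuto(String label) {
    return new ScopedTimer(label);
  }

  @Override
  public void close() {
    // Runs on both normal exit and exception, so the span is always recorded
    totals.computeIfAbsent(label, k -> new LongAdder()).add(System.nanoTime() - start_ns);
  }

  public static void report() {
    totals.forEach((label, ns) ->
      System.out.println(label + ": " + ns.sum() / 1_000_000 + " ms total"));
  }
}

With this shape, a caller writes try (ScopedTimer t = ScopedTimer.openAuto("GetUTXOUtil.getNodesByPrefix")) { ... } and the timing survives early returns and thrown exceptions.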
use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.
the class ForgeInfo method getImportPath.
/**
 * Return the ordered list of blocks that need to be added to get from what
 * is imported in 'start' to the target block.
 *
 * If such a path is impossible, returns null. This could be because we lack
 * information about intermediate blocks, or because the BlockSummary is
 * already down some other path.
 *
 * If the target is already in the block summary, returns an empty list.
 */
public List<BlockHeader> getImportPath(Map<Integer, BlockHeader> start, BlockHeader target) {
  try (TimeRecordAuto tra = TimeRecord.openAuto("ForgeInfo.getImportPath")) {
    if (target == null)
      return null;
    int shard_id = target.getShardId();
    if (start.containsKey(shard_id)) {
      // We have something for this shard
      BlockHeader included = start.get(shard_id);
      if (target.getBlockHeight() < included.getBlockHeight()) {
        // We already have a block that is past this one, impossible
        return null;
      }
      if (target.getBlockHeight() == included.getBlockHeight()) {
        if (target.getSnowHash().equals(included.getSnowHash())) {
          // We are home
          return new LinkedList<BlockHeader>();
        } else {
          // Wrong block is included at this height - impossible
          return null;
        }
      }
      if (target.getBlockHeight() > included.getBlockHeight()) {
        BlockHeader prev = getHeader(new ChainHash(target.getPrevBlockHash()));
        List<BlockHeader> sub_list = getImportPath(start, prev);
        if (sub_list == null)
          return null;
        // If we reached it, then add ourselves on and done
        sub_list.add(target);
        return sub_list;
      }
      throw new RuntimeException("unreachable");
    } else {
      // The summary does not have the shard in question, just keep going down
      BlockHeader prev = getHeader(new ChainHash(target.getPrevBlockHash()));
      List<BlockHeader> sub_list = getImportPath(start, prev);
      if (sub_list == null)
        return null;
      // If we reached it, then add ourselves on and done
      sub_list.add(target);
      return sub_list;
    }
  }
}
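The three-way contract (null for impossible, empty list for already-imported, otherwise an ordered path) matters to callers: because the recursion appends target after the recursive call returns, the list comes back oldest-first and can be applied in order without sorting. A hypothetical caller sketch follows; the wrapper method and applyImport are illustrative names, not snowblossom APIs.

// Hypothetical caller of getImportPath; applyImport and this wrapper
// method are illustrative only.
public boolean importToTarget(ForgeInfo forge_info, Map<Integer, BlockHeader> start, BlockHeader target) {
  List<BlockHeader> path = forge_info.getImportPath(start, target);
  if (path == null) {
    return false; // impossible: missing headers, or 'start' is down another fork
  }
  for (BlockHeader bh : path) {
    applyImport(bh); // headers arrive oldest-first, ending with 'target'
  }
  return true; // an empty path means 'target' was already included
}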
use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.
the class ForgeInfo method isInChain.
/**
* Return true iff check is part of the chain ending with h
*/
public boolean isInChain(BlockHeader h, BlockHeader check) {
  if (check == null)
    return false;
  if (h == null)
    return false;
  try (TimeRecordAuto tra = TimeRecord.openAuto("ForgeInfo.isInChain")) {
    String key = new ChainHash(h.getSnowHash()).toString() + new ChainHash(check.getSnowHash()).toString();
    synchronized (in_chain_cache) {
      if (in_chain_cache.containsKey(key)) {
        return in_chain_cache.get(key);
      }
    }
    if (h.getBlockHeight() < check.getBlockHeight())
      return false;
    if (h.getBlockHeight() == check.getBlockHeight()) {
      return h.getSnowHash().equals(check.getSnowHash());
    }
    boolean rec_answer = isInChain(getHeader(new ChainHash(h.getPrevBlockHash())), check);
    synchronized (in_chain_cache) {
      in_chain_cache.put(key, rec_answer);
    }
    return rec_answer;
  }
}
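The method memoizes a recursive walk down the prev-hash links: the height comparison gives a cheap early exit, and caching the (h, check) result turns repeated ancestry queries over the same chain segment into O(1) lookups. A stripped-down sketch of the same memoized-recursion shape over a generic parent function is below; the names here (AncestryCache, Chain) are illustrative, not duckutil or snowblossom APIs.

import java.util.HashMap;
import java.util.Map;

// Illustrative memoized ancestry check over any structure with heights and
// a parent pointer. Node, height(), id() and parent() stand in for
// BlockHeader, getBlockHeight(), getSnowHash() and getHeader(prev_hash).
public class AncestryCache<Node> {
  public interface Chain<N> {
    int height(N n);
    String id(N n);
    N parent(N n); // may return null when the parent is unknown
  }

  private final Chain<Node> chain;
  private final Map<String, Boolean> cache = new HashMap<>();

  public AncestryCache(Chain<Node> chain) { this.chain = chain; }

  public synchronized boolean isInChain(Node h, Node check) {
    if (h == null || check == null) return false;
    String key = chain.id(h) + chain.id(check);
    Boolean cached = cache.get(key);
    if (cached != null) return cached;

    boolean answer;
    if (chain.height(h) < chain.height(check)) {
      answer = false; // 'check' is above 'h', so it cannot be an ancestor
    } else if (chain.height(h) == chain.height(check)) {
      answer = chain.id(h).equals(chain.id(check));
    } else {
      answer = isInChain(chain.parent(h), check); // walk one block down
    }
    cache.put(key, answer);
    return answer;
  }
}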
use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.
the class Validation method checkTransactionBasics.
/**
* Check the things about a transaction that can be checked without the database
*/
public static void checkTransactionBasics(Transaction tx, boolean must_be_coinbase) throws ValidationException {
  try (TimeRecordAuto tra_blk = TimeRecord.openAuto("Validation.checkTransactionBasics")) {
    if (tx.toByteString().size() > Globals.MAX_TX_SIZE) {
      throw new ValidationException("Transaction too big");
    }
    validateChainHash(tx.getTxHash(), "tx_hash");
    TransactionInner inner = null;
    try {
      CodedInputStream code_in = CodedInputStream.newInstance(tx.getInnerData().toByteArray());
      inner = TransactionInner.parseFrom(code_in);
      if (!code_in.isAtEnd()) {
        throw new ValidationException("Extra data at end of tx inner");
      }
    } catch (java.io.IOException e) {
      throw new ValidationException(e);
    }
    MessageDigest md = DigestUtil.getMD();
    MessageDigest md_addr = DigestUtil.getMDAddressSpec();
    md.update(tx.getInnerData().toByteArray());
    ChainHash found_hash = new ChainHash(md.digest());
    if (!found_hash.equals(tx.getTxHash())) {
      throw new ValidationException("TX hash mismatch");
    }
    if (inner.getVersion() != 1) {
      throw new ValidationException(String.format("Unknown transaction version: %d", inner.getVersion()));
    }
    if (must_be_coinbase) {
      if (!inner.getIsCoinbase()) {
        throw new ValidationException("must be coinbase");
      }
      if (inner.getInputsCount() > 0) {
        throw new ValidationException("coinbase must have zero inputs");
      }
      if (inner.getCoinbaseExtras().getRemarks().size() > Globals.COINBASE_REMARKS_MAX) {
        throw new ValidationException(String.format("Coinbase remarks of %d over max of %d",
          inner.getCoinbaseExtras().getRemarks().size(), Globals.COINBASE_REMARKS_MAX));
      }
      if (tx.getSignaturesCount() != 0) {
        throw new ValidationException("coinbase shouldn't have signatures");
      }
      if (inner.getFee() != 0) {
        throw new ValidationException("coinbase shouldn't have fee");
      }
    } else {
      if (inner.getIsCoinbase()) {
        throw new ValidationException("unexpected coinbase");
      }
      if (inner.getInputsCount() == 0) {
        throw new ValidationException("only coinbase can have zero inputs");
      }
      CoinbaseExtras extras = inner.getCoinbaseExtras();
      CoinbaseExtras blank = CoinbaseExtras.newBuilder().build();
      if (!extras.equals(blank)) {
        throw new ValidationException("only coinbase can have extras");
      }
    }
    if (inner.getOutputsCount() == 0) {
      throw new ValidationException("Transaction with no outputs makes no sense");
    }
    if (inner.getOutputsCount() >= Globals.MAX_OUTPUTS) {
      throw new ValidationException("Too many outputs");
    }
    validateNonNegValue(inner.getFee(), "fee");
    HashSet<AddressSpecHash> used_address_spec_hashes = new HashSet<>();
    for (TransactionInput in : inner.getInputsList()) {
      validateNonNegValue(in.getSrcTxOutIdx(), "input outpoint idx");
      if (in.getSrcTxOutIdx() >= Globals.MAX_OUTPUTS) {
        throw new ValidationException("referencing impossible output idx");
      }
      validateAddressSpecHash(in.getSpecHash(), "input spec hash");
      validateChainHash(in.getSrcTxId(), "input transaction id");
      used_address_spec_hashes.add(new AddressSpecHash(in.getSpecHash()));
    }
    if (used_address_spec_hashes.size() != inner.getClaimsCount()) {
      throw new ValidationException(String.format("Mismatch of used spec hashes (%d) and claims (%d)",
        used_address_spec_hashes.size(), inner.getClaimsCount()));
    }
    HashSet<AddressSpecHash> remaining_specs = new HashSet<>();
    remaining_specs.addAll(used_address_spec_hashes);
    for (AddressSpec spec : inner.getClaimsList()) {
      AddressSpecHash spechash = AddressUtil.getHashForSpec(spec, md_addr);
      if (!remaining_specs.contains(spechash)) {
        throw new ValidationException(String.format("claim for unused spec hash %s", spechash.toString()));
      }
      remaining_specs.remove(spechash);
    }
    Assert.assertEquals(0, remaining_specs.size());
    // Now we know the address spec list we have covers all inputs.
    // Next, make sure the signatures match up.
    // Maps claim idx -> set(key idx) of signed public keys,
    // so signed_claim_map.get(i).size() can be used to see if there are
    // enough signed keys for claim 'i'.
    TreeMap<Integer, Set<Integer>> signed_claim_map = new TreeMap<>();
    for (SignatureEntry se : tx.getSignaturesList()) {
      if (inner.getClaimsCount() <= se.getClaimIdx())
        throw new ValidationException("Signature entry for non-existent claim");
      AddressSpec spec = inner.getClaims(se.getClaimIdx());
      if (spec.getSigSpecsCount() <= se.getKeyIdx())
        throw new ValidationException("Signature entry for non-existent sig spec");
      SigSpec sig_spec = spec.getSigSpecs(se.getKeyIdx());
      if (!SignatureUtil.checkSignature(sig_spec, tx.getTxHash(), se.getSignature())) {
        throw new ValidationException("signature failed");
      }
      if (!signed_claim_map.containsKey(se.getClaimIdx()))
        signed_claim_map.put(se.getClaimIdx(), new TreeSet<Integer>());
      Set<Integer> set = signed_claim_map.get(se.getClaimIdx());
      if (set.contains(se.getKeyIdx()))
        throw new ValidationException("duplicate signatures for claim");
      set.add(se.getKeyIdx());
    }
    // Make sure each claim is satisfied
    for (int claim_idx = 0; claim_idx < inner.getClaimsCount(); claim_idx++) {
      int found = 0;
      if (signed_claim_map.containsKey(claim_idx))
        found = signed_claim_map.get(claim_idx).size();
      AddressSpec claim = inner.getClaims(claim_idx);
      if (found < claim.getRequiredSigners()) {
        throw new ValidationException(String.format("Claim %d only has %d of %d needed signatures",
          claim_idx, found, claim.getRequiredSigners()));
      }
    }
    // Sanity check outputs
    for (TransactionOutput out : inner.getOutputsList()) {
      validatePositiveValue(out.getValue(), "output value");
      validateAddressSpecHash(out.getRecipientSpecHash(), "output spec hash");
    }
    if (inner.getExtra().size() > Globals.MAX_TX_EXTRA) {
      throw new ValidationException("Extra string too long");
    }
  }
}
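The signature accounting here is a standard M-of-N check: verified signatures are bucketed by claim index with duplicate key indices rejected, then each claim's bucket size is compared against its required signer count. A self-contained sketch of just that counting step is below, using plain ints in place of the protobuf types; the class and method names are illustrative.

import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

// Illustrative M-of-N tally: pairs of (claim_idx, key_idx) stand in for
// verified SignatureEntry records; required[i] stands for
// claims(i).getRequiredSigners().
public class MultisigTally {
  public static void check(int[][] signatures, int[] required) {
    Map<Integer, Set<Integer>> signed = new TreeMap<>();
    for (int[] se : signatures) {
      int claim_idx = se[0], key_idx = se[1];
      Set<Integer> keys = signed.computeIfAbsent(claim_idx, k -> new TreeSet<>());
      if (!keys.add(key_idx)) {
        // The same key signing twice must not count toward the threshold
        throw new IllegalStateException("duplicate signatures for claim " + claim_idx);
      }
    }
    for (int claim_idx = 0; claim_idx < required.length; claim_idx++) {
      int found = signed.getOrDefault(claim_idx, new TreeSet<>()).size();
      if (found < required[claim_idx]) {
        throw new IllegalStateException(String.format(
          "Claim %d only has %d of %d needed signatures",
          claim_idx, found, required[claim_idx]));
      }
    }
  }
}

For example, check(new int[][]{{0, 0}, {0, 2}}, new int[]{2}) passes: claim 0 has two distinct signed key indices, meeting its 2-of-N requirement.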
use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.
the class Validation method checkBlockBasics.
/**
* Check the things about a block that can be checked without the database
*/
public static void checkBlockBasics(NetworkParams params, Block blk, boolean require_transactions, boolean ignore_target) throws ValidationException {
  try (TimeRecordAuto tra_blk = TimeRecord.openAuto("Validation.checkBlockBasics")) {
    BlockHeader header = blk.getHeader();
    if (header == null)
      throw new ValidationException("Header missing");
    if ((header.getVersion() != 1) && (header.getVersion() != 2)) {
      throw new ValidationException(String.format("Unknown block version: %d", header.getVersion()));
    }
    if (header.getBlockHeight() < params.getActivationHeightShards()) {
      if (header.getVersion() != 1) {
        throw new ValidationException("Block version must be 1 before shard activation");
      }
    } else {
      if (header.getVersion() != 2) {
        throw new ValidationException("Block version must be 2 after shard activation");
      }
    }
    if (header.getTimestamp() > System.currentTimeMillis() + params.getMaxClockSkewMs()) {
      throw new ValidationException("Block too far into future");
    }
    validateChainHash(header.getPrevBlockHash(), "prev_block_hash");
    validateChainHash(header.getMerkleRootHash(), "merkle_root_hash");
    validateChainHash(header.getUtxoRootHash(), "utxo_root_hash");
    validateChainHash(header.getSnowHash(), "snow_hash");
    validateByteString(header.getNonce(), "nonce", Globals.NONCE_LENGTH);
    validateByteString(header.getTarget(), "target", Globals.TARGET_LENGTH);
    SnowFieldInfo field_info = params.getSnowFieldInfo(header.getSnowField());
    if (field_info == null) {
      throw new ValidationException("Unknown snow field");
    }
    LinkedList<SnowPowProof> proofs = new LinkedList<>();
    proofs.addAll(header.getPowProofList());
    if (proofs.size() != Globals.POW_LOOK_PASSES) {
      throw new ValidationException("Wrong number of POW passes");
    }
    // check pow proofs
    for (SnowPowProof proof : proofs) {
      if (!checkProof(proof, field_info.getMerkleRootHash(), field_info.getLength())) {
        throw new ValidationException("POW Merkle Proof does not compute");
      }
    }
    // make sure pow proofs lead to snow hash
    byte[] pass_one = PowUtil.hashHeaderBits(header, header.getNonce().toByteArray());
    byte[] context = pass_one;
    long word_count = field_info.getLength() / (long) Globals.SNOW_MERKLE_HASH_LEN;
    for (SnowPowProof proof : proofs) {
      long idx = proof.getWordIdx();
      long nx = PowUtil.getNextSnowFieldIndex(context, word_count);
      if (idx != nx) {
        throw new ValidationException(String.format("POW Pass index does not match %d %d %d", idx, nx, word_count));
      }
      byte[] data = proof.getMerkleComponentList().get(0).toByteArray();
      context = PowUtil.getNextContext(context, data);
    }
    ByteString block_hash = ByteString.copyFrom(context);
    if (!header.getSnowHash().equals(block_hash)) {
      throw new ValidationException("POW Hash does not match");
    }
    if (header.getVersion() == 1) {
      if (header.getShardId() != 0) {
        throw new ValidationException("Header version 1 must not have shard id");
      }
      if (header.getShardExportRootHashMap().size() != 0) {
        throw new ValidationException("Header version 1 must not have export map");
      }
      if (header.getShardImportMap().size() != 0) {
        throw new ValidationException("Header version 1 must not have shard import map");
      }
    } else if (header.getVersion() == 2) {
      int my_shard_id = header.getShardId();
      Set<Integer> my_cover_set = ShardUtil.getCoverSet(my_shard_id, params);
      for (Map.Entry<Integer, ByteString> me : header.getShardExportRootHashMap().entrySet()) {
        int export_shard_id = me.getKey();
        if (my_cover_set.contains(export_shard_id)) {
          throw new ValidationException("Has shard_export_root_hash for self");
        }
        validateChainHash(me.getValue(), "shard_export_root_hash utxo for " + export_shard_id);
      }
      for (int import_shard_id : header.getShardImportMap().keySet()) {
        if (my_cover_set.contains(import_shard_id)) {
          throw new ValidationException(String.format("Import for shard from cover set. Importing %d into %d", import_shard_id, my_shard_id));
        }
        BlockImportList bil = header.getShardImportMap().get(import_shard_id);
        for (int import_height : bil.getHeightMap().keySet()) {
          validateNonNegValue(import_shard_id, "import_shard_id");
          validateNonNegValue(import_height, "import_height");
          validateChainHash(bil.getHeightMap().get(import_height), "shard_import_blocks");
        }
      }
      validatePositiveValue(header.getTxDataSizeSum(), "tx_data_size_sum");
      validatePositiveValue(header.getTxCount(), "tx_count");
    }
    if (!ignore_target) {
      if (!PowUtil.lessThanTarget(context, header.getTarget())) {
        throw new ValidationException("Hash not less than target");
      }
    }
    // if we have transactions, make sure each validates and that they merkle to merkle_root_hash
    if ((require_transactions) || (blk.getTransactionsCount() > 0)) {
      if (blk.getTransactionsCount() < 1) {
        throw new ValidationException("Must be at least one transaction in a block");
      }
      ArrayList<ChainHash> tx_list = new ArrayList<>();
      for (int i = 0; i < blk.getTransactionsCount(); i++) {
        Transaction tx = blk.getTransactions(i);
        boolean coinbase = (i == 0); // the first transaction must be the coinbase
        checkTransactionBasics(tx, coinbase);
        tx_list.add(new ChainHash(tx.getTxHash()));
      }
      ChainHash merkle_root = DigestUtil.getMerkleRootForTxList(tx_list);
      if (!merkle_root.equals(header.getMerkleRootHash())) {
        throw new ValidationException(String.format("MerkleRootHash mismatch. Found: %s, Block has: %s",
          merkle_root.toString(), new ChainHash(header.getMerkleRootHash()).toString()));
      }
    }
  }
}
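The final step re-derives the merkle root from the transaction hashes and compares it with the header, so a block cannot swap, drop, or reorder transactions without invalidating the header its proof-of-work commits to. Below is a generic pairwise-hash merkle root as an illustration only; the exact construction inside DigestUtil.getMerkleRootForTxList (hash function, odd-node handling) is not shown in this snippet and may differ.

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;

// Generic merkle-root sketch: hash adjacent pairs until one hash remains.
// Odd levels here carry the last element up unchanged; snowblossom's real
// rule may differ.
public class MerkleSketch {
  public static byte[] root(List<byte[]> leaves) throws NoSuchAlgorithmException {
    if (leaves.isEmpty()) throw new IllegalArgumentException("no leaves");
    List<byte[]> level = new ArrayList<>(leaves);
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    while (level.size() > 1) {
      List<byte[]> next = new ArrayList<>();
      for (int i = 0; i + 1 < level.size(); i += 2) {
        md.reset();
        md.update(level.get(i));
        md.update(level.get(i + 1));
        next.add(md.digest());
      }
      if (level.size() % 2 == 1) {
        next.add(level.get(level.size() - 1)); // odd element carried up
      }
      level = next;
    }
    return level.get(0);
  }
}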