
Example 11 with TimeRecordAuto

Use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.

The class BlockchainUtil, method getWorkForSummary.

public static BigInteger getWorkForSummary(BlockHeader header, BlockSummary prev_summary, NetworkParams params, List<ImportedBlock> imported_blocks) {
    try (TimeRecordAuto tra = TimeRecord.openAuto("BlockchainUtil.getWorkForSummary")) {
        BigInteger target = BlockchainUtil.targetBytesToBigInteger(header.getTarget());
        BigInteger slice = BigInteger.valueOf(1024L);
        // A block at max target is worth 'slice' (1024) work units.
        // A block at half the target (harder) is worth twice as many work units,
        // i.e. work_in_block scales as max_target / target.
        BigInteger work_in_block = params.getMaxTarget().multiply(slice).divide(target);
        // add in work from imported blocks
        for (ImportedBlock ib : imported_blocks) {
            BigInteger import_target = BlockchainUtil.targetBytesToBigInteger(ib.getHeader().getTarget());
            work_in_block = work_in_block.add(params.getMaxTarget().multiply(slice).divide(import_target));
        }
        // SIP2 - work is multiplied by 4^activated_field.  That way, a higher field
        // takes precedence.
        BigInteger field_multipler = BigInteger.ONE.shiftLeft(prev_summary.getActivatedField() * 2);
        work_in_block = work_in_block.multiply(field_multipler);
        BigInteger prev_work_sum = BlockchainUtil.readInteger(prev_summary.getWorkSum());
        BigInteger worksum = prev_work_sum.add(work_in_block);
        return worksum;
    }
}
Also used : TimeRecordAuto(duckutil.TimeRecordAuto) BigInteger(java.math.BigInteger)
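
To make the work arithmetic above concrete, here is a minimal standalone sketch (illustrative numbers only, not real chain parameters): a block exactly at max target contributes 'slice' (1024) work units, a block at half the target contributes twice that, and the SIP2 multiplier of 4^activated_field is produced with shiftLeft(activated_field * 2).

import java.math.BigInteger;

public class WorkUnitsSketch {
    public static void main(String[] args) {
        // Illustrative max target; the real value comes from NetworkParams.getMaxTarget().
        BigInteger max_target = BigInteger.ONE.shiftLeft(200);
        BigInteger slice = BigInteger.valueOf(1024L);
        // A block exactly at max target is worth 'slice' work units.
        System.out.println(max_target.multiply(slice).divide(max_target)); // 1024
        // A block at half the target (harder) is worth twice as many work units.
        BigInteger harder_target = max_target.divide(BigInteger.valueOf(2L));
        System.out.println(max_target.multiply(slice).divide(harder_target)); // 2048
        // SIP2: multiply by 4^activated_field, implemented as a left shift by 2 * field.
        int activated_field = 3;
        System.out.println(BigInteger.ONE.shiftLeft(activated_field * 2)); // 64 == 4^3
    }
}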

Example 12 with TimeRecordAuto

Use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.

The class BlockchainUtil, method getNewSummary.

public static BlockSummary getNewSummary(BlockHeader header, BlockSummary prev_summary, NetworkParams params, long tx_count, long tx_body_sum, List<ImportedBlock> imported_blocks) {
    try (TimeRecordAuto tra = TimeRecord.openAuto("BlockchainUtil.getNewSummary")) {
        BlockSummary.Builder bs = BlockSummary.newBuilder();
        BigInteger target = BlockchainUtil.targetBytesToBigInteger(header.getTarget());
        bs.setTotalTransactions(prev_summary.getTotalTransactions() + tx_count);
        bs.setBlockTxCount(tx_count);
        // true if this is the first block of a fresh split
        boolean fresh_block_split = false;
        if (header.getVersion() == 1) {
            // Record self regardless
            bs.putImportedShards(header.getShardId(), header);
        } else if (header.getVersion() == 2) {
            // update the tx body running average
            long prev_tx_size_average;
            int prev_shard_len;
            if (prev_summary.getHeader().getShardId() != header.getShardId()) {
                // shard split
                prev_tx_size_average = 0;
                prev_shard_len = 0;
                fresh_block_split = true;
            } else {
                prev_tx_size_average = prev_summary.getTxSizeAverage();
                prev_shard_len = prev_summary.getShardLength();
            }
            long prev_w = prev_tx_size_average * (1000L - params.getAvgWeight());
            long new_w = tx_body_sum * params.getAvgWeight();
            long new_avg = (prev_w + new_w) / 1000L;
            bs.setTxSizeAverage(new_avg);
            bs.setShardLength(prev_shard_len + 1);
            bs.putAllImportedShards(prev_summary.getImportedShardsMap());
            // put myself in
            bs.putImportedShards(header.getShardId(), header);
            for (ImportedBlock imb : imported_blocks) {
                int imp_shard = imb.getHeader().getShardId();
                bs.putImportedShards(imp_shard, imb.getHeader());
            }
            // Import previous shard history and prune it down
            for (Map.Entry<Integer, BlockImportList> me : prev_summary.getShardHistoryMap().entrySet()) {
                int shard = me.getKey();
                BlockImportList prev_hist = me.getValue();
                TreeMap<Integer, ByteString> height_map = new TreeMap<>();
                height_map.putAll(prev_hist.getHeightMapMap());
                while (height_map.size() > 5 * params.getMaxShardSkewHeight()) {
                    height_map.pollFirstEntry();
                }
                BlockImportList.Builder sh = BlockImportList.newBuilder();
                sh.putAllHeightMap(height_map);
                bs.putShardHistoryMap(shard, sh.build());
            }
            // Read all headers and stick them into the shard histories
            LinkedList<BlockHeader> all_headers = new LinkedList<>();
            all_headers.add(header);
            for (ImportedBlock imb : imported_blocks) {
                int imp_shard = imb.getHeader().getShardId();
                all_headers.add(imb.getHeader());
            }
            // Add all blocks into history
            for (BlockHeader bh : all_headers) {
                addBlockToHistory(bs, bh.getShardId(), bh.getBlockHeight(), new ChainHash(bh.getSnowHash()));
                for (Map.Entry<Integer, BlockImportList> me : bh.getShardImportMap().entrySet()) {
                    int shard = me.getKey();
                    BlockImportList bil = me.getValue();
                    addBlockToHistory(bs, shard, bil);
                }
            }
        }
        BigInteger worksum = getWorkForSummary(header, prev_summary, params, imported_blocks);
        bs.setWorkSum(worksum.toString());
        long weight = params.getAvgWeight();
        long decay = 1000L - weight;
        BigInteger decay_bi = BigInteger.valueOf(decay);
        BigInteger weight_bi = BigInteger.valueOf(weight);
        long block_time;
        long prev_block_time;
        BigInteger prev_target_avg;
        if (prev_summary.getHeader().getTimestamp() == 0) {
            // first block, just pick a time
            block_time = params.getBlockTimeTarget();
            prev_block_time = params.getBlockTimeTarget();
            prev_target_avg = params.getMaxTarget();
        } else {
            block_time = header.getTimestamp() - prev_summary.getHeader().getTimestamp();
            prev_block_time = prev_summary.getBlocktimeAverageMs();
            prev_target_avg = BlockchainUtil.readInteger(prev_summary.getTargetAverage());
            if (fresh_block_split) {
                // With a split, the difficulty drops
                // so double the target average carried into the new shard.
                prev_target_avg = prev_target_avg.multiply(BigInteger.valueOf(2L));
            }
        }
        int field = prev_summary.getActivatedField();
        bs.setActivatedField(field);
        SnowFieldInfo next_field = params.getSnowFieldInfo(field + 1);
        if (next_field != null) {
            /*System.out.println(String.format("Field %d Target %f, activation %f", field+1,
          PowUtil.getDiffForTarget(prev_target_avg),
          PowUtil.getDiffForTarget(next_field.getActivationTarget())));*/
            if (prev_target_avg.compareTo(next_field.getActivationTarget()) <= 0) {
                bs.setActivatedField(field + 1);
            }
        }
        bs.setBlocktimeAverageMs((prev_block_time * decay + block_time * weight) / 1000L);
        bs.setTargetAverage(prev_target_avg.multiply(decay_bi).add(target.multiply(weight_bi)).divide(BigInteger.valueOf(1000L)).toString());
        bs.setHeader(header);
        return bs.build();
    }
}
Also used : TreeMap(java.util.TreeMap) LinkedList(java.util.LinkedList) TimeRecordAuto(duckutil.TimeRecordAuto) BigInteger(java.math.BigInteger)
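
Both running averages above (tx size and block time) use the same integer exponential moving average: new_avg = (prev_avg * (1000 - avg_weight) + sample * avg_weight) / 1000. A minimal sketch with an assumed avg_weight of 100 (the real value comes from NetworkParams.getAvgWeight()):

public class MovingAverageSketch {
    public static void main(String[] args) {
        // Assumed weight out of 1000; illustrative values only.
        long avg_weight = 100L;
        long prev_tx_size_average = 5000L;
        long tx_body_sum = 8000L;
        long prev_w = prev_tx_size_average * (1000L - avg_weight); // 5000 * 900 = 4,500,000
        long new_w = tx_body_sum * avg_weight;                     // 8000 * 100 =   800,000
        long new_avg = (prev_w + new_w) / 1000L;
        System.out.println(new_avg); // 5300
    }
}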

Example 13 with TimeRecordAuto

Use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.

The class Validation, method deepTransactionCheck.

/**
 * The block header need not be complete or real.
 * It only needs the block height and timestamp set for the purposes of this check.
 * @return the fee amount in flakes if the tx is good
 */
public static long deepTransactionCheck(Transaction tx, UtxoUpdateBuffer utxo_buffer, BlockHeader block_header, NetworkParams params, Set<Integer> shard_cover_set, Map<Integer, UtxoUpdateBuffer> export_buffers) throws ValidationException {
    try (TimeRecordAuto tra_blk = TimeRecord.openAuto("Validation.deepTransactionCheck")) {
        TransactionInner inner = null;
        try {
            inner = TransactionInner.parseFrom(tx.getInnerData());
        } catch (java.io.IOException e) {
            throw new ValidationException("error parsing tx on second pass somehow", e);
        }
        long sum_of_inputs = 0L;
        // Make sure all inputs exist
        for (TransactionInput in : inner.getInputsList()) {
            TransactionOutput matching_out = utxo_buffer.getOutputMatching(in);
            if (matching_out == null) {
                throw new ValidationException(String.format("No matching output for input %s", new ChainHash(in.getSrcTxId())));
            }
            validateSpendable(matching_out, block_header, params);
            sum_of_inputs += matching_out.getValue();
            // SIP-4 check
            if (block_header.getBlockHeight() >= params.getActivationHeightTxInValue()) {
                if (in.getValue() != 0L) {
                    if (in.getValue() != matching_out.getValue()) {
                        throw new ValidationException(String.format("Input value does not match: %d %d", matching_out.getValue(), in.getValue()));
                    }
                }
            }
            utxo_buffer.useOutput(matching_out, new ChainHash(in.getSrcTxId()), in.getSrcTxOutIdx());
        }
        long spent = 0L;
        // Sum up all outputs
        int out_idx = 0;
        ArrayList<ByteString> raw_output_list = TransactionUtil.extractWireFormatTxOut(tx);
        for (TransactionOutput out : inner.getOutputsList()) {
            validateTransactionOutput(out, block_header, params);
            spent += out.getValue();
            if (shard_cover_set.contains(out.getTargetShard())) {
                utxo_buffer.addOutput(raw_output_list, out, new ChainHash(tx.getTxHash()), out_idx);
            } else {
                int target_shard = out.getTargetShard();
                if (!export_buffers.containsKey(target_shard)) {
                    HashedTrie hashed_trie_mem = new HashedTrie(new TrieDBMem(), true, false);
                    UtxoUpdateBuffer export_txo_buffer = new UtxoUpdateBuffer(hashed_trie_mem, UtxoUpdateBuffer.EMPTY);
                    export_buffers.put(target_shard, export_txo_buffer);
                }
                export_buffers.get(target_shard).addOutput(raw_output_list, out, new ChainHash(tx.getTxHash()), out_idx);
            }
            out_idx++;
        }
        spent += inner.getFee();
        if (!inner.getIsCoinbase()) {
            if (sum_of_inputs != spent) {
                throw new ValidationException(String.format("Transaction took in %d and spent %d", sum_of_inputs, spent));
            }
        }
        return inner.getFee();
    }
}
Also used : HashedTrie(snowblossom.lib.trie.HashedTrie) TrieDBMem(snowblossom.lib.trie.TrieDBMem) ByteString(com.google.protobuf.ByteString) TimeRecordAuto(duckutil.TimeRecordAuto)
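
The balance rule enforced above is that, for any non-coinbase transaction, sum_of_inputs must equal the sum of the outputs plus the declared fee, and the fee (in flakes) is what the method returns. A standalone sketch of that invariant with made-up amounts:

public class TxBalanceSketch {
    public static void main(String[] args) {
        // Hypothetical amounts in flakes, for illustration only.
        long sum_of_inputs = 1_000_000L;
        long[] output_values = { 700_000L, 250_000L };
        long fee = 50_000L;
        long spent = fee;
        for (long v : output_values) {
            spent += v;
        }
        // deepTransactionCheck throws a ValidationException when these differ
        // for a non-coinbase transaction; here we just print the comparison.
        System.out.println(sum_of_inputs == spent); // true: 1,000,000 == 950,000 + 50,000
    }
}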

Example 14 with TimeRecordAuto

Use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.

The class Validation, method deepBlockValidation.

public static void deepBlockValidation(NetworkParams params, HashedTrie utxo_hashed_trie, Block blk, BlockSummary prev_summary) throws ValidationException {
    try (TimeRecordAuto tra_blk = TimeRecord.openAuto("Validation.deepBlockValidation")) {
        // Check expected target
        BigInteger expected_target = PowUtil.calcNextTarget(prev_summary, params, blk.getHeader().getTimestamp());
        ByteString expected_target_bytes = BlockchainUtil.targetBigIntegerToBytes(expected_target);
        if (!blk.getHeader().getTarget().equals(expected_target_bytes)) {
            throw new ValidationException("Block target does not match expected target");
        }
        if (blk.getHeader().getSnowField() < prev_summary.getActivatedField()) {
            throw new ValidationException(String.format("Snow field %d when at least %d is required", blk.getHeader().getSnowField(), prev_summary.getActivatedField()));
        }
        // Check timestamps and block height
        ChainHash prevblock = new ChainHash(blk.getHeader().getPrevBlockHash());
        if (prevblock.equals(ChainHash.ZERO_HASH)) {
            if (blk.getHeader().getBlockHeight() != 0) {
                throw new ValidationException("Block height must be zero for first block");
            }
        } else {
            if (prev_summary.getHeader().getBlockHeight() + 1 != blk.getHeader().getBlockHeight()) {
                throw new ValidationException("Block height must not be prev block plus one");
            }
            if (prev_summary.getHeader().getTimestamp() >= blk.getHeader().getTimestamp()) {
                throw new ValidationException("Block time must be greater than last one");
            }
        }
        // At this point, we have a block with a reasonable header that matches everything
        // (pow, target, merkle root, etc).
        // now have to check the following
        // - coinbase tx height correct
        // - coinbase tx remark correct if first block
        // - For each transaction
        // - transaction inputs exist in utxo
        // - sum of inputs = sum of outputs + fee
        // - sum of coinbase output = block reward plus fee sum
        // - new UTXO root is what is expected
        Transaction coinbase_tx = blk.getTransactions(0);
        TransactionInner coinbase_inner = null;
        try {
            coinbase_inner = TransactionInner.parseFrom(coinbase_tx.getInnerData());
        } catch (java.io.IOException e) {
            throw new ValidationException("error parsing coinbase on second pass somehow", e);
        }
        if (coinbase_inner.getCoinbaseExtras().getBlockHeight() != blk.getHeader().getBlockHeight()) {
            throw new ValidationException("Block height in block header does not match block height in coinbase");
        }
        if (coinbase_inner.getCoinbaseExtras().getShardId() != blk.getHeader().getShardId()) {
            throw new ValidationException("Block shard_id in block header does not match shard_id in coinbase");
        }
        if (blk.getHeader().getBlockHeight() == 0) {
            if (!coinbase_inner.getCoinbaseExtras().getRemarks().startsWith(params.getBlockZeroRemark())) {
                throw new ValidationException("Block zero remark must start with defined remark");
            }
        }
        UtxoUpdateBuffer utxo_buffer = new UtxoUpdateBuffer(utxo_hashed_trie, new ChainHash(prev_summary.getHeader().getUtxoRootHash()));
        if (blk.getHeader().getVersion() == 2) {
            checkShardBasics(blk, prev_summary, params);
            if (shouldResetUtxo(blk, prev_summary, params)) {
                utxo_buffer = new UtxoUpdateBuffer(utxo_hashed_trie, UtxoUpdateBuffer.EMPTY);
            }
            // Add in imported outputs
            for (ImportedBlock ib : blk.getImportedBlocksList()) {
                validateShardImport(params, ib, blk.getHeader().getShardId(), utxo_buffer);
            }
        }
        long fee_sum = 0L;
        long tx_size_sum = 0L;
        int tx_count = 0;
        Set<Integer> cover_set = ShardUtil.getCoverSet(blk.getHeader().getShardId(), params);
        Map<Integer, UtxoUpdateBuffer> export_utxo_buffer = new TreeMap<>();
        for (Transaction tx : blk.getTransactionsList()) {
            fee_sum += deepTransactionCheck(tx, utxo_buffer, blk.getHeader(), params, cover_set, export_utxo_buffer);
            tx_size_sum += tx.getInnerData().size() + tx.getTxHash().size();
            tx_count++;
        }
        // Check export set
        if (!export_utxo_buffer.keySet().equals(blk.getHeader().getShardExportRootHashMap().keySet())) {
            throw new ValidationException("Export set mismatch");
        }
        for (int export_shard : export_utxo_buffer.keySet()) {
            ChainHash tx_export_hash = export_utxo_buffer.get(export_shard).simulateUpdates();
            ChainHash header_export_hash = new ChainHash(blk.getHeader().getShardExportRootHashMap().get(export_shard));
            if (!tx_export_hash.equals(header_export_hash)) {
                throw new ValidationException("Export set utxo hash mismatch");
            }
        }
        if (blk.getHeader().getVersion() == 2) {
            if (blk.getHeader().getTxDataSizeSum() != tx_size_sum) {
                throw new ValidationException("tx_data_size_sum mismatch");
            }
            if (blk.getHeader().getTxCount() != tx_count) {
                throw new ValidationException("tx_count mismatch");
            }
        }
        long reward = ShardUtil.getBlockReward(params, blk.getHeader());
        long coinbase_sum = fee_sum + reward;
        long coinbase_spent = 0L;
        for (TransactionOutput out : coinbase_inner.getOutputsList()) {
            coinbase_spent += out.getValue();
        }
        if (coinbase_sum != coinbase_spent) {
            throw new ValidationException(String.format("Coinbase could have spent %d but spent %d", coinbase_sum, coinbase_spent));
        }
        utxo_buffer.commitIfEqual(blk.getHeader().getUtxoRootHash());
    }
}
Also used : ByteString(com.google.protobuf.ByteString) BigInteger(java.math.BigInteger) TimeRecordAuto(duckutil.TimeRecordAuto)
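
One of the checks above: the coinbase outputs must spend exactly the block reward plus the sum of all transaction fees in the block. A small sketch with assumed numbers (the real reward comes from ShardUtil.getBlockReward):

public class CoinbaseSumSketch {
    public static void main(String[] args) {
        // Assumed values in flakes, for illustration only.
        long reward = 50_000_000L;
        long fee_sum = 120_000L;
        long coinbase_sum = fee_sum + reward;
        long[] coinbase_outputs = { 40_000_000L, 10_120_000L };
        long coinbase_spent = 0L;
        for (long v : coinbase_outputs) {
            coinbase_spent += v;
        }
        // deepBlockValidation throws a ValidationException if these are not equal.
        System.out.println(coinbase_sum == coinbase_spent); // true: 50,120,000 flakes
    }
}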

Example 15 with TimeRecordAuto

Use of duckutil.TimeRecordAuto in project snowblossom by snowblossomcoin.

The class ProofGen, method getProof.

public static SnowPowProof getProof(FieldSource field_source, FieldSource deck_source, long word_index, long total_words) throws java.io.IOException {
    try (TimeRecordAuto tra = TimeRecord.openAuto("SnowMerkleProof.getProof")) {
        LinkedList<ByteString> partners = new LinkedList<ByteString>();
        MessageDigest md;
        try {
            md = MessageDigest.getInstance(Globals.SNOW_MERKLE_HASH_ALGO);
        } catch (java.security.NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        getInnerProof(field_source, deck_source, md, partners, word_index, 0, total_words);
        SnowPowProof.Builder builder = SnowPowProof.newBuilder();
        builder.setWordIdx(word_index);
        builder.addAllMerkleComponent(partners);
        return builder.build();
    }
}
Also used : TimeRecordAuto(duckutil.TimeRecordAuto) ByteString(com.google.protobuf.ByteString) MessageDigest(java.security.MessageDigest) LinkedList(java.util.LinkedList)
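
Every example on this page follows the same pattern: open a named TimeRecordAuto in a try-with-resources block so the elapsed time for that scope is recorded when the block exits, even on an early return or exception. A minimal sketch of the pattern, assuming only what the examples above show (TimeRecord.openAuto returning an AutoCloseable TimeRecordAuto); the class name, method, and label are hypothetical:

import duckutil.TimeRecord;
import duckutil.TimeRecordAuto;

public class TimingSketch {
    public static long timedSum(long[] values) {
        // Closing the TimeRecordAuto (done automatically by try-with-resources)
        // records the elapsed time under the given label.
        try (TimeRecordAuto tra = TimeRecord.openAuto("TimingSketch.timedSum")) {
            long sum = 0L;
            for (long v : values) {
                sum += v;
            }
            return sum;
        }
    }
}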

Aggregations

TimeRecordAuto (duckutil.TimeRecordAuto) 31
ByteString (com.google.protobuf.ByteString) 11
LinkedList (java.util.LinkedList) 10
BigInteger (java.math.BigInteger) 7
TreeMap (java.util.TreeMap) 7
MessageDigest (java.security.MessageDigest) 3
HashMap (java.util.HashMap) 3
Map (java.util.Map) 3
ImmutableMap (com.google.common.collect.ImmutableMap) 2
ArrayList (java.util.ArrayList) 2
TreeSet (java.util.TreeSet) 2
CodedInputStream (com.google.protobuf.CodedInputStream) 1
MetricLog (duckutil.MetricLog) 1
DecimalFormat (java.text.DecimalFormat) 1
HashSet (java.util.HashSet) 1
Random (java.util.Random) 1
SplittableRandom (java.util.SplittableRandom) 1
ValidationException (snowblossom.lib.ValidationException) 1
HashedTrie (snowblossom.lib.trie.HashedTrie) 1
TrieDBMem (snowblossom.lib.trie.TrieDBMem) 1