Use of io.nuls.kernel.model.NulsDigestData in the project nuls by nuls-io:
class BlockHeaderPoTest, method serializeAndParse.
/**
 * Verify the correctness of serialization and deserialization of block header entities:
 * populate every serialized field of a BlockHeaderPo, serialize it to bytes, parse the
 * bytes into a fresh instance, and assert each round-tripped field equals the original.
 */
@Test
public void serializeAndParse() {
// Build a fixture with a representative value for each serialized field.
BlockHeaderPo po = new BlockHeaderPo();
po.setHeight(1286L);
po.setExtend("extends".getBytes());
po.setMerkleHash(NulsDigestData.calcDigestData("merkleHash".getBytes()));
try {
po.setPackingAddress("address".getBytes());
} catch (Exception e) {
// A failure here means the fixture itself is broken; fail the test explicitly.
e.printStackTrace();
assertTrue(false);
}
po.setScriptSign(new BlockSignature());
po.setTime(12345678901L);
po.setTxCount(3);
List<NulsDigestData> txHashList = new ArrayList<>();
txHashList.add(NulsDigestData.calcDigestData("first-tx-hash".getBytes()));
txHashList.add(NulsDigestData.calcDigestData("second-tx-hash".getBytes()));
txHashList.add(NulsDigestData.calcDigestData("third-tx-hash".getBytes()));
po.setTxHashList(txHashList);
// Round trip: serialize to bytes, then parse into a fresh instance.
// NOTE(review): serialize/parse failures are only logged, not failed immediately;
// the field assertions below would then fail on default values instead.
byte[] bytes = new byte[0];
try {
bytes = po.serialize();
} catch (IOException e) {
Log.error(e);
}
BlockHeaderPo newPo = new BlockHeaderPo();
try {
newPo.parse(bytes, 0);
} catch (NulsException e) {
Log.error(e);
}
// The test expects hash to be absent after parsing — presumably it is not part of
// the serialized payload and is computed elsewhere; TODO confirm.
assertNull(newPo.getHash());
assertEquals(po.getHeight(), newPo.getHeight());
assertEquals(po.getPreHash(), newPo.getPreHash());
assertEquals(po.getMerkleHash(), newPo.getMerkleHash());
// byte[] fields need content comparison, not reference equality.
assertTrue(Arrays.equals(po.getExtend(), newPo.getExtend()));
assertTrue(Arrays.equals(po.getPackingAddress(), newPo.getPackingAddress()));
assertTrue(Arrays.equals(po.getScriptSign().getPublicKey(), newPo.getScriptSign().getPublicKey()));
assertEquals(po.getScriptSign().getSignData(), newPo.getScriptSign().getSignData());
assertEquals(po.getTime(), newPo.getTime());
assertEquals(po.getTxCount(), newPo.getTxCount());
assertEquals(po.getTxHashList().get(0), newPo.getTxHashList().get(0));
assertEquals(po.getTxHashList().get(1), newPo.getTxHashList().get(1));
assertEquals(po.getTxHashList().get(2), newPo.getTxHashList().get(2));
}
Use of io.nuls.kernel.model.NulsDigestData in the project nuls by nuls-io:
class BlockHashResponse, method size.
@Override
public int size() {
    // Serialized layout: requestMessageHash, a varint entry count, then each hash.
    int total = SerializeUtils.sizeOfNulsData(requestMessageHash)
            + SerializeUtils.sizeOfVarInt(hashList.size());
    for (NulsDigestData digest : hashList) {
        total += SerializeUtils.sizeOfNulsData(digest);
    }
    return total;
}
Use of io.nuls.kernel.model.NulsDigestData in the project nuls by nuls-io:
class UtxoAccountsServiceImpl, method getInputAddress.
/**
 * Resolves the address that owns an input coin by looking up the output it spends.
 *
 * @param from the input coin; its owner bytes encode the source txHash plus output index
 * @return the owner bytes of the referenced output, or {@code null} when the source
 *         transaction cannot be found, is malformed, or the index is out of range
 */
private byte[] getInputAddress(Coin from) {
    byte[] owner = from.getOwner();
    // Split the owner bytes into the source transaction hash and the output index.
    byte[] fromHash = UtxoAccountsUtil.getTxHashBytes(owner);
    int fromIndex = UtxoAccountsUtil.getIndex(owner);
    NulsDigestData fromHashObj = new NulsDigestData();
    try {
        fromHashObj.parse(fromHash, 0);
        Transaction outPutTx = utxoAccountsStorageService.getTx(fromHashObj);
        // Guard against a missing transaction or an out-of-range index: the previous
        // code would throw an unchecked NPE/IndexOutOfBoundsException here, bypassing
        // the NulsException handler below. Callers already handle a null return.
        if (outPutTx == null || outPutTx.getCoinData() == null
                || outPutTx.getCoinData().getTo() == null
                || fromIndex < 0
                || fromIndex >= outPutTx.getCoinData().getTo().size()) {
            return null;
        }
        return outPutTx.getCoinData().getTo().get(fromIndex).getOwner();
    } catch (NulsException e) {
        Log.error(e);
        return null;
    }
}
Use of io.nuls.kernel.model.NulsDigestData in the project nuls by nuls-io:
class LocalCacheBlockBalance, method parse.
/**
 * Deserializes this object from the byte buffer.
 * Wire format: int64 blockHeight; two digests (hash, preHash), each read as one
 * algorithm-type byte followed by a length-prefixed digest payload; then a varint
 * entry count followed by that many UtxoAccountsBalancePo records.
 *
 * @param byteBuffer positioned at the start of this structure
 * @throws NulsException if the buffer contents cannot be parsed
 */
@Override
public void parse(NulsByteBuffer byteBuffer) throws NulsException {
this.blockHeight = byteBuffer.readInt64();
// Each digest: first byte is the digest algorithm type, then length-prefixed bytes.
this.hash = new NulsDigestData(byteBuffer.readByte(), byteBuffer.readByLengthByte());
this.preHash = new NulsDigestData(byteBuffer.readByte(), byteBuffer.readByLengthByte());
int listCount = (int) byteBuffer.readVarInt();
// A zero count leaves balanceList untouched (null/previous value) rather than empty.
if (0 < listCount) {
List<UtxoAccountsBalancePo> list = new ArrayList<>();
for (int i = 0; i < listCount; i++) {
list.add(byteBuffer.readNulsData(new UtxoAccountsBalancePo()));
}
this.balanceList = list;
}
}
Use of io.nuls.kernel.model.NulsDigestData in the project nuls by nuls-io:
class LimitHashMapTest, method test.
/**
 * Micro-benchmark of LimitHashMap with 200,000 transactions: times put, size,
 * get, containsKey and remove, printing each timing. The final assertTrue(true)
 * only confirms the run completes without throwing.
 */
@Test
public void test() throws IOException {
LimitHashMap<NulsDigestData, Transaction> map = new LimitHashMap<>(200000);
long use = 0;
List<NulsDigestData> hashList = new ArrayList<>();
// Insert 200,000 transactions, accumulating only the time spent in put().
for (int i = 0; i < 200000; i++) {
Transaction transaction = new TransferTransaction();
transaction.setTime(System.currentTimeMillis());
transaction.setHash(NulsDigestData.calcDigestData(transaction.serializeForHash()));
hashList.add(transaction.getHash());
long start = System.nanoTime();
map.put(transaction.getHash(), transaction);
use += (System.nanoTime() - start);
}
// "Total time to insert 200,000 entries (ns)"
System.out.println("插入20万条累计用时:" + use + "纳秒");
// Time 100,000 calls to the backing queue's size().
long start = System.currentTimeMillis();
for (int i = 0; i < 100000; i++) {
map.getQueue().size();
}
System.out.println("queue size 100000次用时:" + (System.currentTimeMillis() - start) + "ms");
// Time 100,000 calls to the backing map's size().
start = System.currentTimeMillis();
for (int i = 0; i < 100000; i++) {
map.getMap().size();
}
System.out.println("map size 100000次用时:" + (System.currentTimeMillis() - start) + "ms");
// Time 200,000 lookups. ("Time for 200,000 gets")
start = System.currentTimeMillis();
for (NulsDigestData key : hashList) {
map.get(key);
}
System.out.println("查询200000次用时:" + (System.currentTimeMillis() - start) + "ms");
// Time 200,000 containsKey checks.
start = System.currentTimeMillis();
for (NulsDigestData key : hashList) {
map.containsKey(key);
}
System.out.println("判断是否包含200000次用时:" + (System.currentTimeMillis() - start) + "ms");
// Time 200,000 removals.
start = System.currentTimeMillis();
for (NulsDigestData key : hashList) {
map.remove(key);
}
System.out.println("删除200000次用时:" + (System.currentTimeMillis() - start) + "ms");
assertTrue(true);
}
Aggregations