Use of com.iota.iri.model.Hash in project iri by iotaledger: class TipsManager, method belowMaxDepth.
boolean belowMaxDepth(Hash tip, int depth, Set<Hash> maxDepthOk) throws Exception {
    // if tip is confirmed stop
    if (TransactionViewModel.fromHash(tangle, tip).snapshotIndex() >= depth) {
        return false;
    }
    // if tip unconfirmed, check if any referenced tx is confirmed below maxDepth
    Queue<Hash> nonAnalyzedTransactions = new LinkedList<>(Collections.singleton(tip));
    Set<Hash> analyzedTranscations = new HashSet<>();
    Hash hash;
    while ((hash = nonAnalyzedTransactions.poll()) != null) {
        if (analyzedTranscations.add(hash)) {
            TransactionViewModel transaction = TransactionViewModel.fromHash(tangle, hash);
            if (transaction.snapshotIndex() != 0 && transaction.snapshotIndex() < depth) {
                return true;
            }
            if (transaction.snapshotIndex() == 0) {
                if (maxDepthOk.contains(hash)) {
                    // log.info("Memoization!");
                } else {
                    nonAnalyzedTransactions.offer(transaction.getTrunkTransactionHash());
                    nonAnalyzedTransactions.offer(transaction.getBranchTransactionHash());
                }
            }
        }
    }
    maxDepthOk.add(tip);
    return false;
}
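The maxDepthOk set is a memoization cache shared across calls: once a tip's sub-tangle has been fully traversed without hitting an old confirmed transaction, it is recorded and never re-walked. The depth argument is evidently a snapshot (milestone) index threshold, since the method compares it against snapshotIndex(). A minimal sketch of how a caller might filter candidate tips with it; the helper, the candidateTips list, and maxDepthIndex are illustrative assumptions, not actual TipsManager code.

// Illustrative sketch only (not repository code): keep the tips that do not
// reference anything confirmed below the allowed snapshot index, reusing one
// memoization set across all checks.
List<Hash> filterTipsBelowMaxDepth(List<Hash> candidateTips, int maxDepthIndex) throws Exception {
    Set<Hash> maxDepthOk = new HashSet<>();
    List<Hash> allowedTips = new ArrayList<>();
    for (Hash candidate : candidateTips) {
        if (!belowMaxDepth(candidate, maxDepthIndex, maxDepthOk)) {
            allowedTips.add(candidate);
        }
    }
    return allowedTips;
}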
Use of com.iota.iri.model.Hash in project iri by iotaledger: class TipsManager, method recursiveUpdateRatings.
long recursiveUpdateRatings(Hash txHash, Map<Hash, Long> ratings, Set<Hash> analyzedTips) throws Exception {
    long rating = 1;
    if (analyzedTips.add(txHash)) {
        // first visit: the rating is 1 (the transaction itself) plus the ratings
        // of all direct approvers, capped to avoid overflow
        TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, txHash);
        Set<Hash> approverHashes = transactionViewModel.getApprovers(tangle).getHashes();
        for (Hash approver : approverHashes) {
            rating = capSum(rating, recursiveUpdateRatings(approver, ratings, analyzedTips), Long.MAX_VALUE / 2);
        }
        ratings.put(txHash, rating);
    } else {
        // already visited: reuse the stored rating (0 if none is recorded)
        if (ratings.containsKey(txHash)) {
            rating = ratings.get(txHash);
        } else {
            rating = 0;
        }
    }
    return rating;
}
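The capSum helper is not part of this excerpt; judging from how it is used above, it is a saturating addition that keeps the accumulated rating from exceeding the given cap. A plausible sketch under that assumption, not copied from the repository:

// Assumed behavior: add two ratings, treating a signed overflow (negative sum)
// or any result above the cap as hitting the cap.
private static long capSum(long a, long b, long max) {
    if (a + b < 0 || a + b > max) {
        return max;
    }
    return a + b;
}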
Use of com.iota.iri.model.Hash in project iri by iotaledger: class TipsManager, method updateHashRatings.
Set<Hash> updateHashRatings(Hash txHash, Map<Hash, Set<Hash>> ratings, Set<Hash> analyzedTips) throws Exception {
    Set<Hash> rating;
    if (analyzedTips.add(txHash)) {
        // first visit: the rating is the transaction itself plus every transaction
        // that (transitively) approves it
        TransactionViewModel transactionViewModel = TransactionViewModel.fromHash(tangle, txHash);
        rating = new HashSet<>(Collections.singleton(txHash));
        Set<Hash> approverHashes = transactionViewModel.getApprovers(tangle).getHashes();
        for (Hash approver : approverHashes) {
            rating.addAll(updateHashRatings(approver, ratings, analyzedTips));
        }
        ratings.put(txHash, rating);
    } else {
        // already visited: reuse the stored rating (empty if none is recorded)
        if (ratings.containsKey(txHash)) {
            rating = ratings.get(txHash);
        } else {
            rating = new HashSet<>();
        }
    }
    return rating;
}
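Unlike recursiveUpdateRatings, which accumulates a capped long, this variant stores the full set of transactions that transitively approve each hash, so a weight is simply the size of that set. A short illustrative sketch of reading the ratings back as integer weights; the helper name and the entryPoint parameter are assumptions for illustration, not repository code.

// Illustrative sketch only: rate the subgraph reachable from entryPoint and
// convert each rating set into an integer weight (its size).
Map<Hash, Integer> weightsFrom(Hash entryPoint) throws Exception {
    Map<Hash, Set<Hash>> ratings = new HashMap<>();
    updateHashRatings(entryPoint, ratings, new HashSet<>());
    Map<Hash, Integer> weights = new HashMap<>();
    for (Map.Entry<Hash, Set<Hash>> entry : ratings.entrySet()) {
        weights.put(entry.getKey(), entry.getValue().size());
    }
    return weights;
}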
Use of com.iota.iri.model.Hash in project iri by iotaledger: class SnapshotTest, method patch.
@Test
public void patch() throws Exception {
    Map.Entry<Hash, Long> firstOne = Snapshot.initialState.entrySet().iterator().next();
    Hash someHash = new Hash("PSRQPWWIECDGDDZXHGJNMEVJNSVOSMECPPVRPEVRZFVIZYNNXZNTOTJOZNGCZNQVSPXBXTYUJUOXYASLS");
    Map<Hash, Long> diff = new HashMap<>();
    diff.put(firstOne.getKey(), -firstOne.getValue());
    diff.put(someHash, firstOne.getValue());
    Assert.assertNotEquals(0, diff.size());
    Assert.assertTrue("The ledger should be consistent", Snapshot.isConsistent(Snapshot.initialSnapshot.patchedDiff(diff)));
}
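The diff built in this test moves the first genesis balance to someHash, so its two entries cancel out; a patched snapshot can only remain consistent when the diff is zero-sum like this. A small illustrative assertion that makes the property explicit (not part of the original test):

// Illustrative only: the two entries cancel, so the diff neither creates nor destroys supply.
long diffSum = diff.values().stream().mapToLong(Long::longValue).sum();
Assert.assertEquals(0L, diffSum);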
Use of com.iota.iri.model.Hash in project iri by iotaledger: class BundleValidator, method validate.
public static List<List<TransactionViewModel>> validate(Tangle tangle, Hash tailHash) throws Exception {
    TransactionViewModel tail = TransactionViewModel.fromHash(tangle, tailHash);
    List<List<TransactionViewModel>> transactions = new LinkedList<>();
    // only a tail transaction (currentIndex 0) can start a bundle
    if (tail.getCurrentIndex() != 0) {
        return transactions;
    }
    final Map<Hash, TransactionViewModel> bundleTransactions = loadTransactionsFromTangle(tangle, tail);
    for (TransactionViewModel transactionViewModel : bundleTransactions.values()) {
        // start only from tail transactions that are not already marked invalid
        if (transactionViewModel.getCurrentIndex() == 0 && transactionViewModel.getValidity() >= 0) {
            final List<TransactionViewModel> instanceTransactionViewModels = new LinkedList<>();
            final long lastIndex = transactionViewModel.lastIndex();
            long bundleValue = 0;
            int i = 0;
            final Sponge curlInstance = SpongeFactory.create(SpongeFactory.Mode.KERL);
            final Sponge addressInstance = SpongeFactory.create(SpongeFactory.Mode.KERL);
            final int[] addressTrits = new int[TransactionViewModel.ADDRESS_TRINARY_SIZE];
            final int[] bundleHashTrits = new int[TransactionViewModel.BUNDLE_TRINARY_SIZE];
            final int[] normalizedBundle = new int[Curl.HASH_LENGTH / ISS.TRYTE_WIDTH];
            final int[] digestTrits = new int[Curl.HASH_LENGTH];
            MAIN_LOOP: while (true) {
                instanceTransactionViewModels.add(transactionViewModel);
                // structural checks: sequential indices, consistent lastIndex, value sum within supply bounds
                if (transactionViewModel.getCurrentIndex() != i
                        || transactionViewModel.lastIndex() != lastIndex
                        || ((bundleValue = Math.addExact(bundleValue, transactionViewModel.value())) < -TransactionViewModel.SUPPLY
                                || bundleValue > TransactionViewModel.SUPPLY)) {
                    instanceTransactionViewModels.get(0).setValidity(tangle, -1);
                    break;
                }
                // value transfers must use an address whose last trit is zero
                if (transactionViewModel.value() != 0 && transactionViewModel.getAddressHash().trits()[Curl.HASH_LENGTH - 1] != 0) {
                    instanceTransactionViewModels.get(0).setValidity(tangle, -1);
                    break;
                }
                if (i++ == lastIndex) {
                    // reached the head of the bundle; the bundle must be balanced
                    if (bundleValue == 0) {
                        if (instanceTransactionViewModels.get(0).getValidity() == 0) {
                            // not validated before: recompute the bundle hash from the essence of every transaction
                            curlInstance.reset();
                            for (final TransactionViewModel transactionViewModel2 : instanceTransactionViewModels) {
                                curlInstance.absorb(transactionViewModel2.trits(), TransactionViewModel.ESSENCE_TRINARY_OFFSET, TransactionViewModel.ESSENCE_TRINARY_SIZE);
                            }
                            curlInstance.squeeze(bundleHashTrits, 0, bundleHashTrits.length);
                            if (Arrays.equals(instanceTransactionViewModels.get(0).getBundleHash().trits(), bundleHashTrits)) {
                                // bundle hash matches: verify the signatures of all input (negative-value) transactions
                                ISSInPlace.normalizedBundle(bundleHashTrits, normalizedBundle);
                                for (int j = 0; j < instanceTransactionViewModels.size(); ) {
                                    transactionViewModel = instanceTransactionViewModels.get(j);
                                    if (transactionViewModel.value() < 0) {
                                        // let's recreate the address of the transactionViewModel.
                                        addressInstance.reset();
                                        int offset = 0, offsetNext = 0;
                                        do {
                                            offsetNext = (offset + ISS.NUMBER_OF_FRAGMENT_CHUNKS - 1) % (Curl.HASH_LENGTH / Converter.NUMBER_OF_TRITS_IN_A_TRYTE) + 1;
                                            ISSInPlace.digest(SpongeFactory.Mode.KERL, normalizedBundle,
                                                    offset % (Curl.HASH_LENGTH / Converter.NUMBER_OF_TRITS_IN_A_TRYTE),
                                                    instanceTransactionViewModels.get(j).trits(),
                                                    TransactionViewModel.SIGNATURE_MESSAGE_FRAGMENT_TRINARY_OFFSET, digestTrits);
                                            addressInstance.absorb(digestTrits, 0, Curl.HASH_LENGTH);
                                            offset = offsetNext;
                                        } while (++j < instanceTransactionViewModels.size()
                                                && instanceTransactionViewModels.get(j).getAddressHash().equals(transactionViewModel.getAddressHash())
                                                && instanceTransactionViewModels.get(j).value() == 0);
                                        addressInstance.squeeze(addressTrits, 0, addressTrits.length);
                                        // if (!Arrays.equals(Converter.bytes(addressTrits, 0, TransactionViewModel.ADDRESS_TRINARY_SIZE), transactionViewModel.getAddress().getHash().bytes())) {
                                        if (!Arrays.equals(transactionViewModel.getAddressHash().trits(), addressTrits)) {
                                            instanceTransactionViewModels.get(0).setValidity(tangle, -1);
                                            break MAIN_LOOP;
                                        }
                                    } else {
                                        j++;
                                    }
                                }
                                instanceTransactionViewModels.get(0).setValidity(tangle, 1);
                                transactions.add(instanceTransactionViewModels);
                            } else {
                                instanceTransactionViewModels.get(0).setValidity(tangle, -1);
                            }
                        } else {
                            transactions.add(instanceTransactionViewModels);
                        }
                    } else {
                        instanceTransactionViewModels.get(0).setValidity(tangle, -1);
                    }
                    break;
                } else {
                    // move on to the next transaction in the bundle via the trunk
                    transactionViewModel = bundleTransactions.get(transactionViewModel.getTrunkTransactionHash());
                    if (transactionViewModel == null) {
                        break;
                    }
                }
            }
        }
    }
    return transactions;
}
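A short illustrative sketch of calling the validator from a tail hash; the tangle and tailHash variables are assumed to be available. An empty result means the hash was not a tail transaction, the bundle is not yet complete in the tangle, or validation marked the bundle invalid.

// Illustrative sketch only: validate the bundle starting at a tail transaction.
List<List<TransactionViewModel>> bundles = BundleValidator.validate(tangle, tailHash);
if (!bundles.isEmpty()) {
    // transactions are ordered from the tail (currentIndex 0) up to the head (lastIndex)
    List<TransactionViewModel> bundle = bundles.get(0);
}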