use of com.biglybt.core.util.HashWrapper in project BiglyBT by BiglySoftware.
the class DHTDBMapping method remove.
// Removes the direct value published by the given contact (local removal).
// Only an originator's own direct entry is affected; returns the removed
// value, or null if none was stored for that originator.
protected DHTDBValueImpl remove(DHTTransportContact originator) {
    // local remove - key the lookup off the originator's node ID
    return (removeDirectValue(new HashWrapper(originator.getID())));
}
use of com.biglybt.core.util.HashWrapper in project BiglyBT by BiglySoftware.
the class DHTDBMapping method get.
// Looks up the direct value published by the given contact.
// Returns null when no direct entries exist at all, or when this
// originator has no direct entry.
protected DHTDBValueImpl get(DHTTransportContact originator) {
    // the direct map is lazily created, so it may legitimately be null
    if (direct_originator_map_may_be_null == null) {
        return (null);
    }
    HashWrapper originator_id = new HashWrapper(originator.getID());
    return ((DHTDBValueImpl) direct_originator_map_may_be_null.get(originator_id));
}
use of com.biglybt.core.util.HashWrapper in project BiglyBT by BiglySoftware.
the class DHTDBMapping method add.
// All values have
// 1) a key
// 2) a value
// 3) an originator (the contact who originally published it)
// 4) a sender (the contact who sent it, could be diff for caches)
// rethink time :P
// a) for a value where sender + originator are the same we store a single value
// b) where sender + originator differ we store an entry per originator/value pair as the
// sender can legitimately forward multiple values but their originator should differ
// c) the code that adds values is responsible for not accepting values that are either
// too "far away" from our ID, or that are cache-forwards from a contact "too far"
// away.
// for a given key
// c) we only allow up to 8 entries per sending IP address (excluding port)
// d) if multiple entries have the same value the value is only returned once
// e) only the originator can delete an entry
// a) prevents a single sender from filling up the mapping with garbage
// b) prevents the same key->value mapping being held multiple times when sent by different caches
// c) prevents multiple senders from same IP filling up, but supports multiple machines behind NAT
// d) optimises responses.
// Note that we can't trust the originator value in cache forwards, we therefore
// need to prevent someone from overwriting a valid originator->value1 mapping
// with an invalid originator->value2 mapping - that is we can't use uniqueness of
// originator
// a value can be "volatile" - this means that the cacher can ping the originator
// periodically and delete the value if it is dead
// the aim here is to
// 1) reduce ability for single contacts to spam the key while supporting up to 8
// contacts on a given IP (assuming NAT is being used)
// 2) stop one contact deleting or overwriting another contact's entry
// 3) support garbage collection for contacts that don't delete entries on exit
// TODO: we should enforce a max-values-per-sender restriction to stop a sender from spamming
// lots of keys - however, for a small DHT we need to be careful
// Adds a value to this mapping, distinguishing direct publishes (sender ==
// originator) from cache-forwards (sender != originator).
//   - direct:   store/replace the originator's direct entry and drop any
//               indirect entries we previously cached for that originator
//               (the direct value supersedes them)
//   - indirect: ignored if we already hold a direct entry from the claimed
//               originator (rule (e): only the originator may overwrite);
//               otherwise keyed per originator/value pair (rule (b)), and
//               brand-new entries are only accepted while this key is not
//               diversified
protected void add(DHTDBValueImpl new_value) {
    // don't replace a closer cache value with a further away one. in particular
    // we have to avoid the case where the original publisher of a key happens to
    // be close to it and be asked by another node to cache it!
    DHTTransportContact originator = new_value.getOriginator();
    DHTTransportContact sender = new_value.getSender();
    HashWrapper originator_id = new HashWrapper(originator.getID());
    boolean direct = Arrays.equals(originator.getID(), sender.getID());
    if (direct) {
        // direct contact from the originator is straightforward
        addDirectValue(originator_id, new_value);
        // remove any indirect values we might already have for this originator;
        // collect keys first to avoid mutating the map while iterating it
        List<HashWrapper> to_remove = new ArrayList<>();
        for (Map.Entry<HashWrapper, DHTDBValueImpl> entry : indirect_originator_value_map.entrySet()) {
            if (Arrays.equals(entry.getValue().getOriginator().getID(), originator.getID())) {
                to_remove.add(entry.getKey());
            }
        }
        for (HashWrapper key : to_remove) {
            removeIndirectValue(key);
        }
    } else {
        // rule (e): an existing direct entry from this originator wins over
        // any cache-forward claiming to be from it
        if (direct_originator_map_may_be_null != null && direct_originator_map_may_be_null.get(originator_id) != null) {
            return;
        }
        // rule (b) - one entry per originator/value pair
        HashWrapper originator_value_id = getOriginatorValueID(new_value);
        DHTDBValueImpl existing_value = indirect_originator_value_map.get(originator_value_id);
        if (existing_value != null) {
            // same originator/value pair already cached - refresh it
            addIndirectValue(originator_value_id, new_value);
        } else if (diversification_state == DHT.DT_NONE) {
            // only accept brand-new indirect entries while not diversified
            addIndirectValue(originator_value_id, new_value);
        }
    }
}
use of com.biglybt.core.util.HashWrapper in project BiglyBT by BiglySoftware.
the class DHTDBMapping method getAllValues.
// Returns every stored value (direct and indirect), de-duplicated by value
// body, skipping zero-length value bodies. The 'originator' parameter is
// currently unused here but kept for interface compatibility.
protected List<DHTDBValueImpl> getAllValues(DHTTransportContact originator) {
    List<DHTDBValueImpl> result = new ArrayList<>();
    Set<HashWrapper> seen_values = new HashSet<>();
    Map<HashWrapper, DHTDBValueImpl>[] maps = new Map[] { direct_originator_map_may_be_null, indirect_originator_value_map };
    for (Map<HashWrapper, DHTDBValueImpl> map : maps) {
        // the direct map is lazily created and may be null
        if (map == null) {
            continue;
        }
        for (Map.Entry<HashWrapper, DHTDBValueImpl> entry : map.entrySet()) {
            DHTDBValueImpl value = entry.getValue();
            // Set.add returns false when the value body was already seen
            if (!seen_values.add(new HashWrapper(value.getValue()))) {
                continue;
            }
            // zero-length value bodies are filtered out of the result
            if (value.getValue().length > 0) {
                result.add(value);
            }
        }
    }
    return (result);
}
use of com.biglybt.core.util.HashWrapper in project BiglyBT by BiglySoftware.
the class DHTDBMapping method get.
// Returns up to 'max' stored values for this key (max == 0 means no limit),
// de-duplicated by value body across direct and indirect entries, and
// records the read against 'by_who'. If FLAG_STATS is set the caller is
// asking for serialised key statistics instead of the values themselves.
protected DHTDBValueImpl[] get(DHTTransportContact by_who, int max, short flags) {
if ((flags & DHT.FLAG_STATS) != 0) {
if (adapter_key != null) {
try {
// serialise the per-key stats into a single synthetic value owned
// by the local contact
ByteArrayOutputStream baos = new ByteArrayOutputStream(64);
DataOutputStream dos = new DataOutputStream(baos);
adapter_key.serialiseStats(dos);
dos.close();
return (new DHTDBValueImpl[] { new DHTDBValueImpl(SystemTime.getCurrentTime(), baos.toByteArray(), 0, db.getLocalContact(), db.getLocalContact(), true, DHT.FLAG_STATS, 0, DHT.REP_FACT_DEFAULT) });
} catch (Throwable e) {
Debug.printStackTrace(e);
}
}
// stats requested but unavailable (or serialisation failed) -> empty
return (new DHTDBValueImpl[0]);
}
List<DHTDBValueImpl> res = new ArrayList<>();
// rule (d): identical value bodies are only returned once
Set<HashWrapper> duplicate_check = new HashSet<>();
Map<HashWrapper, DHTDBValueImpl>[] maps = new Map[] { direct_originator_map_may_be_null, indirect_originator_value_map };
for (int i = 0; i < maps.length; i++) {
Map<HashWrapper, DHTDBValueImpl> map = maps[i];
// the direct map is lazily created and may be null
if (map == null) {
continue;
}
List<HashWrapper> keys_used = new ArrayList<>();
Iterator<Map.Entry<HashWrapper, DHTDBValueImpl>> it = map.entrySet().iterator();
// max == 0 means "no limit"
while (it.hasNext() && (max == 0 || res.size() < max)) {
Map.Entry<HashWrapper, DHTDBValueImpl> entry = it.next();
HashWrapper entry_key = entry.getKey();
DHTDBValueImpl entry_value = entry.getValue();
HashWrapper x = new HashWrapper(entry_value.getValue());
if (duplicate_check.contains(x)) {
continue;
}
duplicate_check.add(x);
// zero-length value bodies are never returned
if (entry_value.getValue().length > 0) {
res.add(entry_value);
keys_used.add(entry_key);
}
}
// NOTE(review): the get() result is discarded, so this loop is a no-op
// unless the map is an access-order LinkedHashMap, where get() moves the
// returned entries to the end so successive capped reads rotate through
// the stored values - TODO confirm against the map's construction site
for (int j = 0; j < keys_used.size(); j++) {
map.get(keys_used.get(j));
}
}
// record the read for this contact (e.g. for stats/diversification)
informRead(by_who);
DHTDBValueImpl[] v = new DHTDBValueImpl[res.size()];
res.toArray(v);
return (v);
}
Aggregations