Use of com.biglybt.core.dht.transport.DHTTransportContact in project BiglyBT by BiglySoftware.
From class DHTDBImpl, method republishCachedMappings:
protected int[] republishCachedMappings() {
if (suspended) {
logger.log("Cache republish skipped as suspended");
return (new int[] { 0, 0, 0 });
}
// first refresh any leaves that have not performed at least one lookup in the
// last period
router.refreshIdleLeaves(cache_republish_interval);
final Map<HashWrapper, List<DHTDBValueImpl>> republish = new HashMap<>();
List<DHTDBMapping> republish_via_survey = new ArrayList<>();
long now = System.currentTimeMillis();
try {
this_mon.enter();
checkCacheExpiration(true);
for (Entry<HashWrapper, DHTDBMapping> entry : stored_values.entrySet()) {
HashWrapper key = entry.getKey();
DHTDBMapping mapping = entry.getValue();
if (mapping.getDiversificationType() != DHT.DT_NONE) {
continue;
}
Iterator<DHTDBValueImpl> it2 = mapping.getValues();
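// tracks whether every value is a cache-forward carrying a non-default replication factor;
// such mappings are handed to the survey-based republish below instead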
boolean all_rf_values = it2.hasNext();
List<DHTDBValueImpl> values = new ArrayList<>();
while (it2.hasNext()) {
DHTDBValueImpl value = it2.next();
if (value.isLocal()) {
all_rf_values = false;
} else {
if (value.getReplicationFactor() == DHT.REP_FACT_DEFAULT) {
all_rf_values = false;
}
if (now < value.getStoreTime()) {
// deal with clock changes
value.setStoreTime(now);
} else if (now - value.getStoreTime() <= cache_republish_interval) {
// System.out.println( "skipping store" );
} else {
values.add(value);
}
}
}
if (all_rf_values) {
// if surveying is disabled then we swallow values here to prevent them
// from being replicated using the existing technique and muddying the waters;
// these mappings are handled by the survey process
values.clear();
republish_via_survey.add(mapping);
}
if (values.size() > 0) {
republish.put(key, values);
}
}
} finally {
this_mon.exit();
}
if (republish_via_survey.size() > 0) {
// we still check for being too far away here
List<HashWrapper> stop_caching = new ArrayList<>();
for (DHTDBMapping mapping : republish_via_survey) {
HashWrapper key = mapping.getKey();
byte[] lookup_id = key.getHash();
List<DHTTransportContact> contacts = control.getClosestKContactsList(lookup_id, false);
// if we are no longer one of the K closest contacts then we shouldn't
// cache the value
boolean keep_caching = false;
for (int j = 0; j < contacts.size(); j++) {
if (router.isID(contacts.get(j).getID())) {
keep_caching = true;
break;
}
}
if (!keep_caching) {
DHTLog.log("Dropping cache entry for " + DHTLog.getString(lookup_id) + " as now too far away");
stop_caching.add(key);
}
}
if (stop_caching.size() > 0) {
try {
this_mon.enter();
for (int i = 0; i < stop_caching.size(); i++) {
DHTDBMapping mapping = stored_values.remove(stop_caching.get(i));
if (mapping != null) {
removeFromPrefixMap(mapping);
mapping.destroy();
}
}
} finally {
this_mon.exit();
}
}
}
final int[] values_published = { 0 };
final int[] keys_published = { 0 };
final int[] republish_ops = { 0 };
final HashSet<DHTTransportContact> anti_spoof_done = new HashSet<>();
if (republish.size() > 0) {
// System.out.println( "cache replublish" );
// The approach is to refresh all leaves in the smallest subtree, thus populating the tree with
// sufficient information to directly know which nodes to republish the values
// to.
// However, I'm going to rely on the "refresh idle leaves" logic above
// (that's required to keep the DHT alive in general) to ensure that all
// k-buckets are reasonably up-to-date
Iterator<Map.Entry<HashWrapper, List<DHTDBValueImpl>>> it1 = republish.entrySet().iterator();
List<HashWrapper> stop_caching = new ArrayList<>();
// build a map of contact -> list of keys to republish
Map<HashWrapper, Object[]> contact_map = new HashMap<>();
while (it1.hasNext()) {
Map.Entry<HashWrapper, List<DHTDBValueImpl>> entry = it1.next();
HashWrapper key = entry.getKey();
byte[] lookup_id = key.getHash();
// just use the closest contacts - if some have failed then they'll
// get flushed out by this operation. Grabbing just the live ones
// is a bad idea as failures may rack up against the live ones due
// to network problems and kill them, leaving the dead ones!
List<DHTTransportContact> contacts = control.getClosestKContactsList(lookup_id, false);
// if we are no longer one of the K closest contacts then we shouldn't
// cache the value
boolean keep_caching = false;
for (int j = 0; j < contacts.size(); j++) {
if (router.isID(contacts.get(j).getID())) {
keep_caching = true;
break;
}
}
if (!keep_caching) {
DHTLog.log("Dropping cache entry for " + DHTLog.getString(lookup_id) + " as now too far away");
stop_caching.add(key);
// we carry on and do one last publish
}
for (int j = 0; j < contacts.size(); j++) {
DHTTransportContact contact = contacts.get(j);
if (router.isID(contact.getID())) {
// ignore ourselves
continue;
}
Object[] data = contact_map.get(new HashWrapper(contact.getID()));
if (data == null) {
data = new Object[] { contact, new ArrayList<HashWrapper>() };
contact_map.put(new HashWrapper(contact.getID()), data);
}
((List<HashWrapper>) data[1]).add(key);
}
}
Iterator<Object[]> it2 = contact_map.values().iterator();
final int con_tot = contact_map.size();
int con_num = 0;
while (it2.hasNext()) {
con_num++;
final int f_con_num = con_num;
final Object[] data = it2.next();
final DHTTransportContact contact = (DHTTransportContact) data[0];
// move to anti-spoof on cache forwards - gotta do a find-node first
// to get the random id
final AESemaphore sem = new AESemaphore("DHTDB:cacheForward");
contact.sendFindNode(new DHTTransportReplyHandlerAdapter() {
@Override
public void findNodeReply(DHTTransportContact _contact, DHTTransportContact[] _contacts) {
anti_spoof_done.add(_contact);
try {
// System.out.println( "cacheForward: pre-store findNode OK" );
List<HashWrapper> keys = (List<HashWrapper>) data[1];
byte[][] store_keys = new byte[keys.size()][];
DHTTransportValue[][] store_values = new DHTTransportValue[store_keys.length][];
keys_published[0] += store_keys.length;
for (int i = 0; i < store_keys.length; i++) {
HashWrapper wrapper = keys.get(i);
store_keys[i] = wrapper.getHash();
List<DHTDBValueImpl> values = republish.get(wrapper);
store_values[i] = new DHTTransportValue[values.size()];
values_published[0] += store_values[i].length;
for (int j = 0; j < values.size(); j++) {
DHTDBValueImpl value = values.get(j);
// we reduce the cache distance by 1 here as it is incremented by the
// recipients
store_values[i][j] = value.getValueForRelay(local_contact);
}
}
List<DHTTransportContact> contacts = new ArrayList<>();
contacts.add(contact);
republish_ops[0]++;
control.putDirectEncodedKeys(store_keys, "Republish cache: " + f_con_num + " of " + con_tot, store_values, contacts);
} finally {
sem.release();
}
}
@Override
public void failed(DHTTransportContact _contact, Throwable _error) {
try {
// System.out.println( "cacheForward: pre-store findNode Failed" );
DHTLog.log("cacheForward: pre-store findNode failed " + DHTLog.getString(_contact) + " -> failed: " + _error.getMessage());
router.contactDead(_contact.getID(), false);
} finally {
sem.release();
}
}
}, contact.getProtocolVersion() >= DHTTransportUDP.PROTOCOL_VERSION_ANTI_SPOOF2 ? new byte[0] : new byte[20], DHT.FLAG_LOOKUP_FOR_STORE);
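// wait for the find-node round trip to complete (reply or failure releases the semaphore) before moving on to the next contact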
sem.reserve();
}
try {
this_mon.enter();
for (int i = 0; i < stop_caching.size(); i++) {
DHTDBMapping mapping = stored_values.remove(stop_caching.get(i));
if (mapping != null) {
removeFromPrefixMap(mapping);
mapping.destroy();
}
}
} finally {
this_mon.exit();
}
}
DHTStorageBlock[] direct_key_blocks = getDirectKeyBlocks();
if (direct_key_blocks.length > 0) {
for (int i = 0; i < direct_key_blocks.length; i++) {
final DHTStorageBlock key_block = direct_key_blocks[i];
List contacts = control.getClosestKContactsList(key_block.getKey(), false);
boolean forward_it = false;
for (int j = 0; j < contacts.size(); j++) {
final DHTTransportContact contact = (DHTTransportContact) contacts.get(j);
if (router.isID(contact.getID())) {
forward_it = true;
break;
}
}
for (int j = 0; forward_it && j < contacts.size(); j++) {
final DHTTransportContact contact = (DHTTransportContact) contacts.get(j);
if (key_block.hasBeenSentTo(contact)) {
continue;
}
if (router.isID(contact.getID())) {
// ignore ourselves
continue;
}
if (contact.getProtocolVersion() >= DHTTransportUDP.PROTOCOL_VERSION_BLOCK_KEYS) {
final Runnable task = new Runnable() {
@Override
public void run() {
contact.sendKeyBlock(new DHTTransportReplyHandlerAdapter() {
@Override
public void keyBlockReply(DHTTransportContact _contact) {
DHTLog.log("key block forward ok " + DHTLog.getString(_contact));
key_block.sentTo(_contact);
}
@Override
public void failed(DHTTransportContact _contact, Throwable _error) {
DHTLog.log("key block forward failed " + DHTLog.getString(_contact) + " -> failed: " + _error.getMessage());
}
}, key_block.getRequest(), key_block.getCertificate());
}
};
if (anti_spoof_done.contains(contact)) {
task.run();
} else {
contact.sendFindNode(new DHTTransportReplyHandlerAdapter() {
@Override
public void findNodeReply(DHTTransportContact contact, DHTTransportContact[] contacts) {
task.run();
}
@Override
public void failed(DHTTransportContact _contact, Throwable _error) {
// System.out.println( "nodeAdded: pre-store findNode Failed" );
DHTLog.log("pre-kb findNode failed " + DHTLog.getString(_contact) + " -> failed: " + _error.getMessage());
router.contactDead(_contact.getID(), false);
}
}, contact.getProtocolVersion() >= DHTTransportUDP.PROTOCOL_VERSION_ANTI_SPOOF2 ? new byte[0] : new byte[20], DHT.FLAG_LOOKUP_FOR_STORE);
}
}
}
}
}
return (new int[] { values_published[0], keys_published[0], republish_ops[0] });
}
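For illustration, the republish path above first inverts the key-to-closest-contacts relationship into a contact-to-keys map, so that each contact receives a single batched store covering every key it should hold. The following is a minimal, self-contained sketch of just that grouping step, using plain strings in place of DHTTransportContact and HashWrapper; the class and method names are illustrative, not part of BiglyBT.

import java.util.*;

/** Sketch of the contact -> keys grouping used before the batched cache republish. */
public class RepublishGrouping {

    /**
     * Inverts a key -> closest-contacts mapping into contact -> keys,
     * so each contact can be sent one combined store request.
     */
    static Map<String, List<String>> groupByContact(Map<String, List<String>> closestByKey,
                                                    String ourId) {
        Map<String, List<String>> byContact = new HashMap<>();
        for (Map.Entry<String, List<String>> e : closestByKey.entrySet()) {
            for (String contact : e.getValue()) {
                if (contact.equals(ourId)) {
                    continue;                           // ignore ourselves, as the real code does
                }
                byContact.computeIfAbsent(contact, c -> new ArrayList<>()).add(e.getKey());
            }
        }
        return byContact;
    }

    public static void main(String[] args) {
        Map<String, List<String>> closest = new HashMap<>();
        closest.put("key1", Arrays.asList("self", "A", "B"));
        closest.put("key2", Arrays.asList("A", "C"));
        // Expect A -> [key1, key2], B -> [key1], C -> [key2]
        System.out.println(groupByContact(closest, "self"));
    }
}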
Use of com.biglybt.core.dht.transport.DHTTransportContact in project BiglyBT by BiglySoftware.
From class DHTDBMapping, method add:
// All values have
// 1) a key
// 2) a value
// 3) an originator (the contact who originally published it)
// 4) a sender (the contact who sent it, could be diff for caches)
// rethink time :P
// a) for a value where sender + originator are the same we store a single value
// b) where sender + originator differ we store an entry per originator/value pair as the
// sender can legitimately forward multiple values but their originator should differ
// c) the code that adds values is responsible for not accepting values that are either
// too "far away" from our ID, or that are cache-forwards from a contact "too far"
// away.
// for a given key
// c) we only allow up to 8 entries per sending IP address (excluding port)
// d) if multiple entries have the same value the value is only returned once
// e) only the originator can delete an entry
// a) prevents a single sender from filling up the mapping with garbage
// b) prevents the same key->value mapping being held multiple times when sent by different caches
// c) prevents multiple senders from same IP filling up, but supports multiple machines behind NAT
// d) optimises responses.
// Note that we can't trust the originator value in cache forwards; we therefore
// need to prevent someone from overwriting a valid originator->value1 mapping
// with an invalid originator->value2 mapping - that is we can't use uniqueness of
// originator
// a value can be "volatile" - this means that the cacher can ping the originator
// periodically and delete the value if it is dead
// the aim here is to
// 1) reduce ability for single contacts to spam the key while supporting up to 8
// contacts on a given IP (assuming NAT is being used)
// 2) stop one contact deleting or overwriting another contact's entry
// 3) support garbage collection for contacts that don't delete entries on exit
// TODO: we should enforce a max-values-per-sender restriction to stop a sender from spamming
// lots of keys - however, for a small DHT we need to be careful
protected void add(DHTDBValueImpl new_value) {
// don't replace a closer cache value with a further away one. in particular
// we have to avoid the case where the original publisher of a key happens to
// be close to it and be asked by another node to cache it!
DHTTransportContact originator = new_value.getOriginator();
DHTTransportContact sender = new_value.getSender();
HashWrapper originator_id = new HashWrapper(originator.getID());
boolean direct = Arrays.equals(originator.getID(), sender.getID());
if (direct) {
// direct contact from the originator is straightforward
addDirectValue(originator_id, new_value);
// remove any indirect values we might already have for this
Iterator<Map.Entry<HashWrapper, DHTDBValueImpl>> it = indirect_originator_value_map.entrySet().iterator();
List<HashWrapper> to_remove = new ArrayList<>();
while (it.hasNext()) {
Map.Entry<HashWrapper, DHTDBValueImpl> entry = it.next();
HashWrapper existing_key = entry.getKey();
DHTDBValueImpl existing_value = entry.getValue();
if (Arrays.equals(existing_value.getOriginator().getID(), originator.getID())) {
to_remove.add(existing_key);
}
}
for (int i = 0; i < to_remove.size(); i++) {
removeIndirectValue((HashWrapper) to_remove.get(i));
}
} else {
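// cache forward - if we already hold a value received directly from this originator then ignore it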
if (direct_originator_map_may_be_null != null && direct_originator_map_may_be_null.get(originator_id) != null) {
return;
}
// rule (b) - one entry per originator/value pair
HashWrapper originator_value_id = getOriginatorValueID(new_value);
DHTDBValueImpl existing_value = indirect_originator_value_map.get(originator_value_id);
if (existing_value != null) {
addIndirectValue(originator_value_id, new_value);
// System.out.println( " replacing existing" );
} else {
if (diversification_state == DHT.DT_NONE) {
addIndirectValue(originator_value_id, new_value);
}
}
}
}
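Rule (b) above stores at most one indirect entry per originator/value pair, keyed by getOriginatorValueID, whose implementation is not shown here. The sketch below illustrates one plausible way such a composite identifier could be derived (hashing the originator id together with the value bytes); this is an assumption for illustration, not BiglyBT's actual implementation.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Arrays;

/** Hypothetical sketch of an originator/value composite key for rule (b). */
public class OriginatorValueId {

    /** Hash originator id + value bytes so at most one indirect entry exists per pair. */
    static byte[] originatorValueId(byte[] originatorId, byte[] valueBytes) throws Exception {
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        sha1.update(originatorId);
        sha1.update(valueBytes);
        return sha1.digest();
    }

    public static void main(String[] args) throws Exception {
        byte[] origA = "contact-A".getBytes(StandardCharsets.UTF_8);
        byte[] v1 = "value-1".getBytes(StandardCharsets.UTF_8);
        byte[] v2 = "value-2".getBytes(StandardCharsets.UTF_8);

        // Same originator, same value -> same id (deduplicated on store).
        System.out.println(Arrays.equals(
            originatorValueId(origA, v1), originatorValueId(origA, v1)));    // true

        // Same originator, different value -> different id (both entries kept).
        System.out.println(Arrays.equals(
            originatorValueId(origA, v1), originatorValueId(origA, v2)));    // false
    }
}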
Use of com.biglybt.core.dht.transport.DHTTransportContact in project BiglyBT by BiglySoftware.
From class DHTLog, method getString:
public static String getString(Set s) {
if (logging_on) {
StringBuilder sb = new StringBuilder(128);
sb.append("{");
Iterator it = s.iterator();
while (it.hasNext()) {
if (sb.length() > 1) {
sb.append(",");
}
sb.append(getString((DHTTransportContact) it.next()));
}
sb.append("}");
return (sb.toString());
} else {
return ("");
}
}
Use of com.biglybt.core.dht.transport.DHTTransportContact in project BiglyBT by BiglySoftware.
From class DHTSpeedTesterImpl, method findContacts:
protected void findContacts() {
DHTTransportContact[] reachables = dht.getTransport().getReachableContacts();
for (int i = 0; i < reachables.length; i++) {
DHTTransportContact contact = reachables[i];
byte[] address = contact.getAddress().getAddress().getAddress();
if (tried_bloom == null || tried_bloom.getEntryCount() > 500) {
tried_bloom = BloomFilterFactory.createAddOnly(4096);
}
if (!tried_bloom.contains(address)) {
tried_bloom.add(address);
synchronized (pending_contacts) {
potentialPing ping = new potentialPing(contact, DHTNetworkPositionManager.estimateRTT(contact.getNetworkPositions(), dht.getTransport().getLocalContact().getNetworkPositions()));
pending_contacts.add(0, ping);
if (pending_contacts.size() > 60) {
pending_contacts.removeLast();
}
}
}
}
}
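findContacts throttles probing with two simple mechanisms: an add-only bloom filter of recently tried addresses that is recreated once it holds more than 500 entries, and a pending queue capped at 60 candidates with the newest first. The sketch below reproduces that pattern in a self-contained form, substituting a plain HashSet for BiglyBT's BloomFilterFactory filter; the class and method names are illustrative.

import java.util.*;

/** Sketch of the "tried" de-duplication pattern, with a HashSet standing in
 *  for BiglyBT's add-only bloom filter. */
public class TriedAddressFilter {

    private Set<String> tried;                        // periodically recreated rather than ever-growing
    private final Deque<String> pending = new ArrayDeque<>();

    boolean offer(String address) {
        // recreate the filter once it has absorbed "enough" entries,
        // mirroring the getEntryCount() > 500 check in findContacts()
        if (tried == null || tried.size() > 500) {
            tried = new HashSet<>();
        }
        if (!tried.add(address)) {
            return false;                             // already tried recently
        }
        pending.addFirst(address);                    // newest candidates first
        if (pending.size() > 60) {
            pending.removeLast();                     // cap the pending queue
        }
        return true;
    }

    public static void main(String[] args) {
        TriedAddressFilter f = new TriedAddressFilter();
        System.out.println(f.offer("10.0.0.1"));      // true  - first sighting
        System.out.println(f.offer("10.0.0.1"));      // false - de-duplicated
    }
}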
Use of com.biglybt.core.dht.transport.DHTTransportContact in project BiglyBT by BiglySoftware.
From class DHTPlugin, method initialize:
@Override
public void initialize(PluginInterface _plugin_interface) {
status = STATUS_INITALISING;
plugin_interface = _plugin_interface;
dht_data_port = UDPNetworkManager.getSingleton().getUDPNonDataListeningPortNumber();
log = plugin_interface.getLogger().getTimeStampedChannel(PLUGIN_NAME);
UIManager ui_manager = plugin_interface.getUIManager();
final BasicPluginViewModel model = ui_manager.createBasicPluginViewModel(PLUGIN_RESOURCE_ID);
model.setConfigSectionID(PLUGIN_CONFIGSECTION_ID);
BasicPluginConfigModel config = ui_manager.createBasicPluginConfigModel(ConfigSection.SECTION_PLUGINS, PLUGIN_CONFIGSECTION_ID);
config.addLabelParameter2("dht.info");
final BooleanParameter enabled_param = config.addBooleanParameter2("dht.enabled", "dht.enabled", true);
plugin_interface.getPluginconfig().addListener(new PluginConfigListener() {
@Override
public void configSaved() {
int new_dht_data_port = UDPNetworkManager.getSingleton().getUDPNonDataListeningPortNumber();
if (new_dht_data_port != dht_data_port) {
changePort(new_dht_data_port);
}
}
});
LabelParameter reseed_label = config.addLabelParameter2("dht.reseed.label");
final StringParameter reseed_ip = config.addStringParameter2("dht.reseed.ip", "dht.reseed.ip", "");
final IntParameter reseed_port = config.addIntParameter2("dht.reseed.port", "dht.reseed.port", 0);
reseed = config.addActionParameter2("dht.reseed.info", "dht.reseed");
reseed.setEnabled(false);
config.createGroup("dht.reseed.group", new Parameter[] { reseed_label, reseed_ip, reseed_port, reseed });
final BooleanParameter ipfilter_logging_param = config.addBooleanParameter2("dht.ipfilter.log", "dht.ipfilter.log", true);
ipfilter_logging[0] = ipfilter_logging_param.getValue();
ipfilter_logging_param.addListener(new ParameterListener() {
@Override
public void parameterChanged(Parameter p) {
ipfilter_logging[0] = ipfilter_logging_param.getValue();
}
});
warn_user = config.addBooleanParameter2("dht.warn.user", "dht.warn.user", true);
prefer_i2p = config.addBooleanParameter2("dht.prefer.i2p", "dht.prefer.i2p", false);
BooleanParameter sleeping = config.addBooleanParameter2("dht.is.sleeping", "dht.is.sleeping", false);
AERunStateHandler.addListener(new AERunStateHandler.RunStateChangeListener() {
@Override
public void runStateChanged(long run_state) {
sleeping.setValue(AERunStateHandler.isDHTSleeping());
}
}, true);
sleeping.addListener(new ParameterListener() {
@Override
public void parameterChanged(Parameter param) {
AERunStateHandler.setDHTSleeping(sleeping.getValue());
}
});
final BooleanParameter advanced = config.addBooleanParameter2("dht.advanced", "dht.advanced", false);
LabelParameter advanced_label = config.addLabelParameter2("dht.advanced.label");
final StringParameter override_ip = config.addStringParameter2("dht.override.ip", "dht.override.ip", "");
config.createGroup("dht.advanced.group", new Parameter[] { advanced_label, override_ip });
advanced.addEnabledOnSelection(advanced_label);
advanced.addEnabledOnSelection(override_ip);
final StringParameter command = config.addStringParameter2("dht.execute.command", "dht.execute.command", "print");
ActionParameter execute = config.addActionParameter2("dht.execute.info", "dht.execute");
final BooleanParameter logging = config.addBooleanParameter2("dht.logging", "dht.logging", false);
config.createGroup("dht.diagnostics.group", new Parameter[] { command, execute, logging });
logging.addListener(new ParameterListener() {
@Override
public void parameterChanged(Parameter param) {
if (dhts != null) {
for (int i = 0; i < dhts.length; i++) {
dhts[i].setLogging(logging.getValue());
}
}
}
});
final DHTPluginOperationListener log_polistener = new DHTPluginOperationListener() {
@Override
public boolean diversified() {
return (true);
}
@Override
public void starts(byte[] key) {
}
@Override
public void valueRead(DHTPluginContact originator, DHTPluginValue value) {
log.log("valueRead: " + new String(value.getValue()) + " from " + originator.getName() + "/" + originator.getAddress() + ", flags=" + Integer.toHexString(value.getFlags() & 0x00ff));
if ((value.getFlags() & DHTPlugin.FLAG_STATS) != 0) {
DHTPluginKeyStats stats = decodeStats(value);
log.log(" stats: size=" + (stats == null ? "null" : stats.getSize()));
}
}
@Override
public void valueWritten(DHTPluginContact target, DHTPluginValue value) {
log.log("valueWritten:" + new String(value.getValue()) + " to " + target.getName() + "/" + target.getAddress());
}
@Override
public void complete(byte[] key, boolean timeout_occurred) {
log.log("complete: timeout = " + timeout_occurred);
}
};
execute.addListener(new ParameterListener() {
@Override
public void parameterChanged(Parameter param) {
AEThread2 t = new AEThread2("DHT:commandrunner", true) {
@Override
public void run() {
try {
if (dhts == null) {
return;
}
String c = command.getValue().trim();
String lc = c.toLowerCase();
if (lc.equals("suspend")) {
if (!setSuspended(true)) {
Debug.out("Suspend failed");
}
return;
} else if (lc.equals("resume")) {
if (!setSuspended(false)) {
Debug.out("Resume failed");
}
return;
} else if (lc.equals("bridge_put")) {
try {
List<DistributedDatabase> ddbs = plugin_interface.getUtilities().getDistributedDatabases(new String[] { AENetworkClassifier.AT_I2P });
DistributedDatabase ddb = ddbs.get(0);
DistributedDatabaseKey key = ddb.createKey("fred");
key.setFlags(DistributedDatabaseKey.FL_BRIDGED);
ddb.write(new DistributedDatabaseListener() {
@Override
public void event(DistributedDatabaseEvent event) {
// TODO Auto-generated method stub
}
}, key, ddb.createValue("bill"));
} catch (Throwable e) {
e.printStackTrace();
}
return;
}
for (int i = 0; i < dhts.length; i++) {
DHT dht = dhts[i].getDHT();
DHTTransportUDP transport = (DHTTransportUDP) dht.getTransport();
if (lc.equals("print")) {
dht.print(true);
dhts[i].logStats();
} else if (lc.equals("pingall")) {
if (i == 1) {
dht.getControl().pingAll();
}
} else if (lc.equals("versions")) {
List<DHTRouterContact> contacts = dht.getRouter().getAllContacts();
Map<Byte, Integer> counts = new TreeMap<>();
for (DHTRouterContact r : contacts) {
DHTControlContact contact = (DHTControlContact) r.getAttachment();
byte v = contact.getTransportContact().getProtocolVersion();
Integer count = counts.get(v);
if (count == null) {
counts.put(v, 1);
} else {
counts.put(v, count + 1);
}
}
log.log("Net " + dht.getTransport().getNetwork());
int total = contacts.size();
if (total == 0) {
log.log(" no contacts");
} else {
String ver = "";
for (Map.Entry<Byte, Integer> entry : counts.entrySet()) {
ver += (ver.length() == 0 ? "" : ", ") + entry.getKey() + "=" + 100 * entry.getValue() / total + "%";
}
log.log(" contacts=" + total + ": " + ver);
}
} else if (lc.equals("testca")) {
((DHTTransportUDPImpl) transport).testExternalAddressChange();
} else if (lc.equals("testnd")) {
((DHTTransportUDPImpl) transport).testNetworkAlive(false);
} else if (lc.equals("testna")) {
((DHTTransportUDPImpl) transport).testNetworkAlive(true);
} else {
int pos = c.indexOf(' ');
if (pos != -1) {
String lhs = lc.substring(0, pos);
String rhs = c.substring(pos + 1);
if (lhs.equals("set")) {
pos = rhs.indexOf('=');
if (pos != -1) {
DHTPlugin.this.put(rhs.substring(0, pos).getBytes(), "DHT Plugin: set", rhs.substring(pos + 1).getBytes(), (byte) 0, log_polistener);
}
} else if (lhs.equals("get")) {
DHTPlugin.this.get(rhs.getBytes("UTF-8"), "DHT Plugin: get", (byte) 0, 1, 10000, true, false, log_polistener);
} else if (lhs.equals("query")) {
DHTPlugin.this.get(rhs.getBytes("UTF-8"), "DHT Plugin: get", DHTPlugin.FLAG_STATS, 1, 10000, true, false, log_polistener);
} else if (lhs.equals("punch")) {
Map originator_data = new HashMap();
originator_data.put("hello", "mum");
DHTNATPuncher puncher = dht.getNATPuncher();
if (puncher != null) {
puncher.punch("Test", transport.getLocalContact(), null, originator_data);
}
} else if (lhs.equals("stats")) {
try {
pos = rhs.lastIndexOf(":");
DHTTransportContact contact;
if (pos == -1) {
contact = transport.getLocalContact();
} else {
String host = rhs.substring(0, pos);
int port = Integer.parseInt(rhs.substring(pos + 1));
contact = transport.importContact(new InetSocketAddress(host, port), transport.getProtocolVersion(), false);
}
log.log("Stats request to " + contact.getName());
DHTTransportFullStats stats = contact.getStats();
log.log("Stats:" + (stats == null ? "<null>" : stats.getString()));
DHTControlActivity[] activities = dht.getControl().getActivities();
for (int j = 0; j < activities.length; j++) {
log.log(" act:" + activities[j].getString());
}
} catch (Throwable e) {
Debug.printStackTrace(e);
}
}
}
}
}
} catch (Throwable e) {
Debug.out(e);
}
}
};
t.start();
}
});
reseed.addListener(new ParameterListener() {
@Override
public void parameterChanged(Parameter param) {
reseed.setEnabled(false);
AEThread2 t = new AEThread2("DHT:reseeder", true) {
@Override
public void run() {
try {
String ip = reseed_ip.getValue().trim();
if (dhts == null) {
return;
}
int port = reseed_port.getValue();
for (int i = 0; i < dhts.length; i++) {
DHTPluginImpl dht = dhts[i];
if (ip.length() == 0 || port == 0) {
dht.checkForReSeed(true);
} else {
DHTTransportContact seed = dht.importSeed(ip, port);
if (seed != null) {
dht.integrateDHT(false, seed);
}
}
}
} finally {
reseed.setEnabled(true);
}
}
};
t.start();
}
});
model.getActivity().setVisible(false);
model.getProgress().setVisible(false);
log.addListener(new LoggerChannelListener() {
@Override
public void messageLogged(int type, String message) {
model.getLogArea().appendText(message + "\n");
}
@Override
public void messageLogged(String str, Throwable error) {
model.getLogArea().appendText(error.toString() + "\n");
}
});
dht_log = new DHTLogger() {
@Override
public void log(String str) {
log.log(str);
}
@Override
public void log(Throwable e) {
log.log(e);
}
@Override
public void log(int log_type, String str) {
if (isEnabled(log_type)) {
log.log(str);
}
}
@Override
public boolean isEnabled(int log_type) {
if (log_type == DHTLogger.LT_IP_FILTER) {
return ipfilter_logging[0];
}
return (true);
}
@Override
public PluginInterface getPluginInterface() {
return (log.getLogger().getPluginInterface());
}
};
if (!enabled_param.getValue()) {
model.getStatus().setText("Disabled");
status = STATUS_DISABLED;
init_sem.releaseForever();
return;
}
setPluginInfo();
plugin_interface.addListener(new PluginListener() {
@Override
public void initializationComplete() {
PluginInterface pi_upnp = plugin_interface.getPluginManager().getPluginInterfaceByClass(UPnPPlugin.class);
if (pi_upnp == null) {
log.log("UPnP plugin not found, can't map port");
} else {
upnp_mapping = ((UPnPPlugin) pi_upnp.getPlugin()).addMapping(plugin_interface.getPluginName(), false, dht_data_port, true);
}
String ip = null;
if (advanced.getValue()) {
ip = override_ip.getValue().trim();
if (ip.length() == 0) {
ip = null;
}
}
initComplete(model.getStatus(), logging.getValue(), ip);
}
@Override
public void closedownInitiated() {
if (dhts != null) {
for (int i = 0; i < dhts.length; i++) {
dhts[i].closedownInitiated();
}
}
saveClockSkew();
}
@Override
public void closedownComplete() {
}
});
final int sample_frequency = 60 * 1000;
final int sample_stats_ticks = 15; // every 15 mins
plugin_interface.getUtilities().createTimer("DHTStats", true).addPeriodicEvent(sample_frequency, new UTTimerEventPerformer() {
@Override
public void perform(UTTimerEvent event) {
if (dhts != null) {
for (int i = 0; i < dhts.length; i++) {
dhts[i].updateStats(sample_stats_ticks);
}
}
setPluginInfo();
saveClockSkew();
}
});
}
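The "versions" diagnostic command above counts router contacts per protocol version and reports each version's share of the total as a percentage. A small self-contained sketch of the same tally, with illustrative class and method names:

import java.util.*;

/** Sketch of the protocol-version tally behind the "versions" diagnostic command. */
public class VersionTally {

    static String summarise(List<Byte> versions) {
        if (versions.isEmpty()) {
            return "no contacts";
        }
        Map<Byte, Integer> counts = new TreeMap<>();
        for (byte v : versions) {
            counts.merge(v, 1, Integer::sum);         // count contacts per protocol version
        }
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<Byte, Integer> e : counts.entrySet()) {
            if (sb.length() > 0) {
                sb.append(", ");
            }
            sb.append(e.getKey()).append('=').append(100 * e.getValue() / versions.size()).append('%');
        }
        return "contacts=" + versions.size() + ": " + sb;
    }

    public static void main(String[] args) {
        System.out.println(summarise(Arrays.asList((byte) 54, (byte) 54, (byte) 53, (byte) 54)));
        // contacts=4: 53=25%, 54=75%
    }
}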