Findhashfield optimization and limit

shortcutme 2016-11-07 23:20:08 +01:00
parent 9fcd456205
commit bd51c61eb6


@@ -2,6 +2,7 @@
 import os
 import time
 import json
+import itertools

 # Third party modules
 import gevent
@@ -321,20 +322,39 @@ class FileRequest(object):
         self.response({"hashfield_raw": site.content_manager.hashfield.tostring()})

+    def findHashIds(self, site, hash_ids, limit=100):
+        back_ip4 = {}
+        back_onion = {}
+        found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit)
+
+        for hash_id, peers in found.iteritems():
+            back_onion[hash_id] = list(itertools.islice((
+                helper.packOnionAddress(peer.ip, peer.port)
+                for peer in peers
+                if peer.ip.endswith("onion")
+            ), 50))
+            back_ip4[hash_id] = list(itertools.islice((
+                helper.packAddress(peer.ip, peer.port)
+                for peer in peers
+                if not peer.ip.endswith("onion")
+            ), 50))
+        return back_ip4, back_onion
+
     def actionFindHashIds(self, params):
         site = self.sites.get(params["site"])
+        s = time.time()
         if not site or not site.settings["serving"]:  # Site unknown or not serving
             self.response({"error": "Unknown site"})
             self.connection.badAction(5)
             return False
-        found = site.worker_manager.findOptionalHashIds(params["hash_ids"])

-        back_ip4 = {}
-        back_onion = {}
-        for hash_id, peers in found.iteritems():
-            back_onion[hash_id] = [helper.packOnionAddress(peer.ip, peer.port) for peer in peers if peer.ip.endswith("onion")]
-            back_ip4[hash_id] = [helper.packAddress(peer.ip, peer.port) for peer in peers if not peer.ip.endswith("onion")]
+        event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"]))
+        if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5):
+            time.sleep(0.1)
+            back_ip4, back_onion = self.findHashIds(site, params["hash_ids"], limit=10)
+        else:
+            back_ip4, back_onion = self.findHashIds(site, params["hash_ids"])
+        RateLimit.called(event_key)

         # Check my hashfield
         if self.server.tor_manager and self.server.tor_manager.site_onions.get(site.address):  # Running onion
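
The new findHashIds() helper caps each per-hash peer list at 50 packed addresses: the filtering generator is consumed through itertools.islice, so packing stops as soon as 50 matching peers are found instead of materializing the full list. A minimal standalone sketch of the same pattern (peers and pack_address below are hypothetical stand-ins for the Peer objects and helper.packAddress()):

import itertools

def pack_address(ip, port):
    # Hypothetical stand-in: just format the address as a string.
    return "%s:%s" % (ip, port)

# Pretend 1000 known peers have the requested optional file.
peers = [("10.0.0.%d" % i, 15441) for i in range(1000)]

# Generator + islice: packing stops after the first 50 matches.
packed = list(itertools.islice(
    (pack_address(ip, port) for ip, port in peers if not ip.endswith(".onion")),
    50
))
print(len(packed))  # 50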
@@ -347,16 +367,17 @@ class FileRequest(object):
             my_ip = helper.packAddress(self.server.ip, self.server.port)
             my_back = back_ip4

+        my_hashfield_set = set(site.content_manager.hashfield)
         for hash_id in params["hash_ids"]:
-            if hash_id in site.content_manager.hashfield:
+            if hash_id in my_hashfield_set:
                 if hash_id not in my_back:
                     my_back[hash_id] = []
                 my_back[hash_id].append(my_ip)  # Add myself

         if config.verbose:
             self.log.debug(
-                "Found: IP4: %s, Onion: %s for %s hashids" %
-                (len(back_ip4), len(back_onion), len(params["hash_ids"]))
+                "Found: IP4: %s, Onion: %s for %s hashids in %.3fs" %
+                (len(back_ip4), len(back_onion), len(params["hash_ids"]), time.time() - s)
             )
         self.response({"peers": back_ip4, "peers_onion": back_onion})