Find hashfield optimization and limit
This commit is contained in:
parent
9fcd456205
commit
bd51c61eb6
1 changed file with 31 additions and 10 deletions
|
@ -2,6 +2,7 @@
|
|||
import os
|
||||
import time
|
||||
import json
|
||||
import itertools
|
||||
|
||||
# Third party modules
|
||||
import gevent
|
||||
|
@ -321,20 +322,39 @@ class FileRequest(object):
|
|||
|
||||
self.response({"hashfield_raw": site.content_manager.hashfield.tostring()})
|
||||
|
||||
def findHashIds(self, site, hash_ids, limit=100, max_per_hash=50):
    # Look up which known peers have the requested optional-file hash ids.
    #
    # Args:
    #     site: the Site object whose worker_manager is queried.
    #     hash_ids: iterable of optional-file hash ids to look up.
    #     limit: maximum number of hash ids resolved by the worker manager.
    #     max_per_hash: cap on packed peer addresses returned per hash id
    #         (was a hard-coded 50; parameterized, default unchanged).
    #
    # Returns:
    #     (back_ip4, back_onion) tuple of dicts mapping hash_id -> list of
    #     packed peer addresses; onion peers and plain IP peers are split
    #     into separate dicts.
    back_ip4 = {}
    back_onion = {}
    found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit)

    for hash_id, peers in found.iteritems():
        # islice over a generator: stop packing as soon as the per-hash
        # cap is reached instead of packing every peer first.
        back_onion[hash_id] = list(itertools.islice((
            helper.packOnionAddress(peer.ip, peer.port)
            for peer in peers
            if peer.ip.endswith("onion")
        ), max_per_hash))
        back_ip4[hash_id] = list(itertools.islice((
            helper.packAddress(peer.ip, peer.port)
            for peer in peers
            if not peer.ip.endswith("onion")
        ), max_per_hash))
    return back_ip4, back_onion
|
||||
|
||||
def actionFindHashIds(self, params):
|
||||
site = self.sites.get(params["site"])
|
||||
s = time.time()
|
||||
if not site or not site.settings["serving"]: # Site unknown or not serving
|
||||
self.response({"error": "Unknown site"})
|
||||
self.connection.badAction(5)
|
||||
return False
|
||||
|
||||
found = site.worker_manager.findOptionalHashIds(params["hash_ids"])
|
||||
|
||||
back_ip4 = {}
|
||||
back_onion = {}
|
||||
for hash_id, peers in found.iteritems():
|
||||
back_onion[hash_id] = [helper.packOnionAddress(peer.ip, peer.port) for peer in peers if peer.ip.endswith("onion")]
|
||||
back_ip4[hash_id] = [helper.packAddress(peer.ip, peer.port) for peer in peers if not peer.ip.endswith("onion")]
|
||||
event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"]))
|
||||
if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5):
|
||||
time.sleep(0.1)
|
||||
back_ip4, back_onion = self.findHashIds(site, params["hash_ids"], limit=10)
|
||||
else:
|
||||
back_ip4, back_onion = self.findHashIds(site, params["hash_ids"])
|
||||
RateLimit.called(event_key)
|
||||
|
||||
# Check my hashfield
|
||||
if self.server.tor_manager and self.server.tor_manager.site_onions.get(site.address): # Running onion
|
||||
|
@ -347,16 +367,17 @@ class FileRequest(object):
|
|||
my_ip = my_ip = helper.packAddress(self.server.ip, self.server.port)
|
||||
my_back = back_ip4
|
||||
|
||||
my_hashfield_set = set(site.content_manager.hashfield)
|
||||
for hash_id in params["hash_ids"]:
|
||||
if hash_id in site.content_manager.hashfield:
|
||||
if hash_id in my_hashfield_set:
|
||||
if hash_id not in my_back:
|
||||
my_back[hash_id] = []
|
||||
my_back[hash_id].append(my_ip) # Add myself
|
||||
|
||||
if config.verbose:
|
||||
self.log.debug(
|
||||
"Found: IP4: %s, Onion: %s for %s hashids" %
|
||||
(len(back_ip4), len(back_onion), len(params["hash_ids"]))
|
||||
"Found: IP4: %s, Onion: %s for %s hashids in %.3fs" %
|
||||
(len(back_ip4), len(back_onion), len(params["hash_ids"]), time.time() - s)
|
||||
)
|
||||
self.response({"peers": back_ip4, "peers_onion": back_onion})
|
||||
|
||||
|
|
Loading…
Reference in a new issue