Updated features in readme, Connectable peer stats, Start ZeroNet when Windows starts option, ZeroName updater invalid key fix, Add peer ping to timeout on publish, Make sure the passive peers get the updated files

HelloZeroNet 2015-04-17 00:34:08 +02:00
parent b39b6904e7
commit f1a885b0ef
8 changed files with 79 additions and 17 deletions

@@ -178,6 +178,7 @@ class Site:
         for changed_file in changed:
             self.bad_files[changed_file] = self.bad_files.get(changed_file, 0)+1
+        if not self.settings["own"]: self.storage.checkFiles(quick_check=True) # Quick check files based on file size
         if self.bad_files:
             self.download()
@@ -187,12 +188,17 @@ class Site:
     # Publish worker
     def publisher(self, inner_path, peers, published, limit, event_done=None):
-        timeout = 5+int(self.storage.getSize(inner_path)/1024) # Timeout: 5sec + size in kb
+        file_size = self.storage.getSize(inner_path)
+        body = self.storage.read(inner_path)
         while 1:
             if not peers or len(published) >= limit:
                 if event_done: event_done.set(True)
                 break # All peers done, or published enough
             peer = peers.pop(0)
+            if peer.connection and peer.connection.last_ping_delay: # Peer connected
+                timeout = 5+int(file_size/1024)+peer.connection.last_ping_delay # Timeout: 5sec + size in kb + last_ping
+            else:
+                timeout = 5+int(file_size/1024) # Timeout: 5sec + size in kb
             result = {"exception": "Timeout"}
             for retry in range(2):
@@ -201,7 +207,7 @@ class Site:
                 result = peer.request("update", {
                     "site": self.address,
                     "inner_path": inner_path,
-                    "body": self.storage.open(inner_path).read(),
+                    "body": body,
                     "peer": (config.ip_external, config.fileserver_port)
                 })
                 if result: break
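
The timeout logic above boils down to a simple rule: 5 seconds base, plus roughly one second per kilobyte of the body being published, plus the peer's last measured ping when the connection has one. A minimal sketch of that rule on its own (publish_timeout is an illustrative helper, not a ZeroNet function):

    def publish_timeout(file_size, last_ping_delay=None):
        # 5s base + ~1s per kB of the published body
        timeout = 5 + int(file_size / 1024)
        if last_ping_delay:  # Known latency: give the slow peer extra headroom
            timeout += last_ping_delay
        return timeout

    publish_timeout(30 * 1024)       # -> 35 (no ping measured yet)
    publish_timeout(30 * 1024, 0.8)  # -> 35.8 (last ping added on top)

Reading the body into a variable up front also means the file is no longer re-read from storage for every peer and every retry, which is what the body = self.storage.read(inner_path) change above is for.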
@@ -219,7 +225,7 @@ class Site:
     # Update content.json on peers
     def publish(self, limit=5, inner_path="content.json"):
         self.log.info( "Publishing to %s/%s peers..." % (limit, len(self.peers)) )
-        published = [] # Successfuly published (Peer)
+        published = [] # Successfully published (Peer)
         publishers = [] # Publisher threads
         peers = self.peers.values()
@@ -233,7 +239,12 @@ class Site:
         if len(published) < min(len(self.peers), limit): time.sleep(0.2) # If we have less than we need, sleep a bit
         if len(published) == 0: gevent.joinall(publishers) # No successful publish, wait for all publishers
-        self.log.info("Successfuly published to %s peers" % len(published))
+        # Make sure the connected passive peers got the update
+        passive_peers = [peer for peer in peers if peer.connection and not peer.connection.closed and peer.key.endswith(":0") and peer not in published] # Every connected passive peer we did not publish to
+        for peer in passive_peers:
+            gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=10)
+        self.log.info("Successfully published to %s peers, publishing to %s more passive peers" % (len(published), len(passive_peers)))
         return len(published)
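
A passive peer cannot accept incoming connections, so it registers itself with port 0 and its key ends in ":0"; the list comprehension above picks out the passive peers that still hold a live connection but were not reached by the normal publish round. A hedged sketch of the same filter with stripped-down stand-ins for ZeroNet's Peer and Connection objects (both types here are illustrative only):

    from collections import namedtuple

    Connection = namedtuple("Connection", "closed")
    Peer = namedtuple("Peer", "key connection")  # key is "ip:port"; passive peers use port 0

    def connected_passive_peers(peers, published):
        # Live connection, port 0 in the key, not already published to
        return [peer for peer in peers
                if peer.connection and not peer.connection.closed
                and peer.key.endswith(":0") and peer not in published]

    peers = [Peer("1.2.3.4:15441", Connection(closed=False)),
             Peer("5.6.7.8:0", Connection(closed=False))]
    connected_passive_peers(peers, published=[])  # -> only the ":0" peer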
@@ -399,7 +410,7 @@ class Site:
         self.announcePex()

-    # Need open connections
+    # Keep connections to get the updates (required for passive clients)
     def needConnections(self):
         need = min(len(self.peers)/2, 10) # Connect to half of total peers, but max 10
         need = max(need, 5) # But minimum 5 peers
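
The two lines above clamp the connection target into the [5, 10] range: aim for half of the known peers, but never fewer than 5 and never more than 10. As a self-contained one-liner (needed_connections is an illustrative name):

    def needed_connections(total_peers):
        # Half of the peers, clamped between 5 and 10
        return max(min(total_peers // 2, 10), 5)

    assert needed_connections(4) == 5     # small swarm: keep the 5-connection floor
    assert needed_connections(14) == 7    # mid-size swarm: half of the peers
    assert needed_connections(100) == 10  # large swarm: cap at 10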

@@ -186,14 +186,14 @@ class SiteStorage:
         self.site.content_manager.loadContent() # Reload content.json
         for content_inner_path, content in self.site.content_manager.contents.items():
             if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file
-                self.log.error("[MISSING] %s" % content_inner_path)
+                self.log.debug("[MISSING] %s" % content_inner_path)
                 bad_files.append(content_inner_path)
             for file_relative_path in content["files"].keys():
                 file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
                 file_inner_path = file_inner_path.strip("/") # Strip leading /
                 file_path = self.getPath(file_inner_path)
                 if not os.path.isfile(file_path):
-                    self.log.error("[MISSING] %s" % file_inner_path)
+                    self.log.debug("[MISSING] %s" % file_inner_path)
                     bad_files.append(file_inner_path)
                     continue
@@ -203,7 +203,7 @@ class SiteStorage:
                     ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
                 if not ok:
-                    self.log.debug("[CHNAGED] %s" % file_inner_path)
+                    self.log.debug("[CHANGED] %s" % file_inner_path)
                     bad_files.append(file_inner_path)
             self.log.debug("%s verified: %s files, quick_check: %s, bad files: %s" % (content_inner_path, len(content["files"]), quick_check, bad_files))
@@ -212,11 +212,12 @@ class SiteStorage:
     # Check and try to fix site file integrity
     def checkFiles(self, quick_check=True):
-        self.log.debug("Checking files... Quick:%s" % quick_check)
+        s = time.time()
         bad_files = self.verifyFiles(quick_check)
         if bad_files:
             for bad_file in bad_files:
                 self.site.bad_files[bad_file] = self.site.bad_files.get(bad_file, 0)+1
+        self.log.debug("Checked files in %.2fs... Quick:%s" % (time.time()-s, quick_check))

     # Delete all of the site's files