Updated features in readme, Connectable peer stats, Start ZeroNet when Windows starts option, ZeroName updater invalid key fix, Add peer ping to timeout on publish, Make sure the passive peers get the updated files

HelloZeroNet 2015-04-17 00:34:08 +02:00
parent b39b6904e7
commit f1a885b0ef
8 changed files with 79 additions and 17 deletions

View file

@@ -31,15 +31,15 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network
other peers.

## Features
- * Easy, zero configuration setup
+ * Real-time updated sites
+ * Namecoin .bit domains support
+ * Easy to setup: unpack & run
 * Password-less [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki)
based authorization: Your account is protected by same cryptography as your bitcoin wallet
- * Namecoin .bit domains support
 * SQL Database support: Allows easier site development and faster page load times
+ * Tor network support
 * Automatic, uPnP port opening
 * Plugin for multiuser (openproxy) support
+ * [ZeroFrame API](http://zeronet.readthedocs.org/en/latest/site_development/zeroframe_api_reference/) for dynamic sites
+ * One click ZeroNet client updater

## Screenshots

View file

@@ -104,7 +104,11 @@ class UiRequestPlugin(object):
yield self.formatTableRow([
    ("<a href='#ShowPeers' onclick='document.getElementById(\"peers_%s\").style.display=\"initial\"; return false'>%s</a>", (site.address, site.address)),
    ("%s", [peer.connection.id for peer in site.peers.values() if peer.connection and peer.connection.connected]),
-   ("%s/%s", ( len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]), len(site.peers) ) ),
+   ("%s/%s/%s", (
+       len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]),
+       len(site.getConnectablePeers(100)),
+       len(site.peers)
+   ) ),
    ("%s", len(site.content_manager.contents)),
])
yield "<tr><td id='peers_%s' style='display: none; white-space: pre'>" % site.address
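With this change the peers column of the stats row reads connected/connectable/total instead of connected/total. With hypothetical numbers, a site that previously showed 3/120 would now show something like 3/15/120: 3 peers with an open connection, 15 connectable peers reported by site.getConnectablePeers(100) (at most 100 because of that argument), and 120 known peers in total.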

View file

@ -32,6 +32,7 @@ class ActionsPlugin(object):
(self.titleConnections, False), (self.titleConnections, False),
(self.titleTransfer, False), (self.titleTransfer, False),
(self.titleConsole, self.toggleConsole), (self.titleConsole, self.toggleConsole),
(self.titleAutorun, self.toggleAutorun),
"--", "--",
("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet") ), ("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet") ),
("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/") ), ("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/") ),
@@ -51,6 +52,7 @@ class ActionsPlugin(object):
        super(ActionsPlugin, self).main()
        icon._die = True

    def quit(self):
        self.icon.die()
        time.sleep(0.1)
@@ -58,10 +60,12 @@ class ActionsPlugin(object):
        self.main.file_server.stop()
        #sys.exit()

    def opensite(self, url):
        import webbrowser
        webbrowser.open(url, new=2)

    def titleIp(self):
        title = "!IP: %s" % config.ip_external
        if self.main.file_server.port_opened:

@@ -70,18 +74,22 @@ class ActionsPlugin(object):
        title += " (passive)"
        return title

    def titleConnections(self):
        title = "Connections: %s" % len(self.main.file_server.connections)
        return title

    def titleTransfer(self):
        title = "Received: %.2f MB | Sent: %.2f MB" % (float(self.main.file_server.bytes_recv)/1024/1024, float(self.main.file_server.bytes_sent)/1024/1024)
        return title

    def titleConsole(self):
        if self.console: return "+Show console window"
        else: return "Show console window"

    def toggleConsole(self):
        if self.console:
            notificationicon.hideConsole()
@@ -89,3 +97,34 @@ class ActionsPlugin(object):
        else:
            notificationicon.showConsole()
            self.console = True

+   def getAutorunPath(self):
+       return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP)
+
+   def formatAutorun(self):
+       args = sys.argv[:]
+       args.insert(0, sys.executable)
+       if sys.platform == 'win32':
+           args = ['"%s"' % arg for arg in args]
+       cmd = " ".join(args)
+       cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "") # Don't open browser on autorun
+       return "cd /D %s \n%s" % (os.getcwd(), cmd)
+
+   def isAutorunEnabled(self):
+       path = self.getAutorunPath()
+       return os.path.isfile(path) and open(path).read() == self.formatAutorun()
+
+   def titleAutorun(self):
+       if self.isAutorunEnabled(): return "+Start ZeroNet when Windows starts"
+       else: return "Start ZeroNet when Windows starts"
+
+   def toggleAutorun(self):
+       if self.isAutorunEnabled():
+           os.unlink(self.getAutorunPath())
+       else:
+           open(self.getAutorunPath(), "w").write(self.formatAutorun())
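For illustration only (executable path, working directory and arguments are hypothetical, not taken from the commit): if ZeroNet was started as C:\Python27\python.exe start.py --open_browser default_browser from C:\ZeroNet, formatAutorun() would produce roughly the following zeronet.cmd, which toggleAutorun() writes into (or removes from) the Windows Startup folder:

    cd /D C:\ZeroNet
    "C:\Python27\python.exe" "zeronet.py"

isAutorunEnabled() compares the file's current contents against exactly this generated string, so the menu entry shows autorun as enabled only while the installed zeronet.cmd matches the current executable, working directory and arguments.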

View file

@ -632,9 +632,8 @@ class NotificationIcon(object):
Shell_NotifyIcon(NIM_ADD, ctypes.pointer(iconinfo)) Shell_NotifyIcon(NIM_ADD, ctypes.pointer(iconinfo))
iconinfo.union.uVersion = NOTIFYICON_VERSION iconinfo.union.uVersion = NOTIFYICON_VERSION
self.iconinfo = ctypes.pointer(iconinfo)
Shell_NotifyIcon(NIM_SETVERSION, ctypes.pointer(iconinfo)) Shell_NotifyIcon(NIM_SETVERSION, ctypes.pointer(iconinfo))
self.iconinfo = iconinfo
PostMessage(self._hwnd, WM_NULL, 0, 0) PostMessage(self._hwnd, WM_NULL, 0, 0)

View file

@@ -46,3 +46,8 @@ def get(intFolder):
    exit_code = _SHGetFolderPath(0, intFolder, 0, 0, auPathBuffer)
    return auPathBuffer.value

+if __name__ == "__main__":
+   import os
+   print get(STARTUP)
+   open(get(STARTUP)+"\\zeronet.cmd", "w").write("cd /D %s\r\nzeronet.py" % os.getcwd())
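When the module is run directly, it prints the current user's Startup directory and writes a test zeronet.cmd there; on a typical Windows installation the path returned by get(STARTUP) looks like C:\Users\<user>\AppData\Roaming\Microsoft\Windows\Start Menu\Programs\Startup (example path, not part of the commit).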

View file

@ -20,6 +20,9 @@ def processNameOp(domain, value):
if "zeronet" not in data: if "zeronet" not in data:
print "No zeronet in ", data.keys() print "No zeronet in ", data.keys()
return False return False
if type(data["zeronet"]) != type({}):
print "Bad type: ", data["zeronet"]
return False
if "slave" in sys.argv: if "slave" in sys.argv:
print "Waiting for master update arrive" print "Waiting for master update arrive"
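A hypothetical example of what the new guard filters out: a registration whose value parses to {"zeronet": {"": "1ExampleSiteAddressXXXXXXXXXXXXXXXX"}} is processed as before, while {"zeronet": "1ExampleSiteAddressXXXXXXXXXXXXXXXX"} (a bare string where a dict of domain-to-address mappings is expected) is now skipped with a "Bad type" message.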
@@ -96,7 +99,7 @@ print "Processing block from #%s to #%s..." % (config["lastprocessed"], last_block)
for block_id in range(config["lastprocessed"], last_block+1):
    processBlock(block_id)

-#processBlock(223911) # Testing
+# processBlock(223911) # Testing

while 1:
    print "Waiting for new block",

View file

@@ -178,6 +178,7 @@ class Site:
            for changed_file in changed:
                self.bad_files[changed_file] = self.bad_files.get(changed_file, 0)+1

        if not self.settings["own"]: self.storage.checkFiles(quick_check=True) # Quick check files based on file size

        if self.bad_files:
            self.download()
@@ -187,12 +188,17 @@ class Site:
    # Publish worker
    def publisher(self, inner_path, peers, published, limit, event_done=None):
-       timeout = 5+int(self.storage.getSize(inner_path)/1024) # Timeout: 5sec + size in kb
+       file_size = self.storage.getSize(inner_path)
+       body = self.storage.read(inner_path)
        while 1:
            if not peers or len(published) >= limit:
                if event_done: event_done.set(True)
                break # All peers done, or published enough

            peer = peers.pop(0)
+           if peer.connection and peer.connection.last_ping_delay: # Peer connected
+               timeout = 5+int(file_size/1024)+peer.connection.last_ping_delay # Timeout: 5sec + size in kb + last_ping
+           else:
+               timeout = 5+int(file_size/1024) # Timeout: 5sec + size in kb
            result = {"exception": "Timeout"}

            for retry in range(2):
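Worked example with made-up numbers: publishing a 40 kB content.json to a peer whose connection has a last measured ping of 1.2 seconds now uses a timeout of 5 + 40 + 1.2 ≈ 46 seconds, while a peer without an open connection keeps the flat 5 + 40 = 45 seconds that every peer got before this change.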
@@ -201,7 +207,7 @@ class Site:
                result = peer.request("update", {
                    "site": self.address,
                    "inner_path": inner_path,
-                   "body": self.storage.open(inner_path).read(),
+                   "body": body,
                    "peer": (config.ip_external, config.fileserver_port)
                })
                if result: break
@@ -219,7 +225,7 @@ class Site:
    # Update content.json on peers
    def publish(self, limit=5, inner_path="content.json"):
        self.log.info( "Publishing to %s/%s peers..." % (limit, len(self.peers)) )
-       published = [] # Successfuly published (Peer)
+       published = [] # Successfully published (Peer)
        publishers = [] # Publisher threads
        peers = self.peers.values()
@@ -233,7 +239,12 @@ class Site:
        if len(published) < min(len(self.peers), limit): time.sleep(0.2) # If less than we need sleep a bit
        if len(published) == 0: gevent.joinall(publishers) # No successful publish, wait for all publisher

-       self.log.info("Successfuly published to %s peers" % len(published))
+       # Make sure the connected passive peers got the update
+       passive_peers = [peer for peer in peers if peer.connection and not peer.connection.closed and peer.key.endswith(":0") and peer not in published] # Every connected passive peer that we have not published to
+       for peer in passive_peers:
+           gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=10)
+
+       self.log.info("Successfully published to %s peers, publishing to %s more passive peers" % (len(published), len(passive_peers)) )

        return len(published)
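Passive peers are identified here by a peer key ending in ":0", i.e. peers that announced port 0 because they cannot accept incoming connections. Since such peers can only be reached over a connection they opened themselves, the update is additionally pushed to every passive peer that is still connected but was not covered by the normal publish round.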
@@ -399,7 +410,7 @@ class Site:
        self.announcePex()

-   # Need open connections
+   # Keep connections to get the updates (required for passive clients)
    def needConnections(self):
        need = min(len(self.peers)/2, 10) # Connect to half of total peers, but max 10
        need = max(need, 5) # But minimum 5 peers

View file

@@ -186,14 +186,14 @@ class SiteStorage:
        self.site.content_manager.loadContent() # Reload content.json

        for content_inner_path, content in self.site.content_manager.contents.items():
            if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file
-               self.log.error("[MISSING] %s" % content_inner_path)
+               self.log.debug("[MISSING] %s" % content_inner_path)
                bad_files.append(content_inner_path)

            for file_relative_path in content["files"].keys():
                file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
                file_inner_path = file_inner_path.strip("/") # Strip leading /
                file_path = self.getPath(file_inner_path)
                if not os.path.isfile(file_path):
-                   self.log.error("[MISSING] %s" % file_inner_path)
+                   self.log.debug("[MISSING] %s" % file_inner_path)
                    bad_files.append(file_inner_path)
                    continue
@@ -203,7 +203,7 @@ class SiteStorage:
                ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))

                if not ok:
-                   self.log.debug("[CHNAGED] %s" % file_inner_path)
+                   self.log.debug("[CHANGED] %s" % file_inner_path)
                    bad_files.append(file_inner_path)

            self.log.debug("%s verified: %s files, quick_check: %s, bad files: %s" % (content_inner_path, len(content["files"]), quick_check, bad_files))
@@ -212,11 +212,12 @@ class SiteStorage:
    # Check and try to fix site files integrity
    def checkFiles(self, quick_check=True):
-       self.log.debug("Checking files... Quick:%s" % quick_check)
+       s = time.time()
        bad_files = self.verifyFiles(quick_check)
        if bad_files:
            for bad_file in bad_files:
                self.site.bad_files[bad_file] = self.site.bad_files.get("bad_file", 0)+1
+       self.log.debug("Checked files in %.2fs... Quick:%s" % (time.time()-s, quick_check))

    # Delete site's all file