rev280: The whole project reformatted to PEP8; UiRequest.getPosted added to query posted variables

This commit is contained in:
HelloZeroNet 2015-07-12 20:36:46 +02:00
parent a5741704e4
commit b5ecb62bc6
49 changed files with 5704 additions and 5205 deletions

File diff suppressed because it is too large

View file

@@ -1,130 +1,134 @@
import time
import os
import sys
import atexit

from Plugin import PluginManager
from Config import config

allow_reload = False  # No source reload supported in this plugin


@PluginManager.registerTo("Actions")
class ActionsPlugin(object):

    def main(self):
        global notificationicon, winfolders
        from lib import notificationicon, winfolders
        import gevent.threadpool

        self.main = sys.modules["main"]

        icon = notificationicon.NotificationIcon(
            os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'),
            "ZeroNet %s" % config.version
        )
        self.icon = icon

        if not config.debug:  # Hide console if not in debug mode
            notificationicon.hideConsole()
            self.console = False
        else:
            self.console = True

        @atexit.register
        def hideIcon():
            icon.die()

        icon.items = (
            (self.titleIp, False),
            (self.titleConnections, False),
            (self.titleTransfer, False),
            (self.titleConsole, self.toggleConsole),
            (self.titleAutorun, self.toggleAutorun),
            "--",
            ("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet")),
            ("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/")),
            ("ZeroNet Github", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet")),
            ("Report bug/request feature", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues")),
            "--",
            ("!Open ZeroNet", lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port))),
            "--",
            ("Quit", self.quit),
        )

        icon.clicked = lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port))
        gevent.threadpool.start_new_thread(icon._run, ())  # Start in real thread (not gevent compatible)
        super(ActionsPlugin, self).main()
        icon._die = True

    def quit(self):
        self.icon.die()
        time.sleep(0.1)
        self.main.ui_server.stop()
        self.main.file_server.stop()
        # sys.exit()

    def opensite(self, url):
        import webbrowser
        webbrowser.open(url, new=0)

    def titleIp(self):
        title = "!IP: %s" % config.ip_external
        if self.main.file_server.port_opened:
            title += " (active)"
        else:
            title += " (passive)"
        return title

    def titleConnections(self):
        title = "Connections: %s" % len(self.main.file_server.connections)
        return title

    def titleTransfer(self):
        title = "Received: %.2f MB | Sent: %.2f MB" % (
            float(self.main.file_server.bytes_recv) / 1024 / 1024,
            float(self.main.file_server.bytes_sent) / 1024 / 1024
        )
        return title

    def titleConsole(self):
        if self.console:
            return "+Show console window"
        else:
            return "Show console window"

    def toggleConsole(self):
        if self.console:
            notificationicon.hideConsole()
            self.console = False
        else:
            notificationicon.showConsole()
            self.console = True

    def getAutorunPath(self):
        return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP)

    def formatAutorun(self):
        args = sys.argv[:]
        args.insert(0, sys.executable)
        if sys.platform == 'win32':
            args = ['"%s"' % arg for arg in args]
        cmd = " ".join(args)

        # Don't open browser on autorun
        cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "")

        return "cd /D %s \n%s" % (os.getcwd(), cmd)

    def isAutorunEnabled(self):
        path = self.getAutorunPath()
        return os.path.isfile(path) and open(path).read() == self.formatAutorun()

    def titleAutorun(self):
        if self.isAutorunEnabled():
            return "+Start ZeroNet when Windows starts"
        else:
            return "Start ZeroNet when Windows starts"

    def toggleAutorun(self):
        if self.isAutorunEnabled():
            os.unlink(self.getAutorunPath())
        else:
            open(self.getAutorunPath(), "w").write(self.formatAutorun())
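The autorun toggle works by writing a zeronet.cmd batch file into the Windows Startup folder; isAutorunEnabled then compares the file's content byte-for-byte against formatAutorun(). A standalone sketch of formatAutorun's command rewriting, using hypothetical executable and argv values (the plugin reads sys.executable and sys.argv at runtime):

# Hypothetical values standing in for sys.executable and sys.argv
executable = "C:\\Python27\\python.exe"
argv = ["start.py", "--open_browser", "default_browser"]

args = ['"%s"' % arg for arg in [executable] + argv]  # Quote each part for cmd.exe
cmd = " ".join(args)
# Don't open browser on autorun: swap the entry script, drop the browser flags
cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "")
print cmd  # -> "C:\Python27\python.exe" "zeronet.py"  (trailing spaces remain where the flags were)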

View file

@@ -1,75 +1,71 @@
import logging
import re

from Plugin import PluginManager

allow_reload = False  # No reload supported

log = logging.getLogger("ZeronamePlugin")


@PluginManager.registerTo("SiteManager")
class SiteManagerPlugin(object):
    zeroname_address = "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F"
    site_zeroname = None

    def load(self):
        super(SiteManagerPlugin, self).load()
        if not self.get(self.zeroname_address):
            self.need(self.zeroname_address)  # Need ZeroName site

    # Checks if it's a valid address
    def isAddress(self, address):
        if self.isDomain(address):
            return True
        else:
            return super(SiteManagerPlugin, self).isAddress(address)

    # Return: True if the address is domain
    def isDomain(self, address):
        return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)

    # Resolve domain
    # Return: The address or None
    def resolveDomain(self, domain):
        domain = domain.lower()
        if not self.site_zeroname:
            self.site_zeroname = self.need(self.zeroname_address)
        self.site_zeroname.needFile("data/names.json", priority=10)
        db = self.site_zeroname.storage.loadJson("data/names.json")
        return db.get(domain)

    # Return or create site and start download site files
    # Return: Site or None if dns resolve failed
    def need(self, address, all_file=True):
        if self.isDomain(address):  # It looks like a domain
            address_resolved = self.resolveDomain(address)
            if address_resolved:
                address = address_resolved
            else:
                return None

        return super(SiteManagerPlugin, self).need(address, all_file)

    # Return: Site object or None if not found
    def get(self, address):
        if self.sites is None:  # Not loaded yet
            self.load()
        if self.isDomain(address):  # It looks like a domain
            address_resolved = self.resolveDomain(address)
            if address_resolved:  # Domain found
                site = self.sites.get(address_resolved)
                if site:
                    site_domain = site.settings.get("domain")
                    if site_domain != address:
                        site.settings["domain"] = address
            else:  # Domain not found
                site = self.sites.get(address)
        else:  # Access by site address
            site = self.sites.get(address)
        return site
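resolveDomain is ultimately a lowercased dictionary lookup against the ZeroName site's data/names.json, which maps .bit names to site addresses. A minimal sketch of that lookup, assuming a hypothetical names.json payload:

import json

# Hypothetical data/names.json content served by the ZeroName site
names_raw = '{"zeronetwork.bit": "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr"}'
db = json.loads(names_raw)

print db.get("ZeroNetwork.bit".lower())  # -> 1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr
print db.get("missing.bit")  # -> None; need() treats this as a failed resolve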

View file

@@ -1,40 +1,39 @@
import re
from Plugin import PluginManager


@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):

    def __init__(self, *args, **kwargs):
        from Site import SiteManager
        self.site_manager = SiteManager.site_manager
        super(UiRequestPlugin, self).__init__(*args, **kwargs)

    # Media request
    def actionSiteMedia(self, path):
        match = re.match("/media/(?P<address>[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", path)
        if match:  # It's a valid domain, resolve first
            domain = match.group("address")
            address = self.site_manager.resolveDomain(domain)
            if address:
                path = "/media/" + address + match.group("inner_path")
        return super(UiRequestPlugin, self).actionSiteMedia(path)  # Get the wrapper frame output

    # Is media request allowed from that referer
    def isMediaRequestAllowed(self, site_address, referer):
        referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "")  # Remove site address
        referer_path = re.sub("\?.*", "", referer_path)  # Remove http params

        if self.isProxyRequest():  # Match to site domain
            referer = re.sub("^http://zero[/]+", "http://", referer)  # Allow /zero access
            referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1)
        else:  # Match to request path
            referer_site_address = re.match("/(?P<address>[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", referer_path).group("address")

        if referer_site_address == site_address:  # Referer site address as simple address
            return True
        elif self.site_manager.resolveDomain(referer_site_address) == site_address:  # Referer site address as dns
            return True
        else:  # Invalid referer
            return False
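For a non-proxy request the referer check reduces the Referer header to its first path component in two regex passes, then accepts it either as a literal site address or as a domain that resolves to one. A quick sketch of the stages on a hypothetical referer:

import re

referer = "http://127.0.0.1:43110/media/zeronetwork.bit/index.html?wrapper=1"
referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "")  # Remove site address
referer_path = re.sub("\?.*", "", referer_path)  # Remove http params
print referer_path  # -> /zeronetwork.bit/index.html

referer_site_address = re.match("/(?P<address>[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", referer_path).group("address")
print referer_site_address  # -> zeronetwork.bit, then compared or DNS-resolved against site_address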

View file

@@ -1,75 +1,83 @@
import time
import json
import os
import sys
import re
import socket

from bitcoinrpc.authproxy import AuthServiceProxy


def publish():
    print "* Signing..."
    os.system("python zeronet.py siteSign %s %s" % (config["site"], config["privatekey"]))
    print "* Publishing..."
    os.system("python zeronet.py sitePublish %s" % config["site"])


def processNameOp(domain, value):
    if not value.startswith("{"):
        return False
    try:
        data = json.loads(value)
    except Exception, err:
        print "Json load error: %s" % err
        return False
    if "zeronet" not in data:
        print "No zeronet in ", data.keys()
        return False
    if not isinstance(data["zeronet"], dict):
        print "Not dict: ", data["zeronet"]
        return False
    if not re.match("^[a-z]([a-z0-9-]{0,62}[a-z0-9])?$", domain):
        print "Invalid domain: ", domain
        return False

    if "slave" in sys.argv:
        print "Waiting for master update arrive"
        time.sleep(30)  # Wait 30 sec to allow master updater

    # Note: Requires the file data/names.json to exist and contain "{}" to work
    names_raw = open(names_path, "rb").read()
    names = json.loads(names_raw)
    for subdomain, address in data["zeronet"].items():
        subdomain = subdomain.lower()
        address = re.sub("[^A-Za-z0-9]", "", address)
        print subdomain, domain, "->", address
        if subdomain:
            names["%s.%s.bit" % (subdomain, domain)] = address
        else:
            names["%s.bit" % domain] = address

    new_names_raw = json.dumps(names, indent=2, sort_keys=True)
    if new_names_raw != names_raw:
        open(names_path, "wb").write(new_names_raw)
        return True
    else:
        print "names not changed"
        return False


def processBlock(block_id):
    print "Processing block #%s..." % block_id
    s = time.time()
    block_hash = rpc.getblockhash(block_id)
    block = rpc.getblock(block_hash)
    print "Checking %s tx" % len(block["tx"])
    updated = 0
    for tx in block["tx"]:
        try:
            transaction = rpc.getrawtransaction(tx, 1)
            for vout in transaction.get("vout", []):
                if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]:
                    name_op = vout["scriptPubKey"]["nameOp"]
                    updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"])
        except Exception, err:
            print "Error processing tx #%s %s" % (tx, err)
    print "Done in %.3fs (updated %s)." % (time.time() - s, updated)
    if updated:
        publish()


# Loading config...

@@ -83,16 +91,16 @@ else:
    namecoin_location = os.path.expanduser("~/.namecoin/")

config_path = namecoin_location + 'zeroname_config.json'
if not os.path.isfile(config_path):  # Create sample config
    open(config_path, "w").write(
        json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet/', 'privatekey': '', 'lastprocessed': 223911}, indent=2)
    )
    print "Example config written to %s" % config_path
    sys.exit(0)

config = json.load(open(config_path))
names_path = "%s/data/%s/data/names.json" % (config["zeronet_path"], config["site"])
os.chdir(config["zeronet_path"])  # Change working dir - tells script where Zeronet install is.

# Getting rpc connect details
namecoin_conf = open(namecoin_location + "namecoin.conf").read()

@@ -102,42 +110,47 @@ rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1)
rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)

rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)

last_block = int(rpc.getinfo()["blocks"])

if not config["lastprocessed"]:  # Start processing from last block
    config["lastprocessed"] = last_block

# Processing skipped blocks
print "Processing block from #%s to #%s..." % (config["lastprocessed"], last_block)
for block_id in range(config["lastprocessed"], last_block + 1):
    processBlock(block_id)

# processBlock(223911) # Testing zeronetwork.bit
# processBlock(227052) # Testing brainwallets.bit
# processBlock(236824) # Utf8 domain name (invalid should skip)
# processBlock(236752) # Uppercase domain (invalid should skip)
# processBlock(236870) # Encoded domain (should pass)
# sys.exit(0)

while 1:
    print "Waiting for new block",
    sys.stdout.flush()
    while 1:
        try:
            rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
            if (int(rpc.getinfo()["blocks"]) > last_block):
                break
            time.sleep(1)
            rpc.waitforblock()
            print "Found"
            break  # Block found
        except socket.timeout:  # Timeout
            print ".",
            sys.stdout.flush()
        except Exception, err:
            print "Exception", err.__class__, err
            time.sleep(5)

    last_block = int(rpc.getinfo()["blocks"])
    for block_id in range(config["lastprocessed"] + 1, last_block + 1):
        processBlock(block_id)

    config["lastprocessed"] = last_block
    open(config_path, "w").write(json.dumps(config, indent=2))
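processNameOp only accepts a name value that is a JSON object with a dict under "zeronet" and a domain matching the lowercase pattern; each subdomain entry then becomes a names.json key. A sketch of the acceptance path with hypothetical addresses reused from elsewhere in this commit:

import json
import re

domain = "example"
value = '{"zeronet": {"": "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F", "blog": "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr"}}'

data = json.loads(value)
assert isinstance(data["zeronet"], dict)  # Rejected with "Not dict" otherwise
assert re.match("^[a-z]([a-z0-9-]{0,62}[a-z0-9])?$", domain)  # Rejected as "Invalid domain" otherwise

for subdomain, address in data["zeronet"].items():
    if subdomain:
        print "%s.%s.bit -> %s" % (subdomain, domain, address)  # blog.example.bit -> 1EU1tbG9o...
    else:
        print "%s.bit -> %s" % (domain, address)  # example.bit -> 1Name2NXVi...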

View file

@@ -19,18 +19,16 @@ def lookupDomain(domain):
    try:
        domain_object = rpc.name_show("d/"+domain)
    except:
        #domain doesn't exist
        return None

    domain_json = json.loads(domain_object['value'])
    try:
        domain_address = domain_json["zeronet"][subdomain]
    except:
        #domain exists but doesn't have any zeronet value
        return None
    return domain_address

@@ -54,3 +52,30 @@ rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)

rpc = AuthServiceProxy(rpc_url, timeout=60*5)

"""
while 1:
    print "Waiting for new block",
    sys.stdout.flush()
    while 1:
        try:
            rpc = AuthServiceProxy(rpc_url, timeout=60*5)
            if (int(rpc.getinfo()["blocks"]) > last_block): break
            time.sleep(1)
            rpc.waitforblock()
            print "Found"
            break # Block found
        except socket.timeout: # Timeout
            print ".",
            sys.stdout.flush()
        except Exception, err:
            print "Exception", err.__class__, err
            time.sleep(5)

    last_block = int(rpc.getinfo()["blocks"])
    for block_id in range(config["lastprocessed"]+1, last_block+1):
        processBlock(block_id)

    config["lastprocessed"] = last_block
    open(config_path, "w").write(json.dumps(config, indent=2))
"""

View file

@@ -1,191 +1,195 @@
import argparse
import sys
import os
import ConfigParser


class Config(object):

    def __init__(self):
        self.version = "0.3.1"
        self.rev = 280
        self.parser = self.createArguments()
        argv = sys.argv[:]  # Copy command line arguments
        argv = self.parseConfig(argv)  # Add arguments from config file
        self.parseCommandline(argv)  # Parse argv
        self.setAttributes()

    def __str__(self):
        return str(self.arguments).replace("Namespace", "Config")  # Using argparse str output

    # Convert string to bool
    def strToBool(self, v):
        return v.lower() in ("yes", "true", "t", "1")

    # Create command line arguments
    def createArguments(self):
        # Platform specific
        if sys.platform.startswith("win"):
            coffeescript = "type %s | tools\\coffee\\coffee.cmd"
        else:
            coffeescript = None

        """ Probably fixed
        if sys.platform.lower().startswith("darwin"):
            # For some reason openssl doesn't work on mac yet (https://github.com/HelloZeroNet/ZeroNet/issues/94)
            use_openssl = False
        else:
            use_openssl = True
        """
        use_openssl = True

        # Create parser
        parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.register('type', 'bool', self.strToBool)
        subparsers = parser.add_subparsers(title="Action to perform", dest="action")

        # Main
        action = subparsers.add_parser("main", help='Start UiServer and FileServer (default)')

        # SiteCreate
        action = subparsers.add_parser("siteCreate", help='Create a new site')

        # SiteSign
        action = subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
        action.add_argument('address', help='Site to sign')
        action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
        action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
                            default="content.json", metavar="inner_path")
        action.add_argument('--publish', help='Publish site after the signing', action='store_true')

        # SitePublish
        action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
        action.add_argument('address', help='Site to publish')
        action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)',
                            default=None, nargs='?')
        action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)',
                            default=15441, nargs='?')
        action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
                            default="content.json", metavar="inner_path")

        # SiteVerify
        action = subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
        action.add_argument('address', help='Site to verify')

        # dbRebuild
        action = subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
        action.add_argument('address', help='Site to rebuild')

        # dbQuery
        action = subparsers.add_parser("dbQuery", help='Query site sql cache')
        action.add_argument('address', help='Site to query')
        action.add_argument('query', help='Sql query')

        # PeerPing
        action = subparsers.add_parser("peerPing", help='Send Ping command to peer')
        action.add_argument('peer_ip', help='Peer ip')
        action.add_argument('peer_port', help='Peer port', nargs='?')

        # PeerGetFile
        action = subparsers.add_parser("peerGetFile", help='Request and print a file content from peer')
        action.add_argument('peer_ip', help='Peer ip')
        action.add_argument('peer_port', help='Peer port')
        action.add_argument('site', help='Site address')
        action.add_argument('filename', help='File name to request')

        # PeerCmd
        action = subparsers.add_parser("peerCmd", help='Request and print a file content from peer')
        action.add_argument('peer_ip', help='Peer ip')
        action.add_argument('peer_port', help='Peer port')
        action.add_argument('cmd', help='Command to execute')
        action.add_argument('parameters', help='Parameters to command', nargs='?')

        # CryptSign
        action = subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
        action.add_argument('message', help='Message to sign')
        action.add_argument('privatekey', help='Private key')

        # Config parameters
        parser.add_argument('--debug', help='Debug mode', action='store_true')
        parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')

        parser.add_argument('--config_file', help='Path of config file', default="zeronet.conf", metavar="path")
        parser.add_argument('--data_dir', help='Path of data directory', default="data", metavar="path")
        parser.add_argument('--log_dir', help='Path of logging directory', default="log", metavar="path")

        parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
        parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
        parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
        parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
                            nargs='?', const="default_browser", metavar='browser_name')
        parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr',
                            metavar='address')
        parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size')

        parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
        parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port')
        parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
        parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
        parser.add_argument('--ip_external', help='External ip (tested on start if None)', metavar='ip')
        parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup',
                            type='bool', choices=[True, False], default=use_openssl)
        parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
        parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
                            type='bool', choices=[True, False], default=True)

        parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
                            metavar='executable_path')

        parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))

        return parser

    # Find arguments specified for current action
    def getActionArguments(self):
        back = {}
        arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:]  # First is --version
        for argument in arguments:
            back[argument.dest] = getattr(self, argument.dest)
        return back

    # Try to find action from sys.argv
    def getAction(self, argv):
        actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0]  # Valid actions
        found_action = False
        for action in actions:  # See if any in sys.argv
            if action in argv:
                found_action = action
                break
        return found_action

    # Parse command line arguments
    def parseCommandline(self, argv):
        # Find out if action is specified on start
        action = self.getAction(argv)
        if len(argv) == 1 or not action:  # If no action specified set the main action
            argv.append("main")
        self.arguments = self.parser.parse_args(argv[1:])

    # Parse config file
    def parseConfig(self, argv):
        # Find config file path from parameters
        config_file = "zeronet.conf"
        if "--config_file" in argv:
            config_file = argv[argv.index("--config_file") + 1]

        # Load config file
        if os.path.isfile(config_file):
            config = ConfigParser.ConfigParser(allow_no_value=True)
            config.read(config_file)
            for section in config.sections():
                for key, val in config.items(section):
                    if section != "global":  # If not global prefix key with section
                        key = section + "_" + key
                    if val:
                        argv.insert(1, val)
                    argv.insert(1, "--%s" % key)
        return argv

    # Expose arguments as class attributes
    def setAttributes(self):
        # Set attributes from arguments
        args = vars(self.arguments)
        for key, val in args.items():
            setattr(self, key, val)


config = Config()
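parseConfig front-loads argv with pairs derived from zeronet.conf, prefixing non-global section keys with the section name, and parseCommandline later appends the implicit "main" action when none is given. A runnable sketch of that key mangling, assuming a hypothetical zeronet.conf content:

import ConfigParser
import StringIO

conf = "[global]\nui_port = 43111\n\n[fileserver]\nport = 15442\n"  # Hypothetical zeronet.conf
parsed = ConfigParser.ConfigParser(allow_no_value=True)
parsed.readfp(StringIO.StringIO(conf))

argv = ["zeronet.py"]
for section in parsed.sections():
    for key, val in parsed.items(section):
        if section != "global":  # If not global prefix key with section
            key = section + "_" + key
        if val:
            argv.insert(1, val)
        argv.insert(1, "--%s" % key)
print argv  # -> ['zeronet.py', '--fileserver_port', '15442', '--ui_port', '43111']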

View file

@ -1,282 +1,294 @@
import logging, socket, time import socket
from cStringIO import StringIO import time
import gevent, msgpack
import gevent
import msgpack
from Config import config from Config import config
from Debug import Debug from Debug import Debug
from util import StreamingMsgpack from util import StreamingMsgpack
from Crypt import CryptConnection from Crypt import CryptConnection
class Connection(object): class Connection(object):
__slots__ = ("sock", "sock_wrapped", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id", "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time", "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests") __slots__ = (
"sock", "sock_wrapped", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id",
"handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time",
"last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent",
"last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests"
)
def __init__(self, server, ip, port, sock=None): def __init__(self, server, ip, port, sock=None):
self.sock = sock self.sock = sock
self.ip = ip self.ip = ip
self.port = port self.port = port
self.peer_id = None # Bittorrent style peer id (not used yet) self.peer_id = None # Bittorrent style peer id (not used yet)
self.id = server.last_connection_id self.id = server.last_connection_id
server.last_connection_id += 1 server.last_connection_id += 1
self.protocol = "?" self.protocol = "?"
self.type = "?" self.type = "?"
self.server = server self.server = server
self.unpacker = None # Stream incoming socket messages here self.unpacker = None # Stream incoming socket messages here
self.req_id = 0 # Last request id self.req_id = 0 # Last request id
self.handshake = {} # Handshake info got from peer self.handshake = {} # Handshake info got from peer
self.crypt = None # Connection encryption method self.crypt = None # Connection encryption method
self.sock_wrapped = False # Socket wrapped to encryption self.sock_wrapped = False # Socket wrapped to encryption
self.connected = False self.connected = False
self.event_connected = gevent.event.AsyncResult() # Solves on handshake received self.event_connected = gevent.event.AsyncResult() # Solves on handshake received
self.closed = False self.closed = False
# Stats # Stats
self.start_time = time.time() self.start_time = time.time()
self.last_recv_time = 0 self.last_recv_time = 0
self.last_message_time = 0 self.last_message_time = 0
self.last_send_time = 0 self.last_send_time = 0
self.last_sent_time = 0 self.last_sent_time = 0
self.incomplete_buff_recv = 0 self.incomplete_buff_recv = 0
self.bytes_recv = 0 self.bytes_recv = 0
self.bytes_sent = 0 self.bytes_sent = 0
self.last_ping_delay = None self.last_ping_delay = None
self.last_req_time = 0 self.last_req_time = 0
self.last_cmd = None self.last_cmd = None
self.name = None self.name = None
self.updateName() self.updateName()
self.waiting_requests = {} # Waiting sent requests self.waiting_requests = {} # Waiting sent requests
def updateName(self):
self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol)
def updateName(self): def __str__(self):
self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol) return self.name
def __repr__(self):
return "<%s>" % self.__str__()
def __str__(self): def log(self, text):
return self.name self.server.log.debug("%s > %s" % (self.name, text))
# Open connection to peer and wait for handshake
def connect(self):
self.log("Connecting...")
self.type = "out"
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((self.ip, int(self.port)))
def __repr__(self): # Implicit SSL in the future
return "<%s>" % self.__str__() # self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
# self.sock.do_handshake()
# self.crypt = "tls-rsa"
# self.sock_wrapped = True
# Detect protocol
self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()})
gevent.spawn(self.messageLoop)
return self.event_connected.get() # Wait for handshake
def log(self, text): # Handle incoming connection
self.server.log.debug("%s > %s" % (self.name, text)) def handleIncomingConnection(self, sock):
self.log("Incoming connection...")
self.type = "in"
try:
if sock.recv(1, gevent.socket.MSG_PEEK) == "\x16":
self.log("Crypt in connection using implicit SSL")
self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
self.sock_wrapped = True
self.crypt = "tls-rsa"
except Exception, err:
self.log("Socket peek error: %s" % Debug.formatException(err))
self.messageLoop()
# Message loop for connection
def messageLoop(self):
if not self.sock:
self.log("Socket error: No socket found")
return False
self.protocol = "v2"
self.updateName()
self.connected = True
# Open connection to peer and wait for handshake self.unpacker = msgpack.Unpacker()
def connect(self): try:
self.log("Connecting...") while True:
self.type = "out" buff = self.sock.recv(16 * 1024)
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if not buff:
self.sock.connect((self.ip, int(self.port))) break # Connection closed
# Implicit SSL in the future self.last_recv_time = time.time()
#self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa") self.incomplete_buff_recv += 1
#self.sock.do_handshake() self.bytes_recv += len(buff)
#self.crypt = "tls-rsa" self.server.bytes_recv += len(buff)
#self.sock_wrapped = True if not self.unpacker:
# Detect protocol self.unpacker = msgpack.Unpacker()
self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()}) self.unpacker.feed(buff)
gevent.spawn(self.messageLoop) for message in self.unpacker:
return self.event_connected.get() # Wait for handshake self.incomplete_buff_recv = 0
self.handleMessage(message)
message = None
buff = None
except Exception, err:
if not self.closed:
self.log("Socket error: %s" % Debug.formatException(err))
self.close() # MessageLoop ended, close connection
# My handshake info
def handshakeInfo(self):
return {
"version": config.version,
"protocol": "v2",
"peer_id": self.server.peer_id,
"fileserver_port": self.server.port,
"port_opened": self.server.port_opened,
"rev": config.rev,
"crypt_supported": CryptConnection.manager.crypt_supported,
"crypt": self.crypt
}
def setHandshake(self, handshake):
self.handshake = handshake
if handshake.get("port_opened", None) is False: # Not connectable
self.port = 0
else:
self.port = handshake["fileserver_port"] # Set peer fileserver port
# Check if we can encrypt the connection
if handshake.get("crypt_supported"):
if handshake.get("crypt"): # Recommended crypt by server
crypt = handshake["crypt"]
else: # Select the best supported on both sides
crypt = CryptConnection.manager.selectCrypt(handshake["crypt_supported"])
# Handle incoming connection if crypt:
def handleIncomingConnection(self, sock): self.crypt = crypt
self.log("Incoming connection...") self.event_connected.set(True) # Mark handshake as done
self.type = "in"
try:
if sock.recv(1, gevent.socket.MSG_PEEK) == "\x16":
self.log("Crypt in connection using implicit SSL")
self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
self.sock_wrapped = True
self.crypt = "tls-rsa"
except Exception, err:
self.log("Socket peek error: %s" % Debug.formatException(err))
self.messageLoop()
# Handle incoming message
def handleMessage(self, message):
self.last_message_time = time.time()
if message.get("cmd") == "response": # New style response
if message["to"] in self.waiting_requests:
self.waiting_requests[message["to"]].set(message) # Set the response to event
del self.waiting_requests[message["to"]]
elif message["to"] == 0: # Other peers handshake
ping = time.time() - self.start_time
if config.debug_socket:
self.log("Handshake response: %s, ping: %s" % (message, ping))
self.last_ping_delay = ping
# Server switched to crypt, lets do it also if not crypted already
if message.get("crypt") and not self.sock_wrapped:
self.crypt = message["crypt"]
server = (self.type == "in")
self.log("Crypt out connection using: %s (server side: %s)..." % (self.crypt, server))
self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
self.sock.do_handshake()
self.setHandshake(message)
else:
self.log("Unknown response: %s" % message)
elif message.get("cmd"): # Handhsake request
if message["cmd"] == "handshake":
if config.debug_socket:
self.log("Handshake request: %s" % message)
self.setHandshake(message["params"])
data = self.handshakeInfo()
data["cmd"] = "response"
data["to"] = message["req_id"]
self.send(data) # Send response to handshake
# Sent crypt request to client
if self.crypt and not self.sock_wrapped:
server = (self.type == "in")
self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server))
self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
self.sock_wrapped = True
else:
self.server.handleRequest(self, message)
else: # Old style response, no req_id definied
if config.debug_socket:
self.log("Old style response, waiting: %s" % self.waiting_requests.keys())
last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true
self.waiting_requests[last_req_id].set(message)
del self.waiting_requests[last_req_id] # Remove from waiting request
# Message loop for connection # Send data to connection
def messageLoop(self): def send(self, message, streaming=False):
if not self.sock: if config.debug_socket:
self.log("Socket error: No socket found") self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (
return False message.get("cmd"), message.get("to"), streaming,
self.protocol = "v2" message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"),
self.updateName() message.get("req_id"))
self.connected = True )
self.last_send_time = time.time()
if streaming:
bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall)
message = None
self.bytes_sent += bytes_sent
self.server.bytes_sent += bytes_sent
else:
data = msgpack.packb(message)
message = None
self.bytes_sent += len(data)
self.server.bytes_sent += len(data)
self.sock.sendall(data)
self.last_sent_time = time.time()
return True
self.unpacker = msgpack.Unpacker() # Create and send a request to peer
try: def request(self, cmd, params={}):
while True: # Last command sent more than 10 sec ago, timeout
buff = self.sock.recv(16*1024) if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10:
if not buff: break # Connection closed self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time))
self.last_recv_time = time.time() self.close()
self.incomplete_buff_recv += 1 return False
self.bytes_recv += len(buff)
self.server.bytes_recv += len(buff)
if not self.unpacker:
self.unpacker = msgpack.Unpacker()
self.unpacker.feed(buff)
for message in self.unpacker:
self.incomplete_buff_recv = 0
self.handleMessage(message)
message = None
buff = None
except Exception, err:
if not self.closed: self.log("Socket error: %s" % Debug.formatException(err))
self.close() # MessageLoop ended, close connection
self.last_req_time = time.time()
self.last_cmd = cmd
self.req_id += 1
data = {"cmd": cmd, "req_id": self.req_id, "params": params}
event = gevent.event.AsyncResult() # Create new event for response
self.waiting_requests[self.req_id] = event
self.send(data) # Send request
res = event.get() # Wait until event solves
return res
# My handshake info def ping(self):
def handshakeInfo(self): s = time.time()
return { response = None
"version": config.version, with gevent.Timeout(10.0, False):
"protocol": "v2", try:
"peer_id": self.server.peer_id, response = self.request("ping")
"fileserver_port": self.server.port, except Exception, err:
"port_opened": self.server.port_opened, self.log("Ping error: %s" % Debug.formatException(err))
"rev": config.rev, if response and "body" in response and response["body"] == "Pong!":
"crypt_supported": CryptConnection.manager.crypt_supported, self.last_ping_delay = time.time() - s
"crypt": self.crypt return True
} else:
return False
# Close connection
def close(self):
if self.closed:
return False # Already closed
self.closed = True
self.connected = False
self.event_connected.set(False)
def setHandshake(self, handshake): if config.debug_socket:
self.handshake = handshake self.log(
if handshake.get("port_opened", None) == False: # Not connectable "Closing connection, waiting_requests: %s, buff: %s..." %
self.port = 0 (len(self.waiting_requests), self.incomplete_buff_recv)
else: )
        self.port = handshake["fileserver_port"]  # Set peer fileserver port
        # Check if we can encrypt the connection
        if handshake.get("crypt_supported"):
            if handshake.get("crypt"):  # Recommended crypt by server
                crypt = handshake["crypt"]
            else:  # Select the best supported on both sides
                crypt = CryptConnection.manager.selectCrypt(handshake["crypt_supported"])

            if crypt:
                self.crypt = crypt
        self.event_connected.set(True)  # Mark handshake as done

    # Handle incoming message
    def handleMessage(self, message):
        self.last_message_time = time.time()
        if message.get("cmd") == "response":  # New style response
            if message["to"] in self.waiting_requests:
                self.waiting_requests[message["to"]].set(message)  # Set the response to event
                del self.waiting_requests[message["to"]]
            elif message["to"] == 0:  # Other peer's handshake
                ping = time.time() - self.start_time
                if config.debug_socket:
                    self.log("Handshake response: %s, ping: %s" % (message, ping))
                self.last_ping_delay = ping
                # Server switched to crypt, let's do it also if not crypted already
                if message.get("crypt") and not self.sock_wrapped:
                    self.crypt = message["crypt"]
                    server = (self.type == "in")
                    self.log("Crypt out connection using: %s (server side: %s)..." % (self.crypt, server))
                    self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
                    self.sock.do_handshake()
                self.setHandshake(message)
            else:
                self.log("Unknown response: %s" % message)
        elif message.get("cmd"):  # Handshake request
            if message["cmd"] == "handshake":
                if config.debug_socket:
                    self.log("Handshake request: %s" % message)
                self.setHandshake(message["params"])
                data = self.handshakeInfo()
                data["cmd"] = "response"
                data["to"] = message["req_id"]
                self.send(data)  # Send response to handshake
                # Send crypt request to client
                if self.crypt and not self.sock_wrapped:
                    server = (self.type == "in")
                    self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server))
                    self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
                    self.sock_wrapped = True
            else:
                self.server.handleRequest(self, message)
        else:  # Old style response, no req_id defined
            if config.debug_socket:
                self.log("Old style response, waiting: %s" % self.waiting_requests.keys())
            last_req_id = min(self.waiting_requests.keys())  # Get the oldest waiting request and set it true
            self.waiting_requests[last_req_id].set(message)
            del self.waiting_requests[last_req_id]  # Remove from waiting request

    # Send data to connection
    def send(self, message, streaming=False):
        if config.debug_socket:
            self.log(
                "Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" %
                (message.get("cmd"), message.get("to"), streaming,
                 message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"),
                 message.get("req_id"))
            )
        self.last_send_time = time.time()
        if streaming:
            bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall)
            message = None
            self.bytes_sent += bytes_sent
            self.server.bytes_sent += bytes_sent
        else:
            data = msgpack.packb(message)
            message = None
            self.bytes_sent += len(data)
            self.server.bytes_sent += len(data)
            self.sock.sendall(data)
        self.last_sent_time = time.time()
        return True

    # Create and send a request to peer
    def request(self, cmd, params={}):
        # Last command sent more than 10 sec ago, timeout
        if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10:
            self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time))
            self.close()
            return False

        self.last_req_time = time.time()
        self.last_cmd = cmd
        self.req_id += 1
        data = {"cmd": cmd, "req_id": self.req_id, "params": params}
        event = gevent.event.AsyncResult()  # Create new event for response
        self.waiting_requests[self.req_id] = event
        self.send(data)  # Send request
        res = event.get()  # Wait until event solves
        return res

    def ping(self):
        s = time.time()
        response = None
        with gevent.Timeout(10.0, False):
            try:
                response = self.request("ping")
            except Exception, err:
                self.log("Ping error: %s" % Debug.formatException(err))
        if response and "body" in response and response["body"] == "Pong!":
            self.last_ping_delay = time.time() - s
            return True
        else:
            return False

    # Close connection
    def close(self):
        if self.closed:
            return False  # Already closed
        self.closed = True
        self.connected = False
        self.event_connected.set(False)

        if config.debug_socket:
            self.log(
                "Closing connection, waiting_requests: %s, buff: %s..." %
                (len(self.waiting_requests), self.incomplete_buff_recv)
            )
        for request in self.waiting_requests.values():  # Mark pending requests failed
            request.set(False)
        self.waiting_requests = {}
        self.server.removeConnection(self)  # Remove connection from server registry
        try:
            if self.sock:
                self.sock.shutdown(gevent.socket.SHUT_WR)
                self.sock.close()
        except Exception, err:
            if config.debug_socket:
                self.log("Close error: %s" % err)

        # Little cleanup
        self.sock = None
        self.unpacker = None
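
A quick usage sketch of the request/response cycle above (the peer address is made up; `server` is assumed to be a ConnectionServer instance):

    connection = server.getConnection("127.0.0.1", 15441)
    if connection.ping():  # request("ping") under a 10s gevent.Timeout, expects a "Pong!" body
        print "Peer alive, ping: %.3fs" % connection.last_ping_delay
    connection.close()  # Fails pending waiting_requests and unregisters from the server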


@ -43,14 +43,16 @@ class ConnectionServer:
# Check msgpack version # Check msgpack version
if msgpack.version[0] == 0 and msgpack.version[1] < 4: if msgpack.version[0] == 0 and msgpack.version[1] < 4:
self.log.error( self.log.error(
"Error: Too old msgpack version: %s (>0.4.0 required), please update using `sudo pip install msgpack-python --upgrade`" % "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo pip install msgpack-python --upgrade`" %
str(msgpack.version) str(msgpack.version)
) )
sys.exit(0) sys.exit(0)
if port: # Listen server on a port if port: # Listen server on a port
self.pool = Pool(1000) # do not accept more than 1000 connections self.pool = Pool(1000) # do not accept more than 1000 connections
self.stream_server = StreamServer((ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100) self.stream_server = StreamServer(
(ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100
)
if request_handler: if request_handler:
self.handleRequest = request_handler self.handleRequest = request_handler
@ -152,25 +154,32 @@ class ConnectionServer:
for connection in self.connections[:]: # Make a copy for connection in self.connections[:]: # Make a copy
idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time) idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
if connection.unpacker and idle > 30: # Delete the unpacker if not needed if connection.unpacker and idle > 30:
# Delete the unpacker if not needed
del connection.unpacker del connection.unpacker
connection.unpacker = None connection.unpacker = None
connection.log("Unpacker deleted") connection.log("Unpacker deleted")
if idle > 60 * 60: # Wake up after 1h if idle > 60 * 60:
# Wake up after 1h
connection.log("[Cleanup] After wakeup, idle: %s" % idle) connection.log("[Cleanup] After wakeup, idle: %s" % idle)
connection.close() connection.close()
elif idle > 20 * 60 and connection.last_send_time < time.time() - 10: # Idle more than 20 min and we not send request in last 10 sec elif idle > 20 * 60 and connection.last_send_time < time.time() - 10:
# Idle more than 20 min and we not send request in last 10 sec
if not connection.ping(): # send ping request if not connection.ping(): # send ping request
connection.close() connection.close()
elif idle > 10 and connection.incomplete_buff_recv > 0: # Incompelte data with more than 10 sec idle elif idle > 10 and connection.incomplete_buff_recv > 0:
# Incompelte data with more than 10 sec idle
connection.log("[Cleanup] Connection buff stalled") connection.log("[Cleanup] Connection buff stalled")
connection.close() connection.close()
elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10: # Sent command and no response in 10 sec elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10:
connection.log("[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time)) # Sent command and no response in 10 sec
connection.log(
"[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time)
)
connection.close() connection.close()
elif idle > 60 and connection.protocol == "?": # No connection after 1 min elif idle > 60 and connection.protocol == "?": # No connection after 1 min
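
The cleanup ladder above reduces to a few idle thresholds; a summary sketch (values copied from the loop, constant names invented here):

    IDLE_DROP_UNPACKER = 30      # Free the msgpack unpacker
    IDLE_WAKEUP_CLOSE = 60 * 60  # Close after 1h idle (machine wakeup)
    IDLE_PING = 20 * 60          # Ping after 20 min idle; close on failure
    IDLE_STALLED_BUFF = 10       # Close if a partial message sits for 10s
    REQUEST_TIMEOUT = 10         # Close if a sent command gets no response in 10s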


@@ -108,7 +108,7 @@ class ContentManager(object):
        return total_size

    # Find the file info line from self.contents
    # Return: { "sha512": "c29d73d...21f518", "size": 41, "content_inner_path": "content.json"}
    def getFileInfo(self, inner_path):
        dirs = inner_path.split("/")  # Parent dirs of content.json
        inner_path_parts = [dirs.pop()]  # Filename relative to content.json

@@ -279,11 +279,17 @@ class ContentManager(object):
        privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
        valid_signers = self.getValidSigners(inner_path, new_content)
        if privatekey_address not in valid_signers:
            return self.log.error(
                "Private key invalid! Valid signers: %s, Private key address: %s" %
                (valid_signers, privatekey_address)
            )
        self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))

        if inner_path == "content.json" and privatekey_address == self.site.address:
            # If signing using the root key, then sign the valid signers
            new_content["signers_sign"] = CryptBitcoin.sign(
                "%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey
            )
            if not new_content["signers_sign"]:
                self.log.info("Old style address, signers_sign is none")

@@ -352,7 +358,9 @@ class ContentManager(object):
        if not cert_address:  # Cert signer not allowed
            self.log.error("Invalid cert signer: %s" % domain)
            return False
        return CryptBitcoin.verify(
            "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"]
        )

    # Checks if the content.json content is valid
    # Return: True or False

@@ -414,10 +422,13 @@ class ContentManager(object):
            if old_content["modified"] == new_content["modified"] and ignore_same:  # Ignore, have the same content.json
                return None
            elif old_content["modified"] > new_content["modified"]:  # We have newer
                self.log.debug(
                    "We have newer %s (Our: %s, Sent: %s)" %
                    (inner_path, old_content["modified"], new_content["modified"])
                )
                gevent.spawn(self.site.publish, inner_path=inner_path)  # Try to fix the broken peers
                return False
        if new_content["modified"] > time.time() + 60 * 60 * 24:  # Content modified in the far future (allow 1 day+)
            self.log.error("%s modify is in the future!" % inner_path)
            return False
        # Check sign

@@ -437,7 +448,9 @@ class ContentManager(object):
        signs_required = self.getSignsRequired(inner_path, new_content)
        if inner_path == "content.json" and len(valid_signers) > 1:  # Check signers_sign on root content.json
            if not CryptBitcoin.verify(
                "%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]
            ):
                self.log.error("%s invalid signers_sign!" % inner_path)
                return False

@@ -470,8 +483,10 @@ class ContentManager(object):
            else:
                hash_valid = False
            if file_info["size"] != file.tell():
                self.log.error(
                    "%s file size does not match %s <> %s, Hash: %s" %
                    (inner_path, file.tell(), file_info["size"], hash_valid)
                )
                return False
            return hash_valid

@@ -493,7 +508,9 @@ def testSign():
    from Site import Site
    site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
    content_manager = ContentManager(site)
    content_manager.sign(
        "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR"
    )


def testVerify():

@@ -504,10 +521,14 @@ def testVerify():
    print "Loaded contents:", content_manager.contents.keys()

    file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json"))
    print "content.json valid:", content_manager.verifyFile(
        "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False
    )

    file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json"))
    print "messages.json valid:", content_manager.verifyFile(
        "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False
    )


def testInfo():
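
A sketch of the freshness rule enforced above, with made-up timestamps:

    import time
    old_modified, new_modified = 1436700000, 1436600000  # Illustrative values
    if old_modified > new_modified:
        print "We have newer, re-publishing ours"  # Mirrors gevent.spawn(self.site.publish, ...)
    elif new_modified > time.time() + 60 * 60 * 24:
        print "Rejected: modified more than 1 day in the future"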


@ -1,72 +1,75 @@
import logging
from lib.BitcoinECC import BitcoinECC from lib.BitcoinECC import BitcoinECC
from lib.pybitcointools import bitcoin as btctools from lib.pybitcointools import bitcoin as btctools
import logging
from Config import config from Config import config
# Try to load openssl # Try to load openssl
try: try:
if not config.use_openssl: raise Exception("Disabled by config") if not config.use_openssl:
from lib.opensslVerify import opensslVerify raise Exception("Disabled by config")
logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version) from lib.opensslVerify import opensslVerify
logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version)
except Exception, err: except Exception, err:
logging.info("OpenSSL load failed: %s, falling back to slow bitcoin verify" % err) logging.info("OpenSSL load failed: %s, falling back to slow bitcoin verify" % err)
opensslVerify = None opensslVerify = None
def newPrivatekey(uncompressed=True): # Return new private key def newPrivatekey(uncompressed=True): # Return new private key
privatekey = btctools.encode_privkey(btctools.random_key(), "wif") privatekey = btctools.encode_privkey(btctools.random_key(), "wif")
return privatekey return privatekey
def newSeed(): def newSeed():
return btctools.random_key() return btctools.random_key()
def hdPrivatekey(seed, child): def hdPrivatekey(seed, child):
masterkey = btctools.bip32_master_key(seed) masterkey = btctools.bip32_master_key(seed)
childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too large child id could cause problems childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too large child id could cause problems
key = btctools.bip32_extract_key(childkey) key = btctools.bip32_extract_key(childkey)
return btctools.encode_privkey(key, "wif") return btctools.encode_privkey(key, "wif")
def privatekeyToAddress(privatekey): # Return address from private key def privatekeyToAddress(privatekey): # Return address from private key
if privatekey.startswith("23") and len(privatekey) > 52: # Backward compatibility to broken lib if privatekey.startswith("23") and len(privatekey) > 52: # Backward compatibility to broken lib
bitcoin = BitcoinECC.Bitcoin() bitcoin = BitcoinECC.Bitcoin()
bitcoin.BitcoinAddressFromPrivate(privatekey) bitcoin.BitcoinAddressFromPrivate(privatekey)
return bitcoin.BitcoinAddresFromPublicKey() return bitcoin.BitcoinAddresFromPublicKey()
else: else:
try: try:
return btctools.privkey_to_address(privatekey) return btctools.privkey_to_address(privatekey)
except Exception, err: # Invalid privatekey except Exception: # Invalid privatekey
return False return False
def sign(data, privatekey): # Return sign to data using private key def sign(data, privatekey): # Return sign to data using private key
if privatekey.startswith("23") and len(privatekey) > 52: return None # Old style private key not supported if privatekey.startswith("23") and len(privatekey) > 52:
sign = btctools.ecdsa_sign(data, privatekey) return None # Old style private key not supported
return sign sign = btctools.ecdsa_sign(data, privatekey)
return sign
def signOld(data, privatekey): # Return sign to data using private key (backward compatible old style) def signOld(data, privatekey): # Return sign to data using private key (backward compatible old style)
bitcoin = BitcoinECC.Bitcoin() bitcoin = BitcoinECC.Bitcoin()
bitcoin.BitcoinAddressFromPrivate(privatekey) bitcoin.BitcoinAddressFromPrivate(privatekey)
sign = bitcoin.SignECDSA(data) sign = bitcoin.SignECDSA(data)
return sign return sign
def verify(data, address, sign): # Verify data using address and sign def verify(data, address, sign): # Verify data using address and sign
if hasattr(sign, "endswith"): if hasattr(sign, "endswith"):
if opensslVerify: # Use the faster method if avalible if opensslVerify: # Use the faster method if avalible
pub = opensslVerify.getMessagePubkey(data, sign) pub = opensslVerify.getMessagePubkey(data, sign)
sign_address = btctools.pubtoaddr(pub) sign_address = btctools.pubtoaddr(pub)
else: # Use pure-python else: # Use pure-python
pub = btctools.ecdsa_recover(data, sign) pub = btctools.ecdsa_recover(data, sign)
sign_address = btctools.pubtoaddr(pub) sign_address = btctools.pubtoaddr(pub)
if type(address) is list: # Any address in the list if type(address) is list: # Any address in the list
return sign_address in address return sign_address in address
else: # One possible address else: # One possible address
return sign_address == address return sign_address == address
else: # Backward compatible old style else: # Backward compatible old style
bitcoin = BitcoinECC.Bitcoin() bitcoin = BitcoinECC.Bitcoin()
return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign) return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign)
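
A sign/verify round trip using the module above (assumed to be imported as CryptBitcoin; keys are random per run):

    import CryptBitcoin

    privatekey = CryptBitcoin.newPrivatekey()
    address = CryptBitcoin.privatekeyToAddress(privatekey)
    sign = CryptBitcoin.sign("hello", privatekey)
    assert CryptBitcoin.verify("hello", address, sign)  # Recovers the pubkey and compares addresses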


@@ -4,103 +4,104 @@
import ssl
from Config import config
from util import SslPatch


class CryptConnectionManager:
    def __init__(self):
        # OpenSSL params
        if sys.platform.startswith("win"):
            self.openssl_bin = "src\\lib\\opensslVerify\\openssl.exe"
        else:
            self.openssl_bin = "openssl"
        self.openssl_env = {"OPENSSL_CONF": "src/lib/opensslVerify/openssl.cnf"}

        self.crypt_supported = []  # Supported cryptos

    # Select crypt that is supported by both sides
    # Return: Name of the crypto
    def selectCrypt(self, client_supported):
        for crypt in self.crypt_supported:
            if crypt in client_supported:
                return crypt
        return False

    # Wrap socket for crypt
    # Return: wrapped socket
    def wrapSocket(self, sock, crypt, server=False):
        if crypt == "tls-rsa":
            ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:"
            ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
            if server:
                return ssl.wrap_socket(
                    sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir,
                    certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers)
            else:
                return ssl.wrap_socket(sock, ciphers=ciphers)
        else:
            return sock

    def removeCerts(self):
        for file_name in ["cert-rsa.pem", "key-rsa.pem"]:
            file_path = "%s/%s" % (config.data_dir, file_name)
            if os.path.isfile(file_path):
                os.unlink(file_path)

    # Load and create cert files if necessary
    def loadCerts(self):
        if config.disable_encryption:
            return False

        if self.loadSslRsaCert():
            self.crypt_supported.append("tls-rsa")

    # Try to create RSA server cert + sign for connection encryption
    # Return: True on success
    def loadSslRsaCert(self):
        import subprocess

        if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir):
            return True  # Files already exist

        back = subprocess.Popen(
            "%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s/key-rsa.pem -out %s/cert-rsa.pem -nodes -config %s" % (
                self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]
            ),
            shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
        ).stdout.read().strip()
        logging.debug("Generating RSA cert and key PEM files...%s" % back)

        if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir):
            return True
        else:
            logging.error("RSA SSL cert generation failed, cert or key files do not exist.")
            return False

    # Not used yet: Missing on some platforms
    def createSslEccCert(self):
        return False
        import subprocess

        # Create ECC privatekey
        back = subprocess.Popen(
            "%s ecparam -name prime256v1 -genkey -out %s/key-ecc.pem" % (self.openssl_bin, config.data_dir),
            shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
        ).stdout.read().strip()
        logging.debug("Generating ECC privatekey PEM file...%s" % back)

        # Create ECC cert
        back = subprocess.Popen(
            "%s req -new -key %s/key-ecc.pem -x509 -nodes -out %s/cert-ecc.pem -config %s" % (
                self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]),
            shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
        ).stdout.read().strip()
        logging.debug("Generating ECC cert PEM file...%s" % back)

        if os.path.isfile("%s/cert-ecc.pem" % config.data_dir) and os.path.isfile("%s/key-ecc.pem" % config.data_dir):
            return True
        else:
            logging.error("ECC SSL cert generation failed, cert or key files do not exist.")
            return False


manager = CryptConnectionManager()
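
A client-side sketch of the manager above (the import path and peer address are assumptions):

    import socket
    from Crypt import CryptConnection

    CryptConnection.manager.loadCerts()  # Generates cert-rsa.pem/key-rsa.pem on first run
    sock = socket.create_connection(("127.0.0.1", 15441))  # Made-up peer
    if "tls-rsa" in CryptConnection.manager.crypt_supported:
        sock = CryptConnection.manager.wrapSocket(sock, "tls-rsa")  # Client side: no keyfile/certfile needed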


@ -1,36 +1,37 @@
import hashlib import hashlib
def sha1sum(file, blocksize=65536): def sha1sum(file, blocksize=65536):
if hasattr(file, "endswith"): # Its a string open it if hasattr(file, "endswith"): # Its a string open it
file = open(file, "rb") file = open(file, "rb")
hash = hashlib.sha1() hash = hashlib.sha1()
for block in iter(lambda: file.read(blocksize), ""): for block in iter(lambda: file.read(blocksize), ""):
hash.update(block) hash.update(block)
return hash.hexdigest() return hash.hexdigest()
def sha512sum(file, blocksize=65536): def sha512sum(file, blocksize=65536):
if hasattr(file, "endswith"): # Its a string open it if hasattr(file, "endswith"): # Its a string open it
file = open(file, "rb") file = open(file, "rb")
hash = hashlib.sha512() hash = hashlib.sha512()
for block in iter(lambda: file.read(blocksize), ""): for block in iter(lambda: file.read(blocksize), ""):
hash.update(block) hash.update(block)
return hash.hexdigest()[0:64] # Truncate to 256bits is good enough return hash.hexdigest()[0:64] # Truncate to 256bits is good enough
if __name__ == "__main__": if __name__ == "__main__":
import cStringIO as StringIO import cStringIO as StringIO
a = StringIO.StringIO() a = StringIO.StringIO()
a.write("hello!") a.write("hello!")
a.seek(0) a.seek(0)
print hashlib.sha1("hello!").hexdigest() print hashlib.sha1("hello!").hexdigest()
print sha1sum(a) print sha1sum(a)
import time import time
s = time.time() s = time.time()
print sha1sum(open("F:\\Temp\\bigfile")), print sha1sum(open("F:\\Temp\\bigfile")),
print time.time()-s print time.time() - s
s = time.time() s = time.time()
print sha512sum(open("F:\\Temp\\bigfile")), print sha512sum(open("F:\\Temp\\bigfile")),
print time.time()-s print time.time() - s
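
Both helpers accept a path or a file-like object; a short sketch (the file name is made up):

    from CryptHash import sha1sum, sha512sum

    print sha512sum("data/example.bin")            # Path: opened as binary internally
    print sha1sum(open("data/example.bin", "rb"))  # File-like objects work too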


@ -1,263 +1,283 @@
import sqlite3, json, time, logging, re, os import sqlite3
import json
import time
import logging
import re
import os
from DbCursor import DbCursor from DbCursor import DbCursor
class Db: class Db:
def __init__(self, schema, db_path):
self.db_path = db_path
self.db_dir = os.path.dirname(db_path)+"/"
self.schema = schema
self.schema["version"] = self.schema.get("version", 1)
self.conn = None
self.cur = None
self.log = logging.getLogger("Db:%s" % schema["db_name"])
self.table_names = None
self.collect_stats = False
self.query_stats = {}
self.db_keyvalues = {}
def __init__(self, schema, db_path):
self.db_path = db_path
self.db_dir = os.path.dirname(db_path) + "/"
self.schema = schema
self.schema["version"] = self.schema.get("version", 1)
self.conn = None
self.cur = None
self.log = logging.getLogger("Db:%s" % schema["db_name"])
self.table_names = None
self.collect_stats = False
self.query_stats = {}
self.db_keyvalues = {}
def connect(self): def connect(self):
self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version)) self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version))
if not os.path.isdir(self.db_dir): # Directory not exist yet if not os.path.isdir(self.db_dir): # Directory not exist yet
os.makedirs(self.db_dir) os.makedirs(self.db_dir)
self.log.debug("Created Db path: %s" % self.db_dir) self.log.debug("Created Db path: %s" % self.db_dir)
if not os.path.isfile(self.db_path): if not os.path.isfile(self.db_path):
self.log.debug("Db file not exist yet: %s" % self.db_path) self.log.debug("Db file not exist yet: %s" % self.db_path)
self.conn = sqlite3.connect(self.db_path) self.conn = sqlite3.connect(self.db_path)
self.conn.row_factory = sqlite3.Row self.conn.row_factory = sqlite3.Row
self.conn.isolation_level = None self.conn.isolation_level = None
self.cur = self.getCursor() self.cur = self.getCursor()
# We need more speed then security # We need more speed then security
self.cur.execute("PRAGMA journal_mode = WAL") self.cur.execute("PRAGMA journal_mode = WAL")
self.cur.execute("PRAGMA journal_mode = MEMORY") self.cur.execute("PRAGMA journal_mode = MEMORY")
self.cur.execute("PRAGMA synchronous = OFF") self.cur.execute("PRAGMA synchronous = OFF")
# Execute query using dbcursor
def execute(self, query, params=None):
if not self.conn:
self.connect()
return self.cur.execute(query, params)
# Execute query using dbcursor def close(self):
def execute(self, query, params = None): self.log.debug("Closing")
if not self.conn: self.connect() if self.cur:
return self.cur.execute(query, params) self.cur.close()
if self.conn:
self.conn.close()
# Gets a cursor object to database
# Return: Cursor class
def getCursor(self):
if not self.conn:
self.connect()
return DbCursor(self.conn, self)
def close(self): # Get the table version
self.log.debug("Closing") # Return: Table version or None if not exist
if self.cur: self.cur.close() def getTableVersion(self, table_name):
if self.conn: self.conn.close() """if not self.table_names: # Get existing table names
res = self.cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
self.table_names = [row["name"] for row in res]
if table_name not in self.table_names:
return False
else:"""
if not self.db_keyvalues: # Get db keyvalues
try:
res = self.cur.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
except sqlite3.OperationalError, err: # Table not exist
self.log.debug("Query error: %s" % err)
return False
# Gets a cursor object to database for row in res:
# Return: Cursor class self.db_keyvalues[row["key"]] = row["value"]
def getCursor(self):
if not self.conn: self.connect()
return DbCursor(self.conn, self)
return self.db_keyvalues.get("table.%s.version" % table_name, 0)
# Get the table version # Check Db tables
# Return: Table version or None if not exist # Return: <list> Changed table names
def getTableVersion(self, table_name): def checkTables(self):
"""if not self.table_names: # Get existing table names s = time.time()
res = self.cur.execute("SELECT name FROM sqlite_master WHERE type='table'") changed_tables = []
self.table_names = [row["name"] for row in res] cur = self.getCursor()
if table_name not in self.table_names:
return False
else:""" cur.execute("BEGIN")
if not self.db_keyvalues: # Get db keyvalues
try:
res = self.cur.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
except sqlite3.OperationalError, err: # Table not exist
self.log.debug("Query error: %s" % err)
return False
for row in res: # Check internal tables
self.db_keyvalues[row["key"]] = row["value"] # Check keyvalue table
changed = cur.needTable("keyvalue", [
["keyvalue_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
["key", "TEXT"],
["value", "INTEGER"],
["json_id", "INTEGER REFERENCES json (json_id)"],
], [
"CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
], version=self.schema["version"])
if changed:
changed_tables.append("keyvalue")
return self.db_keyvalues.get("table.%s.version" % table_name, 0) # Check json table
if self.schema["version"] == 1:
changed = cur.needTable("json", [
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
["path", "VARCHAR(255)"]
], [
"CREATE UNIQUE INDEX path ON json(path)"
], version=self.schema["version"])
else:
changed = cur.needTable("json", [
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
["directory", "VARCHAR(255)"],
["file_name", "VARCHAR(255)"]
], [
"CREATE UNIQUE INDEX path ON json(directory, file_name)"
], version=self.schema["version"])
if changed:
changed_tables.append("json")
# Check schema tables
for table_name, table_settings in self.schema["tables"].items():
changed = cur.needTable(
table_name, table_settings["cols"],
table_settings["indexes"], version=table_settings["schema_changed"]
)
if changed:
changed_tables.append(table_name)
cur.execute("COMMIT")
self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables))
# Check Db tables return changed_tables
# Return: <list> Changed table names
def checkTables(self):
s = time.time()
changed_tables = []
cur = self.getCursor()
cur.execute("BEGIN") # Load json file to db
# Return: True if matched
def loadJson(self, file_path, file=None, cur=None):
if not file_path.startswith(self.db_dir):
return False # Not from the db dir: Skipping
relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path realative to db file
# Check if filename matches any of mappings in schema
matched_maps = []
for match, map_settings in self.schema["maps"].items():
if re.match(match, relative_path):
matched_maps.append(map_settings)
# Check internal tables # No match found for the file
# Check keyvalue table if not matched_maps:
changed = cur.needTable("keyvalue", [ return False
["keyvalue_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
["key", "TEXT"],
["value", "INTEGER"],
["json_id", "INTEGER REFERENCES json (json_id)"],
],[
"CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
], version=self.schema["version"])
if changed: changed_tables.append("keyvalue")
# Check json table # Load the json file
if self.schema["version"] == 1: if not file:
changed = cur.needTable("json", [ file = open(file_path)
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], data = json.load(file)
["path", "VARCHAR(255)"]
], [
"CREATE UNIQUE INDEX path ON json(path)"
], version=self.schema["version"])
else:
changed = cur.needTable("json", [
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
["directory", "VARCHAR(255)"],
["file_name", "VARCHAR(255)"]
], [
"CREATE UNIQUE INDEX path ON json(directory, file_name)"
], version=self.schema["version"])
if changed: changed_tables.append("json")
# Check schema tables # No cursor specificed
for table_name, table_settings in self.schema["tables"].items(): if not cur:
changed = cur.needTable(table_name, table_settings["cols"], table_settings["indexes"], version=table_settings["schema_changed"]) cur = self.getCursor()
if changed: changed_tables.append(table_name) cur.execute("BEGIN")
cur.logging = False
commit_after_done = True
else:
commit_after_done = False
cur.execute("COMMIT") # Row for current json file
self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time()-s, changed_tables)) json_row = cur.getJsonRow(relative_path)
return changed_tables # Check matched mappings in schema
for map in matched_maps:
# Insert non-relational key values
if map.get("to_keyvalue"):
# Get current values
res = cur.execute("SELECT * FROM keyvalue WHERE json_id = ?", (json_row["json_id"],))
current_keyvalue = {}
current_keyvalue_id = {}
for row in res:
current_keyvalue[row["key"]] = row["value"]
current_keyvalue_id[row["key"]] = row["keyvalue_id"]
for key in map["to_keyvalue"]:
if key not in current_keyvalue: # Keyvalue not exist yet in the db
cur.execute(
"INSERT INTO keyvalue ?",
{"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
)
elif data.get(key) != current_keyvalue[key]: # Keyvalue different value
cur.execute(
"UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?",
(data.get(key), current_keyvalue_id[key])
)
# Load json file to db """
# Return: True if matched for key in map.get("to_keyvalue", []):
def loadJson(self, file_path, file = None, cur = None): cur.execute("INSERT OR REPLACE INTO keyvalue ?",
if not file_path.startswith(self.db_dir): return False # Not from the db dir: Skipping {"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path realative to db file )
# Check if filename matches any of mappings in schema """
matched_maps = []
for match, map_settings in self.schema["maps"].items():
if re.match(match, relative_path):
matched_maps.append(map_settings)
# No match found for the file # Insert data to tables
if not matched_maps: return False for table_settings in map.get("to_table", []):
if isinstance(table_settings, dict): # Custom settings
table_name = table_settings["table"] # Table name to insert datas
node = table_settings.get("node", table_name) # Node keyname in data json file
key_col = table_settings.get("key_col") # Map dict key as this col
val_col = table_settings.get("val_col") # Map dict value as this col
import_cols = table_settings.get("import_cols")
replaces = table_settings.get("replaces")
else: # Simple settings
table_name = table_settings
node = table_settings
key_col = None
val_col = None
import_cols = None
replaces = None
# Load the json file cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
if not file: file = open(file_path)
data = json.load(file)
# No cursor specificed if node not in data:
if not cur: continue
cur = self.getCursor()
cur.execute("BEGIN")
cur.logging = False
commit_after_done = True
else:
commit_after_done = False
# Row for current json file if key_col: # Map as dict
json_row = cur.getJsonRow(relative_path) for key, val in data[node].iteritems():
if val_col: # Single value
cur.execute(
"INSERT OR REPLACE INTO %s ?" % table_name,
{key_col: key, val_col: val, "json_id": json_row["json_id"]}
)
else: # Multi value
if isinstance(val, dict): # Single row
row = val
if import_cols:
row = {key: row[key] for key in import_cols} # Filter row by import_cols
row[key_col] = key
# Replace in value if necessary
if replaces:
for replace_key, replace in replaces.iteritems():
if replace_key in row:
for replace_from, replace_to in replace.iteritems():
row[replace_key] = row[replace_key].replace(replace_from, replace_to)
# Check matched mappings in schema row["json_id"] = json_row["json_id"]
for map in matched_maps: cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
# Insert non-relational key values else: # Multi row
if map.get("to_keyvalue"): for row in val:
# Get current values row[key_col] = key
res = cur.execute("SELECT * FROM keyvalue WHERE json_id = ?", (json_row["json_id"],)) row["json_id"] = json_row["json_id"]
current_keyvalue = {} cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
current_keyvalue_id = {} else: # Map as list
for row in res: for row in data[node]:
current_keyvalue[row["key"]] = row["value"] row["json_id"] = json_row["json_id"]
current_keyvalue_id[row["key"]] = row["keyvalue_id"] cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
for key in map["to_keyvalue"]: if commit_after_done:
if key not in current_keyvalue: # Keyvalue not exist yet in the db cur.execute("COMMIT")
cur.execute("INSERT INTO keyvalue ?", return True
{"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
)
elif data.get(key) != current_keyvalue[key]: # Keyvalue different value
cur.execute("UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?", (data.get(key), current_keyvalue_id[key]))
"""for key in map.get("to_keyvalue", []):
cur.execute("INSERT OR REPLACE INTO keyvalue ?",
{"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
)
"""
# Insert data to tables
for table_settings in map.get("to_table", []):
if isinstance(table_settings, dict): # Custom settings
table_name = table_settings["table"] # Table name to insert datas
node = table_settings.get("node", table_name) # Node keyname in data json file
key_col = table_settings.get("key_col") # Map dict key as this col
val_col = table_settings.get("val_col") # Map dict value as this col
import_cols = table_settings.get("import_cols")
replaces = table_settings.get("replaces")
else: # Simple settings
table_name = table_settings
node = table_settings
key_col = None
val_col = None
import_cols = None
replaces = None
cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
if node not in data: continue
table_schema = self.schema["tables"][table_name]
if key_col: # Map as dict
for key, val in data[node].iteritems():
if val_col: # Single value
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name,
{ key_col: key, val_col: val, "json_id": json_row["json_id"] }
)
else: # Multi value
if isinstance(val, dict): # Single row
row = val
if import_cols: row = { key: row[key] for key in import_cols } # Filter row by import_cols
row[key_col] = key
# Replace in value if necessary
if replaces:
for replace_key, replace in replaces.iteritems():
if replace_key in row:
for replace_from, replace_to in replace.iteritems():
row[replace_key] = row[replace_key].replace(replace_from, replace_to)
row["json_id"] = json_row["json_id"]
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
else: # Multi row
for row in val:
row[key_col] = key
row["json_id"] = json_row["json_id"]
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
else: # Map as list
for row in data[node]:
row["json_id"] = json_row["json_id"]
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
if commit_after_done: cur.execute("COMMIT")
return True
if __name__ == "__main__": if __name__ == "__main__":
s = time.time() s = time.time()
console_log = logging.StreamHandler() console_log = logging.StreamHandler()
logging.getLogger('').setLevel(logging.DEBUG) logging.getLogger('').setLevel(logging.DEBUG)
logging.getLogger('').addHandler(console_log) logging.getLogger('').addHandler(console_log)
console_log.setLevel(logging.DEBUG) console_log.setLevel(logging.DEBUG)
dbjson = DbJson(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db") dbjson = Db(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db")
dbjson.collect_stats = True dbjson.collect_stats = True
dbjson.checkTables() dbjson.checkTables()
cur = dbjson.getCursor() cur = dbjson.getCursor()
cur.execute("BEGIN") cur.execute("BEGIN")
cur.logging = False cur.logging = False
dbjson.loadJson("data/users/content.json", cur=cur) dbjson.loadJson("data/users/content.json", cur=cur)
for user_dir in os.listdir("data/users"): for user_dir in os.listdir("data/users"):
if os.path.isdir("data/users/%s" % user_dir): if os.path.isdir("data/users/%s" % user_dir):
dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur) dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur)
#print ".", # print ".",
cur.logging = True cur.logging = True
cur.execute("COMMIT") cur.execute("COMMIT")
print "Done in %.3fs" % (time.time()-s) print "Done in %.3fs" % (time.time() - s)
for query, stats in sorted(dbjson.query_stats.items()): for query, stats in sorted(dbjson.query_stats.items()):
print "-", query, stats print "-", query, stats


@ -1,115 +1,118 @@
import time, re import time
import re
# Special sqlite cursor # Special sqlite cursor
class DbCursor: class DbCursor:
def __init__(self, conn, db):
self.conn = conn
self.db = db
self.cursor = conn.cursor()
self.logging = True
def __init__(self, conn, db):
self.conn = conn
self.db = db
self.cursor = conn.cursor()
self.logging = False
def execute(self, query, params=None): def execute(self, query, params=None):
if isinstance(params, dict): # Make easier select and insert by allowing dict params if isinstance(params, dict): # Make easier select and insert by allowing dict params
if query.startswith("SELECT") or query.startswith("DELETE"): # Convert param dict to SELECT * FROM table WHERE key = ?, key2 = ? format if query.startswith("SELECT") or query.startswith("DELETE"):
wheres = "AND ".join([key+" = ?" for key in params]) # Convert param dict to SELECT * FROM table WHERE key = ?, key2 = ? format
query = query.replace("?", wheres) wheres = "AND ".join([key + " = ?" for key in params])
params = params.values() query = query.replace("?", wheres)
else: # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format params = params.values()
keys = ", ".join(params.keys()) else:
values = ", ".join(['?' for key in params.keys()]) # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
query = query.replace("?", "(%s) VALUES (%s)" % (keys, values)) keys = ", ".join(params.keys())
params = tuple(params.values()) values = ", ".join(['?' for key in params.keys()])
query = query.replace("?", "(%s) VALUES (%s)" % (keys, values))
params = tuple(params.values())
s = time.time() s = time.time()
# if query == "COMMIT": self.logging = True # Turn logging back on transaction commit # if query == "COMMIT": self.logging = True # Turn logging back on transaction commit
if params: # Query has parameters if params: # Query has parameters
res = self.cursor.execute(query, params) res = self.cursor.execute(query, params)
if self.logging: if self.logging:
self.db.log.debug((query.replace("?", "%s") % params)+" (Done in %.4f)" % (time.time()-s)) self.db.log.debug((query.replace("?", "%s") % params) + " (Done in %.4f)" % (time.time() - s))
else: else:
res = self.cursor.execute(query) res = self.cursor.execute(query)
if self.logging: self.db.log.debug(query+" (Done in %.4f)" % (time.time()-s)) if self.logging:
self.db.log.debug(query + " (Done in %.4f)" % (time.time() - s))
# Log query stats # Log query stats
if self.db.collect_stats: if self.db.collect_stats:
if query not in self.db.query_stats: if query not in self.db.query_stats:
self.db.query_stats[query] = {"call": 0, "time": 0.0} self.db.query_stats[query] = {"call": 0, "time": 0.0}
self.db.query_stats[query]["call"] += 1 self.db.query_stats[query]["call"] += 1
self.db.query_stats[query]["time"] += time.time()-s self.db.query_stats[query]["time"] += time.time() - s
# if query == "BEGIN": self.logging = False # Turn logging off on transaction commit # if query == "BEGIN": self.logging = False # Turn logging off on transaction commit
return res return res
# Create new table
# Return: True on success
def createTable(self, table, cols):
# TODO: Check current structure
"""table_changed = False
res = c.execute("PRAGMA table_info(%s)" % table)
if res:
for row in res:
print row["name"], row["type"], cols[row["name"]]
print row
else:
table_changed = True
# Create new table if table_changed: # Table structure changed, drop and create again"""
# Return: True on success self.execute("DROP TABLE IF EXISTS %s" % table)
def createTable(self, table, cols): col_definitions = []
# TODO: Check current structure for col_name, col_type in cols:
"""table_changed = False col_definitions.append("%s %s" % (col_name, col_type))
res = c.execute("PRAGMA table_info(%s)" % table)
if res:
for row in res:
print row["name"], row["type"], cols[row["name"]]
print row
else:
table_changed = True
if table_changed: # Table structure changed, drop and create again""" self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions)))
self.execute("DROP TABLE IF EXISTS %s" % table) return True
col_definitions = []
for col_name, col_type in cols:
col_definitions.append("%s %s" % (col_name, col_type))
self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions))) # Create indexes on table
return True # Return: True on success
def createIndexes(self, table, indexes):
# indexes.append("CREATE INDEX %s_id ON %s(%s_id)" % (table, table, table)) # Primary key index
for index in indexes:
self.execute(index)
# Create table if not exist
# Return: True if updated
def needTable(self, table, cols, indexes=None, version=1):
current_version = self.db.getTableVersion(table)
if int(current_version) < int(version): # Table need update or not extis
self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version))
self.createTable(table, cols)
if indexes:
self.createIndexes(table, indexes)
self.execute(
"INSERT OR REPLACE INTO keyvalue ?",
{"json_id": 0, "key": "table.%s.version" % table, "value": version}
)
return True
else: # Not changed
return False
# Get or create a row for json file
# Return: The database row
def getJsonRow(self, file_path):
directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups()
if self.db.schema["version"] == 1:
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
row = res.fetchone()
if not row: # No row yet, create it
self.execute("INSERT INTO json ?", {"path": file_path})
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
row = res.fetchone()
else:
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
row = res.fetchone()
if not row: # No row yet, create it
self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name})
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
row = res.fetchone()
return row
# Create indexes on table def close(self):
# Return: True on success self.cursor.close()
def createIndexes(self, table, indexes):
# indexes.append("CREATE INDEX %s_id ON %s(%s_id)" % (table, table, table)) # Primary key index
for index in indexes:
self.execute(index)
# Create table if not exist
# Return: True if updated
def needTable(self, table, cols, indexes=None, version=1):
current_version = self.db.getTableVersion(table)
if int(current_version) < int(version): # Table need update or not extis
self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version))
self.createTable(table, cols)
if indexes: self.createIndexes(table, indexes)
self.execute("INSERT OR REPLACE INTO keyvalue ?",
{"json_id": 0, "key": "table.%s.version" % table, "value": version}
)
return True
else: # Not changed
return False
# Get or create a row for json file
# Return: The database row
def getJsonRow(self, file_path):
directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups()
if self.db.schema["version"] == 1:
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
row = res.fetchone()
if not row: # No row yet, create it
self.execute("INSERT INTO json ?", {"path": file_path})
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
row = res.fetchone()
else:
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
row = res.fetchone()
if not row: # No row yet, create it
self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name})
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
row = res.fetchone()
return row
def close(self):
self.cursor.close()
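
What the dict-param convention above expands to (hypothetical table and values):

    cur.execute("INSERT INTO json ?", {"path": "data.json"})
    # -> INSERT INTO json (path) VALUES (?)  with params ("data.json",)
    cur.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": "data.json"})
    # -> SELECT * FROM json WHERE path = ? LIMIT 1  with params ["data.json"]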


@ -1,47 +1,51 @@
import sys, os, traceback import sys
import os
import traceback
# Non fatal exception # Non fatal exception
class Notify(Exception): class Notify(Exception):
def __init__(self, message): def __init__(self, message):
self.message = message self.message = message
def __str__(self): def __str__(self):
return self.message return self.message
def formatException(err=None): def formatException(err=None):
if type(err) == Notify: return err if type(err) == Notify:
exc_type, exc_obj, exc_tb = sys.exc_info() return err
if not err: err = exc_obj.message exc_type, exc_obj, exc_tb = sys.exc_info()
tb = [] if not err:
for frame in traceback.extract_tb(exc_tb): err = exc_obj.message
path, line, function, text = frame tb = []
file = os.path.split(path)[1] for frame in traceback.extract_tb(exc_tb):
tb.append("%s line %s" % (file, line)) path, line, function, text = frame
return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb)) file = os.path.split(path)[1]
tb.append("%s line %s" % (file, line))
return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb))
if __name__ == "__main__": if __name__ == "__main__":
try:
print 1 / 0
except Exception, err:
print type(err).__name__
print "1/0 error: %s" % formatException(err)
try: def loadJson():
print 1/0 json.loads("Errr")
except Exception, err:
print type(err).__name__
print "1/0 error: %s" % formatException(err)
def loadJson(): import json
json.loads("Errr") try:
loadJson()
except Exception, err:
print err
print "Json load error: %s" % formatException(err)
import json try:
try: raise Notify("nothing...")
loadJson() except Exception, err:
except Exception, err: print "Notify: %s" % formatException(err)
print err
print "Json load error: %s" % formatException(err)
try: loadJson()
raise Notify("nothing...")
except Exception, err:
print "Notify: %s" % formatException(err)
loadJson()
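
Typical formatException output, sketched (the exact frames depend on the caller):

    import Debug  # Assumed import

    try:
        open("no-such-file.txt")
    except Exception, err:
        print Debug.formatException(err)
    # e.g. "IOError: [Errno 2] No such file or directory: 'no-such-file.txt' in test.py line 4"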


@ -1,55 +1,64 @@
import gevent, sys, logging import sys
import logging
import gevent
from Config import config from Config import config
last_error = None last_error = None
# Store last error, ignore notify, allow manual error logging # Store last error, ignore notify, allow manual error logging
def handleError(*args): def handleError(*args):
global last_error global last_error
if not args: # Manual called if not args: # Manual called
args = sys.exc_info() args = sys.exc_info()
silent = True silent = True
else: else:
silent = False silent = False
if args[0].__name__ != "Notify": last_error = args if args[0].__name__ != "Notify":
if not silent and args[0].__name__ != "Notify": last_error = args
logging.exception("Unhandled exception") if not silent and args[0].__name__ != "Notify":
sys.__excepthook__(*args) logging.exception("Unhandled exception")
sys.__excepthook__(*args)
# Ignore notify errors # Ignore notify errors
def handleErrorNotify(*args): def handleErrorNotify(*args):
if args[0].__name__ != "Notify": if args[0].__name__ != "Notify":
logging.exception("Unhandled exception") logging.exception("Unhandled exception")
sys.__excepthook__(*args) sys.__excepthook__(*args)
OriginalGreenlet = gevent.Greenlet OriginalGreenlet = gevent.Greenlet
class ErrorhookedGreenlet(OriginalGreenlet): class ErrorhookedGreenlet(OriginalGreenlet):
def _report_error(self, exc_info): def _report_error(self, exc_info):
sys.excepthook(exc_info[0], exc_info[1], exc_info[2]) sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
if config.debug: if config.debug:
sys.excepthook = handleError sys.excepthook = handleError
else: else:
sys.excepthook = handleErrorNotify sys.excepthook = handleErrorNotify
gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
reload(gevent) reload(gevent)
if __name__ == "__main__": if __name__ == "__main__":
import time import time
from gevent import monkey; monkey.patch_all(thread=False, ssl=False) from gevent import monkey
import Debug monkey.patch_all(thread=False, ssl=False)
def sleeper(): import Debug
print "started"
time.sleep(3)
print "stopped"
thread1 = gevent.spawn(sleeper)
thread2 = gevent.spawn(sleeper)
time.sleep(1)
print "killing..."
thread1.throw(Exception("Hello"))
thread2.throw(Debug.Notify("Throw"))
print "killed"
def sleeper():
print "started"
time.sleep(3)
print "stopped"
thread1 = gevent.spawn(sleeper)
thread2 = gevent.spawn(sleeper)
time.sleep(1)
print "killing..."
thread1.throw(Exception("Hello"))
thread2.throw(Debug.Notify("Throw"))
print "killed"


@@ -1,81 +1,91 @@
import os
import subprocess
import re
import logging
import time

from Config import config


# Find files with extension in path
def findfiles(path, find_ext):
    for root, dirs, files in os.walk(path, topdown=False):
        for file in sorted(files):
            file_path = root + "/" + file
            file_ext = file.split(".")[-1]
            if file_ext in find_ext and not file.startswith("all."):
                yield file_path.replace("\\", "/")


# Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features
def merge(merged_path):
    merge_dir = os.path.dirname(merged_path)
    s = time.time()
    ext = merged_path.split(".")[-1]
    if ext == "js":  # If merging .js find .coffee too
        find_ext = ["js", "coffee"]
    else:
        find_ext = [ext]

    # If exist check the other files modification date
    if os.path.isfile(merged_path):
        merged_mtime = os.path.getmtime(merged_path)
    else:
        merged_mtime = 0

    changed = {}
    for file_path in findfiles(merge_dir, find_ext):
        if os.path.getmtime(file_path) > merged_mtime:
            changed[file_path] = True
    if not changed:
        return  # Assets not changed, nothing to do

    if os.path.isfile(merged_path):  # Find old parts to avoid unnecessary recompile
        merged_old = open(merged_path, "rb").read().decode("utf8")
        old_parts = {}
        for match in re.findall("(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
            old_parts[match[1]] = match[2].strip("\n\r")

    # Merge files
    parts = []
    s_total = time.time()
    for file_path in findfiles(merge_dir, find_ext):
        parts.append("\n\n/* ---- %s ---- */\n\n" % file_path)
        if file_path.endswith(".coffee"):  # Compile coffee script
            if file_path in changed or file_path not in old_parts:  # Only recompile if changed or it's not compiled before
                if not config.coffeescript_compiler:
                    logging.error("No coffeescript compiler defined, skipping compiling %s" % merged_path)
                    return False  # No coffeescript compiler, skip this file
                command = config.coffeescript_compiler % os.path.join(*file_path.split("/"))  # Fix os path separator
                s = time.time()
                compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
                out = compiler.stdout.read().decode("utf8")
                logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
                if out and out.startswith("("):
                    parts.append(out)
                else:
                    error = out
                    logging.error("%s Compile error: %s" % (file_path, error))
                    parts.append(
                        "alert('%s compile error: %s');" %
                        (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n"))
                    )
            else:  # Not changed use the old_part
                parts.append(old_parts[file_path])
        else:  # Add to parts
            parts.append(open(file_path).read().decode("utf8"))

    merged = u"\n".join(parts)
    if ext == "css":  # Vendor prefix css
        from lib.cssvendor import cssvendor
        merged = cssvendor.prefix(merged)
    merged = merged.replace("\r", "")
    open(merged_path, "wb").write(merged.encode("utf8"))
    logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))


if __name__ == "__main__":
    logging.getLogger().setLevel(logging.DEBUG)
    os.chdir("..")
    config.coffeescript_compiler = r'type "%s" | tools\coffee-node\bin\node.exe tools\coffee-node\bin\coffee --no-header -s -p'
    merge("data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/js/all.js")


@@ -1,42 +1,44 @@
import logging
import time
import threading

from Config import config

if config.debug:  # Only load pyfilesystem if using debug mode
    try:
        from fs.osfs import OSFS
        pyfilesystem = OSFS("src")
        pyfilesystem_plugins = OSFS("plugins")
        logging.debug("Pyfilesystem detected, source code autoreload enabled")
    except Exception, err:
        pyfilesystem = False
else:
    pyfilesystem = False


class DebugReloader:

    def __init__(self, callback, directory="/"):
        self.last_chaged = 0
        if pyfilesystem:
            self.directory = directory
            self.callback = callback
            logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback))
            thread = threading.Thread(target=self.addWatcher)
            thread.daemon = True
            thread.start()

    def addWatcher(self, recursive=True):
        try:
            time.sleep(1)  # Wait for .pyc compiles
            pyfilesystem.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive)
            pyfilesystem_plugins.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive)
        except Exception, err:
            print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err

    def changed(self, evt):
        if not evt.path or "%s/" % config.data_dir in evt.path or evt.path.endswith("pyc") or time.time() - self.last_chaged < 1:
            return False  # Ignore *.pyc changes and no reload within 1 sec
        time.sleep(0.1)  # Wait for lock release
        self.callback()
        self.last_chaged = time.time()
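Usage is construct-and-forget: the watcher thread is a daemon, so it dies with the process. A hypothetical caller wiring up a reload callback:

# Hypothetical usage: run a callback whenever a watched source file changes
def onSourceChanged():
    print "Source changed, reloading..."

DebugReloader(onSourceChanged)  # Watches "/" inside the src and plugins filesystems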


@@ -11,7 +11,8 @@ from Debug import Debug
from Config import config
from util import RateLimit, StreamingMsgpack

FILE_BUFF = 1024 * 512


# Request from me
class FileRequest(object):

@@ -52,7 +53,7 @@ class FileRequest(object):
            self.actionGetFile(params)
        elif cmd == "update":
            event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
            if not RateLimit.isAllowed(event):  # There was already an update for this file in the last 10 seconds
                self.response({"ok": "File update queued"})
            # If called more than once within 10 sec only keep the last update
            RateLimit.callAsync(event, 10, self.actionUpdate, params)

@@ -69,72 +70,86 @@ class FileRequest(object):
    # Update a site file request
    def actionUpdate(self, params):
        site = self.sites.get(params["site"])
        if not site or not site.settings["serving"]:  # Site unknown or not serving
            self.response({"error": "Unknown site"})
            return False
        if site.settings["own"] and params["inner_path"].endswith("content.json"):
            self.log.debug(
                "Someone trying to push a file to own site %s, reload local %s first" %
                (site.address, params["inner_path"])
            )
            changed = site.content_manager.loadContent(params["inner_path"], add_bad_files=False)
            if changed:  # Content.json changed locally
                site.settings["size"] = site.content_manager.getTotalSize()  # Update site size
        buff = StringIO(params["body"])
        valid = site.content_manager.verifyFile(params["inner_path"], buff)
        if valid is True:  # Valid and changed
            self.log.info("Update for %s looks valid, saving..." % params["inner_path"])
            buff.seek(0)
            site.storage.write(params["inner_path"], buff)

            site.onFileDone(params["inner_path"])  # Trigger filedone

            if params["inner_path"].endswith("content.json"):  # Download every changed file from peer
                peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True)  # Add or get peer
                # On complete publish to other peers
                site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"])

                # Load new content file and download changed files in new thread
                gevent.spawn(
                    lambda: site.downloadContent(params["inner_path"], peer=peer)
                )

            self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})

        elif valid is None:  # Not changed
            peer = site.addPeer(*params["peer"], return_peer=True)  # Add or get peer
            if peer:
                self.log.debug(
                    "Same version, adding new peer for locked files: %s, tasks: %s" %
                    (peer.key, len(site.worker_manager.tasks))
                )
                for task in site.worker_manager.tasks:  # New peer add to every ongoing task
                    if task["peers"]:
                        # Download file from this peer too if it's peer locked
                        site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)

            self.response({"ok": "File not changed"})

        else:  # Invalid sign or sha1 hash
            self.log.debug("Update for %s is invalid" % params["inner_path"])
            self.response({"error": "File invalid"})

    # Send file content request
    def actionGetFile(self, params):
        site = self.sites.get(params["site"])
        if not site or not site.settings["serving"]:  # Site unknown or not serving
            self.response({"error": "Unknown site"})
            return False
        try:
            file_path = site.storage.getPath(params["inner_path"])
            if config.debug_socket:
                self.log.debug("Opening file: %s" % file_path)
            with StreamingMsgpack.FilePart(file_path, "rb") as file:
                file.seek(params["location"])
                file.read_bytes = FILE_BUFF
                back = {
                    "body": file,
                    "size": os.fstat(file.fileno()).st_size,
                    "location": min(file.tell() + FILE_BUFF, os.fstat(file.fileno()).st_size)
                }
                if config.debug_socket:
                    self.log.debug(
                        "Sending file %s from position %s to %s" %
                        (file_path, params["location"], back["location"])
                    )
                self.response(back, streaming=True)
                if config.debug_socket:
                    self.log.debug("File %s sent" % file_path)

            # Add peer to site if not added before
            connected_peer = site.addPeer(self.connection.ip, self.connection.port)
            if connected_peer:  # Just added
                connected_peer.connect(self.connection)  # Assign current connection to peer
        except Exception, err:

@@ -145,7 +160,7 @@ class FileRequest(object):
    # Peer exchange request
    def actionPex(self, params):
        site = self.sites.get(params["site"])
        if not site or not site.settings["serving"]:  # Site unknown or not serving
            self.response({"error": "Unknown site"})
            return False

@@ -156,10 +171,11 @@ class FileRequest(object):
            added += 1
            connected_peer.connect(self.connection)  # Assign current connection to peer

        for peer in params["peers"]:  # Add sent peers to site
            address = self.unpackAddress(peer)
            got_peer_keys.append("%s:%s" % address)
            if site.addPeer(*address):
                added += 1
        # Send back peers that are not in the sent list and connectable (not port 0)
        packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
        if added:

@@ -170,12 +186,14 @@ class FileRequest(object):
    # Get modified content.json files since
    def actionListModified(self, params):
        site = self.sites.get(params["site"])
        if not site or not site.settings["serving"]:  # Site unknown or not serving
            self.response({"error": "Unknown site"})
            return False
        modified_files = {
            inner_path: content["modified"]
            for inner_path, content in site.content_manager.contents.iteritems()
            if content["modified"] > params["since"]
        }

        # Add peer to site if not added before
        connected_peer = site.addPeer(self.connection.ip, self.connection.port)
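Each handler above reads its arguments out of a decoded msgpack dict. An illustrative "update" request, matching how params is consumed by actionUpdate and how FileServer.handleRequest (below) routes it; all concrete values are hypothetical:

message = {
    "cmd": "update",
    "req_id": 1,
    "params": {
        "site": "12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH",  # target site address
        "inner_path": "content.json",                  # file being pushed
        "body": "{...signed content.json bytes...}",   # new file content, verified before saving
        "peer": ("1.2.3.4", 15441),                    # sender, used when valid is None
    },
}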


@@ -1,5 +1,10 @@
import logging
import urllib2
import re
import time

import gevent

from Config import config
from FileRequest import FileRequest
from Site import SiteManager

@@ -9,216 +14,216 @@ from util import UpnpPunch


class FileServer(ConnectionServer):

    def __init__(self):
        ConnectionServer.__init__(self, config.fileserver_ip, config.fileserver_port, self.handleRequest)
        if config.ip_external:  # Ip external defined in arguments
            self.port_opened = True
            SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
        else:
            self.port_opened = None  # Is file server opened on router
        self.sites = SiteManager.site_manager.list()

    # Handle request to fileserver
    def handleRequest(self, connection, message):
        if "params" in message:
            self.log.debug(
                "FileRequest: %s %s %s %s" %
                (str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path"))
            )
        else:
            self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"]))
        req = FileRequest(self, connection)
        req.route(message["cmd"], message.get("req_id"), message.get("params"))

    # Reload the FileRequest class to prevent restarts in debug mode
    def reload(self):
        global FileRequest
        import imp
        FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest

    # Try to open the port using upnp
    def openport(self, port=None, check=True):
        if not port:
            port = self.port
        if self.port_opened:
            return True  # Port already opened
        if check:  # Check first if it's already opened
            if self.testOpenport(port)["result"] is True:
                return True  # Port already opened

        self.log.info("Trying to open port using UpnpPunch...")
        try:
            upnp_punch = UpnpPunch.open_port(self.port, 'ZeroNet')
            upnp_punch = True
        except Exception, err:
            self.log.error("UpnpPunch run error: %s" % Debug.formatException(err))
            upnp_punch = False

        if upnp_punch and self.testOpenport(port)["result"] is True:
            return True

        self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port)
        return False

    # Test if the port is open
    def testOpenport(self, port=None):
        time.sleep(1)  # Wait for port open
        if not port:
            port = self.port
        back = self.testOpenportPortchecker(port)
        if back["result"] is True:  # Successful port check
            return back
        else:  # Alternative port checker
            return self.testOpenportCanyouseeme(port)

    def testOpenportPortchecker(self, port=None):
        self.log.info("Checking port %s using portchecker.co..." % port)
        try:
            data = urllib2.urlopen("http://portchecker.co/check", "port=%s" % port, timeout=20.0).read()
            message = re.match('.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1)
            message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip())  # Strip html tags
        except Exception, err:
            message = "Error: %s" % Debug.formatException(err)
            data = ""
        if "closed" in message or "Error" in message:
            self.log.info("[BAD :(] Port closed: %s" % message)
            if port == self.port:
                self.port_opened = False  # Self port, update port_opened status
                match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL)  # Try find my external ip in message
                if match:  # Found my ip in message
                    config.ip_external = match.group(1)
                    SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
                else:
                    config.ip_external = False
            return {"result": False, "message": message}
        else:
            self.log.info("[OK :)] Port open: %s" % message)
            if port == self.port:  # Self port, update port_opened status
                self.port_opened = True
                match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL)  # Try find my external ip in message
                if match:  # Found my ip in message
                    config.ip_external = match.group(1)
                    SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
                else:
                    config.ip_external = False
            return {"result": True, "message": message}

    def testOpenportCanyouseeme(self, port=None):
        self.log.info("Checking port %s using canyouseeme.org..." % port)
        try:
            data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
            message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
            message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " "))  # Strip html tags
        except Exception, err:
            message = "Error: %s" % Debug.formatException(err)
        if "Error" in message:
            self.log.info("[BAD :(] Port closed: %s" % message)
            if port == self.port:
                self.port_opened = False  # Self port, update port_opened status
                match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message)  # Try find my external ip in message
                if match:  # Found my ip in message
                    config.ip_external = match.group(1)
                    SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
                else:
                    config.ip_external = False
            return {"result": False, "message": message}
        else:
            self.log.info("[OK :)] Port open: %s" % message)
            if port == self.port:  # Self port, update port_opened status
                self.port_opened = True
                match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message)  # Try find my external ip in message
                if match:  # Found my ip in message
                    config.ip_external = match.group(1)
                    SiteManager.peer_blacklist.append((config.ip_external, self.port))  # Add myself to peer blacklist
                else:
                    config.ip_external = False
            return {"result": True, "message": message}

    # Set external ip without testing
    def setIpExternal(self, ip_external):
        logging.info("Setting external ip without testing: %s..." % ip_external)
        config.ip_external = ip_external
        self.port_opened = True

    # Check site file integrity
    def checkSite(self, site):
        if site.settings["serving"]:
            site.announce()  # Announce site to tracker
            site.update()  # Update site's content.json and download changed files
            if self.port_opened is False:  # In passive mode keep 5 active peer connection to get the updates
                site.needConnections()

    # Check sites integrity
    def checkSites(self):
        if self.port_opened is None:  # Test and open port if not tested yet
            self.openport()

        self.log.debug("Checking sites integrity..")
        for address, site in self.sites.items():  # Check sites integrity
            gevent.spawn(self.checkSite, site)  # Check in new thread
            time.sleep(2)  # Prevent too quick request
        site = None

    # Announce sites every 20 min
    def announceSites(self):
        import gc
        while 1:
            time.sleep(20 * 60)  # Announce sites every 20 min
            for address, site in self.sites.items():
                if site.settings["serving"]:
                    site.announce()  # Announce site to tracker

                    # Reset bad file retry counter
                    for inner_path in site.bad_files:
                        site.bad_files[inner_path] = 0

                    # Retry failed files
                    if site.bad_files:
                        site.retryBadFiles()

                    # In passive mode keep 5 active peer connection to get the updates
                    if self.port_opened is False:
                        site.needConnections()

                time.sleep(2)  # Prevent too quick request

            site = None
            gc.collect()  # Implicit garbage collection

    # Detects if computer back from wakeup
    def wakeupWatcher(self):
        last_time = time.time()
        while 1:
            time.sleep(30)
            if time.time() - last_time > 60:  # If taken more than 60 seconds then the computer was in sleep mode
                self.log.info(
                    "Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." %
                    (last_time, time.time(), time.time() - last_time)
                )
                self.port_opened = None  # Check if we still have the open port on router
                self.checkSites()
            last_time = time.time()

    # Bind and start serving sites
    def start(self, check_sites=True):
        self.log = logging.getLogger("FileServer")

        if config.debug:
            # Auto reload FileRequest on change
            from Debug import DebugReloader
            DebugReloader(self.reload)

        if check_sites:  # Open port, Update sites, Check files integrity
            gevent.spawn(self.checkSites)

        thread_announce_sites = gevent.spawn(self.announceSites)
        thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher)

        ConnectionServer.start(self)
        # thread_wakeup_watcher.kill(exception=Debug.Notify("Stopping FileServer"))
        # thread_announce_sites.kill(exception=Debug.Notify("Stopping FileServer"))
        self.log.debug("Stopped.")


@@ -1,8 +1,14 @@
import logging
import gevent
import time
import sys
import socket
import struct

from cStringIO import StringIO

from Config import config
from Debug import Debug


# Communicate remote peers
class Peer(object):
    __slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response",

@@ -49,7 +55,8 @@ class Peer(object):
            self.connection = self.connection_server.getConnection(self.ip, self.port)
        except Exception, err:
            self.onConnectionError()
            self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
                     (Debug.formatException(err), self.connection_error, self.hash_failed))
            self.connection = None

    # Check if we have connection to peer
@@ -57,7 +64,7 @@ class Peer(object):
        if self.connection and self.connection.connected:  # We have connection to peer
            return self.connection
        else:  # Try to find from other sites connections
            self.connection = self.connection_server.getConnection(self.ip, self.port, create=False)  # Do not create new connection if not found
            return self.connection

    def __str__(self):

@@ -68,7 +75,7 @@ class Peer(object):
    # Peer ip:port to packed 6byte format
    def packAddress(self):
        return socket.inet_aton(self.ip) + struct.pack("H", self.port)

    def unpackAddress(self, packed):
        return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]

@@ -83,22 +90,17 @@ class Peer(object):
            self.connect()
            if not self.connection:
                self.onConnectionError()
                return None  # Connection failed

        for retry in range(1, 3):  # Retry 3 times
            try:
                response = self.connection.request(cmd, params)
                if not response:
                    raise Exception("Send error")
                if "error" in response:
                    self.log("%s error: %s" % (cmd, response["error"]))
                    self.onConnectionError()
                else:  # Successful request, reset connection error num
                    self.connection_error = 0
                self.last_response = time.time()
                return response

@@ -108,10 +110,11 @@ class Peer(object):
                    break
                else:
                    self.onConnectionError()
                    self.log(
                        "%s (connection_error: %s, hash_failed: %s, retry: %s)" %
                        (Debug.formatException(err), self.connection_error, self.hash_failed, retry)
                    )
                    time.sleep(1 * retry)
                    self.connect()
        return None  # Failed after 4 retry

@@ -121,7 +124,8 @@ class Peer(object):
        buff = StringIO()
        s = time.time()
        while True:  # Read in 512k parts
            back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location})
            if not back or "body" not in back:  # Error
                return False

@@ -145,7 +149,7 @@ class Peer(object):
            response = self.request("ping")

            if response and "body" in response and response["body"] == "Pong!":
                response_time = time.time() - s
                break  # All fine, exit from for loop
            # Timeout reached or bad response
            self.onConnectionError()

@@ -185,7 +189,8 @@ class Peer(object):
    # Stop and remove from site
    def remove(self):
        self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed))
        if self.site and self.key in self.site.peers:
            del(self.site.peers[self.key])
        if self.connection:
            self.connection.close()
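The 6-byte address format used by packAddress/unpackAddress is 4 bytes of IP plus 2 bytes of port. Note that "H" is native byte order here, so both ends must agree on it. A standalone round-trip using the same calls:

import socket
import struct

def packAddress(ip, port):  # Same packing as Peer.packAddress above
    return socket.inet_aton(ip) + struct.pack("H", port)

def unpackAddress(packed):  # Same unpacking as Peer.unpackAddress above
    return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]

packed = packAddress("1.2.3.4", 15441)
print len(packed)            # 6
print unpackAddress(packed)  # ('1.2.3.4', 15441)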


@@ -1,99 +1,108 @@
import logging
import os
import sys

from Debug import Debug
from Config import config


class PluginManager:

    def __init__(self):
        self.log = logging.getLogger("PluginManager")
        self.plugin_path = "plugins"  # Plugin directory
        self.plugins = {}  # Registered plugins (key: class name, value: list of plugins for class)
        self.plugin_names = []  # Loaded plugin names

        sys.path.append(self.plugin_path)

        if config.debug:  # Auto reload Plugins on file change
            from Debug import DebugReloader
            DebugReloader(self.reloadPlugins)

    # -- Load / Unload --

    # Load all plugin
    def loadPlugins(self):
        for dir_name in os.listdir(self.plugin_path):
            dir_path = os.path.join(self.plugin_path, dir_name)
            if dir_name.startswith("disabled"):
                continue  # Dont load if disabled
            if not os.path.isdir(dir_path):
                continue  # Dont load if not dir
            if dir_name.startswith("Debug") and not config.debug:
                continue  # Only load in debug mode if module name starts with Debug
            self.log.debug("Loading plugin: %s" % dir_name)
            try:
                __import__(dir_name)
            except Exception, err:
                self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err)))
            if dir_name not in self.plugin_names:
                self.plugin_names.append(dir_name)

    # Reload all plugins
    def reloadPlugins(self):
        self.plugins = {}  # Reset registered plugins
        for module_name, module in sys.modules.items():
            if module and "__file__" in dir(module) and self.plugin_path in module.__file__:  # Module file within plugin_path
                if "allow_reload" not in dir(module) or module.allow_reload:  # Check if reload disabled
                    try:
                        reload(module)
                    except Exception, err:
                        self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err)))

        self.loadPlugins()  # Load new plugins


plugin_manager = PluginManager()  # Singleton


# -- Decorators --

# Accept plugin to class decorator
def acceptPlugins(base_class):
    class_name = base_class.__name__
    if class_name in plugin_manager.plugins:  # Has plugins
        classes = plugin_manager.plugins[class_name][:]  # Copy the current plugins
        classes.reverse()
        classes.append(base_class)  # Add the class itself to end of inheritance line
        plugined_class = type(class_name, tuple(classes), dict())  # Create the plugined class
        plugin_manager.log.debug("New class accepts plugins: %s (Loaded plugins: %s)" % (class_name, classes))
    else:  # No plugins just use the original
        plugined_class = base_class
    return plugined_class


# Register plugin to class name decorator
def registerTo(class_name):
    plugin_manager.log.debug("New plugin registered to: %s" % class_name)
    if class_name not in plugin_manager.plugins:
        plugin_manager.plugins[class_name] = []

    def classDecorator(self):
        plugin_manager.plugins[class_name].append(self)
        return self
    return classDecorator


# - Example usage -
if __name__ == "__main__":
    @registerTo("Request")
    class RequestPlugin(object):

        def actionMainPage(self, path):
            return "Hello MainPage!"

    @acceptPlugins
    class Request(object):

        def route(self, path):
            func = getattr(self, "action" + path, None)
            if func:
                return func(path)
            else:
                return "Can't route to", path

    print Request().route("MainPage")
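The classes.reverse() in acceptPlugins decides override order: with type(name, bases, dict), the leftmost base wins attribute lookup, so the most recently registered plugin overrides earlier ones, and every plugin overrides the original class. A standalone sketch of that resolution:

class Request(object):
    def actionMainPage(self, path):
        return "base"

class PluginA(object):
    def actionMainPage(self, path):
        return "A"

class PluginB(object):
    def actionMainPage(self, path):
        return "B"

plugins = [PluginA, PluginB]  # registration order
classes = plugins[:]
classes.reverse()             # newest plugin becomes the leftmost base
classes.append(Request)       # original class goes last
Plugined = type("Request", tuple(classes), dict())

print Plugined().actionMainPage("/")  # B: the most recently registered plugin wins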

File diff suppressed because it is too large


@@ -1,99 +1,99 @@
import json
import logging
import re
import os

from Plugin import PluginManager
from Config import config

TRACKERS = [
    ("udp", "open.demonii.com", 1337),
    # ("udp", "sugoi.pomf.se", 2710),
    # ("udp", "tracker.coppersurfer.tk", 80),
    ("udp", "tracker.leechers-paradise.org", 6969),
    ("udp", "9.rarbg.com", 2710),
    # ("udp", "www.eddie4.nl", 6969),
    # ("udp", "trackr.sytes.net", 80),
    # ("udp", "tracker4.piratux.com", 6969)
    # ("http", "exodus.desync.com:80/announce", None), Off
    ("http", "tracker.aletorrenty.pl:2710/announce", None),
    # ("http", "torrent.gresille.org/announce", None), # Slow
    # ("http", "announce.torrentsmd.com:6969/announce", None), # Off
    # ("http", "i.bandito.org/announce", None), # Off
    ("http", "retracker.telecom.kz/announce", None),
    ("http", "torrent.gresille.org/announce", None),
]


@PluginManager.acceptPlugins
class SiteManager(object):

    def __init__(self):
        self.sites = None

    # Load all sites from data/sites.json
    def load(self):
        from Site import Site
        if not self.sites:
            self.sites = {}
        address_found = []
        added = 0
        # Load new addresses
        for address in json.load(open("%s/sites.json" % config.data_dir)):
            if address not in self.sites and os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
                self.sites[address] = Site(address)
                added += 1
            address_found.append(address)

        # Remove deleted addresses
        for address in self.sites.keys():
            if address not in address_found:
                del(self.sites[address])
                logging.debug("Removed site: %s" % address)

        if added:
            logging.debug("SiteManager added %s sites" % added)

    # Checks if it's a valid address
    def isAddress(self, address):
        return re.match("^[A-Za-z0-9]{26,35}$", address)

    # Return: Site object or None if not found
    def get(self, address):
        if self.sites is None:  # Not loaded yet
            self.load()
        return self.sites.get(address)

    # Return or create site and start download site files
    def need(self, address, all_file=True):
        from Site import Site
        site = self.get(address)
        if not site:  # Site not exist yet
            if not self.isAddress(address):
                return False  # Not address: %s % address
            logging.debug("Added new site: %s" % address)
            site = Site(address)
            self.sites[address] = site
            if not site.settings["serving"]:  # Maybe it was deleted before
                site.settings["serving"] = True
                site.saveSettings()
        if all_file:
            site.download()
        return site

    def delete(self, address):
        logging.debug("SiteManager deleted site: %s" % address)
        del(self.sites[address])

    # Lazy load sites
    def list(self):
        if self.sites is None:  # Not loaded yet
            self.load()
        return self.sites


site_manager = SiteManager()  # Singleton

peer_blacklist = []  # Don't download from these peers
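The address check is a plain regex over the base58-looking site address. The same check, run on the address that already appears in this diff and on an obviously bad string:

import re

def isAddress(address):  # Same check as SiteManager.isAddress above
    return re.match("^[A-Za-z0-9]{26,35}$", address)

print bool(isAddress("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH"))  # True (34 alphanumeric chars)
print bool(isAddress("not-an-address"))                      # False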


@ -1,292 +1,297 @@
import os, re, shutil, json, time, sqlite3 import os
import re
import shutil
import json
import time
import sqlite3
import gevent.event import gevent.event
from Db import Db from Db import Db
from Debug import Debug from Debug import Debug
from Config import config from Config import config
class SiteStorage: class SiteStorage:
def __init__(self, site, allow_create=True):
self.site = site
self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
self.log = site.log
self.db = None # Db class
self.db_checked = False # Checked db tables since startup
self.event_db_busy = None # Gevent AsyncResult if db is working on rebuild
self.has_db = self.isFile("dbschema.json") # The site has schema
if not os.path.isdir(self.directory): def __init__(self, site, allow_create=True):
if allow_create: self.site = site
os.mkdir(self.directory) # Create directory if not found self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
else: self.log = site.log
raise Exception("Directory not exists: %s" % self.directory) self.db = None # Db class
self.db_checked = False # Checked db tables since startup
self.event_db_busy = None # Gevent AsyncResult if db is working on rebuild
self.has_db = self.isFile("dbschema.json") # The site has schema
if not os.path.isdir(self.directory):
if allow_create:
os.mkdir(self.directory) # Create directory if not found
else:
raise Exception("Directory not exists: %s" % self.directory)
# Load db from dbschema.json # Load db from dbschema.json
def openDb(self, check=True): def openDb(self, check=True):
schema = self.loadJson("dbschema.json") schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"]) db_path = self.getPath(schema["db_file"])
if check: if check:
if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Not exist or null if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Not exist or null
self.rebuildDb() self.rebuildDb()
self.db = Db(schema, db_path) self.db = Db(schema, db_path)
if check and not self.db_checked: if check and not self.db_checked:
changed_tables = self.db.checkTables() changed_tables = self.db.checkTables()
if changed_tables: self.rebuildDb(delete_db=False) # Todo only update the changed table datas if changed_tables:
self.rebuildDb(delete_db=False) # Todo only update the changed table datas
def closeDb(self):
if self.db:
self.db.close()
self.event_db_busy = None
self.db = None
def closeDb(self): # Return db class
if self.db: self.db.close() def getDb(self):
self.event_db_busy = None if not self.db:
self.db = None self.log.debug("No database, waiting for dbschema.json...")
self.site.needFile("dbschema.json", priority=1)
self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist
if self.has_db:
self.openDb()
return self.db
# Rebuild sql cache
def rebuildDb(self, delete_db=True):
self.has_db = self.isFile("dbschema.json")
if not self.has_db:
return False
self.event_db_busy = gevent.event.AsyncResult()
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path) and delete_db:
if self.db:
self.db.close() # Close db if open
self.log.info("Deleting %s" % db_path)
try:
os.unlink(db_path)
except Exception, err:
self.log.error("Delete error: %s" % err)
self.openDb(check=False)
self.log.info("Creating tables...")
self.db.checkTables()
self.log.info("Importing data...")
cur = self.db.getCursor()
cur.execute("BEGIN")
cur.logging = False
found = 0
s = time.time()
for content_inner_path, content in self.site.content_manager.contents.items():
content_path = self.getPath(content_inner_path)
if os.path.isfile(content_path): # Missing content.json file
if self.db.loadJson(content_path, cur=cur):
found += 1
else:
self.log.error("[MISSING] %s" % content_inner_path)
for file_relative_path in content["files"].keys():
if not file_relative_path.endswith(".json"):
continue # We only interesed in json files
content_inner_path_dir = self.site.content_manager.toDir(content_inner_path) # Content.json dir relative to site
file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
if os.path.isfile(file_path):
if self.db.loadJson(file_path, cur=cur):
found += 1
else:
self.log.error("[MISSING] %s" % file_inner_path)
cur.execute("END")
self.log.info("Imported %s data file in %ss" % (found, time.time() - s))
self.event_db_busy.set(True) # Event done, notify waiters
self.event_db_busy = None # Clear event
# Return db class # Execute sql query or rebuild on dberror
def getDb(self): def query(self, query, params=None):
if not self.db: if self.event_db_busy: # Db not ready for queries
self.log.debug("No database, waiting for dbschema.json...") self.log.debug("Wating for db...")
self.site.needFile("dbschema.json", priority=1) self.event_db_busy.get() # Wait for event
self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist try:
if self.has_db: self.openDb() res = self.getDb().execute(query, params)
return self.db except sqlite3.DatabaseError, err:
if err.__class__.__name__ == "DatabaseError":
self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
self.rebuildDb()
res = self.db.cur.execute(query, params)
else:
raise err
return res

    # Open file object
    def open(self, inner_path, mode="rb"):
        return open(self.getPath(inner_path), mode)

    # Read file content
    def read(self, inner_path, mode="r"):
        return open(self.getPath(inner_path), mode).read()

    # Write content to file
    def write(self, inner_path, content):
        file_path = self.getPath(inner_path)
        # Create dir if not exist
        file_dir = os.path.dirname(file_path)
        if not os.path.isdir(file_dir):
            os.makedirs(file_dir)
        # Write file
        if hasattr(content, 'read'):  # File-like object
            with open(file_path, "wb") as file:
                shutil.copyfileobj(content, file)  # Write buff to disk
        else:  # Simple string
            with open(file_path, "wb") as file:
                file.write(content)
        del content
        self.onUpdated(inner_path)

    # Site content updated
    def onUpdated(self, inner_path):
        file_path = self.getPath(inner_path)
        # Update Sql cache
        if inner_path == "dbschema.json":
            self.has_db = self.isFile("dbschema.json")
            self.getDb().checkTables()  # Check if any table schema changed
        elif inner_path.endswith(".json") and self.has_db:  # Load json file to db
            self.log.debug("Loading json file to db: %s" % inner_path)
            try:
                self.getDb().loadJson(file_path)
            except Exception, err:
                self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
                self.closeDb()

    # Load and parse json file
    def loadJson(self, inner_path):
        with self.open(inner_path) as file:
            return json.load(file)

    # Write formatted json file
    def writeJson(self, inner_path, data):
        content = json.dumps(data, indent=2, sort_keys=True)

        # Make it a little more compact by removing unnecessary white space
        def compact_list(match):
            return "[ " + match.group(1).strip() + " ]"

        def compact_dict(match):
            return "{ " + match.group(1).strip() + " }"

        content = re.sub("\[([^,\{\[]{10,100}?)\]", compact_list, content, flags=re.DOTALL)
        content = re.sub("\{([^,\[\{]{10,100}?)\}", compact_dict, content, flags=re.DOTALL)
        # Write to disk
        self.write(inner_path, content)
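
The two re.sub calls collapse short, comma-free [...] and {...} spans (10-100 characters) back onto a single line after json.dumps(indent=2) has spread them out. A small demonstration with made-up data:

import json
import re

def compact_list(match):
    return "[ " + match.group(1).strip() + " ]"

content = json.dumps({"ignore": ["index.html"]}, indent=2, sort_keys=True)
# json.dumps spread the single-item list over three lines;
# the substitution folds it back to: "ignore": [ "index.html" ]
content = re.sub("\[([^,\{\[]{10,100}?)\]", compact_list, content, flags=re.DOTALL)
print content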

    # Get file size
    def getSize(self, inner_path):
        path = self.getPath(inner_path)
        if os.path.isfile(path):
            return os.path.getsize(path)
        else:
            return 0

    # File exists
    def isFile(self, inner_path):
        return os.path.isfile(self.getPath(inner_path))

    # Dir exists
    def isDir(self, inner_path):
        return os.path.isdir(self.getPath(inner_path))

    # Security check and return path of site's file
    def getPath(self, inner_path):
        inner_path = inner_path.replace("\\", "/")  # Windows separator fix
        inner_path = re.sub("^%s/" % re.escape(self.directory), "", inner_path)  # Remove site directory if it begins with it
        file_path = self.directory + "/" + inner_path
        allowed_dir = os.path.abspath(self.directory)  # Only files within this directory allowed
        if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
            raise Exception("File not allowed: %s" % file_path)
        return file_path
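
getPath is the traversal guard every read and write goes through: anything that resolves outside the site directory raises. A standalone sketch of the same check (safe_path and the "data/site1" directory are illustrative, not from the codebase):

import os

def safe_path(directory, inner_path):
    # Illustrative standalone version of the guard above
    file_path = directory + "/" + inner_path.replace("\\", "/")
    allowed_dir = os.path.abspath(directory)
    if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
        raise Exception("File not allowed: %s" % file_path)
    return file_path

print safe_path("data/site1", "content.json")  # OK: stays inside the site dir
try:
    safe_path("data/site1", "../users.json")  # Escapes the site dir
except Exception, err:
    print err  # File not allowed: data/site1/../users.json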

    # Verify all files sha512sum using content.json
    def verifyFiles(self, quick_check=False):  # quick_check: compare file size instead of sha512
        bad_files = []
        if not self.site.content_manager.contents.get("content.json"):  # No content.json, download it first
            self.site.needFile("content.json", update=True)  # Force update to fix corrupt file
            self.site.content_manager.loadContent()  # Reload content.json
        for content_inner_path, content in self.site.content_manager.contents.items():
            if not os.path.isfile(self.getPath(content_inner_path)):  # Missing content.json file
                self.log.debug("[MISSING] %s" % content_inner_path)
                bad_files.append(content_inner_path)
            for file_relative_path in content["files"].keys():
                file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path  # Relative to site dir
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if not os.path.isfile(file_path):
                    self.log.debug("[MISSING] %s" % file_inner_path)
                    bad_files.append(file_inner_path)
                    continue

                if quick_check:
                    ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
                else:
                    ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))

                if not ok:
                    self.log.debug("[CHANGED] %s" % file_inner_path)
                    bad_files.append(file_inner_path)
            self.log.debug(
                "%s verified: %s files, quick_check: %s, bad files: %s" %
                (content_inner_path, len(content["files"]), quick_check, bad_files)
            )

        return bad_files

    # Check and try to fix site files integrity
    def checkFiles(self, quick_check=True):
        s = time.time()
        bad_files = self.verifyFiles(quick_check)
        if bad_files:
            for bad_file in bad_files:
                self.site.bad_files[bad_file] = self.site.bad_files.get(bad_file, 0) + 1
        self.log.debug("Checked files in %.2fs... Quick: %s" % (time.time() - s, quick_check))

    # Delete all of the site's files
    def deleteFiles(self):
        if self.has_db:
            self.log.debug("Deleting db file...")
            self.closeDb()
            try:
                schema = self.loadJson("dbschema.json")
                db_path = self.getPath(schema["db_file"])
                if os.path.isfile(db_path):
                    os.unlink(db_path)
            except Exception, err:
                self.log.error("Db file delete error: %s" % err)

        self.log.debug("Deleting files from content.json...")
        files = []  # Get filenames
        for content_inner_path, content in self.site.content_manager.contents.items():
            files.append(content_inner_path)
            for file_relative_path in content["files"].keys():
                file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path  # Relative to site dir
                files.append(file_inner_path)

        for inner_path in files:
            path = self.getPath(inner_path)
            if os.path.isfile(path):
                os.unlink(path)

        self.log.debug("Deleting empty dirs...")
        for root, dirs, files in os.walk(self.directory, topdown=False):
            for dir in dirs:
                path = os.path.join(root, dir)
                if os.path.isdir(path) and os.listdir(path) == []:
                    self.log.debug("Removing %s" % path)
                    os.removedirs(path)
        if os.path.isdir(self.directory) and os.listdir(self.directory) == []:
            os.removedirs(self.directory)  # Remove sites directory if empty

        if os.path.isdir(self.directory):
            self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory)
            return False  # Some files were not deleted
        else:
            self.log.debug("Site data directory deleted: %s..." % self.directory)
            return True  # All clean

@@ -0,0 +1,140 @@
import time
import socket
import msgpack

print "Connecting..."
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 1234))

print "1 Threaded: Send, receive 10000 ping request...",
s = time.time()
for i in range(10000):
    sock.sendall(msgpack.packb({"cmd": "Ping"}))
    req = sock.recv(16 * 1024)
print repr(req), time.time() - s

print "1 Threaded: Send, receive, decode 10000 ping request...",
s = time.time()
unpacker = msgpack.Unpacker()
reqs = 0
for i in range(10000):
    sock.sendall(msgpack.packb({"cmd": "Ping"}))
    unpacker.feed(sock.recv(16 * 1024))
    for req in unpacker:
        reqs += 1
print "Found:", req, "x", reqs, time.time() - s

print "1 Threaded: Send, receive, decode, reconnect 1000 ping request...",
s = time.time()
unpacker = msgpack.Unpacker()
reqs = 0
for i in range(1000):
    sock.sendall(msgpack.packb({"cmd": "Ping"}))
    unpacker.feed(sock.recv(16 * 1024))
    for req in unpacker:
        reqs += 1
    sock.close()
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("localhost", 1234))
print "Found:", req, "x", reqs, time.time() - s

print "1 Threaded: Request, receive, decode 10000 x 10k data request...",
s = time.time()
unpacker = msgpack.Unpacker()
reqs = 0
for i in range(10000):
    sock.sendall(msgpack.packb({"cmd": "Bigdata"}))

    """buff = StringIO()
    data = sock.recv(16*1024)
    buff.write(data)
    if not data:
        break
    while not data.endswith("\n"):
        data = sock.recv(16*1024)
        if not data: break
        buff.write(data)
    req = msgpack.unpackb(buff.getvalue().strip("\n"))
    reqs += 1"""

    req_found = False
    while not req_found:
        buff = sock.recv(16 * 1024)
        unpacker.feed(buff)
        for req in unpacker:
            reqs += 1
            req_found = True
            break  # Only process one request
print "Found:", len(req["res"]), "x", reqs, time.time() - s

print "10 Threaded: Request, receive, decode 10000 x 10k data request...",
import gevent
s = time.time()
reqs = 0
req = None


def requester():
    global reqs, req
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("localhost", 1234))
    unpacker = msgpack.Unpacker()
    for i in range(1000):
        sock.sendall(msgpack.packb({"cmd": "Bigdata"}))
        req_found = False
        while not req_found:
            buff = sock.recv(16 * 1024)
            unpacker.feed(buff)
            for req in unpacker:
                reqs += 1
                req_found = True
                break  # Only process one request

threads = []
for i in range(10):
    threads.append(gevent.spawn(requester))
gevent.joinall(threads)
print "Found:", len(req["res"]), "x", reqs, time.time() - s

print "1 Threaded: ZeroMQ Send, receive 1000 ping request...",
s = time.time()
import zmq.green as zmq
c = zmq.Context()
zmq_sock = c.socket(zmq.REQ)
zmq_sock.connect('tcp://127.0.0.1:1234')
for i in range(1000):
    zmq_sock.send(msgpack.packb({"cmd": "Ping"}))
    req = zmq_sock.recv(16 * 1024)
print "Found:", req, time.time() - s

print "1 Threaded: ZeroMQ Send, receive 1000 x 10k data request...",
s = time.time()
import zmq.green as zmq
c = zmq.Context()
zmq_sock = c.socket(zmq.REQ)
zmq_sock.connect('tcp://127.0.0.1:1234')
for i in range(1000):
    zmq_sock.send(msgpack.packb({"cmd": "Bigdata"}))
    req = msgpack.unpackb(zmq_sock.recv(1024 * 1024))
print "Found:", len(req["res"]), time.time() - s

print "1 Threaded: direct ZeroMQ Send, receive 1000 x 10k data request...",
s = time.time()
import zmq.green as zmq
c = zmq.Context()
zmq_sock = c.socket(zmq.REQ)
zmq_sock.connect('tcp://127.0.0.1:1233')
for i in range(1000):
    zmq_sock.send(msgpack.packb({"cmd": "Bigdata"}))
    req = msgpack.unpackb(zmq_sock.recv(1024 * 1024))
print "Found:", len(req["res"]), time.time() - s

@@ -1,11 +1,16 @@
#!/usr/bin/python2
from gevent import monkey
monkey.patch_all()
import os
import time
import sys
import socket
import ssl
sys.path.append(os.path.abspath("src"))  # Imports relative to src dir
import cStringIO as StringIO
import gevent
from gevent.server import StreamServer
from gevent.pool import Pool
from util import SslPatch

# Server
socks = []
data = os.urandom(1024 * 100)
data += "\n"


def handle(sock_raw, addr):
    socks.append(sock_raw)
    sock = sock_raw
    # sock = ctx.wrap_socket(sock, server_side=True)
    # if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
    #     sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem',
    #         certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
    # fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
    try:
        while True:
            line = sock.recv(16 * 1024)
            if not line:
                break
            if line == "bye\n":
                break
            elif line == "gotssl\n":
                sock.sendall("yes\n")
                sock = gevent.ssl.wrap_socket(
                    sock_raw, server_side=True, keyfile='data/key-rsa.pem', certfile='data/cert-rsa.pem',
                    ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1
                )
            else:
                sock.sendall(data)
    except Exception, err:
        print err
    try:
        sock.shutdown(gevent.socket.SHUT_WR)
        sock.close()
    except:
        pass
    socks.remove(sock_raw)

pool = Pool(1000)  # Do not accept more than 1000 connections
server = StreamServer(('127.0.0.1', 1234), handle)
server.start()

total_num = 0
total_bytes = 0
clipher = None
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:" + \
    "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
# ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)


def getData():
    global total_num, total_bytes, clipher
    data = None
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # sock = socket.ssl(s)
    # sock = ssl.wrap_socket(sock)
    sock.connect(("127.0.0.1", 1234))
    # sock.do_handshake()
    # clipher = sock.cipher()
    sock.send("gotssl\n")
    if sock.recv(128) == "yes\n":
        sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
        sock.do_handshake()
        clipher = sock.cipher()

    for req in range(100):
        sock.sendall("req\n")
        buff = StringIO.StringIO()
        data = sock.recv(16 * 1024)
        buff.write(data)
        if not data:
            break
        while not data.endswith("\n"):
            data = sock.recv(16 * 1024)
            if not data:
                break
            buff.write(data)
        total_num += 1
        total_bytes += buff.tell()
    if not data:
        print "No data"

    sock.shutdown(gevent.socket.SHUT_WR)
    sock.close()

s = time.time()


def info():
    import psutil
    import os
    process = psutil.Process(os.getpid())
    if "memory_info" in dir(process):
        memory_info = process.memory_info
    else:
        memory_info = process.get_memory_info
    while 1:
        print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s,
        print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)
        time.sleep(1)

gevent.spawn(info)

for test in range(10):
    clients = []
    for i in range(10):  # Thread
        clients.append(gevent.spawn(getData))
    gevent.joinall(clients)

print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s

# Separate client/server process:
# 10*10*100:
# Raw:      10000 req 1000009 kbytes transfered in 5.39999985695
# RSA 2048: 10000 req 1000009 kbytes transfered in 27.7890000343 using ('ECDHE-RSA-AES256-SHA', 'TLSv1/SSLv3', 256)
# ECC:      10000 req 1000009 kbytes transfered in 26.1959998608 using ('ECDHE-ECDSA-AES256-SHA', 'TLSv1/SSLv3', 256)
# ECC:      10000 req 1000009 kbytes transfered in 28.2410001755 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 13.3828125
#
# 10*100*10:
# Raw:      10000 req 1000009 kbytes transfered in 7.02700018883 Mem: 14.328125
# RSA 2048: 10000 req 1000009 kbytes transfered in 44.8860001564 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.078125
# ECC:      10000 req 1000009 kbytes transfered in 37.9430000782 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.0234375
#
# 1*100*100:
# Raw:      10000 req 1000009 kbytes transfered in 4.64400005341 Mem: 14.06640625
# RSA:      10000 req 1000009 kbytes transfered in 24.2300000191 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 19.7734375
# ECC:      10000 req 1000009 kbytes transfered in 22.8849999905 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 17.8125
# AES128:   10000 req 1000009 kbytes transfered in 21.2839999199 using ('AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.1328125
# ECC+128:  10000 req 1000009 kbytes transfered in 20.496999979 using ('ECDHE-ECDSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.40234375
#
#
# Single process:
# 1*100*100
# RSA:      10000 req 1000009 kbytes transfered in 41.7899999619 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 26.91015625
#
# 10*10*100
# RSA:      10000 req 1000009 kbytes transfered in 40.1640000343 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.94921875

@@ -1,4 +1,10 @@
import time
import re
import os
import mimetypes
import json
import cgi

from Config import config
from Site import SiteManager
from User import UserManager
from Plugin import PluginManager
from Ui.UiWebsocket import UiWebsocket

status_texts = {
    200: "200 OK",
    400: "400 Bad Request",
    403: "403 Forbidden",
    404: "404 Not Found",
    500: "500 Internal Server Error",
}


@PluginManager.acceptPlugins
class UiRequest(object):

    def __init__(self, server, get, env, start_response):
        if server:
            self.server = server
            self.log = server.log
        self.get = get  # Get parameters
        self.env = env  # Environment settings
        self.start_response = start_response  # Start response function
        self.user = None

    # Return posted variables as dict
    def getPosted(self):
        if self.env['REQUEST_METHOD'] == "POST":
            return dict(cgi.parse_qsl(
                self.env['wsgi.input'].readline().decode()
            ))
        else:
            return {}

    # Call the request handler function based on the path
    def route(self, path):
        if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict:  # Restrict UI access by IP
            return self.error403()

        path = re.sub("^http://zero[/]+", "/", path)  # Remove beginning http://zero/ for chrome extension
        path = re.sub("^http://", "/", path)  # Remove beginning http for chrome extension .bit access

        if path == "/":
            return self.actionIndex()
        elif path.endswith("favicon.ico"):
            return self.actionFile("src/Ui/media/img/favicon.ico")
        # Media
        elif path.startswith("/uimedia/"):
            return self.actionUiMedia(path)
        elif path.startswith("/media"):
            return self.actionSiteMedia(path)
        # Websocket
        elif path == "/Websocket":
            return self.actionWebsocket()
        # Debug
        elif path == "/Debug" and config.debug:
            return self.actionDebug()
        elif path == "/Console" and config.debug:
            return self.actionConsole()
        # Site media wrapper
        else:
            body = self.actionWrapper(path)
            if body:
                return body
            else:
                func = getattr(self, "action" + path.lstrip("/"), None)  # Check if we have an action+request_path function
                if func:
                    return func()
                else:
                    return self.error404(path)

    # The request is proxied by chrome extension
    def isProxyRequest(self):
        return self.env["PATH_INFO"].startswith("http://")

    def isAjaxRequest(self):
        return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"

    # Get mime by filename
    def getContentType(self, file_name):
        content_type = mimetypes.guess_type(file_name)[0]
        if not content_type:
            if file_name.endswith("json"):  # Correct json header
                content_type = "application/json"
            else:
                content_type = "application/octet-stream"
        return content_type

    # Returns: <dict> Cookies based on self.env
    def getCookies(self):
        raw_cookies = self.env.get('HTTP_COOKIE')
        if raw_cookies:
            cookies = cgi.parse_qsl(raw_cookies)
            return {key.strip(): val for key, val in cookies}
        else:
            return {}

    def getCurrentUser(self):
        if self.user:
            return self.user  # Cache
        self.user = UserManager.user_manager.get()  # Get user
        if not self.user:
            self.user = UserManager.user_manager.create()
        return self.user

    # Send response headers
    def sendHeader(self, status=200, content_type="text/html", extra_headers=[]):
        if content_type == "text/html":
            content_type = "text/html; charset=utf-8"
        headers = []
        headers.append(("Version", "HTTP/1.1"))
        headers.append(("Access-Control-Allow-Origin", "*"))  # Allow json access
        if self.env["REQUEST_METHOD"] == "OPTIONS":
            # Allow json access
            headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept"))

        cacheable_type = (
            content_type == "text/css" or content_type.startswith("image") or
            self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript"
        )

        if status == 200 and cacheable_type:  # Cache css, js and image files for 10 min
            headers.append(("Cache-Control", "public, max-age=600"))  # Cache 10 min
        else:
            headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0"))  # No caching at all
        headers.append(("Content-Type", content_type))
        for extra_header in extra_headers:
            headers.append(extra_header)
        return self.start_response(status_texts[status], headers)

    # Render a template file using str.format
    def render(self, template_path, *args, **kwargs):
        template = open(template_path).read().decode("utf8")
        return template.format(**kwargs).encode("utf8")

    # - Actions -

    # Redirect to an url
    def actionRedirect(self, url):
        self.start_response('301 Redirect', [('Location', url)])
        yield "Location changed: %s" % url

    def actionIndex(self):
        return self.actionRedirect("/" + config.homepage)

    # Render a file from media with iframe site wrapper
    def actionWrapper(self, path, extra_headers=None):
        if not extra_headers:
            extra_headers = []
        if self.get.get("wrapper") == "False":
            return self.actionSiteMedia("/media" + path)  # Only serve html files with frame

        match = re.match("/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
        if match:
            address = match.group("address")
            inner_path = match.group("inner_path").lstrip("/")
            if "." in inner_path and not inner_path.endswith(".html"):
                return self.actionSiteMedia("/media" + path)  # Only serve html files with frame
            if self.env.get("HTTP_X_REQUESTED_WITH"):
                return self.error403("Ajax request not allowed to load wrapper")  # No ajax allowed on wrapper

            file_inner_path = inner_path
            if not file_inner_path:
                file_inner_path = "index.html"  # If no inner path, default to index.html

            if not inner_path and not path.endswith("/"):
                inner_path = address + "/"  # Fix relative resource loading if / is missing from the end of the site address
            inner_path = re.sub(".*/(.+)", "\\1", inner_path)  # Load innerframe relative to current url

            site = SiteManager.site_manager.get(address)

            if (
                site and site.content_manager.contents.get("content.json") and
                (not site.getReachableBadFiles() or site.settings["own"])
            ):  # It's downloaded or our own
                title = site.content_manager.contents["content.json"]["title"]
            else:
                title = "Loading %s..." % address
                site = SiteManager.site_manager.need(address)  # Start downloading the site
                if not site:
                    return False

            self.sendHeader(extra_headers=extra_headers[:])

            # Wrapper variable inits
            query_string = ""
            body_style = ""
            meta_tags = ""

            if self.env.get("QUERY_STRING"):
                query_string = "?" + self.env["QUERY_STRING"] + "&wrapper=False"
            else:
                query_string = "?wrapper=False"

            if self.isProxyRequest():  # It's a remote proxy request
                if self.env["REMOTE_ADDR"] == "127.0.0.1":  # Local client, the server address also should be 127.0.0.1
                    server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
                else:  # Remote client, use SERVER_NAME as server's real address
                    server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
                homepage = "http://zero/" + config.homepage
            else:  # Use relative path
                server_url = ""
                homepage = "/" + config.homepage

            if site.content_manager.contents.get("content.json"):  # Got content.json
                content = site.content_manager.contents["content.json"]
                if content.get("background-color"):
                    body_style += "background-color: %s;" % \
                        cgi.escape(content["background-color"], True)
                if content.get("viewport"):
                    meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)

            return self.render(
                "src/Ui/template/wrapper.html",
                server_url=server_url,
                inner_path=inner_path,
                file_inner_path=file_inner_path,
                address=address,
                title=title,
                body_style=body_style,
                meta_tags=meta_tags,
                query_string=query_string,
                wrapper_key=site.settings["wrapper_key"],
                permissions=json.dumps(site.settings["permissions"]),
                show_loadingscreen=json.dumps(not site.storage.isFile(file_inner_path)),
                rev=config.rev,
                homepage=homepage
            )

        else:  # Bad url
            return False

    # Returns whether the media request is allowed from that referer
    def isMediaRequestAllowed(self, site_address, referer):
        referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "")  # Remove site address
        return referer_path.startswith("/" + site_address)

    # Serve a media file for a site
    def actionSiteMedia(self, path):
        path = path.replace("/index.html/", "/")  # Backward compatibility fix
        if path.endswith("/"):
            path = path + "index.html"

        match = re.match("/media/(?P<address>[A-Za-z0-9\._-]+)/(?P<inner_path>.*)", path)

        referer = self.env.get("HTTP_REFERER")
        if referer and match:  # Only allow the same site to receive its media
            if not self.isMediaRequestAllowed(match.group("address"), referer):
                return self.error403("Media referrer error")  # Referrer does not start with the same address as the requested path

        if match:  # Looks like a valid path
            address = match.group("address")
            file_path = "%s/%s/%s" % (config.data_dir, address, match.group("inner_path"))
            allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address))  # Only files within data/sitehash allowed
            data_dir = os.path.abspath("data")  # No files from data/ itself allowed
            if (
                ".." in file_path
                or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir)
                or allowed_dir == data_dir
            ):  # File not in allowed path
                return self.error403()
            else:
                if config.debug and file_path.split("/")[-1].startswith("all."):
                    # If debugging, merge *.css to all.css and *.js to all.js
                    site = self.server.sites.get(address)
                    if site.settings["own"]:
                        from Debug import DebugMedia
                        DebugMedia.merge(file_path)
                if os.path.isfile(file_path):  # File exists
                    # self.sendHeader(content_type=self.getContentType(file_path))  # ?? Get Exception without this
                    return self.actionFile(file_path)
                else:  # File doesn't exist, try to download it
                    site = SiteManager.site_manager.need(address, all_file=False)
                    result = site.needFile(match.group("inner_path"), priority=1)  # Wait until file downloads
                    if result:
                        # self.sendHeader(content_type=self.getContentType(file_path))
                        return self.actionFile(file_path)
                    else:
                        self.log.debug("File not found: %s" % match.group("inner_path"))
                        return self.error404(match.group("inner_path"))

        else:  # Bad url
            return self.error404(path)

    # Serve a media file for the ui
    def actionUiMedia(self, path):
        match = re.match("/uimedia/(?P<inner_path>.*)", path)
        if match:  # Looks like a valid path
            file_path = "src/Ui/media/%s" % match.group("inner_path")
            allowed_dir = os.path.abspath("src/Ui/media")  # Only files within src/Ui/media allowed
            if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
                # File not in allowed path
                return self.error403()
            else:
                if config.debug and match.group("inner_path").startswith("all."):
                    # If debugging, merge *.css to all.css and *.js to all.js
                    from Debug import DebugMedia
                    DebugMedia.merge(file_path)
                return self.actionFile(file_path)
        else:  # Bad url
            return self.error400()

    # Stream a file to the client
    def actionFile(self, file_path, block_size=64 * 1024):
        if os.path.isfile(file_path):
            # Try to figure out content type by extension
            content_type = self.getContentType(file_path)

            # TODO: Don't allow external access: extra_headers=
            # [("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")]
            self.sendHeader(content_type=content_type)
            if self.env["REQUEST_METHOD"] != "OPTIONS":
                file = open(file_path, "rb")
                while 1:
                    try:
                        block = file.read(block_size)
                        if block:
                            yield block
                        else:
                            raise StopIteration
                    except StopIteration:
                        file.close()
                        break
        else:  # File does not exist
            yield self.error404(file_path)
return "Bye."
else: # No site found by wrapper key # On websocket connection
self.log.error("Wrapper key not found: %s" % wrapper_key) def actionWebsocket(self):
return self.error403() ws = self.env.get("wsgi.websocket")
else: if ws:
start_response("400 Bad Request", []) wrapper_key = self.get["wrapper_key"]
return "Not a websocket!" # Find site by wrapper_key
site = None
for site_check in self.server.sites.values():
# Debug last error if site_check.settings["wrapper_key"] == wrapper_key:
def actionDebug(self): site = site_check
# Raise last error from DebugHook
import sys if site: # Correct wrapper key
last_error = sys.modules["main"].DebugHook.last_error user = self.getCurrentUser()
if last_error: if not user:
raise last_error[0], last_error[1], last_error[2] self.log.error("No user found")
else: return self.error403()
self.sendHeader() ui_websocket = UiWebsocket(ws, site, self.server, user)
return "No error! :)" site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events
ui_websocket.start()
for site_check in self.server.sites.values():
# Just raise an error to get console # Remove websocket from every site (admin sites allowed to join other sites event channels)
def actionConsole(self): if ui_websocket in site_check.websockets:
import sys site_check.websockets.remove(ui_websocket)
sites = self.server.sites return "Bye."
main = sys.modules["main"] else: # No site found by wrapper key
raise Exception("Here is your console") self.log.error("Wrapper key not found: %s" % wrapper_key)
return self.error403()
else:
# - Tests - self.start_response("400 Bad Request", [])
return "Not a websocket!"
def actionTestStream(self):
self.sendHeader() # Debug last error
yield " "*1080 # Overflow browser's buffer def actionDebug(self):
yield "He" # Raise last error from DebugHook
time.sleep(1) import sys
yield "llo!" last_error = sys.modules["main"].DebugHook.last_error
yield "Running websockets: %s" % len(self.server.websockets) if last_error:
self.server.sendMessage("Hello!") raise last_error[0], last_error[1], last_error[2]
else:
self.sendHeader()
# - Errors - return "No error! :)"
# Send bad request error # Just raise an error to get console
def error400(self): def actionConsole(self):
self.sendHeader(400) import sys
return "Bad Request" sites = self.server.sites
main = sys.modules["main"]
raise Exception("Here is your console")

    # - Tests -

    def actionTestStream(self):
        self.sendHeader()
        yield " " * 1080  # Overflow browser's buffer
        yield "He"
        time.sleep(1)
        yield "llo!"
        yield "Running websockets: %s" % len(self.server.websockets)
        self.server.sendMessage("Hello!")

    # - Errors -

    # Send bad request error
    def error400(self):
        self.sendHeader(400)
        return "Bad Request"

    # You are not allowed to access this
    def error403(self, message="Forbidden"):
        self.sendHeader(403)
        return message

    # Send file not found error
    def error404(self, path=None):
        self.sendHeader(404)
        return "Not Found: %s" % path.encode("utf8")

    # Internal server error
    def error500(self, message=":("):
        self.sendHeader(500)
        return "<h1>Server error</h1>%s" % cgi.escape(message)

# - Reload for easier development -
# def reload():
#     import imp, sys
#     global UiWebsocket
#     UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket
#     reload(sys.modules["User.UserManager"])
#     UserManager.reloadModule()
#     self.user = UserManager.user_manager.getCurrent()
@@ -1,7 +1,11 @@
import logging
import time
import cgi

from gevent.pywsgi import WSGIServer
from gevent.pywsgi import WSGIHandler
from lib.geventwebsocket.handler import WebSocketHandler

from UiRequest import UiRequest
from Site import SiteManager
from Config import config
from Debug import Debug


# Skip websocket handler if not necessary
class UiWSGIHandler(WSGIHandler):

    def __init__(self, *args, **kwargs):
        self.server = args[2]
        super(UiWSGIHandler, self).__init__(*args, **kwargs)
        self.args = args
        self.kwargs = kwargs

    def run_application(self):
        self.server.sockets[self.client_address] = self.socket
        if "HTTP_UPGRADE" in self.environ:  # Websocket request
            try:
                ws_handler = WebSocketHandler(*self.args, **self.kwargs)
                ws_handler.__dict__ = self.__dict__  # Match class variables
                ws_handler.run_application()
            except Exception, err:
                logging.error("UiWSGIHandler websocket error: %s" % Debug.formatException(err))
                if config.debug:  # Allow websocket errors to appear on /Debug
                    import sys
                    del self.server.sockets[self.client_address]
                    sys.modules["main"].DebugHook.handleError()
        else:  # Standard HTTP request
            # print self.application.__class__.__name__
            try:
                super(UiWSGIHandler, self).run_application()
            except Exception, err:
                logging.error("UiWSGIHandler error: %s" % Debug.formatException(err))
                if config.debug:  # Allow errors to appear on /Debug
                    import sys
                    del self.server.sockets[self.client_address]
                    sys.modules["main"].DebugHook.handleError()
        if self.client_address in self.server.sockets:
            del self.server.sockets[self.client_address]


class UiServer:

    def __init__(self):
        self.ip = config.ui_ip
        self.port = config.ui_port
        if self.ip == "*":
            self.ip = ""  # Bind all
        self.sites = SiteManager.site_manager.list()
        self.log = logging.getLogger(__name__)

    # Handle WSGI request
    def handleRequest(self, env, start_response):
        path = env["PATH_INFO"]
        if env.get("QUERY_STRING"):
            get = dict(cgi.parse_qsl(env['QUERY_STRING']))
        else:
            get = {}
        ui_request = UiRequest(self, get, env, start_response)
        if config.debug:  # Let the exception be caught by Werkzeug
            return ui_request.route(path)
        else:  # Catch and display the error
            try:
                return ui_request.route(path)
            except Exception, err:
                logging.debug("UiRequest error: %s" % Debug.formatException(err))
                return ui_request.error500("Err: %s" % Debug.formatException(err))

    # Reload the UiRequest class to prevent restarts in debug mode
    def reload(self):
        global UiRequest
        import imp
        import sys
        reload(sys.modules["User.UserManager"])
        reload(sys.modules["Ui.UiWebsocket"])
        UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest
        # UiRequest.reload()
# Bind and run the server
def start(self):
handler = self.handleRequest
# Reload the UiRequest class to prevent restarts in debug mode if config.debug:
def reload(self): # Auto reload UiRequest on change
global UiRequest from Debug import DebugReloader
import imp, sys DebugReloader(self.reload)
reload(sys.modules["User.UserManager"])
reload(sys.modules["Ui.UiWebsocket"])
UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest
#UiRequest.reload()
# Werkzeug Debugger
try:
from werkzeug.debug import DebuggedApplication
handler = DebuggedApplication(self.handleRequest, evalex=True)
except Exception, err:
self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err)
from Debug import DebugReloader
self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log
self.log.info("--------------------------------------")
self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port))
self.log.info("--------------------------------------")
# Bind and run the server if config.open_browser:
def start(self): logging.info("Opening browser: %s...", config.open_browser)
handler = self.handleRequest import webbrowser
if config.open_browser == "default_browser":
browser = webbrowser.get()
else:
browser = webbrowser.get(config.open_browser)
browser.open("http://%s:%s" % (config.ui_ip, config.ui_port), new=2)
if config.debug: self.server = WSGIServer((self.ip.replace("*", ""), self.port), handler, handler_class=UiWSGIHandler, log=self.log)
# Auto reload UiRequest on change self.server.sockets = {}
from Debug import DebugReloader self.server.serve_forever()
DebugReloader(self.reload) self.log.debug("Stopped.")
# Werkzeug Debugger def stop(self):
try: self.log.debug("Stopping...")
from werkzeug.debug import DebuggedApplication # Close WS sockets
handler = DebuggedApplication(self.handleRequest, evalex=True) if "clients" in dir(self.server):
except Exception, err: for client in self.server.clients.values():
self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err) client.ws.close()
from Debug import DebugReloader # Close http sockets
self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log sock_closed = 0
self.log.info("--------------------------------------") for sock in self.server.sockets.values():
self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port)) try:
self.log.info("--------------------------------------") sock._sock.close()
sock.close()
if config.open_browser: sock_closed += 1
logging.info("Opening browser: %s...", config.open_browser) except Exception:
import webbrowser pass
if config.open_browser == "default_browser": self.log.debug("Socket closed: %s" % sock_closed)
browser = webbrowser.get()
else:
browser = webbrowser.get(config.open_browser)
browser.open("http://%s:%s" % (config.ui_ip, config.ui_port), new=2)
self.server = WSGIServer((self.ip.replace("*", ""), self.port), handler, handler_class=UiWSGIHandler, log=self.log)
self.server.sockets = {}
self.server.serve_forever()
self.log.debug("Stopped.")
def stop(self):
self.log.debug("Stopping...")
# Close WS sockets
if "clients" in dir(self.server):
for client in self.server.clients.values():
client.ws.close()
# Close http sockets
sock_closed = 0
for sock in self.server.sockets.values():
try:
sock._sock.close()
sock.close()
sock_closed += 1
except Exception, err:
pass
self.log.debug("Socket closed: %s" % sock_closed)
self.server.socket.close()
self.server.stop()
time.sleep(1)
self.server.socket.close()
self.server.stop()
time.sleep(1)
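Side note on the request flow above: the whole GET-parameter handling reduces to a single cgi.parse_qsl call over the WSGI QUERY_STRING. A minimal sketch of just that step, not part of the commit; the env dict is a hand-made stand-in for what a real WSGI server would pass in:

import cgi

env = {"PATH_INFO": "/Stats", "QUERY_STRING": "peer_id=abc&limit=10"}  # Hypothetical request
if env.get("QUERY_STRING"):
    get = dict(cgi.parse_qsl(env["QUERY_STRING"]))
else:
    get = {}
print get  # {'peer_id': 'abc', 'limit': '10'}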

File diff suppressed because it is too large


@@ -1,4 +1,7 @@
import logging
import json
import time

from Crypt import CryptBitcoin
from Plugin import PluginManager
from Config import config
@@ -6,6 +9,7 @@ from Config import config

@PluginManager.acceptPlugins
class User(object):

    def __init__(self, master_address=None, master_seed=None, data={}):
        if master_seed:
            self.master_seed = master_seed
@@ -27,7 +31,8 @@ class User(object):
        if self.master_address not in users:
            users[self.master_address] = {}  # Create if not exist
        user_data = users[self.master_address]
        if self.master_seed:
            user_data["master_seed"] = self.master_seed
        user_data["sites"] = self.sites
        user_data["certs"] = self.certs
        open("%s/users.json" % config.data_dir, "w").write(json.dumps(users, indent=2, sort_keys=True))
@@ -40,26 +45,28 @@ class User(object):
    # Return: {"auth_address": "xxx", "auth_privatekey": "xxx"}
    def getSiteData(self, address, create=True):
        if address not in self.sites:  # Generate new BIP32 child key based on site address
            if not create:
                return {"auth_address": None, "auth_privatekey": None}  # Dont create user yet
            s = time.time()
            address_id = self.getAddressAuthIndex(address)  # Convert site address to int
            auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
            self.sites[address] = {
                "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey),
                "auth_privatekey": auth_privatekey
            }
            self.save()
            self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s))
        return self.sites[address]

    # Get data for a new, unique site
    # Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}]
    def getNewSiteData(self):
        import random
        bip32_index = random.randrange(2 ** 256) % 100000000
        site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index)
        site_address = CryptBitcoin.privatekeyToAddress(site_privatekey)
        if site_address in self.sites:
            raise Exception("Random error: site already exists!")
        # Save to sites
        self.getSiteData(site_address)
        self.sites[site_address]["privatekey"] = site_privatekey
@@ -85,7 +92,8 @@ class User(object):
    # Add cert for the user
    def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
        domain = domain.lower()
        # Find privatekey by auth address
        auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0]
        cert_node = {
            "auth_address": auth_address,
            "auth_privatekey": auth_privatekey,
@@ -95,10 +103,13 @@ class User(object):
        }
        # Check if we already have a cert for that domain and it's not the same
        if self.certs.get(domain) and self.certs[domain] != cert_node:
            raise Exception(
                "You already have certificate for this domain: %s/%s@%s" %
                (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain)
            )
        elif self.certs.get(domain) == cert_node:  # Same, not updated
            return None
        else:  # Not exist yet, add
            self.certs[domain] = cert_node
            self.save()
            return True
@@ -113,17 +124,19 @@ class User(object):
        return site_data

    # Get cert for the site address
    # Return: {"auth_address": .., "auth_privatekey": .., "auth_type": "web", "auth_user_name": "nofish", "cert_sign": ..} or None
    def getCert(self, address):
        site_data = self.getSiteData(address, create=False)
        if not site_data or "cert" not in site_data:
            return None  # Site doesn't have a cert
        return self.certs.get(site_data["cert"])

    # Get cert user name for the site address
    # Return: user@certprovider.bit or None
    def getCertUserId(self, address):
        site_data = self.getSiteData(address, create=False)
        if not site_data or "cert" not in site_data:
            return None  # Site doesn't have a cert
        cert = self.certs.get(site_data["cert"])
        if cert:
            return cert["auth_user_name"] + "@" + site_data["cert"]
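Usage sketch (not part of the commit) for the per-site key derivation above; `user` is assumed to be an already loaded User instance, and the address is the example one from the comments:

site_address = "1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6"  # Illustrative site address
site_data = user.getSiteData(site_address)  # Derives and saves the BIP32 child key on first call
print site_data["auth_address"]  # Deterministic: same master_seed + site -> same auth key
print user.getCertUserId(site_address)  # e.g. "nofish@certprovider.bit", or None without a cert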


@@ -1,5 +1,4 @@
# Included modules
import json
import logging
@@ -43,7 +42,7 @@ class UserManager(object):
    def create(self, master_address=None, master_seed=None):
        user = User(master_address, master_seed)
        logging.debug("Created user: %s" % user.master_address)
        if user.master_address:  # If successfully created
            self.users[user.master_address] = user
            user.save()
        return user
@@ -74,10 +73,10 @@ def reloadModule():
    import imp
    global User, UserManager, user_manager
    User = imp.load_source("User", "src/User/User.py").User  # Reload source
    # module = imp.load_source("UserManager", "src/User/UserManager.py")  # Reload module
    # UserManager = module.UserManager
    # user_manager = module.user_manager

    # Reload users
    user_manager = UserManager()
    user_manager.load()


@@ -1,99 +1,102 @@
import time

import gevent

from Debug import Debug


class Worker(object):

    def __init__(self, manager, peer):
        self.manager = manager
        self.peer = peer
        self.task = None
        self.key = None
        self.running = False
        self.thread = None

    def __str__(self):
        return "Worker %s %s" % (self.manager.site.address_short, self.key)

    def __repr__(self):
        return "<%s>" % self.__str__()

    # Downloader thread
    def downloader(self):
        self.peer.hash_failed = 0  # Reset hash error counter
        while self.running:
            # Try to pickup free file download task
            task = self.manager.getTask(self.peer)
            if not task:  # Die, no more task
                self.manager.log.debug("%s: No task found, stopping" % self.key)
                break
            if not task["time_started"]:
                task["time_started"] = time.time()  # Task started now
            if task["workers_num"] > 0:  # Wait a bit if someone already working on it
                self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." % (self.key, task["inner_path"]))
                time.sleep(1)
                self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"]))
            if task["done"] is False:
                self.task = task
                site = task["site"]
                task["workers_num"] += 1
                try:
                    buff = self.peer.getFile(site.address, task["inner_path"])
                except Exception, err:
                    self.manager.log.debug("%s: getFile error: %s" % (self.key, err))
                    buff = None
                if self.running is False:  # Worker no longer needed or got killed
                    self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"]))
                    break
                if buff:  # Download ok
                    correct = site.content_manager.verifyFile(task["inner_path"], buff)
                else:  # Download error
                    correct = False
                if correct is True or correct is None:  # Hash ok or same file
                    self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"]))
                    if correct is True and task["done"] is False:  # Save if changed and task not done yet
                        buff.seek(0)
                        site.storage.write(task["inner_path"], buff)
                    if task["done"] is False:
                        self.manager.doneTask(task)
                    task["workers_num"] -= 1
                    self.task = None
                else:  # Hash failed
                    self.manager.log.debug(
                        "%s: Hash failed: %s, failed peers: %s" %
                        (self.key, task["inner_path"], len(task["failed"]))
                    )
                    task["failed"].append(self.peer)
                    self.task = None
                    self.peer.hash_failed += 1
                    if self.peer.hash_failed >= max(len(self.manager.tasks), 3):
                        # Broken peer: More fails than the number of tasks, but at least 3
                        break
                    task["workers_num"] -= 1
                    time.sleep(1)
        self.peer.onWorkerDone()
        self.running = False
        self.manager.removeWorker(self)

    # Start the worker
    def start(self):
        self.running = True
        self.thread = gevent.spawn(self.downloader)

    # Skip current task
    def skip(self):
        self.manager.log.debug("%s: Force skipping" % self.key)
        if self.thread:
            self.thread.kill(exception=Debug.Notify("Worker stopped"))
        self.start()

    # Force stop the worker
    def stop(self):
        self.manager.log.debug("%s: Force stopping" % self.key)
        self.running = False
        if self.thread:
            self.thread.kill(exception=Debug.Notify("Worker stopped"))
        del self.thread
        self.manager.removeWorker(self)
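The skip()/stop() methods rely on gevent's kill-with-exception mechanism: raising a dedicated exception inside the greenlet lets the downloader loop unwind cleanly. A self-contained sketch of that pattern, with a stand-in Notify class instead of Debug.Notify:

import gevent


class Notify(Exception):  # Stand-in for Debug.Notify
    pass


def downloader():
    try:
        while True:
            gevent.sleep(0.1)  # Placeholder for the real download loop
    except Notify, err:
        print "Unwound cleanly: %s" % err

thread = gevent.spawn(downloader)
gevent.sleep(0.3)
thread.kill(exception=Notify("Worker stopped"))  # Same call shape as Worker.stop()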


@@ -1,204 +1,211 @@
import time
import logging
import random

import gevent

from Worker import Worker

MAX_WORKERS = 10  # Max concurrent workers


# Worker manager for site
class WorkerManager:

    def __init__(self, site):
        self.site = site
        self.workers = {}  # Key: ip:port, Value: Worker.Worker
        self.tasks = []
        # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False,
        #  "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids}
        self.started_task_num = 0  # Last added task num
        self.running = True
        self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short)
        self.process_taskchecker = gevent.spawn(self.checkTasks)

    def __str__(self):
        return "WorkerManager %s" % self.site.address_short

    def __repr__(self):
        return "<%s>" % self.__str__()

    # Check expired tasks
    def checkTasks(self):
        while self.running:
            tasks = task = worker = workers = None  # Cleanup local variables
            time.sleep(15)  # Check every 15 sec

            # Clean up workers
            for worker in self.workers.values():
                if worker.task and worker.task["done"]:
                    worker.skip()  # Stop workers with task done

            if not self.tasks:
                continue

            tasks = self.tasks[:]  # Copy it so removing elements wont cause any problem
            for task in tasks:
                if task["time_started"] and time.time() >= task["time_started"] + 60:  # Task taking too long time, skip it
                    self.log.debug("Timeout, Skipping: %s" % task)
                    # Skip to next file workers
                    workers = self.findWorkers(task)
                    if workers:
                        for worker in workers:
                            worker.skip()
                    else:
                        self.failTask(task)
                elif time.time() >= task["time_added"] + 60 and not self.workers:  # No workers left
                    self.log.debug("Timeout, Cleanup task: %s" % task)
                    # Remove task
                    self.failTask(task)
                elif (task["time_started"] and time.time() >= task["time_started"] + 15) or not self.workers:
                    # Task started more than 15 sec ago or no workers
                    self.log.debug("Task taking more than 15 secs, find more peers: %s" % task["inner_path"])
                    task["site"].announce()  # Find more peers
                    if task["peers"]:  # Release the peer lock
                        self.log.debug("Task peer lock release: %s" % task["inner_path"])
                        task["peers"] = []
                        self.startWorkers()
                    break  # One reannounce per loop

        self.log.debug("checkTasks stopped running")

    # Tasks sorted by this
    def taskSorter(self, task):
        if task["inner_path"] == "content.json":
            return 9999  # Content.json always priority
        if task["inner_path"] == "index.html":
            return 9998  # index.html also important
        priority = task["priority"]
        if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"):
            priority += 1  # Download js and css files first
        return priority - task["workers_num"]  # Prefer higher priority and fewer workers

    # Returns the next free or less worked task
    def getTask(self, peer):
        self.tasks.sort(key=self.taskSorter, reverse=True)  # Sort tasks by priority and worker numbers
        for task in self.tasks:  # Find a task
            if task["peers"] and peer not in task["peers"]:
                continue  # This peer not allowed to pick this task
            if peer in task["failed"]:
                continue  # Peer already tried to solve this, but failed
            return task

    # New peers added to site
    def onPeers(self):
        self.startWorkers()

    # Add new worker
    def addWorker(self, peer):
        key = peer.key
        if key not in self.workers and len(self.workers) < MAX_WORKERS:
            # We dont have worker for that peer and workers num less than max
            worker = Worker(self, peer)
            self.workers[key] = worker
            worker.key = key
            worker.start()
            return worker
        else:  # We have a worker for this peer or it's over the limit
            return False

    # Start workers to process tasks
    def startWorkers(self, peers=None):
        if not self.tasks:
            return False  # No task for workers
        if len(self.workers) >= MAX_WORKERS and not peers:
            return False  # Workers number already maxed and no starting peers defined
        if not peers:
            peers = self.site.peers.values()  # No peers defined, use any from site
        random.shuffle(peers)
        for peer in peers:  # One worker for every peer
            if peers and peer not in peers:
                continue  # If peers defined and peer not valid
            worker = self.addWorker(peer)
            if worker:
                self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), MAX_WORKERS))

    # Stop all worker
    def stopWorkers(self):
        for worker in self.workers.values():
            worker.stop()
        tasks = self.tasks[:]  # Copy
        for task in tasks:  # Mark all current task as failed
            self.failTask(task)

    # Find workers by task
    def findWorkers(self, task):
        workers = []
        for worker in self.workers.values():
            if worker.task == task:
                workers.append(worker)
        return workers

    # Ends and remove a worker
    def removeWorker(self, worker):
        worker.running = False
        if worker.key in self.workers:
            del(self.workers[worker.key])
            self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS))

    # Create new task and return asyncresult
    def addTask(self, inner_path, peer=None, priority=0):
        self.site.onFileStart(inner_path)  # First task, trigger site download started
        task = self.findTask(inner_path)
        if task:  # Already has task for that file
            if peer and task["peers"]:  # This peer also has new version, add it to task possible peers
                task["peers"].append(peer)
                self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"]))
                self.startWorkers([peer])
            elif peer and peer in task["failed"]:
                task["failed"].remove(peer)  # New update arrived, remove the peer from failed peers
                self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"]))
                self.startWorkers([peer])

            if priority:
                task["priority"] += priority  # Boost on priority
            return task["evt"]
        else:  # No task for that file yet
            evt = gevent.event.AsyncResult()
            if peer:
                peers = [peer]  # Only download from this peer
            else:
                peers = None
            task = {
                "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False,
                "time_added": time.time(), "time_started": None, "peers": peers, "priority": priority, "failed": []
            }
            self.tasks.append(task)
            self.started_task_num += 1
            self.log.debug(
                "New task: %s, peer lock: %s, priority: %s, tasks: %s" %
                (task["inner_path"], peers, priority, self.started_task_num)
            )
            self.startWorkers(peers)
            return evt

    # Find a task using inner_path
    def findTask(self, inner_path):
        for task in self.tasks:
            if task["inner_path"] == inner_path:
                return task
        return None  # Not found

    # Mark a task failed
    def failTask(self, task):
        if task in self.tasks:
            task["done"] = True
            self.tasks.remove(task)  # Remove from queue
            self.site.onFileFail(task["inner_path"])
            task["evt"].set(False)
            if not self.tasks:
                self.started_task_num = 0

    # Mark a task done
    def doneTask(self, task):
        task["done"] = True
        self.tasks.remove(task)  # Remove from queue
        self.site.onFileDone(task["inner_path"])
        task["evt"].set(True)
        if not self.tasks:
            self.started_task_num = 0
            self.site.onComplete()  # No more task trigger site complete
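To make the ordering concrete, here is what the taskSorter above yields on a small, hand-made task list (same logic as in the diff, lifted to module level for the demo; the paths are illustrative):

def taskSorter(task):  # Copy of WorkerManager.taskSorter for demonstration
    if task["inner_path"] == "content.json":
        return 9999  # Content.json always priority
    if task["inner_path"] == "index.html":
        return 9998
    priority = task["priority"]
    if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"):
        priority += 1
    return priority - task["workers_num"]

tasks = [
    {"inner_path": "img/logo.png", "priority": 0, "workers_num": 0},
    {"inner_path": "js/all.js", "priority": 0, "workers_num": 0},
    {"inner_path": "content.json", "priority": 0, "workers_num": 2},
]
tasks.sort(key=taskSorter, reverse=True)
print [task["inner_path"] for task in tasks]  # ['content.json', 'js/all.js', 'img/logo.png']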


@@ -1,29 +1,39 @@
import re


def prefix(content):
    content = re.sub(
        "@keyframes (.*? {.*?[^ ]})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n",
        content, flags=re.DOTALL
    )
    content = re.sub(
        '([^-\*])(border-radius|box-shadow|transition|animation|box-sizing|' +
        'transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])',
        '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content
    )
    content = re.sub(
        '(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])',
        '\\1: -webkit-\\2(\\3);' +
        '\\1: -moz-\\2(\\3);' +
        '\\1: -o-\\2(\\3);' +
        '\\1: -ms-\\2(\\3);' +
        '\\1: \\2(\\3);', content
    )
    return content

if __name__ == "__main__":
    print prefix("""
    .test {
        border-radius: 5px;
        background: linear-gradient(red, blue);
    }
    @keyframes flip {
        0% { transform: perspective(120px) rotateX(0deg) rotateY(0deg); }
        50% { transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
        100% { transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); }
    }
    """)


@@ -16,8 +16,10 @@ update_after_shutdown = False  # If set True then update and restart zeronet aft
from Config import config

# Create necessary files and dirs
if not os.path.isdir(config.log_dir):
    os.mkdir(config.log_dir)
if not os.path.isdir(config.data_dir):
    os.mkdir(config.data_dir)
if not os.path.isfile("%s/sites.json" % config.data_dir):
    open("%s/sites.json" % config.data_dir, "w").write("{}")
if not os.path.isfile("%s/users.json" % config.data_dir):
@@ -50,12 +52,11 @@ logging.getLogger('').name = "-"  # Remove root prefix

# Debug dependent configuration
from Debug import DebugHook
if config.debug:
    console_log.setLevel(logging.DEBUG)  # Display everything to console
else:
    console_log.setLevel(logging.INFO)  # Display only important info to console

monkey.patch_all(thread=False)  # Not thread: pyfilesystem and system tray icon not compatible

# Log current config
@@ -67,7 +68,7 @@ if config.proxy:
    from util import SocksProxy
    import urllib2
    logging.info("Patching sockets to socks proxy: %s" % config.proxy)
    config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
    SocksProxy.monkeyPath(*config.proxy.split(":"))
@@ -81,6 +82,7 @@ PluginManager.plugin_manager.loadPlugins()

@PluginManager.acceptPlugins
class Actions(object):
    # Default action: Start serving UiServer and FileServer
    def main(self):
        logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__))
        global ui_server, file_server
@@ -113,8 +115,10 @@ class Actions(object):
        logging.info("----------------------------------------------------------------------")

        while True:
            if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
                break
            else:
                logging.info("Please, secure it now, you are going to need it to modify your site!")

        logging.info("Creating directory structure...")
        from Site import Site
@@ -132,7 +136,7 @@ class Actions(object):
    def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
        from Site import Site
        logging.info("Signing site: %s..." % address)
        site = Site(address, allow_create=False)

        if not privatekey:  # If no privatekey in args then ask it now
            import getpass
@@ -151,7 +155,10 @@ class Actions(object):
        for content_inner_path in site.content_manager.contents:
            logging.info("Verifying %s signature..." % content_inner_path)
            file_correct = site.content_manager.verifyFile(
                content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
            )
            if file_correct is True:
                logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
            else:
                logging.error("[ERROR] %s: invalid file!" % content_inner_path)
@@ -160,7 +167,7 @@ class Actions(object):
        logging.info("Verifying site files...")
        bad_files += site.storage.verifyFiles()
        if not bad_files:
            logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
        else:
            logging.error("[ERROR] Error during verifying site files!")
@@ -170,7 +177,7 @@ class Actions(object):
        site = Site(address)
        s = time.time()
        site.storage.rebuildDb()
        logging.info("Done in %.3fs" % (time.time() - s))

    def dbQuery(self, address, query):
        from Site import Site
@@ -188,46 +195,44 @@ class Actions(object):
        s = time.time()
        site.announce()
        print "Response time: %.3fs" % (time.time() - s)
        print site.peers

    def siteNeedFile(self, address, inner_path):
        from Site import Site
        site = Site(address)
        site.announce()
        print site.needFile(inner_path, update=True)

    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
        global file_server
        from Site import Site
        from File import FileServer  # We need fileserver to handle incoming file requests

        logging.info("Creating FileServer....")
        file_server = FileServer()
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Dont check every site integrity
        file_server.openport()
        site = file_server.sites[address]
        site.settings["serving"] = True  # Serving the site even if its disabled
        if peer_ip:  # Announce ip specified
            site.addPeer(peer_ip, peer_port)
        else:  # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce()  # Gather peers
        published = site.publish(20, inner_path)  # Push to 20 peers
        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")

    # Crypto commands
    def cryptPrivatekeyToAddress(self, privatekey=None):
        from Crypt import CryptBitcoin
        if not privatekey:  # If no privatekey in args then ask it now
            import getpass
            privatekey = getpass.getpass("Private key (input hidden):")
@@ -252,7 +257,7 @@ class Actions(object):
        for i in range(5):
            s = time.time()
            print peer.ping(),
            print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt)
            time.sleep(1)

    def peerGetFile(self, peer_ip, peer_port, site, filename):
@@ -266,7 +271,7 @@ class Actions(object):
        peer = Peer(peer_ip, peer_port)
        s = time.time()
        print peer.getFile(site, filename).read()
        print "Response time: %.3fs" % (time.time() - s)

    def peerCmd(self, peer_ip, peer_port, cmd, parameters):
        logging.info("Opening a simple connection server")
@@ -284,9 +289,10 @@ class Actions(object):
        logging.info("Response: %s" % peer.request(cmd, parameters))


actions = Actions()

# Starts here when running zeronet.py
def start():
    # Call function
    func = getattr(actions, config.action, None)
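start() resolves the action name to a method via getattr, so each CLI subcommand maps one-to-one to an Actions method. A stripped-down sketch of that dispatch (the class, action name, and argument below are hypothetical placeholders, not the real parser output):

class DemoActions(object):  # Stand-in for the plugin-accepting Actions class
    def siteVerify(self, address):
        print "Verifying %s..." % address

demo_actions = DemoActions()
action_name, args = "siteVerify", ["1SiteAddressPlaceholder"]  # Hypothetical parsed CLI input
func = getattr(demo_actions, action_name, None)  # Same lookup as in start()
if func:
    func(*args)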


@@ -1,60 +1,57 @@
# Based on http://stackoverflow.com/a/2022629
class Event(list):

    def __call__(self, *args, **kwargs):
        for f in self[:]:
            if "once" in dir(f) and f in self:
                self.remove(f)
            f(*args, **kwargs)

    def __repr__(self):
        return "Event(%s)" % list.__repr__(self)

    def once(self, func, name=None):
        func.once = True
        func.name = None
        if name:  # Don't add functions with the same name twice
            names = [f.name for f in self if "once" in dir(f)]
            if name not in names:
                func.name = name
                self.append(func)
        else:
            self.append(func)
        return self


def testBenchmark():
    def say(pre, text):
        print "%s Say: %s" % (pre, text)

    import time
    s = time.time()
    on_changed = Event()
    for i in range(1000):
        on_changed.once(lambda pre: say(pre, "once"), "once")
    print "Created 1000 once in %.3fs" % (time.time() - s)
    on_changed("#1")


def testUsage():
    def say(pre, text):
        print "%s Say: %s" % (pre, text)

    on_changed = Event()
    on_changed.once(lambda pre: say(pre, "once"))
    on_changed.once(lambda pre: say(pre, "once"))
    on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
    on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
    on_changed.append(lambda pre: say(pre, "always"))
    on_changed("#1")
    on_changed("#2")
    on_changed("#3")


if __name__ == "__main__":
    testBenchmark()
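Derived from the once() logic above (unnamed once-handlers are appended every time, named ones only once), running testUsage() should print the following; this is expected output worked out by hand, not captured from a run:

testUsage()
# #1 Say: once
# #1 Say: once
# #1 Say: namedonce
# #1 Say: always
# #2 Say: always
# #3 Say: always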


@@ -12,8 +12,11 @@ except AttributeError:
OldSSLSocket = __ssl__.SSLSocket


class NewSSLSocket(OldSSLSocket):
    """Fix SSLSocket constructor."""

    def __init__(
        self, sock, keyfile=None, certfile=None, server_side=False, cert_reqs=0,
        ssl_version=2, ca_certs=None, do_handshake_on_connect=True,


@@ -1,11 +1,13 @@
import urllib2
import logging

import GeventSslPatch

from Config import config


def get(url):
    logging.debug("Get %s" % url)
    req = urllib2.Request(url)
    req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version)
    req.add_header('Accept', 'application/json')
    return urllib2.urlopen(req)
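Usage sketch for the helper above; the URL is illustrative (any JSON-serving endpoint works), and the ZeroNet User-Agent and Accept headers are attached automatically:

response = get("https://api.github.com/repos/HelloZeroNet/ZeroNet")  # Hypothetical endpoint
print response.read()[:100]  # First bytes of the JSON body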


@@ -1,142 +1,145 @@
import gevent
import time


class Noparallel(object):  # Only allow function running once in same time

    def __init__(self, blocking=True):
        self.threads = {}
        self.blocking = blocking  # Blocking: Acts like normal function else thread returned

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            key = (func, tuple(args), tuple(kwargs.items()))  # Unique key for function including parameters
            if key in self.threads:  # Thread already running (if using blocking mode)
                thread = self.threads[key]
                if self.blocking:
                    thread.join()  # Blocking until its finished
                    return thread.value  # Return the value
                else:  # No blocking
                    if thread.ready():  # Its finished, create a new
                        thread = gevent.spawn(func, *args, **kwargs)
                        self.threads[key] = thread
                        return thread
                    else:  # Still running
                        return thread
            else:  # Thread not running
                thread = gevent.spawn(func, *args, **kwargs)  # Spawning new thread
                thread.link(lambda thread: self.cleanup(key, thread))
                self.threads[key] = thread
                if self.blocking:  # Wait for finish
                    thread.join()
                    ret = thread.value
                    return ret
                else:  # No blocking just return the thread
                    return thread
        wrapper.func_name = func.func_name

        return wrapper

    # Cleanup finished threads
    def cleanup(self, key, thread):
        if key in self.threads:
            del(self.threads[key])


class Test():

    @Noparallel()
    def count(self, num=5):
        for i in range(num):
            print self, i
            time.sleep(1)
        return "%s return:%s" % (self, i)


class TestNoblock():

    @Noparallel(blocking=False)
    def count(self, num=5):
        for i in range(num):
            print self, i
            time.sleep(1)
        return "%s return:%s" % (self, i)


def testBlocking():
    test = Test()
    test2 = Test()
    print "Counting..."
    print "Creating class1/thread1"
    thread1 = gevent.spawn(test.count)
    print "Creating class1/thread2 (ignored)"
    thread2 = gevent.spawn(test.count)
    print "Creating class2/thread3"
    thread3 = gevent.spawn(test2.count)

    print "Joining class1/thread1"
    thread1.join()
    print "Joining class1/thread2"
    thread2.join()
    print "Joining class2/thread3"
    thread3.join()

    print "Creating class1/thread4 (its finished, allowed again)"
    thread4 = gevent.spawn(test.count)
    print "Joining thread4"
    thread4.join()

    print thread1.value, thread2.value, thread3.value, thread4.value
    print "Done."


def testNoblocking():
    test = TestNoblock()
    test2 = TestNoblock()
    print "Creating class1/thread1"
    thread1 = test.count()
    print "Creating class1/thread2 (ignored)"
    thread2 = test.count()
    print "Creating class2/thread3"
    thread3 = test2.count()

    print "Joining class1/thread1"
    thread1.join()
    print "Joining class1/thread2"
    thread2.join()
    print "Joining class2/thread3"
    thread3.join()

    print "Creating class1/thread4 (its finished, allowed again)"
    thread4 = test.count()
    print "Joining thread4"
    thread4.join()

    print thread1.value, thread2.value, thread3.value, thread4.value
    print "Done."


def testBenchmark():
    import time

    def printThreadNum():
        import gc
        from greenlet import greenlet
        objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
        print "Greenlets: %s" % len(objs)

    printThreadNum()
    test = TestNoblock()
    s = time.time()
    for i in range(3):
        gevent.spawn(test.count, i + 1)
    print "Created in %.3fs" % (time.time() - s)
    printThreadNum()
    time.sleep(5)
    printThreadNum()


if __name__ == "__main__":
    from gevent import monkey
    monkey.patch_all()
    testBenchmark()
    print "Testing blocking mode..."
    testBlocking()
    print "Testing noblocking mode..."
    testNoblocking()


@ -1,61 +1,64 @@
import json, re, os import json
import re
import os
def queryFile(file_path, filter_path, filter_key = None, filter_val = None): def queryFile(file_path, filter_path, filter_key=None, filter_val=None):
back = [] back = []
data = json.load(open(file_path)) data = json.load(open(file_path))
if filter_path == ['']: return [data] if filter_path == ['']:
for key in filter_path: # Get to the point return [data]
data = data.get(key) for key in filter_path: # Get to the point
if not data: return data = data.get(key)
if not data:
return
for row in data: for row in data:
if filter_val: # Filter by value if filter_val: # Filter by value
if row[filter_key] == filter_val: back.append(row) if row[filter_key] == filter_val:
else: back.append(row)
back.append(row) else:
back.append(row)
return back return back
# Find in json files
# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1...beEp6', u'added': 1422740732, u'message_id': 1},...]
def query(path_pattern, filter):
    if "=" in filter:  # Filter by value
        filter_path, filter_val = filter.split("=")
        filter_path = filter_path.split(".")
        filter_key = filter_path.pop()  # Last element is the key
        filter_val = int(filter_val)
    else:  # No filter
        filter_path = filter
        filter_path = filter_path.split(".")
        filter_key = None
        filter_val = None

    if "/*/" in path_pattern:  # Wildcard search
        root_dir, file_pattern = path_pattern.replace("\\", "/").split("/*/")
    else:  # No wildcard
        root_dir, file_pattern = re.match("(.*)/(.*?)$", path_pattern.replace("\\", "/")).groups()
    for root, dirs, files in os.walk(root_dir, topdown=False):
        root = root.replace("\\", "/")
        inner_path = root.replace(root_dir, "").strip("/")
        for file_name in files:
            if file_pattern != file_name:
                continue
            try:
                res = queryFile(root + "/" + file_name, filter_path, filter_key, filter_val)
                if not res:
                    continue
            except Exception:  # Json load error
                continue
            for row in res:
                row["inner_path"] = inner_path
                yield row


if __name__ == "__main__":
    for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
        print row
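For orientation, a hedged usage sketch of the filter syntax (not part of the commit): the field names "topics" and "topic_id" are illustrative assumptions about a site's data.json schema; the path comes from the demo above. Filters with "=" split on the last dotted segment and compare it as an integer:

# Sketch only: find every topic row with topic_id == 1 across all users' data.json files
for row in query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "topics.topic_id=1"):
    # each row is the matching json fragment, with the user dir added as "inner_path"
    print row["inner_path"], row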
@@ -4,119 +4,120 @@ import logging
log = logging.getLogger("RateLimit")

called_db = {}  # Holds events last call time
queue_db = {}  # Commands queued to run


# Register event as called
# Return: None
def called(event):
    called_db[event] = time.time()


# Check if calling event is allowed
# Return: True if allowed, False if not
def isAllowed(event, allowed_again=10):
    last_called = called_db.get(event)
    if not last_called:  # It's not called before
        return True
    elif time.time() - last_called >= allowed_again:
        del called_db[event]  # Delete last call time to save memory
        return True
    else:
        return False


def callQueue(event):
    func, args, kwargs, thread = queue_db[event]
    log.debug("Calling: %s" % event)
    del called_db[event]
    del queue_db[event]
    return func(*args, **kwargs)


# Rate limit and delay function call if necessary
# If the function is called again within the rate limit interval, the previously queued call is dropped
# Return: Immediately returns a gevent thread
def callAsync(event, allowed_again=10, func=None, *args, **kwargs):
    if isAllowed(event, allowed_again):  # Not called recently, call it now
        called(event)
        # print "Calling now"
        return gevent.spawn(func, *args, **kwargs)
    else:  # Called recently, schedule it for later
        time_left = allowed_again - max(0, time.time() - called_db[event])
        log.debug("Added to queue (%.2fs left): %s " % (time_left, event))
        if not queue_db.get(event):  # Function call not queued yet
            thread = gevent.spawn_later(time_left, lambda: callQueue(event))  # Call this function later
            queue_db[event] = (func, args, kwargs, thread)
            return thread
        else:  # Function call already queued, just update the parameters
            thread = queue_db[event][3]
            queue_db[event] = (func, args, kwargs, thread)
            return thread


# Rate limit and delay function call if needed
# Return: Wait for execution/delay then return value
def call(event, allowed_again=10, func=None, *args, **kwargs):
    if isAllowed(event):  # Not called recently, call it now
        called(event)
        # print "Calling now"
        return func(*args, **kwargs)
    else:  # Called recently, schedule it for later
        time_left = max(0, allowed_again - (time.time() - called_db[event]))
        # print "Time left: %s" % time_left, args, kwargs
        log.debug("Calling sync (%.2fs left): %s" % (time_left, event))
        time.sleep(time_left)
        called(event)
        back = func(*args, **kwargs)
        if event in called_db:
            del called_db[event]
        return back


# Cleanup expired events every 3 minutes
def cleanup():
    while 1:
        expired = time.time() - 60 * 2  # Cleanup if older than 2 minutes
        for event in called_db.keys():
            if called_db[event] < expired:
                del called_db[event]
        time.sleep(60 * 3)  # Every 3 minutes

gevent.spawn(cleanup)

if __name__ == "__main__":
    from gevent import monkey
    monkey.patch_all()
    import random

    def publish(inner_path):
        print "Publishing %s..." % inner_path
        return 1

    def cb(thread):
        print "Value:", thread.value

    print "Testing async spam requests rate limit to 1/sec..."
    for i in range(3000):
        thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
        time.sleep(float(random.randint(1, 20)) / 100000)
    print thread.link(cb)
    print "Done"
    time.sleep(2)

    print "Testing sync spam requests rate limit to 1/sec..."
    for i in range(5):
        call("publish data.json", 1, publish, "data.json %s" % i)
        time.sleep(float(random.randint(1, 100)) / 100)
    print "Done"

    print "Testing cleanup"
    thread = callAsync("publish content.json single", 1, publish, "content.json single")
    print "Needs to cleanup:", called_db, queue_db
    print "Waiting 3min for cleanup process..."
    time.sleep(60 * 3)
    print "Cleaned up:", called_db, queue_db
@@ -1,22 +1,22 @@
import socket

from lib.PySocks import socks


def create_connection(address, timeout=None, source_address=None):
    sock = socks.socksocket()
    sock.connect(address)
    return sock


# Dns queries using the proxy
def getaddrinfo(*args):
    return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]


def monkeyPath(proxy_ip, proxy_port):
    print proxy_ip, proxy_port
    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
    socket.socket = socks.socksocket
    socket.create_connection = create_connection
    socket.getaddrinfo = getaddrinfo
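For context, a hedged sketch of how this module is typically wired up; the proxy address is an illustrative assumption (Tor's default SOCKS port), and importing it as SocksProxy is also assumed:

import SocksProxy  # assumption: the module above

SocksProxy.monkeyPath("127.0.0.1", "9050")  # note: the helper really is spelled monkeyPath in the source
# After this, socket.socket, socket.create_connection and socket.getaddrinfo
# are all replaced, so later connections and DNS lookups go through the proxy.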
@@ -2,6 +2,7 @@
# Disable SSL compression to save massive memory and cpu
import logging

from Config import config

@@ -9,7 +10,10 @@ def disableSSLCompression():
    import ctypes
    import ctypes.util
    try:
        openssl = ctypes.CDLL(
            ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32',
            ctypes.RTLD_GLOBAL
        )
        openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p
    except Exception, err:
        logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err)

@@ -81,7 +85,7 @@ if not hasattr(_ssl, 'sslwrap'):
    logging.debug("Missing SSLwrap, readded.")

# Add SSLContext to gevent.ssl (Ubuntu 15 fix)
try:
    import gevent
    if not hasattr(gevent.ssl, "SSLContext"):
@@ -1,36 +1,40 @@
import os
import struct

import msgpack


def msgpackHeader(size):
    if size <= 2 ** 8 - 1:
        return b"\xc4" + struct.pack("B", size)
    elif size <= 2 ** 16 - 1:
        return b"\xc5" + struct.pack(">H", size)
    elif size <= 2 ** 32 - 1:
        return b"\xc6" + struct.pack(">I", size)
    else:
        raise Exception("huge binary string")


def stream(data, writer):
    packer = msgpack.Packer()
    writer(packer.pack_map_header(len(data)))
    for key, val in data.iteritems():
        writer(packer.pack(key))
        if issubclass(type(val), file):  # File obj
            max_size = os.fstat(val.fileno()).st_size - val.tell()
            size = min(max_size, val.read_bytes)
            bytes_left = size
            writer(msgpackHeader(size))
            buff = 1024 * 64
            while 1:
                writer(val.read(min(bytes_left, buff)))
                bytes_left = bytes_left - buff
                if bytes_left <= 0:
                    break
        else:  # Simple
            writer(packer.pack(val))
    return size  # Note: only set when a file value was streamed


class FilePart(file):
    pass
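To make the framing concrete: msgpackHeader(300) returns b"\xc5\x01\x2c", the msgpack bin16 marker 0xc5 followed by the big-endian length, and FilePart exists because plain file instances cannot carry the extra read_bytes attribute that stream() reads. A minimal sketch, assuming an existing file named content.json (the name is illustrative):

import cStringIO as StringIO

out = StringIO.StringIO()
f = FilePart("content.json")  # a file subclass, so setting attributes works
f.read_bytes = 512  # stream() embeds at most this many bytes of the file

stream({"cmd": "response", "body": f}, out.write)
print repr(out.getvalue()[:16])  # map header, packed key/values, then the bin-prefixed file data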
@@ -1,10 +1,13 @@
import re
import urllib2
import httplib
import logging
from urlparse import urlparse
from xml.dom.minidom import parseString

import gevent
from gevent import socket

# Relevant UPnP spec: http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf

# General TODOs:
@@ -14,222 +17,222 @@ remove_whitespace = re.compile(r'>\s*<')


def _m_search_ssdp(local_ip):
    """
    Broadcast a UDP SSDP M-SEARCH packet and return response.
    """
    search_target = "urn:schemas-upnp-org:device:InternetGatewayDevice:1"

    ssdp_request = ''.join(
        ['M-SEARCH * HTTP/1.1\r\n',
         'HOST: 239.255.255.250:1900\r\n',
         'MAN: "ssdp:discover"\r\n',
         'MX: 2\r\n',
         'ST: {0}\r\n'.format(search_target),
         '\r\n']
    )

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind((local_ip, 10000))

    sock.sendto(ssdp_request, ('239.255.255.250', 1900))
    sock.settimeout(5)

    try:
        return sock.recv(2048)
    except socket.error, err:
        # no reply from IGD, possibly no IGD on LAN
        logging.debug("UDP SSDP M-SEARCH send error using ip %s: %s" % (local_ip, err))
        return False


def _retrieve_location_from_ssdp(response):
    """
    Parse raw HTTP response to retrieve the UPnP location header
    and return a ParseResult object.
    """
    parsed = re.findall(r'(?P<name>.*?): (?P<value>.*?)\r\n', response)
    location_header = filter(lambda x: x[0].lower() == 'location', parsed)

    if not len(location_header):
        # no location header returned :(
        return False

    return urlparse(location_header[0][1])


def _retrieve_igd_profile(url):
    """
    Retrieve the device's UPnP profile.
    """
    return urllib2.urlopen(url.geturl()).read()


def _node_val(node):
    """
    Get the text value of the first child text node of a node.
    """
    return node.childNodes[0].data


def _parse_igd_profile(profile_xml):
    """
    Traverse the profile xml DOM looking for either
    WANIPConnection or WANPPPConnection and return
    the value found as well as the 'controlURL'.
    """
    dom = parseString(profile_xml)

    service_types = dom.getElementsByTagName('serviceType')
    for service in service_types:
        if _node_val(service).find('WANIPConnection') > 0 or \
           _node_val(service).find('WANPPPConnection') > 0:
            control_url = service.parentNode.getElementsByTagName(
                'controlURL'
            )[0].childNodes[0].data
            upnp_schema = _node_val(service).split(':')[-2]
            return control_url, upnp_schema

    return False


def _get_local_ip():
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    # not using <broadcast> because gevent's getaddrinfo doesn't like that
    # using port 1 as per hobbldygoop's comment about port 0 not working on osx:
    # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928
    s.connect(('239.255.255.250', 1))
    return s.getsockname()[0]


def _create_soap_message(local_ip, port, description="UPnPPunch", protocol="TCP",
                         upnp_schema='WANIPConnection'):
    """
    Build a SOAP AddPortMapping message.
    """
    soap_message = """<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
    <s:Body>
        <u:AddPortMapping xmlns:u="urn:schemas-upnp-org:service:{upnp_schema}:1">
            <NewRemoteHost></NewRemoteHost>
            <NewExternalPort>{port}</NewExternalPort>
            <NewProtocol>{protocol}</NewProtocol>
            <NewInternalPort>{port}</NewInternalPort>
            <NewInternalClient>{host_ip}</NewInternalClient>
            <NewEnabled>1</NewEnabled>
            <NewPortMappingDescription>{description}</NewPortMappingDescription>
            <NewLeaseDuration>0</NewLeaseDuration>
        </u:AddPortMapping>
    </s:Body>
</s:Envelope>""".format(port=port,
                        protocol=protocol,
                        host_ip=local_ip,
                        description=description,
                        upnp_schema=upnp_schema)
    return remove_whitespace.sub('><', soap_message)


def _parse_for_errors(soap_response):
    if soap_response.status == 500:
        err_dom = parseString(soap_response.read())
        err_code = _node_val(err_dom.getElementsByTagName('errorCode')[0])
        err_msg = _node_val(
            err_dom.getElementsByTagName('errorDescription')[0]
        )
        logging.error('SOAP request error: {0} - {1}'.format(err_code, err_msg))
        raise Exception(
            'SOAP request error: {0} - {1}'.format(err_code, err_msg)
        )
        return False
    else:
        return True


def _send_soap_request(location, upnp_schema, control_url, soap_message):
    """
    Send out SOAP request to UPnP device and return a response.
    """
    headers = {
        'SOAPAction': (
            '"urn:schemas-upnp-org:service:{schema}:'
            '1#AddPortMapping"'.format(schema=upnp_schema)
        ),
        'Content-Type': 'text/xml'
    }
    conn = httplib.HTTPConnection(location.hostname, location.port)
    conn.request('POST', control_url, soap_message, headers)

    response = conn.getresponse()
    conn.close()

    return _parse_for_errors(response)


def open_port(port=15441, desc="UpnpPunch"):
    """
    Attempt to forward a port using UPnP.
    """
    local_ips = [_get_local_ip()]
    try:
        local_ips += socket.gethostbyname_ex('')[2]  # Get ip by '' hostname, not supported on all platforms
    except:
        pass

    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(('8.8.8.8', 0))  # Using google dns route
        local_ips.append(s.getsockname()[0])
    except:
        pass

    local_ips = list(set(local_ips))  # Delete duplicates
    logging.debug("Found local ips: %s" % local_ips)
    local_ips = local_ips * 3  # Retry every ip 3 times

    for local_ip in local_ips:
        logging.debug("Trying using local ip: %s" % local_ip)
        idg_response = _m_search_ssdp(local_ip)

        if not idg_response:
            logging.debug("No IGD response")
            continue

        location = _retrieve_location_from_ssdp(idg_response)

        if not location:
            logging.debug("No location")
            continue

        parsed = _parse_igd_profile(
            _retrieve_igd_profile(location)
        )

        if not parsed:
            logging.debug("IGD parse error using location %s" % repr(location))
            continue

        control_url, upnp_schema = parsed

        soap_messages = [_create_soap_message(local_ip, port, desc, proto, upnp_schema)
                         for proto in ['TCP', 'UDP']]
        requests = [gevent.spawn(
            _send_soap_request, location, upnp_schema, control_url, message
        ) for message in soap_messages]

        gevent.joinall(requests, timeout=3)

        if all([request.value for request in requests]):
            return True
    return False


if __name__ == "__main__":
    from gevent import monkey
    monkey.patch_socket()
    logging.getLogger().setLevel(logging.DEBUG)
    print open_port(15441, "ZeroNet")
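For reviewers new to this module, the happy path through open_port() boils down to four steps. The sketch below is illustrative only (the local ip is an assumption) and skips the error handling and gevent fan-out shown above:

response = _m_search_ssdp("192.168.1.42")           # 1. SSDP M-SEARCH for an InternetGatewayDevice
location = _retrieve_location_from_ssdp(response)   # 2. parse the LOCATION header of the reply
control_url, schema = _parse_igd_profile(
    _retrieve_igd_profile(location))                # 3. fetch the profile, find WANIPConnection/WANPPPConnection
message = _create_soap_message("192.168.1.42", 15441, "ZeroNet", "TCP", schema)
_send_soap_request(location, schema, control_url, message)  # 4. SOAP AddPortMapping on the control URL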
update.py
@@ -1,72 +1,81 @@
import urllib
import zipfile
import os
import ssl
import httplib
import socket
import re

import cStringIO as StringIO

from gevent import monkey
monkey.patch_all()


def update():
    # Gevent https bug workaround (https://github.com/gevent/gevent/issues/477)
    reload(socket)
    reload(httplib)
    reload(ssl)

    print "Downloading.",
    file = urllib.urlopen("https://github.com/HelloZeroNet/ZeroNet/archive/master.zip")
    data = StringIO.StringIO()
    while True:
        buff = file.read(1024 * 16)
        if not buff:
            break
        data.write(buff)
        print ".",
    print "Downloaded."

    # Checking plugins
    plugins_enabled = []
    plugins_disabled = []
    if os.path.isdir("plugins"):
        for dir in os.listdir("plugins"):
            if dir.startswith("disabled-"):
                plugins_disabled.append(dir.replace("disabled-", ""))
            else:
                plugins_enabled.append(dir)
    print "Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled

    print "Extracting...",
    zip = zipfile.ZipFile(data)
    for inner_path in zip.namelist():
        inner_path = inner_path.replace("\\", "/")  # Make sure we have unix path
        print ".",
        dest_path = inner_path.replace("ZeroNet-master/", "")
        if not dest_path:
            continue

        # Keep plugin disabled/enabled status
        match = re.match("plugins/([^/]+)", dest_path)
        if match:
            plugin_name = match.group(1).replace("disabled-", "")
            if plugin_name in plugins_enabled:  # Plugin was enabled
                dest_path = dest_path.replace("plugins/disabled-" + plugin_name, "plugins/" + plugin_name)
            elif plugin_name in plugins_disabled:  # Plugin was disabled
                dest_path = dest_path.replace("plugins/" + plugin_name, "plugins/disabled-" + plugin_name)
            print "P",

        dest_dir = os.path.dirname(dest_path)
        if dest_dir and not os.path.isdir(dest_dir):
            os.makedirs(dest_dir)

        if dest_dir != dest_path.strip("/"):
            data = zip.read(inner_path)
            try:
                open(dest_path, 'wb').write(data)
            except Exception, err:
                print dest_path, err

    print "Done."


if __name__ == "__main__":
    try:
        update()
    except Exception, err:
        print "Update error: %s" % err
        raw_input("Press enter to exit")