rev280: The whole project reformatted to PEP8, UiRequest getPosted added to query posted variables

HelloZeroNet 2015-07-12 20:36:46 +02:00
parent a5741704e4
commit b5ecb62bc6
49 changed files with 5704 additions and 5205 deletions
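Note: the UiRequest getPosted change named in the commit title is not part of the hunks shown on this page. For orientation only, a minimal sketch of what such a helper could look like, assuming it mirrors the existing self.get dict of query-string variables; the env access and names below are assumptions, not the actual patch:

import cgi

def getPosted(self):
    # Hypothetical sketch, not the actual commit content: parse a WSGI POST
    # body into a dict, the way self.get already exposes GET parameters
    if self.env["REQUEST_METHOD"] == "POST":
        content_length = int(self.env.get("CONTENT_LENGTH", 0))
        return dict(cgi.parse_qsl(self.env["wsgi.input"].read(content_length)))
    else:
        return {}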

View file

@ -1,17 +1,22 @@
import re, time, cgi, os
import time
import cgi
import os
from Plugin import PluginManager
from Config import config
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
def formatTableRow(self, row):
back = []
for format, val in row:
if val == None:
if val is None:
formatted = "n/a"
elif format == "since":
if val:
formatted = "%.0f" % (time.time()-val)
formatted = "%.0f" % (time.time() - val)
else:
formatted = "n/a"
else:
@ -19,17 +24,16 @@ class UiRequestPlugin(object):
back.append("<td>%s</td>" % formatted)
return "<tr>%s</tr>" % "".join(back)
def getObjSize(self, obj, hpy = None):
def getObjSize(self, obj, hpy=None):
if hpy:
return float(hpy.iso(obj).domisize)/1024
return float(hpy.iso(obj).domisize) / 1024
else:
return 0
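The domisize attribute comes from guppy, the optional heap profiler whose hpy handle is passed in when size calculation is enabled; a standalone sketch of the same measurement, assuming guppy is installed:

from guppy import hpy  # optional dependency, only needed for ?size=1

h = hpy()
obj = []
print float(h.iso(obj).domisize) / 1024  # dominated heap size of obj, in kB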
# /Stats entry point
def actionStats(self):
import gc, sys
import gc
import sys
from Ui import UiRequest
from Crypt import CryptConnection
@ -58,7 +62,10 @@ class UiRequestPlugin(object):
yield "%s | " % config.ip_external
yield "Opened: %s | " % main.file_server.port_opened
yield "Crypt: %s | " % CryptConnection.manager.crypt_supported
yield "In: %.2fMB, Out: %.2fMB | " % (float(main.file_server.bytes_recv)/1024/1024, float(main.file_server.bytes_sent)/1024/1024)
yield "In: %.2fMB, Out: %.2fMB | " % (
float(main.file_server.bytes_recv) / 1024 / 1024,
float(main.file_server.bytes_sent) / 1024 / 1024
)
yield "Peerid: %s | " % main.file_server.peer_id
import psutil
process = psutil.Process(os.getpid())
@ -69,14 +76,17 @@ class UiRequestPlugin(object):
yield "Files: %s | " % len(process.open_files())
yield "Sockets: %s | " % len(process.connections())
yield "Calc size <a href='?size=1'>on</a> <a href='?size=0'>off</a>"
except Exception, err:
except Exception:
pass
yield "<br>"
# Connections
yield "<b>Connections</b> (%s, total made: %s):<br>" % (len(main.file_server.connections), main.file_server.last_connection_id)
yield "<table><tr> <th>id</th> <th>proto</th> <th>type</th> <th>ip</th> <th>open</th> <th>crypt</th> <th>ping</th> <th>buff</th>"
yield "<th>idle</th> <th>open</th> <th>delay</th> <th>out</th> <th>in</th> <th>last sent</th> <th>waiting</th> <th>version</th> <th>peerid</th> </tr>"
yield "<b>Connections</b> (%s, total made: %s):<br>" % (
len(main.file_server.connections), main.file_server.last_connection_id
)
yield "<table><tr> <th>id</th> <th>proto</th> <th>type</th> <th>ip</th> <th>open</th> <th>crypt</th> <th>ping</th>"
yield "<th>buff</th> <th>idle</th> <th>open</th> <th>delay</th> <th>out</th> <th>in</th> <th>last sent</th>"
yield "<th>waiting</th> <th>version</th> <th>peerid</th> </tr>"
for connection in main.file_server.connections:
if "cipher" in dir(connection.sock):
cipher = connection.sock.cipher()[0]
@ -93,30 +103,32 @@ class UiRequestPlugin(object):
("%s", connection.incomplete_buff_recv),
("since", max(connection.last_send_time, connection.last_recv_time)),
("since", connection.start_time),
("%.3f", connection.last_sent_time-connection.last_send_time),
("%.0fkB", connection.bytes_sent/1024),
("%.0fkB", connection.bytes_recv/1024),
("%.3f", connection.last_sent_time - connection.last_send_time),
("%.0fkB", connection.bytes_sent / 1024),
("%.0fkB", connection.bytes_recv / 1024),
("%s", connection.last_cmd),
("%s", connection.waiting_requests.keys()),
("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?")) ),
("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?"))),
("%s", connection.handshake.get("peer_id")),
])
yield "</table>"
# Sites
yield "<br><br><b>Sites</b>:"
yield "<table>"
yield "<tr><th>address</th> <th>connected</th> <th>peers</th> <th>content.json</th> </tr>"
for site in self.server.sites.values():
yield self.formatTableRow([
("<a href='#ShowPeers' onclick='document.getElementById(\"peers_%s\").style.display=\"initial\"; return false'>%s</a>", (site.address, site.address)),
(
"""<a href='#' onclick='document.getElementById("peers_%s").style.display="initial"; return false'>%s</a>""",
(site.address, site.address)
),
("%s", [peer.connection.id for peer in site.peers.values() if peer.connection and peer.connection.connected]),
("%s/%s/%s", (
len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]),
len(site.getConnectablePeers(100)),
len(site.peers)
) ),
)),
("%s", len(site.content_manager.contents)),
])
yield "<tr><td id='peers_%s' style='display: none; white-space: pre'>" % site.address
@ -125,55 +137,59 @@ class UiRequestPlugin(object):
yield "<br></td></tr>"
yield "</table>"
# Object types
obj_count = {}
for obj in gc.get_objects():
obj_type = str(type(obj))
if not obj_type in obj_count:
if obj_type not in obj_count:
obj_count[obj_type] = [0, 0]
obj_count[obj_type][0] += 1 # Count
obj_count[obj_type][1] += float(sys.getsizeof(obj))/1024 # Size
obj_count[obj_type][1] += float(sys.getsizeof(obj)) / 1024 # Size
yield "<br><br><b>Objects in memory (types: %s, total: %s, %.2fkb):</b><br>" % (len(obj_count), sum([stat[0] for stat in obj_count.values()]), sum([stat[1] for stat in obj_count.values()]))
yield "<br><br><b>Objects in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
len(obj_count),
sum([stat[0] for stat in obj_count.values()]),
sum([stat[1] for stat in obj_count.values()])
)
for obj, stat in sorted(obj_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count
yield " - %.1fkb = %s x <a href=\"/Listobj?type=%s\">%s</a><br>" % (stat[1], stat[0], obj, cgi.escape(obj))
# Classes
class_count = {}
for obj in gc.get_objects():
obj_type = str(type(obj))
if obj_type != "<type 'instance'>": continue
if obj_type != "<type 'instance'>":
continue
class_name = obj.__class__.__name__
if not class_name in class_count:
if class_name not in class_count:
class_count[class_name] = [0, 0]
class_count[class_name][0] += 1 # Count
class_count[class_name][1] += float(sys.getsizeof(obj))/1024 # Size
class_count[class_name][1] += float(sys.getsizeof(obj)) / 1024 # Size
yield "<br><br><b>Classes in memory (types: %s, total: %s, %.2fkb):</b><br>" % (len(class_count), sum([stat[0] for stat in class_count.values()]), sum([stat[1] for stat in class_count.values()]))
yield "<br><br><b>Classes in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
len(class_count),
sum([stat[0] for stat in class_count.values()]),
sum([stat[1] for stat in class_count.values()])
)
for obj, stat in sorted(class_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count
yield " - %.1fkb = %s x <a href=\"/Dumpobj?class=%s\">%s</a><br>" % (stat[1], stat[0], obj, cgi.escape(obj))
from greenlet import greenlet
objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
yield "<br>Greenlets (%s):<br>" % len(objs)
for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
from Worker import Worker
objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)]
yield "<br>Workers (%s):<br>" % len(objs)
for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
from Connection import Connection
objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)]
yield "<br>Connections (%s):<br>" % len(objs)
@ -192,33 +208,28 @@ class UiRequestPlugin(object):
for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
from Site import Site
objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)]
yield "<br>Sites (%s):<br>" % len(objs)
for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
objs = [obj for obj in gc.get_objects() if isinstance(obj, self.server.log.__class__)]
yield "<br>Loggers (%s):<br>" % len(objs)
for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj.name)))
objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)]
yield "<br>UiRequests (%s):<br>" % len(objs)
for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
from Peer import Peer
objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)]
yield "<br>Peers (%s):<br>" % len(objs)
for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
objs = [(key, val) for key, val in sys.modules.iteritems() if val is not None]
objs.sort()
yield "<br>Modules (%s):<br>" % len(objs)
@ -226,11 +237,11 @@ class UiRequestPlugin(object):
yield " - %.3fkb: %s %s<br>" % (self.getObjSize(module, hpy), module_name, cgi.escape(repr(module)))
gc.collect() # Implicit garbage collection
yield "Done in %.1f" % (time.time()-s)
yield "Done in %.1f" % (time.time() - s)
def actionDumpobj(self):
import gc, sys
import gc
import sys
self.sendHeader()
class_filter = self.get.get("class")
@ -245,17 +256,18 @@ class UiRequestPlugin(object):
objs = gc.get_objects()
for obj in objs:
obj_type = str(type(obj))
if obj_type != "<type 'instance'>" or obj.__class__.__name__ != class_filter: continue
yield "%.1fkb %s... " % (float(sys.getsizeof(obj))/1024, cgi.escape(str(obj)) )
if obj_type != "<type 'instance'>" or obj.__class__.__name__ != class_filter:
continue
yield "%.1fkb %s... " % (float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)))
for attr in dir(obj):
yield "- %s: %s<br>" % (attr, cgi.escape(str(getattr(obj, attr))))
yield "<br>"
gc.collect() # Implicit garbage collection
def actionListobj(self):
import gc, sys
import gc
import sys
self.sendHeader()
type_filter = self.get.get("type")
@ -273,34 +285,42 @@ class UiRequestPlugin(object):
objs = gc.get_objects()
for obj in objs:
obj_type = str(type(obj))
if obj_type != type_filter: continue
refs = [ref for ref in gc.get_referrers(obj) if hasattr(ref, "__class__") and ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"]]
if not refs: continue
yield "%.1fkb <span title=\"%s\">%s</span>... " % (float(sys.getsizeof(obj))/1024, cgi.escape(str(obj)), cgi.escape(str(obj)[0:100].ljust(100)) )
if obj_type != type_filter:
continue
refs = [
ref for ref in gc.get_referrers(obj)
if hasattr(ref, "__class__") and
ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"]
]
if not refs:
continue
yield "%.1fkb <span title=\"%s\">%s</span>... " % (
float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)), cgi.escape(str(obj)[0:100].ljust(100))
)
for ref in refs:
yield " ["
if "object at" in str(ref) or len(str(ref)) > 100:
yield str(ref.__class__.__name__)
else:
yield str(ref.__class__.__name__)+":"+cgi.escape(str(ref))
yield str(ref.__class__.__name__) + ":" + cgi.escape(str(ref))
yield "] "
ref_type = ref.__class__.__name__
if ref_type not in ref_count:
ref_count[ref_type] = [0,0]
ref_count[ref_type] = [0, 0]
ref_count[ref_type][0] += 1 # Count
ref_count[ref_type][1] += float(sys.getsizeof(obj))/1024 # Size
ref_count[ref_type][1] += float(sys.getsizeof(obj)) / 1024 # Size
yield "<br>"
yield "<br>Object referrer (total: %s, %.2fkb):<br>" % (len(ref_count), sum([stat[1] for stat in ref_count.values()]))
for obj, stat in sorted(ref_count.items(), key=lambda x: x[1][0], reverse=True)[0:30]: # Sorted by count
yield " - %.1fkb = %s x %s<br>" % (stat[1], stat[0], cgi.escape(str(obj)) )
yield " - %.1fkb = %s x %s<br>" % (stat[1], stat[0], cgi.escape(str(obj)))
gc.collect() # Implicit garbage collection
def actionBenchmark(self):
import sys, gc
import sys
import gc
from contextlib import contextmanager
output = self.sendHeader()
@ -313,20 +333,27 @@ class UiRequestPlugin(object):
yield 1
except Exception, err:
output("<br><b>! Error: %s</b><br>" % err)
taken = time.time()-s
multipler = standard/taken
if multipler < 0.3: speed = "Sloooow"
elif multipler < 0.5: speed = "Ehh"
elif multipler < 0.8: speed = "Goodish"
elif multipler < 1.2: speed = "OK"
elif multipler < 1.7: speed = "Fine"
elif multipler < 2.5: speed = "Fast"
elif multipler < 3.5: speed = "WOW"
else: speed = "Insane!!"
taken = time.time() - s
multipler = standard / taken
if multipler < 0.3:
speed = "Sloooow"
elif multipler < 0.5:
speed = "Ehh"
elif multipler < 0.8:
speed = "Goodish"
elif multipler < 1.2:
speed = "OK"
elif multipler < 1.7:
speed = "Fine"
elif multipler < 2.5:
speed = "Fast"
elif multipler < 3.5:
speed = "WOW"
else:
speed = "Insane!!"
output("%.3fs [x%.2f: %s]<br>" % (taken, multipler, speed))
time.sleep(0.01)
yield """
<style>
* { font-family: monospace }
@ -334,7 +361,7 @@ class UiRequestPlugin(object):
</style>
"""
yield "Benchmarking ZeroNet %s (rev%s) Python %s, platform: %s...<br>" % (config.version, config.rev, sys.version, sys.platform)
yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...<br>" % (config.version, config.rev, sys.version, sys.platform)
t = time.time()
@ -347,13 +374,12 @@ class UiRequestPlugin(object):
with benchmark("hdPrivatekey x 10", 0.7):
for i in range(10):
privatekey = CryptBitcoin.hdPrivatekey(seed, i*10)
privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10)
yield "."
valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
assert privatekey == valid, "%s != %s" % (privatekey, valid)
data = "Hello"*1024 #5k
data = "Hello" * 1024 # 5k
with benchmark("sign x 10", 0.35):
for i in range(10):
yield "."
@ -361,32 +387,31 @@ class UiRequestPlugin(object):
valid = "HFGXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOi+4+BbWHjuwmx0EaKNV1G+kP0tQDxWu0YApxwxZbSmZU="
assert sign == valid, "%s != %s" % (sign, valid)
address = CryptBitcoin.privatekeyToAddress(privatekey)
if CryptBitcoin.opensslVerify: # OpenSSL available
with benchmark("openssl verify x 100", 0.37):
for i in range(100):
if i%10==0: yield "."
if i % 10 == 0:
yield "."
ok = CryptBitcoin.verify(data, address, sign)
assert ok, "does not verify from %s" % address
else:
yield " - openssl verify x 100...not avalible :(<br>"
opensslVerify_bk = CryptBitcoin.opensslVerify # Emulate openssl not found in any way
openssl_verify_bk = CryptBitcoin.opensslVerify # Emulate openssl not found in any way
CryptBitcoin.opensslVerify = None
with benchmark("pure-python verify x 10", 1.6):
for i in range(10):
yield "."
ok = CryptBitcoin.verify(data, address, sign)
assert ok, "does not verify from %s" % address
CryptBitcoin.opensslVerify = opensslVerify_bk
CryptBitcoin.opensslVerify = openssl_verify_bk
yield "<br>CryptHash:<br>"
from Crypt import CryptHash
from cStringIO import StringIO
data = StringIO("Hello"*1024*1024) #5m
data = StringIO("Hello" * 1024 * 1024) # 5m
with benchmark("sha512 x 10 000", 1):
for i in range(10):
for y in range(10000):
@ -395,7 +420,6 @@ class UiRequestPlugin(object):
valid = "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"
assert hash == valid, "%s != %s" % (hash, valid)
yield "<br>Db:<br>"
from Db import Db
@ -422,7 +446,8 @@ class UiRequestPlugin(object):
}
}
if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir)
if os.path.isfile("%s/benchmark.db" % config.data_dir):
os.unlink("%s/benchmark.db" % config.data_dir)
with benchmark("Open x 10", 0.13):
for i in range(10):
@ -431,7 +456,6 @@ class UiRequestPlugin(object):
db.close()
yield "."
db = Db(schema, "%s/benchmark.db" % config.data_dir)
db.checkTables()
import json
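These lines and the hunks below exercise the Db helper end to end; a condensed sketch of that call sequence (every call appears in this diff, but the schema dict is cut off above, so its keys are assumed):

from Config import config
from Db import Db

schema = {"db_name": "Benchmark", "version": 1, "maps": {}, "tables": {}}  # keys assumed

db = Db(schema, "%s/benchmark.db" % config.data_dir)  # schema dict + sqlite path
db.checkTables()  # create/upgrade tables to match the schema
cur = db.getCursor()
cur.execute("BEGIN")
db.loadJson("%s/test_0.json" % config.data_dir, cur=cur)  # map a json file into rows
cur.execute("COMMIT")
print db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0]
db.close()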
@ -446,7 +470,6 @@ class UiRequestPlugin(object):
os.unlink("%s/test_%s.json" % (config.data_dir, u))
yield "."
with benchmark("Buffered insert x 100 x 100", 1.3):
cur = db.getCursor()
cur.execute("BEGIN")
@ -458,7 +481,8 @@ class UiRequestPlugin(object):
json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
db.loadJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
os.unlink("%s/test_%s.json" % (config.data_dir, u))
if u%10 == 0: yield "."
if u % 10 == 0:
yield "."
cur.execute("COMMIT")
yield " - Total rows in db: %s<br>" % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0]
@ -471,11 +495,11 @@ class UiRequestPlugin(object):
res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
for row in res:
found += 1
if i%100 == 0: yield "."
if i % 100 == 0:
yield "."
assert found == 20000, "Found: %s != 20000" % found
with benchmark("Not indexed query x 100", 0.6):
found = 0
cur = db.getCursor()
@ -484,11 +508,11 @@ class UiRequestPlugin(object):
res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
for row in res:
found += 1
if i%10 == 0: yield "."
if i % 10 == 0:
yield "."
assert found == 18900, "Found: %s != 18900" % found
with benchmark("Like query x 100", 1.8):
found = 0
cur = db.getCursor()
@ -497,18 +521,18 @@ class UiRequestPlugin(object):
res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
for row in res:
found += 1
if i%10 == 0: yield "."
if i % 10 == 0:
yield "."
assert found == 38900, "Found: %s != 38900" % found
db.close()
if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir)
if os.path.isfile("%s/benchmark.db" % config.data_dir):
os.unlink("%s/benchmark.db" % config.data_dir)
gc.collect() # Implicit garbage collection
yield "<br>Done. Total: %.2fs" % (time.time()-t)
yield "<br>Done. Total: %.2fs" % (time.time() - t)
def actionGcCollect(self):
import gc

View file

@ -1,12 +1,17 @@
import re, time, cgi, os, sys
from Plugin import PluginManager
from Config import config
import time
import os
import sys
import atexit
allow_reload = False # No reload supported
from Plugin import PluginManager
from Config import config
allow_reload = False # No source reload supported in this plugin
@PluginManager.registerTo("Actions")
class ActionsPlugin(object):
def main(self):
global notificationicon, winfolders
from lib import notificationicon, winfolders
@ -14,7 +19,10 @@ class ActionsPlugin(object):
self.main = sys.modules["main"]
icon = notificationicon.NotificationIcon(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'), "ZeroNet %s" % config.version)
icon = notificationicon.NotificationIcon(
os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'),
"ZeroNet %s" % config.version
)
self.icon = icon
if not config.debug: # Hide console if not in debug mode
@ -34,14 +42,12 @@ class ActionsPlugin(object):
(self.titleConsole, self.toggleConsole),
(self.titleAutorun, self.toggleAutorun),
"--",
("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet") ),
("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/") ),
("ZeroNet Github", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet") ),
("Report bug/request feature", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues") ),
("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet")),
("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/")),
("ZeroNet Github", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet")),
("Report bug/request feature", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues")),
"--",
("!Open ZeroNet", lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port)) ),
#"--",
#("Start ZeroNet when Windows starts", quit),
("!Open ZeroNet", lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port))),
"--",
("Quit", self.quit),
@ -52,19 +58,16 @@ class ActionsPlugin(object):
super(ActionsPlugin, self).main()
icon._die = True
def quit(self):
self.icon.die()
time.sleep(0.1)
self.main.ui_server.stop()
self.main.file_server.stop()
#sys.exit()
# sys.exit()
def opensite(self, url):
import webbrowser
webbrowser.open(url, new=2)
webbrowser.open(url, new=0)
def titleIp(self):
title = "!IP: %s" % config.ip_external
@ -74,21 +77,22 @@ class ActionsPlugin(object):
title += " (passive)"
return title
def titleConnections(self):
title = "Connections: %s" % len(self.main.file_server.connections)
return title
def titleTransfer(self):
title = "Received: %.2f MB | Sent: %.2f MB" % (float(self.main.file_server.bytes_recv)/1024/1024, float(self.main.file_server.bytes_sent)/1024/1024)
title = "Received: %.2f MB | Sent: %.2f MB" % (
float(self.main.file_server.bytes_recv) / 1024 / 1024,
float(self.main.file_server.bytes_sent) / 1024 / 1024
)
return title
def titleConsole(self):
if self.console: return "+Show console window"
else: return "Show console window"
if self.console:
return "+Show console window"
else:
return "Show console window"
def toggleConsole(self):
if self.console:
@ -98,30 +102,30 @@ class ActionsPlugin(object):
notificationicon.showConsole()
self.console = True
def getAutorunPath(self):
return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP)
def formatAutorun(self):
args = sys.argv[:]
args.insert(0, sys.executable)
if sys.platform == 'win32':
args = ['"%s"' % arg for arg in args]
cmd = " ".join(args)
cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "") # Don't open browser on autorun
return "cd /D %s \n%s" % (os.getcwd(), cmd)
# Don't open browser on autorun
cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "")
return "cd /D %s \n%s" % (os.getcwd(), cmd)
def isAutorunEnabled(self):
path = self.getAutorunPath()
return os.path.isfile(path) and open(path).read() == self.formatAutorun()
def titleAutorun(self):
if self.isAutorunEnabled(): return "+Start ZeroNet when Windows starts"
else: return "Start ZeroNet when Windows starts"
if self.isAutorunEnabled():
return "+Start ZeroNet when Windows starts"
else:
return "Start ZeroNet when Windows starts"
def toggleAutorun(self):
if self.isAutorunEnabled():

View file

@ -1,8 +1,7 @@
import logging, json, os, re, sys, time
import gevent
import logging
import re
from Plugin import PluginManager
from Config import config
from Debug import Debug
allow_reload = False # No reload supported
@ -13,9 +12,11 @@ log = logging.getLogger("ZeronamePlugin")
class SiteManagerPlugin(object):
zeroname_address = "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F"
site_zeroname = None
def load(self):
super(SiteManagerPlugin, self).load()
if not self.get(self.zeroname_address): self.need(self.zeroname_address) # Need ZeroName site
if not self.get(self.zeroname_address):
self.need(self.zeroname_address) # Need ZeroName site
# Checks if it's a valid address
def isAddress(self, address):
@ -24,12 +25,10 @@ class SiteManagerPlugin(object):
else:
return super(SiteManagerPlugin, self).isAddress(address)
# Return: True if the address is a domain
def isDomain(self, address):
return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
# Resolve domain
# Return: The address or None
def resolveDomain(self, domain):
@ -40,7 +39,6 @@ class SiteManagerPlugin(object):
db = self.site_zeroname.storage.loadJson("data/names.json")
return db.get(domain)
# Return or create site and start downloading site files
# Return: Site or None if dns resolve failed
def need(self, address, all_file=True):
@ -53,10 +51,9 @@ class SiteManagerPlugin(object):
return super(SiteManagerPlugin, self).need(address, all_file)
# Return: Site object or None if not found
def get(self, address):
if self.sites == None: # Not loaded yet
if self.sites is None: # Not loaded yet
self.load()
if self.isDomain(address): # It looks like a domain
address_resolved = self.resolveDomain(address)
@ -72,4 +69,3 @@ class SiteManagerPlugin(object):
else: # Access by site address
site = self.sites.get(address)
return site
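resolveDomain reads the mapping from the ZeroName site's data/names.json; an illustrative entry (the address is reused from elsewhere in this commit, not a real registration):

{
  "test.bit": "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr"
}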

View file

@ -1,14 +1,15 @@
import re
from Plugin import PluginManager
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
def __init__(self, *args, **kwargs):
from Site import SiteManager
self.site_manager = SiteManager.site_manager
super(UiRequestPlugin, self).__init__(*args, **kwargs)
# Media request
def actionSiteMedia(self, path):
match = re.match("/media/(?P<address>[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", path)
@ -16,10 +17,9 @@ class UiRequestPlugin(object):
domain = match.group("address")
address = self.site_manager.resolveDomain(domain)
if address:
path = "/media/"+address+match.group("inner_path")
path = "/media/" + address + match.group("inner_path")
return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output
# Is media request allowed from that referer
def isMediaRequestAllowed(self, site_address, referer):
referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
@ -37,4 +37,3 @@ class UiRequestPlugin(object):
return True
else: # Invalid referer
return False

View file

@ -1,5 +1,11 @@
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
import time, json, os, sys, re, socket
import time
import json
import os
import sys
import re
import socket
from bitcoinrpc.authproxy import AuthServiceProxy
def publish():
@ -9,9 +15,9 @@ def publish():
os.system("python zeronet.py sitePublish %s" % config["site"])
def processNameOp(domain, value):
if not value.startswith("{"): return False
if not value.startswith("{"):
return False
try:
data = json.loads(value)
except Exception, err:
@ -20,18 +26,22 @@ def processNameOp(domain, value):
if "zeronet" not in data:
print "No zeronet in ", data.keys()
return False
if type(data["zeronet"]) != type({}):
print "Bad type: ", data["zeronet"]
if not isinstance(data["zeronet"], dict):
print "Not dict: ", data["zeronet"]
return False
if not re.match("^[a-z]([a-z0-9-]{0,62}[a-z0-9])?$", domain):
print "Invalid domain: ", domain
return False
if "slave" in sys.argv:
print "Waiting for master update arrive"
time.sleep(30) # Wait 30 sec to allow master updater
#Note: Requires the file data/names.json to exist and contain "{}" to work
# Note: Requires the file data/names.json to exist and contain "{}" to work
names_raw = open(names_path, "rb").read()
names = json.loads(names_raw)
for subdomain, address in data["zeronet"].items():
subdomain = subdomain.lower()
address = re.sub("[^A-Za-z0-9]", "", address)
print subdomain, domain, "->", address
if subdomain:
@ -48,8 +58,6 @@ def processNameOp(domain, value):
return False
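A name value that passes the checks above (JSON object, "zeronet" dict, lowercase domain) would look like the following; the addresses are illustrative, and the empty key is assumed to target the bare .bit domain, whose handling falls in the cut-off branch:

{
  "zeronet": {
    "": "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr",
    "blog": "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F"
  }
}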
def processBlock(block_id):
print "Processing block #%s..." % block_id
s = time.time()
@ -61,13 +69,13 @@ def processBlock(block_id):
for tx in block["tx"]:
try:
transaction = rpc.getrawtransaction(tx, 1)
for vout in transaction.get("vout",[]):
for vout in transaction.get("vout", []):
if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]:
name_op = vout["scriptPubKey"]["nameOp"]
updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"])
except Exception, err:
print "Error processing tx #%s %s" % (tx, err)
print "Done in %.3fs (updated %s)." % (time.time()-s, updated)
print "Done in %.3fs (updated %s)." % (time.time() - s, updated)
if updated:
publish()
@ -102,7 +110,7 @@ rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1)
rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)
rpc = AuthServiceProxy(rpc_url, timeout=60*5)
rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
last_block = int(rpc.getinfo()["blocks"])
@ -111,19 +119,24 @@ if not config["lastprocessed"]: # Start processing from last block
# Processing skipped blocks
print "Processing block from #%s to #%s..." % (config["lastprocessed"], last_block)
for block_id in range(config["lastprocessed"], last_block+1):
for block_id in range(config["lastprocessed"], last_block + 1):
processBlock(block_id)
# processBlock(223911) # Testing zeronetwork.bit
# processBlock(227052) # Testing brainwallets.bit
# processBlock(236824) # Utf8 domain name (invalid should skip)
# processBlock(236752) # Uppercase domain (invalid should skip)
# processBlock(236870) # Encoded domain (should pass)
# sys.exit(0)
while 1:
print "Waiting for new block",
sys.stdout.flush()
while 1:
try:
rpc = AuthServiceProxy(rpc_url, timeout=60*5)
if (int(rpc.getinfo()["blocks"]) > last_block): break
rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
if (int(rpc.getinfo()["blocks"]) > last_block):
break
time.sleep(1)
rpc.waitforblock()
print "Found"
@ -136,7 +149,7 @@ while 1:
time.sleep(5)
last_block = int(rpc.getinfo()["blocks"])
for block_id in range(config["lastprocessed"]+1, last_block+1):
for block_id in range(config["lastprocessed"] + 1, last_block + 1):
processBlock(block_id)
config["lastprocessed"] = last_block

View file

@ -19,18 +19,16 @@ def lookupDomain(domain):
try:
domain_object = rpc.name_show("d/"+domain)
except Exception, err:
except:
#domain doesn't exist
print "Error looking up domain - does not exist %s %s" % (domain,err)
return None
domain_json = json.loads(domain_object['value'])
try:
domain_address = domain_json["zeronet"][subdomain]
except Exception, err:
except:
#domain exists but doesn't have any zeronet value
print "Error looking up domain - doesn't contain zeronet value %s %s" % (domain,err)
return None
return domain_address
@ -54,3 +52,30 @@ rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)
rpc = AuthServiceProxy(rpc_url, timeout=60*5)
"""
while 1:
print "Waiting for new block",
sys.stdout.flush()
while 1:
try:
rpc = AuthServiceProxy(rpc_url, timeout=60*5)
if (int(rpc.getinfo()["blocks"]) > last_block): break
time.sleep(1)
rpc.waitforblock()
print "Found"
break # Block found
except socket.timeout: # Timeout
print ".",
sys.stdout.flush()
except Exception, err:
print "Exception", err.__class__, err
time.sleep(5)
last_block = int(rpc.getinfo()["blocks"])
for block_id in range(config["lastprocessed"]+1, last_block+1):
processBlock(block_id)
config["lastprocessed"] = last_block
open(config_path, "w").write(json.dumps(config, indent=2))
"""

View file

@ -1,7 +1,11 @@
import argparse, sys, os, time
import argparse
import sys
import os
import ConfigParser
class Config(object):
def __init__(self):
self.version = "0.3.1"
self.rev = 280
@ -11,16 +15,13 @@ class Config(object):
self.parseCommandline(argv) # Parse argv
self.setAttributes()
def __str__(self):
return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
# Convert string to bool
def strToBool(self, v):
return v.lower() in ("yes", "true", "t", "1")
# Create command line arguments
def createArguments(self):
# Platform specific
@ -29,7 +30,8 @@ class Config(object):
else:
coffeescript = None
""" Probably fixed
if sys.platform.lower().startswith("darwin"): # For some reason openssl doesn't work on mac yet (https://github.com/HelloZeroNet/ZeroNet/issues/94)
if sys.platform.lower().startswith("darwin"):
# For some reason openssl doesn't work on mac yet (https://github.com/HelloZeroNet/ZeroNet/issues/94)
use_openssl = False
else:
use_openssl = True
@ -38,7 +40,7 @@ class Config(object):
# Create parser
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.register('type','bool', self.strToBool)
parser.register('type', 'bool', self.strToBool)
subparsers = parser.add_subparsers(title="Action to perform", dest="action")
# Main
@ -51,25 +53,29 @@ class Config(object):
action = subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
action.add_argument('address', help='Site to sign')
action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
action.add_argument('--inner_path', help='File you want to sign (default: content.json)', default="content.json", metavar="inner_path")
action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
default="content.json", metavar="inner_path")
action.add_argument('--publish', help='Publish site after the signing', action='store_true')
# SitePublish
action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
action.add_argument('address', help='Site to publish')
action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)', default=None, nargs='?')
action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)', default=15441, nargs='?')
action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)', default="content.json", metavar="inner_path")
action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)',
default=None, nargs='?')
action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)',
default=15441, nargs='?')
action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
default="content.json", metavar="inner_path")
# SiteVerify
action = subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
action.add_argument('address', help='Site to verify')
#dbRebuild
# dbRebuild
action = subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
action.add_argument('address', help='Site to rebuild')
#dbQuery
# dbQuery
action = subparsers.add_parser("dbQuery", help='Query site sql cache')
action.add_argument('address', help='Site to query')
action.add_argument('query', help='Sql query')
@ -98,7 +104,6 @@ class Config(object):
action.add_argument('message', help='Message to sign')
action.add_argument('privatekey', help='Private key')
# Config parameters
parser.add_argument('--debug', help='Debug mode', action='store_true')
parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
@ -110,26 +115,30 @@ class Config(object):
parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', const="default_browser", metavar='browser_name')
parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr', metavar='address')
parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
nargs='?', const="default_browser", metavar='browser_name')
parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr',
metavar='address')
parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size')
parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
parser.add_argument('--fileserver_port',help='FileServer bind port', default=15441, type=int, metavar='port')
parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port')
parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
parser.add_argument('--ip_external', help='External ip (tested on start if None)', metavar='ip')
parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup', type='bool', choices=[True, False], default=use_openssl)
parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup',
type='bool', choices=[True, False], default=use_openssl)
parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory', type='bool', choices=[True, False], default=True)
parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
type='bool', choices=[True, False], default=True)
parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript, metavar='executable_path')
parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
metavar='executable_path')
parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
return parser
# Find arguments specified for current action
def getActionArguments(self):
back = {}
@ -138,8 +147,6 @@ class Config(object):
back[argument.dest] = getattr(self, argument.dest)
return back
# Try to find action from sys.argv
def getAction(self, argv):
actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
@ -150,7 +157,6 @@ class Config(object):
break
return found_action
# Parse command line arguments
def parseCommandline(self, argv):
# Find out if action is specified on start
@ -159,13 +165,12 @@ class Config(object):
argv.append("main")
self.arguments = self.parser.parse_args(argv[1:])
# Parse config file
def parseConfig(self, argv):
# Find config file path from parameters
config_file = "zeronet.conf"
if "--config_file" in argv:
config_file = argv[argv.index("--config_file")+1]
config_file = argv[argv.index("--config_file") + 1]
# Load config file
if os.path.isfile(config_file):
config = ConfigParser.ConfigParser(allow_no_value=True)
@ -173,13 +178,12 @@ class Config(object):
for section in config.sections():
for key, val in config.items(section):
if section != "global": # If not global prefix key with section
key = section+"_"+key
if val: argv.insert(1, val)
key = section + "_" + key
if val:
argv.insert(1, val)
argv.insert(1, "--%s" % key)
return argv
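So a config file is unpacked into extra command line flags before argparse runs; an illustrative zeronet.conf:

[global]
ui_ip = 0.0.0.0

[fileserver]
port = 15442

which the loop above injects into argv as --ui_ip 0.0.0.0 --fileserver_port 15442 (non-global section names become key prefixes).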
# Expose arguments as class attributes
def setAttributes(self):
# Set attributes from arguments

View file

@ -1,13 +1,22 @@
import logging, socket, time
from cStringIO import StringIO
import gevent, msgpack
import socket
import time
import gevent
import msgpack
from Config import config
from Debug import Debug
from util import StreamingMsgpack
from Crypt import CryptConnection
class Connection(object):
__slots__ = ("sock", "sock_wrapped", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id", "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time", "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests")
__slots__ = (
"sock", "sock_wrapped", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id",
"handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time",
"last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent",
"last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests"
)
def __init__(self, server, ip, port, sock=None):
self.sock = sock
@ -48,41 +57,36 @@ class Connection(object):
self.waiting_requests = {} # Waiting sent requests
def updateName(self):
self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol)
def __str__(self):
return self.name
def __repr__(self):
return "<%s>" % self.__str__()
def log(self, text):
self.server.log.debug("%s > %s" % (self.name, text))
# Open connection to peer and wait for handshake
def connect(self):
self.log("Connecting...")
self.type = "out"
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((self.ip, int(self.port)))
# Implicit SSL in the future
#self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
#self.sock.do_handshake()
#self.crypt = "tls-rsa"
#self.sock_wrapped = True
# self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
# self.sock.do_handshake()
# self.crypt = "tls-rsa"
# self.sock_wrapped = True
# Detect protocol
self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()})
gevent.spawn(self.messageLoop)
return self.event_connected.get() # Wait for handshake
# Handle incoming connection
def handleIncomingConnection(self, sock):
self.log("Incoming connection...")
@ -97,7 +101,6 @@ class Connection(object):
self.log("Socket peek error: %s" % Debug.formatException(err))
self.messageLoop()
# Message loop for connection
def messageLoop(self):
if not self.sock:
@ -110,8 +113,9 @@ class Connection(object):
self.unpacker = msgpack.Unpacker()
try:
while True:
buff = self.sock.recv(16*1024)
if not buff: break # Connection closed
buff = self.sock.recv(16 * 1024)
if not buff:
break # Connection closed
self.last_recv_time = time.time()
self.incomplete_buff_recv += 1
self.bytes_recv += len(buff)
@ -125,10 +129,10 @@ class Connection(object):
message = None
buff = None
except Exception, err:
if not self.closed: self.log("Socket error: %s" % Debug.formatException(err))
if not self.closed:
self.log("Socket error: %s" % Debug.formatException(err))
self.close() # MessageLoop ended, close connection
# My handshake info
def handshakeInfo(self):
return {
@ -142,10 +146,9 @@ class Connection(object):
"crypt": self.crypt
}
def setHandshake(self, handshake):
self.handshake = handshake
if handshake.get("port_opened", None) == False: # Not connectable
if handshake.get("port_opened", None) is False: # Not connectable
self.port = 0
else:
self.port = handshake["fileserver_port"] # Set peer fileserver port
@ -160,7 +163,6 @@ class Connection(object):
self.crypt = crypt
self.event_connected.set(True) # Mark handshake as done
# Handle incoming message
def handleMessage(self, message):
self.last_message_time = time.time()
@ -169,8 +171,9 @@ class Connection(object):
self.waiting_requests[message["to"]].set(message) # Set the response to event
del self.waiting_requests[message["to"]]
elif message["to"] == 0: # Other peers handshake
ping = time.time()-self.start_time
if config.debug_socket: self.log("Handshake response: %s, ping: %s" % (message, ping))
ping = time.time() - self.start_time
if config.debug_socket:
self.log("Handshake response: %s, ping: %s" % (message, ping))
self.last_ping_delay = ping
# Server switched to crypt, let's do it as well if not already crypted
if message.get("crypt") and not self.sock_wrapped:
@ -184,7 +187,8 @@ class Connection(object):
self.log("Unknown response: %s" % message)
elif message.get("cmd"): # Handhsake request
if message["cmd"] == "handshake":
if config.debug_socket: self.log("Handshake request: %s" % message)
if config.debug_socket:
self.log("Handshake request: %s" % message)
self.setHandshake(message["params"])
data = self.handshakeInfo()
data["cmd"] = "response"
@ -199,16 +203,20 @@ class Connection(object):
else:
self.server.handleRequest(self, message)
else: # Old style response, no req_id defined
if config.debug_socket: self.log("Old style response, waiting: %s" % self.waiting_requests.keys())
if config.debug_socket:
self.log("Old style response, waiting: %s" % self.waiting_requests.keys())
last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true
self.waiting_requests[last_req_id].set(message)
del self.waiting_requests[last_req_id] # Remove from waiting request
# Send data to connection
def send(self, message, streaming=False):
if config.debug_socket: self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (message.get("cmd"), message.get("to"), streaming, message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"), message.get("req_id")))
if config.debug_socket:
self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (
message.get("cmd"), message.get("to"), streaming,
message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"),
message.get("req_id"))
)
self.last_send_time = time.time()
if streaming:
bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall)
@ -224,10 +232,10 @@ class Connection(object):
self.last_sent_time = time.time()
return True
# Create and send a request to peer
def request(self, cmd, params={}):
if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10: # Last command sent more than 10 sec ago, timeout
# Last command sent more than 10 sec ago, timeout
if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10:
self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time))
self.close()
return False
@ -242,7 +250,6 @@ class Connection(object):
res = event.get() # Wait until event solves
return res
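The framing used by send/request is plain msgpack dicts; from the fields visible in this file, a ping exchange looks roughly like this (req_id value illustrative):

{"cmd": "ping", "req_id": 1, "params": {}}     # sent by request("ping")
{"cmd": "response", "to": 1, "body": "Pong!"}  # matched via waiting_requests[message["to"]]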
def ping(self):
s = time.time()
response = None
@ -252,20 +259,24 @@ class Connection(object):
except Exception, err:
self.log("Ping error: %s" % Debug.formatException(err))
if response and "body" in response and response["body"] == "Pong!":
self.last_ping_delay = time.time()-s
self.last_ping_delay = time.time() - s
return True
else:
return False
# Close connection
def close(self):
if self.closed: return False # Already closed
if self.closed:
return False # Already closed
self.closed = True
self.connected = False
self.event_connected.set(False)
if config.debug_socket: self.log("Closing connection, waiting_requests: %s, buff: %s..." % (len(self.waiting_requests), self.incomplete_buff_recv))
if config.debug_socket:
self.log(
"Closing connection, waiting_requests: %s, buff: %s..." %
(len(self.waiting_requests), self.incomplete_buff_recv)
)
for request in self.waiting_requests.values(): # Mark pending requests failed
request.set(False)
self.waiting_requests = {}
@ -275,7 +286,8 @@ class Connection(object):
self.sock.shutdown(gevent.socket.SHUT_WR)
self.sock.close()
except Exception, err:
if config.debug_socket: self.log("Close error: %s" % err)
if config.debug_socket:
self.log("Close error: %s" % err)
# Little cleanup
self.sock = None

View file

@ -43,14 +43,16 @@ class ConnectionServer:
# Check msgpack version
if msgpack.version[0] == 0 and msgpack.version[1] < 4:
self.log.error(
"Error: Too old msgpack version: %s (>0.4.0 required), please update using `sudo pip install msgpack-python --upgrade`" %
"Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo pip install msgpack-python --upgrade`" %
str(msgpack.version)
)
sys.exit(0)
if port: # Listen server on a port
self.pool = Pool(1000) # do not accept more than 1000 connections
self.stream_server = StreamServer((ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100)
self.stream_server = StreamServer(
(ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100
)
if request_handler:
self.handleRequest = request_handler
@ -152,25 +154,32 @@ class ConnectionServer:
for connection in self.connections[:]: # Make a copy
idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
if connection.unpacker and idle > 30: # Delete the unpacker if not needed
if connection.unpacker and idle > 30:
# Delete the unpacker if not needed
del connection.unpacker
connection.unpacker = None
connection.log("Unpacker deleted")
if idle > 60 * 60: # Wake up after 1h
if idle > 60 * 60:
# Wake up after 1h
connection.log("[Cleanup] After wakeup, idle: %s" % idle)
connection.close()
elif idle > 20 * 60 and connection.last_send_time < time.time() - 10: # Idle more than 20 min and we have not sent a request in the last 10 sec
elif idle > 20 * 60 and connection.last_send_time < time.time() - 10:
# Idle more than 20 min and we have not sent a request in the last 10 sec
if not connection.ping(): # send ping request
connection.close()
elif idle > 10 and connection.incomplete_buff_recv > 0: # Incomplete data with more than 10 sec idle
elif idle > 10 and connection.incomplete_buff_recv > 0:
# Incomplete data with more than 10 sec idle
connection.log("[Cleanup] Connection buff stalled")
connection.close()
elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10: # Sent command and no response in 10 sec
connection.log("[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time))
elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10:
# Sent command and no response in 10 sec
connection.log(
"[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time)
)
connection.close()
elif idle > 60 and connection.protocol == "?": # No connection after 1 min

View file

@ -108,7 +108,7 @@ class ContentManager(object):
return total_size
# Find the file info line from self.contents
# Return: { "sha512": "c29d73d30ee8c9c1b5600e8a84447a6de15a3c3db6869aca4a2a578c1721f518", "size": 41 , "content_inner_path": "content.json"}
# Return: { "sha512": "c29d73d...21f518", "size": 41 , "content_inner_path": "content.json"}
def getFileInfo(self, inner_path):
dirs = inner_path.split("/") # Parent dirs of content.json
inner_path_parts = [dirs.pop()] # Filename relative to content.json
@ -279,11 +279,17 @@ class ContentManager(object):
privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
valid_signers = self.getValidSigners(inner_path, new_content)
if privatekey_address not in valid_signers:
return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address))
return self.log.error(
"Private key invalid! Valid signers: %s, Private key address: %s" %
(valid_signers, privatekey_address)
)
self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))
if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key sign the valid signers
new_content["signers_sign"] = CryptBitcoin.sign("%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey)
if inner_path == "content.json" and privatekey_address == self.site.address:
# If signing using the root key, then sign the valid signers
new_content["signers_sign"] = CryptBitcoin.sign(
"%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey
)
if not new_content["signers_sign"]:
self.log.info("Old style address, signers_sign is none")
@ -352,7 +358,9 @@ class ContentManager(object):
if not cert_address: # Cert signer not allowed
self.log.error("Invalid cert signer: %s" % domain)
return False
return CryptBitcoin.verify("%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"])
return CryptBitcoin.verify(
"%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"]
)
# Checks if the content.json content is valid
# Return: True or False
@ -414,10 +422,13 @@ class ContentManager(object):
if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json
return None
elif old_content["modified"] > new_content["modified"]: # We have newer
self.log.debug("We have newer %s (Our: %s, Sent: %s)" % (inner_path, old_content["modified"], new_content["modified"]))
self.log.debug(
"We have newer %s (Our: %s, Sent: %s)" %
(inner_path, old_content["modified"], new_content["modified"])
)
gevent.spawn(self.site.publish, inner_path=inner_path) # Try to fix the broken peers
return False
if new_content["modified"] > time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day window)
if new_content["modified"] > time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day+)
self.log.error("%s modify is in the future!" % inner_path)
return False
# Check sign
@ -437,7 +448,9 @@ class ContentManager(object):
signs_required = self.getSignsRequired(inner_path, new_content)
if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json
if not CryptBitcoin.verify("%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]):
if not CryptBitcoin.verify(
"%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]
):
self.log.error("%s invalid signers_sign!" % inner_path)
return False
@ -470,8 +483,10 @@ class ContentManager(object):
else:
hash_valid = False
if file_info["size"] != file.tell():
self.log.error("%s file size does not match %s <> %s, Hash: %s" % (inner_path, file.tell(),
file_info["size"], hash_valid))
self.log.error(
"%s file size does not match %s <> %s, Hash: %s" %
(inner_path, file.tell(), file_info["size"], hash_valid)
)
return False
return hash_valid
@ -493,7 +508,9 @@ def testSign():
from Site import Site
site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
content_manager = ContentManager(site)
content_manager.sign("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR")
content_manager.sign(
"data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR"
)
def testVerify():
@ -504,10 +521,14 @@ def testVerify():
print "Loaded contents:", content_manager.contents.keys()
file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json"))
print "content.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False)
print "content.json valid:", content_manager.verifyFile(
"data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False
)
file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json"))
print "messages.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False)
print "messages.json valid:", content_manager.verifyFile(
"data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False
)
def testInfo():

View file

@ -1,11 +1,13 @@
import logging
from lib.BitcoinECC import BitcoinECC
from lib.pybitcointools import bitcoin as btctools
import logging
from Config import config
# Try to load openssl
try:
if not config.use_openssl: raise Exception("Disabled by config")
if not config.use_openssl:
raise Exception("Disabled by config")
from lib.opensslVerify import opensslVerify
logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version)
except Exception, err:
@ -37,12 +39,13 @@ def privatekeyToAddress(privatekey): # Return address from private key
else:
try:
return btctools.privkey_to_address(privatekey)
except Exception, err: # Invalid privatekey
except Exception: # Invalid privatekey
return False
def sign(data, privatekey): # Return sign to data using private key
if privatekey.startswith("23") and len(privatekey) > 52: return None # Old style private key not supported
if privatekey.startswith("23") and len(privatekey) > 52:
return None # Old style private key not supported
sign = btctools.ecdsa_sign(data, privatekey)
return sign

View file

@ -4,7 +4,6 @@ import os
import ssl
from Config import config
import gevent
from util import SslPatch
@ -19,7 +18,6 @@ class CryptConnectionManager:
self.crypt_supported = [] # Supported cryptos
# Select crypt that supported by both sides
# Return: Name of the crypto
def selectCrypt(self, client_supported):
@ -28,34 +26,35 @@ class CryptConnectionManager:
return crypt
return False
# Wrap socket for crypt
# Return: wrapped socket
def wrapSocket(self, sock, crypt, server=False):
if crypt == "tls-rsa":
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:"
ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
if server:
return ssl.wrap_socket(sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir, certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers)
return ssl.wrap_socket(
sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir,
certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers)
else:
return ssl.wrap_socket(sock, ciphers=ciphers)
else:
return sock
def removeCerts(self):
for file_name in ["cert-rsa.pem", "key-rsa.pem"]:
file_path = "%s/%s" % (config.data_dir, file_name)
if os.path.isfile(file_path): os.unlink(file_path)
if os.path.isfile(file_path):
os.unlink(file_path)
# Load and create cert files if necessary
def loadCerts(self):
if config.disable_encryption: return False
if config.disable_encryption:
return False
if self.loadSslRsaCert():
self.crypt_supported.append("tls-rsa")
# Try to create RSA server cert + sign for connection encryption
# Return: True on success
def loadSslRsaCert(self):
@ -65,7 +64,9 @@ class CryptConnectionManager:
return True # Files already exist
back = subprocess.Popen(
"%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s/key-rsa.pem -out %s/cert-rsa.pem -nodes -config %s" % (self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]),
"%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s/key-rsa.pem -out %s/cert-rsa.pem -nodes -config %s" % (
self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]
),
shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
).stdout.read().strip()
logging.debug("Generating RSA cert and key PEM files...%s" % back)
@ -76,7 +77,6 @@ class CryptConnectionManager:
logging.error("RSA ECC SSL cert generation failed, cert or key files not exits.")
return False
# Not used yet: missing on some platforms
def createSslEccCert(self):
return False
@ -91,7 +91,8 @@ class CryptConnectionManager:
# Create ECC cert
back = subprocess.Popen(
"%s req -new -key %s/key-ecc.pem -x509 -nodes -out %s/cert-ecc.pem -config %s" % (self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]),
"%s req -new -key %s/key-ecc.pem -x509 -nodes -out %s/cert-ecc.pem -config %s" % (
self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]),
shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
).stdout.read().strip()
self.log.debug("Generating ECC cert PEM file...%s" % back)

View file: src/Crypt/CryptHash.py

@ -1,5 +1,6 @@
import hashlib
def sha1sum(file, blocksize=65536):
if hasattr(file, "endswith"): # It's a string, open it
file = open(file, "rb")
@ -29,8 +30,8 @@ if __name__ == "__main__":
import time
s = time.time()
print sha1sum(open("F:\\Temp\\bigfile")),
print time.time()-s
print time.time() - s
s = time.time()
print sha512sum(open("F:\\Temp\\bigfile")),
print time.time()-s
print time.time() - s

View file: src/Db/Db.py

@ -1,10 +1,18 @@
import sqlite3, json, time, logging, re, os
import sqlite3
import json
import time
import logging
import re
import os
from DbCursor import DbCursor
class Db:
def __init__(self, schema, db_path):
self.db_path = db_path
self.db_dir = os.path.dirname(db_path)+"/"
self.db_dir = os.path.dirname(db_path) + "/"
self.schema = schema
self.schema["version"] = self.schema.get("version", 1)
self.conn = None
@ -15,7 +23,6 @@ class Db:
self.query_stats = {}
self.db_keyvalues = {}
def connect(self):
self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version))
if not os.path.isdir(self.db_dir): # Directory does not exist yet
@ -32,26 +39,26 @@ class Db:
self.cur.execute("PRAGMA journal_mode = MEMORY")
self.cur.execute("PRAGMA synchronous = OFF")
# Execute query using dbcursor
def execute(self, query, params = None):
if not self.conn: self.connect()
def execute(self, query, params=None):
if not self.conn:
self.connect()
return self.cur.execute(query, params)
def close(self):
self.log.debug("Closing")
if self.cur: self.cur.close()
if self.conn: self.conn.close()
if self.cur:
self.cur.close()
if self.conn:
self.conn.close()
# Gets a cursor object to database
# Return: Cursor class
def getCursor(self):
if not self.conn: self.connect()
if not self.conn:
self.connect()
return DbCursor(self.conn, self)
# Get the table version
# Return: Table version or None if not exist
def getTableVersion(self, table_name):
@ -74,8 +81,6 @@ class Db:
return self.db_keyvalues.get("table.%s.version" % table_name, 0)
# Check Db tables
# Return: <list> Changed table names
def checkTables(self):
@ -92,10 +97,11 @@ class Db:
["key", "TEXT"],
["value", "INTEGER"],
["json_id", "INTEGER REFERENCES json (json_id)"],
],[
], [
"CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
], version=self.schema["version"])
if changed: changed_tables.append("keyvalue")
if changed:
changed_tables.append("keyvalue")
# Check json table
if self.schema["version"] == 1:
@ -113,23 +119,28 @@ class Db:
], [
"CREATE UNIQUE INDEX path ON json(directory, file_name)"
], version=self.schema["version"])
if changed: changed_tables.append("json")
if changed:
changed_tables.append("json")
# Check schema tables
for table_name, table_settings in self.schema["tables"].items():
changed = cur.needTable(table_name, table_settings["cols"], table_settings["indexes"], version=table_settings["schema_changed"])
if changed: changed_tables.append(table_name)
changed = cur.needTable(
table_name, table_settings["cols"],
table_settings["indexes"], version=table_settings["schema_changed"]
)
if changed:
changed_tables.append(table_name)
cur.execute("COMMIT")
self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time()-s, changed_tables))
self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables))
return changed_tables
# Load json file to db
# Return: True if matched
def loadJson(self, file_path, file = None, cur = None):
if not file_path.startswith(self.db_dir): return False # Not from the db dir: Skipping
def loadJson(self, file_path, file=None, cur=None):
if not file_path.startswith(self.db_dir):
return False # Not from the db dir: Skipping
relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path relative to db file
# Check if filename matches any of mappings in schema
matched_maps = []
@ -138,10 +149,12 @@ class Db:
matched_maps.append(map_settings)
# No match found for the file
if not matched_maps: return False
if not matched_maps:
return False
# Load the json file
if not file: file = open(file_path)
if not file:
file = open(file_path)
data = json.load(file)
# No cursor specified
@ -170,13 +183,18 @@ class Db:
for key in map["to_keyvalue"]:
if key not in current_keyvalue: # Keyvalue does not exist yet in the db
cur.execute("INSERT INTO keyvalue ?",
cur.execute(
"INSERT INTO keyvalue ?",
{"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
)
elif data.get(key) != current_keyvalue[key]: # Keyvalue different value
cur.execute("UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?", (data.get(key), current_keyvalue_id[key]))
cur.execute(
"UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?",
(data.get(key), current_keyvalue_id[key])
)
"""for key in map.get("to_keyvalue", []):
"""
for key in map.get("to_keyvalue", []):
cur.execute("INSERT OR REPLACE INTO keyvalue ?",
{"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
)
@ -201,19 +219,21 @@ class Db:
cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
if node not in data: continue
if node not in data:
continue
table_schema = self.schema["tables"][table_name]
if key_col: # Map as dict
for key, val in data[node].iteritems():
if val_col: # Single value
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name,
{ key_col: key, val_col: val, "json_id": json_row["json_id"] }
cur.execute(
"INSERT OR REPLACE INTO %s ?" % table_name,
{key_col: key, val_col: val, "json_id": json_row["json_id"]}
)
else: # Multi value
if isinstance(val, dict): # Single row
row = val
if import_cols: row = { key: row[key] for key in import_cols } # Filter row by import_cols
if import_cols:
row = {key: row[key] for key in import_cols} # Filter row by import_cols
row[key_col] = key
# Replace in value if necessary
if replaces:
@ -234,7 +254,8 @@ class Db:
row["json_id"] = json_row["json_id"]
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
if commit_after_done: cur.execute("COMMIT")
if commit_after_done:
cur.execute("COMMIT")
return True
@ -244,7 +265,7 @@ if __name__ == "__main__":
logging.getLogger('').setLevel(logging.DEBUG)
logging.getLogger('').addHandler(console_log)
console_log.setLevel(logging.DEBUG)
dbjson = DbJson(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db")
dbjson = Db(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db")
dbjson.collect_stats = True
dbjson.checkTables()
cur = dbjson.getCursor()
@ -254,10 +275,9 @@ if __name__ == "__main__":
for user_dir in os.listdir("data/users"):
if os.path.isdir("data/users/%s" % user_dir):
dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur)
#print ".",
# print ".",
cur.logging = True
cur.execute("COMMIT")
print "Done in %.3fs" % (time.time()-s)
print "Done in %.3fs" % (time.time() - s)
for query, stats in sorted(dbjson.query_stats.items()):
print "-", query, stats

View file: src/Db/DbCursor.py

@ -1,21 +1,26 @@
import time, re
import time
import re
# Special sqlite cursor
class DbCursor:
def __init__(self, conn, db):
self.conn = conn
self.db = db
self.cursor = conn.cursor()
self.logging = True
self.logging = False
def execute(self, query, params=None):
if isinstance(params, dict): # Make select and insert easier by allowing dict params
if query.startswith("SELECT") or query.startswith("DELETE"): # Convert param dict to SELECT * FROM table WHERE key = ?, key2 = ? format
wheres = "AND ".join([key+" = ?" for key in params])
if query.startswith("SELECT") or query.startswith("DELETE"):
# Convert param dict to SELECT * FROM table WHERE key = ?, key2 = ? format
wheres = "AND ".join([key + " = ?" for key in params])
query = query.replace("?", wheres)
params = params.values()
else: # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
else:
# Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
keys = ", ".join(params.keys())
values = ", ".join(['?' for key in params.keys()])
query = query.replace("?", "(%s) VALUES (%s)" % (keys, values))
@ -27,22 +32,22 @@ class DbCursor:
if params: # Query has parameters
res = self.cursor.execute(query, params)
if self.logging:
self.db.log.debug((query.replace("?", "%s") % params)+" (Done in %.4f)" % (time.time()-s))
self.db.log.debug((query.replace("?", "%s") % params) + " (Done in %.4f)" % (time.time() - s))
else:
res = self.cursor.execute(query)
if self.logging: self.db.log.debug(query+" (Done in %.4f)" % (time.time()-s))
if self.logging:
self.db.log.debug(query + " (Done in %.4f)" % (time.time() - s))
# Log query stats
if self.db.collect_stats:
if query not in self.db.query_stats:
self.db.query_stats[query] = {"call": 0, "time": 0.0}
self.db.query_stats[query]["call"] += 1
self.db.query_stats[query]["time"] += time.time()-s
self.db.query_stats[query]["time"] += time.time() - s
# if query == "BEGIN": self.logging = False # Turn logging off on transaction commit
return res
# Create new table
# Return: True on success
def createTable(self, table, cols):
@ -65,8 +70,6 @@ class DbCursor:
self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions)))
return True
# Create indexes on table
# Return: True on success
def createIndexes(self, table, indexes):
@ -74,7 +77,6 @@ class DbCursor:
for index in indexes:
self.execute(index)
# Create table if not exist
# Return: True if updated
def needTable(self, table, cols, indexes=None, version=1):
@ -82,15 +84,16 @@ class DbCursor:
if int(current_version) < int(version): # Table needs update or does not exist
self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version))
self.createTable(table, cols)
if indexes: self.createIndexes(table, indexes)
self.execute("INSERT OR REPLACE INTO keyvalue ?",
if indexes:
self.createIndexes(table, indexes)
self.execute(
"INSERT OR REPLACE INTO keyvalue ?",
{"json_id": 0, "key": "table.%s.version" % table, "value": version}
)
return True
else: # Not changed
return False
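Taken together, needTable acts as a small migration helper; a sketch with illustrative table, columns and versions:

cur.needTable("test", [["test_id", "INTEGER"], ["title", "TEXT"]], version=1)  # Creates the table, returns True
cur.needTable("test", [["test_id", "INTEGER"], ["title", "TEXT"]], version=1)  # Stored version matches, returns False
cur.needTable("test", [["test_id", "INTEGER"], ["body", "TEXT"]], version=2)   # Version bumped: rebuilds, returns True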
# Get or create a row for json file
# Return: The database row
def getJsonRow(self, file_path):

View file: src/Debug/Debug.py

@ -1,4 +1,7 @@
import sys, os, traceback
import sys
import os
import traceback
# Non-fatal exception
class Notify(Exception):
@ -10,9 +13,11 @@ class Notify(Exception):
def formatException(err=None):
if type(err) == Notify: return err
if type(err) == Notify:
return err
exc_type, exc_obj, exc_tb = sys.exc_info()
if not err: err = exc_obj.message
if not err:
err = exc_obj.message
tb = []
for frame in traceback.extract_tb(exc_tb):
path, line, function, text = frame
@ -22,9 +27,8 @@ def formatException(err=None):
if __name__ == "__main__":
try:
print 1/0
print 1 / 0
except Exception, err:
print type(err).__name__
print "1/0 error: %s" % formatException(err)

View file: src/Debug/DebugHook.py

@ -1,8 +1,13 @@
import gevent, sys, logging
import sys
import logging
import gevent
from Config import config
last_error = None
# Store last error, ignore notify, allow manual error logging
def handleError(*args):
global last_error
@ -11,7 +16,8 @@ def handleError(*args):
silent = True
else:
silent = False
if args[0].__name__ != "Notify": last_error = args
if args[0].__name__ != "Notify":
last_error = args
if not silent and args[0].__name__ != "Notify":
logging.exception("Unhandled exception")
sys.__excepthook__(*args)
@ -25,6 +31,8 @@ def handleErrorNotify(*args):
OriginalGreenlet = gevent.Greenlet
class ErrorhookedGreenlet(OriginalGreenlet):
def _report_error(self, exc_info):
sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
@ -39,8 +47,10 @@ reload(gevent)
if __name__ == "__main__":
import time
from gevent import monkey; monkey.patch_all(thread=False, ssl=False)
from gevent import monkey
monkey.patch_all(thread=False, ssl=False)
import Debug
def sleeper():
print "started"
time.sleep(3)
@ -52,4 +62,3 @@ if __name__ == "__main__":
thread1.throw(Exception("Hello"))
thread2.throw(Debug.Notify("Throw"))
print "killed"

View file: src/Debug/DebugMedia.py

@ -1,13 +1,20 @@
import os, subprocess, re, logging, time
import os
import subprocess
import re
import logging
import time
from Config import config
# Find files with extension in path
def findfiles(path, find_ext):
for root, dirs, files in os.walk(path, topdown = False):
for root, dirs, files in os.walk(path, topdown=False):
for file in sorted(files):
file_path = root+"/"+file
file_path = root + "/" + file
file_ext = file.split(".")[-1]
if file_ext in find_ext and not file.startswith("all."): yield file_path.replace("\\", "/")
if file_ext in find_ext and not file.startswith("all."):
yield file_path.replace("\\", "/")
# Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features
@ -26,12 +33,12 @@ def merge(merged_path):
else:
merged_mtime = 0
changed = {}
for file_path in findfiles(merge_dir, find_ext):
if os.path.getmtime(file_path) > merged_mtime:
changed[file_path] = True
if not changed: return # Assets not changed, nothing to do
if not changed:
return # Assets not changed, nothing to do
if os.path.isfile(merged_path): # Find old parts to avoid unnecessary recompile
merged_old = open(merged_path, "rb").read().decode("utf8")
@ -53,13 +60,16 @@ def merge(merged_path):
s = time.time()
compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
out = compiler.stdout.read().decode("utf8")
logging.debug("Running: %s (Done in %.2fs)" % (command, time.time()-s))
logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
if out and out.startswith("("):
parts.append(out)
else:
error = out
logging.error("%s Compile error: %s" % (file_path, error))
parts.append("alert('%s compile error: %s');" % (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n") ) )
parts.append(
"alert('%s compile error: %s');" %
(file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n"))
)
else: # Not changed use the old_part
parts.append(old_parts[file_path])
else: # Add to parts
@ -71,7 +81,7 @@ def merge(merged_path):
merged = cssvendor.prefix(merged)
merged = merged.replace("\r", "")
open(merged_path, "wb").write(merged.encode("utf8"))
logging.debug("Merged %s (%.2fs)" % (merged_path, time.time()-s_total))
logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
if __name__ == "__main__":

View file: src/Debug/DebugReloader.py

@ -1,5 +1,7 @@
import logging, os, sys, time
import logging
import time
import threading
from Config import config
if config.debug: # Only load pyfilesystem if using debug mode
@ -13,8 +15,10 @@ if config.debug: # Only load pyfilesytem if using debug mode
else:
pyfilesystem = False
class DebugReloader:
def __init__ (self, callback, directory = "/"):
def __init__(self, callback, directory="/"):
self.last_chaged = 0
if pyfilesystem:
self.directory = directory
@ -24,7 +28,6 @@ class DebugReloader:
thread.daemon = True
thread.start()
def addWatcher(self, recursive=True):
try:
time.sleep(1) # Wait for .pyc compiles
@ -33,10 +36,9 @@ class DebugReloader:
except Exception, err:
print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err
def changed(self, evt):
if not evt.path or "%s/" % config.data_dir in evt.path or evt.path.endswith("pyc") or time.time()-self.last_chaged < 1: return False # Ignore *.pyc changes and no reload within 1 sec
#logging.debug("Changed: %s" % evt)
if not evt.path or "%s/" % config.data_dir in evt.path or evt.path.endswith("pyc") or time.time() - self.last_chaged < 1:
return False # Ignore *.pyc changes and no reload within 1 sec
time.sleep(0.1) # Wait for lock release
self.callback()
self.last_chaged = time.time()

View file: src/File/FileRequest.py

@ -11,7 +11,8 @@ from Debug import Debug
from Config import config
from util import RateLimit, StreamingMsgpack
FILE_BUFF = 1024*512
FILE_BUFF = 1024 * 512
# Request from me
class FileRequest(object):
@ -73,13 +74,16 @@ class FileRequest(object):
self.response({"error": "Unknown site"})
return False
if site.settings["own"] and params["inner_path"].endswith("content.json"):
self.log.debug("Someone trying to push a file to own site %s, reload local %s first" % (site.address, params["inner_path"]))
self.log.debug(
"Someone trying to push a file to own site %s, reload local %s first" %
(site.address, params["inner_path"])
)
changed = site.content_manager.loadContent(params["inner_path"], add_bad_files=False)
if changed: # Content.json changed locally
site.settings["size"] = site.content_manager.getTotalSize() # Update site size
buff = StringIO(params["body"])
valid = site.content_manager.verifyFile(params["inner_path"], buff)
if valid == True: # Valid and changed
if valid is True: # Valid and changed
self.log.info("Update for %s looks valid, saving..." % params["inner_path"])
buff.seek(0)
site.storage.write(params["inner_path"], buff)
@ -87,20 +91,28 @@ class FileRequest(object):
site.onFileDone(params["inner_path"]) # Trigger filedone
if params["inner_path"].endswith("content.json"): # Download every changed file from peer
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer = True) # Add or get peer
site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"]) # On complete publish to other peers
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer
# On complete publish to other peers
site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"])
# Load new content file and download changed files in new thread
gevent.spawn(
lambda: site.downloadContent(params["inner_path"], peer=peer)
) # Load new content file and download changed files in new thread
)
self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})
elif valid == None: # Not changed
peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer
elif valid is None: # Not changed
peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer
if peer:
self.log.debug("Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) )
self.log.debug(
"Same version, adding new peer for locked files: %s, tasks: %s" %
(peer.key, len(site.worker_manager.tasks))
)
for task in site.worker_manager.tasks: # New peer add to every ongoing task
if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from this peer too if its peer locked
if task["peers"]:
# Download file from this peer too if it is peer-locked
site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)
self.response({"ok": "File not changed"})
@ -116,18 +128,21 @@ class FileRequest(object):
return False
try:
file_path = site.storage.getPath(params["inner_path"])
if config.debug_socket: self.log.debug("Opening file: %s" % file_path)
if config.debug_socket:
self.log.debug("Opening file: %s" % file_path)
with StreamingMsgpack.FilePart(file_path, "rb") as file:
file.seek(params["location"])
file.read_bytes = FILE_BUFF
back = {"body": file,
back = {
"body": file,
"size": os.fstat(file.fileno()).st_size,
"location": min(file.tell()+FILE_BUFF, os.fstat(file.fileno()).st_size)
"location": min(file.tell() + FILE_BUFF, os.fstat(file.fileno()).st_size)
}
if config.debug_socket:
self.log.debug("Sending file %s from position %s to %s" % (file_path,
params["location"],
back["location"]))
self.log.debug(
"Sending file %s from position %s to %s" %
(file_path, params["location"], back["location"])
)
self.response(back, streaming=True)
if config.debug_socket:
self.log.debug("File %s sent" % file_path)
@ -159,7 +174,8 @@ class FileRequest(object):
for peer in params["peers"]: # Add sent peers to site
address = self.unpackAddress(peer)
got_peer_keys.append("%s:%s" % address)
if site.addPeer(*address): added += 1
if site.addPeer(*address):
added += 1
# Send back peers that are not in the sent list and are connectable (not port 0)
packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
if added:
@ -173,9 +189,11 @@ class FileRequest(object):
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
modified_files = {inner_path: content["modified"]
modified_files = {
inner_path: content["modified"]
for inner_path, content in site.content_manager.contents.iteritems()
if content["modified"] > params["since"]}
if content["modified"] > params["since"]
}
# Add peer to site if not added before
connected_peer = site.addPeer(self.connection.ip, self.connection.port)

View file: src/File/FileServer.py

@ -1,5 +1,10 @@
import os, logging, urllib2, re, time
import gevent, msgpack
import logging
import urllib2
import re
import time
import gevent
from Config import config
from FileRequest import FileRequest
from Site import SiteManager
@ -9,6 +14,7 @@ from util import UpnpPunch
class FileServer(ConnectionServer):
def __init__(self):
ConnectionServer.__init__(self, config.fileserver_ip, config.fileserver_port, self.handleRequest)
if config.ip_external: # Ip external defined in arguments
@ -18,30 +24,32 @@ class FileServer(ConnectionServer):
self.port_opened = None # Is file server opened on router
self.sites = SiteManager.site_manager.list()
# Handle request to fileserver
def handleRequest(self, connection, message):
if "params" in message:
self.log.debug("FileRequest: %s %s %s %s" % (str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path")))
self.log.debug(
"FileRequest: %s %s %s %s" %
(str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path"))
)
else:
self.log.debug("FileRequest: %s %s" % (str(connection), req["cmd"]))
self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"]))
req = FileRequest(self, connection)
req.route(message["cmd"], message.get("req_id"), message.get("params"))
# Reload the FileRequest class to prevent restarts in debug mode
def reload(self):
global FileRequest
import imp
FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest
# Try to open the port using upnp
def openport(self, port=None, check=True):
if not port: port = self.port
if self.port_opened: return True # Port already opened
if not port:
port = self.port
if self.port_opened:
return True # Port already opened
if check: # Check first if it's already opened
if self.testOpenport(port)["result"] == True:
if self.testOpenport(port)["result"] is True:
return True # Port already opened
self.log.info("Trying to open port using UpnpPunch...")
@ -52,25 +60,24 @@ class FileServer(ConnectionServer):
self.log.error("UpnpPunch run error: %s" % Debug.formatException(err))
upnp_punch = False
if upnp_punch and self.testOpenport(port)["result"] == True:
if upnp_punch and self.testOpenport(port)["result"] is True:
return True
self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port)
return False
# Test if the port is open
def testOpenport(self, port = None):
def testOpenport(self, port=None):
time.sleep(1) # Wait for port open
if not port: port = self.port
if not port:
port = self.port
back = self.testOpenportPortchecker(port)
if back["result"] == True: # Successful port check
if back["result"] is True: # Successful port check
return back
else: # Alternative port checker
return self.testOpenportCanyouseeme(port)
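A sketch of the resulting fallback chain (file_server and the port number are illustrative; the return shape matches the dicts constructed below):

back = file_server.testOpenport(15441)  # Asks portchecker.co first
# e.g. {"result": True, "message": ...} on success; if the first checker
# reports closed or errors out, canyouseeme.org is queried instead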
def testOpenportPortchecker(self, port = None):
def testOpenportPortchecker(self, port=None):
self.log.info("Checking port %s using portchecker.co..." % port)
try:
data = urllib2.urlopen("http://portchecker.co/check", "port=%s" % port, timeout=20.0).read()
@ -79,6 +86,7 @@ class FileServer(ConnectionServer):
except Exception, err:
message = "Error: %s" % Debug.formatException(err)
data = ""
if "closed" in message or "Error" in message:
self.log.info("[BAD :(] Port closed: %s" % message)
if port == self.port:
@ -102,8 +110,7 @@ class FileServer(ConnectionServer):
config.ip_external = False
return {"result": True, "message": message}
def testOpenportCanyouseeme(self, port = None):
def testOpenportCanyouseeme(self, port=None):
self.log.info("Checking port %s using canyouseeme.org..." % port)
try:
data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
@ -111,6 +118,7 @@ class FileServer(ConnectionServer):
message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip HTML tags
except Exception, err:
message = "Error: %s" % Debug.formatException(err)
if "Error" in message:
self.log.info("[BAD :(] Port closed: %s" % message)
if port == self.port:
@ -134,26 +142,23 @@ class FileServer(ConnectionServer):
config.ip_external = False
return {"result": True, "message": message}
# Set external ip without testing
def setIpExternal(self, ip_external):
logging.info("Setting external ip without testing: %s..." % ip_external)
config.ip_external = ip_external
self.port_opened = True
# Check site file integrity
def checkSite(self, site):
if site.settings["serving"]:
site.announce() # Announce site to tracker
site.update() # Update site's content.json and download changed files
if self.port_opened == False: # In passive mode keep 5 active peer connection to get the updates
if self.port_opened is False: # In passive mode keep 5 active peer connection to get the updates
site.needConnections()
# Check sites integrity
def checkSites(self):
if self.port_opened == None: # Test and open port if not tested yet
if self.port_opened is None: # Test and open port if not tested yet
self.openport()
self.log.debug("Checking sites integrity..")
@ -162,12 +167,11 @@ class FileServer(ConnectionServer):
time.sleep(2) # Prevent too quick request
site = None
# Announce sites every 20 min
def announceSites(self):
import gc
while 1:
time.sleep(20*60) # Announce sites every 20 min
time.sleep(20 * 60) # Announce sites every 20 min
for address, site in self.sites.items():
if site.settings["serving"]:
site.announce() # Announce site to tracker
@ -181,7 +185,7 @@ class FileServer(ConnectionServer):
site.retryBadFiles()
# In passive mode keep 5 active peer connection to get the updates
if self.port_opened == False:
if self.port_opened is False:
site.needConnections()
time.sleep(2) # Prevent too quick request
@ -189,21 +193,22 @@ class FileServer(ConnectionServer):
site = None
gc.collect() # Implicit garbage collection
# Detect if the computer has woken up from sleep
def wakeupWatcher(self):
last_time = time.time()
while 1:
time.sleep(30)
if time.time()-last_time > 60: # If taken more than 60 second then the computer was in sleep mode
self.log.info("Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." % (last_time, time.time(), time.time()-last_time))
if time.time() - last_time > 60: # If taken more than 60 seconds then the computer was in sleep mode
self.log.info(
"Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." %
(last_time, time.time(), time.time() - last_time)
)
self.port_opened = None # Check if we still have the open port on the router
self.checkSites()
last_time = time.time()
# Bind and start serving sites
def start(self, check_sites = True):
def start(self, check_sites=True):
self.log = logging.getLogger("FileServer")
if config.debug:

View file: src/Peer/Peer.py

@ -1,8 +1,14 @@
import os, logging, gevent, time, msgpack, sys, random, socket, struct
import logging
import gevent
import time
import sys
import socket
import struct
from cStringIO import StringIO
from Config import config
from Debug import Debug
# Communicate with remote peers
class Peer(object):
__slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response",
@ -49,7 +55,8 @@ class Peer(object):
self.connection = self.connection_server.getConnection(self.ip, self.port)
except Exception, err:
self.onConnectionError()
self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed))
self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
(Debug.formatException(err), self.connection_error, self.hash_failed))
self.connection = None
# Check if we have connection to peer
@ -57,7 +64,7 @@ class Peer(object):
if self.connection and self.connection.connected: # We have connection to peer
return self.connection
else: # Try to find from other sites connections
self.connection = self.connection_server.getConnection(self.ip, self.port, create=False) # Do not create new connection if not found
self.connection = self.connection_server.getConnection(self.ip, self.port, create=False)
return self.connection
def __str__(self):
@ -68,7 +75,7 @@ class Peer(object):
# Peer ip:port to packed 6-byte format
def packAddress(self):
return socket.inet_aton(self.ip)+struct.pack("H", self.port)
return socket.inet_aton(self.ip) + struct.pack("H", self.port)
def unpackAddress(self, packed):
return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]
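A round-trip sketch of this 6-byte format (4 bytes of IPv4 from inet_aton plus a 2-byte native-endian port; the address and port values are illustrative):

import socket
import struct

packed = socket.inet_aton("12.34.56.78") + struct.pack("H", 15441)  # 6 bytes total
ip = socket.inet_ntoa(packed[0:4])
port = struct.unpack_from("H", packed, 4)[0]
assert (ip, port) == ("12.34.56.78", 15441)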
@ -85,16 +92,11 @@ class Peer(object):
self.onConnectionError()
return None # Connection failed
#if cmd != "ping" and self.last_response and time.time() - self.last_response > 20*60: # If last response if older than 20 minute, ping first to see if still alive
# if not self.ping(): return None
for retry in range(1,3): # Retry 3 times
#if config.debug_socket: self.log.debug("sendCmd: %s %s" % (cmd, params.get("inner_path")))
for retry in range(1, 3): # Try up to 2 times
try:
response = self.connection.request(cmd, params)
if not response:
raise Exception("Send error")
#if config.debug_socket: self.log.debug("Got response to: %s" % cmd)
if "error" in response:
self.log("%s error: %s" % (cmd, response["error"]))
self.onConnectionError()
@ -108,10 +110,11 @@ class Peer(object):
break
else:
self.onConnectionError()
self.log("%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err),
self.connection_error,
self.hash_failed, retry))
time.sleep(1*retry)
self.log(
"%s (connection_error: %s, hash_failed: %s, retry: %s)" %
(Debug.formatException(err), self.connection_error, self.hash_failed, retry)
)
time.sleep(1 * retry)
self.connect()
return None # Failed after all retries
@ -121,7 +124,8 @@ class Peer(object):
buff = StringIO()
s = time.time()
while True: # Read in 512k parts
back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location}) # Get file content from last location
back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location})
if not back or "body" not in back: # Error
return False
@ -145,7 +149,7 @@ class Peer(object):
response = self.request("ping")
if response and "body" in response and response["body"] == "Pong!":
response_time = time.time()-s
response_time = time.time() - s
break # All fine, exit from for loop
# Timeout reached or bad response
self.onConnectionError()
@ -185,7 +189,8 @@ class Peer(object):
# Stop and remove from site
def remove(self):
self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed))
if self.site and self.key in self.site.peers: del(self.site.peers[self.key])
if self.site and self.key in self.site.peers:
del(self.site.peers[self.key])
if self.connection:
self.connection.close()

View file: src/Plugin/PluginManager.py

@ -1,8 +1,13 @@
import logging, os, sys
import logging
import os
import sys
from Debug import Debug
from Config import config
class PluginManager:
def __init__(self):
self.log = logging.getLogger("PluginManager")
self.plugin_path = "plugins" # Plugin directory
@ -11,28 +16,29 @@ class PluginManager:
sys.path.append(self.plugin_path)
if config.debug: # Auto reload Plugins on file change
from Debug import DebugReloader
DebugReloader(self.reloadPlugins)
# -- Load / Unload --
# Load all plugins
def loadPlugins(self):
for dir_name in os.listdir(self.plugin_path):
dir_path = os.path.join(self.plugin_path, dir_name)
if dir_name.startswith("disabled"): continue # Dont load if disabled
if not os.path.isdir(dir_path): continue # Dont load if not dir
if dir_name.startswith("Debug") and not config.debug: continue # Only load in debug mode if module name starts with Debug
if dir_name.startswith("disabled"):
continue # Dont load if disabled
if not os.path.isdir(dir_path):
continue # Dont load if not dir
if dir_name.startswith("Debug") and not config.debug:
continue # Only load in debug mode if module name starts with Debug
self.log.debug("Loading plugin: %s" % dir_name)
try:
__import__(dir_name)
except Exception, err:
self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err)))
if dir_name not in self.plugin_names: self.plugin_names.append(dir_name)
if dir_name not in self.plugin_names:
self.plugin_names.append(dir_name)
# Reload all plugins
def reloadPlugins(self):
@ -53,23 +59,26 @@ plugin_manager = PluginManager() # Singleton
# -- Decorators --
# Accept plugin to class decorator
def acceptPlugins(base_class):
class_name = base_class.__name__
if class_name in plugin_manager.plugins: # Has plugins
classes = plugin_manager.plugins[class_name][:] # Copy the current plugins
classes.reverse()
classes.append(base_class) # Add the class itself to end of inherience line
PluginedClass = type(class_name, tuple(classes), dict()) # Create the plugined class
plugined_class = type(class_name, tuple(classes), dict()) # Create the plugined class
plugin_manager.log.debug("New class accepts plugins: %s (Loaded plugins: %s)" % (class_name, classes))
else: # No plugins just use the original
PluginedClass = base_class
return PluginedClass
plugined_class = base_class
return plugined_class
# Register plugin to class name decorator
def registerTo(class_name):
plugin_manager.log.debug("New plugin registered to: %s" % class_name)
if class_name not in plugin_manager.plugins: plugin_manager.plugins[class_name] = []
if class_name not in plugin_manager.plugins:
plugin_manager.plugins[class_name] = []
def classDecorator(self):
plugin_manager.plugins[class_name].append(self)
@ -77,20 +86,20 @@ def registerTo(class_name):
return classDecorator
# - Example usage -
if __name__ == "__main__":
@registerTo("Request")
class RequestPlugin(object):
def actionMainPage(self, path):
return "Hello MainPage!"
@accept
@acceptPlugins
class Request(object):
def route(self, path):
func = getattr(self, "action"+path, None)
func = getattr(self, "action" + path, None)
if func:
return func(path)
else:

View file: src/Site/Site.py

@ -1,18 +1,34 @@
import os, json, logging, hashlib, re, time, string, random, sys, binascii, struct, socket, urllib, urllib2
from lib.subtl.subtl import UdpTrackerClient
from lib import bencode
import os
import json
import logging
import hashlib
import re
import time
import string
import random
import sys
import binascii
import struct
import socket
import urllib
import urllib2
import gevent
import util
from lib import bencode
from lib.subtl.subtl import UdpTrackerClient
from Config import config
from Peer import Peer
from Worker import WorkerManager
from Crypt import CryptHash
from Debug import Debug
from Content import ContentManager
from SiteStorage import SiteStorage
import SiteManager
class Site:
def __init__(self, address, allow_create=True):
self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure it's a correct address
self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging
@ -23,7 +39,7 @@ class Site:
self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself)
self.last_announce = 0 # Last announce time to tracker
self.worker_manager = WorkerManager(self) # Handle site download from other peers
self.bad_files = {} # SHA512 check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept)
self.bad_files = {} # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept)
self.content_updated = None # Content.js update time
self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout]
self.page_requested = False # Page viewed in browser
@ -33,12 +49,16 @@ class Site:
self.content_manager = ContentManager(self) # Load contents
if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed)
self.settings["auth_key"] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(24))
self.settings["auth_key"] = ''.join(
random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(24)
)
self.log.debug("New auth key: %s" % self.settings["auth_key"])
self.saveSettings()
if not self.settings.get("wrapper_key"): # To auth websocket permissions
self.settings["wrapper_key"] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12))
self.settings["wrapper_key"] = ''.join(
random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12)
)
self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"])
self.saveSettings()
@ -47,16 +67,12 @@ class Site:
# Add event listeners
self.addEventListeners()
def __str__(self):
return "Site %s" % self.address_short
def __repr__(self):
return "<%s>" % self.__str__()
# Load site settings from data/sites.json
def loadSettings(self):
sites_settings = json.load(open("%s/sites.json" % config.data_dir))
@ -67,10 +83,9 @@ class Site:
permissions = ["ADMIN"]
else:
permissions = []
self.settings = { "own": False, "serving": True, "permissions": permissions } # Default
self.settings = {"own": False, "serving": True, "permissions": permissions} # Default
return
# Save site settings to data/sites.json
def saveSettings(self):
sites_settings = json.load(open("%s/sites.json" % config.data_dir))
@ -78,30 +93,27 @@ class Site:
open("%s/sites.json" % config.data_dir, "w").write(json.dumps(sites_settings, indent=2, sort_keys=True))
return
# Max site size in MB
def getSizeLimit(self):
return self.settings.get("size_limit", config.size_limit)
# Next size limit based on current size
def getNextSizeLimit(self):
size_limits = [10,20,50,100,200,500,1000,2000,5000,10000,20000,50000,100000]
size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
size = self.settings.get("size", 0)
for size_limit in size_limits:
if size*1.2 < size_limit*1024*1024:
if size * 1.2 < size_limit * 1024 * 1024:
return size_limit
return 999999
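A worked example of the 20% headroom rule above, with an illustrative Site instance holding 9 MB: 9 * 1.2 = 10.8 MB no longer fits under the 10 MB tier, so the next tier is returned.

site.settings["size"] = 9 * 1024 * 1024  # Illustrative stored size
site.getNextSizeLimit()  # 10.8 MB > 10 MB tier -> returns 20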
# Download all files from content.json
def downloadContent(self, inner_path, download_files=True, peer=None):
s = time.time()
self.log.debug("Downloading %s..." % inner_path)
found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
content_inner_dir = self.content_manager.toDir(inner_path)
if not found: return False # Could not download content.json
if not found:
return False # Could not download content.json
self.log.debug("Got %s" % inner_path)
changed = self.content_manager.loadContent(inner_path, load_includes=False)
@ -110,15 +122,16 @@ class Site:
file_threads = []
if download_files:
for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
file_inner_path = content_inner_dir+file_relative_path
res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer) # No waiting for finish, return the event
if res != True: # Need downloading
file_inner_path = content_inner_dir + file_relative_path
# Start download and dont wait for finish, return the event
res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
if res is not True and res is not False: # Need downloading and file is allowed
file_threads.append(res) # Append evt
# Wait for includes download
include_threads = []
for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
file_inner_path = content_inner_dir+file_relative_path
file_inner_path = content_inner_dir + file_relative_path
include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
include_threads.append(include_thread)
@ -128,31 +141,30 @@ class Site:
self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed)))
gevent.joinall(file_threads)
self.log.debug("%s: All file downloaded in %.2fs" % (inner_path, time.time()-s))
self.log.debug("%s: All file downloaded in %.2fs" % (inner_path, time.time() - s))
return True
# Return bad files with less than 3 retries
def getReachableBadFiles(self):
if not self.bad_files: return False
if not self.bad_files:
return False
return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3]
# Retry download bad files
def retryBadFiles(self):
for bad_file in self.bad_files.keys():
self.needFile(bad_file, update=True, blocking=False)
# Download all files of the site
@util.Noparallel(blocking=False)
def download(self, check_size=False):
self.log.debug("Start downloading...%s" % self.bad_files)
gevent.spawn(self.announce)
if check_size: # Check the size first
valid = downloadContent(download_files=False) # Just download content.json files
if not valid: return False # Cant download content.jsons or size is not fits
valid = self.downloadContent(download_files=False) # Just download content.json files
if not valid:
return False # Can't download content.json files or the size does not fit
# Download everything
found = self.downloadContent("content.json")
@ -160,26 +172,27 @@ class Site:
return found
# Update worker, try to find client that supports listModifications command
def updater(self, peers_try, queried, since):
while 1:
if not peers_try or len(queried) >= 3: # Stop after 3 successful queries
break
peer = peers_try.pop(0)
if not peer.connection and len(queried) < 2: peer.connect() # Only open new connection if less than 2 queried already
if not peer.connection or peer.connection.handshake.get("rev",0) < 126: continue # Not compatible
if not peer.connection and len(queried) < 2:
peer.connect() # Only open new connection if less than 2 queried already
if not peer.connection or peer.connection.handshake.get("rev", 0) < 126:
continue # Not compatible
res = peer.listModified(since)
if not res or not "modified_files" in res: continue # Failed query
if not res or "modified_files" not in res:
continue # Failed query
queried.append(peer)
for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we
content = self.content_manager.contents.get(inner_path)
if not content or modified > content["modified"]: # We dont have this file or we have older
self.bad_files[inner_path] = self.bad_files.get(inner_path, 0)+1 # Mark as bad file
self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file
gevent.spawn(self.downloadContent, inner_path) # Download the content.json + the changed files
# Check modified content.json files from peers and add modified files to bad_files
# Return: Successfully queried peers [Peer, Peer...]
def checkModifications(self, since=None):
@ -189,13 +202,13 @@ class Site:
peers = self.peers.values()
random.shuffle(peers)
for peer in peers: # Try to find connected good peers, but we must have at least 5 peers
if peer.findConnection() and peer.connection.handshake.get("rev",0) > 125: # Add to the beginning if rev125
if peer.findConnection() and peer.connection.handshake.get("rev", 0) > 125: # Add to the beginning if rev125
peers_try.insert(0, peer)
elif len(peers_try) < 5: # Backup peers, add to end of the try list
peers_try.append(peer)
if since == None: # No since definied, download from last modification time-1day
since = self.settings.get("modified", 60*60*24)-60*60*24
if since is None: # No since defined, download from last modification time-1day
since = self.settings.get("modified", 60 * 60 * 24) - 60 * 60 * 24
self.log.debug("Try to get listModifications from peers: %s since: %s" % (peers_try, since))
updaters = []
@ -207,7 +220,6 @@ class Site:
self.log.debug("Queried listModifications from: %s" % queried)
return queried
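A sketch of the query each updater performs (peer and since come from the surrounding code; the response shape is as consumed above, with an illustrative timestamp):

res = peer.listModified(since)  # e.g. {"modified_files": {"content.json": 1436000000}}
for inner_path, modified in res["modified_files"].iteritems():
    pass  # Compare against content_manager.contents and mark newer files as bad_files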
# Update content.json from peers and download changed files
# Return: None
@util.Noparallel()
@ -215,7 +227,8 @@ class Site:
self.content_manager.loadContent("content.json") # Reload content.json
self.content_updated = None # Reset content updated time
self.updateWebsocket(updating=True)
if announce: self.announce()
if announce:
self.announce()
queried = self.checkModifications()
@ -223,12 +236,13 @@ class Site:
self.log.debug("Fallback to old-style update")
self.redownloadContents()
if not self.settings["own"]: self.storage.checkFiles(quick_check=True) # Quick check files based on file size
if not self.settings["own"]:
self.storage.checkFiles(quick_check=True) # Quick check files based on file size
changed = self.content_manager.loadContent("content.json")
if changed:
for changed_file in changed:
self.bad_files[changed_file] = self.bad_files.get(changed_file, 0)+1
self.bad_files[changed_file] = self.bad_files.get(changed_file, 0) + 1
if self.bad_files:
self.download()
@ -236,7 +250,6 @@ class Site:
self.settings["size"] = self.content_manager.getTotalSize() # Update site size
self.updateWebsocket(updated=True)
# Update site by redownload all content.json
def redownloadContents(self):
@ -248,20 +261,22 @@ class Site:
self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
gevent.joinall(content_threads)
# Publish worker
def publisher(self, inner_path, peers, published, limit, event_done=None):
file_size = self.storage.getSize(inner_path)
body = self.storage.read(inner_path)
while 1:
if not peers or len(published) >= limit:
if event_done: event_done.set(True)
if event_done:
event_done.set(True)
break # All peers done, or published enough
peer = peers.pop(0)
if peer.connection and peer.connection.last_ping_delay: # Peer connected
timeout = timeout = 5+int(file_size/1024)+peer.connection.last_ping_delay # Timeout: 5sec + size in kb + last_ping
else:
timeout = timeout = 5+int(file_size/1024) # Timeout: 5sec + size in kb
# Timeout: 5 sec + size in kB + last ping
timeout = 5 + int(file_size / 1024) + peer.connection.last_ping_delay
else: # Peer not connected
# Timeout: 5 sec + size in kB
timeout = 5 + int(file_size / 1024)
result = {"exception": "Timeout"}
for retry in range(2):
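For scale, the timeout formula above with illustrative numbers: a 512 kB body pushed to a connected peer with a 0.2 s last ping waits up to 5 + 512 + 0.2 = 517.2 seconds before the attempt is abandoned.

file_size = 512 * 1024  # Illustrative body size in bytes
timeout = 5 + int(file_size / 1024) + 0.2  # 5 + 512 + 0.2 = 517.2s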
@ -273,7 +288,8 @@ class Site:
"body": body,
"peer": (config.ip_external, config.fileserver_port)
})
if result: break
if result:
break
except Exception, err:
result = {"exception": Debug.formatException(err)}
@ -281,18 +297,19 @@ class Site:
published.append(peer)
self.log.info("[OK] %s: %s" % (peer.key, result["ok"]))
else:
if result == {"exception": "Timeout"}: peer.onConnectionError()
if result == {"exception": "Timeout"}:
peer.onConnectionError()
self.log.info("[FAILED] %s: %s" % (peer.key, result))
# Update content.json on peers
@util.Noparallel()
def publish(self, limit=5, inner_path="content.json"):
self.log.info( "Publishing to %s/%s peers..." % (min(len(self.peers), limit), len(self.peers)) )
self.log.info("Publishing to %s/%s peers..." % (min(len(self.peers), limit), len(self.peers)))
published = [] # Successfully published (Peer)
publishers = [] # Publisher threads
peers = self.peers.values()
if not peers: return 0 # No peers found
if not peers:
return 0 # No peers found
random.shuffle(peers)
event_done = gevent.event.AsyncResult()
@ -301,18 +318,25 @@ class Site:
publishers.append(publisher)
event_done.get() # Wait for done
if len(published) < min(len(self.peers), limit): time.sleep(0.2) # If less than we need sleep a bit
if len(published) == 0: gevent.joinall(publishers) # No successful publish, wait for all publisher
if len(published) < min(len(self.peers), limit):
time.sleep(0.2) # If less than we need sleep a bit
if len(published) == 0:
gevent.joinall(publishers) # No successful publish, wait for all publisher
# Make sure the connected passive peers got the update
passive_peers = [peer for peer in peers if peer.connection and not peer.connection.closed and peer.key.endswith(":0") and peer not in published] # Every connected passive peer that we not published to
passive_peers = [
peer for peer in peers
if peer.connection and not peer.connection.closed and peer.key.endswith(":0") and peer not in published
] # Every connected passive peer that we have not published to
for peer in passive_peers:
gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=10)
self.log.info("Successfuly published to %s peers, publishing to %s more passive peers" % (len(published), len(passive_peers)) )
self.log.info(
"Successfuly published to %s peers, publishing to %s more passive peers" %
(len(published), len(passive_peers))
)
return len(published)
# Copy this site
def clone(self, address, privatekey=None, address_index=None, overwrite=False):
import shutil
@ -325,20 +349,22 @@ class Site:
self.log.debug("Cloning to %s, ignore dirs: %s" % (address, default_dirs))
# Copy root content.json
if not new_site.storage.isFile("content.json") and not overwrite: # Content.json not exist yet, create a new one from source site
if not new_site.storage.isFile("content.json") and not overwrite:
# Content.json does not exist yet, create a new one from the source site
content_json = self.storage.loadJson("content.json")
if "domain" in content_json:
del content_json["domain"]
content_json["title"] = "my"+content_json["title"]
content_json["title"] = "my" + content_json["title"]
content_json["cloned_from"] = self.address
if address_index: content_json["address_index"] = address_index # Site owner's BIP32 index
if address_index:
content_json["address_index"] = address_index # Site owner's BIP32 index
new_site.storage.writeJson("content.json", content_json)
new_site.content_manager.loadContent("content.json", add_bad_files=False, load_includes=False)
# Copy files
for content_inner_path, content in self.content_manager.contents.items():
for file_relative_path in sorted(content["files"].keys()):
file_inner_path = self.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
file_inner_path = self.content_manager.toDir(content_inner_path) + file_relative_path # Relative to content.json
file_inner_path = file_inner_path.strip("/") # Strip leading /
if file_inner_path.split("/")[0] in default_dirs: # Dont copy directories that have a -default postfixed alternative
self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path)
@ -349,7 +375,8 @@ class Site:
file_path_dest = new_site.storage.getPath(file_inner_path)
self.log.debug("[COPY] %s to %s..." % (file_inner_path, file_path_dest))
dest_dir = os.path.dirname(file_path_dest)
if not os.path.isdir(dest_dir): os.makedirs(dest_dir)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
shutil.copy(file_path, file_path_dest)
# If -default in path, create a -default less copy of the file
@ -360,28 +387,32 @@ class Site:
continue
self.log.debug("[COPY] Default file: %s to %s..." % (file_inner_path, file_path_dest))
dest_dir = os.path.dirname(file_path_dest)
if not os.path.isdir(dest_dir): os.makedirs(dest_dir)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
shutil.copy(file_path, file_path_dest)
# Sign if content json
if file_path_dest.endswith("/content.json"):
new_site.storage.onUpdated(file_inner_path.replace("-default", ""))
new_site.content_manager.loadContent(file_inner_path.replace("-default", ""), add_bad_files=False, load_includes=False)
if privatekey: new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey)
if privatekey: new_site.content_manager.sign("content.json", privatekey)
new_site.content_manager.loadContent(
file_inner_path.replace("-default", ""), add_bad_files=False, load_includes=False
)
if privatekey:
new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey)
if privatekey:
new_site.content_manager.sign("content.json", privatekey)
# Rebuild DB
if new_site.storage.isFile("dbschema.json"): new_site.storage.rebuildDb()
if new_site.storage.isFile("dbschema.json"):
new_site.storage.rebuildDb()
return new_site
# Check and download the file if it does not exist
def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0):
if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything
return True
elif self.settings["serving"] == False: # Site not serving
elif self.settings["serving"] is False: # Site not serving
return False
else: # Wait until file downloaded
self.bad_files[inner_path] = True # Mark as bad file
@ -392,14 +423,17 @@ class Site:
task = self.worker_manager.addTask("content.json", peer)
task.get()
self.content_manager.loadContent()
if not self.content_manager.contents.get("content.json"): return False # Content.json download failed
if not self.content_manager.contents.get("content.json"):
return False # Content.json download failed
if not inner_path.endswith("content.json") and not self.content_manager.getFileInfo(inner_path): # No info for file, download all content.json first
if not inner_path.endswith("content.json") and not self.content_manager.getFileInfo(inner_path):
# No info for file, download all content.json first
self.log.debug("No info for %s, waiting for all content.json" % inner_path)
success = self.downloadContent("content.json", download_files=False)
if not success: return False
if not self.content_manager.getFileInfo(inner_path): return False # Still no info for file
if not success:
return False
if not self.content_manager.getFileInfo(inner_path):
return False # Still no info for file
task = self.worker_manager.addTask(inner_path, peer, priority=priority)
if blocking:
@ -407,14 +441,15 @@ class Site:
else:
return task
# Add or update a peer to site
def addPeer(self, ip, port, return_peer = False):
if not ip: return False
if (ip, port) in self.peer_blacklist: return False # Ignore blacklist (eg. myself)
def addPeer(self, ip, port, return_peer=False):
if not ip:
return False
if (ip, port) in self.peer_blacklist:
return False # Ignore blacklist (eg. myself)
key = "%s:%s" % (ip, port)
if key in self.peers: # Already has this ip
#self.peers[key].found()
# self.peers[key].found()
if return_peer: # Always return peer
return self.peers[key]
else:
@ -424,7 +459,6 @@ class Site:
self.peers[key] = peer
return peer
# Gather peers from connected peers
@util.Noparallel(blocking=False)
def announcePex(self, query_num=2, need_num=5):
@ -451,16 +485,17 @@ class Site:
if res:
self.worker_manager.onPeers()
self.updateWebsocket(peers_added=res)
if done == query_num: break
if done == query_num:
break
self.log.debug("Queried pex from %s peers got %s new peers." % (done, added))
# Gather peers from tracker
# Return: Complete time or False on error
def announceTracker(self, protocol, ip, port, fileserver_port, address_hash, my_peer_id):
s = time.time()
if protocol == "udp": # Udp tracker
if config.disable_udp: return False # No udp supported
if config.disable_udp:
return False # No udp supported
tracker = UdpTrackerClient(ip, port)
tracker.peer_port = fileserver_port
try:
@ -481,12 +516,12 @@ class Site:
}
req = None
try:
url = "http://"+ip+"?"+urllib.urlencode(params)
url = "http://" + ip + "?" + urllib.urlencode(params)
# Load url
with gevent.Timeout(10, False): # Make sure of timeout
req = urllib2.urlopen(url, timeout=8)
response = req.read()
req.fp._sock.recv=None # Hacky avoidance of memory leak for older python versions
req.fp._sock.recv = None # Hacky avoidance of memory leak for older python versions
req.close()
req = None
if not response:
@ -512,18 +547,20 @@ class Site:
# Adding peers
added = 0
for peer in peers:
if not peer["port"]: continue # Dont add peers with port 0
if self.addPeer(peer["addr"], peer["port"]): added += 1
if not peer["port"]:
continue # Dont add peers with port 0
if self.addPeer(peer["addr"], peer["port"]):
added += 1
if added:
self.worker_manager.onPeers()
self.updateWebsocket(peers_added=added)
self.log.debug("Found %s peers, new: %s" % (len(peers), added))
return time.time()-s
return time.time() - s
# Add myself and get other peers from tracker
def announce(self, force=False):
if time.time() < self.last_announce+30 and not force: return # No reannouncing within 30 secs
if time.time() < self.last_announce + 30 and not force:
return # No reannouncing within 30 secs
self.last_announce = time.time()
errors = []
slow = []
@ -560,17 +597,19 @@ class Site:
self.saveSettings()
if len(errors) < len(SiteManager.TRACKERS): # Fewer errors than the total number of trackers
self.log.debug("Announced port %s to %s trackers in %.3fs, errors: %s, slow: %s" % (fileserver_port, announced, time.time()-s, errors, slow))
self.log.debug(
"Announced port %s to %s trackers in %.3fs, errors: %s, slow: %s" %
(fileserver_port, announced, time.time() - s, errors, slow)
)
else:
self.log.error("Announced to %s trackers in %.3fs, failed" % (announced, time.time()-s))
self.log.error("Announced to %s trackers in %.3fs, failed" % (announced, time.time() - s))
if not [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]: # If no connected peer yet then wait for connections
if not [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]:
# If no connected peer yet then wait for connections
gevent.spawn_later(3, self.announcePex, need_num=10) # Spawn 3 secs later
# self.onFileDone.once(lambda inner_path: self.announcePex(need_num=10), "announcePex_%s" % self.address) # After first file downloaded try to find more peers using pex
else: # Else announce immediately
self.announcePex()
# Keep connections to get the updates (required for passive clients)
def needConnections(self, num=3):
need = min(len(self.peers), num) # Need num peers, but at most the total number of peers
@ -586,33 +625,37 @@ class Site:
for peer in self.peers.values():
if not peer.connection or not peer.connection.connected: # No peer connection or disconnected
peer.pex() # Initiate peer exchange
if peer.connection and peer.connection.connected: connected += 1 # Successfully connected
if connected >= need: break
if peer.connection and peer.connection.connected:
connected += 1 # Successfully connected
if connected >= need:
break
return connected
# Return: Probably working, connectable Peers
def getConnectablePeers(self, need_num=5, ignore=[]):
peers = self.peers.values()
random.shuffle(peers)
found = []
for peer in peers:
if peer.key.endswith(":0"): continue # Not connectable
if not peer.connection: continue # No connection
if peer.key in ignore: continue # The requester has this peer
if time.time() - peer.connection.last_recv_time > 60*60*2: # Last message more than 2 hours ago
if peer.key.endswith(":0"):
continue # Not connectable
if not peer.connection:
continue # No connection
if peer.key in ignore:
continue # The requester has this peer
if time.time() - peer.connection.last_recv_time > 60 * 60 * 2: # Last message more than 2 hours ago
peer.connection = None # Cleanup: Dead connection
continue
found.append(peer)
if len(found) >= need_num: break # Found requested number of peers
if len(found) >= need_num:
break # Found requested number of peers
if (not found and not ignore) or (need_num > 5 and need_num < 100 and len(found) < need_num): # Not found any peer and the requester dont have any, return not that good peers or Initial pex, but not /Stats page and we can't give enought peer
found = [peer for peer in peers if not peer.key.endswith(":0") and peer.key not in ignore][0:need_num-len(found)]
if (not found and not ignore) or (need_num > 5 and need_num < 100 and len(found) < need_num):
# Fall back to less reliable peers: we found none and the requester has none, or we can't supply enough
found = [peer for peer in peers if not peer.key.endswith(":0") and peer.key not in ignore][0:need_num - len(found)]
return found
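
The selection rules above can be read as one predicate per peer; a sketch, assuming peer objects carry the key/connection attributes used in this codebase:

import time

def isConnectable(peer, ignore, max_idle=60 * 60 * 2):
    if peer.key.endswith(":0"): # Port 0: not connectable
        return False
    if not peer.connection: # Never connected
        return False
    if peer.key in ignore: # The requester already has this peer
        return False
    if time.time() - peer.connection.last_recv_time > max_idle: # Stale for 2+ hours
        peer.connection = None # Same dead-connection cleanup side effect as the original
        return False
    return True
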
# - Events -
# Add event listeners
@ -626,7 +669,6 @@ class Site:
self.onFileDone.append(lambda inner_path: self.fileDone(inner_path))
self.onFileFail.append(lambda inner_path: self.fileFailed(inner_path))
# Send site status update to websocket clients
def updateWebsocket(self, **kwargs):
if kwargs:
@ -636,14 +678,12 @@ class Site:
for ws in self.websockets:
ws.event("siteChanged", self, param)
# File download started
@util.Noparallel(blocking=False)
def fileStarted(self):
time.sleep(0.001) # Wait for other file-add events
self.updateWebsocket(file_started=True)
# File downloaded successfully
def fileDone(self, inner_path):
# File downloaded, remove it from bad files
@ -657,14 +697,12 @@ class Site:
self.updateWebsocket(file_done=inner_path)
# File download failed
def fileFailed(self, inner_path):
if inner_path == "content.json":
self.content_updated = False
self.log.debug("Can't update content.json")
if inner_path in self.bad_files:
self.bad_files[inner_path] = self.bad_files.get(inner_path, 0)+1
self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1
self.updateWebsocket(file_failed=inner_path)


@ -1,22 +1,25 @@
import json, logging, time, re, os
import gevent
import json
import logging
import re
import os
from Plugin import PluginManager
from Config import config
TRACKERS = [
("udp", "open.demonii.com", 1337),
#("udp", "sugoi.pomf.se", 2710),
#("udp", "tracker.coppersurfer.tk", 80),
# ("udp", "sugoi.pomf.se", 2710),
# ("udp", "tracker.coppersurfer.tk", 80),
("udp", "tracker.leechers-paradise.org", 6969),
("udp", "9.rarbg.com", 2710),
#("udp", "www.eddie4.nl", 6969),
#("udp", "trackr.sytes.net", 80),
#("udp", "tracker4.piratux.com", 6969)
#("http", "exodus.desync.com:80/announce", None), Off
# ("udp", "www.eddie4.nl", 6969),
# ("udp", "trackr.sytes.net", 80),
# ("udp", "tracker4.piratux.com", 6969)
# ("http", "exodus.desync.com:80/announce", None), Off
("http", "tracker.aletorrenty.pl:2710/announce", None),
#("http", "torrent.gresille.org/announce", None), # Slow
#("http", "announce.torrentsmd.com:6969/announce", None), # Off
#("http", "i.bandito.org/announce", None), # Off
# ("http", "torrent.gresille.org/announce", None), # Slow
# ("http", "announce.torrentsmd.com:6969/announce", None), # Off
# ("http", "i.bandito.org/announce", None), # Off
("http", "retracker.telecom.kz/announce", None),
("http", "torrent.gresille.org/announce", None),
@ -25,13 +28,15 @@ TRACKERS = [
@PluginManager.acceptPlugins
class SiteManager(object):
def __init__(self):
self.sites = None
# Load all sites from data/sites.json
def load(self):
from Site import Site
if not self.sites: self.sites = {}
if not self.sites:
self.sites = {}
address_found = []
added = 0
# Load new addresses
@ -47,53 +52,48 @@ class SiteManager(object):
del(self.sites[address])
logging.debug("Removed site: %s" % address)
if added: logging.debug("SiteManager added %s sites" % added)
if added:
logging.debug("SiteManager added %s sites" % added)
# Checks if it's a valid address
def isAddress(self, address):
return re.match("^[A-Za-z0-9]{26,35}$", address)
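
Note that isAddress is a quick shape check, not full base58 validation (it also accepts 0, O, I and l, which base58 excludes). For example:

import re

def isAddress(address):
    return re.match("^[A-Za-z0-9]{26,35}$", address)

print bool(isAddress("1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D")) # True: 34 alphanumerics
print bool(isAddress("../../users.json")) # False: slashes and dots rejected
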
# Return: Site object or None if not found
def get(self, address):
if self.sites == None: # Not loaded yet
if self.sites is None: # Not loaded yet
self.load()
return self.sites.get(address)
# Return or create a site and start downloading its files
def need(self, address, all_file=True):
from Site import Site
new = False
site = self.get(address)
if not site: # Site doesn't exist yet
if not self.isAddress(address): return False # Not address: %s % address
if not self.isAddress(address):
return False # Not a valid address
logging.debug("Added new site: %s" % address)
site = Site(address)
self.sites[address] = site
if not site.settings["serving"]: # Maybe it was deleted before
site.settings["serving"] = True
site.saveSettings()
new = True
if all_file: site.download()
if all_file:
site.download()
return site
def delete(self, address):
logging.debug("SiteManager deleted site: %s" % address)
del(self.sites[address])
# Lazy load sites
def list(self):
if self.sites == None: # Not loaded yet
if self.sites is None: # Not loaded yet
self.load()
return self.sites
site_manager = SiteManager() # Singleton
peer_blacklist = [] # Don't download from these peers


@ -1,11 +1,19 @@
import os, re, shutil, json, time, sqlite3
import os
import re
import shutil
import json
import time
import sqlite3
import gevent.event
from Db import Db
from Debug import Debug
from Config import config
class SiteStorage:
def __init__(self, site, allow_create=True):
self.site = site
self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data directory
@ -21,7 +29,6 @@ class SiteStorage:
else:
raise Exception("Directory not exists: %s" % self.directory)
# Load db from dbschema.json
def openDb(self, check=True):
schema = self.loadJson("dbschema.json")
@ -32,34 +39,36 @@ class SiteStorage:
self.db = Db(schema, db_path)
if check and not self.db_checked:
changed_tables = self.db.checkTables()
if changed_tables: self.rebuildDb(delete_db=False) # Todo only update the changed table datas
if changed_tables:
self.rebuildDb(delete_db=False) # TODO: only update the changed tables' data
def closeDb(self):
if self.db: self.db.close()
if self.db:
self.db.close()
self.event_db_busy = None
self.db = None
# Return db class
def getDb(self):
if not self.db:
self.log.debug("No database, waiting for dbschema.json...")
self.site.needFile("dbschema.json", priority=1)
self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exists
if self.has_db: self.openDb()
if self.has_db:
self.openDb()
return self.db
# Rebuild sql cache
def rebuildDb(self, delete_db=True):
self.has_db = self.isFile("dbschema.json")
if not self.has_db: return False
if not self.has_db:
return False
self.event_db_busy = gevent.event.AsyncResult()
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path) and delete_db:
if self.db: self.db.close() # Close db if open
if self.db:
self.db.close() # Close db if open
self.log.info("Deleting %s" % db_path)
try:
os.unlink(db_path)
@ -77,24 +86,27 @@ class SiteStorage:
for content_inner_path, content in self.site.content_manager.contents.items():
content_path = self.getPath(content_inner_path)
if os.path.isfile(content_path): # Content.json file exists
if self.db.loadJson(content_path, cur=cur): found += 1
if self.db.loadJson(content_path, cur=cur):
found += 1
else:
self.log.error("[MISSING] %s" % content_inner_path)
for file_relative_path in content["files"].keys():
if not file_relative_path.endswith(".json"): continue # We only interesed in json files
file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
if not file_relative_path.endswith(".json"):
continue # We are only interested in json files
content_inner_path_dir = self.site.content_manager.toDir(content_inner_path) # Content.json dir relative to site
file_inner_path = content_inner_path_dir + file_relative_path # File relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
if os.path.isfile(file_path):
if self.db.loadJson(file_path, cur=cur): found += 1
if self.db.loadJson(file_path, cur=cur):
found += 1
else:
self.log.error("[MISSING] %s" % file_inner_path)
cur.execute("END")
self.log.info("Imported %s data file in %ss" % (found, time.time()-s))
self.log.info("Imported %s data file in %ss" % (found, time.time() - s))
self.event_db_busy.set(True) # Event done, notify waiters
self.event_db_busy = None # Clear event
# Execute sql query or rebuild on dberror
def query(self, query, params=None):
if self.event_db_busy: # Db not ready for queries
@ -111,17 +123,14 @@ class SiteStorage:
raise err
return res
# Open file object
def open(self, inner_path, mode="rb"):
return open(self.getPath(inner_path), mode)
# Read a file's content
def read(self, inner_path, mode="r"):
return open(self.getPath(inner_path), mode).read()
# Write content to file
def write(self, inner_path, content):
file_path = self.getPath(inner_path)
@ -139,7 +148,6 @@ class SiteStorage:
del content
self.onUpdated(inner_path)
# Site content updated
def onUpdated(self, inner_path):
file_path = self.getPath(inner_path)
@ -155,7 +163,6 @@ class SiteStorage:
self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
self.closeDb()
# Load and parse json file
def loadJson(self, inner_path):
with self.open(inner_path) as file:
@ -165,18 +172,18 @@ class SiteStorage:
def writeJson(self, inner_path, data):
content = json.dumps(data, indent=2, sort_keys=True)
# Make it a little more compact by removing unnecessary white space
def compact_list(match):
return "[ "+match.group(1).strip()+" ]"
return "[ " + match.group(1).strip() + " ]"
def compact_dict(match):
return "{ "+match.group(1).strip()+" }"
return "{ " + match.group(1).strip() + " }"
content = re.sub("\[([^,\{\[]{10,100}?)\]", compact_list, content, flags=re.DOTALL)
content = re.sub("\{([^,\[\{]{10,100}?)\}", compact_dict, content, flags=re.DOTALL)
# Write to disk
self.write(inner_path, content)
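
To see what the compaction regexes do, here is a before/after on hypothetical data; short, comma-free list (and dict) bodies get collapsed onto one line:

import json
import re

data = {"optional": ["a-fairly-long-name.bin"]}
content = json.dumps(data, indent=2, sort_keys=True) # List spread over three lines
content = re.sub(
    "\[([^,\{\[]{10,100}?)\]",
    lambda m: "[ " + m.group(1).strip() + " ]",
    content, flags=re.DOTALL
)
print content # -> {  "optional": [ "a-fairly-long-name.bin" ] } on two lines
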
# Get file size
def getSize(self, inner_path):
path = self.getPath(inner_path)
@ -185,29 +192,24 @@ class SiteStorage:
else:
return 0
# File exists
def isFile(self, inner_path):
return os.path.isfile(self.getPath(inner_path))
# Dir exists
def isDir(self, inner_path):
return os.path.isdir(self.getPath(inner_path))
# Security check and return path of site's file
def getPath(self, inner_path):
inner_path = inner_path.replace("\\", "/") # Windows separator fix
inner_path = re.sub("^%s/" % re.escape(self.directory), "", inner_path) # Remove site directory if the path begins with it
file_path = self.directory+"/"+inner_path
file_path = self.directory + "/" + inner_path
allowed_dir = os.path.abspath(self.directory) # Only files within this directory allowed
if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
raise Exception("File not allowed: %s" % file_path)
return file_path
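
This guard is the site sandbox: every path must resolve inside the site's own data directory. A runnable restatement (the site address is hypothetical):

import os
import re

def getPath(directory, inner_path):
    inner_path = inner_path.replace("\\", "/") # Windows separator fix
    inner_path = re.sub("^%s/" % re.escape(directory), "", inner_path)
    file_path = directory + "/" + inner_path
    allowed_dir = os.path.abspath(directory) # Only files within this directory allowed
    if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
        raise Exception("File not allowed: %s" % file_path)
    return file_path

print getPath("data/1SiteAddr", "content.json") # -> data/1SiteAddr/content.json
print getPath("data/1SiteAddr", "../users.json") # -> raises Exception
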
# Verify all files sha512sum using content.json
def verifyFiles(self, quick_check=False): # Fast = using file size
bad_files = []
@ -219,7 +221,7 @@ class SiteStorage:
self.log.debug("[MISSING] %s" % content_inner_path)
bad_files.append(content_inner_path)
for file_relative_path in content["files"].keys():
file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
if not os.path.isfile(file_path):
@ -235,20 +237,21 @@ class SiteStorage:
if not ok:
self.log.debug("[CHANGED] %s" % file_inner_path)
bad_files.append(file_inner_path)
self.log.debug("%s verified: %s files, quick_check: %s, bad files: %s" % (content_inner_path, len(content["files"]), quick_check, bad_files))
self.log.debug(
"%s verified: %s files, quick_check: %s, bad files: %s" %
(content_inner_path, len(content["files"]), quick_check, bad_files)
)
return bad_files
# Check and try to fix site files integrity
def checkFiles(self, quick_check=True):
s = time.time()
bad_files = self.verifyFiles(quick_check)
if bad_files:
for bad_file in bad_files:
self.site.bad_files[bad_file] = self.site.bad_files.get("bad_file", 0)+1
self.log.debug("Checked files in %.2fs... Quick:%s" % (time.time()-s, quick_check))
self.site.bad_files[bad_file] = self.site.bad_files.get(bad_file, 0) + 1
self.log.debug("Checked files in %.2fs... Quick:%s" % (time.time() - s, quick_check))
# Delete all of the site's files
def deleteFiles(self):
@ -258,31 +261,33 @@ class SiteStorage:
try:
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path): os.unlink(db_path)
if os.path.isfile(db_path):
os.unlink(db_path)
except Exception, err:
self.log.error("Db file delete error: %s" % err)
self.log.debug("Deleting files from content.json...")
files = [] # Get filenames
for content_inner_path, content in self.site.content_manager.contents.items():
files.append(content_inner_path)
for file_relative_path in content["files"].keys():
file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path)
for inner_path in files:
path = self.getPath(inner_path)
if os.path.isfile(path): os.unlink(path)
if os.path.isfile(path):
os.unlink(path)
self.log.debug("Deleting empty dirs...")
for root, dirs, files in os.walk(self.directory, topdown=False):
for dir in dirs:
path = os.path.join(root,dir)
path = os.path.join(root, dir)
if os.path.isdir(path) and os.listdir(path) == []:
os.removedirs(path)
self.log.debug("Removing %s" % path)
if os.path.isdir(self.directory) and os.listdir(self.directory) == []: os.removedirs(self.directory) # Remove sites directory if empty
if os.path.isdir(self.directory) and os.listdir(self.directory) == []:
os.removedirs(self.directory) # Remove sites directory if empty
if os.path.isdir(self.directory):
self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory)


@ -0,0 +1,140 @@
import time
import socket
import msgpack
print "Connecting..."
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 1234))
print "1 Threaded: Send, receive 10000 ping request...",
s = time.time()
for i in range(10000):
sock.sendall(msgpack.packb({"cmd": "Ping"}))
req = sock.recv(16 * 1024)
print time.time() - s, repr(req), time.time() - s
print "1 Threaded: Send, receive, decode 10000 ping request...",
s = time.time()
unpacker = msgpack.Unpacker()
reqs = 0
for i in range(10000):
sock.sendall(msgpack.packb({"cmd": "Ping"}))
unpacker.feed(sock.recv(16 * 1024))
for req in unpacker:
reqs += 1
print "Found:", req, "x", reqs, time.time() - s
print "1 Threaded: Send, receive, decode, reconnect 1000 ping request...",
s = time.time()
unpacker = msgpack.Unpacker()
reqs = 0
for i in range(1000):
sock.sendall(msgpack.packb({"cmd": "Ping"}))
unpacker.feed(sock.recv(16 * 1024))
for req in unpacker:
reqs += 1
sock.close()
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 1234))
print "Found:", req, "x", reqs, time.time() - s
print "1 Threaded: Request, receive, decode 10000 x 10k data request...",
s = time.time()
unpacker = msgpack.Unpacker()
reqs = 0
for i in range(10000):
sock.sendall(msgpack.packb({"cmd": "Bigdata"}))
"""buff = StringIO()
data = sock.recv(16*1024)
buff.write(data)
if not data:
break
while not data.endswith("\n"):
data = sock.recv(16*1024)
if not data: break
buff.write(data)
req = msgpack.unpackb(buff.getvalue().strip("\n"))
reqs += 1"""
req_found = False
while not req_found:
buff = sock.recv(16 * 1024)
unpacker.feed(buff)
for req in unpacker:
reqs += 1
req_found = True
break # Only process one request
print "Found:", len(req["res"]), "x", reqs, time.time() - s
print "10 Threaded: Request, receive, decode 10000 x 10k data request...",
import gevent
s = time.time()
reqs = 0
req = None
def requester():
global reqs, req
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 1234))
unpacker = msgpack.Unpacker()
for i in range(1000):
sock.sendall(msgpack.packb({"cmd": "Bigdata"}))
req_found = False
while not req_found:
buff = sock.recv(16 * 1024)
unpacker.feed(buff)
for req in unpacker:
reqs += 1
req_found = True
break # Only process one request
threads = []
for i in range(10):
threads.append(gevent.spawn(requester))
gevent.joinall(threads)
print "Found:", len(req["res"]), "x", reqs, time.time() - s
print "1 Threaded: ZeroMQ Send, receive 1000 ping request...",
s = time.time()
import zmq.green as zmq
c = zmq.Context()
zmq_sock = c.socket(zmq.REQ)
zmq_sock.connect('tcp://127.0.0.1:1234')
for i in range(1000):
zmq_sock.send(msgpack.packb({"cmd": "Ping"}))
req = zmq_sock.recv(16 * 1024)
print "Found:", req, time.time() - s
print "1 Threaded: ZeroMQ Send, receive 1000 x 10k data request...",
s = time.time()
import zmq.green as zmq
c = zmq.Context()
zmq_sock = c.socket(zmq.REQ)
zmq_sock.connect('tcp://127.0.0.1:1234')
for i in range(1000):
zmq_sock.send(msgpack.packb({"cmd": "Bigdata"}))
req = msgpack.unpackb(zmq_sock.recv(1024 * 1024))
print "Found:", len(req["res"]), time.time() - s
print "1 Threaded: direct ZeroMQ Send, receive 1000 x 10k data request...",
s = time.time()
import zmq.green as zmq
c = zmq.Context()
zmq_sock = c.socket(zmq.REQ)
zmq_sock.connect('tcp://127.0.0.1:1233')
for i in range(1000):
zmq_sock.send(msgpack.packb({"cmd": "Bigdata"}))
req = msgpack.unpackb(zmq_sock.recv(1024 * 1024))
print "Found:", len(req["res"]), time.time() - s


@ -1,11 +1,16 @@
#!/usr/bin/python2
from gevent import monkey; monkey.patch_all()
import os, time, sys, socket, ssl
from gevent import monkey
monkey.patch_all()
import os
import time
import sys
import socket
import ssl
sys.path.append(os.path.abspath("src")) # Imports relative to src dir
import cStringIO as StringIO
import gevent
from gevent.queue import Queue, Empty, JoinableQueue
from gevent.server import StreamServer
from gevent.pool import Pool
from util import SslPatch
@ -13,25 +18,31 @@ from util import SslPatch
# Server
socks = []
data = os.urandom(1024*100)
data = os.urandom(1024 * 100)
data += "\n"
def handle(sock_raw, addr):
socks.append(sock_raw)
sock = sock_raw
#sock = ctx.wrap_socket(sock, server_side=True)
#if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
# sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem', certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
#fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
# sock = ctx.wrap_socket(sock, server_side=True)
# if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
# sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem',
# certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
# fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
try:
while True:
line = sock.recv(16*1024)
if not line: break
line = sock.recv(16 * 1024)
if not line:
break
if line == "bye\n":
break
elif line == "gotssl\n":
sock.sendall("yes\n")
sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='data/key-rsa.pem', certfile='data/cert-rsa.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
sock = gevent.ssl.wrap_socket(
sock_raw, server_side=True, keyfile='data/key-rsa.pem', certfile='data/cert-rsa.pem',
ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1
)
else:
sock.sendall(data)
except Exception, err:
@ -44,7 +55,7 @@ def handle(sock_raw, addr):
socks.remove(sock_raw)
pool = Pool(1000) # do not accept more than 1000 connections
server = StreamServer(('127.0.0.1', 1234), handle) #
server = StreamServer(('127.0.0.1', 1234), handle)
server.start()
@ -54,36 +65,38 @@ server.start()
total_num = 0
total_bytes = 0
clipher = None
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:" + \
"!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
# ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
def getData():
global total_num, total_bytes, clipher
data = None
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#sock = socket.ssl(s)
#sock = ssl.wrap_socket(sock)
# sock = socket.ssl(s)
# sock = ssl.wrap_socket(sock)
sock.connect(("127.0.0.1", 1234))
#sock.do_handshake()
#clipher = sock.cipher()
# sock.do_handshake()
# clipher = sock.cipher()
sock.send("gotssl\n")
if sock.recv(128) == "yes\n":
sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
sock.do_handshake()
clipher = sock.cipher()
for req in range(100):
sock.sendall("req\n")
buff = StringIO.StringIO()
data = sock.recv(16*1024)
data = sock.recv(16 * 1024)
buff.write(data)
if not data:
break
while not data.endswith("\n"):
data = sock.recv(16*1024)
if not data: break
data = sock.recv(16 * 1024)
if not data:
break
buff.write(data)
total_num += 1
total_bytes += buff.tell()
@ -95,15 +108,18 @@ def getData():
s = time.time()
def info():
import psutil, os
import psutil
import os
process = psutil.Process(os.getpid())
if "memory_info" in dir(process):
memory_info = process.memory_info
else:
memory_info = process.get_memory_info
while 1:
print total_num, "req", (total_bytes/1024), "kbytes", "transfered in", time.time()-s, "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)
print total_num, "req", (total_bytes / 1024), "kbytes", "transferred in", time.time() - s,
print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)
time.sleep(1)
gevent.spawn(info)
@ -115,7 +131,7 @@ for test in range(10):
gevent.joinall(clients)
print total_num, "req", (total_bytes/1024), "kbytes", "transfered in", time.time()-s
print total_num, "req", (total_bytes / 1024), "kbytes", "transferred in", time.time() - s
# Separate client/server process:
# 10*10*100:


@ -1,4 +1,10 @@
import time, re, os, mimetypes, json, cgi
import time
import re
import os
import mimetypes
import json
import cgi
from Config import config
from Site import SiteManager
from User import UserManager
@ -16,15 +22,25 @@ status_texts = {
@PluginManager.acceptPlugins
class UiRequest(object):
def __init__(self, server, get, env, start_response):
if server:
self.server = server
self.log = server.log
self.get = get # Get parameters
self.env = env # Environment settings
self.start_response = start_response # Start response function
self.user = None
# Return posted variables as dict
def getPosted(self):
if self.env['REQUEST_METHOD'] == "POST":
return dict(cgi.parse_qsl(
self.env['wsgi.input'].readline().decode()
))
else:
return {}
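
getPosted reads one line of the wsgi input and hands it to cgi.parse_qsl, so a urlencoded form body decodes like this (hypothetical values):

import cgi

body = "cmd=response&address=1SiteAddr" # What wsgi.input would yield for a form POST
posted = dict(cgi.parse_qsl(body))
print posted["cmd"] # -> "response"
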
# Call the request handler function based on path
def route(self, path):
@ -57,22 +73,19 @@ class UiRequest(object):
if body:
return body
else:
func = getattr(self, "action"+path.lstrip("/"), None) # Check if we have action+request_path function
func = getattr(self, "action" + path.lstrip("/"), None) # Check if we have action+request_path function
if func:
return func()
else:
return self.error404(path)
# The request is proxied by chrome extension
def isProxyRequest(self):
return self.env["PATH_INFO"].startswith("http://")
def isAjaxRequest(self):
return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"
# Get mime by filename
def getContentType(self, file_name):
content_type = mimetypes.guess_type(file_name)[0]
@ -83,7 +96,6 @@ class UiRequest(object):
content_type = "application/octet-stream"
return content_type
# Returns: <dict> Cookies based on self.env
def getCookies(self):
raw_cookies = self.env.get('HTTP_COOKIE')
@ -93,43 +105,44 @@ class UiRequest(object):
else:
return {}
def getCurrentUser(self):
if self.user: return self.user # Cache
if self.user:
return self.user # Cache
self.user = UserManager.user_manager.get() # Get user
if not self.user:
self.user = UserManager.user_manager.create()
return self.user
# Send response headers
def sendHeader(self, status=200, content_type="text/html", extra_headers=[]):
if content_type == "text/html": content_type = "text/html; charset=utf-8"
if content_type == "text/html":
content_type = "text/html; charset=utf-8"
headers = []
headers.append(("Version", "HTTP/1.1"))
headers.append(("Access-Control-Allow-Origin", "*")) # Allow json access
if self.env["REQUEST_METHOD"] == "OPTIONS":
headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept")) # Allow json access
# Allow json access
headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept"))
if (self.env["REQUEST_METHOD"] == "OPTIONS" or not self.isAjaxRequest()) and status == 200 and (content_type == "text/css" or content_type.startswith("application") or self.env["REQUEST_METHOD"] == "OPTIONS" or content_type.startswith("image")): # Cache Css, Js, Image files for 10min
cacheable_type = (
content_type == "text/css" or content_type.startswith("image") or
self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript"
)
if status == 200 and cacheable_type: # Cache Css, Js, Image files for 10min
headers.append(("Cache-Control", "public, max-age=600")) # Cache 10 min
else: # No caching for everything else
headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all
#headers.append(("Cache-Control", "public, max-age=604800")) # Cache 1 week
headers.append(("Content-Type", content_type))
for extra_header in extra_headers:
headers.append(extra_header)
return self.start_response(status_texts[status], headers)
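
Restated, the caching decision above is a small predicate; only successful responses with static-ish content types get the 10-minute public cache header:

def isCacheable(method, status, content_type):
    # Mirrors the cacheable_type condition above
    return status == 200 and (
        content_type == "text/css" or content_type.startswith("image") or
        method == "OPTIONS" or content_type == "application/javascript"
    )

print isCacheable("GET", 200, "text/css") # True: public, max-age=600
print isCacheable("GET", 200, "text/html; charset=utf-8") # False: no-cache headers
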
# Renders a template
def render(self, template_path, *args, **kwargs):
#template = SimpleTemplate(open(template_path), lookup=[os.path.dirname(template_path)])
#yield str(template.render(*args, **kwargs).encode("utf8"))
template = open(template_path).read().decode("utf8")
return template.format(**kwargs).encode("utf8")
# - Actions -
# Redirect to a url
@ -137,40 +150,46 @@ class UiRequest(object):
self.start_response('301 Redirect', [('Location', url)])
yield "Location changed: %s" % url
def actionIndex(self):
return self.actionRedirect("/"+config.homepage)
return self.actionRedirect("/" + config.homepage)
# Render a file from media with iframe site wrapper
def actionWrapper(self, path, extra_headers=None):
if not extra_headers: extra_headers = []
if self.get.get("wrapper") == "False": return self.actionSiteMedia("/media"+path) # Only serve html files with frame
if not extra_headers:
extra_headers = []
if self.get.get("wrapper") == "False":
return self.actionSiteMedia("/media" + path) # Only serve html files with frame
match = re.match("/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
if match:
address = match.group("address")
inner_path = match.group("inner_path").lstrip("/")
if "." in inner_path and not inner_path.endswith(".html"): return self.actionSiteMedia("/media"+path) # Only serve html files with frame
if self.env.get("HTTP_X_REQUESTED_WITH"): return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper
if "." in inner_path and not inner_path.endswith(".html"):
return self.actionSiteMedia("/media" + path) # Only serve html files with frame
if self.env.get("HTTP_X_REQUESTED_WITH"):
return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper
file_inner_path = inner_path
if not file_inner_path: file_inner_path = "index.html" # If inner path defaults to index.html
if not file_inner_path:
file_inner_path = "index.html" # If inner path defaults to index.html
if not inner_path and not path.endswith("/"): inner_path = address+"/" # Fix relative resources loading if missing / end of site address
if not inner_path and not path.endswith("/"):
inner_path = address + "/" # Fix relative resource loading if / is missing at the end of the site address
inner_path = re.sub(".*/(.+)", "\\1", inner_path) # Load innerframe relative to current url
site = SiteManager.site_manager.get(address)
if site and site.content_manager.contents.get("content.json") and (not site.getReachableBadFiles() or site.settings["own"]): # Its downloaded or own
if (
site and site.content_manager.contents.get("content.json") and
(not site.getReachableBadFiles() or site.settings["own"])
): # It's downloaded or our own
title = site.content_manager.contents["content.json"]["title"]
else:
title = "Loading %s..." % address
site = SiteManager.site_manager.need(address) # Start download site
if not site: return False
#extra_headers.append(("X-Frame-Options", "DENY"))
if not site:
return False
self.sendHeader(extra_headers=extra_headers[:])
@ -179,27 +198,31 @@ class UiRequest(object):
body_style = ""
meta_tags = ""
if self.env.get("QUERY_STRING"): query_string = "?"+self.env["QUERY_STRING"]+"&wrapper=False"
else: query_string = "?wrapper=False"
if self.env.get("QUERY_STRING"):
query_string = "?" + self.env["QUERY_STRING"] + "&wrapper=False"
else:
query_string = "?wrapper=False"
if self.isProxyRequest(): # It's a remote proxy request
if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1
server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
else: # Remote client, use SERVER_NAME as server's real address
server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
homepage = "http://zero/"+config.homepage
homepage = "http://zero/" + config.homepage
else: # Use relative path
server_url = ""
homepage = "/"+config.homepage
homepage = "/" + config.homepage
if site.content_manager.contents.get("content.json") : # Got content.json
if site.content_manager.contents.get("content.json"): # Got content.json
content = site.content_manager.contents["content.json"]
if content.get("background-color"):
body_style += "background-color: "+cgi.escape(site.content_manager.contents["content.json"]["background-color"], True)+";"
body_style += "background-color: %s;" % \
cgi.escape(site.content_manager.contents["content.json"]["background-color"], True)
if content.get("viewport"):
meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)
return self.render("src/Ui/template/wrapper.html",
return self.render(
"src/Ui/template/wrapper.html",
server_url=server_url,
inner_path=inner_path,
file_inner_path=file_inner_path,
@ -218,17 +241,16 @@ class UiRequest(object):
else: # Bad url
return False
# Returns whether a media request is allowed from that referer
def isMediaRequestAllowed(self, site_address, referer):
referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
return referer_path.startswith("/"+site_address)
return referer_path.startswith("/" + site_address)
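
The referer check strips the scheme and host, drops the /media prefix, then requires the path to start with the site address; for example (hypothetical addresses):

import re

def isAllowed(site_address, referer):
    referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "")
    return referer_path.startswith("/" + site_address)

print isAllowed("1SiteAddr", "http://127.0.0.1:43110/1SiteAddr/index.html") # True
print isAllowed("1SiteAddr", "http://127.0.0.1:43110/1OtherSite/") # False
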
# Serve a media file for a site
def actionSiteMedia(self, path):
path = path.replace("/index.html/", "/") # Base Backward compatibility fix
if path.endswith("/"): path = path+"index.html"
if path.endswith("/"):
path = path + "index.html"
match = re.match("/media/(?P<address>[A-Za-z0-9\._-]+)/(?P<inner_path>.*)", path)
@ -242,22 +264,27 @@ class UiRequest(object):
file_path = "%s/%s/%s" % (config.data_dir, address, match.group("inner_path"))
allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address)) # Only files within data/sitehash allowed
data_dir = os.path.abspath("data") # No files from data/ allowed
if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir) or allowed_dir == data_dir: # File not in allowed path
if (
".." in file_path
or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir)
or allowed_dir == data_dir
): # File not in allowed path
return self.error403()
else:
if config.debug and file_path.split("/")[-1].startswith("all."): # When debugging merge *.css to all.css and *.js to all.js
if config.debug and file_path.split("/")[-1].startswith("all."):
# When debugging, merge *.css to all.css and *.js to all.js
site = self.server.sites.get(address)
if site.settings["own"]:
from Debug import DebugMedia
DebugMedia.merge(file_path)
if os.path.isfile(file_path): # File exists
#self.sendHeader(content_type=self.getContentType(file_path)) # ?? Get Exception without this
# self.sendHeader(content_type=self.getContentType(file_path)) # ?? Get Exception without this
return self.actionFile(file_path)
else: # File does not exist, try to download
site = SiteManager.site_manager.need(address, all_file=False)
result = site.needFile(match.group("inner_path"), priority=1) # Wait until file downloads
if result:
#self.sendHeader(content_type=self.getContentType(file_path))
# self.sendHeader(content_type=self.getContentType(file_path))
return self.actionFile(file_path)
else:
self.log.debug("File not found: %s" % match.group("inner_path"))
@ -266,31 +293,33 @@ class UiRequest(object):
else: # Bad url
return self.error404(path)
# Serve a media file for the ui
def actionUiMedia(self, path):
match = re.match("/uimedia/(?P<inner_path>.*)", path)
if match: # Looks like a valid path
file_path = "src/Ui/media/%s" % match.group("inner_path")
allowed_dir = os.path.abspath("src/Ui/media") # Only files within src/Ui/media allowed
if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path
if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
# File not in allowed path
return self.error403()
else:
if config.debug and match.group("inner_path").startswith("all."): # When debugging merge *.css to all.css and *.js to all.js
if config.debug and match.group("inner_path").startswith("all."):
# When debugging, merge *.css to all.css and *.js to all.js
from Debug import DebugMedia
DebugMedia.merge(file_path)
return self.actionFile(file_path)
else: # Bad url
return self.error400()
# Stream a file to client
def actionFile(self, file_path, block_size = 64*1024):
def actionFile(self, file_path, block_size=64 * 1024):
if os.path.isfile(file_path):
# Try to figure out content type by extension
content_type = self.getContentType(file_path)
self.sendHeader(content_type = content_type) # TODO: Dont allow external access: extra_headers=[("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")]
# TODO: Dont allow external access: extra_headers=
# [("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")]
self.sendHeader(content_type=content_type)
if self.env["REQUEST_METHOD"] != "OPTIONS":
file = open(file_path, "rb")
while 1:
@ -306,7 +335,6 @@ class UiRequest(object):
else: # File does not exist
yield self.error404(file_path)
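
The body of the while loop above is elided between hunks; presumably it is the usual fixed-size read-and-yield pattern, sketched here:

def streamFile(file_path, block_size=64 * 1024):
    file = open(file_path, "rb")
    while 1:
        block = file.read(block_size)
        if block:
            yield block
        else: # End of file
            break
    file.close()
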
# On websocket connection
def actionWebsocket(self):
ws = self.env.get("wsgi.websocket")
@ -315,7 +343,8 @@ class UiRequest(object):
# Find site by wrapper_key
site = None
for site_check in self.server.sites.values():
if site_check.settings["wrapper_key"] == wrapper_key: site = site_check
if site_check.settings["wrapper_key"] == wrapper_key:
site = site_check
if site: # Correct wrapper key
user = self.getCurrentUser()
@ -325,7 +354,8 @@ class UiRequest(object):
ui_websocket = UiWebsocket(ws, site, self.server, user)
site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events
ui_websocket.start()
for site_check in self.server.sites.values(): # Remove websocket from every site (admin sites allowed to join other sites event channels)
for site_check in self.server.sites.values():
# Remove websocket from every site (admin sites allowed to join other sites event channels)
if ui_websocket in site_check.websockets:
site_check.websockets.remove(ui_websocket)
return "Bye."
@ -333,10 +363,9 @@ class UiRequest(object):
self.log.error("Wrapper key not found: %s" % wrapper_key)
return self.error403()
else:
start_response("400 Bad Request", [])
self.start_response("400 Bad Request", [])
return "Not a websocket!"
# Debug last error
def actionDebug(self):
# Raise last error from DebugHook
@ -348,7 +377,6 @@ class UiRequest(object):
self.sendHeader()
return "No error! :)"
# Just raise an error to get console
def actionConsole(self):
import sys
@ -356,19 +384,17 @@ class UiRequest(object):
main = sys.modules["main"]
raise Exception("Here is your console")
# - Tests -
def actionTestStream(self):
self.sendHeader()
yield " "*1080 # Overflow browser's buffer
yield " " * 1080 # Overflow browser's buffer
yield "He"
time.sleep(1)
yield "llo!"
yield "Running websockets: %s" % len(self.server.websockets)
self.server.sendMessage("Hello!")
# - Errors -
# Send bad request error
@ -376,30 +402,27 @@ class UiRequest(object):
self.sendHeader(400)
return "Bad Request"
# You are not allowed to access this
def error403(self, message="Forbidden"):
self.sendHeader(403)
return message
# Send file not found error
def error404(self, path = None):
def error404(self, path=None):
self.sendHeader(404)
return "Not Found: %s" % path.encode("utf8")
# Internal server error
def error500(self, message = ":("):
def error500(self, message=":("):
self.sendHeader(500)
return "<h1>Server error</h1>%s" % cgi.escape(message)
# - Reload for easier development -
#def reload():
#import imp, sys
#global UiWebsocket
#UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket
#reload(sys.modules["User.UserManager"])
#UserManager.reloadModule()
#self.user = UserManager.user_manager.getCurrent()
# def reload():
# import imp, sys
# global UiWebsocket
# UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket
# reload(sys.modules["User.UserManager"])
# UserManager.reloadModule()
# self.user = UserManager.user_manager.getCurrent()


@ -1,7 +1,11 @@
import logging, time, cgi, string, random
import logging
import time
import cgi
from gevent.pywsgi import WSGIServer
from gevent.pywsgi import WSGIHandler
from lib.geventwebsocket.handler import WebSocketHandler
from UiRequest import UiRequest
from Site import SiteManager
from Config import config
@ -10,13 +14,13 @@ from Debug import Debug
# Skip websocket handler if not necessary
class UiWSGIHandler(WSGIHandler):
def __init__(self, *args, **kwargs):
self.server = args[2]
super(UiWSGIHandler, self).__init__(*args, **kwargs)
self.args = args
self.kwargs = kwargs
def run_application(self):
self.server.sockets[self.client_address] = self.socket
if "HTTP_UPGRADE" in self.environ: # Websocket request
@ -31,7 +35,7 @@ class UiWSGIHandler(WSGIHandler):
del self.server.sockets[self.client_address]
sys.modules["main"].DebugHook.handleError()
else: # Standard HTTP request
#print self.application.__class__.__name__
# print self.application.__class__.__name__
try:
super(UiWSGIHandler, self).run_application()
except Exception, err:
@ -45,17 +49,15 @@ class UiWSGIHandler(WSGIHandler):
class UiServer:
def __init__(self):
self.ip = config.ui_ip
self.port = config.ui_port
if self.ip == "*": self.ip = "" # Bind all
#self.sidebar_websockets = [] # Sidebar websocket connections
#self.auth_key = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) # Global admin auth key
if self.ip == "*":
self.ip = "" # Bind all
self.sites = SiteManager.site_manager.list()
self.log = logging.getLogger(__name__)
# Handle WSGI request
def handleRequest(self, env, start_response):
path = env["PATH_INFO"]
@ -73,16 +75,15 @@ class UiServer:
logging.debug("UiRequest error: %s" % Debug.formatException(err))
return ui_request.error500("Err: %s" % Debug.formatException(err))
# Reload the UiRequest class to prevent restarts in debug mode
def reload(self):
global UiRequest
import imp, sys
import imp
import sys
reload(sys.modules["User.UserManager"])
reload(sys.modules["Ui.UiWebsocket"])
UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest
#UiRequest.reload()
# UiRequest.reload()
# Bind and run the server
def start(self):
@ -119,7 +120,6 @@ class UiServer:
self.server.serve_forever()
self.log.debug("Stopped.")
def stop(self):
self.log.debug("Stopping...")
# Close WS sockets
@ -133,11 +133,10 @@ class UiServer:
sock._sock.close()
sock.close()
sock_closed += 1
except Exception, err:
except Exception:
pass
self.log.debug("Socket closed: %s" % sock_closed)
self.server.socket.close()
self.server.stop()
time.sleep(1)


@ -1,12 +1,20 @@
import json, gevent, time, sys, hashlib
import json
import time
import sys
import hashlib
import gevent
from Config import config
from Site import SiteManager
from Debug import Debug
from util import QueryJson, RateLimit
from Plugin import PluginManager
@PluginManager.acceptPlugins
class UiWebsocket(object):
def __init__(self, ws, site, server, user):
self.ws = ws
self.site = site
@ -19,15 +27,27 @@ class UiWebsocket(object):
self.sending = False # Currently sending to client
self.send_queue = [] # Messages to send to client
# Start listener loop
def start(self):
ws = self.ws
if self.site.address == config.homepage and not self.site.page_requested: # Add open fileserver port message or closed port error to homepage at first request after start
if sys.modules["main"].file_server.port_opened == True:
self.site.notifications.append(["done", "Congratulation, your port <b>"+str(config.fileserver_port)+"</b> is opened. <br>You are full member of ZeroNet network!", 10000])
elif sys.modules["main"].file_server.port_opened == False:
self.site.notifications.append(["error", "Your network connection is restricted. Please, open <b>"+str(config.fileserver_port)+"</b> port <br>on your router to become full member of ZeroNet network.", 0])
if self.site.address == config.homepage and not self.site.page_requested:
# Add open fileserver port message or closed port error to homepage at first request after start
if sys.modules["main"].file_server.port_opened is True:
self.site.notifications.append([
"done",
"Congratulation, your port <b>%s</b> is opened.<br>You are full member of ZeroNet network!" %
config.fileserver_port,
10000
])
elif sys.modules["main"].file_server.port_opened is False:
self.site.notifications.append([
"error",
"""
Your network connection is restricted. Please open port <b>%s</b><br>
on your router to become a full member of the ZeroNet network.
""" % config.fileserver_port,
0
])
self.site.page_requested = True # Don't add the connection notification anymore
for notification in self.site.notifications: # Send pending notification messages
@ -45,7 +65,6 @@ class UiWebsocket(object):
self.log.error("WebSocket error: %s" % Debug.formatException(err))
return "Bye."
# Event in a channel
def event(self, channel, *params):
if channel in self.channels: # We have joined this channel
@ -56,24 +75,22 @@ class UiWebsocket(object):
site_info.update(params[1])
self.cmd("setSiteInfo", site_info)
# Send response to client (to = message.id)
def response(self, to, result):
self.send({"cmd": "response", "to": to, "result": result})
# Send a command
def cmd(self, cmd, params={}, cb = None):
def cmd(self, cmd, params={}, cb=None):
self.send({"cmd": cmd, "params": params}, cb)
# Encode to json and send message
def send(self, message, cb = None):
def send(self, message, cb=None):
message["id"] = self.next_message_id # Add message id to allow response
self.next_message_id += 1
if cb: # Callback after the client responds
self.waiting_cb[message["id"]] = cb
if self.sending: return # Already sending
if self.sending:
return # Already sending
self.send_queue.append(message)
try:
while self.send_queue:
@ -84,7 +101,6 @@ class UiWebsocket(object):
except Exception, err:
self.log.debug("Websocket send error: %s" % Debug.formatException(err))
# Handle incoming messages
def handleRequest(self, data):
req = json.loads(data)
@ -96,7 +112,11 @@ class UiWebsocket(object):
permissions = permissions[:]
permissions.append("ADMIN")
admin_commands = ("sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteClone", "channelJoinAllsite", "serverUpdate", "certSet")
admin_commands = (
"sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteClone",
"channelJoinAllsite",
"serverUpdate", "certSet"
)
if cmd == "response": # It's a response to a command
return self.actionResponse(req["to"], req["result"])
@ -117,7 +137,6 @@ class UiWebsocket(object):
else:
func(req["id"], params)
# Format site info
def formatSiteInfo(self, site, create_user=True):
content = site.content_manager.contents.get("content.json")
@ -125,9 +144,12 @@ class UiWebsocket(object):
content = content.copy()
content["files"] = len(content.get("files", {}))
content["includes"] = len(content.get("includes", {}))
if "sign" in content: del(content["sign"])
if "signs" in content: del(content["signs"])
if "signers_sign" in content: del(content["signers_sign"])
if "sign" in content:
del(content["sign"])
if "signs" in content:
del(content["signs"])
if "signers_sign" in content:
del(content["signers_sign"])
settings = site.settings.copy()
del settings["wrapper_key"] # Don't expose the wrapper key
@ -150,11 +172,12 @@ class UiWebsocket(object):
"workers": len(site.worker_manager.workers),
"content": content
}
if site.settings["own"]: ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey"))
if site.settings["serving"] and content: ret["peers"] += 1 # Add myself if serving
if site.settings["own"]:
ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey"))
if site.settings["serving"] and content:
ret["peers"] += 1 # Add myself if serving
return ret
def formatServerInfo(self):
return {
"ip_external": bool(sys.modules["main"].file_server.port_opened),
@ -169,7 +192,6 @@ class UiWebsocket(object):
"plugins": PluginManager.plugin_manager.plugin_names
}
# - Actions -
# Do callback on response {"cmd": "response", "to": message_id, "result": result}
@ -179,33 +201,28 @@ class UiWebsocket(object):
else:
self.log.error("Websocket callback not found: %s, %s" % (to, result))
# Send a simple pong answer
def actionPing(self, to):
self.response(to, "pong")
# Send site details
def actionSiteInfo(self, to, file_status = None):
def actionSiteInfo(self, to, file_status=None):
ret = self.formatSiteInfo(self.site)
if file_status: # Client queries file status
if self.site.storage.isFile(file_status): # File exists, add done event
ret["event"] = ("file_done", file_status)
self.response(to, ret)
# Join to an event channel
def actionChannelJoin(self, to, channel):
if channel not in self.channels:
self.channels.append(channel)
# Server variables
def actionServerInfo(self, to):
ret = self.formatServerInfo()
self.response(to, ret)
# Sign content.json
def actionSiteSign(self, to, privatekey=None, inner_path="content.json"):
site = self.site
@ -219,8 +236,10 @@ class UiWebsocket(object):
extend["cert_user_id"] = self.user.getCertUserId(site.address)
extend["cert_sign"] = cert["cert_sign"]
if not site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path):
if (
not site.settings["own"] and
self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path)
):
return self.response(to, "Forbidden, you can only modify your own sites")
if privatekey == "stored":
privatekey = self.user.getSiteData(self.site.address).get("privatekey")
@ -230,10 +249,7 @@ class UiWebsocket(object):
# Signing
site.content_manager.loadContent(add_bad_files=False) # Reload content.json, ignore errors to make it up-to-date
signed = site.content_manager.sign(inner_path, privatekey, extend=extend) # Sign using private key sent by user
if signed:
#if inner_path == "content_json": self.cmd("notification", ["done", "Private key correct, content signed!", 5000]) # Display message for 5 sec
pass
else:
if not signed:
self.cmd("notification", ["error", "Content sign failed: invalid private key."])
self.response(to, "Site sign failed")
return
@ -243,7 +259,6 @@ class UiWebsocket(object):
return inner_path
# Sign and publish content.json
def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True):
if sign:
@ -257,39 +272,48 @@ class UiWebsocket(object):
self.site.saveSettings()
self.site.announce()
event_name = "publish %s %s" % (self.site.address, inner_path)
thread = RateLimit.callAsync(event_name, 7, self.site.publish, 5, inner_path) # Only publish once every 7 seconds, to 5 peers
notification = "linked" not in dir(thread) # Only display notification on first callback
thread.linked = True
thread.link(lambda thread: self.cbSitePublish(to, thread, notification)) # At the end callback with request id and thread
# Callback of site publish
def cbSitePublish(self, to, thread, notification=True):
site = self.site
published = thread.value
if published>0: # Successfuly published
if notification: self.cmd("notification", ["done", "Content published to %s peers." % published, 5000])
if published > 0: # Successfully published
if notification:
self.cmd("notification", ["done", "Content published to %s peers." % published, 5000])
self.response(to, "ok")
if notification: site.updateWebsocket() # Send updated site data to local websocket clients
if notification:
site.updateWebsocket() # Send updated site data to local websocket clients
else:
if len(site.peers) == 0:
if sys.modules["main"].file_server.port_opened:
if notification: self.cmd("notification", ["info", "No peers found, but your content is ready to access."])
if notification:
self.cmd("notification", ["info", "No peers found, but your content is ready to access."])
self.response(to, "ok")
else:
if notification: self.cmd("notification", ["info", "Your network connection is restricted. Please, open <b>"+str(config.fileserver_port)+"</b> port <br>on your router to make your site accessible for everyone."])
if notification:
self.cmd("notification", [
"info",
"""Your network connection is restricted. Please, open <b>%s</b> port <br>
on your router to make your site accessible for everyone.""" % config.fileserver_port
])
self.response(to, "Port not opened.")
else:
if notification: self.cmd("notification", ["error", "Content publish failed."])
if notification:
self.cmd("notification", ["error", "Content publish failed."])
self.response(to, "Content publish failed.")
# Write a file to disk
def actionFileWrite(self, to, inner_path, content_base64):
if not self.site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path):
if (
not self.site.settings["own"] and
self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path)
):
return self.response(to, "Forbidden, you can only modify your own files")
try:
@ -309,8 +333,6 @@ class UiWebsocket(object):
if ws != self:
ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]})
# Find data in json files
def actionFileQuery(self, to, dir_inner_path, query):
# s = time.time()
@ -319,7 +341,6 @@ class UiWebsocket(object):
# self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s))
return self.response(to, rows)
# Sql query
def actionDbQuery(self, to, query, params=None, wait_for=None):
rows = []
@ -332,17 +353,16 @@ class UiWebsocket(object):
rows.append(dict(row))
return self.response(to, rows)
# Return file content
def actionFileGet(self, to, inner_path, required=True):
try:
if required: self.site.needFile(inner_path, priority=1)
if required:
self.site.needFile(inner_path, priority=1)
body = self.site.storage.read(inner_path)
except:
body = None
return self.response(to, body)
def actionFileRules(self, to, inner_path):
rules = self.site.content_manager.getRules(inner_path)
if inner_path.endswith("content.json"):
@ -353,20 +373,21 @@ class UiWebsocket(object):
rules["current_size"] = 0
return self.response(to, rules)
# Add certificate to user
def actionCertAdd(self, to, domain, auth_type, auth_user_name, cert):
try:
res = self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert)
if res == True:
self.cmd("notification", ["done", "New certificate added: <b>%s/%s@%s</b>." % (auth_type, auth_user_name, domain)])
if res is True:
self.cmd(
"notification",
["done", "New certificate added: <b>%s/%s@%s</b>." % (auth_type, auth_user_name, domain)]
)
self.response(to, "ok")
else:
self.response(to, "Not changed")
except Exception, err:
self.response(to, {"error": err.message})
# Select certificate for site
def actionCertSelect(self, to, accepted_domains=[]):
accounts = []
@ -378,13 +399,12 @@ class UiWebsocket(object):
for domain, cert in self.user.certs.items():
if auth_address == cert["auth_address"]:
active = domain
title = cert["auth_user_name"]+"@"+domain
title = cert["auth_user_name"] + "@" + domain
if domain in accepted_domains:
accounts.append([domain, title, ""])
else:
accounts.append([domain, title, "disabled"])
# Render the html
body = "<span style='padding-bottom: 5px; display: inline-block'>Select account you want to use in this site:</span>"
# Accounts
@ -399,10 +419,14 @@ class UiWebsocket(object):
more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domains we have not displayed yet
if more_domains:
# body+= "<small style='margin-top: 10px; display: block'>Accepted authorization providers by the site:</small>"
body+= "<div style='background-color: #F7F7F7; margin-right: -30px'>"
body += "<div style='background-color: #F7F7F7; margin-right: -30px'>"
for domain in more_domains:
body += "<a href='/%s' onclick='wrapper.gotoSite(this)' target='_blank' class='select'><small style='float: right; margin-right: 40px; margin-top: -1px'>Register &raquo;</small>%s</a>" % (domain, domain)
body+= "</div>"
body += """
<a href='/%s' onclick='wrapper.gotoSite(this)' target='_blank' class='select'>
<small style='float: right; margin-right: 40px; margin-top: -1px'>Register &raquo;</small>%s
</a>
""" % (domain, domain)
body += "</div>"
body += """
<script>
@ -417,13 +441,11 @@ class UiWebsocket(object):
# Send the notification
self.cmd("notification", ["ask", body])
# Set the certificate used to authenticate the user for the site
def actionCertSet(self, to, domain):
self.user.setCert(self.site.address, domain)
self.site.updateWebsocket(cert_changed=domain)
# - Admin actions -
# List all site info
@ -431,11 +453,11 @@ class UiWebsocket(object):
ret = []
SiteManager.site_manager.load() # Reload sites
for site in self.server.sites.values():
if not site.content_manager.contents.get("content.json"): continue # Broken site
if not site.content_manager.contents.get("content.json"):
continue # Broken site
ret.append(self.formatSiteInfo(site, create_user=False)) # Dont generate the auth_address on listing
self.response(to, ret)
# Join to an event channel on all sites
def actionChannelJoinAllsite(self, to, channel):
if channel not in self.channels: # Add channel to channels
@ -445,7 +467,6 @@ class UiWebsocket(object):
if self not in site.websockets:
site.websockets.append(self)
# Update site content.json
def actionSiteUpdate(self, to, address):
site = self.server.sites.get(address)
@ -454,7 +475,6 @@ class UiWebsocket(object):
else:
self.response(to, {"error": "Unknown site: %s" % address})
# Pause site serving
def actionSitePause(self, to, address):
site = self.server.sites.get(address)
@ -466,7 +486,6 @@ class UiWebsocket(object):
else:
self.response(to, {"error": "Unknown site: %s" % address})
# Resume site serving
def actionSiteResume(self, to, address):
site = self.server.sites.get(address)
@ -479,7 +498,6 @@ class UiWebsocket(object):
else:
self.response(to, {"error": "Unknown site: %s" % address})
def actionSiteDelete(self, to, address):
site = self.server.sites.get(address)
if site:
@ -493,7 +511,6 @@ class UiWebsocket(object):
else:
self.response(to, {"error": "Unknown site: %s" % address})
def actionSiteClone(self, to, address):
self.cmd("notification", ["info", "Cloning site..."])
site = self.server.sites.get(address)
@ -505,17 +522,14 @@ class UiWebsocket(object):
self.cmd("notification", ["done", "Site cloned<script>window.top.location = '/%s'</script>" % new_address])
gevent.spawn(new_site.announce)
def actionSiteSetLimit(self, to, size_limit):
self.site.settings["size_limit"] = size_limit
self.site.saveSettings()
self.response(to, "Site size limit changed to %sMB" % size_limit)
self.site.download()
def actionServerUpdate(self, to):
self.cmd("updating")
sys.modules["main"].update_after_shutdown = True
sys.modules["main"].file_server.stop()
sys.modules["main"].ui_server.stop()
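
All of the action* methods above share one convention: the browser sends a command with a message id, the handler named action<Cmd> runs, and self.response(to, result) answers using that id. A minimal, self-contained sketch of the round-trip (the routing shown here is simplified and hypothetical, not the exact UiWebsocket wiring):

class WebsocketSketch(object):
    def __init__(self):
        self.sent = []  # Stand-in for the real websocket send queue

    def response(self, to, result):
        # Replies carry the id of the request they answer
        self.sent.append({"cmd": "response", "to": to, "result": result})

    def actionPing(self, to):
        self.response(to, "pong")

    def handleRequest(self, req):
        # "ping" -> actionPing, "fileGet" -> actionFileGet, ...
        cmd = req["cmd"]
        func = getattr(self, "action" + cmd[0].upper() + cmd[1:], None)
        if func:
            func(req["id"], *req.get("params", []))

ws = WebsocketSketch()
ws.handleRequest({"cmd": "ping", "id": 1})
print(ws.sent)  # [{'cmd': 'response', 'to': 1, 'result': 'pong'}]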

View file

@ -1,4 +1,7 @@
import logging, json, time
import logging
import json
import time
from Crypt import CryptBitcoin
from Plugin import PluginManager
from Config import config
@ -6,6 +9,7 @@ from Config import config
@PluginManager.acceptPlugins
class User(object):
def __init__(self, master_address=None, master_seed=None, data={}):
if master_seed:
self.master_seed = master_seed
@ -27,7 +31,8 @@ class User(object):
if self.master_address not in users:
users[self.master_address] = {} # Create if not exist
user_data = users[self.master_address]
if self.master_seed: user_data["master_seed"] = self.master_seed
if self.master_seed:
user_data["master_seed"] = self.master_seed
user_data["sites"] = self.sites
user_data["certs"] = self.certs
open("%s/users.json" % config.data_dir, "w").write(json.dumps(users, indent=2, sort_keys=True))
@ -40,7 +45,8 @@ class User(object):
# Return: {"auth_address": "xxx", "auth_privatekey": "xxx"}
def getSiteData(self, address, create=True):
if address not in self.sites: # Generate new BIP32 child key based on site address
if not create: return {"auth_address": None, "auth_privatekey": None} # Dont create user yet
if not create:
return {"auth_address": None, "auth_privatekey": None} # Dont create user yet
s = time.time()
address_id = self.getAddressAuthIndex(address) # Convert site address to int
auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
@ -49,17 +55,18 @@ class User(object):
"auth_privatekey": auth_privatekey
}
self.save()
self.log.debug("Added new site: %s in %.3fs" % (address, time.time()-s))
self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s))
return self.sites[address]
# Get data for a new, unique site
# Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}]
def getNewSiteData(self):
import random
bip32_index = random.randrange(2**256) % 100000000
bip32_index = random.randrange(2 ** 256) % 100000000
site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index)
site_address = CryptBitcoin.privatekeyToAddress(site_privatekey)
if site_address in self.sites: raise Exception("Random error: site exist!")
if site_address in self.sites:
raise Exception("Random error: site exist!")
# Save to sites
self.getSiteData(site_address)
self.sites[site_address]["privatekey"] = site_privatekey
@ -85,7 +92,8 @@ class User(object):
# Add cert for the user
def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
domain = domain.lower()
auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0] # Find privatekey by auth address
# Find privatekey by auth address
auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0]
cert_node = {
"auth_address": auth_address,
"auth_privatekey": auth_privatekey,
@ -95,7 +103,10 @@ class User(object):
}
# Check if we have already cert for that domain and its not the same
if self.certs.get(domain) and self.certs[domain] != cert_node:
raise Exception("You already have certificate for this domain: %s/%s@%s" % (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain))
raise Exception(
"You already have certificate for this domain: %s/%s@%s" %
(self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain)
)
elif self.certs.get(domain) == cert_node: # Same, not updated
return None
else: # Not exist yet, add
@ -113,17 +124,19 @@ class User(object):
return site_data
# Get cert for the site address
# Return: { "auth_address": ..., "auth_privatekey":..., "auth_type": "web", "auth_user_name": "nofish", "cert_sign": ... } or None
# Return: { "auth_address":.., "auth_privatekey":.., "auth_type": "web", "auth_user_name": "nofish", "cert_sign":.. } or None
def getCert(self, address):
site_data = self.getSiteData(address, create=False)
if not site_data or not "cert" in site_data: return None # Site dont have cert
if not site_data or "cert" not in site_data:
return None # Site dont have cert
return self.certs.get(site_data["cert"])
# Get cert user name for the site address
# Return: user@certprovider.bit or None
def getCertUserId(self, address):
site_data = self.getSiteData(address, create=False)
if not site_data or not "cert" in site_data: return None # Site dont have cert
if not site_data or "cert" not in site_data:
return None # Site dont have cert
cert = self.certs.get(site_data["cert"])
if cert:
return cert["auth_user_name"]+"@"+site_data["cert"]
return cert["auth_user_name"] + "@" + site_data["cert"]
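
The site and certificate entries above all hang off keys derived from one master seed: getSiteData() turns the site address into an integer index and derives a child key from it, so the same seed always reproduces the same per-site identity. A sketch of that derivation, assuming CryptBitcoin.hdPrivatekey(seed, index) and CryptBitcoin.privatekeyToAddress(key) behave as used in this file (the seed and index values below are made up):

from Crypt import CryptBitcoin  # Same module this file already imports

master_seed = "0123456789abcdef" * 4  # Hypothetical seed; normally generated randomly
address_id = 12345                    # Int index computed from the site address

# BIP32-style child key: deterministic, so no per-site secret needs storing
auth_privatekey = CryptBitcoin.hdPrivatekey(master_seed, address_id)
auth_address = CryptBitcoin.privatekeyToAddress(auth_privatekey)
# Re-running with the same seed and index always yields the same keypair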

View file

@ -1,5 +1,4 @@
# Included modules
import os
import json
import logging
@ -75,9 +74,9 @@ def reloadModule():
import imp
global User, UserManager, user_manager
User = imp.load_source("User", "src/User/User.py").User # Reload source
#module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module
#UserManager = module.UserManager
#user_manager = module.user_manager
# module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module
# UserManager = module.UserManager
# user_manager = module.user_manager
# Reload users
user_manager = UserManager()
user_manager.load()
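
reloadModule() works by re-executing the module source with imp.load_source, which returns a fresh module object; rebinding the User name makes newly created users pick up code changes without a restart, while existing instances keep the old class. The same stdlib mechanism in isolation:

import imp

# Re-executes the file and returns a brand new module object
module = imp.load_source("User", "src/User/User.py")
User = module.User  # Rebind; instances created before this keep the old class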

View file

@ -1,8 +1,12 @@
import gevent, time, logging, shutil, os
from Peer import Peer
import time
import gevent
from Debug import Debug
class Worker(object):
def __init__(self, manager, peer):
self.manager = manager
self.peer = peer
@ -11,15 +15,12 @@ class Worker(object):
self.running = False
self.thread = None
def __str__(self):
return "Worker %s %s" % (self.manager.site.address_short, self.key)
def __repr__(self):
return "<%s>" % self.__str__()
# Downloader thread
def downloader(self):
self.peer.hash_failed = 0 # Reset hash error counter
@ -29,14 +30,15 @@ class Worker(object):
if not task: # Die, no more task
self.manager.log.debug("%s: No task found, stopping" % self.key)
break
if not task["time_started"]: task["time_started"] = time.time() # Task started now
if not task["time_started"]:
task["time_started"] = time.time() # Task started now
if task["workers_num"] > 0: # Wait a bit if someone already working on it
self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." % (self.key, task["inner_path"]))
time.sleep(1)
self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"]))
if task["done"] == False:
if task["done"] is False:
self.task = task
site = task["site"]
task["workers_num"] += 1
@ -45,28 +47,32 @@ class Worker(object):
except Exception, err:
self.manager.log.debug("%s: getFile error: err" % (self.key, err))
buff = None
if self.running == False: # Worker no longer needed or got killed
if self.running is False: # Worker no longer needed or got killed
self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"]))
break
if buff: # Download ok
correct = site.content_manager.verifyFile(task["inner_path"], buff)
else: # Download error
correct = False
if correct == True or correct == None: # Hash ok or same file
if correct is True or correct is None: # Hash ok or same file
self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"]))
if correct == True and task["done"] == False: # Save if changed and task not done yet
if correct is True and task["done"] is False: # Save if changed and task not done yet
buff.seek(0)
file_path = site.storage.getPath(task["inner_path"])
site.storage.write(task["inner_path"], buff)
if task["done"] == False: self.manager.doneTask(task)
if task["done"] is False:
self.manager.doneTask(task)
task["workers_num"] -= 1
self.task = None
else: # Hash failed
self.manager.log.debug("%s: Hash failed: %s, failed peers: %s" % (self.key, task["inner_path"], len(task["failed"])))
self.manager.log.debug(
"%s: Hash failed: %s, failed peers: %s" %
(self.key, task["inner_path"], len(task["failed"]))
)
task["failed"].append(self.peer)
self.task = None
self.peer.hash_failed += 1
if self.peer.hash_failed >= max(len(self.manager.tasks), 3): # More fails than tasks number but at least 3: Broken peer
if self.peer.hash_failed >= max(len(self.manager.tasks), 3):
# Broken peer: More fails than tasks number but at least 3
break
task["workers_num"] -= 1
time.sleep(1)
@ -74,13 +80,11 @@ class Worker(object):
self.running = False
self.manager.removeWorker(self)
# Start the worker
def start(self):
self.running = True
self.thread = gevent.spawn(self.downloader)
# Skip current task
def skip(self):
self.manager.log.debug("%s: Force skipping" % self.key)
@ -88,7 +92,6 @@ class Worker(object):
self.thread.kill(exception=Debug.Notify("Worker stopped"))
self.start()
# Force stop the worker
def stop(self):
self.manager.log.debug("%s: Force stopping" % self.key)

View file

@ -1,29 +1,33 @@
import time
import logging
import random
import gevent
from Worker import Worker
import gevent, time, logging, random
MAX_WORKERS = 10
MAX_WORKERS = 10 # Max concurrent workers
# Worker manager for site
class WorkerManager:
def __init__(self, site):
self.site = site
self.workers = {} # Key: ip:port, Value: Worker.Worker
self.tasks = [] # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids}
self.tasks = []
# {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False,
# "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids}
self.started_task_num = 0 # Last added task num
self.running = True
self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short)
self.process_taskchecker = gevent.spawn(self.checkTasks)
def __str__(self):
return "WorkerManager %s" % self.site.address_short
def __repr__(self):
return "<%s>" % self.__str__()
# Check expired tasks
def checkTasks(self):
while self.running:
@ -32,13 +36,15 @@ class WorkerManager:
# Clean up workers
for worker in self.workers.values():
if worker.task and worker.task["done"]: worker.skip() # Stop workers with task done
if worker.task and worker.task["done"]:
worker.skip() # Stop workers with task done
if not self.tasks: continue
if not self.tasks:
continue
tasks = self.tasks[:] # Copy it so removing elements wont cause any problem
for task in tasks:
if task["time_started"] and time.time() >= task["time_started"]+60: # Task taking too long time, skip it
if task["time_started"] and time.time() >= task["time_started"] + 60: # Task taking too long time, skip it
self.log.debug("Timeout, Skipping: %s" % task)
# Skip to next file workers
workers = self.findWorkers(task)
@ -47,12 +53,13 @@ class WorkerManager:
worker.skip()
else:
self.failTask(task)
elif time.time() >= task["time_added"]+60 and not self.workers: # No workers left
elif time.time() >= task["time_added"] + 60 and not self.workers: # No workers left
self.log.debug("Timeout, Cleanup task: %s" % task)
# Remove task
self.failTask(task)
elif (task["time_started"] and time.time() >= task["time_started"]+15) or not self.workers: # Task started more than 15 sec ago or no workers
elif (task["time_started"] and time.time() >= task["time_started"] + 15) or not self.workers:
# Task started more than 15 sec ago or no workers
self.log.debug("Task taking more than 15 secs, find more peers: %s" % task["inner_path"])
task["site"].announce() # Find more peers
if task["peers"]: # Release the peer lock
@ -61,39 +68,38 @@ class WorkerManager:
self.startWorkers()
break # One reannounce per loop
self.log.debug("checkTasks stopped running")
# Tasks sorted by this
def taskSorter(self, task):
if task["inner_path"] == "content.json": return 9999 # Content.json always prority
if task["inner_path"] == "index.html": return 9998 # index.html also important
if task["inner_path"] == "content.json":
return 9999 # Content.json always prority
if task["inner_path"] == "index.html":
return 9998 # index.html also important
priority = task["priority"]
if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"): priority += 1 # download js and css files first
return priority-task["workers_num"] # Prefer more priority and less workers
if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"):
priority += 1 # download js and css files first
return priority - task["workers_num"] # Prefer more priority and less workers
# Returns the next free or less worked task
def getTask(self, peer):
self.tasks.sort(key=self.taskSorter, reverse=True) # Sort tasks by priority and worker numbers
for task in self.tasks: # Find a task
if task["peers"] and peer not in task["peers"]: continue # This peer not allowed to pick this task
if peer in task["failed"]: continue # Peer already tried to solve this, but failed
if task["peers"] and peer not in task["peers"]:
continue # This peer not allowed to pick this task
if peer in task["failed"]:
continue # Peer already tried to solve this, but failed
return task
# New peers added to site
def onPeers(self):
self.startWorkers()
# Add new worker
def addWorker(self, peer):
key = peer.key
if key not in self.workers and len(self.workers) < MAX_WORKERS: # We dont have worker for that peer and workers num less than max
if key not in self.workers and len(self.workers) < MAX_WORKERS:
# We dont have worker for that peer and workers num less than max
worker = Worker(self, peer)
self.workers[key] = worker
worker.key = key
@ -102,18 +108,21 @@ class WorkerManager:
else: # We already have a worker for this peer or it's over the limit
return False
# Start workers to process tasks
def startWorkers(self, peers=None):
if not self.tasks: return False # No task for workers
if len(self.workers) >= MAX_WORKERS and not peers: return False # Workers number already maxed and no starting peers defined
if not peers: peers = self.site.peers.values() # No peers defined, use any from site
if not self.tasks:
return False # No task for workers
if len(self.workers) >= MAX_WORKERS and not peers:
return False # Workers number already maxed and no starting peers defined
if not peers:
peers = self.site.peers.values() # No peers defined, use any from site
random.shuffle(peers)
for peer in peers: # One worker for every peer
if peers and peer not in peers: continue # If peers defined and peer not valid
if peers and peer not in peers:
continue # If peers defined and peer not valid
worker = self.addWorker(peer)
if worker: self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), MAX_WORKERS))
if worker:
self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), MAX_WORKERS))
# Stop all worker
def stopWorkers(self):
@ -123,16 +132,14 @@ class WorkerManager:
for task in tasks: # Mark all current task as failed
self.failTask(task)
# Find workers by task
def findWorkers(self, task):
workers = []
for worker in self.workers.values():
if worker.task == task: workers.append(worker)
if worker.task == task:
workers.append(worker)
return workers
# Ends and remove a worker
def removeWorker(self, worker):
worker.running = False
@ -140,9 +147,8 @@ class WorkerManager:
del(self.workers[worker.key])
self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS))
# Create new task and return asyncresult
def addTask(self, inner_path, peer=None, priority = 0):
def addTask(self, inner_path, peer=None, priority=0):
self.site.onFileStart(inner_path) # First task, trigger site download started
task = self.findTask(inner_path)
if task: # Already has task for that file
@ -155,7 +161,6 @@ class WorkerManager:
self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"]))
self.startWorkers([peer])
if priority:
task["priority"] += priority # Boost on priority
return task["evt"]
@ -165,14 +170,19 @@ class WorkerManager:
peers = [peer] # Only download from this peer
else:
peers = None
task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_added": time.time(), "time_started": None, "peers": peers, "priority": priority, "failed": []}
task = {
"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False,
"time_added": time.time(), "time_started": None, "peers": peers, "priority": priority, "failed": []
}
self.tasks.append(task)
self.started_task_num += 1
self.log.debug("New task: %s, peer lock: %s, priority: %s, tasks: %s" % (task["inner_path"], peers, priority, self.started_task_num))
self.log.debug(
"New task: %s, peer lock: %s, priority: %s, tasks: %s" %
(task["inner_path"], peers, priority, self.started_task_num)
)
self.startWorkers(peers)
return evt
# Find a task using inner_path
def findTask(self, inner_path):
for task in self.tasks:
@ -180,7 +190,6 @@ class WorkerManager:
return task
return None # Not found
# Mark a task failed
def failTask(self, task):
if task in self.tasks:
@ -191,7 +200,6 @@ class WorkerManager:
if not self.tasks:
self.started_task_num = 0
# Mark a task done
def doneTask(self, task):
task["done"] = True
@ -201,4 +209,3 @@ class WorkerManager:
if not self.tasks:
self.started_task_num = 0
self.site.onComplete() # No more task trigger site complete
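
taskSorter() above encodes the download policy in a single score: content.json and index.html get fixed top scores, js/css get a +1 bump, and each worker already on a task subtracts one, so sorting with reverse=True hands out the most urgent, least crowded task first. A standalone check with made-up tasks (only the keys taskSorter reads are included):

def taskSorter(task):
    if task["inner_path"] == "content.json":
        return 9999  # content.json always first
    if task["inner_path"] == "index.html":
        return 9998
    priority = task["priority"]
    if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"):
        priority += 1  # Js and css before other files
    return priority - task["workers_num"]  # Prefer tasks with fewer workers

tasks = [
    {"inner_path": "data/users.json", "priority": 0, "workers_num": 2},
    {"inner_path": "js/all.js", "priority": 0, "workers_num": 0},
    {"inner_path": "content.json", "priority": 0, "workers_num": 1},
]
tasks.sort(key=taskSorter, reverse=True)
print([task["inner_path"] for task in tasks])
# ['content.json', 'js/all.js', 'data/users.json']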

View file

@ -1,14 +1,24 @@
import re
def prefix(content):
content = re.sub("@keyframes (.*? {.*?[^ ]})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n", content, flags=re.DOTALL)
content = re.sub('([^-\*])(border-radius|box-shadow|transition|animation|box-sizing|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])', '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content)
content = re.sub('(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])',
'\\1: -webkit-\\2(\\3);'+
'\\1: -moz-\\2(\\3);'+
'\\1: -o-\\2(\\3);'+
'\\1: -ms-\\2(\\3);'+
'\\1: \\2(\\3);', content)
content = re.sub(
"@keyframes (.*? {.*?[^ ]})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n",
content, flags=re.DOTALL
)
content = re.sub(
'([^-\*])(border-radius|box-shadow|transition|animation|box-sizing|' +
'transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])',
'\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content
)
content = re.sub(
'(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])',
'\\1: -webkit-\\2(\\3);' +
'\\1: -moz-\\2(\\3);' +
'\\1: -o-\\2(\\3);' +
'\\1: -ms-\\2(\\3);' +
'\\1: \\2(\\3);', content
)
return content
if __name__ == "__main__":
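
The three re.sub calls above rewrite plain CSS into vendor-prefixed CSS using backreferences into the captured groups (\1 is the character before the property, \2 the property name, \3 the value, \4 the terminator). A usage sketch of the prefix() function defined above; the commented output is approximate, exact whitespace may differ:

print(prefix("div { transform: rotate(5deg); }"))
# div { -webkit-transform: rotate(5deg); -moz-transform: rotate(5deg);
#       -o-transform: rotate(5deg); -ms-transform: rotate(5deg);
#       transform: rotate(5deg) ; }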

View file

@ -16,8 +16,10 @@ update_after_shutdown = False # If set True then update and restart zeronet aft
from Config import config
# Create necessary files and dirs
if not os.path.isdir(config.log_dir): os.mkdir(config.log_dir)
if not os.path.isdir(config.data_dir): os.mkdir(config.data_dir)
if not os.path.isdir(config.log_dir):
os.mkdir(config.log_dir)
if not os.path.isdir(config.data_dir):
os.mkdir(config.data_dir)
if not os.path.isfile("%s/sites.json" % config.data_dir):
open("%s/sites.json" % config.data_dir, "w").write("{}")
if not os.path.isfile("%s/users.json" % config.data_dir):
@ -54,8 +56,7 @@ if config.debug:
else:
console_log.setLevel(logging.INFO) # Display only important info to console
monkey.patch_all(thread=False) # Make time, socket gevent compatible. Not thread: pyfilesystem and system tray icon not compatible
monkey.patch_all(thread=False) # Not thread: pyfilesystem and system tray icon not compatible
# Log current config
@ -81,6 +82,7 @@ PluginManager.plugin_manager.loadPlugins()
@PluginManager.acceptPlugins
class Actions(object):
# Default action: Start serving UiServer and FileServer
def main(self):
logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__))
global ui_server, file_server
@ -113,8 +115,10 @@ class Actions(object):
logging.info("----------------------------------------------------------------------")
while True:
if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break
else: logging.info("Please, secure it now, you going to need it to modify your site!")
if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
break
else:
logging.info("Please, secure it now, you going to need it to modify your site!")
logging.info("Creating directory structure...")
from Site import Site
@ -132,7 +136,7 @@ class Actions(object):
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
from Site import Site
logging.info("Signing site: %s..." % address)
site = Site(address, allow_create = False)
site = Site(address, allow_create=False)
if not privatekey: # If no privatekey in args then ask it now
import getpass
@ -151,7 +155,10 @@ class Actions(object):
for content_inner_path in site.content_manager.contents:
logging.info("Verifing %s signature..." % content_inner_path)
if site.content_manager.verifyFile(content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False) == True:
file_correct = site.content_manager.verifyFile(
content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
)
if file_correct is True:
logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
else:
logging.error("[ERROR] %s: invalid file!" % content_inner_path)
@ -160,7 +167,7 @@ class Actions(object):
logging.info("Verifying site files...")
bad_files += site.storage.verifyFiles()
if not bad_files:
logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time()-s))
logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
else:
logging.error("[ERROR] Error during verifying site files!")
@ -170,7 +177,7 @@ class Actions(object):
site = Site(address)
s = time.time()
site.storage.rebuildDb()
logging.info("Done in %.3fs" % (time.time()-s))
logging.info("Done in %.3fs" % (time.time() - s))
def dbQuery(self, address, query):
from Site import Site
@ -188,17 +195,15 @@ class Actions(object):
s = time.time()
site.announce()
print "Response time: %.3fs" % (time.time()-s)
print "Response time: %.3fs" % (time.time() - s)
print site.peers
def siteNeedFile(self, address, inner_path):
from Site import Site
site = Site(address)
site.announce()
print site.needFile(inner_path, update=True)
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
global file_server
from Site import Site
@ -222,7 +227,7 @@ class Actions(object):
gevent.joinall([file_server_thread], timeout=60)
logging.info("Done.")
else:
logging.info("No peers found for this site, sitePublish command only works if you already have peers serving your site")
logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
# Crypto commands
def cryptPrivatekeyToAddress(self, privatekey=None):
@ -252,7 +257,7 @@ class Actions(object):
for i in range(5):
s = time.time()
print peer.ping(),
print "Response time: %.3fs (crypt: %s)" % (time.time()-s, peer.connection.crypt)
print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt)
time.sleep(1)
def peerGetFile(self, peer_ip, peer_port, site, filename):
@ -266,7 +271,7 @@ class Actions(object):
peer = Peer(peer_ip, peer_port)
s = time.time()
print peer.getFile(site, filename).read()
print "Response time: %.3fs" % (time.time()-s)
print "Response time: %.3fs" % (time.time() - s)
def peerCmd(self, peer_ip, peer_port, cmd, parameters):
logging.info("Opening a simple connection server")
@ -284,9 +289,10 @@ class Actions(object):
logging.info("Response: %s" % peer.request(cmd, parameters))
actions = Actions()
# Starts here when running zeronet.py
def start():
# Call function
func = getattr(actions, config.action, None)
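
start() resolves the subcommand to a method purely by name with getattr, which is why the @PluginManager.acceptPlugins decorator is enough for plugins to contribute whole new CLI actions. A minimal sketch of this dispatch pattern (the class, names, and argument handling are simplified; the address is a hypothetical placeholder):

class ActionsSketch(object):
    def main(self):
        print("Serving UiServer and FileServer...")

    def siteVerify(self, address):
        print("Verifying %s..." % address)

actions_sketch = ActionsSketch()

def dispatch(action, *args):
    func = getattr(actions_sketch, action, None)  # config.action carries the name
    if func:
        func(*args)
    else:
        print("Unknown action: %s" % action)

dispatch("siteVerify", "1AddressXXXXXXXXXXXXXXXXXXXXXXXXXX")  # Hypothetical address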

View file

@ -1,17 +1,17 @@
# Based on http://stackoverflow.com/a/2022629
class Event(list):
def __call__(self, *args, **kwargs):
for f in self[:]:
if "once" in dir(f) and f in self:
self.remove(f)
f(*args, **kwargs)
def __repr__(self):
return "Event(%s)" % list.__repr__(self)
def once(self, func, name=None):
func.once = True
func.name = None
@ -25,35 +25,32 @@ class Event(list):
return self
def testBenchmark():
def say(pre, text):
print "%s Say: %s" % (pre, text)
import time
s = time.time()
onChanged = Event()
on_changed = Event()
for i in range(1000):
onChanged.once(lambda pre: say(pre, "once"), "once")
print "Created 1000 once in %.3fs" % (time.time()-s)
onChanged("#1")
on_changed.once(lambda pre: say(pre, "once"), "once")
print "Created 1000 once in %.3fs" % (time.time() - s)
on_changed("#1")
def testUsage():
def say(pre, text):
print "%s Say: %s" % (pre, text)
onChanged = Event()
onChanged.once(lambda pre: say(pre, "once"))
onChanged.once(lambda pre: say(pre, "once"))
onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce")
onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce")
onChanged.append(lambda pre: say(pre, "always"))
onChanged("#1")
onChanged("#2")
onChanged("#3")
on_changed = Event()
on_changed.once(lambda pre: say(pre, "once"))
on_changed.once(lambda pre: say(pre, "once"))
on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
on_changed.append(lambda pre: say(pre, "always"))
on_changed("#1")
on_changed("#2")
on_changed("#3")
if __name__ == "__main__":

View file

@ -12,8 +12,11 @@ except AttributeError:
OldSSLSocket = __ssl__.SSLSocket
class NewSSLSocket(OldSSLSocket):
"""Fix SSLSocket constructor."""
def __init__(
self, sock, keyfile=None, certfile=None, server_side=False, cert_reqs=0,
ssl_version=2, ca_certs=None, do_handshake_on_connect=True,

View file

@ -1,11 +1,13 @@
import urllib2, logging
import urllib2
import logging
import GeventSslPatch
from Config import config
def get(url):
logging.debug("Get %s" % url)
req = urllib2.Request(url)
req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version)
req.add_header('Accept', 'application/json')
return urllib2.urlopen(req)
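
get() is deliberately tiny: set the ZeroNet User-Agent, ask for JSON, and hand back the file-like response from urlopen, so json.load can read from it directly. A usage sketch (the URL is illustrative only; assumes get() from this module is in scope):

import json

info = json.load(get("https://api.github.com/repos/HelloZeroNet/ZeroNet"))
print(info.get("stargazers_count"))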

View file

@ -1,12 +1,13 @@
import gevent, time
import gevent
import time
class Noparallel(object): # Only allow function running once in same time
def __init__(self,blocking=True):
def __init__(self, blocking=True):
self.threads = {}
self.blocking = blocking # Blocking: Acts like normal function else thread returned
def __call__(self, func):
def wrapper(*args, **kwargs):
key = (func, tuple(args), tuple(kwargs.items())) # Unique key for function including parameters
@ -38,10 +39,12 @@ class Noparallel(object): # Only allow function running once in same time
# Cleanup finished threads
def cleanup(self, key, thread):
if key in self.threads: del(self.threads[key])
if key in self.threads:
del(self.threads[key])
class Test():
@Noparallel()
def count(self, num=5):
for i in range(num):
@ -51,6 +54,7 @@ class Test():
class TestNoblock():
@Noparallel(blocking=False)
def count(self, num=5):
for i in range(num):
@ -85,6 +89,7 @@ def testBlocking():
print thread1.value, thread2.value, thread3.value, thread4.value
print "Done."
def testNoblocking():
test = TestNoblock()
test2 = TestNoblock()
@ -106,13 +111,13 @@ def testNoblocking():
print "Joining thread4"
thread4.join()
print thread1.value, thread2.value, thread3.value, thread4.value
print "Done."
def testBenchmark():
import time
def printThreadNum():
import gc
from greenlet import greenlet
@ -123,13 +128,12 @@ def testBenchmark():
test = TestNoblock()
s = time.time()
for i in range(3):
gevent.spawn(test.count, i+1)
print "Created in %.3fs" % (time.time()-s)
gevent.spawn(test.count, i + 1)
print "Created in %.3fs" % (time.time() - s)
printThreadNum()
time.sleep(5)
if __name__ == "__main__":
from gevent import monkey
monkey.patch_all()
@ -139,4 +143,3 @@ if __name__ == "__main__":
testBlocking()
print "Testing noblocking mode..."
testNoblocking()
print [instance.threads for instance in registry]

View file

@ -1,17 +1,22 @@
import json, re, os
import json
import re
import os
def queryFile(file_path, filter_path, filter_key = None, filter_val = None):
def queryFile(file_path, filter_path, filter_key=None, filter_val=None):
back = []
data = json.load(open(file_path))
if filter_path == ['']: return [data]
if filter_path == ['']:
return [data]
for key in filter_path: # Get to the point
data = data.get(key)
if not data: return
if not data:
return
for row in data:
if filter_val: # Filter by value
if row[filter_key] == filter_val: back.append(row)
if row[filter_key] == filter_val:
back.append(row)
else:
back.append(row)
@ -19,7 +24,7 @@ def queryFile(file_path, filter_path, filter_key = None, filter_val = None):
# Find in json files
# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6', u'added': 1422740732, u'message_id': 1},...]
# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1...beEp6', u'added': 1422740732, u'message_id': 1},...]
def query(path_pattern, filter):
if "=" in filter: # Filter by value
filter_path, filter_val = filter.split("=")
@ -40,22 +45,20 @@ def query(path_pattern, filter):
root = root.replace("\\", "/")
inner_path = root.replace(root_dir, "").strip("/")
for file_name in files:
if file_pattern != file_name: continue
if file_pattern != file_name:
continue
try:
res = queryFile(root+"/"+file_name, filter_path, filter_key, filter_val)
if not res: continue
except Exception, err: # Json load error
# print file_name, filter, err
res = queryFile(root + "/" + file_name, filter_path, filter_key, filter_val)
if not res:
continue
except Exception: # Json load error
continue
for row in res:
row["inner_path"] = inner_path
yield row
if __name__ == "__main__":
#for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "topics")):
# print row
for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
print row
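
query() globs path_pattern for json files, lets queryFile() walk down the filter path inside each one, and tags every matching row with the file's inner_path, so callers know which file a row came from. A usage sketch mirroring the commented-out "topics" example above (the directory layout and keys are made up):

# Hypothetical layout: data/users/*/data.json, each {"topics": [{"topic_id":.., "title":..}, ..]}
for row in query("data/users/*/data.json", "topics"):
    print(row["inner_path"], row.get("topic_id"))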

View file

@ -9,6 +9,8 @@ queue_db = {} # Commands queued to run
# Register event as called
# Return: None
def called(event):
called_db[event] = time.time()
@ -19,7 +21,7 @@ def isAllowed(event, allowed_again=10):
last_called = called_db.get(event)
if not last_called: # Its not called before
return True
elif time.time()-last_called >= allowed_again:
elif time.time() - last_called >= allowed_again:
del called_db[event] # Delete last call time to save memory
return True
else:
@ -34,16 +36,16 @@ def callQueue(event):
return func(*args, **kwargs)
# Rate limit and delay function call if needed, If the function called again within the rate limit interval then previous queued call will be dropped
# Return: Immedietly gevent thread
# Rate limit and delay function call if necessary
# If the function called again within the rate limit interval then previous queued call will be dropped
# Return: Immediately gevent thread
def callAsync(event, allowed_again=10, func=None, *args, **kwargs):
if isAllowed(event, allowed_again): # Not called recently, call it now
called(event)
# print "Calling now"
return gevent.spawn(func, *args, **kwargs)
else: # Called recently, schedule it for later
time_left = allowed_again-max(0, time.time()-called_db[event])
time_left = allowed_again - max(0, time.time() - called_db[event])
log.debug("Added to queue (%.2fs left): %s " % (time_left, event))
if not queue_db.get(event): # Function call not queued yet
thread = gevent.spawn_later(time_left, lambda: callQueue(event)) # Call this function later
@ -64,7 +66,7 @@ def call(event, allowed_again=10, func=None, *args, **kwargs):
return func(*args, **kwargs)
else: # Called recently, schedule it for later
time_left = max(0, allowed_again-(time.time()-called_db[event]))
time_left = max(0, allowed_again - (time.time() - called_db[event]))
# print "Time left: %s" % time_left, args, kwargs
log.debug("Calling sync (%.2fs left): %s" % (time_left, event))
time.sleep(time_left)
@ -78,11 +80,11 @@ def call(event, allowed_again=10, func=None, *args, **kwargs):
# Cleanup expired events every 3 minutes
def cleanup():
while 1:
expired = time.time()-60*2 # Cleanup if older than 2 minutes
expired = time.time() - 60 * 2 # Cleanup if older than 2 minutes
for event in called_db.keys():
if called_db[event] < expired:
del called_db[event]
time.sleep(60*3) # Every 3 minutes
time.sleep(60 * 3) # Every 3 minutes
gevent.spawn(cleanup)
@ -101,7 +103,7 @@ if __name__ == "__main__":
print "Testing async spam requests rate limit to 1/sec..."
for i in range(3000):
thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
time.sleep(float(random.randint(1,20))/100000)
time.sleep(float(random.randint(1, 20)) / 100000)
print thread.link(cb)
print "Done"
@ -110,13 +112,12 @@ if __name__ == "__main__":
print "Testing sync spam requests rate limit to 1/sec..."
for i in range(5):
call("publish data.json", 1, publish, "data.json %s" % i)
time.sleep(float(random.randint(1,100))/100)
time.sleep(float(random.randint(1, 100)) / 100)
print "Done"
print "Testing cleanup"
thread = callAsync("publish content.json single", 1, publish, "content.json single")
print "Needs to cleanup:", called_db, queue_db
print "Waiting 3min for cleanup process..."
time.sleep(60*3)
time.sleep(60 * 3)
print "Cleaned up:", called_db, queue_db

View file

@ -1,6 +1,7 @@
from lib.PySocks import socks
import socket
from lib.PySocks import socks
def create_connection(address, timeout=None, source_address=None):
sock = socks.socksocket()
@ -19,4 +20,3 @@ def monkeyPath(proxy_ip, proxy_port):
socket.socket = socks.socksocket
socket.create_connection = create_connection
socket.getaddrinfo = getaddrinfo
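
monkeyPath() above (name as committed) replaces socket.socket, socket.create_connection and socket.getaddrinfo with PySocks-backed versions, so every later connection in the process transparently goes through the SOCKS proxy. A usage sketch, assuming the module is importable as SocksProxy and a SOCKS5 listener on Tor's default port (both assumptions, not shown in this commit):

import SocksProxy  # Assumption: importable under this name
import urllib2

SocksProxy.monkeyPath("127.0.0.1", 9050)  # Hypothetical local Tor SOCKS proxy
# From here on, plain stdlib networking is routed through the proxy
print(urllib2.urlopen("http://example.com/").read(100))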

View file

@ -2,6 +2,7 @@
# Disable SSL compression to save massive memory and cpu
import logging
from Config import config
@ -9,7 +10,10 @@ def disableSSLCompression():
import ctypes
import ctypes.util
try:
openssl = ctypes.CDLL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32', ctypes.RTLD_GLOBAL)
openssl = ctypes.CDLL(
ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32',
ctypes.RTLD_GLOBAL
)
openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p
except Exception, err:
logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err)
@ -81,7 +85,7 @@ if not hasattr(_ssl, 'sslwrap'):
logging.debug("Missing SSLwrap, readded.")
# Add SSLContext to gevent.ssl (Ubutunu 15 fix)
# Add SSLContext to gevent.ssl (Ubuntu 15 fix)
try:
import gevent
if not hasattr(gevent.ssl, "SSLContext"):

View file

@ -1,12 +1,15 @@
import msgpack, os, struct
import os
import struct
import msgpack
def msgpackHeader(size):
if size <= 2**8-1:
if size <= 2 ** 8 - 1:
return b"\xc4" + struct.pack("B", size)
elif size <= 2**16-1:
elif size <= 2 ** 16 - 1:
return b"\xc5" + struct.pack(">H", size)
elif size <= 2**32-1:
elif size <= 2 ** 32 - 1:
return b"\xc6" + struct.pack(">I", size)
else:
raise Exception("huge binary string")
@ -18,15 +21,16 @@ def stream(data, writer):
for key, val in data.iteritems():
writer(packer.pack(key))
if issubclass(type(val), file): # File obj
max_size = os.fstat(val.fileno()).st_size-val.tell()
max_size = os.fstat(val.fileno()).st_size - val.tell()
size = min(max_size, val.read_bytes)
bytes_left = size
writer(msgpackHeader(size))
buff = 1024*64
buff = 1024 * 64
while 1:
writer(val.read(min(bytes_left, buff)))
bytes_left = bytes_left-buff
if bytes_left <= 0: break
bytes_left = bytes_left - buff
if bytes_left <= 0:
break
else: # Simple
writer(packer.pack(val))
return size
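
msgpackHeader() above emits the msgpack bin-family header by hand (0xc4/0xc5/0xc6 for 8/16/32-bit lengths), which is what lets stream() write a file's bytes straight to the socket after the header instead of buffering the whole payload through the packer. The length encoding, checked in isolation:

import struct

def msgpackHeader(size):
    if size <= 2 ** 8 - 1:
        return b"\xc4" + struct.pack("B", size)   # bin 8: 1-byte length
    elif size <= 2 ** 16 - 1:
        return b"\xc5" + struct.pack(">H", size)  # bin 16: 2-byte big-endian length
    elif size <= 2 ** 32 - 1:
        return b"\xc6" + struct.pack(">I", size)  # bin 32: 4-byte big-endian length
    raise Exception("huge binary string")

print(repr(msgpackHeader(100)))    # '\xc4d'             (bin 8, length 100)
print(repr(msgpackHeader(300)))    # '\xc5\x01,'          (bin 16, 300 = 0x012c)
print(repr(msgpackHeader(70000)))  # '\xc6\x00\x01\x11p'  (bin 32, 70000 = 0x00011170)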

View file

@ -1,10 +1,13 @@
import gevent
from gevent import socket
import re, urllib2, httplib, logging
import re
import urllib2
import httplib
import logging
from urlparse import urlparse
from xml.dom.minidom import parseString
import gevent
from gevent import socket
# Relevant UPnP spec: http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf
# General TODOs:
@ -188,7 +191,7 @@ def open_port(port=15441, desc="UpnpPunch"):
local_ips = list(set(local_ips)) # Delete duplicates
logging.debug("Found local ips: %s" % local_ips)
local_ips = local_ips*3 # Retry every ip 3 times
local_ips = local_ips * 3 # Retry every ip 3 times
for local_ip in local_ips:
logging.debug("Trying using local ip: %s" % local_ip)

View file

@ -1,7 +1,16 @@
from gevent import monkey; monkey.patch_all()
import urllib, zipfile, os, ssl, httplib, socket, re
import urllib
import zipfile
import os
import ssl
import httplib
import socket
import re
import cStringIO as StringIO
from gevent import monkey
monkey.patch_all()
def update():
# Gevent https bug workaround (https://github.com/gevent/gevent/issues/477)
reload(socket)
@ -12,8 +21,9 @@ def update():
file = urllib.urlopen("https://github.com/HelloZeroNet/ZeroNet/archive/master.zip")
data = StringIO.StringIO()
while True:
buff = file.read(1024*16)
if not buff: break
buff = file.read(1024 * 16)
if not buff:
break
data.write(buff)
print ".",
print "Downloaded."
@ -29,24 +39,23 @@ def update():
plugins_enabled.append(dir)
print "Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled
print "Extracting...",
zip = zipfile.ZipFile(data)
for inner_path in zip.namelist():
inner_path = inner_path.replace("\\", "/") # Make sure we have unix path
print ".",
dest_path = inner_path.replace("ZeroNet-master/", "")
if not dest_path: continue
if not dest_path:
continue
# Keep plugin disabled/enabled status
match = re.match("plugins/([^/]+)", dest_path)
if match:
plugin_name = match.group(1).replace("disabled-","")
plugin_name = match.group(1).replace("disabled-", "")
if plugin_name in plugins_enabled: # Plugin was enabled
dest_path = dest_path.replace("plugins/disabled-"+plugin_name, "plugins/"+plugin_name)
dest_path = dest_path.replace("plugins/disabled-" + plugin_name, "plugins/" + plugin_name)
elif plugin_name in plugins_disabled: # Plugin was disabled
dest_path = dest_path.replace("plugins/"+plugin_name, "plugins/disabled-"+plugin_name)
dest_path = dest_path.replace("plugins/" + plugin_name, "plugins/disabled-" + plugin_name)
print "P",
dest_dir = os.path.dirname(dest_path)