rev280, The whole project reformatted to PEP8, UiRequest getPosted to query posted variables

This commit is contained in:
HelloZeroNet 2015-07-12 20:36:46 +02:00
parent a5741704e4
commit b5ecb62bc6
49 changed files with 5704 additions and 5205 deletions

View file

@ -1,17 +1,22 @@
import re, time, cgi, os import time
import cgi
import os
from Plugin import PluginManager from Plugin import PluginManager
from Config import config from Config import config
@PluginManager.registerTo("UiRequest") @PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object): class UiRequestPlugin(object):
def formatTableRow(self, row): def formatTableRow(self, row):
back = [] back = []
for format, val in row: for format, val in row:
if val == None: if val is None:
formatted = "n/a" formatted = "n/a"
elif format == "since": elif format == "since":
if val: if val:
formatted = "%.0f" % (time.time()-val) formatted = "%.0f" % (time.time() - val)
else: else:
formatted = "n/a" formatted = "n/a"
else: else:
@ -19,17 +24,16 @@ class UiRequestPlugin(object):
back.append("<td>%s</td>" % formatted) back.append("<td>%s</td>" % formatted)
return "<tr>%s</tr>" % "".join(back) return "<tr>%s</tr>" % "".join(back)
def getObjSize(self, obj, hpy=None):
def getObjSize(self, obj, hpy = None):
if hpy: if hpy:
return float(hpy.iso(obj).domisize)/1024 return float(hpy.iso(obj).domisize) / 1024
else: else:
return 0 return 0
# /Stats entry point # /Stats entry point
def actionStats(self): def actionStats(self):
import gc, sys import gc
import sys
from Ui import UiRequest from Ui import UiRequest
from Crypt import CryptConnection from Crypt import CryptConnection
@ -58,7 +62,10 @@ class UiRequestPlugin(object):
yield "%s | " % config.ip_external yield "%s | " % config.ip_external
yield "Opened: %s | " % main.file_server.port_opened yield "Opened: %s | " % main.file_server.port_opened
yield "Crypt: %s | " % CryptConnection.manager.crypt_supported yield "Crypt: %s | " % CryptConnection.manager.crypt_supported
yield "In: %.2fMB, Out: %.2fMB | " % (float(main.file_server.bytes_recv)/1024/1024, float(main.file_server.bytes_sent)/1024/1024) yield "In: %.2fMB, Out: %.2fMB | " % (
float(main.file_server.bytes_recv) / 1024 / 1024,
float(main.file_server.bytes_sent) / 1024 / 1024
)
yield "Peerid: %s | " % main.file_server.peer_id yield "Peerid: %s | " % main.file_server.peer_id
import psutil import psutil
process = psutil.Process(os.getpid()) process = psutil.Process(os.getpid())
@ -69,14 +76,17 @@ class UiRequestPlugin(object):
yield "Files: %s | " % len(process.open_files()) yield "Files: %s | " % len(process.open_files())
yield "Sockets: %s | " % len(process.connections()) yield "Sockets: %s | " % len(process.connections())
yield "Calc size <a href='?size=1'>on</a> <a href='?size=0'>off</a>" yield "Calc size <a href='?size=1'>on</a> <a href='?size=0'>off</a>"
except Exception, err: except Exception:
pass pass
yield "<br>" yield "<br>"
# Connections # Connections
yield "<b>Connections</b> (%s, total made: %s):<br>" % (len(main.file_server.connections), main.file_server.last_connection_id) yield "<b>Connections</b> (%s, total made: %s):<br>" % (
yield "<table><tr> <th>id</th> <th>proto</th> <th>type</th> <th>ip</th> <th>open</th> <th>crypt</th> <th>ping</th> <th>buff</th>" len(main.file_server.connections), main.file_server.last_connection_id
yield "<th>idle</th> <th>open</th> <th>delay</th> <th>out</th> <th>in</th> <th>last sent</th> <th>waiting</th> <th>version</th> <th>peerid</th> </tr>" )
yield "<table><tr> <th>id</th> <th>proto</th> <th>type</th> <th>ip</th> <th>open</th> <th>crypt</th> <th>ping</th>"
yield "<th>buff</th> <th>idle</th> <th>open</th> <th>delay</th> <th>out</th> <th>in</th> <th>last sent</th>"
yield "<th>waiting</th> <th>version</th> <th>peerid</th> </tr>"
for connection in main.file_server.connections: for connection in main.file_server.connections:
if "cipher" in dir(connection.sock): if "cipher" in dir(connection.sock):
cipher = connection.sock.cipher()[0] cipher = connection.sock.cipher()[0]
@ -93,30 +103,32 @@ class UiRequestPlugin(object):
("%s", connection.incomplete_buff_recv), ("%s", connection.incomplete_buff_recv),
("since", max(connection.last_send_time, connection.last_recv_time)), ("since", max(connection.last_send_time, connection.last_recv_time)),
("since", connection.start_time), ("since", connection.start_time),
("%.3f", connection.last_sent_time-connection.last_send_time), ("%.3f", connection.last_sent_time - connection.last_send_time),
("%.0fkB", connection.bytes_sent/1024), ("%.0fkB", connection.bytes_sent / 1024),
("%.0fkB", connection.bytes_recv/1024), ("%.0fkB", connection.bytes_recv / 1024),
("%s", connection.last_cmd), ("%s", connection.last_cmd),
("%s", connection.waiting_requests.keys()), ("%s", connection.waiting_requests.keys()),
("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?")) ), ("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?"))),
("%s", connection.handshake.get("peer_id")), ("%s", connection.handshake.get("peer_id")),
]) ])
yield "</table>" yield "</table>"
# Sites # Sites
yield "<br><br><b>Sites</b>:" yield "<br><br><b>Sites</b>:"
yield "<table>" yield "<table>"
yield "<tr><th>address</th> <th>connected</th> <th>peers</th> <th>content.json</th> </tr>" yield "<tr><th>address</th> <th>connected</th> <th>peers</th> <th>content.json</th> </tr>"
for site in self.server.sites.values(): for site in self.server.sites.values():
yield self.formatTableRow([ yield self.formatTableRow([
("<a href='#ShowPeers' onclick='document.getElementById(\"peers_%s\").style.display=\"initial\"; return false'>%s</a>", (site.address, site.address)), (
"""<a href='#' onclick='document.getElementById("peers_%s").style.display="initial"; return false'>%s</a>""",
(site.address, site.address)
),
("%s", [peer.connection.id for peer in site.peers.values() if peer.connection and peer.connection.connected]), ("%s", [peer.connection.id for peer in site.peers.values() if peer.connection and peer.connection.connected]),
("%s/%s/%s", ( ("%s/%s/%s", (
len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]), len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]),
len(site.getConnectablePeers(100)), len(site.getConnectablePeers(100)),
len(site.peers) len(site.peers)
) ), )),
("%s", len(site.content_manager.contents)), ("%s", len(site.content_manager.contents)),
]) ])
yield "<tr><td id='peers_%s' style='display: none; white-space: pre'>" % site.address yield "<tr><td id='peers_%s' style='display: none; white-space: pre'>" % site.address
@ -125,55 +137,59 @@ class UiRequestPlugin(object):
yield "<br></td></tr>" yield "<br></td></tr>"
yield "</table>" yield "</table>"
# Object types # Object types
obj_count = {} obj_count = {}
for obj in gc.get_objects(): for obj in gc.get_objects():
obj_type = str(type(obj)) obj_type = str(type(obj))
if not obj_type in obj_count: if obj_type not in obj_count:
obj_count[obj_type] = [0, 0] obj_count[obj_type] = [0, 0]
obj_count[obj_type][0] += 1 # Count obj_count[obj_type][0] += 1 # Count
obj_count[obj_type][1] += float(sys.getsizeof(obj))/1024 # Size obj_count[obj_type][1] += float(sys.getsizeof(obj)) / 1024 # Size
yield "<br><br><b>Objects in memory (types: %s, total: %s, %.2fkb):</b><br>" % (len(obj_count), sum([stat[0] for stat in obj_count.values()]), sum([stat[1] for stat in obj_count.values()])) yield "<br><br><b>Objects in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
len(obj_count),
sum([stat[0] for stat in obj_count.values()]),
sum([stat[1] for stat in obj_count.values()])
)
for obj, stat in sorted(obj_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count for obj, stat in sorted(obj_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count
yield " - %.1fkb = %s x <a href=\"/Listobj?type=%s\">%s</a><br>" % (stat[1], stat[0], obj, cgi.escape(obj)) yield " - %.1fkb = %s x <a href=\"/Listobj?type=%s\">%s</a><br>" % (stat[1], stat[0], obj, cgi.escape(obj))
# Classes # Classes
class_count = {} class_count = {}
for obj in gc.get_objects(): for obj in gc.get_objects():
obj_type = str(type(obj)) obj_type = str(type(obj))
if obj_type != "<type 'instance'>": continue if obj_type != "<type 'instance'>":
continue
class_name = obj.__class__.__name__ class_name = obj.__class__.__name__
if not class_name in class_count: if class_name not in class_count:
class_count[class_name] = [0, 0] class_count[class_name] = [0, 0]
class_count[class_name][0] += 1 # Count class_count[class_name][0] += 1 # Count
class_count[class_name][1] += float(sys.getsizeof(obj))/1024 # Size class_count[class_name][1] += float(sys.getsizeof(obj)) / 1024 # Size
yield "<br><br><b>Classes in memory (types: %s, total: %s, %.2fkb):</b><br>" % (len(class_count), sum([stat[0] for stat in class_count.values()]), sum([stat[1] for stat in class_count.values()])) yield "<br><br><b>Classes in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
len(class_count),
sum([stat[0] for stat in class_count.values()]),
sum([stat[1] for stat in class_count.values()])
)
for obj, stat in sorted(class_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count for obj, stat in sorted(class_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count
yield " - %.1fkb = %s x <a href=\"/Dumpobj?class=%s\">%s</a><br>" % (stat[1], stat[0], obj, cgi.escape(obj)) yield " - %.1fkb = %s x <a href=\"/Dumpobj?class=%s\">%s</a><br>" % (stat[1], stat[0], obj, cgi.escape(obj))
from greenlet import greenlet from greenlet import greenlet
objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)] objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
yield "<br>Greenlets (%s):<br>" % len(objs) yield "<br>Greenlets (%s):<br>" % len(objs)
for obj in objs: for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
from Worker import Worker from Worker import Worker
objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)] objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)]
yield "<br>Workers (%s):<br>" % len(objs) yield "<br>Workers (%s):<br>" % len(objs)
for obj in objs: for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
from Connection import Connection from Connection import Connection
objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)] objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)]
yield "<br>Connections (%s):<br>" % len(objs) yield "<br>Connections (%s):<br>" % len(objs)
@ -192,33 +208,28 @@ class UiRequestPlugin(object):
for obj in objs: for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
from Site import Site from Site import Site
objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)] objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)]
yield "<br>Sites (%s):<br>" % len(objs) yield "<br>Sites (%s):<br>" % len(objs)
for obj in objs: for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
objs = [obj for obj in gc.get_objects() if isinstance(obj, self.server.log.__class__)] objs = [obj for obj in gc.get_objects() if isinstance(obj, self.server.log.__class__)]
yield "<br>Loggers (%s):<br>" % len(objs) yield "<br>Loggers (%s):<br>" % len(objs)
for obj in objs: for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj.name))) yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj.name)))
objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)] objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)]
yield "<br>UiRequests (%s):<br>" % len(objs) yield "<br>UiRequests (%s):<br>" % len(objs)
for obj in objs: for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
from Peer import Peer from Peer import Peer
objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)] objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)]
yield "<br>Peers (%s):<br>" % len(objs) yield "<br>Peers (%s):<br>" % len(objs)
for obj in objs: for obj in objs:
yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
objs = [(key, val) for key, val in sys.modules.iteritems() if val is not None] objs = [(key, val) for key, val in sys.modules.iteritems() if val is not None]
objs.sort() objs.sort()
yield "<br>Modules (%s):<br>" % len(objs) yield "<br>Modules (%s):<br>" % len(objs)
@ -226,11 +237,11 @@ class UiRequestPlugin(object):
yield " - %.3fkb: %s %s<br>" % (self.getObjSize(module, hpy), module_name, cgi.escape(repr(module))) yield " - %.3fkb: %s %s<br>" % (self.getObjSize(module, hpy), module_name, cgi.escape(repr(module)))
gc.collect() # Implicit grabage collection gc.collect() # Implicit grabage collection
yield "Done in %.1f" % (time.time()-s) yield "Done in %.1f" % (time.time() - s)
def actionDumpobj(self): def actionDumpobj(self):
import gc, sys import gc
import sys
self.sendHeader() self.sendHeader()
class_filter = self.get.get("class") class_filter = self.get.get("class")
@ -245,17 +256,18 @@ class UiRequestPlugin(object):
objs = gc.get_objects() objs = gc.get_objects()
for obj in objs: for obj in objs:
obj_type = str(type(obj)) obj_type = str(type(obj))
if obj_type != "<type 'instance'>" or obj.__class__.__name__ != class_filter: continue if obj_type != "<type 'instance'>" or obj.__class__.__name__ != class_filter:
yield "%.1fkb %s... " % (float(sys.getsizeof(obj))/1024, cgi.escape(str(obj)) ) continue
yield "%.1fkb %s... " % (float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)))
for attr in dir(obj): for attr in dir(obj):
yield "- %s: %s<br>" % (attr, cgi.escape(str(getattr(obj, attr)))) yield "- %s: %s<br>" % (attr, cgi.escape(str(getattr(obj, attr))))
yield "<br>" yield "<br>"
gc.collect() # Implicit grabage collection gc.collect() # Implicit grabage collection
def actionListobj(self): def actionListobj(self):
import gc, sys import gc
import sys
self.sendHeader() self.sendHeader()
type_filter = self.get.get("type") type_filter = self.get.get("type")
@ -273,34 +285,42 @@ class UiRequestPlugin(object):
objs = gc.get_objects() objs = gc.get_objects()
for obj in objs: for obj in objs:
obj_type = str(type(obj)) obj_type = str(type(obj))
if obj_type != type_filter: continue if obj_type != type_filter:
refs = [ref for ref in gc.get_referrers(obj) if hasattr(ref, "__class__") and ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"]] continue
if not refs: continue refs = [
yield "%.1fkb <span title=\"%s\">%s</span>... " % (float(sys.getsizeof(obj))/1024, cgi.escape(str(obj)), cgi.escape(str(obj)[0:100].ljust(100)) ) ref for ref in gc.get_referrers(obj)
if hasattr(ref, "__class__") and
ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"]
]
if not refs:
continue
yield "%.1fkb <span title=\"%s\">%s</span>... " % (
float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)), cgi.escape(str(obj)[0:100].ljust(100))
)
for ref in refs: for ref in refs:
yield " [" yield " ["
if "object at" in str(ref) or len(str(ref)) > 100: if "object at" in str(ref) or len(str(ref)) > 100:
yield str(ref.__class__.__name__) yield str(ref.__class__.__name__)
else: else:
yield str(ref.__class__.__name__)+":"+cgi.escape(str(ref)) yield str(ref.__class__.__name__) + ":" + cgi.escape(str(ref))
yield "] " yield "] "
ref_type = ref.__class__.__name__ ref_type = ref.__class__.__name__
if ref_type not in ref_count: if ref_type not in ref_count:
ref_count[ref_type] = [0,0] ref_count[ref_type] = [0, 0]
ref_count[ref_type][0] += 1 # Count ref_count[ref_type][0] += 1 # Count
ref_count[ref_type][1] += float(sys.getsizeof(obj))/1024 # Size ref_count[ref_type][1] += float(sys.getsizeof(obj)) / 1024 # Size
yield "<br>" yield "<br>"
yield "<br>Object referrer (total: %s, %.2fkb):<br>" % (len(ref_count), sum([stat[1] for stat in ref_count.values()])) yield "<br>Object referrer (total: %s, %.2fkb):<br>" % (len(ref_count), sum([stat[1] for stat in ref_count.values()]))
for obj, stat in sorted(ref_count.items(), key=lambda x: x[1][0], reverse=True)[0:30]: # Sorted by count for obj, stat in sorted(ref_count.items(), key=lambda x: x[1][0], reverse=True)[0:30]: # Sorted by count
yield " - %.1fkb = %s x %s<br>" % (stat[1], stat[0], cgi.escape(str(obj)) ) yield " - %.1fkb = %s x %s<br>" % (stat[1], stat[0], cgi.escape(str(obj)))
gc.collect() # Implicit grabage collection gc.collect() # Implicit grabage collection
def actionBenchmark(self): def actionBenchmark(self):
import sys, gc import sys
import gc
from contextlib import contextmanager from contextlib import contextmanager
output = self.sendHeader() output = self.sendHeader()
@ -313,20 +333,27 @@ class UiRequestPlugin(object):
yield 1 yield 1
except Exception, err: except Exception, err:
output("<br><b>! Error: %s</b><br>" % err) output("<br><b>! Error: %s</b><br>" % err)
taken = time.time()-s taken = time.time() - s
multipler = standard/taken multipler = standard / taken
if multipler < 0.3: speed = "Sloooow" if multipler < 0.3:
elif multipler < 0.5: speed = "Ehh" speed = "Sloooow"
elif multipler < 0.8: speed = "Goodish" elif multipler < 0.5:
elif multipler < 1.2: speed = "OK" speed = "Ehh"
elif multipler < 1.7: speed = "Fine" elif multipler < 0.8:
elif multipler < 2.5: speed = "Fast" speed = "Goodish"
elif multipler < 3.5: speed = "WOW" elif multipler < 1.2:
else: speed = "Insane!!" speed = "OK"
elif multipler < 1.7:
speed = "Fine"
elif multipler < 2.5:
speed = "Fast"
elif multipler < 3.5:
speed = "WOW"
else:
speed = "Insane!!"
output("%.3fs [x%.2f: %s]<br>" % (taken, multipler, speed)) output("%.3fs [x%.2f: %s]<br>" % (taken, multipler, speed))
time.sleep(0.01) time.sleep(0.01)
yield """ yield """
<style> <style>
* { font-family: monospace } * { font-family: monospace }
@ -334,7 +361,7 @@ class UiRequestPlugin(object):
</style> </style>
""" """
yield "Benchmarking ZeroNet %s (rev%s) Python %s, platform: %s...<br>" % (config.version, config.rev, sys.version, sys.platform) yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...<br>" % (config.version, config.rev, sys.version, sys.platform)
t = time.time() t = time.time()
@ -347,13 +374,12 @@ class UiRequestPlugin(object):
with benchmark("hdPrivatekey x 10", 0.7): with benchmark("hdPrivatekey x 10", 0.7):
for i in range(10): for i in range(10):
privatekey = CryptBitcoin.hdPrivatekey(seed, i*10) privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10)
yield "." yield "."
valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk" valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
assert privatekey == valid, "%s != %s" % (privatekey, valid) assert privatekey == valid, "%s != %s" % (privatekey, valid)
data = "Hello" * 1024 # 5k
data = "Hello"*1024 #5k
with benchmark("sign x 10", 0.35): with benchmark("sign x 10", 0.35):
for i in range(10): for i in range(10):
yield "." yield "."
@ -361,32 +387,31 @@ class UiRequestPlugin(object):
valid = "HFGXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOi+4+BbWHjuwmx0EaKNV1G+kP0tQDxWu0YApxwxZbSmZU=" valid = "HFGXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOi+4+BbWHjuwmx0EaKNV1G+kP0tQDxWu0YApxwxZbSmZU="
assert sign == valid, "%s != %s" % (sign, valid) assert sign == valid, "%s != %s" % (sign, valid)
address = CryptBitcoin.privatekeyToAddress(privatekey) address = CryptBitcoin.privatekeyToAddress(privatekey)
if CryptBitcoin.opensslVerify: # Openssl avalible if CryptBitcoin.opensslVerify: # Openssl avalible
with benchmark("openssl verify x 100", 0.37): with benchmark("openssl verify x 100", 0.37):
for i in range(100): for i in range(100):
if i%10==0: yield "." if i % 10 == 0:
yield "."
ok = CryptBitcoin.verify(data, address, sign) ok = CryptBitcoin.verify(data, address, sign)
assert ok, "does not verify from %s" % address assert ok, "does not verify from %s" % address
else: else:
yield " - openssl verify x 100...not avalible :(<br>" yield " - openssl verify x 100...not avalible :(<br>"
opensslVerify_bk = CryptBitcoin.opensslVerify # Emulate openssl not found in any way openssl_verify_bk = CryptBitcoin.opensslVerify # Emulate openssl not found in any way
CryptBitcoin.opensslVerify = None CryptBitcoin.opensslVerify = None
with benchmark("pure-python verify x 10", 1.6): with benchmark("pure-python verify x 10", 1.6):
for i in range(10): for i in range(10):
yield "." yield "."
ok = CryptBitcoin.verify(data, address, sign) ok = CryptBitcoin.verify(data, address, sign)
assert ok, "does not verify from %s" % address assert ok, "does not verify from %s" % address
CryptBitcoin.opensslVerify = opensslVerify_bk CryptBitcoin.opensslVerify = openssl_verify_bk
yield "<br>CryptHash:<br>" yield "<br>CryptHash:<br>"
from Crypt import CryptHash from Crypt import CryptHash
from cStringIO import StringIO from cStringIO import StringIO
data = StringIO("Hello"*1024*1024) #5m data = StringIO("Hello" * 1024 * 1024) # 5m
with benchmark("sha512 x 10 000", 1): with benchmark("sha512 x 10 000", 1):
for i in range(10): for i in range(10):
for y in range(10000): for y in range(10000):
@ -395,7 +420,6 @@ class UiRequestPlugin(object):
valid = "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce" valid = "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"
assert hash == valid, "%s != %s" % (hash, valid) assert hash == valid, "%s != %s" % (hash, valid)
yield "<br>Db:<br>" yield "<br>Db:<br>"
from Db import Db from Db import Db
@ -422,7 +446,8 @@ class UiRequestPlugin(object):
} }
} }
if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir) if os.path.isfile("%s/benchmark.db" % config.data_dir):
os.unlink("%s/benchmark.db" % config.data_dir)
with benchmark("Open x 10", 0.13): with benchmark("Open x 10", 0.13):
for i in range(10): for i in range(10):
@ -431,7 +456,6 @@ class UiRequestPlugin(object):
db.close() db.close()
yield "." yield "."
db = Db(schema, "%s/benchmark.db" % config.data_dir) db = Db(schema, "%s/benchmark.db" % config.data_dir)
db.checkTables() db.checkTables()
import json import json
@ -446,7 +470,6 @@ class UiRequestPlugin(object):
os.unlink("%s/test_%s.json" % (config.data_dir, u)) os.unlink("%s/test_%s.json" % (config.data_dir, u))
yield "." yield "."
with benchmark("Buffered insert x 100 x 100", 1.3): with benchmark("Buffered insert x 100 x 100", 1.3):
cur = db.getCursor() cur = db.getCursor()
cur.execute("BEGIN") cur.execute("BEGIN")
@ -458,7 +481,8 @@ class UiRequestPlugin(object):
json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w")) json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
db.loadJson("%s/test_%s.json" % (config.data_dir, u), cur=cur) db.loadJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
os.unlink("%s/test_%s.json" % (config.data_dir, u)) os.unlink("%s/test_%s.json" % (config.data_dir, u))
if u%10 == 0: yield "." if u % 10 == 0:
yield "."
cur.execute("COMMIT") cur.execute("COMMIT")
yield " - Total rows in db: %s<br>" % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0] yield " - Total rows in db: %s<br>" % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0]
@ -471,11 +495,11 @@ class UiRequestPlugin(object):
res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i) res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
for row in res: for row in res:
found += 1 found += 1
if i%100 == 0: yield "." if i % 100 == 0:
yield "."
assert found == 20000, "Found: %s != 20000" % found assert found == 20000, "Found: %s != 20000" % found
with benchmark("Not indexed query x 100", 0.6): with benchmark("Not indexed query x 100", 0.6):
found = 0 found = 0
cur = db.getCursor() cur = db.getCursor()
@ -484,11 +508,11 @@ class UiRequestPlugin(object):
res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i) res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
for row in res: for row in res:
found += 1 found += 1
if i%10 == 0: yield "." if i % 10 == 0:
yield "."
assert found == 18900, "Found: %s != 18900" % found assert found == 18900, "Found: %s != 18900" % found
with benchmark("Like query x 100", 1.8): with benchmark("Like query x 100", 1.8):
found = 0 found = 0
cur = db.getCursor() cur = db.getCursor()
@ -497,18 +521,18 @@ class UiRequestPlugin(object):
res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i) res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
for row in res: for row in res:
found += 1 found += 1
if i%10 == 0: yield "." if i % 10 == 0:
yield "."
assert found == 38900, "Found: %s != 11000" % found assert found == 38900, "Found: %s != 11000" % found
db.close() db.close()
if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir) if os.path.isfile("%s/benchmark.db" % config.data_dir):
os.unlink("%s/benchmark.db" % config.data_dir)
gc.collect() # Implicit grabage collection gc.collect() # Implicit grabage collection
yield "<br>Done. Total: %.2fs" % (time.time()-t) yield "<br>Done. Total: %.2fs" % (time.time() - t)
def actionGcCollect(self): def actionGcCollect(self):
import gc import gc

View file

@ -1,12 +1,17 @@
import re, time, cgi, os, sys import time
from Plugin import PluginManager import os
from Config import config import sys
import atexit import atexit
allow_reload = False # No reload supported from Plugin import PluginManager
from Config import config
allow_reload = False # No source reload supported in this plugin
@PluginManager.registerTo("Actions") @PluginManager.registerTo("Actions")
class ActionsPlugin(object): class ActionsPlugin(object):
def main(self): def main(self):
global notificationicon, winfolders global notificationicon, winfolders
from lib import notificationicon, winfolders from lib import notificationicon, winfolders
@ -14,7 +19,10 @@ class ActionsPlugin(object):
self.main = sys.modules["main"] self.main = sys.modules["main"]
icon = notificationicon.NotificationIcon(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'), "ZeroNet %s" % config.version) icon = notificationicon.NotificationIcon(
os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'),
"ZeroNet %s" % config.version
)
self.icon = icon self.icon = icon
if not config.debug: # Hide console if not in debug mode if not config.debug: # Hide console if not in debug mode
@ -34,14 +42,12 @@ class ActionsPlugin(object):
(self.titleConsole, self.toggleConsole), (self.titleConsole, self.toggleConsole),
(self.titleAutorun, self.toggleAutorun), (self.titleAutorun, self.toggleAutorun),
"--", "--",
("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet") ), ("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet")),
("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/") ), ("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/")),
("ZeroNet Github", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet") ), ("ZeroNet Github", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet")),
("Report bug/request feature", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues") ), ("Report bug/request feature", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues")),
"--", "--",
("!Open ZeroNet", lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port)) ), ("!Open ZeroNet", lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port))),
#"--",
#("Start ZeroNet when Windows starts", quit),
"--", "--",
("Quit", self.quit), ("Quit", self.quit),
@ -52,19 +58,16 @@ class ActionsPlugin(object):
super(ActionsPlugin, self).main() super(ActionsPlugin, self).main()
icon._die = True icon._die = True
def quit(self): def quit(self):
self.icon.die() self.icon.die()
time.sleep(0.1) time.sleep(0.1)
self.main.ui_server.stop() self.main.ui_server.stop()
self.main.file_server.stop() self.main.file_server.stop()
#sys.exit() # sys.exit()
def opensite(self, url): def opensite(self, url):
import webbrowser import webbrowser
webbrowser.open(url, new=2) webbrowser.open(url, new=0)
def titleIp(self): def titleIp(self):
title = "!IP: %s" % config.ip_external title = "!IP: %s" % config.ip_external
@ -74,21 +77,22 @@ class ActionsPlugin(object):
title += " (passive)" title += " (passive)"
return title return title
def titleConnections(self): def titleConnections(self):
title = "Connections: %s" % len(self.main.file_server.connections) title = "Connections: %s" % len(self.main.file_server.connections)
return title return title
def titleTransfer(self): def titleTransfer(self):
title = "Received: %.2f MB | Sent: %.2f MB" % (float(self.main.file_server.bytes_recv)/1024/1024, float(self.main.file_server.bytes_sent)/1024/1024) title = "Received: %.2f MB | Sent: %.2f MB" % (
float(self.main.file_server.bytes_recv) / 1024 / 1024,
float(self.main.file_server.bytes_sent) / 1024 / 1024
)
return title return title
def titleConsole(self): def titleConsole(self):
if self.console: return "+Show console window" if self.console:
else: return "Show console window" return "+Show console window"
else:
return "Show console window"
def toggleConsole(self): def toggleConsole(self):
if self.console: if self.console:
@ -98,30 +102,30 @@ class ActionsPlugin(object):
notificationicon.showConsole() notificationicon.showConsole()
self.console = True self.console = True
def getAutorunPath(self): def getAutorunPath(self):
return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP) return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP)
def formatAutorun(self): def formatAutorun(self):
args = sys.argv[:] args = sys.argv[:]
args.insert(0, sys.executable) args.insert(0, sys.executable)
if sys.platform == 'win32': if sys.platform == 'win32':
args = ['"%s"' % arg for arg in args] args = ['"%s"' % arg for arg in args]
cmd = " ".join(args) cmd = " ".join(args)
cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "") # Dont open browser on autorun
return "cd /D %s \n%s" % (os.getcwd(), cmd)
# Dont open browser on autorun
cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "")
return "cd /D %s \n%s" % (os.getcwd(), cmd)
def isAutorunEnabled(self): def isAutorunEnabled(self):
path = self.getAutorunPath() path = self.getAutorunPath()
return os.path.isfile(path) and open(path).read() == self.formatAutorun() return os.path.isfile(path) and open(path).read() == self.formatAutorun()
def titleAutorun(self): def titleAutorun(self):
if self.isAutorunEnabled(): return "+Start ZeroNet when Windows starts" if self.isAutorunEnabled():
else: return "Start ZeroNet when Windows starts" return "+Start ZeroNet when Windows starts"
else:
return "Start ZeroNet when Windows starts"
def toggleAutorun(self): def toggleAutorun(self):
if self.isAutorunEnabled(): if self.isAutorunEnabled():

View file

@ -1,8 +1,7 @@
import logging, json, os, re, sys, time import logging
import gevent import re
from Plugin import PluginManager from Plugin import PluginManager
from Config import config
from Debug import Debug
allow_reload = False # No reload supported allow_reload = False # No reload supported
@ -13,9 +12,11 @@ log = logging.getLogger("ZeronamePlugin")
class SiteManagerPlugin(object): class SiteManagerPlugin(object):
zeroname_address = "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F" zeroname_address = "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F"
site_zeroname = None site_zeroname = None
def load(self): def load(self):
super(SiteManagerPlugin, self).load() super(SiteManagerPlugin, self).load()
if not self.get(self.zeroname_address): self.need(self.zeroname_address) # Need ZeroName site if not self.get(self.zeroname_address):
self.need(self.zeroname_address) # Need ZeroName site
# Checks if its a valid address # Checks if its a valid address
def isAddress(self, address): def isAddress(self, address):
@ -24,12 +25,10 @@ class SiteManagerPlugin(object):
else: else:
return super(SiteManagerPlugin, self).isAddress(address) return super(SiteManagerPlugin, self).isAddress(address)
# Return: True if the address is domain # Return: True if the address is domain
def isDomain(self, address): def isDomain(self, address):
return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
# Resolve domain # Resolve domain
# Return: The address or None # Return: The address or None
def resolveDomain(self, domain): def resolveDomain(self, domain):
@ -40,7 +39,6 @@ class SiteManagerPlugin(object):
db = self.site_zeroname.storage.loadJson("data/names.json") db = self.site_zeroname.storage.loadJson("data/names.json")
return db.get(domain) return db.get(domain)
# Return or create site and start download site files # Return or create site and start download site files
# Return: Site or None if dns resolve failed # Return: Site or None if dns resolve failed
def need(self, address, all_file=True): def need(self, address, all_file=True):
@ -53,10 +51,9 @@ class SiteManagerPlugin(object):
return super(SiteManagerPlugin, self).need(address, all_file) return super(SiteManagerPlugin, self).need(address, all_file)
# Return: Site object or None if not found # Return: Site object or None if not found
def get(self, address): def get(self, address):
if self.sites == None: # Not loaded yet if self.sites is None: # Not loaded yet
self.load() self.load()
if self.isDomain(address): # Its looks like a domain if self.isDomain(address): # Its looks like a domain
address_resolved = self.resolveDomain(address) address_resolved = self.resolveDomain(address)
@ -72,4 +69,3 @@ class SiteManagerPlugin(object):
else: # Access by site address else: # Access by site address
site = self.sites.get(address) site = self.sites.get(address)
return site return site

View file

@ -1,14 +1,15 @@
import re import re
from Plugin import PluginManager from Plugin import PluginManager
@PluginManager.registerTo("UiRequest") @PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object): class UiRequestPlugin(object):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
from Site import SiteManager from Site import SiteManager
self.site_manager = SiteManager.site_manager self.site_manager = SiteManager.site_manager
super(UiRequestPlugin, self).__init__(*args, **kwargs) super(UiRequestPlugin, self).__init__(*args, **kwargs)
# Media request # Media request
def actionSiteMedia(self, path): def actionSiteMedia(self, path):
match = re.match("/media/(?P<address>[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", path) match = re.match("/media/(?P<address>[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", path)
@ -16,10 +17,9 @@ class UiRequestPlugin(object):
domain = match.group("address") domain = match.group("address")
address = self.site_manager.resolveDomain(domain) address = self.site_manager.resolveDomain(domain)
if address: if address:
path = "/media/"+address+match.group("inner_path") path = "/media/" + address + match.group("inner_path")
return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output
# Is mediarequest allowed from that referer # Is mediarequest allowed from that referer
def isMediaRequestAllowed(self, site_address, referer): def isMediaRequestAllowed(self, site_address, referer):
referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
@ -37,4 +37,3 @@ class UiRequestPlugin(object):
return True return True
else: # Invalid referer else: # Invalid referer
return False return False

View file

@ -1,5 +1,11 @@
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException import time
import time, json, os, sys, re, socket import json
import os
import sys
import re
import socket
from bitcoinrpc.authproxy import AuthServiceProxy
def publish(): def publish():
@ -9,9 +15,9 @@ def publish():
os.system("python zeronet.py sitePublish %s" % config["site"]) os.system("python zeronet.py sitePublish %s" % config["site"])
def processNameOp(domain, value): def processNameOp(domain, value):
if not value.startswith("{"): return False if not value.startswith("{"):
return False
try: try:
data = json.loads(value) data = json.loads(value)
except Exception, err: except Exception, err:
@ -20,18 +26,22 @@ def processNameOp(domain, value):
if "zeronet" not in data: if "zeronet" not in data:
print "No zeronet in ", data.keys() print "No zeronet in ", data.keys()
return False return False
if type(data["zeronet"]) != type({}): if not isinstance(data["zeronet"], dict):
print "Bad type: ", data["zeronet"] print "Not dict: ", data["zeronet"]
return False
if not re.match("^[a-z]([a-z0-9-]{0,62}[a-z0-9])?$", domain):
print "Invalid domain: ", domain
return False return False
if "slave" in sys.argv: if "slave" in sys.argv:
print "Waiting for master update arrive" print "Waiting for master update arrive"
time.sleep(30) # Wait 30 sec to allow master updater time.sleep(30) # Wait 30 sec to allow master updater
#Note: Requires the file data/names.json to exist and contain "{}" to work # Note: Requires the file data/names.json to exist and contain "{}" to work
names_raw = open(names_path, "rb").read() names_raw = open(names_path, "rb").read()
names = json.loads(names_raw) names = json.loads(names_raw)
for subdomain, address in data["zeronet"].items(): for subdomain, address in data["zeronet"].items():
subdomain = subdomain.lower()
address = re.sub("[^A-Za-z0-9]", "", address) address = re.sub("[^A-Za-z0-9]", "", address)
print subdomain, domain, "->", address print subdomain, domain, "->", address
if subdomain: if subdomain:
@ -48,8 +58,6 @@ def processNameOp(domain, value):
return False return False
def processBlock(block_id): def processBlock(block_id):
print "Processing block #%s..." % block_id print "Processing block #%s..." % block_id
s = time.time() s = time.time()
@ -61,13 +69,13 @@ def processBlock(block_id):
for tx in block["tx"]: for tx in block["tx"]:
try: try:
transaction = rpc.getrawtransaction(tx, 1) transaction = rpc.getrawtransaction(tx, 1)
for vout in transaction.get("vout",[]): for vout in transaction.get("vout", []):
if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]: if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]:
name_op = vout["scriptPubKey"]["nameOp"] name_op = vout["scriptPubKey"]["nameOp"]
updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"]) updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"])
except Exception, err: except Exception, err:
print "Error processing tx #%s %s" % (tx, err) print "Error processing tx #%s %s" % (tx, err)
print "Done in %.3fs (updated %s)." % (time.time()-s, updated) print "Done in %.3fs (updated %s)." % (time.time() - s, updated)
if updated: if updated:
publish() publish()
@ -102,7 +110,7 @@ rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1)
rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1) rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass) rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)
rpc = AuthServiceProxy(rpc_url, timeout=60*5) rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
last_block = int(rpc.getinfo()["blocks"]) last_block = int(rpc.getinfo()["blocks"])
@ -111,19 +119,24 @@ if not config["lastprocessed"]: # Start processing from last block
# Processing skipped blocks # Processing skipped blocks
print "Processing block from #%s to #%s..." % (config["lastprocessed"], last_block) print "Processing block from #%s to #%s..." % (config["lastprocessed"], last_block)
for block_id in range(config["lastprocessed"], last_block+1): for block_id in range(config["lastprocessed"], last_block + 1):
processBlock(block_id) processBlock(block_id)
# processBlock(223911) # Testing zeronetwork.bit # processBlock(223911) # Testing zeronetwork.bit
# processBlock(227052) # Testing brainwallets.bit # processBlock(227052) # Testing brainwallets.bit
# processBlock(236824) # Utf8 domain name (invalid should skip)
# processBlock(236752) # Uppercase domain (invalid should skip)
# processBlock(236870) # Encoded domain (should pass)
# sys.exit(0)
while 1: while 1:
print "Waiting for new block", print "Waiting for new block",
sys.stdout.flush() sys.stdout.flush()
while 1: while 1:
try: try:
rpc = AuthServiceProxy(rpc_url, timeout=60*5) rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
if (int(rpc.getinfo()["blocks"]) > last_block): break if (int(rpc.getinfo()["blocks"]) > last_block):
break
time.sleep(1) time.sleep(1)
rpc.waitforblock() rpc.waitforblock()
print "Found" print "Found"
@ -136,7 +149,7 @@ while 1:
time.sleep(5) time.sleep(5)
last_block = int(rpc.getinfo()["blocks"]) last_block = int(rpc.getinfo()["blocks"])
for block_id in range(config["lastprocessed"]+1, last_block+1): for block_id in range(config["lastprocessed"] + 1, last_block + 1):
processBlock(block_id) processBlock(block_id)
config["lastprocessed"] = last_block config["lastprocessed"] = last_block

View file

@ -19,18 +19,16 @@ def lookupDomain(domain):
try: try:
domain_object = rpc.name_show("d/"+domain) domain_object = rpc.name_show("d/"+domain)
except Exception, err: except:
#domain doesn't exist #domain doesn't exist
print "Error looking up domain - does not exist %s %s" % (domain,err)
return None return None
domain_json = json.loads(domain_object['value']) domain_json = json.loads(domain_object['value'])
try: try:
domain_address = domain_json["zeronet"][subdomain] domain_address = domain_json["zeronet"][subdomain]
except Exception, err: except:
#domain exists but doesn't have any zeronet value #domain exists but doesn't have any zeronet value
print "Error looking up domain - doesn't contain zeronet value %s %s" % (domain,err)
return None return None
return domain_address return domain_address
@ -54,3 +52,30 @@ rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass) rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)
rpc = AuthServiceProxy(rpc_url, timeout=60*5) rpc = AuthServiceProxy(rpc_url, timeout=60*5)
"""
while 1:
print "Waiting for new block",
sys.stdout.flush()
while 1:
try:
rpc = AuthServiceProxy(rpc_url, timeout=60*5)
if (int(rpc.getinfo()["blocks"]) > last_block): break
time.sleep(1)
rpc.waitforblock()
print "Found"
break # Block found
except socket.timeout: # Timeout
print ".",
sys.stdout.flush()
except Exception, err:
print "Exception", err.__class__, err
time.sleep(5)
last_block = int(rpc.getinfo()["blocks"])
for block_id in range(config["lastprocessed"]+1, last_block+1):
processBlock(block_id)
config["lastprocessed"] = last_block
open(config_path, "w").write(json.dumps(config, indent=2))
"""

View file

@ -1,7 +1,11 @@
import argparse, sys, os, time import argparse
import sys
import os
import ConfigParser import ConfigParser
class Config(object): class Config(object):
def __init__(self): def __init__(self):
self.version = "0.3.1" self.version = "0.3.1"
self.rev = 280 self.rev = 280
@ -11,16 +15,13 @@ class Config(object):
self.parseCommandline(argv) # Parse argv self.parseCommandline(argv) # Parse argv
self.setAttributes() self.setAttributes()
def __str__(self): def __str__(self):
return str(self.arguments).replace("Namespace", "Config") # Using argparse str output return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
# Convert string to bool # Convert string to bool
def strToBool(self, v): def strToBool(self, v):
return v.lower() in ("yes", "true", "t", "1") return v.lower() in ("yes", "true", "t", "1")
# Create command line arguments # Create command line arguments
def createArguments(self): def createArguments(self):
# Platform specific # Platform specific
@ -29,7 +30,8 @@ class Config(object):
else: else:
coffeescript = None coffeescript = None
""" Probably fixed """ Probably fixed
if sys.platform.lower().startswith("darwin"): # For some reasons openssl doesnt works on mac yet (https://github.com/HelloZeroNet/ZeroNet/issues/94) if sys.platform.lower().startswith("darwin"):
# For some reasons openssl doesnt works on mac yet (https://github.com/HelloZeroNet/ZeroNet/issues/94)
use_openssl = False use_openssl = False
else: else:
use_openssl = True use_openssl = True
@ -38,7 +40,7 @@ class Config(object):
# Create parser # Create parser
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.register('type','bool', self.strToBool) parser.register('type', 'bool', self.strToBool)
subparsers = parser.add_subparsers(title="Action to perform", dest="action") subparsers = parser.add_subparsers(title="Action to perform", dest="action")
# Main # Main
@ -51,25 +53,29 @@ class Config(object):
action = subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]') action = subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
action.add_argument('address', help='Site to sign') action.add_argument('address', help='Site to sign')
action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?') action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
action.add_argument('--inner_path', help='File you want to sign (default: content.json)', default="content.json", metavar="inner_path") action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
default="content.json", metavar="inner_path")
action.add_argument('--publish', help='Publish site after the signing', action='store_true') action.add_argument('--publish', help='Publish site after the signing', action='store_true')
# SitePublish # SitePublish
action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address') action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
action.add_argument('address', help='Site to publish') action.add_argument('address', help='Site to publish')
action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)', default=None, nargs='?') action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)',
action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)', default=15441, nargs='?') default=None, nargs='?')
action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)', default="content.json", metavar="inner_path") action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)',
default=15441, nargs='?')
action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
default="content.json", metavar="inner_path")
# SiteVerify # SiteVerify
action = subparsers.add_parser("siteVerify", help='Verify site files using sha512: address') action = subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
action.add_argument('address', help='Site to verify') action.add_argument('address', help='Site to verify')
#dbRebuild # dbRebuild
action = subparsers.add_parser("dbRebuild", help='Rebuild site database cache') action = subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
action.add_argument('address', help='Site to rebuild') action.add_argument('address', help='Site to rebuild')
#dbQuery # dbQuery
action = subparsers.add_parser("dbQuery", help='Query site sql cache') action = subparsers.add_parser("dbQuery", help='Query site sql cache')
action.add_argument('address', help='Site to query') action.add_argument('address', help='Site to query')
action.add_argument('query', help='Sql query') action.add_argument('query', help='Sql query')
@ -98,7 +104,6 @@ class Config(object):
action.add_argument('message', help='Message to sign') action.add_argument('message', help='Message to sign')
action.add_argument('privatekey', help='Private key') action.add_argument('privatekey', help='Private key')
# Config parameters # Config parameters
parser.add_argument('--debug', help='Debug mode', action='store_true') parser.add_argument('--debug', help='Debug mode', action='store_true')
parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true') parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
@ -110,26 +115,30 @@ class Config(object):
parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip') parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port') parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*') parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', const="default_browser", metavar='browser_name') parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr', metavar='address') nargs='?', const="default_browser", metavar='browser_name')
parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr',
metavar='address')
parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size') parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size')
parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip') parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
parser.add_argument('--fileserver_port',help='FileServer bind port', default=15441, type=int, metavar='port') parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port')
parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true') parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port') parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
parser.add_argument('--ip_external', help='External ip (tested on start if None)', metavar='ip') parser.add_argument('--ip_external', help='External ip (tested on start if None)', metavar='ip')
parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=use_openssl) parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup',
type='bool', choices=[True, False], default=use_openssl)
parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true') parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory', type='bool', choices=[True, False], default=True) parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
type='bool', choices=[True, False], default=True)
parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript, metavar='executable_path') parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
metavar='executable_path')
parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev)) parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
return parser return parser
# Find arguments specificed for current action # Find arguments specificed for current action
def getActionArguments(self): def getActionArguments(self):
back = {} back = {}
@ -138,8 +147,6 @@ class Config(object):
back[argument.dest] = getattr(self, argument.dest) back[argument.dest] = getattr(self, argument.dest)
return back return back
# Try to find action from sys.argv # Try to find action from sys.argv
def getAction(self, argv): def getAction(self, argv):
actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
@ -150,7 +157,6 @@ class Config(object):
break break
return found_action return found_action
# Parse command line arguments # Parse command line arguments
def parseCommandline(self, argv): def parseCommandline(self, argv):
# Find out if action is specificed on start # Find out if action is specificed on start
@ -159,13 +165,12 @@ class Config(object):
argv.append("main") argv.append("main")
self.arguments = self.parser.parse_args(argv[1:]) self.arguments = self.parser.parse_args(argv[1:])
# Parse config file # Parse config file
def parseConfig(self, argv): def parseConfig(self, argv):
# Find config file path from parameters # Find config file path from parameters
config_file = "zeronet.conf" config_file = "zeronet.conf"
if "--config_file" in argv: if "--config_file" in argv:
config_file = argv[argv.index("--config_file")+1] config_file = argv[argv.index("--config_file") + 1]
# Load config file # Load config file
if os.path.isfile(config_file): if os.path.isfile(config_file):
config = ConfigParser.ConfigParser(allow_no_value=True) config = ConfigParser.ConfigParser(allow_no_value=True)
@ -173,13 +178,12 @@ class Config(object):
for section in config.sections(): for section in config.sections():
for key, val in config.items(section): for key, val in config.items(section):
if section != "global": # If not global prefix key with section if section != "global": # If not global prefix key with section
key = section+"_"+key key = section + "_" + key
if val: argv.insert(1, val) if val:
argv.insert(1, val)
argv.insert(1, "--%s" % key) argv.insert(1, "--%s" % key)
return argv return argv
# Expose arguments as class attributes # Expose arguments as class attributes
def setAttributes(self): def setAttributes(self):
# Set attributes from arguments # Set attributes from arguments

View file

@ -1,13 +1,22 @@
import logging, socket, time import socket
from cStringIO import StringIO import time
import gevent, msgpack
import gevent
import msgpack
from Config import config from Config import config
from Debug import Debug from Debug import Debug
from util import StreamingMsgpack from util import StreamingMsgpack
from Crypt import CryptConnection from Crypt import CryptConnection
class Connection(object): class Connection(object):
__slots__ = ("sock", "sock_wrapped", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id", "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time", "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests") __slots__ = (
"sock", "sock_wrapped", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id",
"handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time",
"last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent",
"last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests"
)
def __init__(self, server, ip, port, sock=None): def __init__(self, server, ip, port, sock=None):
self.sock = sock self.sock = sock
@ -48,41 +57,36 @@ class Connection(object):
self.waiting_requests = {} # Waiting sent requests self.waiting_requests = {} # Waiting sent requests
def updateName(self): def updateName(self):
self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol) self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol)
def __str__(self): def __str__(self):
return self.name return self.name
def __repr__(self): def __repr__(self):
return "<%s>" % self.__str__() return "<%s>" % self.__str__()
def log(self, text): def log(self, text):
self.server.log.debug("%s > %s" % (self.name, text)) self.server.log.debug("%s > %s" % (self.name, text))
# Open connection to peer and wait for handshake # Open connection to peer and wait for handshake
def connect(self): def connect(self):
self.log("Connecting...") self.log("Connecting...")
self.type = "out" self.type = "out"
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((self.ip, int(self.port))) self.sock.connect((self.ip, int(self.port)))
# Implicit SSL in the future # Implicit SSL in the future
#self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa") # self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
#self.sock.do_handshake() # self.sock.do_handshake()
#self.crypt = "tls-rsa" # self.crypt = "tls-rsa"
#self.sock_wrapped = True # self.sock_wrapped = True
# Detect protocol # Detect protocol
self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()}) self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()})
gevent.spawn(self.messageLoop) gevent.spawn(self.messageLoop)
return self.event_connected.get() # Wait for handshake return self.event_connected.get() # Wait for handshake
# Handle incoming connection # Handle incoming connection
def handleIncomingConnection(self, sock): def handleIncomingConnection(self, sock):
self.log("Incoming connection...") self.log("Incoming connection...")
@ -97,7 +101,6 @@ class Connection(object):
self.log("Socket peek error: %s" % Debug.formatException(err)) self.log("Socket peek error: %s" % Debug.formatException(err))
self.messageLoop() self.messageLoop()
# Message loop for connection # Message loop for connection
def messageLoop(self): def messageLoop(self):
if not self.sock: if not self.sock:
@ -110,8 +113,9 @@ class Connection(object):
self.unpacker = msgpack.Unpacker() self.unpacker = msgpack.Unpacker()
try: try:
while True: while True:
buff = self.sock.recv(16*1024) buff = self.sock.recv(16 * 1024)
if not buff: break # Connection closed if not buff:
break # Connection closed
self.last_recv_time = time.time() self.last_recv_time = time.time()
self.incomplete_buff_recv += 1 self.incomplete_buff_recv += 1
self.bytes_recv += len(buff) self.bytes_recv += len(buff)
@ -125,10 +129,10 @@ class Connection(object):
message = None message = None
buff = None buff = None
except Exception, err: except Exception, err:
if not self.closed: self.log("Socket error: %s" % Debug.formatException(err)) if not self.closed:
self.log("Socket error: %s" % Debug.formatException(err))
self.close() # MessageLoop ended, close connection self.close() # MessageLoop ended, close connection
# My handshake info # My handshake info
def handshakeInfo(self): def handshakeInfo(self):
return { return {
@ -142,10 +146,9 @@ class Connection(object):
"crypt": self.crypt "crypt": self.crypt
} }
def setHandshake(self, handshake): def setHandshake(self, handshake):
self.handshake = handshake self.handshake = handshake
if handshake.get("port_opened", None) == False: # Not connectable if handshake.get("port_opened", None) is False: # Not connectable
self.port = 0 self.port = 0
else: else:
self.port = handshake["fileserver_port"] # Set peer fileserver port self.port = handshake["fileserver_port"] # Set peer fileserver port
@ -160,7 +163,6 @@ class Connection(object):
self.crypt = crypt self.crypt = crypt
self.event_connected.set(True) # Mark handshake as done self.event_connected.set(True) # Mark handshake as done
# Handle incoming message # Handle incoming message
def handleMessage(self, message): def handleMessage(self, message):
self.last_message_time = time.time() self.last_message_time = time.time()
@ -169,8 +171,9 @@ class Connection(object):
self.waiting_requests[message["to"]].set(message) # Set the response to event self.waiting_requests[message["to"]].set(message) # Set the response to event
del self.waiting_requests[message["to"]] del self.waiting_requests[message["to"]]
elif message["to"] == 0: # Other peers handshake elif message["to"] == 0: # Other peers handshake
ping = time.time()-self.start_time ping = time.time() - self.start_time
if config.debug_socket: self.log("Handshake response: %s, ping: %s" % (message, ping)) if config.debug_socket:
self.log("Handshake response: %s, ping: %s" % (message, ping))
self.last_ping_delay = ping self.last_ping_delay = ping
# Server switched to crypt, lets do it also if not crypted already # Server switched to crypt, lets do it also if not crypted already
if message.get("crypt") and not self.sock_wrapped: if message.get("crypt") and not self.sock_wrapped:
@ -184,7 +187,8 @@ class Connection(object):
self.log("Unknown response: %s" % message) self.log("Unknown response: %s" % message)
elif message.get("cmd"): # Handhsake request elif message.get("cmd"): # Handhsake request
if message["cmd"] == "handshake": if message["cmd"] == "handshake":
if config.debug_socket: self.log("Handshake request: %s" % message) if config.debug_socket:
self.log("Handshake request: %s" % message)
self.setHandshake(message["params"]) self.setHandshake(message["params"])
data = self.handshakeInfo() data = self.handshakeInfo()
data["cmd"] = "response" data["cmd"] = "response"
@ -199,16 +203,20 @@ class Connection(object):
else: else:
self.server.handleRequest(self, message) self.server.handleRequest(self, message)
else: # Old style response, no req_id definied else: # Old style response, no req_id definied
if config.debug_socket: self.log("Old style response, waiting: %s" % self.waiting_requests.keys()) if config.debug_socket:
self.log("Old style response, waiting: %s" % self.waiting_requests.keys())
last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true
self.waiting_requests[last_req_id].set(message) self.waiting_requests[last_req_id].set(message)
del self.waiting_requests[last_req_id] # Remove from waiting request del self.waiting_requests[last_req_id] # Remove from waiting request
# Send data to connection # Send data to connection
def send(self, message, streaming=False): def send(self, message, streaming=False):
if config.debug_socket: self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (message.get("cmd"), message.get("to"), streaming, message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"), message.get("req_id"))) if config.debug_socket:
self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (
message.get("cmd"), message.get("to"), streaming,
message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"),
message.get("req_id"))
)
self.last_send_time = time.time() self.last_send_time = time.time()
if streaming: if streaming:
bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall) bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall)
@ -224,10 +232,10 @@ class Connection(object):
self.last_sent_time = time.time() self.last_sent_time = time.time()
return True return True
# Create and send a request to peer # Create and send a request to peer
def request(self, cmd, params={}): def request(self, cmd, params={}):
if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10: # Last command sent more than 10 sec ago, timeout # Last command sent more than 10 sec ago, timeout
if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10:
self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time)) self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time))
self.close() self.close()
return False return False
@ -242,7 +250,6 @@ class Connection(object):
res = event.get() # Wait until event solves res = event.get() # Wait until event solves
return res return res
def ping(self): def ping(self):
s = time.time() s = time.time()
response = None response = None
@ -252,20 +259,24 @@ class Connection(object):
except Exception, err: except Exception, err:
self.log("Ping error: %s" % Debug.formatException(err)) self.log("Ping error: %s" % Debug.formatException(err))
if response and "body" in response and response["body"] == "Pong!": if response and "body" in response and response["body"] == "Pong!":
self.last_ping_delay = time.time()-s self.last_ping_delay = time.time() - s
return True return True
else: else:
return False return False
# Close connection # Close connection
def close(self): def close(self):
if self.closed: return False # Already closed if self.closed:
return False # Already closed
self.closed = True self.closed = True
self.connected = False self.connected = False
self.event_connected.set(False) self.event_connected.set(False)
if config.debug_socket: self.log("Closing connection, waiting_requests: %s, buff: %s..." % (len(self.waiting_requests), self.incomplete_buff_recv)) if config.debug_socket:
self.log(
"Closing connection, waiting_requests: %s, buff: %s..." %
(len(self.waiting_requests), self.incomplete_buff_recv)
)
for request in self.waiting_requests.values(): # Mark pending requests failed for request in self.waiting_requests.values(): # Mark pending requests failed
request.set(False) request.set(False)
self.waiting_requests = {} self.waiting_requests = {}
@ -275,7 +286,8 @@ class Connection(object):
self.sock.shutdown(gevent.socket.SHUT_WR) self.sock.shutdown(gevent.socket.SHUT_WR)
self.sock.close() self.sock.close()
except Exception, err: except Exception, err:
if config.debug_socket: self.log("Close error: %s" % err) if config.debug_socket:
self.log("Close error: %s" % err)
# Little cleanup # Little cleanup
self.sock = None self.sock = None

View file

@ -43,14 +43,16 @@ class ConnectionServer:
# Check msgpack version # Check msgpack version
if msgpack.version[0] == 0 and msgpack.version[1] < 4: if msgpack.version[0] == 0 and msgpack.version[1] < 4:
self.log.error( self.log.error(
"Error: Too old msgpack version: %s (>0.4.0 required), please update using `sudo pip install msgpack-python --upgrade`" % "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo pip install msgpack-python --upgrade`" %
str(msgpack.version) str(msgpack.version)
) )
sys.exit(0) sys.exit(0)
if port: # Listen server on a port if port: # Listen server on a port
self.pool = Pool(1000) # do not accept more than 1000 connections self.pool = Pool(1000) # do not accept more than 1000 connections
self.stream_server = StreamServer((ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100) self.stream_server = StreamServer(
(ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100
)
if request_handler: if request_handler:
self.handleRequest = request_handler self.handleRequest = request_handler
@ -152,25 +154,32 @@ class ConnectionServer:
for connection in self.connections[:]: # Make a copy for connection in self.connections[:]: # Make a copy
idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time) idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
if connection.unpacker and idle > 30: # Delete the unpacker if not needed if connection.unpacker and idle > 30:
# Delete the unpacker if not needed
del connection.unpacker del connection.unpacker
connection.unpacker = None connection.unpacker = None
connection.log("Unpacker deleted") connection.log("Unpacker deleted")
if idle > 60 * 60: # Wake up after 1h if idle > 60 * 60:
# Wake up after 1h
connection.log("[Cleanup] After wakeup, idle: %s" % idle) connection.log("[Cleanup] After wakeup, idle: %s" % idle)
connection.close() connection.close()
elif idle > 20 * 60 and connection.last_send_time < time.time() - 10: # Idle more than 20 min and we not send request in last 10 sec elif idle > 20 * 60 and connection.last_send_time < time.time() - 10:
# Idle more than 20 min and we not send request in last 10 sec
if not connection.ping(): # send ping request if not connection.ping(): # send ping request
connection.close() connection.close()
elif idle > 10 and connection.incomplete_buff_recv > 0: # Incompelte data with more than 10 sec idle elif idle > 10 and connection.incomplete_buff_recv > 0:
# Incompelte data with more than 10 sec idle
connection.log("[Cleanup] Connection buff stalled") connection.log("[Cleanup] Connection buff stalled")
connection.close() connection.close()
elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10: # Sent command and no response in 10 sec elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10:
connection.log("[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time)) # Sent command and no response in 10 sec
connection.log(
"[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time)
)
connection.close() connection.close()
elif idle > 60 and connection.protocol == "?": # No connection after 1 min elif idle > 60 and connection.protocol == "?": # No connection after 1 min

View file

@ -108,7 +108,7 @@ class ContentManager(object):
return total_size return total_size
# Find the file info line from self.contents # Find the file info line from self.contents
# Return: { "sha512": "c29d73d30ee8c9c1b5600e8a84447a6de15a3c3db6869aca4a2a578c1721f518", "size": 41 , "content_inner_path": "content.json"} # Return: { "sha512": "c29d73d...21f518", "size": 41 , "content_inner_path": "content.json"}
def getFileInfo(self, inner_path): def getFileInfo(self, inner_path):
dirs = inner_path.split("/") # Parent dirs of content.json dirs = inner_path.split("/") # Parent dirs of content.json
inner_path_parts = [dirs.pop()] # Filename relative to content.json inner_path_parts = [dirs.pop()] # Filename relative to content.json
@ -279,11 +279,17 @@ class ContentManager(object):
privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey) privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
valid_signers = self.getValidSigners(inner_path, new_content) valid_signers = self.getValidSigners(inner_path, new_content)
if privatekey_address not in valid_signers: if privatekey_address not in valid_signers:
return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address)) return self.log.error(
"Private key invalid! Valid signers: %s, Private key address: %s" %
(valid_signers, privatekey_address)
)
self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers)) self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))
if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key sign the valid signers if inner_path == "content.json" and privatekey_address == self.site.address:
new_content["signers_sign"] = CryptBitcoin.sign("%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey) # If signing using the root key, then sign the valid signers
new_content["signers_sign"] = CryptBitcoin.sign(
"%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey
)
if not new_content["signers_sign"]: if not new_content["signers_sign"]:
self.log.info("Old style address, signers_sign is none") self.log.info("Old style address, signers_sign is none")
@ -352,7 +358,9 @@ class ContentManager(object):
if not cert_address: # Cert signer not allowed if not cert_address: # Cert signer not allowed
self.log.error("Invalid cert signer: %s" % domain) self.log.error("Invalid cert signer: %s" % domain)
return False return False
return CryptBitcoin.verify("%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"]) return CryptBitcoin.verify(
"%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"]
)
# Checks if the content.json content is valid # Checks if the content.json content is valid
# Return: True or False # Return: True or False
@ -414,10 +422,13 @@ class ContentManager(object):
if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json
return None return None
elif old_content["modified"] > new_content["modified"]: # We have newer elif old_content["modified"] > new_content["modified"]: # We have newer
self.log.debug("We have newer %s (Our: %s, Sent: %s)" % (inner_path, old_content["modified"], new_content["modified"])) self.log.debug(
"We have newer %s (Our: %s, Sent: %s)" %
(inner_path, old_content["modified"], new_content["modified"])
)
gevent.spawn(self.site.publish, inner_path=inner_path) # Try to fix the broken peers gevent.spawn(self.site.publish, inner_path=inner_path) # Try to fix the broken peers
return False return False
if new_content["modified"] > time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day window) if new_content["modified"] > time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day+)
self.log.error("%s modify is in the future!" % inner_path) self.log.error("%s modify is in the future!" % inner_path)
return False return False
# Check sign # Check sign
@ -437,7 +448,9 @@ class ContentManager(object):
signs_required = self.getSignsRequired(inner_path, new_content) signs_required = self.getSignsRequired(inner_path, new_content)
if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json
if not CryptBitcoin.verify("%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]): if not CryptBitcoin.verify(
"%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]
):
self.log.error("%s invalid signers_sign!" % inner_path) self.log.error("%s invalid signers_sign!" % inner_path)
return False return False
@ -470,8 +483,10 @@ class ContentManager(object):
else: else:
hash_valid = False hash_valid = False
if file_info["size"] != file.tell(): if file_info["size"] != file.tell():
self.log.error("%s file size does not match %s <> %s, Hash: %s" % (inner_path, file.tell(), self.log.error(
file_info["size"], hash_valid)) "%s file size does not match %s <> %s, Hash: %s" %
(inner_path, file.tell(), file_info["size"], hash_valid)
)
return False return False
return hash_valid return hash_valid
@ -493,7 +508,9 @@ def testSign():
from Site import Site from Site import Site
site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
content_manager = ContentManager(site) content_manager = ContentManager(site)
content_manager.sign("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR") content_manager.sign(
"data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR"
)
def testVerify(): def testVerify():
@ -504,10 +521,14 @@ def testVerify():
print "Loaded contents:", content_manager.contents.keys() print "Loaded contents:", content_manager.contents.keys()
file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json")) file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json"))
print "content.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False) print "content.json valid:", content_manager.verifyFile(
"data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False
)
file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json")) file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json"))
print "messages.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False) print "messages.json valid:", content_manager.verifyFile(
"data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False
)
def testInfo(): def testInfo():

View file

@ -1,11 +1,13 @@
import logging
from lib.BitcoinECC import BitcoinECC from lib.BitcoinECC import BitcoinECC
from lib.pybitcointools import bitcoin as btctools from lib.pybitcointools import bitcoin as btctools
import logging
from Config import config from Config import config
# Try to load openssl # Try to load openssl
try: try:
if not config.use_openssl: raise Exception("Disabled by config") if not config.use_openssl:
raise Exception("Disabled by config")
from lib.opensslVerify import opensslVerify from lib.opensslVerify import opensslVerify
logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version) logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version)
except Exception, err: except Exception, err:
@ -37,12 +39,13 @@ def privatekeyToAddress(privatekey): # Return address from private key
else: else:
try: try:
return btctools.privkey_to_address(privatekey) return btctools.privkey_to_address(privatekey)
except Exception, err: # Invalid privatekey except Exception: # Invalid privatekey
return False return False
def sign(data, privatekey): # Return sign to data using private key def sign(data, privatekey): # Return sign to data using private key
if privatekey.startswith("23") and len(privatekey) > 52: return None # Old style private key not supported if privatekey.startswith("23") and len(privatekey) > 52:
return None # Old style private key not supported
sign = btctools.ecdsa_sign(data, privatekey) sign = btctools.ecdsa_sign(data, privatekey)
return sign return sign

View file

@ -4,7 +4,6 @@ import os
import ssl import ssl
from Config import config from Config import config
import gevent
from util import SslPatch from util import SslPatch
@ -19,7 +18,6 @@ class CryptConnectionManager:
self.crypt_supported = [] # Supported cryptos self.crypt_supported = [] # Supported cryptos
# Select crypt that supported by both sides # Select crypt that supported by both sides
# Return: Name of the crypto # Return: Name of the crypto
def selectCrypt(self, client_supported): def selectCrypt(self, client_supported):
@ -28,34 +26,35 @@ class CryptConnectionManager:
return crypt return crypt
return False return False
# Wrap socket for crypt # Wrap socket for crypt
# Return: wrapped socket # Return: wrapped socket
def wrapSocket(self, sock, crypt, server=False): def wrapSocket(self, sock, crypt, server=False):
if crypt == "tls-rsa": if crypt == "tls-rsa":
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:"
ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
if server: if server:
return ssl.wrap_socket(sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir, certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers) return ssl.wrap_socket(
sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir,
certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers)
else: else:
return ssl.wrap_socket(sock, ciphers=ciphers) return ssl.wrap_socket(sock, ciphers=ciphers)
else: else:
return sock return sock
def removeCerts(self): def removeCerts(self):
for file_name in ["cert-rsa.pem", "key-rsa.pem"]: for file_name in ["cert-rsa.pem", "key-rsa.pem"]:
file_path = "%s/%s" % (config.data_dir, file_name) file_path = "%s/%s" % (config.data_dir, file_name)
if os.path.isfile(file_path): os.unlink(file_path) if os.path.isfile(file_path):
os.unlink(file_path)
# Load and create cert files is necessary # Load and create cert files is necessary
def loadCerts(self): def loadCerts(self):
if config.disable_encryption: return False if config.disable_encryption:
return False
if self.loadSslRsaCert(): if self.loadSslRsaCert():
self.crypt_supported.append("tls-rsa") self.crypt_supported.append("tls-rsa")
# Try to create RSA server cert + sign for connection encryption # Try to create RSA server cert + sign for connection encryption
# Return: True on success # Return: True on success
def loadSslRsaCert(self): def loadSslRsaCert(self):
@ -65,7 +64,9 @@ class CryptConnectionManager:
return True # Files already exits return True # Files already exits
back = subprocess.Popen( back = subprocess.Popen(
"%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s/key-rsa.pem -out %s/cert-rsa.pem -nodes -config %s" % (self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]), "%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s/key-rsa.pem -out %s/cert-rsa.pem -nodes -config %s" % (
self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]
),
shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
).stdout.read().strip() ).stdout.read().strip()
logging.debug("Generating RSA cert and key PEM files...%s" % back) logging.debug("Generating RSA cert and key PEM files...%s" % back)
@ -76,7 +77,6 @@ class CryptConnectionManager:
logging.error("RSA ECC SSL cert generation failed, cert or key files not exits.") logging.error("RSA ECC SSL cert generation failed, cert or key files not exits.")
return False return False
# Not used yet: Missing on some platform # Not used yet: Missing on some platform
def createSslEccCert(self): def createSslEccCert(self):
return False return False
@ -91,7 +91,8 @@ class CryptConnectionManager:
# Create ECC cert # Create ECC cert
back = subprocess.Popen( back = subprocess.Popen(
"%s req -new -key %s/key-ecc.pem -x509 -nodes -out %s/cert-ecc.pem -config %s" % (self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]), "%s req -new -key %s/key-ecc.pem -x509 -nodes -out %s/cert-ecc.pem -config %s" % (
self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]),
shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
).stdout.read().strip() ).stdout.read().strip()
self.log.debug("Generating ECC cert PEM file...%s" % back) self.log.debug("Generating ECC cert PEM file...%s" % back)

View file

@ -1,5 +1,6 @@
import hashlib import hashlib
def sha1sum(file, blocksize=65536): def sha1sum(file, blocksize=65536):
if hasattr(file, "endswith"): # Its a string open it if hasattr(file, "endswith"): # Its a string open it
file = open(file, "rb") file = open(file, "rb")
@ -29,8 +30,8 @@ if __name__ == "__main__":
import time import time
s = time.time() s = time.time()
print sha1sum(open("F:\\Temp\\bigfile")), print sha1sum(open("F:\\Temp\\bigfile")),
print time.time()-s print time.time() - s
s = time.time() s = time.time()
print sha512sum(open("F:\\Temp\\bigfile")), print sha512sum(open("F:\\Temp\\bigfile")),
print time.time()-s print time.time() - s

View file

@ -1,10 +1,18 @@
import sqlite3, json, time, logging, re, os import sqlite3
import json
import time
import logging
import re
import os
from DbCursor import DbCursor from DbCursor import DbCursor
class Db: class Db:
def __init__(self, schema, db_path): def __init__(self, schema, db_path):
self.db_path = db_path self.db_path = db_path
self.db_dir = os.path.dirname(db_path)+"/" self.db_dir = os.path.dirname(db_path) + "/"
self.schema = schema self.schema = schema
self.schema["version"] = self.schema.get("version", 1) self.schema["version"] = self.schema.get("version", 1)
self.conn = None self.conn = None
@ -15,7 +23,6 @@ class Db:
self.query_stats = {} self.query_stats = {}
self.db_keyvalues = {} self.db_keyvalues = {}
def connect(self): def connect(self):
self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version)) self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version))
if not os.path.isdir(self.db_dir): # Directory not exist yet if not os.path.isdir(self.db_dir): # Directory not exist yet
@ -32,26 +39,26 @@ class Db:
self.cur.execute("PRAGMA journal_mode = MEMORY") self.cur.execute("PRAGMA journal_mode = MEMORY")
self.cur.execute("PRAGMA synchronous = OFF") self.cur.execute("PRAGMA synchronous = OFF")
# Execute query using dbcursor # Execute query using dbcursor
def execute(self, query, params = None): def execute(self, query, params=None):
if not self.conn: self.connect() if not self.conn:
self.connect()
return self.cur.execute(query, params) return self.cur.execute(query, params)
def close(self): def close(self):
self.log.debug("Closing") self.log.debug("Closing")
if self.cur: self.cur.close() if self.cur:
if self.conn: self.conn.close() self.cur.close()
if self.conn:
self.conn.close()
# Gets a cursor object to database # Gets a cursor object to database
# Return: Cursor class # Return: Cursor class
def getCursor(self): def getCursor(self):
if not self.conn: self.connect() if not self.conn:
self.connect()
return DbCursor(self.conn, self) return DbCursor(self.conn, self)
# Get the table version # Get the table version
# Return: Table version or None if not exist # Return: Table version or None if not exist
def getTableVersion(self, table_name): def getTableVersion(self, table_name):
@ -74,8 +81,6 @@ class Db:
return self.db_keyvalues.get("table.%s.version" % table_name, 0) return self.db_keyvalues.get("table.%s.version" % table_name, 0)
# Check Db tables # Check Db tables
# Return: <list> Changed table names # Return: <list> Changed table names
def checkTables(self): def checkTables(self):
@ -92,10 +97,11 @@ class Db:
["key", "TEXT"], ["key", "TEXT"],
["value", "INTEGER"], ["value", "INTEGER"],
["json_id", "INTEGER REFERENCES json (json_id)"], ["json_id", "INTEGER REFERENCES json (json_id)"],
],[ ], [
"CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)" "CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
], version=self.schema["version"]) ], version=self.schema["version"])
if changed: changed_tables.append("keyvalue") if changed:
changed_tables.append("keyvalue")
# Check json table # Check json table
if self.schema["version"] == 1: if self.schema["version"] == 1:
@ -113,23 +119,28 @@ class Db:
], [ ], [
"CREATE UNIQUE INDEX path ON json(directory, file_name)" "CREATE UNIQUE INDEX path ON json(directory, file_name)"
], version=self.schema["version"]) ], version=self.schema["version"])
if changed: changed_tables.append("json") if changed:
changed_tables.append("json")
# Check schema tables # Check schema tables
for table_name, table_settings in self.schema["tables"].items(): for table_name, table_settings in self.schema["tables"].items():
changed = cur.needTable(table_name, table_settings["cols"], table_settings["indexes"], version=table_settings["schema_changed"]) changed = cur.needTable(
if changed: changed_tables.append(table_name) table_name, table_settings["cols"],
table_settings["indexes"], version=table_settings["schema_changed"]
)
if changed:
changed_tables.append(table_name)
cur.execute("COMMIT") cur.execute("COMMIT")
self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time()-s, changed_tables)) self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables))
return changed_tables return changed_tables
# Load json file to db # Load json file to db
# Return: True if matched # Return: True if matched
def loadJson(self, file_path, file = None, cur = None): def loadJson(self, file_path, file=None, cur=None):
if not file_path.startswith(self.db_dir): return False # Not from the db dir: Skipping if not file_path.startswith(self.db_dir):
return False # Not from the db dir: Skipping
relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path realative to db file relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path realative to db file
# Check if filename matches any of mappings in schema # Check if filename matches any of mappings in schema
matched_maps = [] matched_maps = []
@ -138,10 +149,12 @@ class Db:
matched_maps.append(map_settings) matched_maps.append(map_settings)
# No match found for the file # No match found for the file
if not matched_maps: return False if not matched_maps:
return False
# Load the json file # Load the json file
if not file: file = open(file_path) if not file:
file = open(file_path)
data = json.load(file) data = json.load(file)
# No cursor specificed # No cursor specificed
@ -170,13 +183,18 @@ class Db:
for key in map["to_keyvalue"]: for key in map["to_keyvalue"]:
if key not in current_keyvalue: # Keyvalue not exist yet in the db if key not in current_keyvalue: # Keyvalue not exist yet in the db
cur.execute("INSERT INTO keyvalue ?", cur.execute(
"INSERT INTO keyvalue ?",
{"key": key, "value": data.get(key), "json_id": json_row["json_id"]} {"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
) )
elif data.get(key) != current_keyvalue[key]: # Keyvalue different value elif data.get(key) != current_keyvalue[key]: # Keyvalue different value
cur.execute("UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?", (data.get(key), current_keyvalue_id[key])) cur.execute(
"UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?",
(data.get(key), current_keyvalue_id[key])
)
"""for key in map.get("to_keyvalue", []): """
for key in map.get("to_keyvalue", []):
cur.execute("INSERT OR REPLACE INTO keyvalue ?", cur.execute("INSERT OR REPLACE INTO keyvalue ?",
{"key": key, "value": data.get(key), "json_id": json_row["json_id"]} {"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
) )
@ -201,19 +219,21 @@ class Db:
cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],)) cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
if node not in data: continue if node not in data:
continue
table_schema = self.schema["tables"][table_name]
if key_col: # Map as dict if key_col: # Map as dict
for key, val in data[node].iteritems(): for key, val in data[node].iteritems():
if val_col: # Single value if val_col: # Single value
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, cur.execute(
{ key_col: key, val_col: val, "json_id": json_row["json_id"] } "INSERT OR REPLACE INTO %s ?" % table_name,
{key_col: key, val_col: val, "json_id": json_row["json_id"]}
) )
else: # Multi value else: # Multi value
if isinstance(val, dict): # Single row if isinstance(val, dict): # Single row
row = val row = val
if import_cols: row = { key: row[key] for key in import_cols } # Filter row by import_cols if import_cols:
row = {key: row[key] for key in import_cols} # Filter row by import_cols
row[key_col] = key row[key_col] = key
# Replace in value if necessary # Replace in value if necessary
if replaces: if replaces:
@ -234,7 +254,8 @@ class Db:
row["json_id"] = json_row["json_id"] row["json_id"] = json_row["json_id"]
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row) cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
if commit_after_done: cur.execute("COMMIT") if commit_after_done:
cur.execute("COMMIT")
return True return True
@ -244,7 +265,7 @@ if __name__ == "__main__":
logging.getLogger('').setLevel(logging.DEBUG) logging.getLogger('').setLevel(logging.DEBUG)
logging.getLogger('').addHandler(console_log) logging.getLogger('').addHandler(console_log)
console_log.setLevel(logging.DEBUG) console_log.setLevel(logging.DEBUG)
dbjson = DbJson(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db") dbjson = Db(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db")
dbjson.collect_stats = True dbjson.collect_stats = True
dbjson.checkTables() dbjson.checkTables()
cur = dbjson.getCursor() cur = dbjson.getCursor()
@ -254,10 +275,9 @@ if __name__ == "__main__":
for user_dir in os.listdir("data/users"): for user_dir in os.listdir("data/users"):
if os.path.isdir("data/users/%s" % user_dir): if os.path.isdir("data/users/%s" % user_dir):
dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur) dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur)
#print ".", # print ".",
cur.logging = True cur.logging = True
cur.execute("COMMIT") cur.execute("COMMIT")
print "Done in %.3fs" % (time.time()-s) print "Done in %.3fs" % (time.time() - s)
for query, stats in sorted(dbjson.query_stats.items()): for query, stats in sorted(dbjson.query_stats.items()):
print "-", query, stats print "-", query, stats

View file

@ -1,21 +1,26 @@
import time, re import time
import re
# Special sqlite cursor # Special sqlite cursor
class DbCursor: class DbCursor:
def __init__(self, conn, db): def __init__(self, conn, db):
self.conn = conn self.conn = conn
self.db = db self.db = db
self.cursor = conn.cursor() self.cursor = conn.cursor()
self.logging = True self.logging = False
def execute(self, query, params=None): def execute(self, query, params=None):
if isinstance(params, dict): # Make easier select and insert by allowing dict params if isinstance(params, dict): # Make easier select and insert by allowing dict params
if query.startswith("SELECT") or query.startswith("DELETE"): # Convert param dict to SELECT * FROM table WHERE key = ?, key2 = ? format if query.startswith("SELECT") or query.startswith("DELETE"):
wheres = "AND ".join([key+" = ?" for key in params]) # Convert param dict to SELECT * FROM table WHERE key = ?, key2 = ? format
wheres = "AND ".join([key + " = ?" for key in params])
query = query.replace("?", wheres) query = query.replace("?", wheres)
params = params.values() params = params.values()
else: # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format else:
# Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
keys = ", ".join(params.keys()) keys = ", ".join(params.keys())
values = ", ".join(['?' for key in params.keys()]) values = ", ".join(['?' for key in params.keys()])
query = query.replace("?", "(%s) VALUES (%s)" % (keys, values)) query = query.replace("?", "(%s) VALUES (%s)" % (keys, values))
@ -27,22 +32,22 @@ class DbCursor:
if params: # Query has parameters if params: # Query has parameters
res = self.cursor.execute(query, params) res = self.cursor.execute(query, params)
if self.logging: if self.logging:
self.db.log.debug((query.replace("?", "%s") % params)+" (Done in %.4f)" % (time.time()-s)) self.db.log.debug((query.replace("?", "%s") % params) + " (Done in %.4f)" % (time.time() - s))
else: else:
res = self.cursor.execute(query) res = self.cursor.execute(query)
if self.logging: self.db.log.debug(query+" (Done in %.4f)" % (time.time()-s)) if self.logging:
self.db.log.debug(query + " (Done in %.4f)" % (time.time() - s))
# Log query stats # Log query stats
if self.db.collect_stats: if self.db.collect_stats:
if query not in self.db.query_stats: if query not in self.db.query_stats:
self.db.query_stats[query] = {"call": 0, "time": 0.0} self.db.query_stats[query] = {"call": 0, "time": 0.0}
self.db.query_stats[query]["call"] += 1 self.db.query_stats[query]["call"] += 1
self.db.query_stats[query]["time"] += time.time()-s self.db.query_stats[query]["time"] += time.time() - s
# if query == "BEGIN": self.logging = False # Turn logging off on transaction commit # if query == "BEGIN": self.logging = False # Turn logging off on transaction commit
return res return res
# Create new table # Create new table
# Return: True on success # Return: True on success
def createTable(self, table, cols): def createTable(self, table, cols):
@ -65,8 +70,6 @@ class DbCursor:
self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions))) self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions)))
return True return True
# Create indexes on table # Create indexes on table
# Return: True on success # Return: True on success
def createIndexes(self, table, indexes): def createIndexes(self, table, indexes):
@ -74,7 +77,6 @@ class DbCursor:
for index in indexes: for index in indexes:
self.execute(index) self.execute(index)
# Create table if not exist # Create table if not exist
# Return: True if updated # Return: True if updated
def needTable(self, table, cols, indexes=None, version=1): def needTable(self, table, cols, indexes=None, version=1):
@ -82,15 +84,16 @@ class DbCursor:
if int(current_version) < int(version): # Table need update or not extis if int(current_version) < int(version): # Table need update or not extis
self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version)) self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version))
self.createTable(table, cols) self.createTable(table, cols)
if indexes: self.createIndexes(table, indexes) if indexes:
self.execute("INSERT OR REPLACE INTO keyvalue ?", self.createIndexes(table, indexes)
self.execute(
"INSERT OR REPLACE INTO keyvalue ?",
{"json_id": 0, "key": "table.%s.version" % table, "value": version} {"json_id": 0, "key": "table.%s.version" % table, "value": version}
) )
return True return True
else: # Not changed else: # Not changed
return False return False
# Get or create a row for json file # Get or create a row for json file
# Return: The database row # Return: The database row
def getJsonRow(self, file_path): def getJsonRow(self, file_path):

View file

@ -1,4 +1,7 @@
import sys, os, traceback import sys
import os
import traceback
# Non fatal exception # Non fatal exception
class Notify(Exception): class Notify(Exception):
@ -10,9 +13,11 @@ class Notify(Exception):
def formatException(err=None): def formatException(err=None):
if type(err) == Notify: return err if type(err) == Notify:
return err
exc_type, exc_obj, exc_tb = sys.exc_info() exc_type, exc_obj, exc_tb = sys.exc_info()
if not err: err = exc_obj.message if not err:
err = exc_obj.message
tb = [] tb = []
for frame in traceback.extract_tb(exc_tb): for frame in traceback.extract_tb(exc_tb):
path, line, function, text = frame path, line, function, text = frame
@ -22,9 +27,8 @@ def formatException(err=None):
if __name__ == "__main__": if __name__ == "__main__":
try: try:
print 1/0 print 1 / 0
except Exception, err: except Exception, err:
print type(err).__name__ print type(err).__name__
print "1/0 error: %s" % formatException(err) print "1/0 error: %s" % formatException(err)

View file

@ -1,8 +1,13 @@
import gevent, sys, logging import sys
import logging
import gevent
from Config import config from Config import config
last_error = None last_error = None
# Store last error, ignore notify, allow manual error logging # Store last error, ignore notify, allow manual error logging
def handleError(*args): def handleError(*args):
global last_error global last_error
@ -11,7 +16,8 @@ def handleError(*args):
silent = True silent = True
else: else:
silent = False silent = False
if args[0].__name__ != "Notify": last_error = args if args[0].__name__ != "Notify":
last_error = args
if not silent and args[0].__name__ != "Notify": if not silent and args[0].__name__ != "Notify":
logging.exception("Unhandled exception") logging.exception("Unhandled exception")
sys.__excepthook__(*args) sys.__excepthook__(*args)
@ -25,6 +31,8 @@ def handleErrorNotify(*args):
OriginalGreenlet = gevent.Greenlet OriginalGreenlet = gevent.Greenlet
class ErrorhookedGreenlet(OriginalGreenlet): class ErrorhookedGreenlet(OriginalGreenlet):
def _report_error(self, exc_info): def _report_error(self, exc_info):
sys.excepthook(exc_info[0], exc_info[1], exc_info[2]) sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
@ -39,8 +47,10 @@ reload(gevent)
if __name__ == "__main__": if __name__ == "__main__":
import time import time
from gevent import monkey; monkey.patch_all(thread=False, ssl=False) from gevent import monkey
monkey.patch_all(thread=False, ssl=False)
import Debug import Debug
def sleeper(): def sleeper():
print "started" print "started"
time.sleep(3) time.sleep(3)
@ -52,4 +62,3 @@ if __name__ == "__main__":
thread1.throw(Exception("Hello")) thread1.throw(Exception("Hello"))
thread2.throw(Debug.Notify("Throw")) thread2.throw(Debug.Notify("Throw"))
print "killed" print "killed"

View file

@ -1,13 +1,20 @@
import os, subprocess, re, logging, time import os
import subprocess
import re
import logging
import time
from Config import config from Config import config
# Find files with extension in path # Find files with extension in path
def findfiles(path, find_ext): def findfiles(path, find_ext):
for root, dirs, files in os.walk(path, topdown = False): for root, dirs, files in os.walk(path, topdown=False):
for file in sorted(files): for file in sorted(files):
file_path = root+"/"+file file_path = root + "/" + file
file_ext = file.split(".")[-1] file_ext = file.split(".")[-1]
if file_ext in find_ext and not file.startswith("all."): yield file_path.replace("\\", "/") if file_ext in find_ext and not file.startswith("all."):
yield file_path.replace("\\", "/")
# Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features # Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features
@ -26,12 +33,12 @@ def merge(merged_path):
else: else:
merged_mtime = 0 merged_mtime = 0
changed = {} changed = {}
for file_path in findfiles(merge_dir, find_ext): for file_path in findfiles(merge_dir, find_ext):
if os.path.getmtime(file_path) > merged_mtime: if os.path.getmtime(file_path) > merged_mtime:
changed[file_path] = True changed[file_path] = True
if not changed: return # Assets not changed, nothing to do if not changed:
return # Assets not changed, nothing to do
if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile
merged_old = open(merged_path, "rb").read().decode("utf8") merged_old = open(merged_path, "rb").read().decode("utf8")
@ -53,13 +60,16 @@ def merge(merged_path):
s = time.time() s = time.time()
compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE) compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
out = compiler.stdout.read().decode("utf8") out = compiler.stdout.read().decode("utf8")
logging.debug("Running: %s (Done in %.2fs)" % (command, time.time()-s)) logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
if out and out.startswith("("): if out and out.startswith("("):
parts.append(out) parts.append(out)
else: else:
error = out error = out
logging.error("%s Compile error: %s" % (file_path, error)) logging.error("%s Compile error: %s" % (file_path, error))
parts.append("alert('%s compile error: %s');" % (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n") ) ) parts.append(
"alert('%s compile error: %s');" %
(file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n"))
)
else: # Not changed use the old_part else: # Not changed use the old_part
parts.append(old_parts[file_path]) parts.append(old_parts[file_path])
else: # Add to parts else: # Add to parts
@ -71,7 +81,7 @@ def merge(merged_path):
merged = cssvendor.prefix(merged) merged = cssvendor.prefix(merged)
merged = merged.replace("\r", "") merged = merged.replace("\r", "")
open(merged_path, "wb").write(merged.encode("utf8")) open(merged_path, "wb").write(merged.encode("utf8"))
logging.debug("Merged %s (%.2fs)" % (merged_path, time.time()-s_total)) logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
if __name__ == "__main__": if __name__ == "__main__":

View file

@ -1,5 +1,7 @@
import logging, os, sys, time import logging
import time
import threading import threading
from Config import config from Config import config
if config.debug: # Only load pyfilesytem if using debug mode if config.debug: # Only load pyfilesytem if using debug mode
@ -13,8 +15,10 @@ if config.debug: # Only load pyfilesytem if using debug mode
else: else:
pyfilesystem = False pyfilesystem = False
class DebugReloader: class DebugReloader:
def __init__ (self, callback, directory = "/"):
def __init__(self, callback, directory="/"):
self.last_chaged = 0 self.last_chaged = 0
if pyfilesystem: if pyfilesystem:
self.directory = directory self.directory = directory
@ -24,7 +28,6 @@ class DebugReloader:
thread.daemon = True thread.daemon = True
thread.start() thread.start()
def addWatcher(self, recursive=True): def addWatcher(self, recursive=True):
try: try:
time.sleep(1) # Wait for .pyc compiles time.sleep(1) # Wait for .pyc compiles
@ -33,10 +36,9 @@ class DebugReloader:
except Exception, err: except Exception, err:
print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err
def changed(self, evt): def changed(self, evt):
if not evt.path or "%s/" % config.data_dir in evt.path or evt.path.endswith("pyc") or time.time()-self.last_chaged < 1: return False # Ignore *.pyc changes and no reload within 1 sec if not evt.path or "%s/" % config.data_dir in evt.path or evt.path.endswith("pyc") or time.time() - self.last_chaged < 1:
#logging.debug("Changed: %s" % evt) return False # Ignore *.pyc changes and no reload within 1 sec
time.sleep(0.1) # Wait for lock release time.sleep(0.1) # Wait for lock release
self.callback() self.callback()
self.last_chaged = time.time() self.last_chaged = time.time()

View file

@ -11,7 +11,8 @@ from Debug import Debug
from Config import config from Config import config
from util import RateLimit, StreamingMsgpack from util import RateLimit, StreamingMsgpack
FILE_BUFF = 1024*512 FILE_BUFF = 1024 * 512
# Request from me # Request from me
class FileRequest(object): class FileRequest(object):
@ -73,13 +74,16 @@ class FileRequest(object):
self.response({"error": "Unknown site"}) self.response({"error": "Unknown site"})
return False return False
if site.settings["own"] and params["inner_path"].endswith("content.json"): if site.settings["own"] and params["inner_path"].endswith("content.json"):
self.log.debug("Someone trying to push a file to own site %s, reload local %s first" % (site.address, params["inner_path"])) self.log.debug(
"Someone trying to push a file to own site %s, reload local %s first" %
(site.address, params["inner_path"])
)
changed = site.content_manager.loadContent(params["inner_path"], add_bad_files=False) changed = site.content_manager.loadContent(params["inner_path"], add_bad_files=False)
if changed: # Content.json changed locally if changed: # Content.json changed locally
site.settings["size"] = site.content_manager.getTotalSize() # Update site size site.settings["size"] = site.content_manager.getTotalSize() # Update site size
buff = StringIO(params["body"]) buff = StringIO(params["body"])
valid = site.content_manager.verifyFile(params["inner_path"], buff) valid = site.content_manager.verifyFile(params["inner_path"], buff)
if valid == True: # Valid and changed if valid is True: # Valid and changed
self.log.info("Update for %s looks valid, saving..." % params["inner_path"]) self.log.info("Update for %s looks valid, saving..." % params["inner_path"])
buff.seek(0) buff.seek(0)
site.storage.write(params["inner_path"], buff) site.storage.write(params["inner_path"], buff)
@ -87,20 +91,28 @@ class FileRequest(object):
site.onFileDone(params["inner_path"]) # Trigger filedone site.onFileDone(params["inner_path"]) # Trigger filedone
if params["inner_path"].endswith("content.json"): # Download every changed file from peer if params["inner_path"].endswith("content.json"): # Download every changed file from peer
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer = True) # Add or get peer peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer
site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"]) # On complete publish to other peers # On complete publish to other peers
site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"])
# Load new content file and download changed files in new thread
gevent.spawn( gevent.spawn(
lambda: site.downloadContent(params["inner_path"], peer=peer) lambda: site.downloadContent(params["inner_path"], peer=peer)
) # Load new content file and download changed files in new thread )
self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]}) self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})
elif valid == None: # Not changed elif valid is None: # Not changed
peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer
if peer: if peer:
self.log.debug("Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) ) self.log.debug(
"Same version, adding new peer for locked files: %s, tasks: %s" %
(peer.key, len(site.worker_manager.tasks))
)
for task in site.worker_manager.tasks: # New peer add to every ongoing task for task in site.worker_manager.tasks: # New peer add to every ongoing task
if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from this peer too if its peer locked if task["peers"]:
# Download file from this peer too if its peer locked
site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)
self.response({"ok": "File not changed"}) self.response({"ok": "File not changed"})
@ -116,18 +128,21 @@ class FileRequest(object):
return False return False
try: try:
file_path = site.storage.getPath(params["inner_path"]) file_path = site.storage.getPath(params["inner_path"])
if config.debug_socket: self.log.debug("Opening file: %s" % file_path) if config.debug_socket:
self.log.debug("Opening file: %s" % file_path)
with StreamingMsgpack.FilePart(file_path, "rb") as file: with StreamingMsgpack.FilePart(file_path, "rb") as file:
file.seek(params["location"]) file.seek(params["location"])
file.read_bytes = FILE_BUFF file.read_bytes = FILE_BUFF
back = {"body": file, back = {
"body": file,
"size": os.fstat(file.fileno()).st_size, "size": os.fstat(file.fileno()).st_size,
"location": min(file.tell()+FILE_BUFF, os.fstat(file.fileno()).st_size) "location": min(file.tell() + FILE_BUFF, os.fstat(file.fileno()).st_size)
} }
if config.debug_socket: if config.debug_socket:
self.log.debug("Sending file %s from position %s to %s" % (file_path, self.log.debug(
params["location"], "Sending file %s from position %s to %s" %
back["location"])) (file_path, params["location"], back["location"])
)
self.response(back, streaming=True) self.response(back, streaming=True)
if config.debug_socket: if config.debug_socket:
self.log.debug("File %s sent" % file_path) self.log.debug("File %s sent" % file_path)
@ -159,7 +174,8 @@ class FileRequest(object):
for peer in params["peers"]: # Add sent peers to site for peer in params["peers"]: # Add sent peers to site
address = self.unpackAddress(peer) address = self.unpackAddress(peer)
got_peer_keys.append("%s:%s" % address) got_peer_keys.append("%s:%s" % address)
if site.addPeer(*address): added += 1 if site.addPeer(*address):
added += 1
# Send back peers that is not in the sent list and connectable (not port 0) # Send back peers that is not in the sent list and connectable (not port 0)
packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)] packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
if added: if added:
@ -173,9 +189,11 @@ class FileRequest(object):
if not site or not site.settings["serving"]: # Site unknown or not serving if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"}) self.response({"error": "Unknown site"})
return False return False
modified_files = {inner_path: content["modified"] modified_files = {
inner_path: content["modified"]
for inner_path, content in site.content_manager.contents.iteritems() for inner_path, content in site.content_manager.contents.iteritems()
if content["modified"] > params["since"]} if content["modified"] > params["since"]
}
# Add peer to site if not added before # Add peer to site if not added before
connected_peer = site.addPeer(self.connection.ip, self.connection.port) connected_peer = site.addPeer(self.connection.ip, self.connection.port)

View file

@ -1,5 +1,10 @@
import os, logging, urllib2, re, time import logging
import gevent, msgpack import urllib2
import re
import time
import gevent
from Config import config from Config import config
from FileRequest import FileRequest from FileRequest import FileRequest
from Site import SiteManager from Site import SiteManager
@ -9,6 +14,7 @@ from util import UpnpPunch
class FileServer(ConnectionServer): class FileServer(ConnectionServer):
def __init__(self): def __init__(self):
ConnectionServer.__init__(self, config.fileserver_ip, config.fileserver_port, self.handleRequest) ConnectionServer.__init__(self, config.fileserver_ip, config.fileserver_port, self.handleRequest)
if config.ip_external: # Ip external definied in arguments if config.ip_external: # Ip external definied in arguments
@ -18,30 +24,32 @@ class FileServer(ConnectionServer):
self.port_opened = None # Is file server opened on router self.port_opened = None # Is file server opened on router
self.sites = SiteManager.site_manager.list() self.sites = SiteManager.site_manager.list()
# Handle request to fileserver # Handle request to fileserver
def handleRequest(self, connection, message): def handleRequest(self, connection, message):
if "params" in message: if "params" in message:
self.log.debug("FileRequest: %s %s %s %s" % (str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path"))) self.log.debug(
"FileRequest: %s %s %s %s" %
(str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path"))
)
else: else:
self.log.debug("FileRequest: %s %s" % (str(connection), req["cmd"])) self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"]))
req = FileRequest(self, connection) req = FileRequest(self, connection)
req.route(message["cmd"], message.get("req_id"), message.get("params")) req.route(message["cmd"], message.get("req_id"), message.get("params"))
# Reload the FileRequest class to prevent restarts in debug mode # Reload the FileRequest class to prevent restarts in debug mode
def reload(self): def reload(self):
global FileRequest global FileRequest
import imp import imp
FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest
# Try to open the port using upnp # Try to open the port using upnp
def openport(self, port=None, check=True): def openport(self, port=None, check=True):
if not port: port = self.port if not port:
if self.port_opened: return True # Port already opened port = self.port
if self.port_opened:
return True # Port already opened
if check: # Check first if its already opened if check: # Check first if its already opened
if self.testOpenport(port)["result"] == True: if self.testOpenport(port)["result"] is True:
return True # Port already opened return True # Port already opened
self.log.info("Trying to open port using UpnpPunch...") self.log.info("Trying to open port using UpnpPunch...")
@ -52,25 +60,24 @@ class FileServer(ConnectionServer):
self.log.error("UpnpPunch run error: %s" % Debug.formatException(err)) self.log.error("UpnpPunch run error: %s" % Debug.formatException(err))
upnp_punch = False upnp_punch = False
if upnp_punch and self.testOpenport(port)["result"] == True: if upnp_punch and self.testOpenport(port)["result"] is True:
return True return True
self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port) self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port)
return False return False
# Test if the port is open # Test if the port is open
def testOpenport(self, port = None): def testOpenport(self, port=None):
time.sleep(1) # Wait for port open time.sleep(1) # Wait for port open
if not port: port = self.port if not port:
port = self.port
back = self.testOpenportPortchecker(port) back = self.testOpenportPortchecker(port)
if back["result"] == True: # Successful port check if back["result"] is True: # Successful port check
return back return back
else: # Alternative port checker else: # Alternative port checker
return self.testOpenportCanyouseeme(port) return self.testOpenportCanyouseeme(port)
def testOpenportPortchecker(self, port=None):
def testOpenportPortchecker(self, port = None):
self.log.info("Checking port %s using portchecker.co..." % port) self.log.info("Checking port %s using portchecker.co..." % port)
try: try:
data = urllib2.urlopen("http://portchecker.co/check", "port=%s" % port, timeout=20.0).read() data = urllib2.urlopen("http://portchecker.co/check", "port=%s" % port, timeout=20.0).read()
@ -79,6 +86,7 @@ class FileServer(ConnectionServer):
except Exception, err: except Exception, err:
message = "Error: %s" % Debug.formatException(err) message = "Error: %s" % Debug.formatException(err)
data = "" data = ""
if "closed" in message or "Error" in message: if "closed" in message or "Error" in message:
self.log.info("[BAD :(] Port closed: %s" % message) self.log.info("[BAD :(] Port closed: %s" % message)
if port == self.port: if port == self.port:
@ -102,8 +110,7 @@ class FileServer(ConnectionServer):
config.ip_external = False config.ip_external = False
return {"result": True, "message": message} return {"result": True, "message": message}
def testOpenportCanyouseeme(self, port=None):
def testOpenportCanyouseeme(self, port = None):
self.log.info("Checking port %s using canyouseeme.org..." % port) self.log.info("Checking port %s using canyouseeme.org..." % port)
try: try:
data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read() data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
@ -111,6 +118,7 @@ class FileServer(ConnectionServer):
message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags
except Exception, err: except Exception, err:
message = "Error: %s" % Debug.formatException(err) message = "Error: %s" % Debug.formatException(err)
if "Error" in message: if "Error" in message:
self.log.info("[BAD :(] Port closed: %s" % message) self.log.info("[BAD :(] Port closed: %s" % message)
if port == self.port: if port == self.port:
@ -134,26 +142,23 @@ class FileServer(ConnectionServer):
config.ip_external = False config.ip_external = False
return {"result": True, "message": message} return {"result": True, "message": message}
# Set external ip without testing # Set external ip without testing
def setIpExternal(self, ip_external): def setIpExternal(self, ip_external):
logging.info("Setting external ip without testing: %s..." % ip_external) logging.info("Setting external ip without testing: %s..." % ip_external)
config.ip_external = ip_external config.ip_external = ip_external
self.port_opened = True self.port_opened = True
# Check site file integrity # Check site file integrity
def checkSite(self, site): def checkSite(self, site):
if site.settings["serving"]: if site.settings["serving"]:
site.announce() # Announce site to tracker site.announce() # Announce site to tracker
site.update() # Update site's content.json and download changed files site.update() # Update site's content.json and download changed files
if self.port_opened == False: # In passive mode keep 5 active peer connection to get the updates if self.port_opened is False: # In passive mode keep 5 active peer connection to get the updates
site.needConnections() site.needConnections()
# Check sites integrity # Check sites integrity
def checkSites(self): def checkSites(self):
if self.port_opened == None: # Test and open port if not tested yet if self.port_opened is None: # Test and open port if not tested yet
self.openport() self.openport()
self.log.debug("Checking sites integrity..") self.log.debug("Checking sites integrity..")
@ -162,12 +167,11 @@ class FileServer(ConnectionServer):
time.sleep(2) # Prevent too quick request time.sleep(2) # Prevent too quick request
site = None site = None
# Announce sites every 20 min # Announce sites every 20 min
def announceSites(self): def announceSites(self):
import gc import gc
while 1: while 1:
time.sleep(20*60) # Announce sites every 20 min time.sleep(20 * 60) # Announce sites every 20 min
for address, site in self.sites.items(): for address, site in self.sites.items():
if site.settings["serving"]: if site.settings["serving"]:
site.announce() # Announce site to tracker site.announce() # Announce site to tracker
@ -181,7 +185,7 @@ class FileServer(ConnectionServer):
site.retryBadFiles() site.retryBadFiles()
# In passive mode keep 5 active peer connection to get the updates # In passive mode keep 5 active peer connection to get the updates
if self.port_opened == False: if self.port_opened is False:
site.needConnections() site.needConnections()
time.sleep(2) # Prevent too quick request time.sleep(2) # Prevent too quick request
@ -189,21 +193,22 @@ class FileServer(ConnectionServer):
site = None site = None
gc.collect() # Implicit grabage collection gc.collect() # Implicit grabage collection
# Detects if computer back from wakeup # Detects if computer back from wakeup
def wakeupWatcher(self): def wakeupWatcher(self):
last_time = time.time() last_time = time.time()
while 1: while 1:
time.sleep(30) time.sleep(30)
if time.time()-last_time > 60: # If taken more than 60 second then the computer was in sleep mode if time.time() - last_time > 60: # If taken more than 60 second then the computer was in sleep mode
self.log.info("Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." % (last_time, time.time(), time.time()-last_time)) self.log.info(
"Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." %
(last_time, time.time(), time.time() - last_time)
)
self.port_opened = None # Check if we still has the open port on router self.port_opened = None # Check if we still has the open port on router
self.checkSites() self.checkSites()
last_time = time.time() last_time = time.time()
# Bind and start serving sites # Bind and start serving sites
def start(self, check_sites = True): def start(self, check_sites=True):
self.log = logging.getLogger("FileServer") self.log = logging.getLogger("FileServer")
if config.debug: if config.debug:

View file

@ -1,8 +1,14 @@
import os, logging, gevent, time, msgpack, sys, random, socket, struct import logging
import gevent
import time
import sys
import socket
import struct
from cStringIO import StringIO from cStringIO import StringIO
from Config import config
from Debug import Debug from Debug import Debug
# Communicate remote peers # Communicate remote peers
class Peer(object): class Peer(object):
__slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response", __slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response",
@ -49,7 +55,8 @@ class Peer(object):
self.connection = self.connection_server.getConnection(self.ip, self.port) self.connection = self.connection_server.getConnection(self.ip, self.port)
except Exception, err: except Exception, err:
self.onConnectionError() self.onConnectionError()
self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed)) self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
(Debug.formatException(err), self.connection_error, self.hash_failed))
self.connection = None self.connection = None
# Check if we have connection to peer # Check if we have connection to peer
@ -57,7 +64,7 @@ class Peer(object):
if self.connection and self.connection.connected: # We have connection to peer if self.connection and self.connection.connected: # We have connection to peer
return self.connection return self.connection
else: # Try to find from other sites connections else: # Try to find from other sites connections
self.connection = self.connection_server.getConnection(self.ip, self.port, create=False) # Do not create new connection if not found self.connection = self.connection_server.getConnection(self.ip, self.port, create=False)
return self.connection return self.connection
def __str__(self): def __str__(self):
@ -68,7 +75,7 @@ class Peer(object):
# Peer ip:port to packed 6byte format # Peer ip:port to packed 6byte format
def packAddress(self): def packAddress(self):
return socket.inet_aton(self.ip)+struct.pack("H", self.port) return socket.inet_aton(self.ip) + struct.pack("H", self.port)
def unpackAddress(self, packed): def unpackAddress(self, packed):
return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0] return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]
@ -85,16 +92,11 @@ class Peer(object):
self.onConnectionError() self.onConnectionError()
return None # Connection failed return None # Connection failed
#if cmd != "ping" and self.last_response and time.time() - self.last_response > 20*60: # If last response if older than 20 minute, ping first to see if still alive for retry in range(1, 3): # Retry 3 times
# if not self.ping(): return None
for retry in range(1,3): # Retry 3 times
#if config.debug_socket: self.log.debug("sendCmd: %s %s" % (cmd, params.get("inner_path")))
try: try:
response = self.connection.request(cmd, params) response = self.connection.request(cmd, params)
if not response: if not response:
raise Exception("Send error") raise Exception("Send error")
#if config.debug_socket: self.log.debug("Got response to: %s" % cmd)
if "error" in response: if "error" in response:
self.log("%s error: %s" % (cmd, response["error"])) self.log("%s error: %s" % (cmd, response["error"]))
self.onConnectionError() self.onConnectionError()
@ -108,10 +110,11 @@ class Peer(object):
break break
else: else:
self.onConnectionError() self.onConnectionError()
self.log("%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err), self.log(
self.connection_error, "%s (connection_error: %s, hash_failed: %s, retry: %s)" %
self.hash_failed, retry)) (Debug.formatException(err), self.connection_error, self.hash_failed, retry)
time.sleep(1*retry) )
time.sleep(1 * retry)
self.connect() self.connect()
return None # Failed after 4 retry return None # Failed after 4 retry
@ -121,7 +124,8 @@ class Peer(object):
buff = StringIO() buff = StringIO()
s = time.time() s = time.time()
while True: # Read in 512k parts while True: # Read in 512k parts
back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location}) # Get file content from last location back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location})
if not back or "body" not in back: # Error if not back or "body" not in back: # Error
return False return False
@ -145,7 +149,7 @@ class Peer(object):
response = self.request("ping") response = self.request("ping")
if response and "body" in response and response["body"] == "Pong!": if response and "body" in response and response["body"] == "Pong!":
response_time = time.time()-s response_time = time.time() - s
break # All fine, exit from for loop break # All fine, exit from for loop
# Timeout reached or bad response # Timeout reached or bad response
self.onConnectionError() self.onConnectionError()
@ -185,7 +189,8 @@ class Peer(object):
# Stop and remove from site # Stop and remove from site
def remove(self): def remove(self):
self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed)) self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed))
if self.site and self.key in self.site.peers: del(self.site.peers[self.key]) if self.site and self.key in self.site.peers:
del(self.site.peers[self.key])
if self.connection: if self.connection:
self.connection.close() self.connection.close()

View file

@ -1,8 +1,13 @@
import logging, os, sys import logging
import os
import sys
from Debug import Debug from Debug import Debug
from Config import config from Config import config
class PluginManager: class PluginManager:
def __init__(self): def __init__(self):
self.log = logging.getLogger("PluginManager") self.log = logging.getLogger("PluginManager")
self.plugin_path = "plugins" # Plugin directory self.plugin_path = "plugins" # Plugin directory
@ -11,28 +16,29 @@ class PluginManager:
sys.path.append(self.plugin_path) sys.path.append(self.plugin_path)
if config.debug: # Auto reload Plugins on file change if config.debug: # Auto reload Plugins on file change
from Debug import DebugReloader from Debug import DebugReloader
DebugReloader(self.reloadPlugins) DebugReloader(self.reloadPlugins)
# -- Load / Unload -- # -- Load / Unload --
# Load all plugin # Load all plugin
def loadPlugins(self): def loadPlugins(self):
for dir_name in os.listdir(self.plugin_path): for dir_name in os.listdir(self.plugin_path):
dir_path = os.path.join(self.plugin_path, dir_name) dir_path = os.path.join(self.plugin_path, dir_name)
if dir_name.startswith("disabled"): continue # Dont load if disabled if dir_name.startswith("disabled"):
if not os.path.isdir(dir_path): continue # Dont load if not dir continue # Dont load if disabled
if dir_name.startswith("Debug") and not config.debug: continue # Only load in debug mode if module name starts with Debug if not os.path.isdir(dir_path):
continue # Dont load if not dir
if dir_name.startswith("Debug") and not config.debug:
continue # Only load in debug mode if module name starts with Debug
self.log.debug("Loading plugin: %s" % dir_name) self.log.debug("Loading plugin: %s" % dir_name)
try: try:
__import__(dir_name) __import__(dir_name)
except Exception, err: except Exception, err:
self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err))) self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err)))
if dir_name not in self.plugin_names: self.plugin_names.append(dir_name) if dir_name not in self.plugin_names:
self.plugin_names.append(dir_name)
# Reload all plugins # Reload all plugins
def reloadPlugins(self): def reloadPlugins(self):
@ -53,23 +59,26 @@ plugin_manager = PluginManager() # Singletone
# -- Decorators -- # -- Decorators --
# Accept plugin to class decorator # Accept plugin to class decorator
def acceptPlugins(base_class): def acceptPlugins(base_class):
class_name = base_class.__name__ class_name = base_class.__name__
if class_name in plugin_manager.plugins: # Has plugins if class_name in plugin_manager.plugins: # Has plugins
classes = plugin_manager.plugins[class_name][:] # Copy the current plugins classes = plugin_manager.plugins[class_name][:] # Copy the current plugins
classes.reverse() classes.reverse()
classes.append(base_class) # Add the class itself to end of inherience line classes.append(base_class) # Add the class itself to end of inherience line
PluginedClass = type(class_name, tuple(classes), dict()) # Create the plugined class plugined_class = type(class_name, tuple(classes), dict()) # Create the plugined class
plugin_manager.log.debug("New class accepts plugins: %s (Loaded plugins: %s)" % (class_name, classes)) plugin_manager.log.debug("New class accepts plugins: %s (Loaded plugins: %s)" % (class_name, classes))
else: # No plugins just use the original else: # No plugins just use the original
PluginedClass = base_class plugined_class = base_class
return PluginedClass return plugined_class
# Register plugin to class name decorator # Register plugin to class name decorator
def registerTo(class_name): def registerTo(class_name):
plugin_manager.log.debug("New plugin registered to: %s" % class_name) plugin_manager.log.debug("New plugin registered to: %s" % class_name)
if class_name not in plugin_manager.plugins: plugin_manager.plugins[class_name] = [] if class_name not in plugin_manager.plugins:
plugin_manager.plugins[class_name] = []
def classDecorator(self): def classDecorator(self):
plugin_manager.plugins[class_name].append(self) plugin_manager.plugins[class_name].append(self)
@ -77,20 +86,20 @@ def registerTo(class_name):
return classDecorator return classDecorator
# - Example usage - # - Example usage -
if __name__ == "__main__": if __name__ == "__main__":
@registerTo("Request") @registerTo("Request")
class RequestPlugin(object): class RequestPlugin(object):
def actionMainPage(self, path): def actionMainPage(self, path):
return "Hello MainPage!" return "Hello MainPage!"
@acceptPlugins
@accept
class Request(object): class Request(object):
def route(self, path): def route(self, path):
func = getattr(self, "action"+path, None) func = getattr(self, "action" + path, None)
if func: if func:
return func(path) return func(path)
else: else:

View file

@ -1,18 +1,34 @@
import os, json, logging, hashlib, re, time, string, random, sys, binascii, struct, socket, urllib, urllib2 import os
from lib.subtl.subtl import UdpTrackerClient import json
from lib import bencode import logging
import hashlib
import re
import time
import string
import random
import sys
import binascii
import struct
import socket
import urllib
import urllib2
import gevent import gevent
import util import util
from lib import bencode
from lib.subtl.subtl import UdpTrackerClient
from Config import config from Config import config
from Peer import Peer from Peer import Peer
from Worker import WorkerManager from Worker import WorkerManager
from Crypt import CryptHash
from Debug import Debug from Debug import Debug
from Content import ContentManager from Content import ContentManager
from SiteStorage import SiteStorage from SiteStorage import SiteStorage
import SiteManager import SiteManager
class Site: class Site:
def __init__(self, address, allow_create=True): def __init__(self, address, allow_create=True):
self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure its correct address self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure its correct address
self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging
@ -23,7 +39,7 @@ class Site:
self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself) self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself)
self.last_announce = 0 # Last announce time to tracker self.last_announce = 0 # Last announce time to tracker
self.worker_manager = WorkerManager(self) # Handle site download from other peers self.worker_manager = WorkerManager(self) # Handle site download from other peers
self.bad_files = {} # SHA512 check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept) self.bad_files = {} # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept)
self.content_updated = None # Content.js update time self.content_updated = None # Content.js update time
self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout] self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout]
self.page_requested = False # Page viewed in browser self.page_requested = False # Page viewed in browser
@ -33,12 +49,16 @@ class Site:
self.content_manager = ContentManager(self) # Load contents self.content_manager = ContentManager(self) # Load contents
if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed) if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed)
self.settings["auth_key"] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(24)) self.settings["auth_key"] = ''.join(
random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(24)
)
self.log.debug("New auth key: %s" % self.settings["auth_key"]) self.log.debug("New auth key: %s" % self.settings["auth_key"])
self.saveSettings() self.saveSettings()
if not self.settings.get("wrapper_key"): # To auth websocket permissions if not self.settings.get("wrapper_key"): # To auth websocket permissions
self.settings["wrapper_key"] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12)) self.settings["wrapper_key"] = ''.join(
random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12)
)
self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"]) self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"])
self.saveSettings() self.saveSettings()
@ -47,16 +67,12 @@ class Site:
# Add event listeners # Add event listeners
self.addEventListeners() self.addEventListeners()
def __str__(self): def __str__(self):
return "Site %s" % self.address_short return "Site %s" % self.address_short
def __repr__(self): def __repr__(self):
return "<%s>" % self.__str__() return "<%s>" % self.__str__()
# Load site settings from data/sites.json # Load site settings from data/sites.json
def loadSettings(self): def loadSettings(self):
sites_settings = json.load(open("%s/sites.json" % config.data_dir)) sites_settings = json.load(open("%s/sites.json" % config.data_dir))
@ -67,10 +83,9 @@ class Site:
permissions = ["ADMIN"] permissions = ["ADMIN"]
else: else:
permissions = [] permissions = []
self.settings = { "own": False, "serving": True, "permissions": permissions } # Default self.settings = {"own": False, "serving": True, "permissions": permissions} # Default
return return
# Save site settings to data/sites.json # Save site settings to data/sites.json
def saveSettings(self): def saveSettings(self):
sites_settings = json.load(open("%s/sites.json" % config.data_dir)) sites_settings = json.load(open("%s/sites.json" % config.data_dir))
@ -78,30 +93,27 @@ class Site:
open("%s/sites.json" % config.data_dir, "w").write(json.dumps(sites_settings, indent=2, sort_keys=True)) open("%s/sites.json" % config.data_dir, "w").write(json.dumps(sites_settings, indent=2, sort_keys=True))
return return
# Max site size in MB # Max site size in MB
def getSizeLimit(self): def getSizeLimit(self):
return self.settings.get("size_limit", config.size_limit) return self.settings.get("size_limit", config.size_limit)
# Next size limit based on current size # Next size limit based on current size
def getNextSizeLimit(self): def getNextSizeLimit(self):
size_limits = [10,20,50,100,200,500,1000,2000,5000,10000,20000,50000,100000] size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
size = self.settings.get("size", 0) size = self.settings.get("size", 0)
for size_limit in size_limits: for size_limit in size_limits:
if size*1.2 < size_limit*1024*1024: if size * 1.2 < size_limit * 1024 * 1024:
return size_limit return size_limit
return 999999 return 999999
# Download all file from content.json # Download all file from content.json
def downloadContent(self, inner_path, download_files=True, peer=None): def downloadContent(self, inner_path, download_files=True, peer=None):
s = time.time() s = time.time()
self.log.debug("Downloading %s..." % inner_path) self.log.debug("Downloading %s..." % inner_path)
found = self.needFile(inner_path, update=self.bad_files.get(inner_path)) found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
content_inner_dir = self.content_manager.toDir(inner_path) content_inner_dir = self.content_manager.toDir(inner_path)
if not found: return False # Could not download content.json if not found:
return False # Could not download content.json
self.log.debug("Got %s" % inner_path) self.log.debug("Got %s" % inner_path)
changed = self.content_manager.loadContent(inner_path, load_includes=False) changed = self.content_manager.loadContent(inner_path, load_includes=False)
@ -110,15 +122,16 @@ class Site:
file_threads = [] file_threads = []
if download_files: if download_files:
for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys(): for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
file_inner_path = content_inner_dir+file_relative_path file_inner_path = content_inner_dir + file_relative_path
res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer) # No waiting for finish, return the event # Start download and dont wait for finish, return the event
if res != True: # Need downloading res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
if res is not True and res is not False: # Need downloading and file is allowed
file_threads.append(res) # Append evt file_threads.append(res) # Append evt
# Wait for includes download # Wait for includes download
include_threads = [] include_threads = []
for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys(): for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
file_inner_path = content_inner_dir+file_relative_path file_inner_path = content_inner_dir + file_relative_path
include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer) include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
include_threads.append(include_thread) include_threads.append(include_thread)
@ -128,31 +141,30 @@ class Site:
self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed))) self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed)))
gevent.joinall(file_threads) gevent.joinall(file_threads)
self.log.debug("%s: All file downloaded in %.2fs" % (inner_path, time.time()-s)) self.log.debug("%s: All file downloaded in %.2fs" % (inner_path, time.time() - s))
return True return True
# Return bad files with less than 3 retry # Return bad files with less than 3 retry
def getReachableBadFiles(self): def getReachableBadFiles(self):
if not self.bad_files: return False if not self.bad_files:
return False
return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3] return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3]
# Retry download bad files # Retry download bad files
def retryBadFiles(self): def retryBadFiles(self):
for bad_file in self.bad_files.keys(): for bad_file in self.bad_files.keys():
self.needFile(bad_file, update=True, blocking=False) self.needFile(bad_file, update=True, blocking=False)
# Download all files of the site # Download all files of the site
@util.Noparallel(blocking=False) @util.Noparallel(blocking=False)
def download(self, check_size=False): def download(self, check_size=False):
self.log.debug("Start downloading...%s" % self.bad_files) self.log.debug("Start downloading...%s" % self.bad_files)
gevent.spawn(self.announce) gevent.spawn(self.announce)
if check_size: # Check the size first if check_size: # Check the size first
valid = downloadContent(download_files=False) # Just download content.json files valid = self.downloadContent(download_files=False) # Just download content.json files
if not valid: return False # Cant download content.jsons or size is not fits if not valid:
return False # Cant download content.jsons or size is not fits
# Download everything # Download everything
found = self.downloadContent("content.json") found = self.downloadContent("content.json")
@ -160,26 +172,27 @@ class Site:
return found return found
# Update worker, try to find client that supports listModifications command # Update worker, try to find client that supports listModifications command
def updater(self, peers_try, queried, since): def updater(self, peers_try, queried, since):
while 1: while 1:
if not peers_try or len(queried) >= 3: # Stop after 3 successful query if not peers_try or len(queried) >= 3: # Stop after 3 successful query
break break
peer = peers_try.pop(0) peer = peers_try.pop(0)
if not peer.connection and len(queried) < 2: peer.connect() # Only open new connection if less than 2 queried already if not peer.connection and len(queried) < 2:
if not peer.connection or peer.connection.handshake.get("rev",0) < 126: continue # Not compatible peer.connect() # Only open new connection if less than 2 queried already
if not peer.connection or peer.connection.handshake.get("rev", 0) < 126:
continue # Not compatible
res = peer.listModified(since) res = peer.listModified(since)
if not res or not "modified_files" in res: continue # Failed query if not res or "modified_files" not in res:
continue # Failed query
queried.append(peer) queried.append(peer)
for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we
content = self.content_manager.contents.get(inner_path) content = self.content_manager.contents.get(inner_path)
if not content or modified > content["modified"]: # We dont have this file or we have older if not content or modified > content["modified"]: # We dont have this file or we have older
self.bad_files[inner_path] = self.bad_files.get(inner_path, 0)+1 # Mark as bad file self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file
gevent.spawn(self.downloadContent, inner_path) # Download the content.json + the changed files gevent.spawn(self.downloadContent, inner_path) # Download the content.json + the changed files
# Check modified content.json files from peers and add modified files to bad_files # Check modified content.json files from peers and add modified files to bad_files
# Return: Successfully queried peers [Peer, Peer...] # Return: Successfully queried peers [Peer, Peer...]
def checkModifications(self, since=None): def checkModifications(self, since=None):
@ -189,13 +202,13 @@ class Site:
peers = self.peers.values() peers = self.peers.values()
random.shuffle(peers) random.shuffle(peers)
for peer in peers: # Try to find connected good peers, but we must have at least 5 peers for peer in peers: # Try to find connected good peers, but we must have at least 5 peers
if peer.findConnection() and peer.connection.handshake.get("rev",0) > 125: # Add to the beginning if rev125 if peer.findConnection() and peer.connection.handshake.get("rev", 0) > 125: # Add to the beginning if rev125
peers_try.insert(0, peer) peers_try.insert(0, peer)
elif len(peers_try) < 5: # Backup peers, add to end of the try list elif len(peers_try) < 5: # Backup peers, add to end of the try list
peers_try.append(peer) peers_try.append(peer)
if since == None: # No since definied, download from last modification time-1day if since is not None: # No since definied, download from last modification time-1day
since = self.settings.get("modified", 60*60*24)-60*60*24 since = self.settings.get("modified", 60 * 60 * 24) - 60 * 60 * 24
self.log.debug("Try to get listModifications from peers: %s since: %s" % (peers_try, since)) self.log.debug("Try to get listModifications from peers: %s since: %s" % (peers_try, since))
updaters = [] updaters = []
@ -207,7 +220,6 @@ class Site:
self.log.debug("Queried listModifications from: %s" % queried) self.log.debug("Queried listModifications from: %s" % queried)
return queried return queried
# Update content.json from peers and download changed files # Update content.json from peers and download changed files
# Return: None # Return: None
@util.Noparallel() @util.Noparallel()
@ -215,7 +227,8 @@ class Site:
self.content_manager.loadContent("content.json") # Reload content.json self.content_manager.loadContent("content.json") # Reload content.json
self.content_updated = None # Reset content updated time self.content_updated = None # Reset content updated time
self.updateWebsocket(updating=True) self.updateWebsocket(updating=True)
if announce: self.announce() if announce:
self.announce()
queried = self.checkModifications() queried = self.checkModifications()
@ -223,12 +236,13 @@ class Site:
self.log.debug("Fallback to old-style update") self.log.debug("Fallback to old-style update")
self.redownloadContents() self.redownloadContents()
if not self.settings["own"]: self.storage.checkFiles(quick_check=True) # Quick check files based on file size if not self.settings["own"]:
self.storage.checkFiles(quick_check=True) # Quick check files based on file size
changed = self.content_manager.loadContent("content.json") changed = self.content_manager.loadContent("content.json")
if changed: if changed:
for changed_file in changed: for changed_file in changed:
self.bad_files[changed_file] = self.bad_files.get(changed_file, 0)+1 self.bad_files[changed_file] = self.bad_files.get(changed_file, 0) + 1
if self.bad_files: if self.bad_files:
self.download() self.download()
@ -236,7 +250,6 @@ class Site:
self.settings["size"] = self.content_manager.getTotalSize() # Update site size self.settings["size"] = self.content_manager.getTotalSize() # Update site size
self.updateWebsocket(updated=True) self.updateWebsocket(updated=True)
# Update site by redownload all content.json # Update site by redownload all content.json
def redownloadContents(self): def redownloadContents(self):
@ -248,20 +261,22 @@ class Site:
self.log.debug("Waiting %s content.json to finish..." % len(content_threads)) self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
gevent.joinall(content_threads) gevent.joinall(content_threads)
# Publish worker # Publish worker
def publisher(self, inner_path, peers, published, limit, event_done=None): def publisher(self, inner_path, peers, published, limit, event_done=None):
file_size = self.storage.getSize(inner_path) file_size = self.storage.getSize(inner_path)
body = self.storage.read(inner_path) body = self.storage.read(inner_path)
while 1: while 1:
if not peers or len(published) >= limit: if not peers or len(published) >= limit:
if event_done: event_done.set(True) if event_done:
event_done.set(True)
break # All peers done, or published engouht break # All peers done, or published engouht
peer = peers.pop(0) peer = peers.pop(0)
if peer.connection and peer.connection.last_ping_delay: # Peer connected if peer.connection and peer.connection.last_ping_delay: # Peer connected
timeout = timeout = 5+int(file_size/1024)+peer.connection.last_ping_delay # Timeout: 5sec + size in kb + last_ping # Timeout: 5sec + size in kb + last_ping
else: timeout = timeout = 5 + int(file_size / 1024) + peer.connection.last_ping_delay
timeout = timeout = 5+int(file_size/1024) # Timeout: 5sec + size in kb else: # Peer not connected
# Timeout: 5sec + size in kb
timeout = timeout = 5 + int(file_size / 1024)
result = {"exception": "Timeout"} result = {"exception": "Timeout"}
for retry in range(2): for retry in range(2):
@ -273,7 +288,8 @@ class Site:
"body": body, "body": body,
"peer": (config.ip_external, config.fileserver_port) "peer": (config.ip_external, config.fileserver_port)
}) })
if result: break if result:
break
except Exception, err: except Exception, err:
result = {"exception": Debug.formatException(err)} result = {"exception": Debug.formatException(err)}
@ -281,18 +297,19 @@ class Site:
published.append(peer) published.append(peer)
self.log.info("[OK] %s: %s" % (peer.key, result["ok"])) self.log.info("[OK] %s: %s" % (peer.key, result["ok"]))
else: else:
if result == {"exception": "Timeout"}: peer.onConnectionError() if result == {"exception": "Timeout"}:
peer.onConnectionError()
self.log.info("[FAILED] %s: %s" % (peer.key, result)) self.log.info("[FAILED] %s: %s" % (peer.key, result))
# Update content.json on peers # Update content.json on peers
@util.Noparallel() @util.Noparallel()
def publish(self, limit=5, inner_path="content.json"): def publish(self, limit=5, inner_path="content.json"):
self.log.info( "Publishing to %s/%s peers..." % (min(len(self.peers), limit), len(self.peers)) ) self.log.info("Publishing to %s/%s peers..." % (min(len(self.peers), limit), len(self.peers)))
published = [] # Successfully published (Peer) published = [] # Successfully published (Peer)
publishers = [] # Publisher threads publishers = [] # Publisher threads
peers = self.peers.values() peers = self.peers.values()
if not peers: return 0 # No peers found if not peers:
return 0 # No peers found
random.shuffle(peers) random.shuffle(peers)
event_done = gevent.event.AsyncResult() event_done = gevent.event.AsyncResult()
@ -301,18 +318,25 @@ class Site:
publishers.append(publisher) publishers.append(publisher)
event_done.get() # Wait for done event_done.get() # Wait for done
if len(published) < min(len(self.peers), limit): time.sleep(0.2) # If less than we need sleep a bit if len(published) < min(len(self.peers), limit):
if len(published) == 0: gevent.joinall(publishers) # No successful publish, wait for all publisher time.sleep(0.2) # If less than we need sleep a bit
if len(published) == 0:
gevent.joinall(publishers) # No successful publish, wait for all publisher
# Make sure the connected passive peers got the update # Make sure the connected passive peers got the update
passive_peers = [peer for peer in peers if peer.connection and not peer.connection.closed and peer.key.endswith(":0") and peer not in published] # Every connected passive peer that we not published to passive_peers = [
peer for peer in peers
if peer.connection and not peer.connection.closed and peer.key.endswith(":0") and peer not in published
] # Every connected passive peer that we not published to
for peer in passive_peers: for peer in passive_peers:
gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=10) gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=10)
self.log.info("Successfuly published to %s peers, publishing to %s more passive peers" % (len(published), len(passive_peers)) ) self.log.info(
"Successfuly published to %s peers, publishing to %s more passive peers" %
(len(published), len(passive_peers))
)
return len(published) return len(published)
# Copy this site # Copy this site
def clone(self, address, privatekey=None, address_index=None, overwrite=False): def clone(self, address, privatekey=None, address_index=None, overwrite=False):
import shutil import shutil
@ -325,20 +349,22 @@ class Site:
self.log.debug("Cloning to %s, ignore dirs: %s" % (address, default_dirs)) self.log.debug("Cloning to %s, ignore dirs: %s" % (address, default_dirs))
# Copy root content.json # Copy root content.json
if not new_site.storage.isFile("content.json") and not overwrite: # Content.json not exist yet, create a new one from source site if not new_site.storage.isFile("content.json") and not overwrite:
# Content.json not exist yet, create a new one from source site
content_json = self.storage.loadJson("content.json") content_json = self.storage.loadJson("content.json")
if "domain" in content_json: if "domain" in content_json:
del content_json["domain"] del content_json["domain"]
content_json["title"] = "my"+content_json["title"] content_json["title"] = "my" + content_json["title"]
content_json["cloned_from"] = self.address content_json["cloned_from"] = self.address
if address_index: content_json["address_index"] = address_index # Site owner's BIP32 index if address_index:
content_json["address_index"] = address_index # Site owner's BIP32 index
new_site.storage.writeJson("content.json", content_json) new_site.storage.writeJson("content.json", content_json)
new_site.content_manager.loadContent("content.json", add_bad_files=False, load_includes=False) new_site.content_manager.loadContent("content.json", add_bad_files=False, load_includes=False)
# Copy files # Copy files
for content_inner_path, content in self.content_manager.contents.items(): for content_inner_path, content in self.content_manager.contents.items():
for file_relative_path in sorted(content["files"].keys()): for file_relative_path in sorted(content["files"].keys()):
file_inner_path = self.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json file_inner_path = self.content_manager.toDir(content_inner_path) + file_relative_path # Relative to content.json
file_inner_path = file_inner_path.strip("/") # Strip leading / file_inner_path = file_inner_path.strip("/") # Strip leading /
if file_inner_path.split("/")[0] in default_dirs: # Dont copy directories that has -default postfixed alternative if file_inner_path.split("/")[0] in default_dirs: # Dont copy directories that has -default postfixed alternative
self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path) self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path)
@ -349,7 +375,8 @@ class Site:
file_path_dest = new_site.storage.getPath(file_inner_path) file_path_dest = new_site.storage.getPath(file_inner_path)
self.log.debug("[COPY] %s to %s..." % (file_inner_path, file_path_dest)) self.log.debug("[COPY] %s to %s..." % (file_inner_path, file_path_dest))
dest_dir = os.path.dirname(file_path_dest) dest_dir = os.path.dirname(file_path_dest)
if not os.path.isdir(dest_dir): os.makedirs(dest_dir) if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
shutil.copy(file_path, file_path_dest) shutil.copy(file_path, file_path_dest)
# If -default in path, create a -default less copy of the file # If -default in path, create a -default less copy of the file
@ -360,28 +387,32 @@ class Site:
continue continue
self.log.debug("[COPY] Default file: %s to %s..." % (file_inner_path, file_path_dest)) self.log.debug("[COPY] Default file: %s to %s..." % (file_inner_path, file_path_dest))
dest_dir = os.path.dirname(file_path_dest) dest_dir = os.path.dirname(file_path_dest)
if not os.path.isdir(dest_dir): os.makedirs(dest_dir) if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
shutil.copy(file_path, file_path_dest) shutil.copy(file_path, file_path_dest)
# Sign if content json # Sign if content json
if file_path_dest.endswith("/content.json"): if file_path_dest.endswith("/content.json"):
new_site.storage.onUpdated(file_inner_path.replace("-default", "")) new_site.storage.onUpdated(file_inner_path.replace("-default", ""))
new_site.content_manager.loadContent(file_inner_path.replace("-default", ""), add_bad_files=False, load_includes=False) new_site.content_manager.loadContent(
if privatekey: new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey) file_inner_path.replace("-default", ""), add_bad_files=False, load_includes=False
)
if privatekey: new_site.content_manager.sign("content.json", privatekey) if privatekey:
new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey)
if privatekey:
new_site.content_manager.sign("content.json", privatekey)
# Rebuild DB # Rebuild DB
if new_site.storage.isFile("dbschema.json"): new_site.storage.rebuildDb() if new_site.storage.isFile("dbschema.json"):
new_site.storage.rebuildDb()
return new_site return new_site
# Check and download if file not exist # Check and download if file not exist
def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0): def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0):
if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything
return True return True
elif self.settings["serving"] == False: # Site not serving elif self.settings["serving"] is False: # Site not serving
return False return False
else: # Wait until file downloaded else: # Wait until file downloaded
self.bad_files[inner_path] = True # Mark as bad file self.bad_files[inner_path] = True # Mark as bad file
@ -392,14 +423,17 @@ class Site:
task = self.worker_manager.addTask("content.json", peer) task = self.worker_manager.addTask("content.json", peer)
task.get() task.get()
self.content_manager.loadContent() self.content_manager.loadContent()
if not self.content_manager.contents.get("content.json"): return False # Content.json download failed if not self.content_manager.contents.get("content.json"):
return False # Content.json download failed
if not inner_path.endswith("content.json") and not self.content_manager.getFileInfo(inner_path): # No info for file, download all content.json first if not inner_path.endswith("content.json") and not self.content_manager.getFileInfo(inner_path):
# No info for file, download all content.json first
self.log.debug("No info for %s, waiting for all content.json" % inner_path) self.log.debug("No info for %s, waiting for all content.json" % inner_path)
success = self.downloadContent("content.json", download_files=False) success = self.downloadContent("content.json", download_files=False)
if not success: return False if not success:
if not self.content_manager.getFileInfo(inner_path): return False # Still no info for file return False
if not self.content_manager.getFileInfo(inner_path):
return False # Still no info for file
task = self.worker_manager.addTask(inner_path, peer, priority=priority) task = self.worker_manager.addTask(inner_path, peer, priority=priority)
if blocking: if blocking:
@ -407,14 +441,15 @@ class Site:
else: else:
return task return task
# Add or update a peer to site # Add or update a peer to site
def addPeer(self, ip, port, return_peer = False): def addPeer(self, ip, port, return_peer=False):
if not ip: return False if not ip:
if (ip, port) in self.peer_blacklist: return False # Ignore blacklist (eg. myself) return False
if (ip, port) in self.peer_blacklist:
return False # Ignore blacklist (eg. myself)
key = "%s:%s" % (ip, port) key = "%s:%s" % (ip, port)
if key in self.peers: # Already has this ip if key in self.peers: # Already has this ip
#self.peers[key].found() # self.peers[key].found()
if return_peer: # Always return peer if return_peer: # Always return peer
return self.peers[key] return self.peers[key]
else: else:
@ -424,7 +459,6 @@ class Site:
self.peers[key] = peer self.peers[key] = peer
return peer return peer
# Gather peer from connected peers # Gather peer from connected peers
@util.Noparallel(blocking=False) @util.Noparallel(blocking=False)
def announcePex(self, query_num=2, need_num=5): def announcePex(self, query_num=2, need_num=5):
@ -451,16 +485,17 @@ class Site:
if res: if res:
self.worker_manager.onPeers() self.worker_manager.onPeers()
self.updateWebsocket(peers_added=res) self.updateWebsocket(peers_added=res)
if done == query_num: break if done == query_num:
break
self.log.debug("Queried pex from %s peers got %s new peers." % (done, added)) self.log.debug("Queried pex from %s peers got %s new peers." % (done, added))
# Gather peers from tracker # Gather peers from tracker
# Return: Complete time or False on error # Return: Complete time or False on error
def announceTracker(self, protocol, ip, port, fileserver_port, address_hash, my_peer_id): def announceTracker(self, protocol, ip, port, fileserver_port, address_hash, my_peer_id):
s = time.time() s = time.time()
if protocol == "udp": # Udp tracker if protocol == "udp": # Udp tracker
if config.disable_udp: return False # No udp supported if config.disable_udp:
return False # No udp supported
tracker = UdpTrackerClient(ip, port) tracker = UdpTrackerClient(ip, port)
tracker.peer_port = fileserver_port tracker.peer_port = fileserver_port
try: try:
@ -481,12 +516,12 @@ class Site:
} }
req = None req = None
try: try:
url = "http://"+ip+"?"+urllib.urlencode(params) url = "http://" + ip + "?" + urllib.urlencode(params)
# Load url # Load url
with gevent.Timeout(10, False): # Make sure of timeout with gevent.Timeout(10, False): # Make sure of timeout
req = urllib2.urlopen(url, timeout=8) req = urllib2.urlopen(url, timeout=8)
response = req.read() response = req.read()
req.fp._sock.recv=None # Hacky avoidance of memory leak for older python versions req.fp._sock.recv = None # Hacky avoidance of memory leak for older python versions
req.close() req.close()
req = None req = None
if not response: if not response:
@ -512,18 +547,20 @@ class Site:
# Adding peers # Adding peers
added = 0 added = 0
for peer in peers: for peer in peers:
if not peer["port"]: continue # Dont add peers with port 0 if not peer["port"]:
if self.addPeer(peer["addr"], peer["port"]): added += 1 continue # Dont add peers with port 0
if self.addPeer(peer["addr"], peer["port"]):
added += 1
if added: if added:
self.worker_manager.onPeers() self.worker_manager.onPeers()
self.updateWebsocket(peers_added=added) self.updateWebsocket(peers_added=added)
self.log.debug("Found %s peers, new: %s" % (len(peers), added)) self.log.debug("Found %s peers, new: %s" % (len(peers), added))
return time.time()-s return time.time() - s
# Add myself and get other peers from tracker # Add myself and get other peers from tracker
def announce(self, force=False): def announce(self, force=False):
if time.time() < self.last_announce+30 and not force: return # No reannouncing within 30 secs if time.time() < self.last_announce + 30 and not force:
return # No reannouncing within 30 secs
self.last_announce = time.time() self.last_announce = time.time()
errors = [] errors = []
slow = [] slow = []
@ -560,17 +597,19 @@ class Site:
self.saveSettings() self.saveSettings()
if len(errors) < len(SiteManager.TRACKERS): # Less errors than total tracker nums if len(errors) < len(SiteManager.TRACKERS): # Less errors than total tracker nums
self.log.debug("Announced port %s to %s trackers in %.3fs, errors: %s, slow: %s" % (fileserver_port, announced, time.time()-s, errors, slow)) self.log.debug(
"Announced port %s to %s trackers in %.3fs, errors: %s, slow: %s" %
(fileserver_port, announced, time.time() - s, errors, slow)
)
else: else:
self.log.error("Announced to %s trackers in %.3fs, failed" % (announced, time.time()-s)) self.log.error("Announced to %s trackers in %.3fs, failed" % (announced, time.time() - s))
if not [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]: # If no connected peer yet then wait for connections if not [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]:
# If no connected peer yet then wait for connections
gevent.spawn_later(3, self.announcePex, need_num=10) # Spawn 3 secs later gevent.spawn_later(3, self.announcePex, need_num=10) # Spawn 3 secs later
# self.onFileDone.once(lambda inner_path: self.announcePex(need_num=10), "announcePex_%s" % self.address) # After first file downloaded try to find more peers using pex
else: # Else announce immediately else: # Else announce immediately
self.announcePex() self.announcePex()
# Keep connections to get the updates (required for passive clients) # Keep connections to get the updates (required for passive clients)
def needConnections(self, num=3): def needConnections(self, num=3):
need = min(len(self.peers), num) # Need 3 peer, but max total peers need = min(len(self.peers), num) # Need 3 peer, but max total peers
@ -586,33 +625,37 @@ class Site:
for peer in self.peers.values(): for peer in self.peers.values():
if not peer.connection or not peer.connection.connected: # No peer connection or disconnected if not peer.connection or not peer.connection.connected: # No peer connection or disconnected
peer.pex() # Initiate peer exchange peer.pex() # Initiate peer exchange
if peer.connection and peer.connection.connected: connected += 1 # Successfully connected if peer.connection and peer.connection.connected:
if connected >= need: break connected += 1 # Successfully connected
if connected >= need:
break
return connected return connected
# Return: Probably working, connectable Peers # Return: Probably working, connectable Peers
def getConnectablePeers(self, need_num=5, ignore=[]): def getConnectablePeers(self, need_num=5, ignore=[]):
peers = self.peers.values() peers = self.peers.values()
random.shuffle(peers) random.shuffle(peers)
found = [] found = []
for peer in peers: for peer in peers:
if peer.key.endswith(":0"): continue # Not connectable if peer.key.endswith(":0"):
if not peer.connection: continue # No connection continue # Not connectable
if peer.key in ignore: continue # The requester has this peer if not peer.connection:
if time.time() - peer.connection.last_recv_time > 60*60*2: # Last message more than 2 hours ago continue # No connection
if peer.key in ignore:
continue # The requester has this peer
if time.time() - peer.connection.last_recv_time > 60 * 60 * 2: # Last message more than 2 hours ago
peer.connection = None # Cleanup: Dead connection peer.connection = None # Cleanup: Dead connection
continue continue
found.append(peer) found.append(peer)
if len(found) >= need_num: break # Found requested number of peers if len(found) >= need_num:
break # Found requested number of peers
if (not found and not ignore) or (need_num > 5 and need_num < 100 and len(found) < need_num): # Not found any peer and the requester dont have any, return not that good peers or Initial pex, but not /Stats page and we can't give enought peer if (not found and not ignore) or (need_num > 5 and need_num < 100 and len(found) < need_num):
found = [peer for peer in peers if not peer.key.endswith(":0") and peer.key not in ignore][0:need_num-len(found)] # Return not that good peers: Not found any peer and the requester dont have any or cant give enought peer
found = [peer for peer in peers if not peer.key.endswith(":0") and peer.key not in ignore][0:need_num - len(found)]
return found return found
# - Events - # - Events -
# Add event listeners # Add event listeners
@ -626,7 +669,6 @@ class Site:
self.onFileDone.append(lambda inner_path: self.fileDone(inner_path)) self.onFileDone.append(lambda inner_path: self.fileDone(inner_path))
self.onFileFail.append(lambda inner_path: self.fileFailed(inner_path)) self.onFileFail.append(lambda inner_path: self.fileFailed(inner_path))
# Send site status update to websocket clients # Send site status update to websocket clients
def updateWebsocket(self, **kwargs): def updateWebsocket(self, **kwargs):
if kwargs: if kwargs:
@ -636,14 +678,12 @@ class Site:
for ws in self.websockets: for ws in self.websockets:
ws.event("siteChanged", self, param) ws.event("siteChanged", self, param)
# File download started # File download started
@util.Noparallel(blocking=False) @util.Noparallel(blocking=False)
def fileStarted(self): def fileStarted(self):
time.sleep(0.001) # Wait for other files adds time.sleep(0.001) # Wait for other files adds
self.updateWebsocket(file_started=True) self.updateWebsocket(file_started=True)
# File downloaded successful # File downloaded successful
def fileDone(self, inner_path): def fileDone(self, inner_path):
# File downloaded, remove it from bad files # File downloaded, remove it from bad files
@ -657,14 +697,12 @@ class Site:
self.updateWebsocket(file_done=inner_path) self.updateWebsocket(file_done=inner_path)
# File download failed # File download failed
def fileFailed(self, inner_path): def fileFailed(self, inner_path):
if inner_path == "content.json": if inner_path == "content.json":
self.content_updated = False self.content_updated = False
self.log.debug("Can't update content.json") self.log.debug("Can't update content.json")
if inner_path in self.bad_files: if inner_path in self.bad_files:
self.bad_files[inner_path] = self.bad_files.get(inner_path, 0)+1 self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1
self.updateWebsocket(file_failed=inner_path) self.updateWebsocket(file_failed=inner_path)

View file

@ -1,22 +1,25 @@
import json, logging, time, re, os import json
import gevent import logging
import re
import os
from Plugin import PluginManager from Plugin import PluginManager
from Config import config from Config import config
TRACKERS = [ TRACKERS = [
("udp", "open.demonii.com", 1337), ("udp", "open.demonii.com", 1337),
#("udp", "sugoi.pomf.se", 2710), # ("udp", "sugoi.pomf.se", 2710),
#("udp", "tracker.coppersurfer.tk", 80), # ("udp", "tracker.coppersurfer.tk", 80),
("udp", "tracker.leechers-paradise.org", 6969), ("udp", "tracker.leechers-paradise.org", 6969),
("udp", "9.rarbg.com", 2710), ("udp", "9.rarbg.com", 2710),
#("udp", "www.eddie4.nl", 6969), # ("udp", "www.eddie4.nl", 6969),
#("udp", "trackr.sytes.net", 80), # ("udp", "trackr.sytes.net", 80),
#("udp", "tracker4.piratux.com", 6969) # ("udp", "tracker4.piratux.com", 6969)
#("http", "exodus.desync.com:80/announce", None), Off # ("http", "exodus.desync.com:80/announce", None), Off
("http", "tracker.aletorrenty.pl:2710/announce", None), ("http", "tracker.aletorrenty.pl:2710/announce", None),
#("http", "torrent.gresille.org/announce", None), # Slow # ("http", "torrent.gresille.org/announce", None), # Slow
#("http", "announce.torrentsmd.com:6969/announce", None), # Off # ("http", "announce.torrentsmd.com:6969/announce", None), # Off
#("http", "i.bandito.org/announce", None), # Off # ("http", "i.bandito.org/announce", None), # Off
("http", "retracker.telecom.kz/announce", None), ("http", "retracker.telecom.kz/announce", None),
("http", "torrent.gresille.org/announce", None), ("http", "torrent.gresille.org/announce", None),
@ -25,13 +28,15 @@ TRACKERS = [
@PluginManager.acceptPlugins @PluginManager.acceptPlugins
class SiteManager(object): class SiteManager(object):
def __init__(self): def __init__(self):
self.sites = None self.sites = None
# Load all sites from data/sites.json # Load all sites from data/sites.json
def load(self): def load(self):
from Site import Site from Site import Site
if not self.sites: self.sites = {} if not self.sites:
self.sites = {}
address_found = [] address_found = []
added = 0 added = 0
# Load new adresses # Load new adresses
@ -47,53 +52,48 @@ class SiteManager(object):
del(self.sites[address]) del(self.sites[address])
logging.debug("Removed site: %s" % address) logging.debug("Removed site: %s" % address)
if added: logging.debug("SiteManager added %s sites" % added) if added:
logging.debug("SiteManager added %s sites" % added)
# Checks if its a valid address # Checks if its a valid address
def isAddress(self, address): def isAddress(self, address):
return re.match("^[A-Za-z0-9]{26,35}$", address) return re.match("^[A-Za-z0-9]{26,35}$", address)
# Return: Site object or None if not found # Return: Site object or None if not found
def get(self, address): def get(self, address):
if self.sites == None: # Not loaded yet if self.sites is None: # Not loaded yet
self.load() self.load()
return self.sites.get(address) return self.sites.get(address)
# Return or create site and start download site files # Return or create site and start download site files
def need(self, address, all_file=True): def need(self, address, all_file=True):
from Site import Site from Site import Site
new = False
site = self.get(address) site = self.get(address)
if not site: # Site not exist yet if not site: # Site not exist yet
if not self.isAddress(address): return False # Not address: %s % address if not self.isAddress(address):
return False # Not address: %s % address
logging.debug("Added new site: %s" % address) logging.debug("Added new site: %s" % address)
site = Site(address) site = Site(address)
self.sites[address] = site self.sites[address] = site
if not site.settings["serving"]: # Maybe it was deleted before if not site.settings["serving"]: # Maybe it was deleted before
site.settings["serving"] = True site.settings["serving"] = True
site.saveSettings() site.saveSettings()
new = True
if all_file: site.download() if all_file:
site.download()
return site return site
def delete(self, address): def delete(self, address):
logging.debug("SiteManager deleted site: %s" % address) logging.debug("SiteManager deleted site: %s" % address)
del(self.sites[address]) del(self.sites[address])
# Lazy load sites # Lazy load sites
def list(self): def list(self):
if self.sites == None: # Not loaded yet if self.sites is None: # Not loaded yet
self.load() self.load()
return self.sites return self.sites
site_manager = SiteManager() # Singletone site_manager = SiteManager() # Singletone
peer_blacklist = [] # Dont download from this peers peer_blacklist = [] # Dont download from this peers

View file

@ -1,11 +1,19 @@
import os, re, shutil, json, time, sqlite3 import os
import re
import shutil
import json
import time
import sqlite3
import gevent.event import gevent.event
from Db import Db from Db import Db
from Debug import Debug from Debug import Debug
from Config import config from Config import config
class SiteStorage: class SiteStorage:
def __init__(self, site, allow_create=True): def __init__(self, site, allow_create=True):
self.site = site self.site = site
self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
@ -21,7 +29,6 @@ class SiteStorage:
else: else:
raise Exception("Directory not exists: %s" % self.directory) raise Exception("Directory not exists: %s" % self.directory)
# Load db from dbschema.json # Load db from dbschema.json
def openDb(self, check=True): def openDb(self, check=True):
schema = self.loadJson("dbschema.json") schema = self.loadJson("dbschema.json")
@ -32,34 +39,36 @@ class SiteStorage:
self.db = Db(schema, db_path) self.db = Db(schema, db_path)
if check and not self.db_checked: if check and not self.db_checked:
changed_tables = self.db.checkTables() changed_tables = self.db.checkTables()
if changed_tables: self.rebuildDb(delete_db=False) # Todo only update the changed table datas if changed_tables:
self.rebuildDb(delete_db=False) # Todo only update the changed table datas
def closeDb(self): def closeDb(self):
if self.db: self.db.close() if self.db:
self.db.close()
self.event_db_busy = None self.event_db_busy = None
self.db = None self.db = None
# Return db class # Return db class
def getDb(self): def getDb(self):
if not self.db: if not self.db:
self.log.debug("No database, waiting for dbschema.json...") self.log.debug("No database, waiting for dbschema.json...")
self.site.needFile("dbschema.json", priority=1) self.site.needFile("dbschema.json", priority=1)
self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist
if self.has_db: self.openDb() if self.has_db:
self.openDb()
return self.db return self.db
# Rebuild sql cache # Rebuild sql cache
def rebuildDb(self, delete_db=True): def rebuildDb(self, delete_db=True):
self.has_db = self.isFile("dbschema.json") self.has_db = self.isFile("dbschema.json")
if not self.has_db: return False if not self.has_db:
return False
self.event_db_busy = gevent.event.AsyncResult() self.event_db_busy = gevent.event.AsyncResult()
schema = self.loadJson("dbschema.json") schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"]) db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path) and delete_db: if os.path.isfile(db_path) and delete_db:
if self.db: self.db.close() # Close db if open if self.db:
self.db.close() # Close db if open
self.log.info("Deleting %s" % db_path) self.log.info("Deleting %s" % db_path)
try: try:
os.unlink(db_path) os.unlink(db_path)
@ -77,24 +86,27 @@ class SiteStorage:
for content_inner_path, content in self.site.content_manager.contents.items(): for content_inner_path, content in self.site.content_manager.contents.items():
content_path = self.getPath(content_inner_path) content_path = self.getPath(content_inner_path)
if os.path.isfile(content_path): # Missing content.json file if os.path.isfile(content_path): # Missing content.json file
if self.db.loadJson(content_path, cur=cur): found += 1 if self.db.loadJson(content_path, cur=cur):
found += 1
else: else:
self.log.error("[MISSING] %s" % content_inner_path) self.log.error("[MISSING] %s" % content_inner_path)
for file_relative_path in content["files"].keys(): for file_relative_path in content["files"].keys():
if not file_relative_path.endswith(".json"): continue # We only interesed in json files if not file_relative_path.endswith(".json"):
file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json continue # We only interesed in json files
content_inner_path_dir = self.site.content_manager.toDir(content_inner_path) # Content.json dir relative to site
file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading / file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path) file_path = self.getPath(file_inner_path)
if os.path.isfile(file_path): if os.path.isfile(file_path):
if self.db.loadJson(file_path, cur=cur): found += 1 if self.db.loadJson(file_path, cur=cur):
found += 1
else: else:
self.log.error("[MISSING] %s" % file_inner_path) self.log.error("[MISSING] %s" % file_inner_path)
cur.execute("END") cur.execute("END")
self.log.info("Imported %s data file in %ss" % (found, time.time()-s)) self.log.info("Imported %s data file in %ss" % (found, time.time() - s))
self.event_db_busy.set(True) # Event done, notify waiters self.event_db_busy.set(True) # Event done, notify waiters
self.event_db_busy = None # Clear event self.event_db_busy = None # Clear event
# Execute sql query or rebuild on dberror # Execute sql query or rebuild on dberror
def query(self, query, params=None): def query(self, query, params=None):
if self.event_db_busy: # Db not ready for queries if self.event_db_busy: # Db not ready for queries
@ -111,17 +123,14 @@ class SiteStorage:
raise err raise err
return res return res
# Open file object # Open file object
def open(self, inner_path, mode="rb"): def open(self, inner_path, mode="rb"):
return open(self.getPath(inner_path), mode) return open(self.getPath(inner_path), mode)
# Open file object # Open file object
def read(self, inner_path, mode="r"): def read(self, inner_path, mode="r"):
return open(self.getPath(inner_path), mode).read() return open(self.getPath(inner_path), mode).read()
# Write content to file # Write content to file
def write(self, inner_path, content): def write(self, inner_path, content):
file_path = self.getPath(inner_path) file_path = self.getPath(inner_path)
@ -139,7 +148,6 @@ class SiteStorage:
del content del content
self.onUpdated(inner_path) self.onUpdated(inner_path)
# Site content updated # Site content updated
def onUpdated(self, inner_path): def onUpdated(self, inner_path):
file_path = self.getPath(inner_path) file_path = self.getPath(inner_path)
@ -155,7 +163,6 @@ class SiteStorage:
self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err))) self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
self.closeDb() self.closeDb()
# Load and parse json file # Load and parse json file
def loadJson(self, inner_path): def loadJson(self, inner_path):
with self.open(inner_path) as file: with self.open(inner_path) as file:
@ -165,18 +172,18 @@ class SiteStorage:
def writeJson(self, inner_path, data): def writeJson(self, inner_path, data):
content = json.dumps(data, indent=2, sort_keys=True) content = json.dumps(data, indent=2, sort_keys=True)
# Make it a little more compact by removing unnecessary white space # Make it a little more compact by removing unnecessary white space
def compact_list(match): def compact_list(match):
return "[ "+match.group(1).strip()+" ]" return "[ " + match.group(1).strip() + " ]"
def compact_dict(match): def compact_dict(match):
return "{ "+match.group(1).strip()+" }" return "{ " + match.group(1).strip() + " }"
content = re.sub("\[([^,\{\[]{10,100}?)\]", compact_list, content, flags=re.DOTALL) content = re.sub("\[([^,\{\[]{10,100}?)\]", compact_list, content, flags=re.DOTALL)
content = re.sub("\{([^,\[\{]{10,100}?)\}", compact_dict, content, flags=re.DOTALL) content = re.sub("\{([^,\[\{]{10,100}?)\}", compact_dict, content, flags=re.DOTALL)
# Write to disk # Write to disk
self.write(inner_path, content) self.write(inner_path, content)
# Get file size # Get file size
def getSize(self, inner_path): def getSize(self, inner_path):
path = self.getPath(inner_path) path = self.getPath(inner_path)
@ -185,29 +192,24 @@ class SiteStorage:
else: else:
return 0 return 0
# File exist # File exist
def isFile(self, inner_path): def isFile(self, inner_path):
return os.path.isfile(self.getPath(inner_path)) return os.path.isfile(self.getPath(inner_path))
# Dir exist # Dir exist
def isDir(self, inner_path): def isDir(self, inner_path):
return os.path.isdir(self.getPath(inner_path)) return os.path.isdir(self.getPath(inner_path))
# Security check and return path of site's file # Security check and return path of site's file
def getPath(self, inner_path): def getPath(self, inner_path):
inner_path = inner_path.replace("\\", "/") # Windows separator fix inner_path = inner_path.replace("\\", "/") # Windows separator fix
inner_path = re.sub("^%s/" % re.escape(self.directory), "", inner_path) # Remove site directory if begins with it inner_path = re.sub("^%s/" % re.escape(self.directory), "", inner_path) # Remove site directory if begins with it
file_path = self.directory+"/"+inner_path file_path = self.directory + "/" + inner_path
allowed_dir = os.path.abspath(self.directory) # Only files within this directory allowed allowed_dir = os.path.abspath(self.directory) # Only files within this directory allowed
if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
raise Exception("File not allowed: %s" % file_path) raise Exception("File not allowed: %s" % file_path)
return file_path return file_path
# Verify all files sha512sum using content.json # Verify all files sha512sum using content.json
def verifyFiles(self, quick_check=False): # Fast = using file size def verifyFiles(self, quick_check=False): # Fast = using file size
bad_files = [] bad_files = []
@ -219,7 +221,7 @@ class SiteStorage:
self.log.debug("[MISSING] %s" % content_inner_path) self.log.debug("[MISSING] %s" % content_inner_path)
bad_files.append(content_inner_path) bad_files.append(content_inner_path)
for file_relative_path in content["files"].keys(): for file_relative_path in content["files"].keys():
file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading / file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path) file_path = self.getPath(file_inner_path)
if not os.path.isfile(file_path): if not os.path.isfile(file_path):
@ -235,20 +237,21 @@ class SiteStorage:
if not ok: if not ok:
self.log.debug("[CHANGED] %s" % file_inner_path) self.log.debug("[CHANGED] %s" % file_inner_path)
bad_files.append(file_inner_path) bad_files.append(file_inner_path)
self.log.debug("%s verified: %s files, quick_check: %s, bad files: %s" % (content_inner_path, len(content["files"]), quick_check, bad_files)) self.log.debug(
"%s verified: %s files, quick_check: %s, bad files: %s" %
(content_inner_path, len(content["files"]), quick_check, bad_files)
)
return bad_files return bad_files
# Check and try to fix site files integrity # Check and try to fix site files integrity
def checkFiles(self, quick_check=True): def checkFiles(self, quick_check=True):
s = time.time() s = time.time()
bad_files = self.verifyFiles(quick_check) bad_files = self.verifyFiles(quick_check)
if bad_files: if bad_files:
for bad_file in bad_files: for bad_file in bad_files:
self.site.bad_files[bad_file] = self.site.bad_files.get("bad_file", 0)+1 self.site.bad_files[bad_file] = self.site.bad_files.get("bad_file", 0) + 1
self.log.debug("Checked files in %.2fs... Quick:%s" % (time.time()-s, quick_check)) self.log.debug("Checked files in %.2fs... Quick:%s" % (time.time() - s, quick_check))
# Delete site's all file # Delete site's all file
def deleteFiles(self): def deleteFiles(self):
@ -258,31 +261,33 @@ class SiteStorage:
try: try:
schema = self.loadJson("dbschema.json") schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"]) db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path): os.unlink(db_path) if os.path.isfile(db_path):
os.unlink(db_path)
except Exception, err: except Exception, err:
self.log.error("Db file delete error: %s" % err) self.log.error("Db file delete error: %s" % err)
self.log.debug("Deleting files from content.json...") self.log.debug("Deleting files from content.json...")
files = [] # Get filenames files = [] # Get filenames
for content_inner_path, content in self.site.content_manager.contents.items(): for content_inner_path, content in self.site.content_manager.contents.items():
files.append(content_inner_path) files.append(content_inner_path)
for file_relative_path in content["files"].keys(): for file_relative_path in content["files"].keys():
file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path) files.append(file_inner_path)
for inner_path in files: for inner_path in files:
path = self.getPath(inner_path) path = self.getPath(inner_path)
if os.path.isfile(path): os.unlink(path) if os.path.isfile(path):
os.unlink(path)
self.log.debug("Deleting empty dirs...") self.log.debug("Deleting empty dirs...")
for root, dirs, files in os.walk(self.directory, topdown=False): for root, dirs, files in os.walk(self.directory, topdown=False):
for dir in dirs: for dir in dirs:
path = os.path.join(root,dir) path = os.path.join(root, dir)
if os.path.isdir(path) and os.listdir(path) == []: if os.path.isdir(path) and os.listdir(path) == []:
os.removedirs(path) os.removedirs(path)
self.log.debug("Removing %s" % path) self.log.debug("Removing %s" % path)
if os.path.isdir(self.directory) and os.listdir(self.directory) == []: os.removedirs(self.directory) # Remove sites directory if empty if os.path.isdir(self.directory) and os.listdir(self.directory) == []:
os.removedirs(self.directory) # Remove sites directory if empty
if os.path.isdir(self.directory): if os.path.isdir(self.directory):
self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory) self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory)

View file

@ -0,0 +1,140 @@
# Benchmark script (Python 2): measures msgpack request/response throughput
# against a local benchmark server, comparing plain sockets, gevent
# multi-threading and ZeroMQ transports.
# NOTE(review): assumes a msgpack-speaking benchmark server is listening on
# localhost:1234 (and a direct ZeroMQ endpoint on 1233) -- run the companion
# server script first; this file has no server of its own.
import time
import socket
import msgpack

print "Connecting..."
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("localhost", 1234))

# Test 1: raw round-trip time for 10000 Ping commands, response left undecoded.
print "1 Threaded: Send, receive 10000 ping request...",
s = time.time()
for i in range(10000):
    sock.sendall(msgpack.packb({"cmd": "Ping"}))
    req = sock.recv(16 * 1024)
print time.time() - s, repr(req), time.time() - s

# Test 2: same as above, but every response is also decoded through a
# streaming msgpack Unpacker; reqs counts decoded messages.
print "1 Threaded: Send, receive, decode 10000 ping request...",
s = time.time()
unpacker = msgpack.Unpacker()
reqs = 0
for i in range(10000):
    sock.sendall(msgpack.packb({"cmd": "Ping"}))
    unpacker.feed(sock.recv(16 * 1024))
    for req in unpacker:
        reqs += 1
print "Found:", req, "x", reqs, time.time() - s

# Test 3: measures connection setup cost -- the socket is closed and
# re-opened after every single request.
print "1 Threaded: Send, receive, decode, reconnect 1000 ping request...",
s = time.time()
unpacker = msgpack.Unpacker()
reqs = 0
for i in range(1000):
    sock.sendall(msgpack.packb({"cmd": "Ping"}))
    unpacker.feed(sock.recv(16 * 1024))
    for req in unpacker:
        reqs += 1
    sock.close()
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("localhost", 1234))
print "Found:", req, "x", reqs, time.time() - s

# Test 4: large (10k) payloads; keep recv()-ing and feeding the unpacker
# until one complete message has been decoded per request.
print "1 Threaded: Request, receive, decode 10000 x 10k data request...",
s = time.time()
unpacker = msgpack.Unpacker()
reqs = 0
for i in range(10000):
    sock.sendall(msgpack.packb({"cmd": "Bigdata"}))
    # Older newline-delimited StringIO-based receive loop, kept disabled
    # for comparison (commented out via a throwaway string literal):
    """buff = StringIO()
    data = sock.recv(16*1024)
    buff.write(data)
    if not data:
        break
    while not data.endswith("\n"):
        data = sock.recv(16*1024)
        if not data: break
        buff.write(data)
    req = msgpack.unpackb(buff.getvalue().strip("\n"))
    reqs += 1"""
    req_found = False
    while not req_found:
        buff = sock.recv(16 * 1024)
        unpacker.feed(buff)
        for req in unpacker:
            reqs += 1
            req_found = True
            break  # Only process one request

print "Found:", len(req["res"]), "x", reqs, time.time() - s

# Test 5: same big-data workload fanned out over 10 concurrent gevent
# greenlets, each with its own connection and unpacker; reqs/req are
# shared globals so the final print can report the combined totals.
print "10 Threaded: Request, receive, decode 10000 x 10k data request...",
import gevent
s = time.time()
reqs = 0
req = None


def requester():
    # One greenlet worker: 1000 Bigdata requests on a private connection.
    global reqs, req
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("localhost", 1234))
    unpacker = msgpack.Unpacker()
    for i in range(1000):
        sock.sendall(msgpack.packb({"cmd": "Bigdata"}))
        req_found = False
        while not req_found:
            buff = sock.recv(16 * 1024)
            unpacker.feed(buff)
            for req in unpacker:
                reqs += 1
                req_found = True
                break  # Only process one request


threads = []
for i in range(10):
    threads.append(gevent.spawn(requester))
gevent.joinall(threads)
print "Found:", len(req["res"]), "x", reqs, time.time() - s

# Test 6: ZeroMQ REQ/REP transport, small Ping payloads.
print "1 Threaded: ZeroMQ Send, receive 1000 ping request...",
s = time.time()
import zmq.green as zmq
c = zmq.Context()
zmq_sock = c.socket(zmq.REQ)
zmq_sock.connect('tcp://127.0.0.1:1234')
for i in range(1000):
    zmq_sock.send(msgpack.packb({"cmd": "Ping"}))
    req = zmq_sock.recv(16 * 1024)
print "Found:", req, time.time() - s

# Test 7: ZeroMQ REQ/REP transport, 10k payloads, decoded per reply.
print "1 Threaded: ZeroMQ Send, receive 1000 x 10k data request...",
s = time.time()
import zmq.green as zmq
c = zmq.Context()
zmq_sock = c.socket(zmq.REQ)
zmq_sock.connect('tcp://127.0.0.1:1234')
for i in range(1000):
    zmq_sock.send(msgpack.packb({"cmd": "Bigdata"}))
    req = msgpack.unpackb(zmq_sock.recv(1024 * 1024))
print "Found:", len(req["res"]), time.time() - s

# Test 8: same as test 7 but against the dedicated ZeroMQ server on port
# 1233 (bypassing the socket-based server's ZeroMQ bridge).
print "1 Threaded: direct ZeroMQ Send, receive 1000 x 10k data request...",
s = time.time()
import zmq.green as zmq
c = zmq.Context()
zmq_sock = c.socket(zmq.REQ)
zmq_sock.connect('tcp://127.0.0.1:1233')
for i in range(1000):
    zmq_sock.send(msgpack.packb({"cmd": "Bigdata"}))
    req = msgpack.unpackb(zmq_sock.recv(1024 * 1024))
print "Found:", len(req["res"]), time.time() - s

View file

@ -1,11 +1,16 @@
#!/usr/bin/python2 #!/usr/bin/python2
from gevent import monkey; monkey.patch_all() from gevent import monkey
import os, time, sys, socket, ssl monkey.patch_all()
import os
import time
import sys
import socket
import ssl
sys.path.append(os.path.abspath("src")) # Imports relative to src dir sys.path.append(os.path.abspath("src")) # Imports relative to src dir
import cStringIO as StringIO import cStringIO as StringIO
import gevent import gevent
from gevent.queue import Queue, Empty, JoinableQueue
from gevent.server import StreamServer from gevent.server import StreamServer
from gevent.pool import Pool from gevent.pool import Pool
from util import SslPatch from util import SslPatch
@ -13,25 +18,31 @@ from util import SslPatch
# Server # Server
socks = [] socks = []
data = os.urandom(1024*100) data = os.urandom(1024 * 100)
data += "\n" data += "\n"
def handle(sock_raw, addr): def handle(sock_raw, addr):
socks.append(sock_raw) socks.append(sock_raw)
sock = sock_raw sock = sock_raw
#sock = ctx.wrap_socket(sock, server_side=True) # sock = ctx.wrap_socket(sock, server_side=True)
#if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16": # if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
# sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem', certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1) # sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem',
#fp = os.fdopen(sock.fileno(), 'rb', 1024*512) # certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
# fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
try: try:
while True: while True:
line = sock.recv(16*1024) line = sock.recv(16 * 1024)
if not line: break if not line:
break
if line == "bye\n": if line == "bye\n":
break break
elif line == "gotssl\n": elif line == "gotssl\n":
sock.sendall("yes\n") sock.sendall("yes\n")
sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='data/key-rsa.pem', certfile='data/cert-rsa.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1) sock = gevent.ssl.wrap_socket(
sock_raw, server_side=True, keyfile='data/key-rsa.pem', certfile='data/cert-rsa.pem',
ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1
)
else: else:
sock.sendall(data) sock.sendall(data)
except Exception, err: except Exception, err:
@ -44,7 +55,7 @@ def handle(sock_raw, addr):
socks.remove(sock_raw) socks.remove(sock_raw)
pool = Pool(1000) # do not accept more than 10000 connections pool = Pool(1000) # do not accept more than 10000 connections
server = StreamServer(('127.0.0.1', 1234), handle) # server = StreamServer(('127.0.0.1', 1234), handle)
server.start() server.start()
@ -54,36 +65,38 @@ server.start()
total_num = 0 total_num = 0
total_bytes = 0 total_bytes = 0
clipher = None clipher = None
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:" + \
"!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
# ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) # ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
def getData(): def getData():
global total_num, total_bytes, clipher global total_num, total_bytes, clipher
data = None data = None
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#sock = socket.ssl(s) # sock = socket.ssl(s)
#sock = ssl.wrap_socket(sock) # sock = ssl.wrap_socket(sock)
sock.connect(("127.0.0.1", 1234)) sock.connect(("127.0.0.1", 1234))
#sock.do_handshake() # sock.do_handshake()
#clipher = sock.cipher() # clipher = sock.cipher()
sock.send("gotssl\n") sock.send("gotssl\n")
if sock.recv(128) == "yes\n": if sock.recv(128) == "yes\n":
sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1) sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
sock.do_handshake() sock.do_handshake()
clipher = sock.cipher() clipher = sock.cipher()
for req in range(100): for req in range(100):
sock.sendall("req\n") sock.sendall("req\n")
buff = StringIO.StringIO() buff = StringIO.StringIO()
data = sock.recv(16*1024) data = sock.recv(16 * 1024)
buff.write(data) buff.write(data)
if not data: if not data:
break break
while not data.endswith("\n"): while not data.endswith("\n"):
data = sock.recv(16*1024) data = sock.recv(16 * 1024)
if not data: break if not data:
break
buff.write(data) buff.write(data)
total_num += 1 total_num += 1
total_bytes += buff.tell() total_bytes += buff.tell()
@ -95,15 +108,18 @@ def getData():
s = time.time() s = time.time()
def info(): def info():
import psutil, os import psutil
import os
process = psutil.Process(os.getpid()) process = psutil.Process(os.getpid())
if "memory_info" in dir(process): if "memory_info" in dir(process):
memory_info = process.memory_info memory_info = process.memory_info
else: else:
memory_info = process.get_memory_info memory_info = process.get_memory_info
while 1: while 1:
print total_num, "req", (total_bytes/1024), "kbytes", "transfered in", time.time()-s, "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20) print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s,
print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)
time.sleep(1) time.sleep(1)
gevent.spawn(info) gevent.spawn(info)
@ -115,7 +131,7 @@ for test in range(10):
gevent.joinall(clients) gevent.joinall(clients)
print total_num, "req", (total_bytes/1024), "kbytes", "transfered in", time.time()-s print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s
# Separate client/server process: # Separate client/server process:
# 10*10*100: # 10*10*100:

View file

@ -1,4 +1,10 @@
import time, re, os, mimetypes, json, cgi import time
import re
import os
import mimetypes
import json
import cgi
from Config import config from Config import config
from Site import SiteManager from Site import SiteManager
from User import UserManager from User import UserManager
@ -16,15 +22,25 @@ status_texts = {
@PluginManager.acceptPlugins @PluginManager.acceptPlugins
class UiRequest(object): class UiRequest(object):
def __init__(self, server, get, env, start_response): def __init__(self, server, get, env, start_response):
if server: if server:
self.server = server self.server = server
self.log = server.log self.log = server.log
self.get = get # Get parameters self.get = get # Get parameters
self.env = env # Enviroment settings self.env = env # Enviroment settings
self.start_response = start_response # Start response function self.start_response = start_response # Start response function
self.user = None self.user = None
# Return posted variables as dict
def getPosted(self):
if self.env['REQUEST_METHOD'] == "POST":
return dict(cgi.parse_qsl(
self.env['wsgi.input'].readline().decode()
))
else:
return {}
# Call the request handler function base on path # Call the request handler function base on path
def route(self, path): def route(self, path):
@ -57,22 +73,19 @@ class UiRequest(object):
if body: if body:
return body return body
else: else:
func = getattr(self, "action"+path.lstrip("/"), None) # Check if we have action+request_path function func = getattr(self, "action" + path.lstrip("/"), None) # Check if we have action+request_path function
if func: if func:
return func() return func()
else: else:
return self.error404(path) return self.error404(path)
# The request is proxied by chrome extension # The request is proxied by chrome extension
def isProxyRequest(self): def isProxyRequest(self):
return self.env["PATH_INFO"].startswith("http://") return self.env["PATH_INFO"].startswith("http://")
def isAjaxRequest(self): def isAjaxRequest(self):
return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest" return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"
# Get mime by filename # Get mime by filename
def getContentType(self, file_name): def getContentType(self, file_name):
content_type = mimetypes.guess_type(file_name)[0] content_type = mimetypes.guess_type(file_name)[0]
@ -83,7 +96,6 @@ class UiRequest(object):
content_type = "application/octet-stream" content_type = "application/octet-stream"
return content_type return content_type
# Returns: <dict> Cookies based on self.env # Returns: <dict> Cookies based on self.env
def getCookies(self): def getCookies(self):
raw_cookies = self.env.get('HTTP_COOKIE') raw_cookies = self.env.get('HTTP_COOKIE')
@ -93,43 +105,44 @@ class UiRequest(object):
else: else:
return {} return {}
def getCurrentUser(self): def getCurrentUser(self):
if self.user: return self.user # Cache if self.user:
return self.user # Cache
self.user = UserManager.user_manager.get() # Get user self.user = UserManager.user_manager.get() # Get user
if not self.user: if not self.user:
self.user = UserManager.user_manager.create() self.user = UserManager.user_manager.create()
return self.user return self.user
# Send response headers # Send response headers
def sendHeader(self, status=200, content_type="text/html", extra_headers=[]): def sendHeader(self, status=200, content_type="text/html", extra_headers=[]):
if content_type == "text/html": content_type = "text/html; charset=utf-8" if content_type == "text/html":
content_type = "text/html; charset=utf-8"
headers = [] headers = []
headers.append(("Version", "HTTP/1.1")) headers.append(("Version", "HTTP/1.1"))
headers.append(("Access-Control-Allow-Origin", "*")) # Allow json access headers.append(("Access-Control-Allow-Origin", "*")) # Allow json access
if self.env["REQUEST_METHOD"] == "OPTIONS": if self.env["REQUEST_METHOD"] == "OPTIONS":
headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept")) # Allow json access # Allow json access
headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept"))
if (self.env["REQUEST_METHOD"] == "OPTIONS" or not self.isAjaxRequest()) and status == 200 and (content_type == "text/css" or content_type.startswith("application") or self.env["REQUEST_METHOD"] == "OPTIONS" or content_type.startswith("image")): # Cache Css, Js, Image files for 10min cacheable_type = (
content_type == "text/css" or content_type.startswith("image") or
self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript"
)
if status == 200 and cacheable_type: # Cache Css, Js, Image files for 10min
headers.append(("Cache-Control", "public, max-age=600")) # Cache 10 min headers.append(("Cache-Control", "public, max-age=600")) # Cache 10 min
else: # Images, Css, Js else: # Images, Css, Js
headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all
#headers.append(("Cache-Control", "public, max-age=604800")) # Cache 1 week
headers.append(("Content-Type", content_type)) headers.append(("Content-Type", content_type))
for extra_header in extra_headers: for extra_header in extra_headers:
headers.append(extra_header) headers.append(extra_header)
return self.start_response(status_texts[status], headers) return self.start_response(status_texts[status], headers)
# Renders a template # Renders a template
def render(self, template_path, *args, **kwargs): def render(self, template_path, *args, **kwargs):
#template = SimpleTemplate(open(template_path), lookup=[os.path.dirname(template_path)])
#yield str(template.render(*args, **kwargs).encode("utf8"))
template = open(template_path).read().decode("utf8") template = open(template_path).read().decode("utf8")
return template.format(**kwargs).encode("utf8") return template.format(**kwargs).encode("utf8")
# - Actions - # - Actions -
# Redirect to an url # Redirect to an url
@ -137,40 +150,46 @@ class UiRequest(object):
self.start_response('301 Redirect', [('Location', url)]) self.start_response('301 Redirect', [('Location', url)])
yield "Location changed: %s" % url yield "Location changed: %s" % url
def actionIndex(self): def actionIndex(self):
return self.actionRedirect("/"+config.homepage) return self.actionRedirect("/" + config.homepage)
# Render a file from media with iframe site wrapper # Render a file from media with iframe site wrapper
def actionWrapper(self, path, extra_headers=None): def actionWrapper(self, path, extra_headers=None):
if not extra_headers: extra_headers = [] if not extra_headers:
if self.get.get("wrapper") == "False": return self.actionSiteMedia("/media"+path) # Only serve html files with frame extra_headers = []
if self.get.get("wrapper") == "False":
return self.actionSiteMedia("/media" + path) # Only serve html files with frame
match = re.match("/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path) match = re.match("/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
if match: if match:
address = match.group("address") address = match.group("address")
inner_path = match.group("inner_path").lstrip("/") inner_path = match.group("inner_path").lstrip("/")
if "." in inner_path and not inner_path.endswith(".html"): return self.actionSiteMedia("/media"+path) # Only serve html files with frame if "." in inner_path and not inner_path.endswith(".html"):
if self.env.get("HTTP_X_REQUESTED_WITH"): return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper return self.actionSiteMedia("/media" + path) # Only serve html files with frame
if self.env.get("HTTP_X_REQUESTED_WITH"):
return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper
file_inner_path = inner_path file_inner_path = inner_path
if not file_inner_path: file_inner_path = "index.html" # If inner path defaults to index.html if not file_inner_path:
file_inner_path = "index.html" # If inner path defaults to index.html
if not inner_path and not path.endswith("/"): inner_path = address+"/" # Fix relative resources loading if missing / end of site address if not inner_path and not path.endswith("/"):
inner_path = address + "/" # Fix relative resources loading if missing / end of site address
inner_path = re.sub(".*/(.+)", "\\1", inner_path) # Load innerframe relative to current url inner_path = re.sub(".*/(.+)", "\\1", inner_path) # Load innerframe relative to current url
site = SiteManager.site_manager.get(address) site = SiteManager.site_manager.get(address)
if site and site.content_manager.contents.get("content.json") and (not site.getReachableBadFiles() or site.settings["own"]): # Its downloaded or own if (
site and site.content_manager.contents.get("content.json") and
(not site.getReachableBadFiles() or site.settings["own"])
): # Its downloaded or own
title = site.content_manager.contents["content.json"]["title"] title = site.content_manager.contents["content.json"]["title"]
else: else:
title = "Loading %s..." % address title = "Loading %s..." % address
site = SiteManager.site_manager.need(address) # Start download site site = SiteManager.site_manager.need(address) # Start download site
if not site: return False if not site:
return False
#extra_headers.append(("X-Frame-Options", "DENY"))
self.sendHeader(extra_headers=extra_headers[:]) self.sendHeader(extra_headers=extra_headers[:])
@ -179,27 +198,31 @@ class UiRequest(object):
body_style = "" body_style = ""
meta_tags = "" meta_tags = ""
if self.env.get("QUERY_STRING"): query_string = "?"+self.env["QUERY_STRING"]+"&wrapper=False" if self.env.get("QUERY_STRING"):
else: query_string = "?wrapper=False" query_string = "?" + self.env["QUERY_STRING"] + "&wrapper=False"
else:
query_string = "?wrapper=False"
if self.isProxyRequest(): # Its a remote proxy request if self.isProxyRequest(): # Its a remote proxy request
if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1
server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
else: # Remote client, use SERVER_NAME as server's real address else: # Remote client, use SERVER_NAME as server's real address
server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
homepage = "http://zero/"+config.homepage homepage = "http://zero/" + config.homepage
else: # Use relative path else: # Use relative path
server_url = "" server_url = ""
homepage = "/"+config.homepage homepage = "/" + config.homepage
if site.content_manager.contents.get("content.json") : # Got content.json if site.content_manager.contents.get("content.json"): # Got content.json
content = site.content_manager.contents["content.json"] content = site.content_manager.contents["content.json"]
if content.get("background-color"): if content.get("background-color"):
body_style += "background-color: "+cgi.escape(site.content_manager.contents["content.json"]["background-color"], True)+";" body_style += "background-color: %s;" % \
cgi.escape(site.content_manager.contents["content.json"]["background-color"], True)
if content.get("viewport"): if content.get("viewport"):
meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True) meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)
return self.render("src/Ui/template/wrapper.html", return self.render(
"src/Ui/template/wrapper.html",
server_url=server_url, server_url=server_url,
inner_path=inner_path, inner_path=inner_path,
file_inner_path=file_inner_path, file_inner_path=file_inner_path,
@ -218,17 +241,16 @@ class UiRequest(object):
else: # Bad url else: # Bad url
return False return False
# Returns if media request allowed from that referer # Returns if media request allowed from that referer
def isMediaRequestAllowed(self, site_address, referer): def isMediaRequestAllowed(self, site_address, referer):
referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
return referer_path.startswith("/"+site_address) return referer_path.startswith("/" + site_address)
# Serve a media for site # Serve a media for site
def actionSiteMedia(self, path): def actionSiteMedia(self, path):
path = path.replace("/index.html/", "/") # Base Backward compatibility fix path = path.replace("/index.html/", "/") # Base Backward compatibility fix
if path.endswith("/"): path = path+"index.html" if path.endswith("/"):
path = path + "index.html"
match = re.match("/media/(?P<address>[A-Za-z0-9\._-]+)/(?P<inner_path>.*)", path) match = re.match("/media/(?P<address>[A-Za-z0-9\._-]+)/(?P<inner_path>.*)", path)
@ -242,22 +264,27 @@ class UiRequest(object):
file_path = "%s/%s/%s" % (config.data_dir, address, match.group("inner_path")) file_path = "%s/%s/%s" % (config.data_dir, address, match.group("inner_path"))
allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address)) # Only files within data/sitehash allowed allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address)) # Only files within data/sitehash allowed
data_dir = os.path.abspath("data") # No files from data/ allowed data_dir = os.path.abspath("data") # No files from data/ allowed
if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir) or allowed_dir == data_dir: # File not in allowed path if (
".." in file_path
or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir)
or allowed_dir == data_dir
): # File not in allowed path
return self.error403() return self.error403()
else: else:
if config.debug and file_path.split("/")[-1].startswith("all."): # When debugging merge *.css to all.css and *.js to all.js if config.debug and file_path.split("/")[-1].startswith("all."):
# If debugging merge *.css to all.css and *.js to all.js
site = self.server.sites.get(address) site = self.server.sites.get(address)
if site.settings["own"]: if site.settings["own"]:
from Debug import DebugMedia from Debug import DebugMedia
DebugMedia.merge(file_path) DebugMedia.merge(file_path)
if os.path.isfile(file_path): # File exits if os.path.isfile(file_path): # File exits
#self.sendHeader(content_type=self.getContentType(file_path)) # ?? Get Exception without this # self.sendHeader(content_type=self.getContentType(file_path)) # ?? Get Exception without this
return self.actionFile(file_path) return self.actionFile(file_path)
else: # File not exits, try to download else: # File not exits, try to download
site = SiteManager.site_manager.need(address, all_file=False) site = SiteManager.site_manager.need(address, all_file=False)
result = site.needFile(match.group("inner_path"), priority=1) # Wait until file downloads result = site.needFile(match.group("inner_path"), priority=1) # Wait until file downloads
if result: if result:
#self.sendHeader(content_type=self.getContentType(file_path)) # self.sendHeader(content_type=self.getContentType(file_path))
return self.actionFile(file_path) return self.actionFile(file_path)
else: else:
self.log.debug("File not found: %s" % match.group("inner_path")) self.log.debug("File not found: %s" % match.group("inner_path"))
@ -266,31 +293,33 @@ class UiRequest(object):
else: # Bad url else: # Bad url
return self.error404(path) return self.error404(path)
# Serve a media for ui # Serve a media for ui
def actionUiMedia(self, path): def actionUiMedia(self, path):
match = re.match("/uimedia/(?P<inner_path>.*)", path) match = re.match("/uimedia/(?P<inner_path>.*)", path)
if match: # Looks like a valid path if match: # Looks like a valid path
file_path = "src/Ui/media/%s" % match.group("inner_path") file_path = "src/Ui/media/%s" % match.group("inner_path")
allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed
if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
# File not in allowed path
return self.error403() return self.error403()
else: else:
if config.debug and match.group("inner_path").startswith("all."): # When debugging merge *.css to all.css and *.js to all.js if config.debug and match.group("inner_path").startswith("all."):
# If debugging merge *.css to all.css and *.js to all.js
from Debug import DebugMedia from Debug import DebugMedia
DebugMedia.merge(file_path) DebugMedia.merge(file_path)
return self.actionFile(file_path) return self.actionFile(file_path)
else: # Bad url else: # Bad url
return self.error400() return self.error400()
# Stream a file to client # Stream a file to client
def actionFile(self, file_path, block_size = 64*1024): def actionFile(self, file_path, block_size=64 * 1024):
if os.path.isfile(file_path): if os.path.isfile(file_path):
# Try to figure out content type by extension # Try to figure out content type by extension
content_type = self.getContentType(file_path) content_type = self.getContentType(file_path)
self.sendHeader(content_type = content_type) # TODO: Dont allow external access: extra_headers=[("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")] # TODO: Dont allow external access: extra_headers=
# [("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")]
self.sendHeader(content_type=content_type)
if self.env["REQUEST_METHOD"] != "OPTIONS": if self.env["REQUEST_METHOD"] != "OPTIONS":
file = open(file_path, "rb") file = open(file_path, "rb")
while 1: while 1:
@ -306,7 +335,6 @@ class UiRequest(object):
else: # File not exits else: # File not exits
yield self.error404(file_path) yield self.error404(file_path)
# On websocket connection # On websocket connection
def actionWebsocket(self): def actionWebsocket(self):
ws = self.env.get("wsgi.websocket") ws = self.env.get("wsgi.websocket")
@ -315,7 +343,8 @@ class UiRequest(object):
# Find site by wrapper_key # Find site by wrapper_key
site = None site = None
for site_check in self.server.sites.values(): for site_check in self.server.sites.values():
if site_check.settings["wrapper_key"] == wrapper_key: site = site_check if site_check.settings["wrapper_key"] == wrapper_key:
site = site_check
if site: # Correct wrapper key if site: # Correct wrapper key
user = self.getCurrentUser() user = self.getCurrentUser()
@ -325,7 +354,8 @@ class UiRequest(object):
ui_websocket = UiWebsocket(ws, site, self.server, user) ui_websocket = UiWebsocket(ws, site, self.server, user)
site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events
ui_websocket.start() ui_websocket.start()
for site_check in self.server.sites.values(): # Remove websocket from every site (admin sites allowed to join other sites event channels) for site_check in self.server.sites.values():
# Remove websocket from every site (admin sites allowed to join other sites event channels)
if ui_websocket in site_check.websockets: if ui_websocket in site_check.websockets:
site_check.websockets.remove(ui_websocket) site_check.websockets.remove(ui_websocket)
return "Bye." return "Bye."
@ -333,10 +363,9 @@ class UiRequest(object):
self.log.error("Wrapper key not found: %s" % wrapper_key) self.log.error("Wrapper key not found: %s" % wrapper_key)
return self.error403() return self.error403()
else: else:
start_response("400 Bad Request", []) self.start_response("400 Bad Request", [])
return "Not a websocket!" return "Not a websocket!"
# Debug last error # Debug last error
def actionDebug(self): def actionDebug(self):
# Raise last error from DebugHook # Raise last error from DebugHook
@ -348,7 +377,6 @@ class UiRequest(object):
self.sendHeader() self.sendHeader()
return "No error! :)" return "No error! :)"
# Just raise an error to get console # Just raise an error to get console
def actionConsole(self): def actionConsole(self):
import sys import sys
@ -356,19 +384,17 @@ class UiRequest(object):
main = sys.modules["main"] main = sys.modules["main"]
raise Exception("Here is your console") raise Exception("Here is your console")
# - Tests - # - Tests -
def actionTestStream(self): def actionTestStream(self):
self.sendHeader() self.sendHeader()
yield " "*1080 # Overflow browser's buffer yield " " * 1080 # Overflow browser's buffer
yield "He" yield "He"
time.sleep(1) time.sleep(1)
yield "llo!" yield "llo!"
yield "Running websockets: %s" % len(self.server.websockets) yield "Running websockets: %s" % len(self.server.websockets)
self.server.sendMessage("Hello!") self.server.sendMessage("Hello!")
# - Errors - # - Errors -
# Send bad request error # Send bad request error
@ -376,30 +402,27 @@ class UiRequest(object):
self.sendHeader(400) self.sendHeader(400)
return "Bad Request" return "Bad Request"
# You are not allowed to access this # You are not allowed to access this
def error403(self, message="Forbidden"): def error403(self, message="Forbidden"):
self.sendHeader(403) self.sendHeader(403)
return message return message
# Send file not found error # Send file not found error
def error404(self, path = None): def error404(self, path=None):
self.sendHeader(404) self.sendHeader(404)
return "Not Found: %s" % path.encode("utf8") return "Not Found: %s" % path.encode("utf8")
# Internal server error # Internal server error
def error500(self, message = ":("): def error500(self, message=":("):
self.sendHeader(500) self.sendHeader(500)
return "<h1>Server error</h1>%s" % cgi.escape(message) return "<h1>Server error</h1>%s" % cgi.escape(message)
# - Reload for eaiser developing - # - Reload for eaiser developing -
#def reload(): # def reload():
#import imp, sys # import imp, sys
#global UiWebsocket # global UiWebsocket
#UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket # UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket
#reload(sys.modules["User.UserManager"]) # reload(sys.modules["User.UserManager"])
#UserManager.reloadModule() # UserManager.reloadModule()
#self.user = UserManager.user_manager.getCurrent() # self.user = UserManager.user_manager.getCurrent()

View file

@ -1,7 +1,11 @@
import logging, time, cgi, string, random import logging
import time
import cgi
from gevent.pywsgi import WSGIServer from gevent.pywsgi import WSGIServer
from gevent.pywsgi import WSGIHandler from gevent.pywsgi import WSGIHandler
from lib.geventwebsocket.handler import WebSocketHandler from lib.geventwebsocket.handler import WebSocketHandler
from UiRequest import UiRequest from UiRequest import UiRequest
from Site import SiteManager from Site import SiteManager
from Config import config from Config import config
@ -10,13 +14,13 @@ from Debug import Debug
# Skip websocket handler if not necessary # Skip websocket handler if not necessary
class UiWSGIHandler(WSGIHandler): class UiWSGIHandler(WSGIHandler):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.server = args[2] self.server = args[2]
super(UiWSGIHandler, self).__init__(*args, **kwargs) super(UiWSGIHandler, self).__init__(*args, **kwargs)
self.args = args self.args = args
self.kwargs = kwargs self.kwargs = kwargs
def run_application(self): def run_application(self):
self.server.sockets[self.client_address] = self.socket self.server.sockets[self.client_address] = self.socket
if "HTTP_UPGRADE" in self.environ: # Websocket request if "HTTP_UPGRADE" in self.environ: # Websocket request
@ -31,7 +35,7 @@ class UiWSGIHandler(WSGIHandler):
del self.server.sockets[self.client_address] del self.server.sockets[self.client_address]
sys.modules["main"].DebugHook.handleError() sys.modules["main"].DebugHook.handleError()
else: # Standard HTTP request else: # Standard HTTP request
#print self.application.__class__.__name__ # print self.application.__class__.__name__
try: try:
super(UiWSGIHandler, self).run_application() super(UiWSGIHandler, self).run_application()
except Exception, err: except Exception, err:
@ -45,17 +49,15 @@ class UiWSGIHandler(WSGIHandler):
class UiServer: class UiServer:
def __init__(self): def __init__(self):
self.ip = config.ui_ip self.ip = config.ui_ip
self.port = config.ui_port self.port = config.ui_port
if self.ip == "*": self.ip = "" # Bind all if self.ip == "*":
#self.sidebar_websockets = [] # Sidebar websocket connections self.ip = "" # Bind all
#self.auth_key = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) # Global admin auth key
self.sites = SiteManager.site_manager.list() self.sites = SiteManager.site_manager.list()
self.log = logging.getLogger(__name__) self.log = logging.getLogger(__name__)
# Handle WSGI request # Handle WSGI request
def handleRequest(self, env, start_response): def handleRequest(self, env, start_response):
path = env["PATH_INFO"] path = env["PATH_INFO"]
@ -73,16 +75,15 @@ class UiServer:
logging.debug("UiRequest error: %s" % Debug.formatException(err)) logging.debug("UiRequest error: %s" % Debug.formatException(err))
return ui_request.error500("Err: %s" % Debug.formatException(err)) return ui_request.error500("Err: %s" % Debug.formatException(err))
# Reload the UiRequest class to prevent restarts in debug mode # Reload the UiRequest class to prevent restarts in debug mode
def reload(self): def reload(self):
global UiRequest global UiRequest
import imp, sys import imp
import sys
reload(sys.modules["User.UserManager"]) reload(sys.modules["User.UserManager"])
reload(sys.modules["Ui.UiWebsocket"]) reload(sys.modules["Ui.UiWebsocket"])
UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest
#UiRequest.reload() # UiRequest.reload()
# Bind and run the server # Bind and run the server
def start(self): def start(self):
@ -119,7 +120,6 @@ class UiServer:
self.server.serve_forever() self.server.serve_forever()
self.log.debug("Stopped.") self.log.debug("Stopped.")
def stop(self): def stop(self):
self.log.debug("Stopping...") self.log.debug("Stopping...")
# Close WS sockets # Close WS sockets
@ -133,11 +133,10 @@ class UiServer:
sock._sock.close() sock._sock.close()
sock.close() sock.close()
sock_closed += 1 sock_closed += 1
except Exception, err: except Exception:
pass pass
self.log.debug("Socket closed: %s" % sock_closed) self.log.debug("Socket closed: %s" % sock_closed)
self.server.socket.close() self.server.socket.close()
self.server.stop() self.server.stop()
time.sleep(1) time.sleep(1)

View file

@ -1,12 +1,20 @@
import json, gevent, time, sys, hashlib import json
import time
import sys
import hashlib
import gevent
from Config import config from Config import config
from Site import SiteManager from Site import SiteManager
from Debug import Debug from Debug import Debug
from util import QueryJson, RateLimit from util import QueryJson, RateLimit
from Plugin import PluginManager from Plugin import PluginManager
@PluginManager.acceptPlugins @PluginManager.acceptPlugins
class UiWebsocket(object): class UiWebsocket(object):
def __init__(self, ws, site, server, user): def __init__(self, ws, site, server, user):
self.ws = ws self.ws = ws
self.site = site self.site = site
@ -19,15 +27,27 @@ class UiWebsocket(object):
self.sending = False # Currently sending to client self.sending = False # Currently sending to client
self.send_queue = [] # Messages to send to client self.send_queue = [] # Messages to send to client
# Start listener loop # Start listener loop
def start(self): def start(self):
ws = self.ws ws = self.ws
if self.site.address == config.homepage and not self.site.page_requested: # Add open fileserver port message or closed port error to homepage at first request after start if self.site.address == config.homepage and not self.site.page_requested:
if sys.modules["main"].file_server.port_opened == True: # Add open fileserver port message or closed port error to homepage at first request after start
self.site.notifications.append(["done", "Congratulation, your port <b>"+str(config.fileserver_port)+"</b> is opened. <br>You are full member of ZeroNet network!", 10000]) if sys.modules["main"].file_server.port_opened is True:
elif sys.modules["main"].file_server.port_opened == False: self.site.notifications.append([
self.site.notifications.append(["error", "Your network connection is restricted. Please, open <b>"+str(config.fileserver_port)+"</b> port <br>on your router to become full member of ZeroNet network.", 0]) "done",
"Congratulation, your port <b>%s</b> is opened.<br>You are full member of ZeroNet network!" %
config.fileserver_port,
10000
])
elif sys.modules["main"].file_server.port_opened is False:
self.site.notifications.append([
"error",
"""
Your network connection is restricted. Please, open <b>%s</b> port<br>
on your router to become full member of ZeroNet network.
""" % config.fileserver_port,
0
])
self.site.page_requested = True # Dont add connection notification anymore self.site.page_requested = True # Dont add connection notification anymore
for notification in self.site.notifications: # Send pending notification messages for notification in self.site.notifications: # Send pending notification messages
@ -45,7 +65,6 @@ class UiWebsocket(object):
self.log.error("WebSocket error: %s" % Debug.formatException(err)) self.log.error("WebSocket error: %s" % Debug.formatException(err))
return "Bye." return "Bye."
# Event in a channel # Event in a channel
def event(self, channel, *params): def event(self, channel, *params):
if channel in self.channels: # We are joined to channel if channel in self.channels: # We are joined to channel
@ -56,24 +75,22 @@ class UiWebsocket(object):
site_info.update(params[1]) site_info.update(params[1])
self.cmd("setSiteInfo", site_info) self.cmd("setSiteInfo", site_info)
# Send response to client (to = message.id) # Send response to client (to = message.id)
def response(self, to, result): def response(self, to, result):
self.send({"cmd": "response", "to": to, "result": result}) self.send({"cmd": "response", "to": to, "result": result})
# Send a command # Send a command
def cmd(self, cmd, params={}, cb = None): def cmd(self, cmd, params={}, cb=None):
self.send({"cmd": cmd, "params": params}, cb) self.send({"cmd": cmd, "params": params}, cb)
# Encode to json and send message # Encode to json and send message
def send(self, message, cb = None): def send(self, message, cb=None):
message["id"] = self.next_message_id # Add message id to allow response message["id"] = self.next_message_id # Add message id to allow response
self.next_message_id += 1 self.next_message_id += 1
if cb: # Callback after client responsed if cb: # Callback after client responsed
self.waiting_cb[message["id"]] = cb self.waiting_cb[message["id"]] = cb
if self.sending: return # Already sending if self.sending:
return # Already sending
self.send_queue.append(message) self.send_queue.append(message)
try: try:
while self.send_queue: while self.send_queue:
@ -84,7 +101,6 @@ class UiWebsocket(object):
except Exception, err: except Exception, err:
self.log.debug("Websocket send error: %s" % Debug.formatException(err)) self.log.debug("Websocket send error: %s" % Debug.formatException(err))
# Handle incoming messages # Handle incoming messages
def handleRequest(self, data): def handleRequest(self, data):
req = json.loads(data) req = json.loads(data)
@ -96,7 +112,11 @@ class UiWebsocket(object):
permissions = permissions[:] permissions = permissions[:]
permissions.append("ADMIN") permissions.append("ADMIN")
admin_commands = ("sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteClone", "channelJoinAllsite", "serverUpdate", "certSet") admin_commands = (
"sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteClone",
"channelJoinAllsite",
"serverUpdate", "certSet"
)
if cmd == "response": # It's a response to a command if cmd == "response": # It's a response to a command
return self.actionResponse(req["to"], req["result"]) return self.actionResponse(req["to"], req["result"])
@ -117,7 +137,6 @@ class UiWebsocket(object):
else: else:
func(req["id"], params) func(req["id"], params)
# Format site info # Format site info
def formatSiteInfo(self, site, create_user=True): def formatSiteInfo(self, site, create_user=True):
content = site.content_manager.contents.get("content.json") content = site.content_manager.contents.get("content.json")
@ -125,9 +144,12 @@ class UiWebsocket(object):
content = content.copy() content = content.copy()
content["files"] = len(content.get("files", {})) content["files"] = len(content.get("files", {}))
content["includes"] = len(content.get("includes", {})) content["includes"] = len(content.get("includes", {}))
if "sign" in content: del(content["sign"]) if "sign" in content:
if "signs" in content: del(content["signs"]) del(content["sign"])
if "signers_sign" in content: del(content["signers_sign"]) if "signs" in content:
del(content["signs"])
if "signers_sign" in content:
del(content["signers_sign"])
settings = site.settings.copy() settings = site.settings.copy()
del settings["wrapper_key"] # Dont expose wrapper key del settings["wrapper_key"] # Dont expose wrapper key
@ -150,11 +172,12 @@ class UiWebsocket(object):
"workers": len(site.worker_manager.workers), "workers": len(site.worker_manager.workers),
"content": content "content": content
} }
if site.settings["own"]: ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey")) if site.settings["own"]:
if site.settings["serving"] and content: ret["peers"] += 1 # Add myself if serving ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey"))
if site.settings["serving"] and content:
ret["peers"] += 1 # Add myself if serving
return ret return ret
def formatServerInfo(self): def formatServerInfo(self):
return { return {
"ip_external": bool(sys.modules["main"].file_server.port_opened), "ip_external": bool(sys.modules["main"].file_server.port_opened),
@ -169,7 +192,6 @@ class UiWebsocket(object):
"plugins": PluginManager.plugin_manager.plugin_names "plugins": PluginManager.plugin_manager.plugin_names
} }
# - Actions - # - Actions -
# Do callback on response {"cmd": "response", "to": message_id, "result": result} # Do callback on response {"cmd": "response", "to": message_id, "result": result}
@ -179,33 +201,28 @@ class UiWebsocket(object):
else: else:
self.log.error("Websocket callback not found: %s, %s" % (to, result)) self.log.error("Websocket callback not found: %s, %s" % (to, result))
# Send a simple pong answer # Send a simple pong answer
def actionPing(self, to): def actionPing(self, to):
self.response(to, "pong") self.response(to, "pong")
# Send site details # Send site details
def actionSiteInfo(self, to, file_status = None): def actionSiteInfo(self, to, file_status=None):
ret = self.formatSiteInfo(self.site) ret = self.formatSiteInfo(self.site)
if file_status: # Client queries file status if file_status: # Client queries file status
if self.site.storage.isFile(file_status): # File exits, add event done if self.site.storage.isFile(file_status): # File exits, add event done
ret["event"] = ("file_done", file_status) ret["event"] = ("file_done", file_status)
self.response(to, ret) self.response(to, ret)
# Join to an event channel # Join to an event channel
def actionChannelJoin(self, to, channel): def actionChannelJoin(self, to, channel):
if channel not in self.channels: if channel not in self.channels:
self.channels.append(channel) self.channels.append(channel)
# Server variables # Server variables
def actionServerInfo(self, to): def actionServerInfo(self, to):
ret = self.formatServerInfo() ret = self.formatServerInfo()
self.response(to, ret) self.response(to, ret)
# Sign content.json # Sign content.json
def actionSiteSign(self, to, privatekey=None, inner_path="content.json"): def actionSiteSign(self, to, privatekey=None, inner_path="content.json"):
site = self.site site = self.site
@ -219,8 +236,10 @@ class UiWebsocket(object):
extend["cert_user_id"] = self.user.getCertUserId(site.address) extend["cert_user_id"] = self.user.getCertUserId(site.address)
extend["cert_sign"] = cert["cert_sign"] extend["cert_sign"] = cert["cert_sign"]
if (
if not site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path): not site.settings["own"] and
self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path)
):
return self.response(to, "Forbidden, you can only modify your own sites") return self.response(to, "Forbidden, you can only modify your own sites")
if privatekey == "stored": if privatekey == "stored":
privatekey = self.user.getSiteData(self.site.address).get("privatekey") privatekey = self.user.getSiteData(self.site.address).get("privatekey")
@ -230,10 +249,7 @@ class UiWebsocket(object):
# Signing # Signing
site.content_manager.loadContent(add_bad_files=False) # Reload content.json, ignore errors to make it up-to-date site.content_manager.loadContent(add_bad_files=False) # Reload content.json, ignore errors to make it up-to-date
signed = site.content_manager.sign(inner_path, privatekey, extend=extend) # Sign using private key sent by user signed = site.content_manager.sign(inner_path, privatekey, extend=extend) # Sign using private key sent by user
if signed: if not signed:
#if inner_path == "content_json": self.cmd("notification", ["done", "Private key correct, content signed!", 5000]) # Display message for 5 sec
pass
else:
self.cmd("notification", ["error", "Content sign failed: invalid private key."]) self.cmd("notification", ["error", "Content sign failed: invalid private key."])
self.response(to, "Site sign failed") self.response(to, "Site sign failed")
return return
@ -243,7 +259,6 @@ class UiWebsocket(object):
return inner_path return inner_path
# Sign and publish content.json # Sign and publish content.json
def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True): def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True):
if sign: if sign:
@ -257,39 +272,48 @@ class UiWebsocket(object):
self.site.saveSettings() self.site.saveSettings()
self.site.announce() self.site.announce()
event_name = "publish %s %s" % (self.site.address, inner_path) event_name = "publish %s %s" % (self.site.address, inner_path)
thread = RateLimit.callAsync(event_name, 7, self.site.publish, 5, inner_path) # Only publish once in 7 second to 5 peers thread = RateLimit.callAsync(event_name, 7, self.site.publish, 5, inner_path) # Only publish once in 7 second to 5 peers
notification = "linked" not in dir(thread) # Only display notification on first callback notification = "linked" not in dir(thread) # Only display notification on first callback
thread.linked = True thread.linked = True
thread.link(lambda thread: self.cbSitePublish(to, thread, notification)) # At the end callback with request id and thread thread.link(lambda thread: self.cbSitePublish(to, thread, notification)) # At the end callback with request id and thread
# Callback of site publish # Callback of site publish
def cbSitePublish(self, to, thread, notification=True): def cbSitePublish(self, to, thread, notification=True):
site = self.site site = self.site
published = thread.value published = thread.value
if published>0: # Successfuly published if published > 0: # Successfuly published
if notification: self.cmd("notification", ["done", "Content published to %s peers." % published, 5000]) if notification:
self.cmd("notification", ["done", "Content published to %s peers." % published, 5000])
self.response(to, "ok") self.response(to, "ok")
if notification: site.updateWebsocket() # Send updated site data to local websocket clients if notification:
site.updateWebsocket() # Send updated site data to local websocket clients
else: else:
if len(site.peers) == 0: if len(site.peers) == 0:
if sys.modules["main"].file_server.port_opened: if sys.modules["main"].file_server.port_opened:
if notification: self.cmd("notification", ["info", "No peers found, but your content is ready to access."]) if notification:
self.cmd("notification", ["info", "No peers found, but your content is ready to access."])
self.response(to, "ok") self.response(to, "ok")
else: else:
if notification: self.cmd("notification", ["info", "Your network connection is restricted. Please, open <b>"+str(config.fileserver_port)+"</b> port <br>on your router to make your site accessible for everyone."]) if notification:
self.cmd("notification", [
"info",
"""Your network connection is restricted. Please, open <b>%s</b> port <br>
on your router to make your site accessible for everyone.""" % config.fileserver_port
])
self.response(to, "Port not opened.") self.response(to, "Port not opened.")
else: else:
if notification: self.cmd("notification", ["error", "Content publish failed."]) if notification:
self.cmd("notification", ["error", "Content publish failed."])
self.response(to, "Content publish failed.") self.response(to, "Content publish failed.")
# Write a file to disk # Write a file to disk
def actionFileWrite(self, to, inner_path, content_base64): def actionFileWrite(self, to, inner_path, content_base64):
if not self.site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path): if (
not self.site.settings["own"] and
self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path)
):
return self.response(to, "Forbidden, you can only modify your own files") return self.response(to, "Forbidden, you can only modify your own files")
try: try:
@ -309,8 +333,6 @@ class UiWebsocket(object):
if ws != self: if ws != self:
ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]}) ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]})
# Find data in json files # Find data in json files
def actionFileQuery(self, to, dir_inner_path, query): def actionFileQuery(self, to, dir_inner_path, query):
# s = time.time() # s = time.time()
@ -319,7 +341,6 @@ class UiWebsocket(object):
# self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s)) # self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s))
return self.response(to, rows) return self.response(to, rows)
# Sql query # Sql query
def actionDbQuery(self, to, query, params=None, wait_for=None): def actionDbQuery(self, to, query, params=None, wait_for=None):
rows = [] rows = []
@ -332,17 +353,16 @@ class UiWebsocket(object):
rows.append(dict(row)) rows.append(dict(row))
return self.response(to, rows) return self.response(to, rows)
# Return file content # Return file content
def actionFileGet(self, to, inner_path, required=True): def actionFileGet(self, to, inner_path, required=True):
try: try:
if required: self.site.needFile(inner_path, priority=1) if required:
self.site.needFile(inner_path, priority=1)
body = self.site.storage.read(inner_path) body = self.site.storage.read(inner_path)
except: except:
body = None body = None
return self.response(to, body) return self.response(to, body)
def actionFileRules(self, to, inner_path): def actionFileRules(self, to, inner_path):
rules = self.site.content_manager.getRules(inner_path) rules = self.site.content_manager.getRules(inner_path)
if inner_path.endswith("content.json"): if inner_path.endswith("content.json"):
@ -353,20 +373,21 @@ class UiWebsocket(object):
rules["current_size"] = 0 rules["current_size"] = 0
return self.response(to, rules) return self.response(to, rules)
# Add certificate to user # Add certificate to user
def actionCertAdd(self, to, domain, auth_type, auth_user_name, cert): def actionCertAdd(self, to, domain, auth_type, auth_user_name, cert):
try: try:
res = self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert) res = self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert)
if res == True: if res is True:
self.cmd("notification", ["done", "New certificate added: <b>%s/%s@%s</b>." % (auth_type, auth_user_name, domain)]) self.cmd(
"notification",
["done", "New certificate added: <b>%s/%s@%s</b>." % (auth_type, auth_user_name, domain)]
)
self.response(to, "ok") self.response(to, "ok")
else: else:
self.response(to, "Not changed") self.response(to, "Not changed")
except Exception, err: except Exception, err:
self.response(to, {"error": err.message}) self.response(to, {"error": err.message})
# Select certificate for site # Select certificate for site
def actionCertSelect(self, to, accepted_domains=[]): def actionCertSelect(self, to, accepted_domains=[]):
accounts = [] accounts = []
@ -378,13 +399,12 @@ class UiWebsocket(object):
for domain, cert in self.user.certs.items(): for domain, cert in self.user.certs.items():
if auth_address == cert["auth_address"]: if auth_address == cert["auth_address"]:
active = domain active = domain
title = cert["auth_user_name"]+"@"+domain title = cert["auth_user_name"] + "@" + domain
if domain in accepted_domains: if domain in accepted_domains:
accounts.append([domain, title, ""]) accounts.append([domain, title, ""])
else: else:
accounts.append([domain, title, "disabled"]) accounts.append([domain, title, "disabled"])
# Render the html # Render the html
body = "<span style='padding-bottom: 5px; display: inline-block'>Select account you want to use in this site:</span>" body = "<span style='padding-bottom: 5px; display: inline-block'>Select account you want to use in this site:</span>"
# Accounts # Accounts
@ -399,10 +419,14 @@ class UiWebsocket(object):
more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domainains we not displayed yet more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domainains we not displayed yet
if more_domains: if more_domains:
# body+= "<small style='margin-top: 10px; display: block'>Accepted authorization providers by the site:</small>" # body+= "<small style='margin-top: 10px; display: block'>Accepted authorization providers by the site:</small>"
body+= "<div style='background-color: #F7F7F7; margin-right: -30px'>" body += "<div style='background-color: #F7F7F7; margin-right: -30px'>"
for domain in more_domains: for domain in more_domains:
body += "<a href='/%s' onclick='wrapper.gotoSite(this)' target='_blank' class='select'><small style='float: right; margin-right: 40px; margin-top: -1px'>Register &raquo;</small>%s</a>" % (domain, domain) body += """
body+= "</div>" <a href='/%s' onclick='wrapper.gotoSite(this)' target='_blank' class='select'>
<small style='float: right; margin-right: 40px; margin-top: -1px'>Register &raquo;</small>%s
</a>
""" % (domain, domain)
body += "</div>"
body += """ body += """
<script> <script>
@ -417,13 +441,11 @@ class UiWebsocket(object):
# Send the notification # Send the notification
self.cmd("notification", ["ask", body]) self.cmd("notification", ["ask", body])
# Set certificate that used for authenticate user for site # Set certificate that used for authenticate user for site
def actionCertSet(self, to, domain): def actionCertSet(self, to, domain):
self.user.setCert(self.site.address, domain) self.user.setCert(self.site.address, domain)
self.site.updateWebsocket(cert_changed=domain) self.site.updateWebsocket(cert_changed=domain)
# - Admin actions - # - Admin actions -
# List all site info # List all site info
@ -431,11 +453,11 @@ class UiWebsocket(object):
ret = [] ret = []
SiteManager.site_manager.load() # Reload sites SiteManager.site_manager.load() # Reload sites
for site in self.server.sites.values(): for site in self.server.sites.values():
if not site.content_manager.contents.get("content.json"): continue # Broken site if not site.content_manager.contents.get("content.json"):
continue # Broken site
ret.append(self.formatSiteInfo(site, create_user=False)) # Dont generate the auth_address on listing ret.append(self.formatSiteInfo(site, create_user=False)) # Dont generate the auth_address on listing
self.response(to, ret) self.response(to, ret)
# Join to an event channel on all sites # Join to an event channel on all sites
def actionChannelJoinAllsite(self, to, channel): def actionChannelJoinAllsite(self, to, channel):
if channel not in self.channels: # Add channel to channels if channel not in self.channels: # Add channel to channels
@ -445,7 +467,6 @@ class UiWebsocket(object):
if self not in site.websockets: if self not in site.websockets:
site.websockets.append(self) site.websockets.append(self)
# Update site content.json # Update site content.json
def actionSiteUpdate(self, to, address): def actionSiteUpdate(self, to, address):
site = self.server.sites.get(address) site = self.server.sites.get(address)
@ -454,7 +475,6 @@ class UiWebsocket(object):
else: else:
self.response(to, {"error": "Unknown site: %s" % address}) self.response(to, {"error": "Unknown site: %s" % address})
# Pause site serving # Pause site serving
def actionSitePause(self, to, address): def actionSitePause(self, to, address):
site = self.server.sites.get(address) site = self.server.sites.get(address)
@ -466,7 +486,6 @@ class UiWebsocket(object):
else: else:
self.response(to, {"error": "Unknown site: %s" % address}) self.response(to, {"error": "Unknown site: %s" % address})
# Resume site serving # Resume site serving
def actionSiteResume(self, to, address): def actionSiteResume(self, to, address):
site = self.server.sites.get(address) site = self.server.sites.get(address)
@ -479,7 +498,6 @@ class UiWebsocket(object):
else: else:
self.response(to, {"error": "Unknown site: %s" % address}) self.response(to, {"error": "Unknown site: %s" % address})
def actionSiteDelete(self, to, address): def actionSiteDelete(self, to, address):
site = self.server.sites.get(address) site = self.server.sites.get(address)
if site: if site:
@ -493,7 +511,6 @@ class UiWebsocket(object):
else: else:
self.response(to, {"error": "Unknown site: %s" % address}) self.response(to, {"error": "Unknown site: %s" % address})
def actionSiteClone(self, to, address): def actionSiteClone(self, to, address):
self.cmd("notification", ["info", "Cloning site..."]) self.cmd("notification", ["info", "Cloning site..."])
site = self.server.sites.get(address) site = self.server.sites.get(address)
@ -505,17 +522,14 @@ class UiWebsocket(object):
self.cmd("notification", ["done", "Site cloned<script>window.top.location = '/%s'</script>" % new_address]) self.cmd("notification", ["done", "Site cloned<script>window.top.location = '/%s'</script>" % new_address])
gevent.spawn(new_site.announce) gevent.spawn(new_site.announce)
def actionSiteSetLimit(self, to, size_limit): def actionSiteSetLimit(self, to, size_limit):
self.site.settings["size_limit"] = size_limit self.site.settings["size_limit"] = size_limit
self.site.saveSettings() self.site.saveSettings()
self.response(to, "Site size limit changed to %sMB" % size_limit) self.response(to, "Site size limit changed to %sMB" % size_limit)
self.site.download() self.site.download()
def actionServerUpdate(self, to): def actionServerUpdate(self, to):
self.cmd("updating") self.cmd("updating")
sys.modules["main"].update_after_shutdown = True sys.modules["main"].update_after_shutdown = True
sys.modules["main"].file_server.stop() sys.modules["main"].file_server.stop()
sys.modules["main"].ui_server.stop() sys.modules["main"].ui_server.stop()

View file

@ -1,4 +1,7 @@
import logging, json, time import logging
import json
import time
from Crypt import CryptBitcoin from Crypt import CryptBitcoin
from Plugin import PluginManager from Plugin import PluginManager
from Config import config from Config import config
@ -6,6 +9,7 @@ from Config import config
@PluginManager.acceptPlugins @PluginManager.acceptPlugins
class User(object): class User(object):
def __init__(self, master_address=None, master_seed=None, data={}): def __init__(self, master_address=None, master_seed=None, data={}):
if master_seed: if master_seed:
self.master_seed = master_seed self.master_seed = master_seed
@ -27,7 +31,8 @@ class User(object):
if self.master_address not in users: if self.master_address not in users:
users[self.master_address] = {} # Create if not exist users[self.master_address] = {} # Create if not exist
user_data = users[self.master_address] user_data = users[self.master_address]
if self.master_seed: user_data["master_seed"] = self.master_seed if self.master_seed:
user_data["master_seed"] = self.master_seed
user_data["sites"] = self.sites user_data["sites"] = self.sites
user_data["certs"] = self.certs user_data["certs"] = self.certs
open("%s/users.json" % config.data_dir, "w").write(json.dumps(users, indent=2, sort_keys=True)) open("%s/users.json" % config.data_dir, "w").write(json.dumps(users, indent=2, sort_keys=True))
@ -40,7 +45,8 @@ class User(object):
# Return: {"auth_address": "xxx", "auth_privatekey": "xxx"} # Return: {"auth_address": "xxx", "auth_privatekey": "xxx"}
def getSiteData(self, address, create=True): def getSiteData(self, address, create=True):
if address not in self.sites: # Generate new BIP32 child key based on site address if address not in self.sites: # Generate new BIP32 child key based on site address
if not create: return {"auth_address": None, "auth_privatekey": None} # Dont create user yet if not create:
return {"auth_address": None, "auth_privatekey": None} # Dont create user yet
s = time.time() s = time.time()
address_id = self.getAddressAuthIndex(address) # Convert site address to int address_id = self.getAddressAuthIndex(address) # Convert site address to int
auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id) auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
@ -49,17 +55,18 @@ class User(object):
"auth_privatekey": auth_privatekey "auth_privatekey": auth_privatekey
} }
self.save() self.save()
self.log.debug("Added new site: %s in %.3fs" % (address, time.time()-s)) self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s))
return self.sites[address] return self.sites[address]
# Get data for a new, unique site # Get data for a new, unique site
# Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}] # Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}]
def getNewSiteData(self): def getNewSiteData(self):
import random import random
bip32_index = random.randrange(2**256) % 100000000 bip32_index = random.randrange(2 ** 256) % 100000000
site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index) site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index)
site_address = CryptBitcoin.privatekeyToAddress(site_privatekey) site_address = CryptBitcoin.privatekeyToAddress(site_privatekey)
if site_address in self.sites: raise Exception("Random error: site exist!") if site_address in self.sites:
raise Exception("Random error: site exist!")
# Save to sites # Save to sites
self.getSiteData(site_address) self.getSiteData(site_address)
self.sites[site_address]["privatekey"] = site_privatekey self.sites[site_address]["privatekey"] = site_privatekey
@ -85,7 +92,8 @@ class User(object):
# Add cert for the user # Add cert for the user
def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign): def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
domain = domain.lower() domain = domain.lower()
auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0] # Find privatekey by auth address # Find privatekey by auth address
auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0]
cert_node = { cert_node = {
"auth_address": auth_address, "auth_address": auth_address,
"auth_privatekey": auth_privatekey, "auth_privatekey": auth_privatekey,
@ -95,7 +103,10 @@ class User(object):
} }
# Check if we have already cert for that domain and its not the same # Check if we have already cert for that domain and its not the same
if self.certs.get(domain) and self.certs[domain] != cert_node: if self.certs.get(domain) and self.certs[domain] != cert_node:
raise Exception("You already have certificate for this domain: %s/%s@%s" % (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain)) raise Exception(
"You already have certificate for this domain: %s/%s@%s" %
(self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain)
)
elif self.certs.get(domain) == cert_node: # Same, not updated elif self.certs.get(domain) == cert_node: # Same, not updated
return None return None
else: # Not exist yet, add else: # Not exist yet, add
@ -113,17 +124,19 @@ class User(object):
return site_data return site_data
# Get cert for the site address # Get cert for the site address
# Return: { "auth_address": ..., "auth_privatekey":..., "auth_type": "web", "auth_user_name": "nofish", "cert_sign": ... } or None # Return: { "auth_address":.., "auth_privatekey":.., "auth_type": "web", "auth_user_name": "nofish", "cert_sign":.. } or None
def getCert(self, address): def getCert(self, address):
site_data = self.getSiteData(address, create=False) site_data = self.getSiteData(address, create=False)
if not site_data or not "cert" in site_data: return None # Site dont have cert if not site_data or "cert" not in site_data:
return None # Site dont have cert
return self.certs.get(site_data["cert"]) return self.certs.get(site_data["cert"])
# Get cert user name for the site address # Get cert user name for the site address
# Return: user@certprovider.bit or None # Return: user@certprovider.bit or None
def getCertUserId(self, address): def getCertUserId(self, address):
site_data = self.getSiteData(address, create=False) site_data = self.getSiteData(address, create=False)
if not site_data or not "cert" in site_data: return None # Site dont have cert if not site_data or "cert" not in site_data:
return None # Site dont have cert
cert = self.certs.get(site_data["cert"]) cert = self.certs.get(site_data["cert"])
if cert: if cert:
return cert["auth_user_name"]+"@"+site_data["cert"] return cert["auth_user_name"] + "@" + site_data["cert"]

View file

@ -1,5 +1,4 @@
# Included modules # Included modules
import os
import json import json
import logging import logging
@ -75,9 +74,9 @@ def reloadModule():
import imp import imp
global User, UserManager, user_manager global User, UserManager, user_manager
User = imp.load_source("User", "src/User/User.py").User # Reload source User = imp.load_source("User", "src/User/User.py").User # Reload source
#module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module # module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module
#UserManager = module.UserManager # UserManager = module.UserManager
#user_manager = module.user_manager # user_manager = module.user_manager
# Reload users # Reload users
user_manager = UserManager() user_manager = UserManager()
user_manager.load() user_manager.load()

View file

@ -1,8 +1,12 @@
import gevent, time, logging, shutil, os import time
from Peer import Peer
import gevent
from Debug import Debug from Debug import Debug
class Worker(object): class Worker(object):
def __init__(self, manager, peer): def __init__(self, manager, peer):
self.manager = manager self.manager = manager
self.peer = peer self.peer = peer
@ -11,15 +15,12 @@ class Worker(object):
self.running = False self.running = False
self.thread = None self.thread = None
def __str__(self): def __str__(self):
return "Worker %s %s" % (self.manager.site.address_short, self.key) return "Worker %s %s" % (self.manager.site.address_short, self.key)
def __repr__(self): def __repr__(self):
return "<%s>" % self.__str__() return "<%s>" % self.__str__()
# Downloader thread # Downloader thread
def downloader(self): def downloader(self):
self.peer.hash_failed = 0 # Reset hash error counter self.peer.hash_failed = 0 # Reset hash error counter
@ -29,14 +30,15 @@ class Worker(object):
if not task: # Die, no more task if not task: # Die, no more task
self.manager.log.debug("%s: No task found, stopping" % self.key) self.manager.log.debug("%s: No task found, stopping" % self.key)
break break
if not task["time_started"]: task["time_started"] = time.time() # Task started now if not task["time_started"]:
task["time_started"] = time.time() # Task started now
if task["workers_num"] > 0: # Wait a bit if someone already working on it if task["workers_num"] > 0: # Wait a bit if someone already working on it
self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." % (self.key, task["inner_path"])) self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." % (self.key, task["inner_path"]))
time.sleep(1) time.sleep(1)
self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"])) self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"]))
if task["done"] == False: if task["done"] is False:
self.task = task self.task = task
site = task["site"] site = task["site"]
task["workers_num"] += 1 task["workers_num"] += 1
@ -45,28 +47,32 @@ class Worker(object):
except Exception, err: except Exception, err:
self.manager.log.debug("%s: getFile error: err" % (self.key, err)) self.manager.log.debug("%s: getFile error: err" % (self.key, err))
buff = None buff = None
if self.running == False: # Worker no longer needed or got killed if self.running is False: # Worker no longer needed or got killed
self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"]))
break break
if buff: # Download ok if buff: # Download ok
correct = site.content_manager.verifyFile(task["inner_path"], buff) correct = site.content_manager.verifyFile(task["inner_path"], buff)
else: # Download error else: # Download error
correct = False correct = False
if correct == True or correct == None: # Hash ok or same file if correct is True or correct is None: # Hash ok or same file
self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"])) self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"]))
if correct == True and task["done"] == False: # Save if changed and task not done yet if correct is True and task["done"] is False: # Save if changed and task not done yet
buff.seek(0) buff.seek(0)
file_path = site.storage.getPath(task["inner_path"])
site.storage.write(task["inner_path"], buff) site.storage.write(task["inner_path"], buff)
if task["done"] == False: self.manager.doneTask(task) if task["done"] is False:
self.manager.doneTask(task)
task["workers_num"] -= 1 task["workers_num"] -= 1
self.task = None self.task = None
else: # Hash failed else: # Hash failed
self.manager.log.debug("%s: Hash failed: %s, failed peers: %s" % (self.key, task["inner_path"], len(task["failed"]))) self.manager.log.debug(
"%s: Hash failed: %s, failed peers: %s" %
(self.key, task["inner_path"], len(task["failed"]))
)
task["failed"].append(self.peer) task["failed"].append(self.peer)
self.task = None self.task = None
self.peer.hash_failed += 1 self.peer.hash_failed += 1
if self.peer.hash_failed >= max(len(self.manager.tasks), 3): # More fails than tasks number but atleast 3: Broken peer if self.peer.hash_failed >= max(len(self.manager.tasks), 3):
# Broken peer: More fails than tasks number but atleast 3
break break
task["workers_num"] -= 1 task["workers_num"] -= 1
time.sleep(1) time.sleep(1)
@ -74,13 +80,11 @@ class Worker(object):
self.running = False self.running = False
self.manager.removeWorker(self) self.manager.removeWorker(self)
# Start the worker # Start the worker
def start(self): def start(self):
self.running = True self.running = True
self.thread = gevent.spawn(self.downloader) self.thread = gevent.spawn(self.downloader)
# Skip current task # Skip current task
def skip(self): def skip(self):
self.manager.log.debug("%s: Force skipping" % self.key) self.manager.log.debug("%s: Force skipping" % self.key)
@ -88,7 +92,6 @@ class Worker(object):
self.thread.kill(exception=Debug.Notify("Worker stopped")) self.thread.kill(exception=Debug.Notify("Worker stopped"))
self.start() self.start()
# Force stop the worker # Force stop the worker
def stop(self): def stop(self):
self.manager.log.debug("%s: Force stopping" % self.key) self.manager.log.debug("%s: Force stopping" % self.key)

View file

@ -1,29 +1,33 @@
import time
import logging
import random
import gevent
from Worker import Worker from Worker import Worker
import gevent, time, logging, random
MAX_WORKERS = 10 MAX_WORKERS = 10 # Max concurent workers
# Worker manager for site
class WorkerManager: class WorkerManager:
def __init__(self, site): def __init__(self, site):
self.site = site self.site = site
self.workers = {} # Key: ip:port, Value: Worker.Worker self.workers = {} # Key: ip:port, Value: Worker.Worker
self.tasks = [] # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids} self.tasks = []
# {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False,
# "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids}
self.started_task_num = 0 # Last added task num self.started_task_num = 0 # Last added task num
self.running = True self.running = True
self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short) self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short)
self.process_taskchecker = gevent.spawn(self.checkTasks) self.process_taskchecker = gevent.spawn(self.checkTasks)
def __str__(self): def __str__(self):
return "WorkerManager %s" % self.site.address_short return "WorkerManager %s" % self.site.address_short
def __repr__(self): def __repr__(self):
return "<%s>" % self.__str__() return "<%s>" % self.__str__()
# Check expired tasks # Check expired tasks
def checkTasks(self): def checkTasks(self):
while self.running: while self.running:
@ -32,13 +36,15 @@ class WorkerManager:
# Clean up workers # Clean up workers
for worker in self.workers.values(): for worker in self.workers.values():
if worker.task and worker.task["done"]: worker.skip() # Stop workers with task done if worker.task and worker.task["done"]:
worker.skip() # Stop workers with task done
if not self.tasks: continue if not self.tasks:
continue
tasks = self.tasks[:] # Copy it so removing elements wont cause any problem tasks = self.tasks[:] # Copy it so removing elements wont cause any problem
for task in tasks: for task in tasks:
if task["time_started"] and time.time() >= task["time_started"]+60: # Task taking too long time, skip it if task["time_started"] and time.time() >= task["time_started"] + 60: # Task taking too long time, skip it
self.log.debug("Timeout, Skipping: %s" % task) self.log.debug("Timeout, Skipping: %s" % task)
# Skip to next file workers # Skip to next file workers
workers = self.findWorkers(task) workers = self.findWorkers(task)
@ -47,12 +53,13 @@ class WorkerManager:
worker.skip() worker.skip()
else: else:
self.failTask(task) self.failTask(task)
elif time.time() >= task["time_added"]+60 and not self.workers: # No workers left elif time.time() >= task["time_added"] + 60 and not self.workers: # No workers left
self.log.debug("Timeout, Cleanup task: %s" % task) self.log.debug("Timeout, Cleanup task: %s" % task)
# Remove task # Remove task
self.failTask(task) self.failTask(task)
elif (task["time_started"] and time.time() >= task["time_started"]+15) or not self.workers: # Task started more than 15 sec ago or no workers elif (task["time_started"] and time.time() >= task["time_started"] + 15) or not self.workers:
# Task started more than 15 sec ago or no workers
self.log.debug("Task taking more than 15 secs, find more peers: %s" % task["inner_path"]) self.log.debug("Task taking more than 15 secs, find more peers: %s" % task["inner_path"])
task["site"].announce() # Find more peers task["site"].announce() # Find more peers
if task["peers"]: # Release the peer lock if task["peers"]: # Release the peer lock
@ -61,39 +68,38 @@ class WorkerManager:
self.startWorkers() self.startWorkers()
break # One reannounce per loop break # One reannounce per loop
self.log.debug("checkTasks stopped running") self.log.debug("checkTasks stopped running")
# Tasks sorted by this # Tasks sorted by this
def taskSorter(self, task): def taskSorter(self, task):
if task["inner_path"] == "content.json": return 9999 # Content.json always prority if task["inner_path"] == "content.json":
if task["inner_path"] == "index.html": return 9998 # index.html also important return 9999 # Content.json always prority
if task["inner_path"] == "index.html":
return 9998 # index.html also important
priority = task["priority"] priority = task["priority"]
if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"): priority += 1 # download js and css files first if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"):
return priority-task["workers_num"] # Prefer more priority and less workers priority += 1 # download js and css files first
return priority - task["workers_num"] # Prefer more priority and less workers
# Returns the next free or less worked task # Returns the next free or less worked task
def getTask(self, peer): def getTask(self, peer):
self.tasks.sort(key=self.taskSorter, reverse=True) # Sort tasks by priority and worker numbers self.tasks.sort(key=self.taskSorter, reverse=True) # Sort tasks by priority and worker numbers
for task in self.tasks: # Find a task for task in self.tasks: # Find a task
if task["peers"] and peer not in task["peers"]: continue # This peer not allowed to pick this task if task["peers"] and peer not in task["peers"]:
if peer in task["failed"]: continue # Peer already tried to solve this, but failed continue # This peer not allowed to pick this task
if peer in task["failed"]:
continue # Peer already tried to solve this, but failed
return task return task
# New peers added to site # New peers added to site
def onPeers(self): def onPeers(self):
self.startWorkers() self.startWorkers()
# Add new worker # Add new worker
def addWorker(self, peer): def addWorker(self, peer):
key = peer.key key = peer.key
if key not in self.workers and len(self.workers) < MAX_WORKERS: # We dont have worker for that peer and workers num less than max if key not in self.workers and len(self.workers) < MAX_WORKERS:
# We dont have worker for that peer and workers num less than max
worker = Worker(self, peer) worker = Worker(self, peer)
self.workers[key] = worker self.workers[key] = worker
worker.key = key worker.key = key
@ -102,18 +108,21 @@ class WorkerManager:
else: # We have woker for this peer or its over the limit else: # We have woker for this peer or its over the limit
return False return False
# Start workers to process tasks # Start workers to process tasks
def startWorkers(self, peers=None): def startWorkers(self, peers=None):
if not self.tasks: return False # No task for workers if not self.tasks:
if len(self.workers) >= MAX_WORKERS and not peers: return False # Workers number already maxed and no starting peers definied return False # No task for workers
if not peers: peers = self.site.peers.values() # No peers definied, use any from site if len(self.workers) >= MAX_WORKERS and not peers:
return False # Workers number already maxed and no starting peers definied
if not peers:
peers = self.site.peers.values() # No peers definied, use any from site
random.shuffle(peers) random.shuffle(peers)
for peer in peers: # One worker for every peer for peer in peers: # One worker for every peer
if peers and peer not in peers: continue # If peers definied and peer not valid if peers and peer not in peers:
continue # If peers definied and peer not valid
worker = self.addWorker(peer) worker = self.addWorker(peer)
if worker: self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), MAX_WORKERS)) if worker:
self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), MAX_WORKERS))
# Stop all worker # Stop all worker
def stopWorkers(self): def stopWorkers(self):
@ -123,16 +132,14 @@ class WorkerManager:
for task in tasks: # Mark all current task as failed for task in tasks: # Mark all current task as failed
self.failTask(task) self.failTask(task)
# Find workers by task # Find workers by task
def findWorkers(self, task): def findWorkers(self, task):
workers = [] workers = []
for worker in self.workers.values(): for worker in self.workers.values():
if worker.task == task: workers.append(worker) if worker.task == task:
workers.append(worker)
return workers return workers
# Ends and remove a worker # Ends and remove a worker
def removeWorker(self, worker): def removeWorker(self, worker):
worker.running = False worker.running = False
@ -140,9 +147,8 @@ class WorkerManager:
del(self.workers[worker.key]) del(self.workers[worker.key])
self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS)) self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS))
# Create new task and return asyncresult # Create new task and return asyncresult
def addTask(self, inner_path, peer=None, priority = 0): def addTask(self, inner_path, peer=None, priority=0):
self.site.onFileStart(inner_path) # First task, trigger site download started self.site.onFileStart(inner_path) # First task, trigger site download started
task = self.findTask(inner_path) task = self.findTask(inner_path)
if task: # Already has task for that file if task: # Already has task for that file
@ -155,7 +161,6 @@ class WorkerManager:
self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"])) self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"]))
self.startWorkers([peer]) self.startWorkers([peer])
if priority: if priority:
task["priority"] += priority # Boost on priority task["priority"] += priority # Boost on priority
return task["evt"] return task["evt"]
@ -165,14 +170,19 @@ class WorkerManager:
peers = [peer] # Only download from this peer peers = [peer] # Only download from this peer
else: else:
peers = None peers = None
task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_added": time.time(), "time_started": None, "peers": peers, "priority": priority, "failed": []} task = {
"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False,
"time_added": time.time(), "time_started": None, "peers": peers, "priority": priority, "failed": []
}
self.tasks.append(task) self.tasks.append(task)
self.started_task_num += 1 self.started_task_num += 1
self.log.debug("New task: %s, peer lock: %s, priority: %s, tasks: %s" % (task["inner_path"], peers, priority, self.started_task_num)) self.log.debug(
"New task: %s, peer lock: %s, priority: %s, tasks: %s" %
(task["inner_path"], peers, priority, self.started_task_num)
)
self.startWorkers(peers) self.startWorkers(peers)
return evt return evt
# Find a task using inner_path # Find a task using inner_path
def findTask(self, inner_path): def findTask(self, inner_path):
for task in self.tasks: for task in self.tasks:
@ -180,7 +190,6 @@ class WorkerManager:
return task return task
return None # Not found return None # Not found
# Mark a task failed # Mark a task failed
def failTask(self, task): def failTask(self, task):
if task in self.tasks: if task in self.tasks:
@ -191,7 +200,6 @@ class WorkerManager:
if not self.tasks: if not self.tasks:
self.started_task_num = 0 self.started_task_num = 0
# Mark a task done # Mark a task done
def doneTask(self, task): def doneTask(self, task):
task["done"] = True task["done"] = True
@ -201,4 +209,3 @@ class WorkerManager:
if not self.tasks: if not self.tasks:
self.started_task_num = 0 self.started_task_num = 0
self.site.onComplete() # No more task trigger site complete self.site.onComplete() # No more task trigger site complete

View file

@ -1,14 +1,24 @@
import re import re
def prefix(content): def prefix(content):
content = re.sub("@keyframes (.*? {.*?[^ ]})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n", content, flags=re.DOTALL) content = re.sub(
content = re.sub('([^-\*])(border-radius|box-shadow|transition|animation|box-sizing|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])', '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content) "@keyframes (.*? {.*?[^ ]})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n",
content = re.sub('(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])', content, flags=re.DOTALL
'\\1: -webkit-\\2(\\3);'+ )
'\\1: -moz-\\2(\\3);'+ content = re.sub(
'\\1: -o-\\2(\\3);'+ '([^-\*])(border-radius|box-shadow|transition|animation|box-sizing|' +
'\\1: -ms-\\2(\\3);'+ 'transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])',
'\\1: \\2(\\3);', content) '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content
)
content = re.sub(
'(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])',
'\\1: -webkit-\\2(\\3);' +
'\\1: -moz-\\2(\\3);' +
'\\1: -o-\\2(\\3);' +
'\\1: -ms-\\2(\\3);' +
'\\1: \\2(\\3);', content
)
return content return content
if __name__ == "__main__": if __name__ == "__main__":

View file

@ -16,8 +16,10 @@ update_after_shutdown = False # If set True then update and restart zeronet aft
from Config import config from Config import config
# Create necessary files and dirs # Create necessary files and dirs
if not os.path.isdir(config.log_dir): os.mkdir(config.log_dir) if not os.path.isdir(config.log_dir):
if not os.path.isdir(config.data_dir): os.mkdir(config.data_dir) os.mkdir(config.log_dir)
if not os.path.isdir(config.data_dir):
os.mkdir(config.data_dir)
if not os.path.isfile("%s/sites.json" % config.data_dir): if not os.path.isfile("%s/sites.json" % config.data_dir):
open("%s/sites.json" % config.data_dir, "w").write("{}") open("%s/sites.json" % config.data_dir, "w").write("{}")
if not os.path.isfile("%s/users.json" % config.data_dir): if not os.path.isfile("%s/users.json" % config.data_dir):
@ -54,8 +56,7 @@ if config.debug:
else: else:
console_log.setLevel(logging.INFO) # Display only important info to console console_log.setLevel(logging.INFO) # Display only important info to console
monkey.patch_all(thread=False) # Make time, socket gevent compatible. Not thread: pyfilesystem and system tray icon not compatible monkey.patch_all(thread=False) # Not thread: pyfilesystem and system tray icon not compatible
# Log current config # Log current config
@ -81,6 +82,7 @@ PluginManager.plugin_manager.loadPlugins()
@PluginManager.acceptPlugins @PluginManager.acceptPlugins
class Actions(object): class Actions(object):
# Default action: Start serving UiServer and FileServer # Default action: Start serving UiServer and FileServer
def main(self): def main(self):
logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__)) logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__))
global ui_server, file_server global ui_server, file_server
@ -113,8 +115,10 @@ class Actions(object):
logging.info("----------------------------------------------------------------------") logging.info("----------------------------------------------------------------------")
while True: while True:
if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
else: logging.info("Please, secure it now, you going to need it to modify your site!") break
else:
logging.info("Please, secure it now, you going to need it to modify your site!")
logging.info("Creating directory structure...") logging.info("Creating directory structure...")
from Site import Site from Site import Site
@ -132,7 +136,7 @@ class Actions(object):
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False): def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
from Site import Site from Site import Site
logging.info("Signing site: %s..." % address) logging.info("Signing site: %s..." % address)
site = Site(address, allow_create = False) site = Site(address, allow_create=False)
if not privatekey: # If no privatekey in args then ask it now if not privatekey: # If no privatekey in args then ask it now
import getpass import getpass
@ -151,7 +155,10 @@ class Actions(object):
for content_inner_path in site.content_manager.contents: for content_inner_path in site.content_manager.contents:
logging.info("Verifing %s signature..." % content_inner_path) logging.info("Verifing %s signature..." % content_inner_path)
if site.content_manager.verifyFile(content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False) == True: file_correct = site.content_manager.verifyFile(
content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
)
if file_correct is True:
logging.info("[OK] %s signed by address %s!" % (content_inner_path, address)) logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
else: else:
logging.error("[ERROR] %s: invalid file!" % content_inner_path) logging.error("[ERROR] %s: invalid file!" % content_inner_path)
@ -160,7 +167,7 @@ class Actions(object):
logging.info("Verifying site files...") logging.info("Verifying site files...")
bad_files += site.storage.verifyFiles() bad_files += site.storage.verifyFiles()
if not bad_files: if not bad_files:
logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time()-s)) logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
else: else:
logging.error("[ERROR] Error during verifying site files!") logging.error("[ERROR] Error during verifying site files!")
@ -170,7 +177,7 @@ class Actions(object):
site = Site(address) site = Site(address)
s = time.time() s = time.time()
site.storage.rebuildDb() site.storage.rebuildDb()
logging.info("Done in %.3fs" % (time.time()-s)) logging.info("Done in %.3fs" % (time.time() - s))
def dbQuery(self, address, query): def dbQuery(self, address, query):
from Site import Site from Site import Site
@ -188,17 +195,15 @@ class Actions(object):
s = time.time() s = time.time()
site.announce() site.announce()
print "Response time: %.3fs" % (time.time()-s) print "Response time: %.3fs" % (time.time() - s)
print site.peers print site.peers
def siteNeedFile(self, address, inner_path): def siteNeedFile(self, address, inner_path):
from Site import Site from Site import Site
site = Site(address) site = Site(address)
site.announce() site.announce()
print site.needFile(inner_path, update=True) print site.needFile(inner_path, update=True)
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"): def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
global file_server global file_server
from Site import Site from Site import Site
@ -222,7 +227,7 @@ class Actions(object):
gevent.joinall([file_server_thread], timeout=60) gevent.joinall([file_server_thread], timeout=60)
logging.info("Done.") logging.info("Done.")
else: else:
logging.info("No peers found for this site, sitePublish command only works if you already have peers serving your site") logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
# Crypto commands # Crypto commands
def cryptPrivatekeyToAddress(self, privatekey=None): def cryptPrivatekeyToAddress(self, privatekey=None):
@ -252,7 +257,7 @@ class Actions(object):
for i in range(5): for i in range(5):
s = time.time() s = time.time()
print peer.ping(), print peer.ping(),
print "Response time: %.3fs (crypt: %s)" % (time.time()-s, peer.connection.crypt) print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt)
time.sleep(1) time.sleep(1)
def peerGetFile(self, peer_ip, peer_port, site, filename): def peerGetFile(self, peer_ip, peer_port, site, filename):
@ -266,7 +271,7 @@ class Actions(object):
peer = Peer(peer_ip, peer_port) peer = Peer(peer_ip, peer_port)
s = time.time() s = time.time()
print peer.getFile(site, filename).read() print peer.getFile(site, filename).read()
print "Response time: %.3fs" % (time.time()-s) print "Response time: %.3fs" % (time.time() - s)
def peerCmd(self, peer_ip, peer_port, cmd, parameters): def peerCmd(self, peer_ip, peer_port, cmd, parameters):
logging.info("Opening a simple connection server") logging.info("Opening a simple connection server")
@ -284,9 +289,10 @@ class Actions(object):
logging.info("Response: %s" % peer.request(cmd, parameters)) logging.info("Response: %s" % peer.request(cmd, parameters))
actions = Actions() actions = Actions()
# Starts here when running zeronet.py # Starts here when running zeronet.py
def start(): def start():
# Call function # Call function
func = getattr(actions, config.action, None) func = getattr(actions, config.action, None)

View file

@ -1,17 +1,17 @@
# Based on http://stackoverflow.com/a/2022629 # Based on http://stackoverflow.com/a/2022629
class Event(list): class Event(list):
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
for f in self[:]: for f in self[:]:
if "once" in dir(f) and f in self: if "once" in dir(f) and f in self:
self.remove(f) self.remove(f)
f(*args, **kwargs) f(*args, **kwargs)
def __repr__(self): def __repr__(self):
return "Event(%s)" % list.__repr__(self) return "Event(%s)" % list.__repr__(self)
def once(self, func, name=None): def once(self, func, name=None):
func.once = True func.once = True
func.name = None func.name = None
@ -25,35 +25,32 @@ class Event(list):
return self return self
def testBenchmark(): def testBenchmark():
def say(pre, text): def say(pre, text):
print "%s Say: %s" % (pre, text) print "%s Say: %s" % (pre, text)
import time import time
s = time.time() s = time.time()
onChanged = Event() on_changed = Event()
for i in range(1000): for i in range(1000):
onChanged.once(lambda pre: say(pre, "once"), "once") on_changed.once(lambda pre: say(pre, "once"), "once")
print "Created 1000 once in %.3fs" % (time.time()-s) print "Created 1000 once in %.3fs" % (time.time() - s)
onChanged("#1") on_changed("#1")
def testUsage(): def testUsage():
def say(pre, text): def say(pre, text):
print "%s Say: %s" % (pre, text) print "%s Say: %s" % (pre, text)
onChanged = Event() on_changed = Event()
onChanged.once(lambda pre: say(pre, "once")) on_changed.once(lambda pre: say(pre, "once"))
onChanged.once(lambda pre: say(pre, "once")) on_changed.once(lambda pre: say(pre, "once"))
onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce") on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce") on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
onChanged.append(lambda pre: say(pre, "always")) on_changed.append(lambda pre: say(pre, "always"))
onChanged("#1") on_changed("#1")
onChanged("#2") on_changed("#2")
onChanged("#3") on_changed("#3")
if __name__ == "__main__": if __name__ == "__main__":

View file

@ -12,8 +12,11 @@ except AttributeError:
OldSSLSocket = __ssl__.SSLSocket OldSSLSocket = __ssl__.SSLSocket
class NewSSLSocket(OldSSLSocket): class NewSSLSocket(OldSSLSocket):
"""Fix SSLSocket constructor.""" """Fix SSLSocket constructor."""
def __init__( def __init__(
self, sock, keyfile=None, certfile=None, server_side=False, cert_reqs=0, self, sock, keyfile=None, certfile=None, server_side=False, cert_reqs=0,
ssl_version=2, ca_certs=None, do_handshake_on_connect=True, ssl_version=2, ca_certs=None, do_handshake_on_connect=True,

View file

@ -1,11 +1,13 @@
import urllib2, logging import urllib2
import logging
import GeventSslPatch import GeventSslPatch
from Config import config from Config import config
def get(url): def get(url):
logging.debug("Get %s" % url) logging.debug("Get %s" % url)
req = urllib2.Request(url) req = urllib2.Request(url)
req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version) req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version)
req.add_header('Accept', 'application/json') req.add_header('Accept', 'application/json')
return urllib2.urlopen(req) return urllib2.urlopen(req)

View file

@ -1,12 +1,13 @@
import gevent, time import gevent
import time
class Noparallel(object): # Only allow function running once in same time class Noparallel(object): # Only allow function running once in same time
def __init__(self,blocking=True):
def __init__(self, blocking=True):
self.threads = {} self.threads = {}
self.blocking = blocking # Blocking: Acts like normal function else thread returned self.blocking = blocking # Blocking: Acts like normal function else thread returned
def __call__(self, func): def __call__(self, func):
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
key = (func, tuple(args), tuple(kwargs.items())) # Unique key for function including parameters key = (func, tuple(args), tuple(kwargs.items())) # Unique key for function including parameters
@ -38,10 +39,12 @@ class Noparallel(object): # Only allow function running once in same time
# Cleanup finished threads # Cleanup finished threads
def cleanup(self, key, thread): def cleanup(self, key, thread):
if key in self.threads: del(self.threads[key]) if key in self.threads:
del(self.threads[key])
class Test(): class Test():
@Noparallel() @Noparallel()
def count(self, num=5): def count(self, num=5):
for i in range(num): for i in range(num):
@ -51,6 +54,7 @@ class Test():
class TestNoblock(): class TestNoblock():
@Noparallel(blocking=False) @Noparallel(blocking=False)
def count(self, num=5): def count(self, num=5):
for i in range(num): for i in range(num):
@ -85,6 +89,7 @@ def testBlocking():
print thread1.value, thread2.value, thread3.value, thread4.value print thread1.value, thread2.value, thread3.value, thread4.value
print "Done." print "Done."
def testNoblocking(): def testNoblocking():
test = TestNoblock() test = TestNoblock()
test2 = TestNoblock() test2 = TestNoblock()
@ -106,13 +111,13 @@ def testNoblocking():
print "Joining thread4" print "Joining thread4"
thread4.join() thread4.join()
print thread1.value, thread2.value, thread3.value, thread4.value print thread1.value, thread2.value, thread3.value, thread4.value
print "Done." print "Done."
def testBenchmark(): def testBenchmark():
import time import time
def printThreadNum(): def printThreadNum():
import gc import gc
from greenlet import greenlet from greenlet import greenlet
@ -123,13 +128,12 @@ def testBenchmark():
test = TestNoblock() test = TestNoblock()
s = time.time() s = time.time()
for i in range(3): for i in range(3):
gevent.spawn(test.count, i+1) gevent.spawn(test.count, i + 1)
print "Created in %.3fs" % (time.time()-s) print "Created in %.3fs" % (time.time() - s)
printThreadNum() printThreadNum()
time.sleep(5) time.sleep(5)
if __name__ == "__main__": if __name__ == "__main__":
from gevent import monkey from gevent import monkey
monkey.patch_all() monkey.patch_all()
@ -139,4 +143,3 @@ if __name__ == "__main__":
testBlocking() testBlocking()
print "Testing noblocking mode..." print "Testing noblocking mode..."
testNoblocking() testNoblocking()
print [instance.threads for instance in registry]

View file

@ -1,17 +1,22 @@
import json, re, os import json
import re
import os
def queryFile(file_path, filter_path, filter_key = None, filter_val = None): def queryFile(file_path, filter_path, filter_key=None, filter_val=None):
back = [] back = []
data = json.load(open(file_path)) data = json.load(open(file_path))
if filter_path == ['']: return [data] if filter_path == ['']:
return [data]
for key in filter_path: # Get to the point for key in filter_path: # Get to the point
data = data.get(key) data = data.get(key)
if not data: return if not data:
return
for row in data: for row in data:
if filter_val: # Filter by value if filter_val: # Filter by value
if row[filter_key] == filter_val: back.append(row) if row[filter_key] == filter_val:
back.append(row)
else: else:
back.append(row) back.append(row)
@ -19,7 +24,7 @@ def queryFile(file_path, filter_path, filter_key = None, filter_val = None):
# Find in json files # Find in json files
# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6', u'added': 1422740732, u'message_id': 1},...] # Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1...beEp6', u'added': 1422740732, u'message_id': 1},...]
def query(path_pattern, filter): def query(path_pattern, filter):
if "=" in filter: # Filter by value if "=" in filter: # Filter by value
filter_path, filter_val = filter.split("=") filter_path, filter_val = filter.split("=")
@ -40,22 +45,20 @@ def query(path_pattern, filter):
root = root.replace("\\", "/") root = root.replace("\\", "/")
inner_path = root.replace(root_dir, "").strip("/") inner_path = root.replace(root_dir, "").strip("/")
for file_name in files: for file_name in files:
if file_pattern != file_name: continue if file_pattern != file_name:
continue
try: try:
res = queryFile(root+"/"+file_name, filter_path, filter_key, filter_val) res = queryFile(root + "/" + file_name, filter_path, filter_key, filter_val)
if not res: continue if not res:
except Exception, err: # Json load error continue
# print file_name, filter, err except Exception: # Json load error
continue continue
for row in res: for row in res:
row["inner_path"] = inner_path row["inner_path"] = inner_path
yield row yield row
if __name__ == "__main__": if __name__ == "__main__":
#for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "topics")):
# print row
for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")): for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
print row print row

View file

@ -9,6 +9,8 @@ queue_db = {} # Commands queued to run
# Register event as called # Register event as called
# Return: None # Return: None
def called(event): def called(event):
called_db[event] = time.time() called_db[event] = time.time()
@ -19,7 +21,7 @@ def isAllowed(event, allowed_again=10):
last_called = called_db.get(event) last_called = called_db.get(event)
if not last_called: # Its not called before if not last_called: # Its not called before
return True return True
elif time.time()-last_called >= allowed_again: elif time.time() - last_called >= allowed_again:
del called_db[event] # Delete last call time to save memory del called_db[event] # Delete last call time to save memory
return True return True
else: else:
@ -34,16 +36,16 @@ def callQueue(event):
return func(*args, **kwargs) return func(*args, **kwargs)
# Rate limit and delay function call if necessary
# Rate limit and delay function call if needed, If the function called again within the rate limit interval then previous queued call will be dropped # If the function called again within the rate limit interval then previous queued call will be dropped
# Return: Immedietly gevent thread # Return: Immediately gevent thread
def callAsync(event, allowed_again=10, func=None, *args, **kwargs): def callAsync(event, allowed_again=10, func=None, *args, **kwargs):
if isAllowed(event, allowed_again): # Not called recently, call it now if isAllowed(event, allowed_again): # Not called recently, call it now
called(event) called(event)
# print "Calling now" # print "Calling now"
return gevent.spawn(func, *args, **kwargs) return gevent.spawn(func, *args, **kwargs)
else: # Called recently, schedule it for later else: # Called recently, schedule it for later
time_left = allowed_again-max(0, time.time()-called_db[event]) time_left = allowed_again - max(0, time.time() - called_db[event])
log.debug("Added to queue (%.2fs left): %s " % (time_left, event)) log.debug("Added to queue (%.2fs left): %s " % (time_left, event))
if not queue_db.get(event): # Function call not queued yet if not queue_db.get(event): # Function call not queued yet
thread = gevent.spawn_later(time_left, lambda: callQueue(event)) # Call this function later thread = gevent.spawn_later(time_left, lambda: callQueue(event)) # Call this function later
@ -64,7 +66,7 @@ def call(event, allowed_again=10, func=None, *args, **kwargs):
return func(*args, **kwargs) return func(*args, **kwargs)
else: # Called recently, schedule it for later else: # Called recently, schedule it for later
time_left = max(0, allowed_again-(time.time()-called_db[event])) time_left = max(0, allowed_again - (time.time() - called_db[event]))
# print "Time left: %s" % time_left, args, kwargs # print "Time left: %s" % time_left, args, kwargs
log.debug("Calling sync (%.2fs left): %s" % (time_left, event)) log.debug("Calling sync (%.2fs left): %s" % (time_left, event))
time.sleep(time_left) time.sleep(time_left)
@ -78,11 +80,11 @@ def call(event, allowed_again=10, func=None, *args, **kwargs):
# Cleanup expired events every 3 minutes # Cleanup expired events every 3 minutes
def cleanup(): def cleanup():
while 1: while 1:
expired = time.time()-60*2 # Cleanup if older than 2 minutes expired = time.time() - 60 * 2 # Cleanup if older than 2 minutes
for event in called_db.keys(): for event in called_db.keys():
if called_db[event] < expired: if called_db[event] < expired:
del called_db[event] del called_db[event]
time.sleep(60*3) # Every 3 minutes time.sleep(60 * 3) # Every 3 minutes
gevent.spawn(cleanup) gevent.spawn(cleanup)
@ -101,7 +103,7 @@ if __name__ == "__main__":
print "Testing async spam requests rate limit to 1/sec..." print "Testing async spam requests rate limit to 1/sec..."
for i in range(3000): for i in range(3000):
thread = callAsync("publish content.json", 1, publish, "content.json %s" % i) thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
time.sleep(float(random.randint(1,20))/100000) time.sleep(float(random.randint(1, 20)) / 100000)
print thread.link(cb) print thread.link(cb)
print "Done" print "Done"
@ -110,13 +112,12 @@ if __name__ == "__main__":
print "Testing sync spam requests rate limit to 1/sec..." print "Testing sync spam requests rate limit to 1/sec..."
for i in range(5): for i in range(5):
call("publish data.json", 1, publish, "data.json %s" % i) call("publish data.json", 1, publish, "data.json %s" % i)
time.sleep(float(random.randint(1,100))/100) time.sleep(float(random.randint(1, 100)) / 100)
print "Done" print "Done"
print "Testing cleanup" print "Testing cleanup"
thread = callAsync("publish content.json single", 1, publish, "content.json single") thread = callAsync("publish content.json single", 1, publish, "content.json single")
print "Needs to cleanup:", called_db, queue_db print "Needs to cleanup:", called_db, queue_db
print "Waiting 3min for cleanup process..." print "Waiting 3min for cleanup process..."
time.sleep(60*3) time.sleep(60 * 3)
print "Cleaned up:", called_db, queue_db print "Cleaned up:", called_db, queue_db

View file

@ -1,6 +1,7 @@
from lib.PySocks import socks
import socket import socket
from lib.PySocks import socks
def create_connection(address, timeout=None, source_address=None): def create_connection(address, timeout=None, source_address=None):
sock = socks.socksocket() sock = socks.socksocket()
@ -19,4 +20,3 @@ def monkeyPath(proxy_ip, proxy_port):
socket.socket = socks.socksocket socket.socket = socks.socksocket
socket.create_connection = create_connection socket.create_connection = create_connection
socket.getaddrinfo = getaddrinfo socket.getaddrinfo = getaddrinfo

View file

@ -2,6 +2,7 @@
# Disable SSL compression to save massive memory and cpu # Disable SSL compression to save massive memory and cpu
import logging import logging
from Config import config from Config import config
@ -9,7 +10,10 @@ def disableSSLCompression():
import ctypes import ctypes
import ctypes.util import ctypes.util
try: try:
openssl = ctypes.CDLL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32', ctypes.RTLD_GLOBAL) openssl = ctypes.CDLL(
ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32',
ctypes.RTLD_GLOBAL
)
openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p
except Exception, err: except Exception, err:
logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err) logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err)
@ -81,7 +85,7 @@ if not hasattr(_ssl, 'sslwrap'):
logging.debug("Missing SSLwrap, readded.") logging.debug("Missing SSLwrap, readded.")
# Add SSLContext to gevent.ssl (Ubutunu 15 fix) # Add SSLContext to gevent.ssl (Ubuntu 15 fix)
try: try:
import gevent import gevent
if not hasattr(gevent.ssl, "SSLContext"): if not hasattr(gevent.ssl, "SSLContext"):

View file

@ -1,12 +1,15 @@
import msgpack, os, struct import os
import struct
import msgpack
def msgpackHeader(size): def msgpackHeader(size):
if size <= 2**8-1: if size <= 2 ** 8 - 1:
return b"\xc4" + struct.pack("B", size) return b"\xc4" + struct.pack("B", size)
elif size <= 2**16-1: elif size <= 2 ** 16 - 1:
return b"\xc5" + struct.pack(">H", size) return b"\xc5" + struct.pack(">H", size)
elif size <= 2**32-1: elif size <= 2 ** 32 - 1:
return b"\xc6" + struct.pack(">I", size) return b"\xc6" + struct.pack(">I", size)
else: else:
raise Exception("huge binary string") raise Exception("huge binary string")
@ -18,15 +21,16 @@ def stream(data, writer):
for key, val in data.iteritems(): for key, val in data.iteritems():
writer(packer.pack(key)) writer(packer.pack(key))
if issubclass(type(val), file): # File obj if issubclass(type(val), file): # File obj
max_size = os.fstat(val.fileno()).st_size-val.tell() max_size = os.fstat(val.fileno()).st_size - val.tell()
size = min(max_size, val.read_bytes) size = min(max_size, val.read_bytes)
bytes_left = size bytes_left = size
writer(msgpackHeader(size)) writer(msgpackHeader(size))
buff = 1024*64 buff = 1024 * 64
while 1: while 1:
writer(val.read(min(bytes_left, buff))) writer(val.read(min(bytes_left, buff)))
bytes_left = bytes_left-buff bytes_left = bytes_left - buff
if bytes_left <= 0: break if bytes_left <= 0:
break
else: # Simple else: # Simple
writer(packer.pack(val)) writer(packer.pack(val))
return size return size

View file

@ -1,10 +1,13 @@
import gevent import re
from gevent import socket import urllib2
import httplib
import re, urllib2, httplib, logging import logging
from urlparse import urlparse from urlparse import urlparse
from xml.dom.minidom import parseString from xml.dom.minidom import parseString
import gevent
from gevent import socket
# Relevant UPnP spec: http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf # Relevant UPnP spec: http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf
# General TODOs: # General TODOs:
@ -188,7 +191,7 @@ def open_port(port=15441, desc="UpnpPunch"):
local_ips = list(set(local_ips)) # Delete duplicates local_ips = list(set(local_ips)) # Delete duplicates
logging.debug("Found local ips: %s" % local_ips) logging.debug("Found local ips: %s" % local_ips)
local_ips = local_ips*3 # Retry every ip 3 times local_ips = local_ips * 3 # Retry every ip 3 times
for local_ip in local_ips: for local_ip in local_ips:
logging.debug("Trying using local ip: %s" % local_ip) logging.debug("Trying using local ip: %s" % local_ip)

View file

@ -1,7 +1,16 @@
from gevent import monkey; monkey.patch_all() import urllib
import urllib, zipfile, os, ssl, httplib, socket, re import zipfile
import os
import ssl
import httplib
import socket
import re
import cStringIO as StringIO import cStringIO as StringIO
from gevent import monkey
monkey.patch_all()
def update(): def update():
# Gevent https bug workaround (https://github.com/gevent/gevent/issues/477) # Gevent https bug workaround (https://github.com/gevent/gevent/issues/477)
reload(socket) reload(socket)
@ -12,8 +21,9 @@ def update():
file = urllib.urlopen("https://github.com/HelloZeroNet/ZeroNet/archive/master.zip") file = urllib.urlopen("https://github.com/HelloZeroNet/ZeroNet/archive/master.zip")
data = StringIO.StringIO() data = StringIO.StringIO()
while True: while True:
buff = file.read(1024*16) buff = file.read(1024 * 16)
if not buff: break if not buff:
break
data.write(buff) data.write(buff)
print ".", print ".",
print "Downloaded." print "Downloaded."
@ -29,24 +39,23 @@ def update():
plugins_enabled.append(dir) plugins_enabled.append(dir)
print "Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled print "Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled
print "Extracting...", print "Extracting...",
zip = zipfile.ZipFile(data) zip = zipfile.ZipFile(data)
for inner_path in zip.namelist(): for inner_path in zip.namelist():
inner_path = inner_path.replace("\\", "/") # Make sure we have unix path inner_path = inner_path.replace("\\", "/") # Make sure we have unix path
print ".", print ".",
dest_path = inner_path.replace("ZeroNet-master/", "") dest_path = inner_path.replace("ZeroNet-master/", "")
if not dest_path: continue if not dest_path:
continue
# Keep plugin disabled/enabled status # Keep plugin disabled/enabled status
match = re.match("plugins/([^/]+)", dest_path) match = re.match("plugins/([^/]+)", dest_path)
if match: if match:
plugin_name = match.group(1).replace("disabled-","") plugin_name = match.group(1).replace("disabled-", "")
if plugin_name in plugins_enabled: # Plugin was enabled if plugin_name in plugins_enabled: # Plugin was enabled
dest_path = dest_path.replace("plugins/disabled-"+plugin_name, "plugins/"+plugin_name) dest_path = dest_path.replace("plugins/disabled-" + plugin_name, "plugins/" + plugin_name)
elif plugin_name in plugins_disabled: # Plugin was disabled elif plugin_name in plugins_disabled: # Plugin was disabled
dest_path = dest_path.replace("plugins/"+plugin_name, "plugins/disabled-"+plugin_name) dest_path = dest_path.replace("plugins/" + plugin_name, "plugins/disabled-" + plugin_name)
print "P", print "P",
dest_dir = os.path.dirname(dest_path) dest_dir = os.path.dirname(dest_path)