diff --git a/plugins/Stats/StatsPlugin.py b/plugins/Stats/StatsPlugin.py
index 7bc09165..627bad88 100644
--- a/plugins/Stats/StatsPlugin.py
+++ b/plugins/Stats/StatsPlugin.py
@@ -1,516 +1,540 @@
-import re, time, cgi, os
+import time
+import cgi
+import os
+
from Plugin import PluginManager
from Config import config
+
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
- def formatTableRow(self, row):
- back = []
- for format, val in row:
- if val == None:
- formatted = "n/a"
- elif format == "since":
- if val:
- formatted = "%.0f" % (time.time()-val)
- else:
- formatted = "n/a"
- else:
- formatted = format % val
- back.append("
%s | " % formatted)
- return "%s
" % "".join(back)
-
-
- def getObjSize(self, obj, hpy = None):
- if hpy:
- return float(hpy.iso(obj).domisize)/1024
- else:
- return 0
-
-
- # /Stats entry point
- def actionStats(self):
- import gc, sys
- from Ui import UiRequest
- from Crypt import CryptConnection
-
- hpy = None
- if self.get.get("size") == "1": # Calc obj size
- try:
- import guppy
- hpy = guppy.hpy()
- except:
- pass
- self.sendHeader()
- s = time.time()
- main = sys.modules["main"]
-
- # Style
- yield """
-
- """
-
- # Memory
- try:
- yield "rev%s | " % config.rev
- yield "%s | " % config.ip_external
- yield "Opened: %s | " % main.file_server.port_opened
- yield "Crypt: %s | " % CryptConnection.manager.crypt_supported
- yield "In: %.2fMB, Out: %.2fMB | " % (float(main.file_server.bytes_recv)/1024/1024, float(main.file_server.bytes_sent)/1024/1024)
- yield "Peerid: %s | " % main.file_server.peer_id
- import psutil
- process = psutil.Process(os.getpid())
- mem = process.get_memory_info()[0] / float(2 ** 20)
- yield "Mem: %.2fMB | " % mem
- yield "Threads: %s | " % len(process.threads())
- yield "CPU: usr %.2fs sys %.2fs | " % process.cpu_times()
- yield "Files: %s | " % len(process.open_files())
- yield "Sockets: %s | " % len(process.connections())
- yield "Calc size on off"
- except Exception, err:
- pass
- yield "
"
-
- # Connections
- yield "Connections (%s, total made: %s):
" % (len(main.file_server.connections), main.file_server.last_connection_id)
- yield " id | proto | type | ip | open | crypt | ping | buff | "
- yield "idle | open | delay | out | in | last sent | waiting | version | peerid |
"
- for connection in main.file_server.connections:
- if "cipher" in dir(connection.sock):
- cipher = connection.sock.cipher()[0]
- else:
- cipher = connection.crypt
- yield self.formatTableRow([
- ("%3d", connection.id),
- ("%s", connection.protocol),
- ("%s", connection.type),
- ("%s:%s", (connection.ip, connection.port)),
- ("%s", connection.handshake.get("port_opened")),
- ("%s", (connection.crypt, cipher)),
- ("%6.3f", connection.last_ping_delay),
- ("%s", connection.incomplete_buff_recv),
- ("since", max(connection.last_send_time, connection.last_recv_time)),
- ("since", connection.start_time),
- ("%.3f", connection.last_sent_time-connection.last_send_time),
- ("%.0fkB", connection.bytes_sent/1024),
- ("%.0fkB", connection.bytes_recv/1024),
- ("%s", connection.last_cmd),
- ("%s", connection.waiting_requests.keys()),
- ("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?")) ),
- ("%s", connection.handshake.get("peer_id")),
- ])
- yield "
"
-
-
- # Sites
- yield "
Sites:"
- yield ""
- yield "address | connected | peers | content.json |
"
- for site in self.server.sites.values():
- yield self.formatTableRow([
- ("%s", (site.address, site.address)),
- ("%s", [peer.connection.id for peer in site.peers.values() if peer.connection and peer.connection.connected]),
- ("%s/%s/%s", (
- len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]),
- len(site.getConnectablePeers(100)),
- len(site.peers)
- ) ),
- ("%s", len(site.content_manager.contents)),
- ])
- yield "" % site.address
- for key, peer in site.peers.items():
- yield "(%s, err: %s) %22s - " % (peer.connection, peer.connection_error, key)
- yield "
|
"
- yield "
"
-
-
- # Object types
-
- obj_count = {}
- for obj in gc.get_objects():
- obj_type = str(type(obj))
- if not obj_type in obj_count:
- obj_count[obj_type] = [0, 0]
- obj_count[obj_type][0] += 1 # Count
- obj_count[obj_type][1] += float(sys.getsizeof(obj))/1024 # Size
-
- yield "
Objects in memory (types: %s, total: %s, %.2fkb):
" % (len(obj_count), sum([stat[0] for stat in obj_count.values()]), sum([stat[1] for stat in obj_count.values()]))
-
- for obj, stat in sorted(obj_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count
- yield " - %.1fkb = %s x %s
" % (stat[1], stat[0], obj, cgi.escape(obj))
-
-
- # Classes
-
- class_count = {}
- for obj in gc.get_objects():
- obj_type = str(type(obj))
- if obj_type != "": continue
- class_name = obj.__class__.__name__
- if not class_name in class_count:
- class_count[class_name] = [0, 0]
- class_count[class_name][0] += 1 # Count
- class_count[class_name][1] += float(sys.getsizeof(obj))/1024 # Size
-
- yield "
Classes in memory (types: %s, total: %s, %.2fkb):
" % (len(class_count), sum([stat[0] for stat in class_count.values()]), sum([stat[1] for stat in class_count.values()]))
-
- for obj, stat in sorted(class_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count
- yield " - %.1fkb = %s x %s
" % (stat[1], stat[0], obj, cgi.escape(obj))
-
-
- from greenlet import greenlet
- objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
- yield "
Greenlets (%s):
" % len(objs)
- for obj in objs:
- yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
-
-
- from Worker import Worker
- objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)]
- yield "
Workers (%s):
" % len(objs)
- for obj in objs:
- yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
-
-
- from Connection import Connection
- objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)]
- yield "
Connections (%s):
" % len(objs)
- for obj in objs:
- yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
-
- from socket import socket
- objs = [obj for obj in gc.get_objects() if isinstance(obj, socket)]
- yield "
Sockets (%s):
" % len(objs)
- for obj in objs:
- yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
-
- from msgpack import Unpacker
- objs = [obj for obj in gc.get_objects() if isinstance(obj, Unpacker)]
- yield "
Msgpack unpacker (%s):
" % len(objs)
- for obj in objs:
- yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
-
-
- from Site import Site
- objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)]
- yield "
Sites (%s):
" % len(objs)
- for obj in objs:
- yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
-
-
- objs = [obj for obj in gc.get_objects() if isinstance(obj, self.server.log.__class__)]
- yield "
Loggers (%s):
" % len(objs)
- for obj in objs:
- yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj.name)))
-
-
- objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)]
- yield "
UiRequests (%s):
" % len(objs)
- for obj in objs:
- yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
-
-
- from Peer import Peer
- objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)]
- yield "
Peers (%s):
" % len(objs)
- for obj in objs:
- yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
-
-
- objs = [(key, val) for key, val in sys.modules.iteritems() if val is not None]
- objs.sort()
- yield "
Modules (%s):
" % len(objs)
- for module_name, module in objs:
- yield " - %.3fkb: %s %s
" % (self.getObjSize(module, hpy), module_name, cgi.escape(repr(module)))
-
- gc.collect() # Implicit grabage collection
- yield "Done in %.1f" % (time.time()-s)
-
-
- def actionDumpobj(self):
- import gc, sys
-
- self.sendHeader()
- class_filter = self.get.get("class")
-
- yield """
-
- """
-
- objs = gc.get_objects()
- for obj in objs:
- obj_type = str(type(obj))
- if obj_type != "" or obj.__class__.__name__ != class_filter: continue
- yield "%.1fkb %s... " % (float(sys.getsizeof(obj))/1024, cgi.escape(str(obj)) )
- for attr in dir(obj):
- yield "- %s: %s
" % (attr, cgi.escape(str(getattr(obj, attr))))
- yield "
"
-
- gc.collect() # Implicit grabage collection
-
-
- def actionListobj(self):
- import gc, sys
-
- self.sendHeader()
- type_filter = self.get.get("type")
-
- yield """
-
- """
-
- yield "Listing all %s objects in memory...
" % cgi.escape(type_filter)
-
- ref_count = {}
- objs = gc.get_objects()
- for obj in objs:
- obj_type = str(type(obj))
- if obj_type != type_filter: continue
- refs = [ref for ref in gc.get_referrers(obj) if hasattr(ref, "__class__") and ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"]]
- if not refs: continue
- yield "%.1fkb %s... " % (float(sys.getsizeof(obj))/1024, cgi.escape(str(obj)), cgi.escape(str(obj)[0:100].ljust(100)) )
- for ref in refs:
- yield " ["
- if "object at" in str(ref) or len(str(ref)) > 100:
- yield str(ref.__class__.__name__)
- else:
- yield str(ref.__class__.__name__)+":"+cgi.escape(str(ref))
- yield "] "
- ref_type = ref.__class__.__name__
- if ref_type not in ref_count:
- ref_count[ref_type] = [0,0]
- ref_count[ref_type][0] += 1 # Count
- ref_count[ref_type][1] += float(sys.getsizeof(obj))/1024 # Size
- yield "
"
-
- yield "
Object referrer (total: %s, %.2fkb):
" % (len(ref_count), sum([stat[1] for stat in ref_count.values()]))
-
- for obj, stat in sorted(ref_count.items(), key=lambda x: x[1][0], reverse=True)[0:30]: # Sorted by count
- yield " - %.1fkb = %s x %s
" % (stat[1], stat[0], cgi.escape(str(obj)) )
-
- gc.collect() # Implicit grabage collection
-
-
- def actionBenchmark(self):
- import sys, gc
- from contextlib import contextmanager
-
- output = self.sendHeader()
-
- @contextmanager
- def benchmark(name, standard):
- s = time.time()
- output("- %s" % name)
- try:
- yield 1
- except Exception, err:
- output("
! Error: %s
" % err)
- taken = time.time()-s
- multipler = standard/taken
- if multipler < 0.3: speed = "Sloooow"
- elif multipler < 0.5: speed = "Ehh"
- elif multipler < 0.8: speed = "Goodish"
- elif multipler < 1.2: speed = "OK"
- elif multipler < 1.7: speed = "Fine"
- elif multipler < 2.5: speed = "Fast"
- elif multipler < 3.5: speed = "WOW"
- else: speed = "Insane!!"
- output("%.3fs [x%.2f: %s]
" % (taken, multipler, speed))
- time.sleep(0.01)
-
-
- yield """
-
- """
-
- yield "Benchmarking ZeroNet %s (rev%s) Python %s, platform: %s...
" % (config.version, config.rev, sys.version, sys.platform)
-
- t = time.time()
-
- yield "
CryptBitcoin:
"
- from Crypt import CryptBitcoin
-
- # seed = CryptBitcoin.newSeed()
- # yield "- Seed: %s
" % seed
- seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"
-
- with benchmark("hdPrivatekey x 10", 0.7):
- for i in range(10):
- privatekey = CryptBitcoin.hdPrivatekey(seed, i*10)
- yield "."
- valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
- assert privatekey == valid, "%s != %s" % (privatekey, valid)
-
-
- data = "Hello"*1024 #5k
- with benchmark("sign x 10", 0.35):
- for i in range(10):
- yield "."
- sign = CryptBitcoin.sign(data, privatekey)
- valid = "HFGXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOi+4+BbWHjuwmx0EaKNV1G+kP0tQDxWu0YApxwxZbSmZU="
- assert sign == valid, "%s != %s" % (sign, valid)
-
-
- address = CryptBitcoin.privatekeyToAddress(privatekey)
- if CryptBitcoin.opensslVerify: # Openssl avalible
- with benchmark("openssl verify x 100", 0.37):
- for i in range(100):
- if i%10==0: yield "."
- ok = CryptBitcoin.verify(data, address, sign)
- assert ok, "does not verify from %s" % address
- else:
- yield " - openssl verify x 100...not avalible :(
"
-
- opensslVerify_bk = CryptBitcoin.opensslVerify # Emulate openssl not found in any way
- CryptBitcoin.opensslVerify = None
- with benchmark("pure-python verify x 10", 1.6):
- for i in range(10):
- yield "."
- ok = CryptBitcoin.verify(data, address, sign)
- assert ok, "does not verify from %s" % address
- CryptBitcoin.opensslVerify = opensslVerify_bk
-
-
- yield "
CryptHash:
"
- from Crypt import CryptHash
- from cStringIO import StringIO
-
- data = StringIO("Hello"*1024*1024) #5m
- with benchmark("sha512 x 10 000", 1):
- for i in range(10):
- for y in range(10000):
- hash = CryptHash.sha512sum(data)
- yield "."
- valid = "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"
- assert hash == valid, "%s != %s" % (hash, valid)
-
-
- yield "
Db:
"
- from Db import Db
-
- schema = {
- "db_name": "TestDb",
- "db_file": "%s/benchmark.db" % config.data_dir,
- "maps": {
- ".*": {
- "to_table": {
- "test": "test"
- }
- }
- },
- "tables": {
- "test": {
- "cols": [
- ["test_id", "INTEGER"],
- ["title", "TEXT"],
- ["json_id", "INTEGER REFERENCES json (json_id)"]
- ],
- "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
- "schema_changed": 1426195822
- }
- }
- }
-
- if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir)
-
- with benchmark("Open x 10", 0.13):
- for i in range(10):
- db = Db(schema, "%s/benchmark.db" % config.data_dir)
- db.checkTables()
- db.close()
- yield "."
-
-
- db = Db(schema, "%s/benchmark.db" % config.data_dir)
- db.checkTables()
- import json
-
- with benchmark("Insert x 10 x 1000", 1.0):
- for u in range(10): # 10 user
- data = {"test": []}
- for i in range(1000): # 1000 line of data
- data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
- json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
- db.loadJson("%s/test_%s.json" % (config.data_dir, u))
- os.unlink("%s/test_%s.json" % (config.data_dir, u))
- yield "."
-
-
- with benchmark("Buffered insert x 100 x 100", 1.3):
- cur = db.getCursor()
- cur.execute("BEGIN")
- cur.logging = False
- for u in range(100, 200): # 100 user
- data = {"test": []}
- for i in range(100): # 1000 line of data
- data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
- json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
- db.loadJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
- os.unlink("%s/test_%s.json" % (config.data_dir, u))
- if u%10 == 0: yield "."
- cur.execute("COMMIT")
-
- yield " - Total rows in db: %s
" % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0]
-
- with benchmark("Indexed query x 1000", 0.25):
- found = 0
- cur = db.getCursor()
- cur.logging = False
- for i in range(1000): # 1000x by test_id
- res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
- for row in res:
- found += 1
- if i%100 == 0: yield "."
-
- assert found == 20000, "Found: %s != 20000" % found
-
-
- with benchmark("Not indexed query x 100", 0.6):
- found = 0
- cur = db.getCursor()
- cur.logging = False
- for i in range(100): # 1000x by test_id
- res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
- for row in res:
- found += 1
- if i%10 == 0: yield "."
-
- assert found == 18900, "Found: %s != 18900" % found
-
-
- with benchmark("Like query x 100", 1.8):
- found = 0
- cur = db.getCursor()
- cur.logging = False
- for i in range(100): # 1000x by test_id
- res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
- for row in res:
- found += 1
- if i%10 == 0: yield "."
-
- assert found == 38900, "Found: %s != 11000" % found
-
-
- db.close()
- if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir)
-
- gc.collect() # Implicit grabage collection
-
- yield "
Done. Total: %.2fs" % (time.time()-t)
-
-
- def actionGcCollect(self):
- import gc
- self.sendHeader()
- yield str(gc.collect())
+
+ def formatTableRow(self, row):
+ back = []
+ for format, val in row:
+ if val is None:
+ formatted = "n/a"
+ elif format == "since":
+ if val:
+ formatted = "%.0f" % (time.time() - val)
+ else:
+ formatted = "n/a"
+ else:
+ formatted = format % val
+ back.append("%s | " % formatted)
+ return "%s
" % "".join(back)
+
+ def getObjSize(self, obj, hpy=None):
+ if hpy:
+ return float(hpy.iso(obj).domisize) / 1024
+ else:
+ return 0
+
+ # /Stats entry point
+ def actionStats(self):
+ import gc
+ import sys
+ from Ui import UiRequest
+ from Crypt import CryptConnection
+
+ hpy = None
+ if self.get.get("size") == "1": # Calc obj size
+ try:
+ import guppy
+ hpy = guppy.hpy()
+ except:
+ pass
+ self.sendHeader()
+ s = time.time()
+ main = sys.modules["main"]
+
+ # Style
+ yield """
+
+ """
+
+ # Memory
+ try:
+ yield "rev%s | " % config.rev
+ yield "%s | " % config.ip_external
+ yield "Opened: %s | " % main.file_server.port_opened
+ yield "Crypt: %s | " % CryptConnection.manager.crypt_supported
+ yield "In: %.2fMB, Out: %.2fMB | " % (
+ float(main.file_server.bytes_recv) / 1024 / 1024,
+ float(main.file_server.bytes_sent) / 1024 / 1024
+ )
+ yield "Peerid: %s | " % main.file_server.peer_id
+ import psutil
+ process = psutil.Process(os.getpid())
+ mem = process.get_memory_info()[0] / float(2 ** 20)
+ yield "Mem: %.2fMB | " % mem
+ yield "Threads: %s | " % len(process.threads())
+ yield "CPU: usr %.2fs sys %.2fs | " % process.cpu_times()
+ yield "Files: %s | " % len(process.open_files())
+ yield "Sockets: %s | " % len(process.connections())
+ yield "Calc size on off"
+ except Exception:
+ pass
+ yield "
"
+
+ # Connections
+ yield "Connections (%s, total made: %s):
" % (
+ len(main.file_server.connections), main.file_server.last_connection_id
+ )
+ yield " id | proto | type | ip | open | crypt | ping | "
+ yield "buff | idle | open | delay | out | in | last sent | "
+ yield "waiting | version | peerid |
"
+ for connection in main.file_server.connections:
+ if "cipher" in dir(connection.sock):
+ cipher = connection.sock.cipher()[0]
+ else:
+ cipher = connection.crypt
+ yield self.formatTableRow([
+ ("%3d", connection.id),
+ ("%s", connection.protocol),
+ ("%s", connection.type),
+ ("%s:%s", (connection.ip, connection.port)),
+ ("%s", connection.handshake.get("port_opened")),
+ ("%s", (connection.crypt, cipher)),
+ ("%6.3f", connection.last_ping_delay),
+ ("%s", connection.incomplete_buff_recv),
+ ("since", max(connection.last_send_time, connection.last_recv_time)),
+ ("since", connection.start_time),
+ ("%.3f", connection.last_sent_time - connection.last_send_time),
+ ("%.0fkB", connection.bytes_sent / 1024),
+ ("%.0fkB", connection.bytes_recv / 1024),
+ ("%s", connection.last_cmd),
+ ("%s", connection.waiting_requests.keys()),
+ ("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?"))),
+ ("%s", connection.handshake.get("peer_id")),
+ ])
+ yield "
"
+
+ # Sites
+ yield "
Sites:"
+ yield ""
+ yield "address | connected | peers | content.json |
"
+ for site in self.server.sites.values():
+ yield self.formatTableRow([
+ (
+ """<a href='#' onclick='document.getElementById("peers_%s").style.display="initial"; return false'>%s</a>""",
+ (site.address, site.address)
+ ),
+ ("%s", [peer.connection.id for peer in site.peers.values() if peer.connection and peer.connection.connected]),
+ ("%s/%s/%s", (
+ len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]),
+ len(site.getConnectablePeers(100)),
+ len(site.peers)
+ )),
+ ("%s", len(site.content_manager.contents)),
+ ])
+ yield "" % site.address
+ for key, peer in site.peers.items():
+ yield "(%s, err: %s) %22s - " % (peer.connection, peer.connection_error, key)
+ yield "
|
"
+ yield "
"
+
+ # Object types
+
+ obj_count = {}
+ for obj in gc.get_objects():
+ obj_type = str(type(obj))
+ if obj_type not in obj_count:
+ obj_count[obj_type] = [0, 0]
+ obj_count[obj_type][0] += 1 # Count
+ obj_count[obj_type][1] += float(sys.getsizeof(obj)) / 1024 # Size
+
+ yield "
Objects in memory (types: %s, total: %s, %.2fkb):
" % (
+ len(obj_count),
+ sum([stat[0] for stat in obj_count.values()]),
+ sum([stat[1] for stat in obj_count.values()])
+ )
+
+ for obj, stat in sorted(obj_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count
+ yield " - %.1fkb = %s x %s
" % (stat[1], stat[0], obj, cgi.escape(obj))
+
+ # Classes
+
+ class_count = {}
+ for obj in gc.get_objects():
+ obj_type = str(type(obj))
+ if obj_type != "":
+ continue
+ class_name = obj.__class__.__name__
+ if class_name not in class_count:
+ class_count[class_name] = [0, 0]
+ class_count[class_name][0] += 1 # Count
+ class_count[class_name][1] += float(sys.getsizeof(obj)) / 1024 # Size
+
+ yield "
Classes in memory (types: %s, total: %s, %.2fkb):
" % (
+ len(class_count),
+ sum([stat[0] for stat in class_count.values()]),
+ sum([stat[1] for stat in class_count.values()])
+ )
+
+ for obj, stat in sorted(class_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count
+ yield " - %.1fkb = %s x %s
" % (stat[1], stat[0], obj, cgi.escape(obj))
+
+ from greenlet import greenlet
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
+ yield "
Greenlets (%s):
" % len(objs)
+ for obj in objs:
+ yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
+
+ from Worker import Worker
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)]
+ yield "
Workers (%s):
" % len(objs)
+ for obj in objs:
+ yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
+
+ from Connection import Connection
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)]
+ yield "
Connections (%s):
" % len(objs)
+ for obj in objs:
+ yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
+
+ from socket import socket
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, socket)]
+ yield "
Sockets (%s):
" % len(objs)
+ for obj in objs:
+ yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
+
+ from msgpack import Unpacker
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, Unpacker)]
+ yield "
Msgpack unpacker (%s):
" % len(objs)
+ for obj in objs:
+ yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
+
+ from Site import Site
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)]
+ yield "
Sites (%s):
" % len(objs)
+ for obj in objs:
+ yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
+
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, self.server.log.__class__)]
+ yield "
Loggers (%s):
" % len(objs)
+ for obj in objs:
+ yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj.name)))
+
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)]
+ yield "
UiRequests (%s):
" % len(objs)
+ for obj in objs:
+ yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
+
+ from Peer import Peer
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)]
+ yield "
Peers (%s):
" % len(objs)
+ for obj in objs:
+ yield " - %.1fkb: %s
" % (self.getObjSize(obj, hpy), cgi.escape(repr(obj)))
+
+ objs = [(key, val) for key, val in sys.modules.iteritems() if val is not None]
+ objs.sort()
+ yield "
Modules (%s):
" % len(objs)
+ for module_name, module in objs:
+ yield " - %.3fkb: %s %s
" % (self.getObjSize(module, hpy), module_name, cgi.escape(repr(module)))
+
+ gc.collect() # Explicit garbage collection
+ yield "Done in %.1f" % (time.time() - s)
+
+ def actionDumpobj(self):
+ import gc
+ import sys
+
+ self.sendHeader()
+ class_filter = self.get.get("class")
+
+ yield """
+
+ """
+
+ objs = gc.get_objects()
+ for obj in objs:
+ obj_type = str(type(obj))
+ if obj_type != "" or obj.__class__.__name__ != class_filter:
+ continue
+ yield "%.1fkb %s... " % (float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)))
+ for attr in dir(obj):
+ yield "- %s: %s
" % (attr, cgi.escape(str(getattr(obj, attr))))
+ yield "
"
+
+ gc.collect() # Explicit garbage collection
+
+ def actionListobj(self):
+ import gc
+ import sys
+
+ self.sendHeader()
+ type_filter = self.get.get("type")
+
+ yield """
+
+ """
+
+ yield "Listing all %s objects in memory...
" % cgi.escape(type_filter)
+
+ ref_count = {}
+ objs = gc.get_objects()
+ for obj in objs:
+ obj_type = str(type(obj))
+ if obj_type != type_filter:
+ continue
+ refs = [
+ ref for ref in gc.get_referrers(obj)
+ if hasattr(ref, "__class__") and
+ ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"]
+ ]
+ if not refs:
+ continue
+ yield "%.1fkb %s... " % (
+ float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)), cgi.escape(str(obj)[0:100].ljust(100))
+ )
+ for ref in refs:
+ yield " ["
+ if "object at" in str(ref) or len(str(ref)) > 100:
+ yield str(ref.__class__.__name__)
+ else:
+ yield str(ref.__class__.__name__) + ":" + cgi.escape(str(ref))
+ yield "] "
+ ref_type = ref.__class__.__name__
+ if ref_type not in ref_count:
+ ref_count[ref_type] = [0, 0]
+ ref_count[ref_type][0] += 1 # Count
+ ref_count[ref_type][1] += float(sys.getsizeof(obj)) / 1024 # Size
+ yield "
"
+
+ yield "
Object referrer (total: %s, %.2fkb):
" % (len(ref_count), sum([stat[1] for stat in ref_count.values()]))
+
+ for obj, stat in sorted(ref_count.items(), key=lambda x: x[1][0], reverse=True)[0:30]: # Sorted by count
+ yield " - %.1fkb = %s x %s
" % (stat[1], stat[0], cgi.escape(str(obj)))
+
+ gc.collect() # Explicit garbage collection
+
+ def actionBenchmark(self):
+ import sys
+ import gc
+ from contextlib import contextmanager
+
+ output = self.sendHeader()
+
+ @contextmanager
+ def benchmark(name, standard):
+ s = time.time()
+ output("- %s" % name)
+ try:
+ yield 1
+ except Exception, err:
+ output("
! Error: %s
" % err)
+ taken = time.time() - s
+ multipler = standard / taken
+ if multipler < 0.3:
+ speed = "Sloooow"
+ elif multipler < 0.5:
+ speed = "Ehh"
+ elif multipler < 0.8:
+ speed = "Goodish"
+ elif multipler < 1.2:
+ speed = "OK"
+ elif multipler < 1.7:
+ speed = "Fine"
+ elif multipler < 2.5:
+ speed = "Fast"
+ elif multipler < 3.5:
+ speed = "WOW"
+ else:
+ speed = "Insane!!"
+ output("%.3fs [x%.2f: %s]
" % (taken, multipler, speed))
+ time.sleep(0.01)
+
+ yield """
+
+ """
+
+ yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...
" % (config.version, config.rev, sys.version, sys.platform)
+
+ t = time.time()
+
+ yield "
CryptBitcoin:
"
+ from Crypt import CryptBitcoin
+
+ # seed = CryptBitcoin.newSeed()
+ # yield "- Seed: %s
" % seed
+ seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"
+
+ with benchmark("hdPrivatekey x 10", 0.7):
+ for i in range(10):
+ privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10)
+ yield "."
+ valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
+ assert privatekey == valid, "%s != %s" % (privatekey, valid)
+
+ data = "Hello" * 1024 # 5k
+ with benchmark("sign x 10", 0.35):
+ for i in range(10):
+ yield "."
+ sign = CryptBitcoin.sign(data, privatekey)
+ valid = "HFGXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOi+4+BbWHjuwmx0EaKNV1G+kP0tQDxWu0YApxwxZbSmZU="
+ assert sign == valid, "%s != %s" % (sign, valid)
+
+ address = CryptBitcoin.privatekeyToAddress(privatekey)
+ if CryptBitcoin.opensslVerify: # OpenSSL available
+ with benchmark("openssl verify x 100", 0.37):
+ for i in range(100):
+ if i % 10 == 0:
+ yield "."
+ ok = CryptBitcoin.verify(data, address, sign)
+ assert ok, "does not verify from %s" % address
+ else:
+ yield " - openssl verify x 100...not avalible :(
"
+
+ openssl_verify_bk = CryptBitcoin.opensslVerify # Emulate OpenSSL not being available
+ CryptBitcoin.opensslVerify = None
+ with benchmark("pure-python verify x 10", 1.6):
+ for i in range(10):
+ yield "."
+ ok = CryptBitcoin.verify(data, address, sign)
+ assert ok, "does not verify from %s" % address
+ CryptBitcoin.opensslVerify = openssl_verify_bk
+
+ yield "
CryptHash:
"
+ from Crypt import CryptHash
+ from cStringIO import StringIO
+
+ data = StringIO("Hello" * 1024 * 1024) # 5m
+ with benchmark("sha512 x 10 000", 1):
+ for i in range(10):
+ for y in range(10000):
+ hash = CryptHash.sha512sum(data)
+ yield "."
+ valid = "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"
+ assert hash == valid, "%s != %s" % (hash, valid)
+
+ yield "
Db:
"
+ from Db import Db
+
+ schema = {
+ "db_name": "TestDb",
+ "db_file": "%s/benchmark.db" % config.data_dir,
+ "maps": {
+ ".*": {
+ "to_table": {
+ "test": "test"
+ }
+ }
+ },
+ "tables": {
+ "test": {
+ "cols": [
+ ["test_id", "INTEGER"],
+ ["title", "TEXT"],
+ ["json_id", "INTEGER REFERENCES json (json_id)"]
+ ],
+ "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
+ "schema_changed": 1426195822
+ }
+ }
+ }
+
+ if os.path.isfile("%s/benchmark.db" % config.data_dir):
+ os.unlink("%s/benchmark.db" % config.data_dir)
+
+ with benchmark("Open x 10", 0.13):
+ for i in range(10):
+ db = Db(schema, "%s/benchmark.db" % config.data_dir)
+ db.checkTables()
+ db.close()
+ yield "."
+
+ db = Db(schema, "%s/benchmark.db" % config.data_dir)
+ db.checkTables()
+ import json
+
+ with benchmark("Insert x 10 x 1000", 1.0):
+ for u in range(10): # 10 users
+ data = {"test": []}
+ for i in range(1000): # 1000 lines of data
+ data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
+ json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
+ db.loadJson("%s/test_%s.json" % (config.data_dir, u))
+ os.unlink("%s/test_%s.json" % (config.data_dir, u))
+ yield "."
+
+ with benchmark("Buffered insert x 100 x 100", 1.3):
+ cur = db.getCursor()
+ cur.execute("BEGIN")
+ cur.logging = False
+ for u in range(100, 200): # 100 users
+ data = {"test": []}
+ for i in range(100): # 100 lines of data
+ data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
+ json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
+ db.loadJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
+ os.unlink("%s/test_%s.json" % (config.data_dir, u))
+ if u % 10 == 0:
+ yield "."
+ cur.execute("COMMIT")
+
+ yield " - Total rows in db: %s
" % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0]
+
+ with benchmark("Indexed query x 1000", 0.25):
+ found = 0
+ cur = db.getCursor()
+ cur.logging = False
+ for i in range(1000): # 1000x by test_id
+ res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
+ for row in res:
+ found += 1
+ if i % 100 == 0:
+ yield "."
+
+ assert found == 20000, "Found: %s != 20000" % found
+
+ with benchmark("Not indexed query x 100", 0.6):
+ found = 0
+ cur = db.getCursor()
+ cur.logging = False
+ for i in range(100): # 100x by json_id
+ res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
+ for row in res:
+ found += 1
+ if i % 10 == 0:
+ yield "."
+
+ assert found == 18900, "Found: %s != 18900" % found
+
+ with benchmark("Like query x 100", 1.8):
+ found = 0
+ cur = db.getCursor()
+ cur.logging = False
+ for i in range(100): # 100x by title (LIKE)
+ res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
+ for row in res:
+ found += 1
+ if i % 10 == 0:
+ yield "."
+
+ assert found == 38900, "Found: %s != 11000" % found
+
+ db.close()
+ if os.path.isfile("%s/benchmark.db" % config.data_dir):
+ os.unlink("%s/benchmark.db" % config.data_dir)
+
+ gc.collect() # Explicit garbage collection
+
+ yield "
Done. Total: %.2fs" % (time.time() - t)
+
+ def actionGcCollect(self):
+ import gc
+ self.sendHeader()
+ yield str(gc.collect())
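
For context on the `actionBenchmark` helper above: `benchmark` is a generator-based context manager, so the `with` body runs at the `yield 1`, exceptions from the body surface inside the `try`, and the elapsed time is graded against a `standard` baseline. A minimal standalone sketch of the pattern (Python 2, same shape as the plugin; the sleep workload is a made-up stand-in):

    import time
    from contextlib import contextmanager

    @contextmanager
    def benchmark(name, standard):
        s = time.time()
        print "- %s" % name,
        try:
            yield 1  # the with-body executes here
        except Exception, err:
            print "! Error: %s" % err
        taken = time.time() - s
        multiplier = standard / taken  # >1.0 means faster than the baseline
        print "%.3fs [x%.2f]" % (taken, multiplier)

    with benchmark("sleep x 1", 0.1):  # hypothetical workload
        time.sleep(0.1)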
diff --git a/plugins/Trayicon/TrayiconPlugin.py b/plugins/Trayicon/TrayiconPlugin.py
index 9e35cb1f..6aec34a1 100644
--- a/plugins/Trayicon/TrayiconPlugin.py
+++ b/plugins/Trayicon/TrayiconPlugin.py
@@ -1,130 +1,134 @@
-import re, time, cgi, os, sys
-from Plugin import PluginManager
-from Config import config
+import time
+import os
+import sys
import atexit
-allow_reload = False # No reload supported
+from Plugin import PluginManager
+from Config import config
+
+allow_reload = False # No source reload supported in this plugin
+
@PluginManager.registerTo("Actions")
class ActionsPlugin(object):
- def main(self):
- global notificationicon, winfolders
- from lib import notificationicon, winfolders
- import gevent.threadpool
- self.main = sys.modules["main"]
+ def main(self):
+ global notificationicon, winfolders
+ from lib import notificationicon, winfolders
+ import gevent.threadpool
- icon = notificationicon.NotificationIcon(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'), "ZeroNet %s" % config.version)
- self.icon = icon
+ self.main = sys.modules["main"]
- if not config.debug: # Hide console if not in debug mode
- notificationicon.hideConsole()
- self.console = False
- else:
- self.console = True
+ icon = notificationicon.NotificationIcon(
+ os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'),
+ "ZeroNet %s" % config.version
+ )
+ self.icon = icon
- @atexit.register
- def hideIcon():
- icon.die()
+ if not config.debug: # Hide console if not in debug mode
+ notificationicon.hideConsole()
+ self.console = False
+ else:
+ self.console = True
- icon.items = (
- (self.titleIp, False),
- (self.titleConnections, False),
- (self.titleTransfer, False),
- (self.titleConsole, self.toggleConsole),
- (self.titleAutorun, self.toggleAutorun),
- "--",
- ("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet") ),
- ("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/") ),
- ("ZeroNet Github", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet") ),
- ("Report bug/request feature", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues") ),
- "--",
- ("!Open ZeroNet", lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port)) ),
- #"--",
- #("Start ZeroNet when Windows starts", quit),
- "--",
- ("Quit", self.quit),
+ @atexit.register
+ def hideIcon():
+ icon.die()
- )
+ icon.items = (
+ (self.titleIp, False),
+ (self.titleConnections, False),
+ (self.titleTransfer, False),
+ (self.titleConsole, self.toggleConsole),
+ (self.titleAutorun, self.toggleAutorun),
+ "--",
+ ("ZeroNet Twitter", lambda: self.opensite("https://twitter.com/HelloZeroNet")),
+ ("ZeroNet Reddit", lambda: self.opensite("http://www.reddit.com/r/zeronet/")),
+ ("ZeroNet Github", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet")),
+ ("Report bug/request feature", lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues")),
+ "--",
+ ("!Open ZeroNet", lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port))),
+ "--",
+ ("Quit", self.quit),
- icon.clicked = lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port))
- gevent.threadpool.start_new_thread(icon._run, ()) # Start in real thread (not gevent compatible)
- super(ActionsPlugin, self).main()
- icon._die = True
+ )
+ icon.clicked = lambda: self.opensite("http://%s:%s" % (config.ui_ip, config.ui_port))
+ gevent.threadpool.start_new_thread(icon._run, ()) # Start in real thread (not gevent compatible)
+ super(ActionsPlugin, self).main()
+ icon._die = True
- def quit(self):
- self.icon.die()
- time.sleep(0.1)
- self.main.ui_server.stop()
- self.main.file_server.stop()
- #sys.exit()
+ def quit(self):
+ self.icon.die()
+ time.sleep(0.1)
+ self.main.ui_server.stop()
+ self.main.file_server.stop()
+ # sys.exit()
+ def opensite(self, url):
+ import webbrowser
+ webbrowser.open(url, new=0)
- def opensite(self, url):
- import webbrowser
- webbrowser.open(url, new=2)
+ def titleIp(self):
+ title = "!IP: %s" % config.ip_external
+ if self.main.file_server.port_opened:
+ title += " (active)"
+ else:
+ title += " (passive)"
+ return title
+ def titleConnections(self):
+ title = "Connections: %s" % len(self.main.file_server.connections)
+ return title
- def titleIp(self):
- title = "!IP: %s" % config.ip_external
- if self.main.file_server.port_opened:
- title += " (active)"
- else:
- title += " (passive)"
- return title
+ def titleTransfer(self):
+ title = "Received: %.2f MB | Sent: %.2f MB" % (
+ float(self.main.file_server.bytes_recv) / 1024 / 1024,
+ float(self.main.file_server.bytes_sent) / 1024 / 1024
+ )
+ return title
+ def titleConsole(self):
+ if self.console:
+ return "+Show console window"
+ else:
+ return "Show console window"
- def titleConnections(self):
- title = "Connections: %s" % len(self.main.file_server.connections)
- return title
+ def toggleConsole(self):
+ if self.console:
+ notificationicon.hideConsole()
+ self.console = False
+ else:
+ notificationicon.showConsole()
+ self.console = True
+ def getAutorunPath(self):
+ return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP)
- def titleTransfer(self):
- title = "Received: %.2f MB | Sent: %.2f MB" % (float(self.main.file_server.bytes_recv)/1024/1024, float(self.main.file_server.bytes_sent)/1024/1024)
- return title
+ def formatAutorun(self):
+ args = sys.argv[:]
+ args.insert(0, sys.executable)
+ if sys.platform == 'win32':
+ args = ['"%s"' % arg for arg in args]
+ cmd = " ".join(args)
+ # Don't open browser on autorun
+ cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "")
- def titleConsole(self):
- if self.console: return "+Show console window"
- else: return "Show console window"
+ return "cd /D %s \n%s" % (os.getcwd(), cmd)
+ def isAutorunEnabled(self):
+ path = self.getAutorunPath()
+ return os.path.isfile(path) and open(path).read() == self.formatAutorun()
- def toggleConsole(self):
- if self.console:
- notificationicon.hideConsole()
- self.console = False
- else:
- notificationicon.showConsole()
- self.console = True
+ def titleAutorun(self):
+ if self.isAutorunEnabled():
+ return "+Start ZeroNet when Windows starts"
+ else:
+ return "Start ZeroNet when Windows starts"
-
- def getAutorunPath(self):
- return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP)
-
-
- def formatAutorun(self):
- args = sys.argv[:]
- args.insert(0, sys.executable)
- if sys.platform == 'win32':
- args = ['"%s"' % arg for arg in args]
- cmd = " ".join(args)
- cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "") # Dont open browser on autorun
- return "cd /D %s \n%s" % (os.getcwd(), cmd)
-
-
- def isAutorunEnabled(self):
- path = self.getAutorunPath()
- return os.path.isfile(path) and open(path).read() == self.formatAutorun()
-
-
- def titleAutorun(self):
- if self.isAutorunEnabled(): return "+Start ZeroNet when Windows starts"
- else: return "Start ZeroNet when Windows starts"
-
-
- def toggleAutorun(self):
- if self.isAutorunEnabled():
- os.unlink(self.getAutorunPath())
- else:
- open(self.getAutorunPath(), "w").write(self.formatAutorun())
+ def toggleAutorun(self):
+ if self.isAutorunEnabled():
+ os.unlink(self.getAutorunPath())
+ else:
+ open(self.getAutorunPath(), "w").write(self.formatAutorun())
diff --git a/plugins/Zeroname/SiteManagerPlugin.py b/plugins/Zeroname/SiteManagerPlugin.py
index e2962549..d2b82c37 100644
--- a/plugins/Zeroname/SiteManagerPlugin.py
+++ b/plugins/Zeroname/SiteManagerPlugin.py
@@ -1,75 +1,71 @@
-import logging, json, os, re, sys, time
-import gevent
-from Plugin import PluginManager
-from Config import config
-from Debug import Debug
+import logging
+import re
-allow_reload = False # No reload supported
+from Plugin import PluginManager
+
+allow_reload = False # No reload supported
log = logging.getLogger("ZeronamePlugin")
@PluginManager.registerTo("SiteManager")
class SiteManagerPlugin(object):
- zeroname_address = "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F"
- site_zeroname = None
- def load(self):
- super(SiteManagerPlugin, self).load()
- if not self.get(self.zeroname_address): self.need(self.zeroname_address) # Need ZeroName site
+ zeroname_address = "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F"
+ site_zeroname = None
- # Checks if its a valid address
- def isAddress(self, address):
- if self.isDomain(address):
- return True
- else:
- return super(SiteManagerPlugin, self).isAddress(address)
+ def load(self):
+ super(SiteManagerPlugin, self).load()
+ if not self.get(self.zeroname_address):
+ self.need(self.zeroname_address) # Need ZeroName site
+ # Checks if it's a valid address
+ def isAddress(self, address):
+ if self.isDomain(address):
+ return True
+ else:
+ return super(SiteManagerPlugin, self).isAddress(address)
- # Return: True if the address is domain
- def isDomain(self, address):
- return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
+ # Return: True if the address is domain
+ def isDomain(self, address):
+ return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
+ # Resolve domain
+ # Return: The address or None
+ def resolveDomain(self, domain):
+ domain = domain.lower()
+ if not self.site_zeroname:
+ self.site_zeroname = self.need(self.zeroname_address)
+ self.site_zeroname.needFile("data/names.json", priority=10)
+ db = self.site_zeroname.storage.loadJson("data/names.json")
+ return db.get(domain)
- # Resolve domain
- # Return: The address or None
- def resolveDomain(self, domain):
- domain = domain.lower()
- if not self.site_zeroname:
- self.site_zeroname = self.need(self.zeroname_address)
- self.site_zeroname.needFile("data/names.json", priority=10)
- db = self.site_zeroname.storage.loadJson("data/names.json")
- return db.get(domain)
+ # Return or create site and start download site files
+ # Return: Site or None if dns resolve failed
+ def need(self, address, all_file=True):
+ if self.isDomain(address): # It looks like a domain
+ address_resolved = self.resolveDomain(address)
+ if address_resolved:
+ address = address_resolved
+ else:
+ return None
+ return super(SiteManagerPlugin, self).need(address, all_file)
- # Return or create site and start download site files
- # Return: Site or None if dns resolve failed
- def need(self, address, all_file=True):
- if self.isDomain(address): # Its looks like a domain
- address_resolved = self.resolveDomain(address)
- if address_resolved:
- address = address_resolved
- else:
- return None
-
- return super(SiteManagerPlugin, self).need(address, all_file)
-
-
- # Return: Site object or None if not found
- def get(self, address):
- if self.sites == None: # Not loaded yet
- self.load()
- if self.isDomain(address): # Its looks like a domain
- address_resolved = self.resolveDomain(address)
- if address_resolved: # Domain found
- site = self.sites.get(address_resolved)
- if site:
- site_domain = site.settings.get("domain")
- if site_domain != address:
- site.settings["domain"] = address
- else: # Domain not found
- site = self.sites.get(address)
-
- else: # Access by site address
- site = self.sites.get(address)
- return site
+ # Return: Site object or None if not found
+ def get(self, address):
+ if self.sites is None: # Not loaded yet
+ self.load()
+ if self.isDomain(address): # It looks like a domain
+ address_resolved = self.resolveDomain(address)
+ if address_resolved: # Domain found
+ site = self.sites.get(address_resolved)
+ if site:
+ site_domain = site.settings.get("domain")
+ if site_domain != address:
+ site.settings["domain"] = address
+ else: # Domain not found
+ site = self.sites.get(address)
+ else: # Access by site address
+ site = self.sites.get(address)
+ return site
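
For context on the `isDomain` check above: it is purely syntactic (the address merely has to end in a dot-separated label), so `.bit` names match while plain site addresses fall through to the parent class. A quick sketch with the same regex (the second address is the ZeroName registry address from this diff):

    import re

    def isDomain(address):
        return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)

    print bool(isDomain("zeronetwork.bit"))                     # True: looks like a domain
    print bool(isDomain("1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F"))  # False: no dot, treated as a site address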
diff --git a/plugins/Zeroname/UiRequestPlugin.py b/plugins/Zeroname/UiRequestPlugin.py
index 3e54c765..0019015d 100644
--- a/plugins/Zeroname/UiRequestPlugin.py
+++ b/plugins/Zeroname/UiRequestPlugin.py
@@ -1,40 +1,39 @@
import re
from Plugin import PluginManager
+
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
- def __init__(self, *args, **kwargs):
- from Site import SiteManager
- self.site_manager = SiteManager.site_manager
- super(UiRequestPlugin, self).__init__(*args, **kwargs)
+ def __init__(self, *args, **kwargs):
+ from Site import SiteManager
+ self.site_manager = SiteManager.site_manager
+ super(UiRequestPlugin, self).__init__(*args, **kwargs)
- # Media request
- def actionSiteMedia(self, path):
- match = re.match("/media/(?P[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P/.*|$)", path)
- if match: # Its a valid domain, resolve first
- domain = match.group("address")
- address = self.site_manager.resolveDomain(domain)
- if address:
- path = "/media/"+address+match.group("inner_path")
- return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output
+ # Media request
+ def actionSiteMedia(self, path):
+ match = re.match("/media/(?P[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P/.*|$)", path)
+ if match: # Its a valid domain, resolve first
+ domain = match.group("address")
+ address = self.site_manager.resolveDomain(domain)
+ if address:
+ path = "/media/" + address + match.group("inner_path")
+ return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output
+ # Is media request allowed from that referer
+ def isMediaRequestAllowed(self, site_address, referer):
+ referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
+ referer_path = re.sub("\?.*", "", referer_path) # Remove http params
- # Is mediarequest allowed from that referer
- def isMediaRequestAllowed(self, site_address, referer):
- referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
- referer_path = re.sub("\?.*", "", referer_path) # Remove http params
-
- if self.isProxyRequest(): # Match to site domain
- referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access
- referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1)
- else: # Match to request path
- referer_site_address = re.match("/(?P[A-Za-z0-9\.]+)(?P/.*|$)", referer_path).group("address")
-
- if referer_site_address == site_address: # Referer site address as simple address
- return True
- elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns
- return True
- else: # Invalid referer
- return False
+ if self.isProxyRequest(): # Match to site domain
+ referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access
+ referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1)
+ else: # Match to request path
+ referer_site_address = re.match("/(?P[A-Za-z0-9\.]+)(?P/.*|$)", referer_path).group("address")
+ if referer_site_address == site_address: # Referer site address as simple address
+ return True
+ elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns
+ return True
+ else: # Invalid referer
+ return False
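
For context on `isMediaRequestAllowed` above: the referer is first reduced to a path by stripping the scheme/host and the query string, then the leading path component is taken as the referring site address (which may itself be a domain that still needs resolving). Tracing the two regexes with a hypothetical referer:

    import re

    referer = "http://127.0.0.1:43110/zeronetwork.bit/?Home"  # hypothetical referer
    referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "")  # strip scheme and host
    referer_path = re.sub("\?.*", "", referer_path)  # strip query string
    print referer_path  # /zeronetwork.bit/
    match = re.match("/(?P<address>[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", referer_path)
    print match.group("address")  # zeronetwork.bit -> then checked via site_manager.resolveDomain()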
diff --git a/plugins/Zeroname/updater/zeroname_updater.py b/plugins/Zeroname/updater/zeroname_updater.py
index ace15f62..170ab2b2 100644
--- a/plugins/Zeroname/updater/zeroname_updater.py
+++ b/plugins/Zeroname/updater/zeroname_updater.py
@@ -1,75 +1,83 @@
-from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
-import time, json, os, sys, re, socket
+import time
+import json
+import os
+import sys
+import re
+import socket
+
+from bitcoinrpc.authproxy import AuthServiceProxy
def publish():
- print "* Signing..."
- os.system("python zeronet.py siteSign %s %s" % (config["site"], config["privatekey"]))
- print "* Publishing..."
- os.system("python zeronet.py sitePublish %s" % config["site"])
-
+ print "* Signing..."
+ os.system("python zeronet.py siteSign %s %s" % (config["site"], config["privatekey"]))
+ print "* Publishing..."
+ os.system("python zeronet.py sitePublish %s" % config["site"])
def processNameOp(domain, value):
- if not value.startswith("{"): return False
- try:
- data = json.loads(value)
- except Exception, err:
- print "Json load error: %s" % err
- return False
- if "zeronet" not in data:
- print "No zeronet in ", data.keys()
- return False
- if type(data["zeronet"]) != type({}):
- print "Bad type: ", data["zeronet"]
- return False
+ if not value.startswith("{"):
+ return False
+ try:
+ data = json.loads(value)
+ except Exception, err:
+ print "Json load error: %s" % err
+ return False
+ if "zeronet" not in data:
+ print "No zeronet in ", data.keys()
+ return False
+ if not isinstance(data["zeronet"], dict):
+ print "Not dict: ", data["zeronet"]
+ return False
+ if not re.match("^[a-z]([a-z0-9-]{0,62}[a-z0-9])?$", domain):
+ print "Invalid domain: ", domain
+ return False
- if "slave" in sys.argv:
- print "Waiting for master update arrive"
- time.sleep(30) # Wait 30 sec to allow master updater
-
- #Note: Requires the file data/names.json to exist and contain "{}" to work
- names_raw = open(names_path, "rb").read()
- names = json.loads(names_raw)
- for subdomain, address in data["zeronet"].items():
- address = re.sub("[^A-Za-z0-9]", "", address)
- print subdomain, domain, "->", address
- if subdomain:
- names["%s.%s.bit" % (subdomain, domain)] = address
- else:
- names["%s.bit" % domain] = address
-
- new_names_raw = json.dumps(names, indent=2, sort_keys=True)
- if new_names_raw != names_raw:
- open(names_path, "wb").write(new_names_raw)
- return True
- else:
- print "names not changed"
- return False
+ if "slave" in sys.argv:
+ print "Waiting for master update arrive"
+ time.sleep(30) # Wait 30 sec to allow master updater
+ # Note: Requires the file data/names.json to exist and contain "{}" to work
+ names_raw = open(names_path, "rb").read()
+ names = json.loads(names_raw)
+ for subdomain, address in data["zeronet"].items():
+ subdomain = subdomain.lower()
+ address = re.sub("[^A-Za-z0-9]", "", address)
+ print subdomain, domain, "->", address
+ if subdomain:
+ names["%s.%s.bit" % (subdomain, domain)] = address
+ else:
+ names["%s.bit" % domain] = address
+ new_names_raw = json.dumps(names, indent=2, sort_keys=True)
+ if new_names_raw != names_raw:
+ open(names_path, "wb").write(new_names_raw)
+ return True
+ else:
+ print "names not changed"
+ return False
def processBlock(block_id):
- print "Processing block #%s..." % block_id
- s = time.time()
- block_hash = rpc.getblockhash(block_id)
- block = rpc.getblock(block_hash)
+ print "Processing block #%s..." % block_id
+ s = time.time()
+ block_hash = rpc.getblockhash(block_id)
+ block = rpc.getblock(block_hash)
- print "Checking %s tx" % len(block["tx"])
- updated = 0
- for tx in block["tx"]:
- try:
- transaction = rpc.getrawtransaction(tx, 1)
- for vout in transaction.get("vout",[]):
- if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]:
- name_op = vout["scriptPubKey"]["nameOp"]
- updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"])
- except Exception, err:
- print "Error processing tx #%s %s" % (tx, err)
- print "Done in %.3fs (updated %s)." % (time.time()-s, updated)
- if updated:
- publish()
+ print "Checking %s tx" % len(block["tx"])
+ updated = 0
+ for tx in block["tx"]:
+ try:
+ transaction = rpc.getrawtransaction(tx, 1)
+ for vout in transaction.get("vout", []):
+ if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]:
+ name_op = vout["scriptPubKey"]["nameOp"]
+ updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"])
+ except Exception, err:
+ print "Error processing tx #%s %s" % (tx, err)
+ print "Done in %.3fs (updated %s)." % (time.time() - s, updated)
+ if updated:
+ publish()
# Loading config...
@@ -83,16 +91,16 @@ else:
namecoin_location = os.path.expanduser("~/.namecoin/")
config_path = namecoin_location + 'zeroname_config.json'
-if not os.path.isfile(config_path): # Create sample config
- open(config_path, "w").write(
- json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet/', 'privatekey': '', 'lastprocessed': 223911}, indent=2)
- )
- print "Example config written to %s" % config_path
- sys.exit(0)
+if not os.path.isfile(config_path): # Create sample config
+ open(config_path, "w").write(
+ json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet/', 'privatekey': '', 'lastprocessed': 223911}, indent=2)
+ )
+ print "Example config written to %s" % config_path
+ sys.exit(0)
config = json.load(open(config_path))
names_path = "%s/data/%s/data/names.json" % (config["zeronet_path"], config["site"])
-os.chdir(config["zeronet_path"]) # Change working dir - tells script where Zeronet install is.
+os.chdir(config["zeronet_path"]) # Change working dir - tells script where Zeronet install is.
# Getting rpc connect details
namecoin_conf = open(namecoin_location + "namecoin.conf").read()
@@ -102,42 +110,47 @@ rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1)
rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)
-rpc = AuthServiceProxy(rpc_url, timeout=60*5)
+rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
last_block = int(rpc.getinfo()["blocks"])
-if not config["lastprocessed"]: # Start processing from last block
- config["lastprocessed"] = last_block
+if not config["lastprocessed"]: # Start processing from last block
+ config["lastprocessed"] = last_block
# Processing skipped blocks
print "Processing block from #%s to #%s..." % (config["lastprocessed"], last_block)
-for block_id in range(config["lastprocessed"], last_block+1):
- processBlock(block_id)
+for block_id in range(config["lastprocessed"], last_block + 1):
+ processBlock(block_id)
# processBlock(223911) # Testing zeronetwork.bit
# processBlock(227052) # Testing brainwallets.bit
+# processBlock(236824) # Utf8 domain name (invalid should skip)
+# processBlock(236752) # Uppercase domain (invalid should skip)
+# processBlock(236870) # Encoded domain (should pass)
+# sys.exit(0)
while 1:
- print "Waiting for new block",
- sys.stdout.flush()
- while 1:
- try:
- rpc = AuthServiceProxy(rpc_url, timeout=60*5)
- if (int(rpc.getinfo()["blocks"]) > last_block): break
- time.sleep(1)
- rpc.waitforblock()
- print "Found"
- break # Block found
- except socket.timeout: # Timeout
- print ".",
- sys.stdout.flush()
- except Exception, err:
- print "Exception", err.__class__, err
- time.sleep(5)
+ print "Waiting for new block",
+ sys.stdout.flush()
+ while 1:
+ try:
+ rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)
+ if (int(rpc.getinfo()["blocks"]) > last_block):
+ break
+ time.sleep(1)
+ rpc.waitforblock()
+ print "Found"
+ break # Block found
+ except socket.timeout: # Timeout
+ print ".",
+ sys.stdout.flush()
+ except Exception, err:
+ print "Exception", err.__class__, err
+ time.sleep(5)
- last_block = int(rpc.getinfo()["blocks"])
- for block_id in range(config["lastprocessed"]+1, last_block+1):
- processBlock(block_id)
+ last_block = int(rpc.getinfo()["blocks"])
+ for block_id in range(config["lastprocessed"] + 1, last_block + 1):
+ processBlock(block_id)
- config["lastprocessed"] = last_block
- open(config_path, "w").write(json.dumps(config, indent=2))
+ config["lastprocessed"] = last_block
+ open(config_path, "w").write(json.dumps(config, indent=2))
diff --git a/plugins/disabled-Zeroname-local/domainLookup.py b/plugins/disabled-Zeroname-local/domainLookup.py
index 013a2d24..ae0c56bc 100644
--- a/plugins/disabled-Zeroname-local/domainLookup.py
+++ b/plugins/disabled-Zeroname-local/domainLookup.py
@@ -19,20 +19,18 @@ def lookupDomain(domain):
try:
domain_object = rpc.name_show("d/"+domain)
- except Exception, err:
+ except:
#domain doesn't exist
- print "Error looking up domain - does not exist %s %s" % (domain,err)
return None
domain_json = json.loads(domain_object['value'])
try:
domain_address = domain_json["zeronet"][subdomain]
- except Exception, err:
+ except:
#domain exists but doesn't have any zeronet value
- print "Error looking up domain - doesn't contain zeronet value %s %s" % (domain,err)
return None
-
+
return domain_address
# Loading config...
@@ -54,3 +52,30 @@ rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)
rpc = AuthServiceProxy(rpc_url, timeout=60*5)
+
+"""
+while 1:
+ print "Waiting for new block",
+ sys.stdout.flush()
+ while 1:
+ try:
+ rpc = AuthServiceProxy(rpc_url, timeout=60*5)
+ if (int(rpc.getinfo()["blocks"]) > last_block): break
+ time.sleep(1)
+ rpc.waitforblock()
+ print "Found"
+ break # Block found
+ except socket.timeout: # Timeout
+ print ".",
+ sys.stdout.flush()
+ except Exception, err:
+ print "Exception", err.__class__, err
+ time.sleep(5)
+
+ last_block = int(rpc.getinfo()["blocks"])
+ for block_id in range(config["lastprocessed"]+1, last_block+1):
+ processBlock(block_id)
+
+ config["lastprocessed"] = last_block
+ open(config_path, "w").write(json.dumps(config, indent=2))
+"""
\ No newline at end of file
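
For context: both Zeroname scripts build their RPC endpoint the same way, scraping `rpcuser` and `rpcpassword` out of namecoin.conf with multiline regexes. A minimal sketch of that parsing with a hypothetical conf body (the credentials are invented):

    import re
    from bitcoinrpc.authproxy import AuthServiceProxy

    namecoin_conf = "rpcuser=nmc\nrpcpassword=secret\nrpcport=8336\n"  # hypothetical file contents
    rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1)
    rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
    rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)
    print rpc_url  # http://nmc:secret@127.0.0.1:8336
    rpc = AuthServiceProxy(rpc_url, timeout=60 * 5)  # JSON-RPC proxy to the local namecoind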
diff --git a/src/Config.py b/src/Config.py
index 6165b7f1..17a64c36 100644
--- a/src/Config.py
+++ b/src/Config.py
@@ -1,191 +1,195 @@
-import argparse, sys, os, time
+import argparse
+import sys
+import os
import ConfigParser
+
class Config(object):
- def __init__(self):
- self.version = "0.3.1"
- self.rev = 280
- self.parser = self.createArguments()
- argv = sys.argv[:] # Copy command line arguments
- argv = self.parseConfig(argv) # Add arguments from config file
- self.parseCommandline(argv) # Parse argv
- self.setAttributes()
+ def __init__(self):
+ self.version = "0.3.1"
+ self.rev = 280
+ self.parser = self.createArguments()
+ argv = sys.argv[:] # Copy command line arguments
+ argv = self.parseConfig(argv) # Add arguments from config file
+ self.parseCommandline(argv) # Parse argv
+ self.setAttributes()
- def __str__(self):
- return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
+ def __str__(self):
+ return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
+ # Convert string to bool
+ def strToBool(self, v):
+ return v.lower() in ("yes", "true", "t", "1")
- # Convert string to bool
- def strToBool(self, v):
- return v.lower() in ("yes", "true", "t", "1")
+ # Create command line arguments
+ def createArguments(self):
+ # Platform specific
+ if sys.platform.startswith("win"):
+ coffeescript = "type %s | tools\\coffee\\coffee.cmd"
+ else:
+ coffeescript = None
+ """ Probably fixed
+ if sys.platform.lower().startswith("darwin"):
+        # For some reason openssl doesn't work on mac yet (https://github.com/HelloZeroNet/ZeroNet/issues/94)
+ use_openssl = False
+ else:
+ use_openssl = True
+ """
+ use_openssl = True
+ # Create parser
+ parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser.register('type', 'bool', self.strToBool)
+ subparsers = parser.add_subparsers(title="Action to perform", dest="action")
- # Create command line arguments
- def createArguments(self):
- # Platform specific
- if sys.platform.startswith("win"):
- coffeescript = "type %s | tools\\coffee\\coffee.cmd"
- else:
- coffeescript = None
- """ Probably fixed
- if sys.platform.lower().startswith("darwin"): # For some reasons openssl doesnt works on mac yet (https://github.com/HelloZeroNet/ZeroNet/issues/94)
- use_openssl = False
- else:
- use_openssl = True
- """
- use_openssl = True
+ # Main
+ action = subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
- # Create parser
- parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
- parser.register('type','bool', self.strToBool)
- subparsers = parser.add_subparsers(title="Action to perform", dest="action")
+ # SiteCreate
+ action = subparsers.add_parser("siteCreate", help='Create a new site')
- # Main
- action = subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
+ # SiteSign
+ action = subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
+ action.add_argument('address', help='Site to sign')
+ action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
+ action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
+ default="content.json", metavar="inner_path")
+ action.add_argument('--publish', help='Publish site after the signing', action='store_true')
- # SiteCreate
- action = subparsers.add_parser("siteCreate", help='Create a new site')
+ # SitePublish
+ action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
+ action.add_argument('address', help='Site to publish')
+ action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)',
+ default=None, nargs='?')
+ action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)',
+ default=15441, nargs='?')
+ action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
+ default="content.json", metavar="inner_path")
- # SiteSign
- action = subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
- action.add_argument('address', help='Site to sign')
- action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
- action.add_argument('--inner_path', help='File you want to sign (default: content.json)', default="content.json", metavar="inner_path")
- action.add_argument('--publish', help='Publish site after the signing', action='store_true')
+ # SiteVerify
+ action = subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
+ action.add_argument('address', help='Site to verify')
- # SitePublish
- action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
- action.add_argument('address', help='Site to publish')
- action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)', default=None, nargs='?')
- action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)', default=15441, nargs='?')
- action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)', default="content.json", metavar="inner_path")
+ # dbRebuild
+ action = subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
+ action.add_argument('address', help='Site to rebuild')
- # SiteVerify
- action = subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
- action.add_argument('address', help='Site to verify')
+ # dbQuery
+ action = subparsers.add_parser("dbQuery", help='Query site sql cache')
+ action.add_argument('address', help='Site to query')
+ action.add_argument('query', help='Sql query')
- #dbRebuild
- action = subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
- action.add_argument('address', help='Site to rebuild')
+ # PeerPing
+ action = subparsers.add_parser("peerPing", help='Send Ping command to peer')
+ action.add_argument('peer_ip', help='Peer ip')
+ action.add_argument('peer_port', help='Peer port', nargs='?')
- #dbQuery
- action = subparsers.add_parser("dbQuery", help='Query site sql cache')
- action.add_argument('address', help='Site to query')
- action.add_argument('query', help='Sql query')
+ # PeerGetFile
+ action = subparsers.add_parser("peerGetFile", help='Request and print a file content from peer')
+ action.add_argument('peer_ip', help='Peer ip')
+ action.add_argument('peer_port', help='Peer port')
+ action.add_argument('site', help='Site address')
+ action.add_argument('filename', help='File name to request')
- # PeerPing
- action = subparsers.add_parser("peerPing", help='Send Ping command to peer')
- action.add_argument('peer_ip', help='Peer ip')
- action.add_argument('peer_port', help='Peer port', nargs='?')
+        # PeerCmd
+        action = subparsers.add_parser("peerCmd", help='Request and print a command response from peer')
+ action.add_argument('peer_ip', help='Peer ip')
+ action.add_argument('peer_port', help='Peer port')
+ action.add_argument('cmd', help='Command to execute')
+ action.add_argument('parameters', help='Parameters to command', nargs='?')
- # PeerGetFile
- action = subparsers.add_parser("peerGetFile", help='Request and print a file content from peer')
- action.add_argument('peer_ip', help='Peer ip')
- action.add_argument('peer_port', help='Peer port')
- action.add_argument('site', help='Site address')
- action.add_argument('filename', help='File name to request')
+ # CryptSign
+ action = subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
+ action.add_argument('message', help='Message to sign')
+ action.add_argument('privatekey', help='Private key')
- # PeerGetFile
- action = subparsers.add_parser("peerCmd", help='Request and print a file content from peer')
- action.add_argument('peer_ip', help='Peer ip')
- action.add_argument('peer_port', help='Peer port')
- action.add_argument('cmd', help='Command to execute')
- action.add_argument('parameters', help='Parameters to command', nargs='?')
+ # Config parameters
+ parser.add_argument('--debug', help='Debug mode', action='store_true')
+ parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
- # CryptSign
- action = subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
- action.add_argument('message', help='Message to sign')
- action.add_argument('privatekey', help='Private key')
+ parser.add_argument('--config_file', help='Path of config file', default="zeronet.conf", metavar="path")
+ parser.add_argument('--data_dir', help='Path of data directory', default="data", metavar="path")
+ parser.add_argument('--log_dir', help='Path of logging directory', default="log", metavar="path")
+ parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
+ parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
+ parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
+ parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
+ nargs='?', const="default_browser", metavar='browser_name')
+ parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr',
+ metavar='address')
+ parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size')
- # Config parameters
- parser.add_argument('--debug', help='Debug mode', action='store_true')
- parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
+ parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
+ parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port')
+ parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
+ parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
+ parser.add_argument('--ip_external', help='External ip (tested on start if None)', metavar='ip')
+        parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup',
+ type='bool', choices=[True, False], default=use_openssl)
+ parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
+ parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
+ type='bool', choices=[True, False], default=True)
- parser.add_argument('--config_file', help='Path of config file', default="zeronet.conf", metavar="path")
- parser.add_argument('--data_dir', help='Path of data directory', default="data", metavar="path")
- parser.add_argument('--log_dir', help='Path of logging directory', default="log", metavar="path")
+ parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
+ metavar='executable_path')
- parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
- parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
- parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
- parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', const="default_browser", metavar='browser_name')
- parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr', metavar='address')
- parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size')
+ parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
- parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
- parser.add_argument('--fileserver_port',help='FileServer bind port', default=15441, type=int, metavar='port')
- parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
- parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
- parser.add_argument('--ip_external', help='External ip (tested on start if None)', metavar='ip')
- parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=use_openssl)
- parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
- parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory', type='bool', choices=[True, False], default=True)
+ return parser
- parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript, metavar='executable_path')
+    # Find arguments specified for current action
+ def getActionArguments(self):
+ back = {}
+        arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:]  # First action is the auto-added help
+ for argument in arguments:
+ back[argument.dest] = getattr(self, argument.dest)
+ return back
- parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
+ # Try to find action from sys.argv
+ def getAction(self, argv):
+ actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
+ found_action = False
+ for action in actions: # See if any in sys.argv
+ if action in argv:
+ found_action = action
+ break
+ return found_action
- return parser
+ # Parse command line arguments
+ def parseCommandline(self, argv):
+        # Find out if action is specified on start
+        action = self.getAction(argv)
+        if len(argv) == 1 or not action:  # If no action specified, set the main action
+ argv.append("main")
+ self.arguments = self.parser.parse_args(argv[1:])
+ # Parse config file
+ def parseConfig(self, argv):
+ # Find config file path from parameters
+ config_file = "zeronet.conf"
+ if "--config_file" in argv:
+ config_file = argv[argv.index("--config_file") + 1]
+ # Load config file
+ if os.path.isfile(config_file):
+ config = ConfigParser.ConfigParser(allow_no_value=True)
+ config.read(config_file)
+ for section in config.sections():
+ for key, val in config.items(section):
+ if section != "global": # If not global prefix key with section
+ key = section + "_" + key
+ if val:
+ argv.insert(1, val)
+ argv.insert(1, "--%s" % key)
+ return argv
- # Find arguments specificed for current action
- def getActionArguments(self):
- back = {}
- arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:] # First is --version
- for argument in arguments:
- back[argument.dest] = getattr(self, argument.dest)
- return back
-
-
-
- # Try to find action from sys.argv
- def getAction(self, argv):
- actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
- found_action = False
- for action in actions: # See if any in sys.argv
- if action in argv:
- found_action = action
- break
- return found_action
-
-
- # Parse command line arguments
- def parseCommandline(self, argv):
- # Find out if action is specificed on start
- action = self.getAction(argv)
- if len(argv) == 1 or not action: # If no action specificed set the main action
- argv.append("main")
- self.arguments = self.parser.parse_args(argv[1:])
-
-
- # Parse config file
- def parseConfig(self, argv):
- # Find config file path from parameters
- config_file = "zeronet.conf"
- if "--config_file" in argv:
- config_file = argv[argv.index("--config_file")+1]
- # Load config file
- if os.path.isfile(config_file):
- config = ConfigParser.ConfigParser(allow_no_value=True)
- config.read(config_file)
- for section in config.sections():
- for key, val in config.items(section):
- if section != "global": # If not global prefix key with section
- key = section+"_"+key
- if val: argv.insert(1, val)
- argv.insert(1, "--%s" % key)
- return argv
-
-
-
- # Expose arguments as class attributes
- def setAttributes(self):
- # Set attributes from arguments
- args = vars(self.arguments)
- for key, val in args.items():
- setattr(self, key, val)
+ # Expose arguments as class attributes
+ def setAttributes(self):
+ # Set attributes from arguments
+ args = vars(self.arguments)
+ for key, val in args.items():
+ setattr(self, key, val)
config = Config()
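
parseConfig() above merges the INI file by rewriting its entries into argv flags before argparse runs; keys outside the global section get their section name as a prefix, and valueless keys become bare flags. A self-contained sketch of that transformation (file content is hypothetical):

import ConfigParser
import StringIO

ini = StringIO.StringIO("[global]\nui_port = 43111\n\n[fileserver]\nport = 15442\n")
cfg = ConfigParser.ConfigParser(allow_no_value=True)
cfg.readfp(ini)

argv = ["zeronet.py"]
for section in cfg.sections():
    for key, val in cfg.items(section):
        if section != "global":  # Prefix non-global keys with the section name
            key = section + "_" + key
        if val:
            argv.insert(1, val)
        argv.insert(1, "--%s" % key)

print argv  # ['zeronet.py', '--fileserver_port', '15442', '--ui_port', '43111']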
diff --git a/src/Connection/Connection.py b/src/Connection/Connection.py
index 03a3e179..fe683091 100644
--- a/src/Connection/Connection.py
+++ b/src/Connection/Connection.py
@@ -1,282 +1,294 @@
-import logging, socket, time
-from cStringIO import StringIO
-import gevent, msgpack
+import socket
+import time
+
+import gevent
+import msgpack
+
from Config import config
from Debug import Debug
from util import StreamingMsgpack
from Crypt import CryptConnection
+
class Connection(object):
- __slots__ = ("sock", "sock_wrapped", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id", "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time", "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests")
+ __slots__ = (
+ "sock", "sock_wrapped", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id",
+ "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time",
+ "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent",
+ "last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests"
+ )
- def __init__(self, server, ip, port, sock=None):
- self.sock = sock
- self.ip = ip
- self.port = port
- self.peer_id = None # Bittorrent style peer id (not used yet)
- self.id = server.last_connection_id
- server.last_connection_id += 1
- self.protocol = "?"
- self.type = "?"
+ def __init__(self, server, ip, port, sock=None):
+ self.sock = sock
+ self.ip = ip
+ self.port = port
+ self.peer_id = None # Bittorrent style peer id (not used yet)
+ self.id = server.last_connection_id
+ server.last_connection_id += 1
+ self.protocol = "?"
+ self.type = "?"
- self.server = server
- self.unpacker = None # Stream incoming socket messages here
- self.req_id = 0 # Last request id
- self.handshake = {} # Handshake info got from peer
- self.crypt = None # Connection encryption method
- self.sock_wrapped = False # Socket wrapped to encryption
+ self.server = server
+ self.unpacker = None # Stream incoming socket messages here
+ self.req_id = 0 # Last request id
+ self.handshake = {} # Handshake info got from peer
+ self.crypt = None # Connection encryption method
+ self.sock_wrapped = False # Socket wrapped to encryption
- self.connected = False
- self.event_connected = gevent.event.AsyncResult() # Solves on handshake received
- self.closed = False
+ self.connected = False
+ self.event_connected = gevent.event.AsyncResult() # Solves on handshake received
+ self.closed = False
- # Stats
- self.start_time = time.time()
- self.last_recv_time = 0
- self.last_message_time = 0
- self.last_send_time = 0
- self.last_sent_time = 0
- self.incomplete_buff_recv = 0
- self.bytes_recv = 0
- self.bytes_sent = 0
- self.last_ping_delay = None
- self.last_req_time = 0
- self.last_cmd = None
+ # Stats
+ self.start_time = time.time()
+ self.last_recv_time = 0
+ self.last_message_time = 0
+ self.last_send_time = 0
+ self.last_sent_time = 0
+ self.incomplete_buff_recv = 0
+ self.bytes_recv = 0
+ self.bytes_sent = 0
+ self.last_ping_delay = None
+ self.last_req_time = 0
+ self.last_cmd = None
- self.name = None
- self.updateName()
+ self.name = None
+ self.updateName()
- self.waiting_requests = {} # Waiting sent requests
+ self.waiting_requests = {} # Waiting sent requests
+ def updateName(self):
+ self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol)
- def updateName(self):
- self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol)
+ def __str__(self):
+ return self.name
+ def __repr__(self):
+ return "<%s>" % self.__str__()
- def __str__(self):
- return self.name
+ def log(self, text):
+ self.server.log.debug("%s > %s" % (self.name, text))
+ # Open connection to peer and wait for handshake
+ def connect(self):
+ self.log("Connecting...")
+ self.type = "out"
+ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.sock.connect((self.ip, int(self.port)))
- def __repr__(self):
- return "<%s>" % self.__str__()
+ # Implicit SSL in the future
+ # self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
+ # self.sock.do_handshake()
+ # self.crypt = "tls-rsa"
+ # self.sock_wrapped = True
+ # Detect protocol
+ self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()})
+ gevent.spawn(self.messageLoop)
+ return self.event_connected.get() # Wait for handshake
- def log(self, text):
- self.server.log.debug("%s > %s" % (self.name, text))
+ # Handle incoming connection
+ def handleIncomingConnection(self, sock):
+ self.log("Incoming connection...")
+ self.type = "in"
+ try:
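+            # "\x16" is the first byte of a TLS record of type handshake (22),
+            # so a peer that leads with it is attempting implicit SSL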
+ if sock.recv(1, gevent.socket.MSG_PEEK) == "\x16":
+ self.log("Crypt in connection using implicit SSL")
+ self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
+ self.sock_wrapped = True
+ self.crypt = "tls-rsa"
+ except Exception, err:
+ self.log("Socket peek error: %s" % Debug.formatException(err))
+ self.messageLoop()
+ # Message loop for connection
+ def messageLoop(self):
+ if not self.sock:
+ self.log("Socket error: No socket found")
+ return False
+ self.protocol = "v2"
+ self.updateName()
+ self.connected = True
- # Open connection to peer and wait for handshake
- def connect(self):
- self.log("Connecting...")
- self.type = "out"
- self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.sock.connect((self.ip, int(self.port)))
- # Implicit SSL in the future
- #self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
- #self.sock.do_handshake()
- #self.crypt = "tls-rsa"
- #self.sock_wrapped = True
- # Detect protocol
- self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()})
- gevent.spawn(self.messageLoop)
- return self.event_connected.get() # Wait for handshake
+ self.unpacker = msgpack.Unpacker()
+ try:
+ while True:
+ buff = self.sock.recv(16 * 1024)
+ if not buff:
+ break # Connection closed
+ self.last_recv_time = time.time()
+ self.incomplete_buff_recv += 1
+ self.bytes_recv += len(buff)
+ self.server.bytes_recv += len(buff)
+ if not self.unpacker:
+ self.unpacker = msgpack.Unpacker()
+ self.unpacker.feed(buff)
+ for message in self.unpacker:
+ self.incomplete_buff_recv = 0
+ self.handleMessage(message)
+ message = None
+ buff = None
+ except Exception, err:
+ if not self.closed:
+ self.log("Socket error: %s" % Debug.formatException(err))
+ self.close() # MessageLoop ended, close connection
+ # My handshake info
+ def handshakeInfo(self):
+ return {
+ "version": config.version,
+ "protocol": "v2",
+ "peer_id": self.server.peer_id,
+ "fileserver_port": self.server.port,
+ "port_opened": self.server.port_opened,
+ "rev": config.rev,
+ "crypt_supported": CryptConnection.manager.crypt_supported,
+ "crypt": self.crypt
+ }
+ def setHandshake(self, handshake):
+ self.handshake = handshake
+ if handshake.get("port_opened", None) is False: # Not connectable
+ self.port = 0
+ else:
+ self.port = handshake["fileserver_port"] # Set peer fileserver port
+ # Check if we can encrypt the connection
+ if handshake.get("crypt_supported"):
+ if handshake.get("crypt"): # Recommended crypt by server
+ crypt = handshake["crypt"]
+ else: # Select the best supported on both sides
+ crypt = CryptConnection.manager.selectCrypt(handshake["crypt_supported"])
- # Handle incoming connection
- def handleIncomingConnection(self, sock):
- self.log("Incoming connection...")
- self.type = "in"
- try:
- if sock.recv(1, gevent.socket.MSG_PEEK) == "\x16":
- self.log("Crypt in connection using implicit SSL")
- self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
- self.sock_wrapped = True
- self.crypt = "tls-rsa"
- except Exception, err:
- self.log("Socket peek error: %s" % Debug.formatException(err))
- self.messageLoop()
+ if crypt:
+ self.crypt = crypt
+ self.event_connected.set(True) # Mark handshake as done
+ # Handle incoming message
+ def handleMessage(self, message):
+ self.last_message_time = time.time()
+ if message.get("cmd") == "response": # New style response
+ if message["to"] in self.waiting_requests:
+ self.waiting_requests[message["to"]].set(message) # Set the response to event
+ del self.waiting_requests[message["to"]]
+ elif message["to"] == 0: # Other peers handshake
+ ping = time.time() - self.start_time
+ if config.debug_socket:
+ self.log("Handshake response: %s, ping: %s" % (message, ping))
+ self.last_ping_delay = ping
+                # Server switched to crypt, let's do it too if not already encrypted
+ if message.get("crypt") and not self.sock_wrapped:
+ self.crypt = message["crypt"]
+ server = (self.type == "in")
+ self.log("Crypt out connection using: %s (server side: %s)..." % (self.crypt, server))
+ self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
+ self.sock.do_handshake()
+ self.setHandshake(message)
+ else:
+ self.log("Unknown response: %s" % message)
+        elif message.get("cmd"):  # Handshake request
+ if message["cmd"] == "handshake":
+ if config.debug_socket:
+ self.log("Handshake request: %s" % message)
+ self.setHandshake(message["params"])
+ data = self.handshakeInfo()
+ data["cmd"] = "response"
+ data["to"] = message["req_id"]
+ self.send(data) # Send response to handshake
+                # Send crypt request to client
+ if self.crypt and not self.sock_wrapped:
+ server = (self.type == "in")
+ self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server))
+ self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
+ self.sock_wrapped = True
+ else:
+ self.server.handleRequest(self, message)
+        else:  # Old style response, no req_id defined
+ if config.debug_socket:
+ self.log("Old style response, waiting: %s" % self.waiting_requests.keys())
+ last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true
+ self.waiting_requests[last_req_id].set(message)
+ del self.waiting_requests[last_req_id] # Remove from waiting request
- # Message loop for connection
- def messageLoop(self):
- if not self.sock:
- self.log("Socket error: No socket found")
- return False
- self.protocol = "v2"
- self.updateName()
- self.connected = True
+ # Send data to connection
+ def send(self, message, streaming=False):
+ if config.debug_socket:
+ self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (
+ message.get("cmd"), message.get("to"), streaming,
+ message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"),
+ message.get("req_id"))
+ )
+ self.last_send_time = time.time()
+ if streaming:
+ bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall)
+ message = None
+ self.bytes_sent += bytes_sent
+ self.server.bytes_sent += bytes_sent
+ else:
+ data = msgpack.packb(message)
+ message = None
+ self.bytes_sent += len(data)
+ self.server.bytes_sent += len(data)
+ self.sock.sendall(data)
+ self.last_sent_time = time.time()
+ return True
- self.unpacker = msgpack.Unpacker()
- try:
- while True:
- buff = self.sock.recv(16*1024)
- if not buff: break # Connection closed
- self.last_recv_time = time.time()
- self.incomplete_buff_recv += 1
- self.bytes_recv += len(buff)
- self.server.bytes_recv += len(buff)
- if not self.unpacker:
- self.unpacker = msgpack.Unpacker()
- self.unpacker.feed(buff)
- for message in self.unpacker:
- self.incomplete_buff_recv = 0
- self.handleMessage(message)
- message = None
- buff = None
- except Exception, err:
- if not self.closed: self.log("Socket error: %s" % Debug.formatException(err))
- self.close() # MessageLoop ended, close connection
+ # Create and send a request to peer
+ def request(self, cmd, params={}):
+ # Last command sent more than 10 sec ago, timeout
+ if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10:
+ self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time))
+ self.close()
+ return False
+ self.last_req_time = time.time()
+ self.last_cmd = cmd
+ self.req_id += 1
+ data = {"cmd": cmd, "req_id": self.req_id, "params": params}
+ event = gevent.event.AsyncResult() # Create new event for response
+ self.waiting_requests[self.req_id] = event
+ self.send(data) # Send request
+ res = event.get() # Wait until event solves
+ return res
- # My handshake info
- def handshakeInfo(self):
- return {
- "version": config.version,
- "protocol": "v2",
- "peer_id": self.server.peer_id,
- "fileserver_port": self.server.port,
- "port_opened": self.server.port_opened,
- "rev": config.rev,
- "crypt_supported": CryptConnection.manager.crypt_supported,
- "crypt": self.crypt
- }
+ def ping(self):
+ s = time.time()
+ response = None
+ with gevent.Timeout(10.0, False):
+ try:
+ response = self.request("ping")
+ except Exception, err:
+ self.log("Ping error: %s" % Debug.formatException(err))
+ if response and "body" in response and response["body"] == "Pong!":
+ self.last_ping_delay = time.time() - s
+ return True
+ else:
+ return False
+ # Close connection
+ def close(self):
+ if self.closed:
+ return False # Already closed
+ self.closed = True
+ self.connected = False
+ self.event_connected.set(False)
- def setHandshake(self, handshake):
- self.handshake = handshake
- if handshake.get("port_opened", None) == False: # Not connectable
- self.port = 0
- else:
- self.port = handshake["fileserver_port"] # Set peer fileserver port
- # Check if we can encrypt the connection
- if handshake.get("crypt_supported"):
- if handshake.get("crypt"): # Recommended crypt by server
- crypt = handshake["crypt"]
- else: # Select the best supported on both sides
- crypt = CryptConnection.manager.selectCrypt(handshake["crypt_supported"])
+ if config.debug_socket:
+ self.log(
+ "Closing connection, waiting_requests: %s, buff: %s..." %
+ (len(self.waiting_requests), self.incomplete_buff_recv)
+ )
+ for request in self.waiting_requests.values(): # Mark pending requests failed
+ request.set(False)
+ self.waiting_requests = {}
+ self.server.removeConnection(self) # Remove connection from server registry
+ try:
+ if self.sock:
+ self.sock.shutdown(gevent.socket.SHUT_WR)
+ self.sock.close()
+ except Exception, err:
+ if config.debug_socket:
+ self.log("Close error: %s" % err)
- if crypt:
- self.crypt = crypt
- self.event_connected.set(True) # Mark handshake as done
-
-
- # Handle incoming message
- def handleMessage(self, message):
- self.last_message_time = time.time()
- if message.get("cmd") == "response": # New style response
- if message["to"] in self.waiting_requests:
- self.waiting_requests[message["to"]].set(message) # Set the response to event
- del self.waiting_requests[message["to"]]
- elif message["to"] == 0: # Other peers handshake
- ping = time.time()-self.start_time
- if config.debug_socket: self.log("Handshake response: %s, ping: %s" % (message, ping))
- self.last_ping_delay = ping
- # Server switched to crypt, lets do it also if not crypted already
- if message.get("crypt") and not self.sock_wrapped:
- self.crypt = message["crypt"]
- server = (self.type == "in")
- self.log("Crypt out connection using: %s (server side: %s)..." % (self.crypt, server))
- self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
- self.sock.do_handshake()
- self.setHandshake(message)
- else:
- self.log("Unknown response: %s" % message)
- elif message.get("cmd"): # Handhsake request
- if message["cmd"] == "handshake":
- if config.debug_socket: self.log("Handshake request: %s" % message)
- self.setHandshake(message["params"])
- data = self.handshakeInfo()
- data["cmd"] = "response"
- data["to"] = message["req_id"]
- self.send(data) # Send response to handshake
- # Sent crypt request to client
- if self.crypt and not self.sock_wrapped:
- server = (self.type == "in")
- self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server))
- self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
- self.sock_wrapped = True
- else:
- self.server.handleRequest(self, message)
- else: # Old style response, no req_id definied
- if config.debug_socket: self.log("Old style response, waiting: %s" % self.waiting_requests.keys())
- last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true
- self.waiting_requests[last_req_id].set(message)
- del self.waiting_requests[last_req_id] # Remove from waiting request
-
-
-
- # Send data to connection
- def send(self, message, streaming=False):
- if config.debug_socket: self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (message.get("cmd"), message.get("to"), streaming, message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"), message.get("req_id")))
- self.last_send_time = time.time()
- if streaming:
- bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall)
- message = None
- self.bytes_sent += bytes_sent
- self.server.bytes_sent += bytes_sent
- else:
- data = msgpack.packb(message)
- message = None
- self.bytes_sent += len(data)
- self.server.bytes_sent += len(data)
- self.sock.sendall(data)
- self.last_sent_time = time.time()
- return True
-
-
- # Create and send a request to peer
- def request(self, cmd, params={}):
- if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10: # Last command sent more than 10 sec ago, timeout
- self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time))
- self.close()
- return False
-
- self.last_req_time = time.time()
- self.last_cmd = cmd
- self.req_id += 1
- data = {"cmd": cmd, "req_id": self.req_id, "params": params}
- event = gevent.event.AsyncResult() # Create new event for response
- self.waiting_requests[self.req_id] = event
- self.send(data) # Send request
- res = event.get() # Wait until event solves
- return res
-
-
- def ping(self):
- s = time.time()
- response = None
- with gevent.Timeout(10.0, False):
- try:
- response = self.request("ping")
- except Exception, err:
- self.log("Ping error: %s" % Debug.formatException(err))
- if response and "body" in response and response["body"] == "Pong!":
- self.last_ping_delay = time.time()-s
- return True
- else:
- return False
-
-
- # Close connection
- def close(self):
- if self.closed: return False # Already closed
- self.closed = True
- self.connected = False
- self.event_connected.set(False)
-
- if config.debug_socket: self.log("Closing connection, waiting_requests: %s, buff: %s..." % (len(self.waiting_requests), self.incomplete_buff_recv))
- for request in self.waiting_requests.values(): # Mark pending requests failed
- request.set(False)
- self.waiting_requests = {}
- self.server.removeConnection(self) # Remove connection from server registry
- try:
- if self.sock:
- self.sock.shutdown(gevent.socket.SHUT_WR)
- self.sock.close()
- except Exception, err:
- if config.debug_socket: self.log("Close error: %s" % err)
-
- # Little cleanup
- self.sock = None
- self.unpacker = None
+ # Little cleanup
+ self.sock = None
+ self.unpacker = None
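
messageLoop() above is a streaming-msgpack consumer: every recv() chunk is fed to a single Unpacker, which yields only the messages that have fully arrived and buffers the remainder. A standalone sketch of that pattern:

import msgpack

# Two complete messages packed back to back, as they might sit in a TCP stream
wire = msgpack.packb({"cmd": "ping", "req_id": 1}) + msgpack.packb({"cmd": "ping", "req_id": 2})

unpacker = msgpack.Unpacker()
for i in range(0, len(wire), 5):  # Feed 5-byte slices to mimic TCP fragmentation
    unpacker.feed(wire[i:i + 5])
    for message in unpacker:  # Yields only fully decoded messages
        print message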
diff --git a/src/Connection/ConnectionServer.py b/src/Connection/ConnectionServer.py
index 84cd28ce..558c6f4c 100644
--- a/src/Connection/ConnectionServer.py
+++ b/src/Connection/ConnectionServer.py
@@ -43,14 +43,16 @@ class ConnectionServer:
# Check msgpack version
if msgpack.version[0] == 0 and msgpack.version[1] < 4:
self.log.error(
- "Error: Too old msgpack version: %s (>0.4.0 required), please update using `sudo pip install msgpack-python --upgrade`" %
+ "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo pip install msgpack-python --upgrade`" %
str(msgpack.version)
)
sys.exit(0)
if port: # Listen server on a port
self.pool = Pool(1000) # do not accept more than 1000 connections
- self.stream_server = StreamServer((ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100)
+ self.stream_server = StreamServer(
+ (ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100
+ )
if request_handler:
self.handleRequest = request_handler
@@ -152,25 +154,32 @@ class ConnectionServer:
for connection in self.connections[:]: # Make a copy
idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
- if connection.unpacker and idle > 30: # Delete the unpacker if not needed
+ if connection.unpacker and idle > 30:
+ # Delete the unpacker if not needed
del connection.unpacker
connection.unpacker = None
connection.log("Unpacker deleted")
- if idle > 60 * 60: # Wake up after 1h
+ if idle > 60 * 60:
+ # Wake up after 1h
connection.log("[Cleanup] After wakeup, idle: %s" % idle)
connection.close()
- elif idle > 20 * 60 and connection.last_send_time < time.time() - 10: # Idle more than 20 min and we not send request in last 10 sec
+ elif idle > 20 * 60 and connection.last_send_time < time.time() - 10:
+                # Idle more than 20 min and we haven't sent a request in the last 10 sec
if not connection.ping(): # send ping request
connection.close()
- elif idle > 10 and connection.incomplete_buff_recv > 0: # Incompelte data with more than 10 sec idle
+ elif idle > 10 and connection.incomplete_buff_recv > 0:
+                # Incomplete data with more than 10 sec idle
connection.log("[Cleanup] Connection buff stalled")
connection.close()
- elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10: # Sent command and no response in 10 sec
- connection.log("[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time))
+ elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10:
+ # Sent command and no response in 10 sec
+ connection.log(
+ "[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time)
+ )
connection.close()
elif idle > 60 and connection.protocol == "?": # No connection after 1 min
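
The cleanup ladder above reads more easily as a pure decision function; a hypothetical paraphrase (thresholds copied from the code, the function itself is not part of the patch):

def cleanupDecision(idle, last_send_ago, incomplete_buff_recv, has_waiting, protocol):
    # Mirrors the elif ladder above, most severe condition first
    if idle > 60 * 60:
        return "close: wakeup after 1h idle"
    elif idle > 20 * 60 and last_send_ago > 10:
        return "ping, close on failure"
    elif idle > 10 and incomplete_buff_recv > 0:
        return "close: stalled buffer"
    elif idle > 10 and has_waiting and last_send_ago > 10:
        return "close: command timeout"
    elif idle > 60 and protocol == "?":
        return "close: no handshake after 1 min"
    return "keep"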
diff --git a/src/Connection/__init__.py b/src/Connection/__init__.py
index 8f47108e..5bd29c6e 100644
--- a/src/Connection/__init__.py
+++ b/src/Connection/__init__.py
@@ -1,2 +1,2 @@
from ConnectionServer import ConnectionServer
-from Connection import Connection
\ No newline at end of file
+from Connection import Connection
diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py
index c461456f..13989900 100644
--- a/src/Content/ContentManager.py
+++ b/src/Content/ContentManager.py
@@ -108,7 +108,7 @@ class ContentManager(object):
return total_size
# Find the file info line from self.contents
- # Return: { "sha512": "c29d73d30ee8c9c1b5600e8a84447a6de15a3c3db6869aca4a2a578c1721f518", "size": 41 , "content_inner_path": "content.json"}
+ # Return: { "sha512": "c29d73d...21f518", "size": 41 , "content_inner_path": "content.json"}
def getFileInfo(self, inner_path):
dirs = inner_path.split("/") # Parent dirs of content.json
inner_path_parts = [dirs.pop()] # Filename relative to content.json
@@ -279,11 +279,17 @@ class ContentManager(object):
privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
valid_signers = self.getValidSigners(inner_path, new_content)
if privatekey_address not in valid_signers:
- return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address))
+ return self.log.error(
+ "Private key invalid! Valid signers: %s, Private key address: %s" %
+ (valid_signers, privatekey_address)
+ )
self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))
- if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key sign the valid signers
- new_content["signers_sign"] = CryptBitcoin.sign("%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey)
+ if inner_path == "content.json" and privatekey_address == self.site.address:
+ # If signing using the root key, then sign the valid signers
+ new_content["signers_sign"] = CryptBitcoin.sign(
+ "%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey
+ )
if not new_content["signers_sign"]:
self.log.info("Old style address, signers_sign is none")
@@ -352,7 +358,9 @@ class ContentManager(object):
if not cert_address: # Cert signer not allowed
self.log.error("Invalid cert signer: %s" % domain)
return False
- return CryptBitcoin.verify("%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"])
+ return CryptBitcoin.verify(
+ "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"]
+ )
# Checks if the content.json content is valid
# Return: True or False
@@ -414,10 +422,13 @@ class ContentManager(object):
if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json
return None
elif old_content["modified"] > new_content["modified"]: # We have newer
- self.log.debug("We have newer %s (Our: %s, Sent: %s)" % (inner_path, old_content["modified"], new_content["modified"]))
+ self.log.debug(
+ "We have newer %s (Our: %s, Sent: %s)" %
+ (inner_path, old_content["modified"], new_content["modified"])
+ )
gevent.spawn(self.site.publish, inner_path=inner_path) # Try to fix the broken peers
return False
- if new_content["modified"] > time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day window)
+ if new_content["modified"] > time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day+)
self.log.error("%s modify is in the future!" % inner_path)
return False
# Check sign
@@ -437,7 +448,9 @@ class ContentManager(object):
signs_required = self.getSignsRequired(inner_path, new_content)
if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json
- if not CryptBitcoin.verify("%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]):
+ if not CryptBitcoin.verify(
+ "%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]
+ ):
self.log.error("%s invalid signers_sign!" % inner_path)
return False
@@ -470,8 +483,10 @@ class ContentManager(object):
else:
hash_valid = False
if file_info["size"] != file.tell():
- self.log.error("%s file size does not match %s <> %s, Hash: %s" % (inner_path, file.tell(),
- file_info["size"], hash_valid))
+ self.log.error(
+ "%s file size does not match %s <> %s, Hash: %s" %
+ (inner_path, file.tell(), file_info["size"], hash_valid)
+ )
return False
return hash_valid
@@ -493,7 +508,9 @@ def testSign():
from Site import Site
site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
content_manager = ContentManager(site)
- content_manager.sign("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR")
+ content_manager.sign(
+ "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR"
+ )
def testVerify():
@@ -504,10 +521,14 @@ def testVerify():
print "Loaded contents:", content_manager.contents.keys()
file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json"))
- print "content.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False)
+ print "content.json valid:", content_manager.verifyFile(
+ "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False
+ )
file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json"))
- print "messages.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False)
+ print "messages.json valid:", content_manager.verifyFile(
+ "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False
+ )
def testInfo():
diff --git a/src/Crypt/CryptBitcoin.py b/src/Crypt/CryptBitcoin.py
index f5b80cf5..15d76d5f 100644
--- a/src/Crypt/CryptBitcoin.py
+++ b/src/Crypt/CryptBitcoin.py
@@ -1,72 +1,75 @@
+import logging
+
from lib.BitcoinECC import BitcoinECC
from lib.pybitcointools import bitcoin as btctools
-import logging
from Config import config
# Try to load openssl
try:
- if not config.use_openssl: raise Exception("Disabled by config")
- from lib.opensslVerify import opensslVerify
- logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version)
+ if not config.use_openssl:
+ raise Exception("Disabled by config")
+ from lib.opensslVerify import opensslVerify
+ logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version)
except Exception, err:
- logging.info("OpenSSL load failed: %s, falling back to slow bitcoin verify" % err)
- opensslVerify = None
+ logging.info("OpenSSL load failed: %s, falling back to slow bitcoin verify" % err)
+ opensslVerify = None
-def newPrivatekey(uncompressed=True): # Return new private key
- privatekey = btctools.encode_privkey(btctools.random_key(), "wif")
- return privatekey
+def newPrivatekey(uncompressed=True): # Return new private key
+ privatekey = btctools.encode_privkey(btctools.random_key(), "wif")
+ return privatekey
def newSeed():
- return btctools.random_key()
+ return btctools.random_key()
def hdPrivatekey(seed, child):
- masterkey = btctools.bip32_master_key(seed)
- childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too large child id could cause problems
- key = btctools.bip32_extract_key(childkey)
- return btctools.encode_privkey(key, "wif")
+ masterkey = btctools.bip32_master_key(seed)
+ childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too large child id could cause problems
+ key = btctools.bip32_extract_key(childkey)
+ return btctools.encode_privkey(key, "wif")
-def privatekeyToAddress(privatekey): # Return address from private key
- if privatekey.startswith("23") and len(privatekey) > 52: # Backward compatibility to broken lib
- bitcoin = BitcoinECC.Bitcoin()
- bitcoin.BitcoinAddressFromPrivate(privatekey)
- return bitcoin.BitcoinAddresFromPublicKey()
- else:
- try:
- return btctools.privkey_to_address(privatekey)
- except Exception, err: # Invalid privatekey
- return False
+def privatekeyToAddress(privatekey): # Return address from private key
+ if privatekey.startswith("23") and len(privatekey) > 52: # Backward compatibility to broken lib
+ bitcoin = BitcoinECC.Bitcoin()
+ bitcoin.BitcoinAddressFromPrivate(privatekey)
+ return bitcoin.BitcoinAddresFromPublicKey()
+ else:
+ try:
+ return btctools.privkey_to_address(privatekey)
+ except Exception: # Invalid privatekey
+ return False
-def sign(data, privatekey): # Return sign to data using private key
- if privatekey.startswith("23") and len(privatekey) > 52: return None # Old style private key not supported
- sign = btctools.ecdsa_sign(data, privatekey)
- return sign
+def sign(data, privatekey): # Return sign to data using private key
+ if privatekey.startswith("23") and len(privatekey) > 52:
+ return None # Old style private key not supported
+ sign = btctools.ecdsa_sign(data, privatekey)
+ return sign
-def signOld(data, privatekey): # Return sign to data using private key (backward compatible old style)
- bitcoin = BitcoinECC.Bitcoin()
- bitcoin.BitcoinAddressFromPrivate(privatekey)
- sign = bitcoin.SignECDSA(data)
- return sign
+def signOld(data, privatekey): # Return sign to data using private key (backward compatible old style)
+ bitcoin = BitcoinECC.Bitcoin()
+ bitcoin.BitcoinAddressFromPrivate(privatekey)
+ sign = bitcoin.SignECDSA(data)
+ return sign
-def verify(data, address, sign): # Verify data using address and sign
- if hasattr(sign, "endswith"):
- if opensslVerify: # Use the faster method if avalible
- pub = opensslVerify.getMessagePubkey(data, sign)
- sign_address = btctools.pubtoaddr(pub)
- else: # Use pure-python
- pub = btctools.ecdsa_recover(data, sign)
- sign_address = btctools.pubtoaddr(pub)
-
- if type(address) is list: # Any address in the list
- return sign_address in address
- else: # One possible address
- return sign_address == address
- else: # Backward compatible old style
- bitcoin = BitcoinECC.Bitcoin()
- return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign)
+def verify(data, address, sign): # Verify data using address and sign
+ if hasattr(sign, "endswith"):
+        if opensslVerify:  # Use the faster method if available
+ pub = opensslVerify.getMessagePubkey(data, sign)
+ sign_address = btctools.pubtoaddr(pub)
+ else: # Use pure-python
+ pub = btctools.ecdsa_recover(data, sign)
+ sign_address = btctools.pubtoaddr(pub)
+
+ if type(address) is list: # Any address in the list
+ return sign_address in address
+ else: # One possible address
+ return sign_address == address
+ else: # Backward compatible old style
+ bitcoin = BitcoinECC.Bitcoin()
+ return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign)
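
verify() above works by recovering the signer's public key from the signature and comparing derived addresses; the same round trip can be exercised directly with the bundled pybitcointools calls (a sketch using only functions this module already imports):

from lib.pybitcointools import bitcoin as btctools

priv = btctools.encode_privkey(btctools.random_key(), "wif")
address = btctools.privkey_to_address(priv)

sign = btctools.ecdsa_sign("hello", priv)
recovered = btctools.pubtoaddr(btctools.ecdsa_recover("hello", sign))
print recovered == address  # True: the signature binds the message to the address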
diff --git a/src/Crypt/CryptConnection.py b/src/Crypt/CryptConnection.py
index 764739ce..fb7097aa 100644
--- a/src/Crypt/CryptConnection.py
+++ b/src/Crypt/CryptConnection.py
@@ -4,103 +4,104 @@ import os
import ssl
from Config import config
-import gevent
from util import SslPatch
class CryptConnectionManager:
- def __init__(self):
- # OpenSSL params
- if sys.platform.startswith("win"):
- self.openssl_bin = "src\\lib\\opensslVerify\\openssl.exe"
- else:
- self.openssl_bin = "openssl"
- self.openssl_env = {"OPENSSL_CONF": "src/lib/opensslVerify/openssl.cnf"}
+ def __init__(self):
+ # OpenSSL params
+ if sys.platform.startswith("win"):
+ self.openssl_bin = "src\\lib\\opensslVerify\\openssl.exe"
+ else:
+ self.openssl_bin = "openssl"
+ self.openssl_env = {"OPENSSL_CONF": "src/lib/opensslVerify/openssl.cnf"}
- self.crypt_supported = [] # Supported cryptos
+ self.crypt_supported = [] # Supported cryptos
+
+ # Select crypt that supported by both sides
+ # Return: Name of the crypto
+ def selectCrypt(self, client_supported):
+ for crypt in self.crypt_supported:
+ if crypt in client_supported:
+ return crypt
+ return False
+
+ # Wrap socket for crypt
+ # Return: wrapped socket
+ def wrapSocket(self, sock, crypt, server=False):
+ if crypt == "tls-rsa":
+ ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:"
+ ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
+ if server:
+ return ssl.wrap_socket(
+ sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir,
+ certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers)
+ else:
+ return ssl.wrap_socket(sock, ciphers=ciphers)
+ else:
+ return sock
+
+ def removeCerts(self):
+ for file_name in ["cert-rsa.pem", "key-rsa.pem"]:
+ file_path = "%s/%s" % (config.data_dir, file_name)
+ if os.path.isfile(file_path):
+ os.unlink(file_path)
+
+    # Load and create cert files if necessary
+ def loadCerts(self):
+ if config.disable_encryption:
+ return False
+
+ if self.loadSslRsaCert():
+ self.crypt_supported.append("tls-rsa")
+
+ # Try to create RSA server cert + sign for connection encryption
+ # Return: True on success
+ def loadSslRsaCert(self):
+ import subprocess
+
+ if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir):
+            return True  # Files already exist
+
+ back = subprocess.Popen(
+ "%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s/key-rsa.pem -out %s/cert-rsa.pem -nodes -config %s" % (
+ self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]
+ ),
+ shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
+ ).stdout.read().strip()
+ logging.debug("Generating RSA cert and key PEM files...%s" % back)
+
+ if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir):
+ return True
+ else:
+ logging.error("RSA ECC SSL cert generation failed, cert or key files not exits.")
+ return False
+
+    # Not used yet: missing on some platforms
+ def createSslEccCert(self):
+ return False
+ import subprocess
+
+ # Create ECC privatekey
+ back = subprocess.Popen(
+ "%s ecparam -name prime256v1 -genkey -out %s/key-ecc.pem" % (self.openssl_bin, config.data_dir),
+ shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
+ ).stdout.read().strip()
+ self.log.debug("Generating ECC privatekey PEM file...%s" % back)
+
+ # Create ECC cert
+ back = subprocess.Popen(
+ "%s req -new -key %s/key-ecc.pem -x509 -nodes -out %s/cert-ecc.pem -config %s" % (
+ self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]),
+ shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
+ ).stdout.read().strip()
+ self.log.debug("Generating ECC cert PEM file...%s" % back)
+
+ if os.path.isfile("%s/cert-ecc.pem" % config.data_dir) and os.path.isfile("%s/key-ecc.pem" % config.data_dir):
+ return True
+ else:
+            logging.error("ECC SSL cert generation failed, cert or key files do not exist.")
+ return False
- # Select crypt that supported by both sides
- # Return: Name of the crypto
- def selectCrypt(self, client_supported):
- for crypt in self.crypt_supported:
- if crypt in client_supported:
- return crypt
- return False
-
-
- # Wrap socket for crypt
- # Return: wrapped socket
- def wrapSocket(self, sock, crypt, server=False):
- if crypt == "tls-rsa":
- ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
- if server:
- return ssl.wrap_socket(sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir, certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers)
- else:
- return ssl.wrap_socket(sock, ciphers=ciphers)
- else:
- return sock
-
-
- def removeCerts(self):
- for file_name in ["cert-rsa.pem", "key-rsa.pem"]:
- file_path = "%s/%s" % (config.data_dir, file_name)
- if os.path.isfile(file_path): os.unlink(file_path)
-
-
- # Load and create cert files is necessary
- def loadCerts(self):
- if config.disable_encryption: return False
-
- if self.loadSslRsaCert():
- self.crypt_supported.append("tls-rsa")
-
-
- # Try to create RSA server cert + sign for connection encryption
- # Return: True on success
- def loadSslRsaCert(self):
- import subprocess
-
- if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir):
- return True # Files already exits
-
- back = subprocess.Popen(
- "%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s/key-rsa.pem -out %s/cert-rsa.pem -nodes -config %s" % (self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]),
- shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
- ).stdout.read().strip()
- logging.debug("Generating RSA cert and key PEM files...%s" % back)
-
- if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir):
- return True
- else:
- logging.error("RSA ECC SSL cert generation failed, cert or key files not exits.")
- return False
-
-
- # Not used yet: Missing on some platform
- def createSslEccCert(self):
- return False
- import subprocess
-
- # Create ECC privatekey
- back = subprocess.Popen(
- "%s ecparam -name prime256v1 -genkey -out %s/key-ecc.pem" % (self.openssl_bin, config.data_dir),
- shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
- ).stdout.read().strip()
- self.log.debug("Generating ECC privatekey PEM file...%s" % back)
-
- # Create ECC cert
- back = subprocess.Popen(
- "%s req -new -key %s/key-ecc.pem -x509 -nodes -out %s/cert-ecc.pem -config %s" % (self.openssl_bin, config.data_dir, config.data_dir, self.openssl_env["OPENSSL_CONF"]),
- shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env
- ).stdout.read().strip()
- self.log.debug("Generating ECC cert PEM file...%s" % back)
-
- if os.path.isfile("%s/cert-ecc.pem" % config.data_dir) and os.path.isfile("%s/key-ecc.pem" % config.data_dir):
- return True
- else:
- self.logging.error("ECC SSL cert generation failed, cert or key files not exits.")
- return False
-
-
-manager = CryptConnectionManager()
\ No newline at end of file
+manager = CryptConnectionManager()
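
Intended use of the manager singleton, sketched from how the connection code drives it (sock is assumed to be an already-accepted socket; loadCerts() shells out to the openssl binary, so crypt_supported stays empty if that fails):

from Crypt import CryptConnection

def secureServerSocket(sock):
    CryptConnection.manager.loadCerts()  # Generates data/*.pem via openssl if missing
    crypt = CryptConnection.manager.selectCrypt(["tls-rsa"])  # Peer's advertised list
    if crypt:
        return CryptConnection.manager.wrapSocket(sock, crypt, server=True)
    return sock  # Fall back to plaintext, mirroring wrapSocket's default branch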
diff --git a/src/Crypt/CryptHash.py b/src/Crypt/CryptHash.py
index d9de55f9..e71fa0e7 100644
--- a/src/Crypt/CryptHash.py
+++ b/src/Crypt/CryptHash.py
@@ -1,36 +1,37 @@
import hashlib
+
def sha1sum(file, blocksize=65536):
- if hasattr(file, "endswith"): # Its a string open it
- file = open(file, "rb")
- hash = hashlib.sha1()
- for block in iter(lambda: file.read(blocksize), ""):
- hash.update(block)
- return hash.hexdigest()
+    if hasattr(file, "endswith"):  # It's a string, open it
+ file = open(file, "rb")
+ hash = hashlib.sha1()
+ for block in iter(lambda: file.read(blocksize), ""):
+ hash.update(block)
+ return hash.hexdigest()
def sha512sum(file, blocksize=65536):
- if hasattr(file, "endswith"): # Its a string open it
- file = open(file, "rb")
- hash = hashlib.sha512()
- for block in iter(lambda: file.read(blocksize), ""):
- hash.update(block)
- return hash.hexdigest()[0:64] # Truncate to 256bits is good enough
+ if hasattr(file, "endswith"): # It's a string, open it
+ file = open(file, "rb")
+ hash = hashlib.sha512()
+ for block in iter(lambda: file.read(blocksize), ""):
+ hash.update(block)
+ return hash.hexdigest()[0:64] # Truncating to 256 bits is good enough
if __name__ == "__main__":
- import cStringIO as StringIO
- a = StringIO.StringIO()
- a.write("hello!")
- a.seek(0)
- print hashlib.sha1("hello!").hexdigest()
- print sha1sum(a)
+ import cStringIO as StringIO
+ a = StringIO.StringIO()
+ a.write("hello!")
+ a.seek(0)
+ print hashlib.sha1("hello!").hexdigest()
+ print sha1sum(a)
- import time
- s = time.time()
- print sha1sum(open("F:\\Temp\\bigfile")),
- print time.time()-s
+ import time
+ s = time.time()
+ print sha1sum(open("F:\\Temp\\bigfile")),
+ print time.time() - s
- s = time.time()
- print sha512sum(open("F:\\Temp\\bigfile")),
- print time.time()-s
\ No newline at end of file
+ s = time.time()
+ print sha512sum(open("F:\\Temp\\bigfile")),
+ print time.time() - s
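Both hashers above rely on the two-argument `iter(callable, sentinel)` idiom: `file.read(blocksize)` is called repeatedly until it returns the empty-string sentinel at EOF, so arbitrarily large files are hashed in constant memory. A self-contained sketch of the same pattern (Python 2, like the rest of the tree; the `hashFileStreaming` name is illustrative):

```python
import hashlib

def hashFileStreaming(path, blocksize=65536):  # illustrative name
    hasher = hashlib.sha512()
    with open(path, "rb") as f:
        # iter() calls f.read(blocksize) repeatedly and stops at the "" sentinel
        for block in iter(lambda: f.read(blocksize), ""):
            hasher.update(block)
    return hasher.hexdigest()[0:64]  # Truncated to 256 bits, like sha512sum() above
```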
diff --git a/src/Db/Db.py b/src/Db/Db.py
index 12e3e27f..4f2034b9 100644
--- a/src/Db/Db.py
+++ b/src/Db/Db.py
@@ -1,263 +1,283 @@
-import sqlite3, json, time, logging, re, os
+import sqlite3
+import json
+import time
+import logging
+import re
+import os
+
from DbCursor import DbCursor
+
class Db:
- def __init__(self, schema, db_path):
- self.db_path = db_path
- self.db_dir = os.path.dirname(db_path)+"/"
- self.schema = schema
- self.schema["version"] = self.schema.get("version", 1)
- self.conn = None
- self.cur = None
- self.log = logging.getLogger("Db:%s" % schema["db_name"])
- self.table_names = None
- self.collect_stats = False
- self.query_stats = {}
- self.db_keyvalues = {}
+ def __init__(self, schema, db_path):
+ self.db_path = db_path
+ self.db_dir = os.path.dirname(db_path) + "/"
+ self.schema = schema
+ self.schema["version"] = self.schema.get("version", 1)
+ self.conn = None
+ self.cur = None
+ self.log = logging.getLogger("Db:%s" % schema["db_name"])
+ self.table_names = None
+ self.collect_stats = False
+ self.query_stats = {}
+ self.db_keyvalues = {}
- def connect(self):
- self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version))
- if not os.path.isdir(self.db_dir): # Directory not exist yet
- os.makedirs(self.db_dir)
- self.log.debug("Created Db path: %s" % self.db_dir)
- if not os.path.isfile(self.db_path):
- self.log.debug("Db file not exist yet: %s" % self.db_path)
- self.conn = sqlite3.connect(self.db_path)
- self.conn.row_factory = sqlite3.Row
- self.conn.isolation_level = None
- self.cur = self.getCursor()
- # We need more speed then security
- self.cur.execute("PRAGMA journal_mode = WAL")
- self.cur.execute("PRAGMA journal_mode = MEMORY")
- self.cur.execute("PRAGMA synchronous = OFF")
+ def connect(self):
+ self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version))
+ if not os.path.isdir(self.db_dir): # Directory does not exist yet
+ os.makedirs(self.db_dir)
+ self.log.debug("Created Db path: %s" % self.db_dir)
+ if not os.path.isfile(self.db_path):
+ self.log.debug("Db file not exist yet: %s" % self.db_path)
+ self.conn = sqlite3.connect(self.db_path)
+ self.conn.row_factory = sqlite3.Row
+ self.conn.isolation_level = None
+ self.cur = self.getCursor()
+ # We need more speed than security
+ self.cur.execute("PRAGMA journal_mode = WAL")
+ self.cur.execute("PRAGMA journal_mode = MEMORY")
+ self.cur.execute("PRAGMA synchronous = OFF")
+ # Execute query using dbcursor
+ def execute(self, query, params=None):
+ if not self.conn:
+ self.connect()
+ return self.cur.execute(query, params)
- # Execute query using dbcursor
- def execute(self, query, params = None):
- if not self.conn: self.connect()
- return self.cur.execute(query, params)
+ def close(self):
+ self.log.debug("Closing")
+ if self.cur:
+ self.cur.close()
+ if self.conn:
+ self.conn.close()
+ # Gets a cursor object to database
+ # Return: Cursor class
+ def getCursor(self):
+ if not self.conn:
+ self.connect()
+ return DbCursor(self.conn, self)
- def close(self):
- self.log.debug("Closing")
- if self.cur: self.cur.close()
- if self.conn: self.conn.close()
+ # Get the table version
+ # Return: Table version, or False if the table does not exist
+ def getTableVersion(self, table_name):
+ """if not self.table_names: # Get existing table names
+ res = self.cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
+ self.table_names = [row["name"] for row in res]
+ if table_name not in self.table_names:
+ return False
+ else:"""
+ if not self.db_keyvalues: # Get db keyvalues
+ try:
+ res = self.cur.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
+ except sqlite3.OperationalError, err: # Table does not exist
+ self.log.debug("Query error: %s" % err)
+ return False
- # Gets a cursor object to database
- # Return: Cursor class
- def getCursor(self):
- if not self.conn: self.connect()
- return DbCursor(self.conn, self)
+ for row in res:
+ self.db_keyvalues[row["key"]] = row["value"]
+ return self.db_keyvalues.get("table.%s.version" % table_name, 0)
- # Get the table version
- # Return: Table version or None if not exist
- def getTableVersion(self, table_name):
- """if not self.table_names: # Get existing table names
- res = self.cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
- self.table_names = [row["name"] for row in res]
- if table_name not in self.table_names:
- return False
+ # Check Db tables
+ # Return: Changed table names
+ def checkTables(self):
+ s = time.time()
+ changed_tables = []
+ cur = self.getCursor()
- else:"""
- if not self.db_keyvalues: # Get db keyvalues
- try:
- res = self.cur.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
- except sqlite3.OperationalError, err: # Table not exist
- self.log.debug("Query error: %s" % err)
- return False
+ cur.execute("BEGIN")
- for row in res:
- self.db_keyvalues[row["key"]] = row["value"]
+ # Check internal tables
+ # Check keyvalue table
+ changed = cur.needTable("keyvalue", [
+ ["keyvalue_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
+ ["key", "TEXT"],
+ ["value", "INTEGER"],
+ ["json_id", "INTEGER REFERENCES json (json_id)"],
+ ], [
+ "CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
+ ], version=self.schema["version"])
+ if changed:
+ changed_tables.append("keyvalue")
- return self.db_keyvalues.get("table.%s.version" % table_name, 0)
+ # Check json table
+ if self.schema["version"] == 1:
+ changed = cur.needTable("json", [
+ ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
+ ["path", "VARCHAR(255)"]
+ ], [
+ "CREATE UNIQUE INDEX path ON json(path)"
+ ], version=self.schema["version"])
+ else:
+ changed = cur.needTable("json", [
+ ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
+ ["directory", "VARCHAR(255)"],
+ ["file_name", "VARCHAR(255)"]
+ ], [
+ "CREATE UNIQUE INDEX path ON json(directory, file_name)"
+ ], version=self.schema["version"])
+ if changed:
+ changed_tables.append("json")
+ # Check schema tables
+ for table_name, table_settings in self.schema["tables"].items():
+ changed = cur.needTable(
+ table_name, table_settings["cols"],
+ table_settings["indexes"], version=table_settings["schema_changed"]
+ )
+ if changed:
+ changed_tables.append(table_name)
+ cur.execute("COMMIT")
+ self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables))
- # Check Db tables
- # Return: Changed table names
- def checkTables(self):
- s = time.time()
- changed_tables = []
- cur = self.getCursor()
+ return changed_tables
- cur.execute("BEGIN")
+ # Load json file to db
+ # Return: True if matched
+ def loadJson(self, file_path, file=None, cur=None):
+ if not file_path.startswith(self.db_dir):
+ return False # Not from the db dir: Skipping
+ relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path relative to the db dir
+ # Check if the filename matches any of the mappings in the schema
+ matched_maps = []
+ for match, map_settings in self.schema["maps"].items():
+ if re.match(match, relative_path):
+ matched_maps.append(map_settings)
- # Check internal tables
- # Check keyvalue table
- changed = cur.needTable("keyvalue", [
- ["keyvalue_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
- ["key", "TEXT"],
- ["value", "INTEGER"],
- ["json_id", "INTEGER REFERENCES json (json_id)"],
- ],[
- "CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
- ], version=self.schema["version"])
- if changed: changed_tables.append("keyvalue")
+ # No match found for the file
+ if not matched_maps:
+ return False
- # Check json table
- if self.schema["version"] == 1:
- changed = cur.needTable("json", [
- ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
- ["path", "VARCHAR(255)"]
- ], [
- "CREATE UNIQUE INDEX path ON json(path)"
- ], version=self.schema["version"])
- else:
- changed = cur.needTable("json", [
- ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
- ["directory", "VARCHAR(255)"],
- ["file_name", "VARCHAR(255)"]
- ], [
- "CREATE UNIQUE INDEX path ON json(directory, file_name)"
- ], version=self.schema["version"])
- if changed: changed_tables.append("json")
+ # Load the json file
+ if not file:
+ file = open(file_path)
+ data = json.load(file)
- # Check schema tables
- for table_name, table_settings in self.schema["tables"].items():
- changed = cur.needTable(table_name, table_settings["cols"], table_settings["indexes"], version=table_settings["schema_changed"])
- if changed: changed_tables.append(table_name)
+ # No cursor specified
+ if not cur:
+ cur = self.getCursor()
+ cur.execute("BEGIN")
+ cur.logging = False
+ commit_after_done = True
+ else:
+ commit_after_done = False
- cur.execute("COMMIT")
- self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time()-s, changed_tables))
+ # Row for current json file
+ json_row = cur.getJsonRow(relative_path)
- return changed_tables
+ # Check matched mappings in schema
+ for map in matched_maps:
+ # Insert non-relational key values
+ if map.get("to_keyvalue"):
+ # Get current values
+ res = cur.execute("SELECT * FROM keyvalue WHERE json_id = ?", (json_row["json_id"],))
+ current_keyvalue = {}
+ current_keyvalue_id = {}
+ for row in res:
+ current_keyvalue[row["key"]] = row["value"]
+ current_keyvalue_id[row["key"]] = row["keyvalue_id"]
+ for key in map["to_keyvalue"]:
+ if key not in current_keyvalue: # Keyvalue does not exist in the db yet
+ cur.execute(
+ "INSERT INTO keyvalue ?",
+ {"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
+ )
+ elif data.get(key) != current_keyvalue[key]: # Keyvalue has a different value
+ cur.execute(
+ "UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?",
+ (data.get(key), current_keyvalue_id[key])
+ )
- # Load json file to db
- # Return: True if matched
- def loadJson(self, file_path, file = None, cur = None):
- if not file_path.startswith(self.db_dir): return False # Not from the db dir: Skipping
- relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path realative to db file
- # Check if filename matches any of mappings in schema
- matched_maps = []
- for match, map_settings in self.schema["maps"].items():
- if re.match(match, relative_path):
- matched_maps.append(map_settings)
+ """
+ for key in map.get("to_keyvalue", []):
+ cur.execute("INSERT OR REPLACE INTO keyvalue ?",
+ {"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
+ )
+ """
- # No match found for the file
- if not matched_maps: return False
+ # Insert data to tables
+ for table_settings in map.get("to_table", []):
+ if isinstance(table_settings, dict): # Custom settings
+ table_name = table_settings["table"] # Table name to insert datas
+ node = table_settings.get("node", table_name) # Node keyname in data json file
+ key_col = table_settings.get("key_col") # Map dict key as this col
+ val_col = table_settings.get("val_col") # Map dict value as this col
+ import_cols = table_settings.get("import_cols")
+ replaces = table_settings.get("replaces")
+ else: # Simple settings
+ table_name = table_settings
+ node = table_settings
+ key_col = None
+ val_col = None
+ import_cols = None
+ replaces = None
- # Load the json file
- if not file: file = open(file_path)
- data = json.load(file)
+ cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
- # No cursor specificed
- if not cur:
- cur = self.getCursor()
- cur.execute("BEGIN")
- cur.logging = False
- commit_after_done = True
- else:
- commit_after_done = False
+ if node not in data:
+ continue
- # Row for current json file
- json_row = cur.getJsonRow(relative_path)
+ if key_col: # Map as dict
+ for key, val in data[node].iteritems():
+ if val_col: # Single value
+ cur.execute(
+ "INSERT OR REPLACE INTO %s ?" % table_name,
+ {key_col: key, val_col: val, "json_id": json_row["json_id"]}
+ )
+ else: # Multi value
+ if isinstance(val, dict): # Single row
+ row = val
+ if import_cols:
+ row = {key: row[key] for key in import_cols} # Filter row by import_cols
+ row[key_col] = key
+ # Replace in value if necessary
+ if replaces:
+ for replace_key, replace in replaces.iteritems():
+ if replace_key in row:
+ for replace_from, replace_to in replace.iteritems():
+ row[replace_key] = row[replace_key].replace(replace_from, replace_to)
- # Check matched mappings in schema
- for map in matched_maps:
- # Insert non-relational key values
- if map.get("to_keyvalue"):
- # Get current values
- res = cur.execute("SELECT * FROM keyvalue WHERE json_id = ?", (json_row["json_id"],))
- current_keyvalue = {}
- current_keyvalue_id = {}
- for row in res:
- current_keyvalue[row["key"]] = row["value"]
- current_keyvalue_id[row["key"]] = row["keyvalue_id"]
+ row["json_id"] = json_row["json_id"]
+ cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
+ else: # Multi row
+ for row in val:
+ row[key_col] = key
+ row["json_id"] = json_row["json_id"]
+ cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
+ else: # Map as list
+ for row in data[node]:
+ row["json_id"] = json_row["json_id"]
+ cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
- for key in map["to_keyvalue"]:
- if key not in current_keyvalue: # Keyvalue not exist yet in the db
- cur.execute("INSERT INTO keyvalue ?",
- {"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
- )
- elif data.get(key) != current_keyvalue[key]: # Keyvalue different value
- cur.execute("UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?", (data.get(key), current_keyvalue_id[key]))
-
- """for key in map.get("to_keyvalue", []):
- cur.execute("INSERT OR REPLACE INTO keyvalue ?",
- {"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
- )
- """
-
- # Insert data to tables
- for table_settings in map.get("to_table", []):
- if isinstance(table_settings, dict): # Custom settings
- table_name = table_settings["table"] # Table name to insert datas
- node = table_settings.get("node", table_name) # Node keyname in data json file
- key_col = table_settings.get("key_col") # Map dict key as this col
- val_col = table_settings.get("val_col") # Map dict value as this col
- import_cols = table_settings.get("import_cols")
- replaces = table_settings.get("replaces")
- else: # Simple settings
- table_name = table_settings
- node = table_settings
- key_col = None
- val_col = None
- import_cols = None
- replaces = None
-
- cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
-
- if node not in data: continue
-
- table_schema = self.schema["tables"][table_name]
- if key_col: # Map as dict
- for key, val in data[node].iteritems():
- if val_col: # Single value
- cur.execute("INSERT OR REPLACE INTO %s ?" % table_name,
- { key_col: key, val_col: val, "json_id": json_row["json_id"] }
- )
- else: # Multi value
- if isinstance(val, dict): # Single row
- row = val
- if import_cols: row = { key: row[key] for key in import_cols } # Filter row by import_cols
- row[key_col] = key
- # Replace in value if necessary
- if replaces:
- for replace_key, replace in replaces.iteritems():
- if replace_key in row:
- for replace_from, replace_to in replace.iteritems():
- row[replace_key] = row[replace_key].replace(replace_from, replace_to)
-
- row["json_id"] = json_row["json_id"]
- cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
- else: # Multi row
- for row in val:
- row[key_col] = key
- row["json_id"] = json_row["json_id"]
- cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
- else: # Map as list
- for row in data[node]:
- row["json_id"] = json_row["json_id"]
- cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
-
- if commit_after_done: cur.execute("COMMIT")
- return True
+ if commit_after_done:
+ cur.execute("COMMIT")
+ return True
if __name__ == "__main__":
- s = time.time()
- console_log = logging.StreamHandler()
- logging.getLogger('').setLevel(logging.DEBUG)
- logging.getLogger('').addHandler(console_log)
- console_log.setLevel(logging.DEBUG)
- dbjson = DbJson(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db")
- dbjson.collect_stats = True
- dbjson.checkTables()
- cur = dbjson.getCursor()
- cur.execute("BEGIN")
- cur.logging = False
- dbjson.loadJson("data/users/content.json", cur=cur)
- for user_dir in os.listdir("data/users"):
- if os.path.isdir("data/users/%s" % user_dir):
- dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur)
- #print ".",
- cur.logging = True
- cur.execute("COMMIT")
- print "Done in %.3fs" % (time.time()-s)
- for query, stats in sorted(dbjson.query_stats.items()):
- print "-", query, stats
-
+ s = time.time()
+ console_log = logging.StreamHandler()
+ logging.getLogger('').setLevel(logging.DEBUG)
+ logging.getLogger('').addHandler(console_log)
+ console_log.setLevel(logging.DEBUG)
+ dbjson = Db(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db")
+ dbjson.collect_stats = True
+ dbjson.checkTables()
+ cur = dbjson.getCursor()
+ cur.execute("BEGIN")
+ cur.logging = False
+ dbjson.loadJson("data/users/content.json", cur=cur)
+ for user_dir in os.listdir("data/users"):
+ if os.path.isdir("data/users/%s" % user_dir):
+ dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur)
+ # print ".",
+ cur.logging = True
+ cur.execute("COMMIT")
+ print "Done in %.3fs" % (time.time() - s)
+ for query, stats in sorted(dbjson.query_stats.items()):
+ print "-", query, stats
diff --git a/src/Db/DbCursor.py b/src/Db/DbCursor.py
index 8d198fa1..1c8b8876 100644
--- a/src/Db/DbCursor.py
+++ b/src/Db/DbCursor.py
@@ -1,115 +1,118 @@
-import time, re
+import time
+import re
# Special sqlite cursor
-class DbCursor:
- def __init__(self, conn, db):
- self.conn = conn
- self.db = db
- self.cursor = conn.cursor()
- self.logging = True
- def execute(self, query, params=None):
- if isinstance(params, dict): # Make easier select and insert by allowing dict params
- if query.startswith("SELECT") or query.startswith("DELETE"): # Convert param dict to SELECT * FROM table WHERE key = ?, key2 = ? format
- wheres = "AND ".join([key+" = ?" for key in params])
- query = query.replace("?", wheres)
- params = params.values()
- else: # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
- keys = ", ".join(params.keys())
- values = ", ".join(['?' for key in params.keys()])
- query = query.replace("?", "(%s) VALUES (%s)" % (keys, values))
- params = tuple(params.values())
+class DbCursor:
- s = time.time()
- # if query == "COMMIT": self.logging = True # Turn logging back on transaction commit
+ def __init__(self, conn, db):
+ self.conn = conn
+ self.db = db
+ self.cursor = conn.cursor()
+ self.logging = False
- if params: # Query has parameters
- res = self.cursor.execute(query, params)
- if self.logging:
- self.db.log.debug((query.replace("?", "%s") % params)+" (Done in %.4f)" % (time.time()-s))
- else:
- res = self.cursor.execute(query)
- if self.logging: self.db.log.debug(query+" (Done in %.4f)" % (time.time()-s))
+ def execute(self, query, params=None):
+ if isinstance(params, dict): # Make easier select and insert by allowing dict params
+ if query.startswith("SELECT") or query.startswith("DELETE"):
+ # Convert param dict to SELECT * FROM table WHERE key = ?, key2 = ? format
+ wheres = "AND ".join([key + " = ?" for key in params])
+ query = query.replace("?", wheres)
+ params = params.values()
+ else:
+ # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
+ keys = ", ".join(params.keys())
+ values = ", ".join(['?' for key in params.keys()])
+ query = query.replace("?", "(%s) VALUES (%s)" % (keys, values))
+ params = tuple(params.values())
- # Log query stats
- if self.db.collect_stats:
- if query not in self.db.query_stats:
- self.db.query_stats[query] = {"call": 0, "time": 0.0}
- self.db.query_stats[query]["call"] += 1
- self.db.query_stats[query]["time"] += time.time()-s
-
- # if query == "BEGIN": self.logging = False # Turn logging off on transaction commit
- return res
+ s = time.time()
+ # if query == "COMMIT": self.logging = True # Turn logging back on transaction commit
+ if params: # Query has parameters
+ res = self.cursor.execute(query, params)
+ if self.logging:
+ self.db.log.debug((query.replace("?", "%s") % params) + " (Done in %.4f)" % (time.time() - s))
+ else:
+ res = self.cursor.execute(query)
+ if self.logging:
+ self.db.log.debug(query + " (Done in %.4f)" % (time.time() - s))
- # Create new table
- # Return: True on success
- def createTable(self, table, cols):
- # TODO: Check current structure
- """table_changed = False
- res = c.execute("PRAGMA table_info(%s)" % table)
- if res:
- for row in res:
- print row["name"], row["type"], cols[row["name"]]
- print row
- else:
- table_changed = True
+ # Log query stats
+ if self.db.collect_stats:
+ if query not in self.db.query_stats:
+ self.db.query_stats[query] = {"call": 0, "time": 0.0}
+ self.db.query_stats[query]["call"] += 1
+ self.db.query_stats[query]["time"] += time.time() - s
- if table_changed: # Table structure changed, drop and create again"""
- self.execute("DROP TABLE IF EXISTS %s" % table)
- col_definitions = []
- for col_name, col_type in cols:
- col_definitions.append("%s %s" % (col_name, col_type))
+ # if query == "BEGIN": self.logging = False # Turn logging off on transaction commit
+ return res
- self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions)))
- return True
+ # Create new table
+ # Return: True on success
+ def createTable(self, table, cols):
+ # TODO: Check current structure
+ """table_changed = False
+ res = c.execute("PRAGMA table_info(%s)" % table)
+ if res:
+ for row in res:
+ print row["name"], row["type"], cols[row["name"]]
+ print row
+ else:
+ table_changed = True
+ if table_changed: # Table structure changed, drop and create again"""
+ self.execute("DROP TABLE IF EXISTS %s" % table)
+ col_definitions = []
+ for col_name, col_type in cols:
+ col_definitions.append("%s %s" % (col_name, col_type))
+ self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions)))
+ return True
- # Create indexes on table
- # Return: True on success
- def createIndexes(self, table, indexes):
- # indexes.append("CREATE INDEX %s_id ON %s(%s_id)" % (table, table, table)) # Primary key index
- for index in indexes:
- self.execute(index)
+ # Create indexes on table
+ # Return: True on success
+ def createIndexes(self, table, indexes):
+ # indexes.append("CREATE INDEX %s_id ON %s(%s_id)" % (table, table, table)) # Primary key index
+ for index in indexes:
+ self.execute(index)
+ # Create table if it does not exist
+ # Return: True if updated
+ def needTable(self, table, cols, indexes=None, version=1):
+ current_version = self.db.getTableVersion(table)
+ if int(current_version) < int(version): # Table needs update or does not exist
+ self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version))
+ self.createTable(table, cols)
+ if indexes:
+ self.createIndexes(table, indexes)
+ self.execute(
+ "INSERT OR REPLACE INTO keyvalue ?",
+ {"json_id": 0, "key": "table.%s.version" % table, "value": version}
+ )
+ return True
+ else: # Not changed
+ return False
- # Create table if not exist
- # Return: True if updated
- def needTable(self, table, cols, indexes=None, version=1):
- current_version = self.db.getTableVersion(table)
- if int(current_version) < int(version): # Table need update or not extis
- self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version))
- self.createTable(table, cols)
- if indexes: self.createIndexes(table, indexes)
- self.execute("INSERT OR REPLACE INTO keyvalue ?",
- {"json_id": 0, "key": "table.%s.version" % table, "value": version}
- )
- return True
- else: # Not changed
- return False
+ # Get or create a row for json file
+ # Return: The database row
+ def getJsonRow(self, file_path):
+ directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups()
+ if self.db.schema["version"] == 1:
+ res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
+ row = res.fetchone()
+ if not row: # No row yet, create it
+ self.execute("INSERT INTO json ?", {"path": file_path})
+ res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
+ row = res.fetchone()
+ else:
+ res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
+ row = res.fetchone()
+ if not row: # No row yet, create it
+ self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name})
+ res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
+ row = res.fetchone()
+ return row
-
- # Get or create a row for json file
- # Return: The database row
- def getJsonRow(self, file_path):
- directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups()
- if self.db.schema["version"] == 1:
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
- row = res.fetchone()
- if not row: # No row yet, create it
- self.execute("INSERT INTO json ?", {"path": file_path})
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
- row = res.fetchone()
- else:
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
- row = res.fetchone()
- if not row: # No row yet, create it
- self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name})
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
- row = res.fetchone()
- return row
-
- def close(self):
- self.cursor.close()
+ def close(self):
+ self.cursor.close()
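The dict-parameter branch of `execute()` above is the mechanism behind queries like `INSERT INTO json ?`: the single `?` is rewritten into a column/value list for inserts, or into `key = ?` conditions for SELECT/DELETE. A standalone sketch of that rewrite (the `expandQuery` name is illustrative, and it uses an explicit ` AND ` separator for readability):

```python
def expandQuery(query, params):  # illustrative re-implementation, not the class method
    if query.startswith("SELECT") or query.startswith("DELETE"):
        # "SELECT * FROM json WHERE ?" -> "SELECT * FROM json WHERE path = ?"
        wheres = " AND ".join(key + " = ?" for key in params)
        return query.replace("?", wheres), tuple(params.values())
    # "INSERT INTO json ?" -> "INSERT INTO json (path) VALUES (?)"
    keys = ", ".join(params.keys())
    values = ", ".join("?" for _ in params)
    return query.replace("?", "(%s) VALUES (%s)" % (keys, values)), tuple(params.values())

print expandQuery("INSERT INTO json ?", {"path": "content.json"})
# ('INSERT INTO json (path) VALUES (?)', ('content.json',))
```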
diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py
index f7070963..3622554c 100644
--- a/src/Debug/Debug.py
+++ b/src/Debug/Debug.py
@@ -1,47 +1,51 @@
-import sys, os, traceback
+import sys
+import os
+import traceback
+
# Non fatal exception
class Notify(Exception):
- def __init__(self, message):
- self.message = message
+ def __init__(self, message):
+ self.message = message
- def __str__(self):
- return self.message
+ def __str__(self):
+ return self.message
def formatException(err=None):
- if type(err) == Notify: return err
- exc_type, exc_obj, exc_tb = sys.exc_info()
- if not err: err = exc_obj.message
- tb = []
- for frame in traceback.extract_tb(exc_tb):
- path, line, function, text = frame
- file = os.path.split(path)[1]
- tb.append("%s line %s" % (file, line))
- return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb))
+ if type(err) == Notify:
+ return err
+ exc_type, exc_obj, exc_tb = sys.exc_info()
+ if not err:
+ err = exc_obj.message
+ tb = []
+ for frame in traceback.extract_tb(exc_tb):
+ path, line, function, text = frame
+ file = os.path.split(path)[1]
+ tb.append("%s line %s" % (file, line))
+ return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb))
if __name__ == "__main__":
+ try:
+ print 1 / 0
+ except Exception, err:
+ print type(err).__name__
+ print "1/0 error: %s" % formatException(err)
- try:
- print 1/0
- except Exception, err:
- print type(err).__name__
- print "1/0 error: %s" % formatException(err)
-
- def loadJson():
- json.loads("Errr")
+ def loadJson():
+ json.loads("Errr")
- import json
- try:
- loadJson()
- except Exception, err:
- print err
- print "Json load error: %s" % formatException(err)
+ import json
+ try:
+ loadJson()
+ except Exception, err:
+ print err
+ print "Json load error: %s" % formatException(err)
- try:
- raise Notify("nothing...")
- except Exception, err:
- print "Notify: %s" % formatException(err)
+ try:
+ raise Notify("nothing...")
+ except Exception, err:
+ print "Notify: %s" % formatException(err)
- loadJson()
+ loadJson()
diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py
index 858847ac..68afd220 100644
--- a/src/Debug/DebugHook.py
+++ b/src/Debug/DebugHook.py
@@ -1,55 +1,64 @@
-import gevent, sys, logging
+import sys
+import logging
+
+import gevent
+
from Config import config
last_error = None
+
# Store last error, ignore notify, allow manual error logging
def handleError(*args):
- global last_error
- if not args: # Manual called
- args = sys.exc_info()
- silent = True
- else:
- silent = False
- if args[0].__name__ != "Notify": last_error = args
- if not silent and args[0].__name__ != "Notify":
- logging.exception("Unhandled exception")
- sys.__excepthook__(*args)
+ global last_error
+ if not args: # Called manually
+ args = sys.exc_info()
+ silent = True
+ else:
+ silent = False
+ if args[0].__name__ != "Notify":
+ last_error = args
+ if not silent and args[0].__name__ != "Notify":
+ logging.exception("Unhandled exception")
+ sys.__excepthook__(*args)
# Ignore notify errors
def handleErrorNotify(*args):
- if args[0].__name__ != "Notify":
- logging.exception("Unhandled exception")
- sys.__excepthook__(*args)
+ if args[0].__name__ != "Notify":
+ logging.exception("Unhandled exception")
+ sys.__excepthook__(*args)
OriginalGreenlet = gevent.Greenlet
+
+
class ErrorhookedGreenlet(OriginalGreenlet):
- def _report_error(self, exc_info):
- sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
+ def _report_error(self, exc_info):
+ sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
if config.debug:
- sys.excepthook = handleError
+ sys.excepthook = handleError
else:
- sys.excepthook = handleErrorNotify
+ sys.excepthook = handleErrorNotify
gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
reload(gevent)
if __name__ == "__main__":
- import time
- from gevent import monkey; monkey.patch_all(thread=False, ssl=False)
- import Debug
- def sleeper():
- print "started"
- time.sleep(3)
- print "stopped"
- thread1 = gevent.spawn(sleeper)
- thread2 = gevent.spawn(sleeper)
- time.sleep(1)
- print "killing..."
- thread1.throw(Exception("Hello"))
- thread2.throw(Debug.Notify("Throw"))
- print "killed"
+ import time
+ from gevent import monkey
+ monkey.patch_all(thread=False, ssl=False)
+ import Debug
+ def sleeper():
+ print "started"
+ time.sleep(3)
+ print "stopped"
+ thread1 = gevent.spawn(sleeper)
+ thread2 = gevent.spawn(sleeper)
+ time.sleep(1)
+ print "killing..."
+ thread1.throw(Exception("Hello"))
+ thread2.throw(Debug.Notify("Throw"))
+ print "killed"
diff --git a/src/Debug/DebugMedia.py b/src/Debug/DebugMedia.py
index a29e9490..27ad7a59 100644
--- a/src/Debug/DebugMedia.py
+++ b/src/Debug/DebugMedia.py
@@ -1,81 +1,91 @@
-import os, subprocess, re, logging, time
+import os
+import subprocess
+import re
+import logging
+import time
+
from Config import config
+
# Find files with extension in path
def findfiles(path, find_ext):
- for root, dirs, files in os.walk(path, topdown = False):
- for file in sorted(files):
- file_path = root+"/"+file
- file_ext = file.split(".")[-1]
- if file_ext in find_ext and not file.startswith("all."): yield file_path.replace("\\", "/")
+ for root, dirs, files in os.walk(path, topdown=False):
+ for file in sorted(files):
+ file_path = root + "/" + file
+ file_ext = file.split(".")[-1]
+ if file_ext in find_ext and not file.startswith("all."):
+ yield file_path.replace("\\", "/")
# Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features
def merge(merged_path):
- merge_dir = os.path.dirname(merged_path)
- s = time.time()
- ext = merged_path.split(".")[-1]
- if ext == "js": # If merging .js find .coffee too
- find_ext = ["js", "coffee"]
- else:
- find_ext = [ext]
+ merge_dir = os.path.dirname(merged_path)
+ s = time.time()
+ ext = merged_path.split(".")[-1]
+ if ext == "js": # If merging .js find .coffee too
+ find_ext = ["js", "coffee"]
+ else:
+ find_ext = [ext]
- # If exist check the other files modification date
- if os.path.isfile(merged_path):
- merged_mtime = os.path.getmtime(merged_path)
- else:
- merged_mtime = 0
+ # If exist check the other files modification date
+ if os.path.isfile(merged_path):
+ merged_mtime = os.path.getmtime(merged_path)
+ else:
+ merged_mtime = 0
+ changed = {}
+ for file_path in findfiles(merge_dir, find_ext):
+ if os.path.getmtime(file_path) > merged_mtime:
+ changed[file_path] = True
+ if not changed:
+ return # Assets not changed, nothing to do
- changed = {}
- for file_path in findfiles(merge_dir, find_ext):
- if os.path.getmtime(file_path) > merged_mtime:
- changed[file_path] = True
- if not changed: return # Assets not changed, nothing to do
+ if os.path.isfile(merged_path): # Find old parts to avoid unnecessary recompile
+ merged_old = open(merged_path, "rb").read().decode("utf8")
+ old_parts = {}
+ for match in re.findall("(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
+ old_parts[match[1]] = match[2].strip("\n\r")
- if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile
- merged_old = open(merged_path, "rb").read().decode("utf8")
- old_parts = {}
- for match in re.findall("(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
- old_parts[match[1]] = match[2].strip("\n\r")
+ # Merge files
+ parts = []
+ s_total = time.time()
+ for file_path in findfiles(merge_dir, find_ext):
+ parts.append("\n\n/* ---- %s ---- */\n\n" % file_path)
+ if file_path.endswith(".coffee"): # Compile coffee script
+ if file_path in changed or file_path not in old_parts: # Only recompile if changed or it has not been compiled before
+ if not config.coffeescript_compiler:
+ logging.error("No coffeescript compiler definied, skipping compiling %s" % merged_path)
+ return False # No coffeescript compiler, skip this file
+ command = config.coffeescript_compiler % os.path.join(*file_path.split("/")) # Fix os path separator
+ s = time.time()
+ compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
+ out = compiler.stdout.read().decode("utf8")
+ logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
+ if out and out.startswith("("):
+ parts.append(out)
+ else:
+ error = out
+ logging.error("%s Compile error: %s" % (file_path, error))
+ parts.append(
+ "alert('%s compile error: %s');" %
+ (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n"))
+ )
+ else: # Not changed, use the old_part
+ parts.append(old_parts[file_path])
+ else: # Add to parts
+ parts.append(open(file_path).read().decode("utf8"))
- # Merge files
- parts = []
- s_total = time.time()
- for file_path in findfiles(merge_dir, find_ext):
- parts.append("\n\n/* ---- %s ---- */\n\n" % file_path)
- if file_path.endswith(".coffee"): # Compile coffee script
- if file_path in changed or file_path not in old_parts: # Only recompile if changed or its not compiled before
- if not config.coffeescript_compiler:
- logging.error("No coffeescript compiler definied, skipping compiling %s" % merged_path)
- return False # No coffeescript compiler, skip this file
- command = config.coffeescript_compiler % os.path.join(*file_path.split("/")) # Fix os path separator
- s = time.time()
- compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
- out = compiler.stdout.read().decode("utf8")
- logging.debug("Running: %s (Done in %.2fs)" % (command, time.time()-s))
- if out and out.startswith("("):
- parts.append(out)
- else:
- error = out
- logging.error("%s Compile error: %s" % (file_path, error))
- parts.append("alert('%s compile error: %s');" % (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n") ) )
- else: # Not changed use the old_part
- parts.append(old_parts[file_path])
- else: # Add to parts
- parts.append(open(file_path).read().decode("utf8"))
-
- merged = u"\n".join(parts)
- if ext == "css": # Vendor prefix css
- from lib.cssvendor import cssvendor
- merged = cssvendor.prefix(merged)
- merged = merged.replace("\r", "")
- open(merged_path, "wb").write(merged.encode("utf8"))
- logging.debug("Merged %s (%.2fs)" % (merged_path, time.time()-s_total))
+ merged = u"\n".join(parts)
+ if ext == "css": # Vendor prefix css
+ from lib.cssvendor import cssvendor
+ merged = cssvendor.prefix(merged)
+ merged = merged.replace("\r", "")
+ open(merged_path, "wb").write(merged.encode("utf8"))
+ logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
if __name__ == "__main__":
- logging.getLogger().setLevel(logging.DEBUG)
- os.chdir("..")
- config.coffeescript_compiler = r'type "%s" | tools\coffee-node\bin\node.exe tools\coffee-node\bin\coffee --no-header -s -p'
- merge("data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/js/all.js")
+ logging.getLogger().setLevel(logging.DEBUG)
+ os.chdir("..")
+ config.coffeescript_compiler = r'type "%s" | tools\coffee-node\bin\node.exe tools\coffee-node\bin\coffee --no-header -s -p'
+ merge("data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/js/all.js")
diff --git a/src/Debug/DebugReloader.py b/src/Debug/DebugReloader.py
index b3ae437b..5308f3cb 100644
--- a/src/Debug/DebugReloader.py
+++ b/src/Debug/DebugReloader.py
@@ -1,42 +1,44 @@
-import logging, os, sys, time
+import logging
+import time
import threading
+
from Config import config
-if config.debug: # Only load pyfilesytem if using debug mode
- try:
- from fs.osfs import OSFS
- pyfilesystem = OSFS("src")
- pyfilesystem_plugins = OSFS("plugins")
- logging.debug("Pyfilesystem detected, source code autoreload enabled")
- except Exception, err:
- pyfilesystem = False
+if config.debug: # Only load pyfilesystem if using debug mode
+ try:
+ from fs.osfs import OSFS
+ pyfilesystem = OSFS("src")
+ pyfilesystem_plugins = OSFS("plugins")
+ logging.debug("Pyfilesystem detected, source code autoreload enabled")
+ except Exception, err:
+ pyfilesystem = False
else:
- pyfilesystem = False
+ pyfilesystem = False
+
class DebugReloader:
- def __init__ (self, callback, directory = "/"):
- self.last_chaged = 0
- if pyfilesystem:
- self.directory = directory
- self.callback = callback
- logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback))
- thread = threading.Thread(target=self.addWatcher)
- thread.daemon = True
- thread.start()
+ def __init__(self, callback, directory="/"):
+ self.last_changed = 0
+ if pyfilesystem:
+ self.directory = directory
+ self.callback = callback
+ logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback))
+ thread = threading.Thread(target=self.addWatcher)
+ thread.daemon = True
+ thread.start()
- def addWatcher(self, recursive=True):
- try:
- time.sleep(1) # Wait for .pyc compiles
- pyfilesystem.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive)
- pyfilesystem_plugins.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive)
- except Exception, err:
- print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err
+ def addWatcher(self, recursive=True):
+ try:
+ time.sleep(1) # Wait for .pyc compiles
+ pyfilesystem.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive)
+ pyfilesystem_plugins.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive)
+ except Exception, err:
+ print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err
-
- def changed(self, evt):
- if not evt.path or "%s/" % config.data_dir in evt.path or evt.path.endswith("pyc") or time.time()-self.last_chaged < 1: return False # Ignore *.pyc changes and no reload within 1 sec
- #logging.debug("Changed: %s" % evt)
- time.sleep(0.1) # Wait for lock release
- self.callback()
- self.last_chaged = time.time()
+ def changed(self, evt):
+ if not evt.path or "%s/" % config.data_dir in evt.path or evt.path.endswith("pyc") or time.time() - self.last_changed < 1:
+ return False # Ignore *.pyc changes and no reload within 1 sec
+ time.sleep(0.1) # Wait for lock release
+ self.callback()
+ self.last_changed = time.time()
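Usage is fire-and-forget: construct the reloader with a callback and, when pyfilesystem is importable, a daemon thread starts watching `src/` and `plugins/`. A minimal sketch, assuming `src/` is on `sys.path`; the callback body is illustrative:

```python
from Debug import DebugReloader

def onChange():
    print "Source changed, reloading..."  # e.g. re-import modules here

# No-op when pyfilesystem is missing; otherwise watches recursively from "/"
DebugReloader.DebugReloader(onChange)
```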
diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py
index 3c413999..b987e304 100644
--- a/src/File/FileRequest.py
+++ b/src/File/FileRequest.py
@@ -11,7 +11,8 @@ from Debug import Debug
from Config import config
from util import RateLimit, StreamingMsgpack
-FILE_BUFF = 1024*512
+FILE_BUFF = 1024 * 512
+
# Request from me
class FileRequest(object):
@@ -52,7 +53,7 @@ class FileRequest(object):
self.actionGetFile(params)
elif cmd == "update":
event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
- if not RateLimit.isAllowed(event): # There was already an update for this file in the last 10 second
+ if not RateLimit.isAllowed(event): # There was already an update for this file in the last 10 seconds
self.response({"ok": "File update queued"})
# If called more than once within 10 sec only keep the last update
RateLimit.callAsync(event, 10, self.actionUpdate, params)
@@ -69,72 +70,86 @@ class FileRequest(object):
# Update a site file request
def actionUpdate(self, params):
site = self.sites.get(params["site"])
- if not site or not site.settings["serving"]: # Site unknown or not serving
+ if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
if site.settings["own"] and params["inner_path"].endswith("content.json"):
- self.log.debug("Someone trying to push a file to own site %s, reload local %s first" % (site.address, params["inner_path"]))
+ self.log.debug(
+ "Someone trying to push a file to own site %s, reload local %s first" %
+ (site.address, params["inner_path"])
+ )
changed = site.content_manager.loadContent(params["inner_path"], add_bad_files=False)
- if changed: # Content.json changed locally
- site.settings["size"] = site.content_manager.getTotalSize() # Update site size
+ if changed: # Content.json changed locally
+ site.settings["size"] = site.content_manager.getTotalSize() # Update site size
buff = StringIO(params["body"])
valid = site.content_manager.verifyFile(params["inner_path"], buff)
- if valid == True: # Valid and changed
+ if valid is True: # Valid and changed
self.log.info("Update for %s looks valid, saving..." % params["inner_path"])
buff.seek(0)
site.storage.write(params["inner_path"], buff)
- site.onFileDone(params["inner_path"]) # Trigger filedone
+ site.onFileDone(params["inner_path"]) # Trigger filedone
- if params["inner_path"].endswith("content.json"): # Download every changed file from peer
- peer = site.addPeer(self.connection.ip, self.connection.port, return_peer = True) # Add or get peer
- site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"]) # On complete publish to other peers
+ if params["inner_path"].endswith("content.json"): # Download every changed file from peer
+ peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer
+ # On complete publish to other peers
+ site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"])
+
+ # Load new content file and download changed files in new thread
gevent.spawn(
lambda: site.downloadContent(params["inner_path"], peer=peer)
- ) # Load new content file and download changed files in new thread
+ )
self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})
- elif valid == None: # Not changed
- peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer
+ elif valid is None: # Not changed
+ peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer
if peer:
- self.log.debug("Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) )
- for task in site.worker_manager.tasks: # New peer add to every ongoing task
- if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from this peer too if its peer locked
+ self.log.debug(
+ "Same version, adding new peer for locked files: %s, tasks: %s" %
+ (peer.key, len(site.worker_manager.tasks))
+ )
+ for task in site.worker_manager.tasks: # Add the new peer to every ongoing task
+ if task["peers"]:
+ # Download the file from this peer too if it's peer-locked
+ site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)
self.response({"ok": "File not changed"})
- else: # Invalid sign or sha1 hash
+ else: # Invalid sign or sha1 hash
self.log.debug("Update for %s is invalid" % params["inner_path"])
self.response({"error": "File invalid"})
# Send file content request
def actionGetFile(self, params):
site = self.sites.get(params["site"])
- if not site or not site.settings["serving"]: # Site unknown or not serving
+ if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
try:
file_path = site.storage.getPath(params["inner_path"])
- if config.debug_socket: self.log.debug("Opening file: %s" % file_path)
+ if config.debug_socket:
+ self.log.debug("Opening file: %s" % file_path)
with StreamingMsgpack.FilePart(file_path, "rb") as file:
file.seek(params["location"])
file.read_bytes = FILE_BUFF
- back = {"body": file,
- "size": os.fstat(file.fileno()).st_size,
- "location": min(file.tell()+FILE_BUFF, os.fstat(file.fileno()).st_size)
- }
+ back = {
+ "body": file,
+ "size": os.fstat(file.fileno()).st_size,
+ "location": min(file.tell() + FILE_BUFF, os.fstat(file.fileno()).st_size)
+ }
if config.debug_socket:
- self.log.debug("Sending file %s from position %s to %s" % (file_path,
- params["location"],
- back["location"]))
+ self.log.debug(
+ "Sending file %s from position %s to %s" %
+ (file_path, params["location"], back["location"])
+ )
self.response(back, streaming=True)
if config.debug_socket:
self.log.debug("File %s sent" % file_path)
# Add peer to site if not added before
connected_peer = site.addPeer(self.connection.ip, self.connection.port)
- if connected_peer: # Just added
+ if connected_peer: # Just added
connected_peer.connect(self.connection) # Assign current connection to peer
except Exception, err:
@@ -145,7 +160,7 @@ class FileRequest(object):
# Peer exchange request
def actionPex(self, params):
site = self.sites.get(params["site"])
- if not site or not site.settings["serving"]: # Site unknown or not serving
+ if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
@@ -156,10 +171,11 @@ class FileRequest(object):
added += 1
connected_peer.connect(self.connection) # Assign current connection to peer
- for peer in params["peers"]: # Add sent peers to site
+ for peer in params["peers"]: # Add sent peers to site
address = self.unpackAddress(peer)
got_peer_keys.append("%s:%s" % address)
- if site.addPeer(*address): added += 1
+ if site.addPeer(*address):
+ added += 1
# Send back peers that is not in the sent list and connectable (not port 0)
packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
if added:
@@ -170,12 +186,14 @@ class FileRequest(object):
# Get modified content.json files since
def actionListModified(self, params):
site = self.sites.get(params["site"])
- if not site or not site.settings["serving"]: # Site unknown or not serving
+ if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
- modified_files = {inner_path: content["modified"]
- for inner_path, content in site.content_manager.contents.iteritems()
- if content["modified"] > params["since"]}
+ modified_files = {
+ inner_path: content["modified"]
+ for inner_path, content in site.content_manager.contents.iteritems()
+ if content["modified"] > params["since"]
+ }
# Add peer to site if not added before
connected_peer = site.addPeer(self.connection.ip, self.connection.port)
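The `getFile` reply above is what lets peers stream large files: each response reports the file `size` and the next `location` (`min(tell() + FILE_BUFF, size)`), and the requester keeps asking from that offset until it reaches the end. A client-side sketch of that loop, where `request()` is a hypothetical transport callable standing in for a real peer connection:

```python
FILE_BUFF = 1024 * 512

def downloadFull(request, site, inner_path):  # request() is a hypothetical transport
    buff = ""
    location = 0
    while True:
        back = request("getFile", {"site": site, "inner_path": inner_path, "location": location})
        buff += back["body"]
        location = back["location"]   # Server set this to min(tell() + FILE_BUFF, size)
        if location >= back["size"]:  # Final chunk: offset reached end of file
            return buff
```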
diff --git a/src/File/FileServer.py b/src/File/FileServer.py
index c9fdcc31..a56daa5d 100644
--- a/src/File/FileServer.py
+++ b/src/File/FileServer.py
@@ -1,5 +1,10 @@
-import os, logging, urllib2, re, time
-import gevent, msgpack
+import logging
+import urllib2
+import re
+import time
+
+import gevent
+
from Config import config
from FileRequest import FileRequest
from Site import SiteManager
@@ -9,216 +14,216 @@ from util import UpnpPunch
class FileServer(ConnectionServer):
- def __init__(self):
- ConnectionServer.__init__(self, config.fileserver_ip, config.fileserver_port, self.handleRequest)
- if config.ip_external: # Ip external definied in arguments
- self.port_opened = True
- SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
- else:
- self.port_opened = None # Is file server opened on router
- self.sites = SiteManager.site_manager.list()
+ def __init__(self):
+ ConnectionServer.__init__(self, config.fileserver_ip, config.fileserver_port, self.handleRequest)
+ if config.ip_external: # External ip defined in arguments
+ self.port_opened = True
+ SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
+ else:
+ self.port_opened = None # Is file server opened on router
+ self.sites = SiteManager.site_manager.list()
- # Handle request to fileserver
- def handleRequest(self, connection, message):
- if "params" in message:
- self.log.debug("FileRequest: %s %s %s %s" % (str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path")))
- else:
- self.log.debug("FileRequest: %s %s" % (str(connection), req["cmd"]))
- req = FileRequest(self, connection)
- req.route(message["cmd"], message.get("req_id"), message.get("params"))
+ # Handle request to fileserver
+ def handleRequest(self, connection, message):
+ if "params" in message:
+ self.log.debug(
+ "FileRequest: %s %s %s %s" %
+ (str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path"))
+ )
+ else:
+ self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"]))
+ req = FileRequest(self, connection)
+ req.route(message["cmd"], message.get("req_id"), message.get("params"))
+ # Reload the FileRequest class to prevent restarts in debug mode
+ def reload(self):
+ global FileRequest
+ import imp
+ FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest
- # Reload the FileRequest class to prevent restarts in debug mode
- def reload(self):
- global FileRequest
- import imp
- FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest
+ # Try to open the port using upnp
+ def openport(self, port=None, check=True):
+ if not port:
+ port = self.port
+ if self.port_opened:
+ return True # Port already opened
+ if check: # Check first if it's already opened
+ if self.testOpenport(port)["result"] is True:
+ return True # Port already opened
+ self.log.info("Trying to open port using UpnpPunch...")
+ try:
+ upnp_punch = UpnpPunch.open_port(self.port, 'ZeroNet')
+ upnp_punch = True
+ except Exception, err:
+ self.log.error("UpnpPunch run error: %s" % Debug.formatException(err))
+ upnp_punch = False
- # Try to open the port using upnp
- def openport(self, port=None, check=True):
- if not port: port = self.port
- if self.port_opened: return True # Port already opened
- if check: # Check first if its already opened
- if self.testOpenport(port)["result"] == True:
- return True # Port already opened
+ if upnp_punch and self.testOpenport(port)["result"] is True:
+ return True
- self.log.info("Trying to open port using UpnpPunch...")
- try:
- upnp_punch = UpnpPunch.open_port(self.port, 'ZeroNet')
- upnp_punch = True
- except Exception, err:
- self.log.error("UpnpPunch run error: %s" % Debug.formatException(err))
- upnp_punch = False
+ self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port)
+ return False
- if upnp_punch and self.testOpenport(port)["result"] == True:
- return True
+ # Test if the port is open
+ def testOpenport(self, port=None):
+ time.sleep(1) # Wait for port open
+ if not port:
+ port = self.port
+ back = self.testOpenportPortchecker(port)
+ if back["result"] is True: # Successful port check
+ return back
+ else: # Alternative port checker
+ return self.testOpenportCanyouseeme(port)
- self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port)
- return False
+ def testOpenportPortchecker(self, port=None):
+ self.log.info("Checking port %s using portchecker.co..." % port)
+ try:
+ data = urllib2.urlopen("http://portchecker.co/check", "port=%s" % port, timeout=20.0).read()
+ message = re.match('.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1)
+ message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
+ except Exception, err:
+ message = "Error: %s" % Debug.formatException(err)
+ data = ""
+ if "closed" in message or "Error" in message:
+ self.log.info("[BAD :(] Port closed: %s" % message)
+ if port == self.port:
+ self.port_opened = False # Self port, update port_opened status
+ match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message
+ if match: # Found my ip in message
+ config.ip_external = match.group(1)
+ SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
+ else:
+ config.ip_external = False
+ return {"result": False, "message": message}
+ else:
+ self.log.info("[OK :)] Port open: %s" % message)
+ if port == self.port: # Self port, update port_opened status
+ self.port_opened = True
+ match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message
+ if match: # Found my ip in message
+ config.ip_external = match.group(1)
+ SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
+ else:
+ config.ip_external = False
+ return {"result": True, "message": message}
- # Test if the port is open
- def testOpenport(self, port = None):
- time.sleep(1) # Wait for port open
- if not port: port = self.port
- back = self.testOpenportPortchecker(port)
- if back["result"] == True: # Successful port check
- return back
- else: # Alternative port checker
- return self.testOpenportCanyouseeme(port)
+ def testOpenportCanyouseeme(self, port=None):
+ self.log.info("Checking port %s using canyouseeme.org..." % port)
+ try:
+ data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
+ message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
+ message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags
+ except Exception, err:
+ message = "Error: %s" % Debug.formatException(err)
+ if "Error" in message:
+ self.log.info("[BAD :(] Port closed: %s" % message)
+ if port == self.port:
+ self.port_opened = False # Self port, update port_opened status
+ match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message
+ if match: # Found my ip in message
+ config.ip_external = match.group(1)
+ SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
+ else:
+ config.ip_external = False
+ return {"result": False, "message": message}
+ else:
+ self.log.info("[OK :)] Port open: %s" % message)
+ if port == self.port: # Self port, update port_opened status
+ self.port_opened = True
+ match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message
+ if match: # Found my ip in message
+ config.ip_external = match.group(1)
+ SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
+ else:
+ config.ip_external = False
+ return {"result": True, "message": message}
- def testOpenportPortchecker(self, port = None):
- self.log.info("Checking port %s using portchecker.co..." % port)
- try:
- data = urllib2.urlopen("http://portchecker.co/check", "port=%s" % port, timeout=20.0).read()
- message = re.match('.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1)
- message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
- except Exception, err:
- message = "Error: %s" % Debug.formatException(err)
- data = ""
- if "closed" in message or "Error" in message:
- self.log.info("[BAD :(] Port closed: %s" % message)
- if port == self.port:
- self.port_opened = False # Self port, update port_opened status
- match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message
- if match: # Found my ip in message
- config.ip_external = match.group(1)
- SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
- else:
- config.ip_external = False
- return {"result": False, "message": message}
- else:
- self.log.info("[OK :)] Port open: %s" % message)
- if port == self.port: # Self port, update port_opened status
- self.port_opened = True
- match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message
- if match: # Found my ip in message
- config.ip_external = match.group(1)
- SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
- else:
- config.ip_external = False
- return {"result": True, "message": message}
+ # Set external ip without testing
+ def setIpExternal(self, ip_external):
+ logging.info("Setting external ip without testing: %s..." % ip_external)
+ config.ip_external = ip_external
+ self.port_opened = True
+ # Check site file integrity
+ def checkSite(self, site):
+ if site.settings["serving"]:
+ site.announce() # Announce site to tracker
+ site.update() # Update site's content.json and download changed files
+ if self.port_opened is False: # In passive mode keep 5 active peer connections to get the updates
+ site.needConnections()
- def testOpenportCanyouseeme(self, port = None):
- self.log.info("Checking port %s using canyouseeme.org..." % port)
- try:
- data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
- message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
- message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags
- except Exception, err:
- message = "Error: %s" % Debug.formatException(err)
- if "Error" in message:
- self.log.info("[BAD :(] Port closed: %s" % message)
- if port == self.port:
- self.port_opened = False # Self port, update port_opened status
- match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message
- if match: # Found my ip in message
- config.ip_external = match.group(1)
- SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
- else:
- config.ip_external = False
- return {"result": False, "message": message}
- else:
- self.log.info("[OK :)] Port open: %s" % message)
- if port == self.port: # Self port, update port_opened status
- self.port_opened = True
- match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message
- if match: # Found my ip in message
- config.ip_external = match.group(1)
- SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
- else:
- config.ip_external = False
- return {"result": True, "message": message}
+ # Check sites integrity
+ def checkSites(self):
+ if self.port_opened is None: # Test and open port if not tested yet
+ self.openport()
+ self.log.debug("Checking sites integrity..")
+ for address, site in self.sites.items(): # Check sites integrity
+ gevent.spawn(self.checkSite, site) # Check in new thread
+ time.sleep(2) # Prevent too quick request
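+ # Drop the loop reference so the last Site object isn't kept alive by this greenlet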
+ site = None
- # Set external ip without testing
- def setIpExternal(self, ip_external):
- logging.info("Setting external ip without testing: %s..." % ip_external)
- config.ip_external = ip_external
- self.port_opened = True
+ # Announce sites every 20 min
+ def announceSites(self):
+ import gc
+ while 1:
+ time.sleep(20 * 60) # Announce sites every 20 min
+ for address, site in self.sites.items():
+ if site.settings["serving"]:
+ site.announce() # Announce site to tracker
+ # Reset bad file retry counter
+ for inner_path in site.bad_files:
+ site.bad_files[inner_path] = 0
- # Check site file integrity
- def checkSite(self, site):
- if site.settings["serving"]:
- site.announce() # Announce site to tracker
- site.update() # Update site's content.json and download changed files
- if self.port_opened == False: # In passive mode keep 5 active peer connection to get the updates
- site.needConnections()
+ # Retry failed files
+ if site.bad_files:
+ site.retryBadFiles()
+ # In passive mode keep 5 active peer connections to get the updates
+ if self.port_opened is False:
+ site.needConnections()
- # Check sites integrity
- def checkSites(self):
- if self.port_opened == None: # Test and open port if not tested yet
- self.openport()
+ time.sleep(2) # Prevent too quick request
- self.log.debug("Checking sites integrity..")
- for address, site in self.sites.items(): # Check sites integrity
- gevent.spawn(self.checkSite, site) # Check in new thread
- time.sleep(2) # Prevent too quick request
- site = None
+ site = None
+ gc.collect() # Explicit garbage collection
+ # Detect if the computer came back from sleep
+ def wakeupWatcher(self):
+ last_time = time.time()
+ while 1:
+ time.sleep(30)
+ if time.time() - last_time > 60: # If more than 60 seconds passed, the computer was in sleep mode
+ self.log.info(
+ "Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." %
+ (last_time, time.time(), time.time() - last_time)
+ )
+ self.port_opened = None # Re-check if we still have the open port on the router
+ self.checkSites()
+ last_time = time.time()
- # Announce sites every 20 min
- def announceSites(self):
- import gc
- while 1:
- time.sleep(20*60) # Announce sites every 20 min
- for address, site in self.sites.items():
- if site.settings["serving"]:
- site.announce() # Announce site to tracker
+ # Bind and start serving sites
+ def start(self, check_sites=True):
+ self.log = logging.getLogger("FileServer")
- # Reset bad file retry counter
- for inner_path in site.bad_files:
- site.bad_files[inner_path] = 0
+ if config.debug:
+ # Auto reload FileRequest on change
+ from Debug import DebugReloader
+ DebugReloader(self.reload)
- # Retry failed files
- if site.bad_files:
- site.retryBadFiles()
-
- # In passive mode keep 5 active peer connection to get the updates
- if self.port_opened == False:
- site.needConnections()
+ if check_sites: # Open port, Update sites, Check files integrity
+ gevent.spawn(self.checkSites)
- time.sleep(2) # Prevent too quick request
+ thread_announce_sites = gevent.spawn(self.announceSites)
+ thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher)
- site = None
- gc.collect() # Implicit grabage collection
+ ConnectionServer.start(self)
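+ # ConnectionServer.start blocks while serving; the lines below only run on shutdown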
-
- # Detects if computer back from wakeup
- def wakeupWatcher(self):
- last_time = time.time()
- while 1:
- time.sleep(30)
- if time.time()-last_time > 60: # If taken more than 60 second then the computer was in sleep mode
- self.log.info("Wakeup detected: time wrap from %s to %s (%s sleep seconds), acting like startup..." % (last_time, time.time(), time.time()-last_time))
- self.port_opened = None # Check if we still has the open port on router
- self.checkSites()
- last_time = time.time()
-
-
- # Bind and start serving sites
- def start(self, check_sites = True):
- self.log = logging.getLogger("FileServer")
-
- if config.debug:
- # Auto reload FileRequest on change
- from Debug import DebugReloader
- DebugReloader(self.reload)
-
- if check_sites: # Open port, Update sites, Check files integrity
- gevent.spawn(self.checkSites)
-
- thread_announce_sites = gevent.spawn(self.announceSites)
- thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher)
-
- ConnectionServer.start(self)
-
- # thread_wakeup_watcher.kill(exception=Debug.Notify("Stopping FileServer"))
- # thread_announce_sites.kill(exception=Debug.Notify("Stopping FileServer"))
- self.log.debug("Stopped.")
+ # thread_wakeup_watcher.kill(exception=Debug.Notify("Stopping FileServer"))
+ # thread_announce_sites.kill(exception=Debug.Notify("Stopping FileServer"))
+ self.log.debug("Stopped.")
diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py
index 74a2d97d..7003ced0 100644
--- a/src/Peer/Peer.py
+++ b/src/Peer/Peer.py
@@ -1,8 +1,14 @@
-import os, logging, gevent, time, msgpack, sys, random, socket, struct
+import logging
+import gevent
+import time
+import sys
+import socket
+import struct
+
from cStringIO import StringIO
-from Config import config
from Debug import Debug
+
# Communicate remote peers
class Peer(object):
__slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response",
@@ -49,7 +55,8 @@ class Peer(object):
self.connection = self.connection_server.getConnection(self.ip, self.port)
except Exception, err:
self.onConnectionError()
- self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed))
+ self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
+ (Debug.formatException(err), self.connection_error, self.hash_failed))
self.connection = None
# Check if we have connection to peer
@@ -57,7 +64,7 @@ class Peer(object):
if self.connection and self.connection.connected: # We have connection to peer
return self.connection
else: # Try to find from other sites connections
- self.connection = self.connection_server.getConnection(self.ip, self.port, create=False) # Do not create new connection if not found
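+ # Do not create a new connection if none is found (create=False)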
+ self.connection = self.connection_server.getConnection(self.ip, self.port, create=False)
return self.connection
def __str__(self):
@@ -68,7 +75,7 @@ class Peer(object):
# Peer ip:port to packed 6byte format
def packAddress(self):
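+ # Packed format: 4 bytes from inet_aton followed by a 2-byte port packed with struct "H"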
- return socket.inet_aton(self.ip)+struct.pack("H", self.port)
+ return socket.inet_aton(self.ip) + struct.pack("H", self.port)
def unpackAddress(self, packed):
return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]
@@ -83,22 +90,17 @@ class Peer(object):
self.connect()
if not self.connection:
self.onConnectionError()
- return None # Connection failed
+ return None # Connection failed
- #if cmd != "ping" and self.last_response and time.time() - self.last_response > 20*60: # If last response if older than 20 minute, ping first to see if still alive
- # if not self.ping(): return None
-
- for retry in range(1,3): # Retry 3 times
- #if config.debug_socket: self.log.debug("sendCmd: %s %s" % (cmd, params.get("inner_path")))
+ for retry in range(1, 3): # Retry 2 times
try:
response = self.connection.request(cmd, params)
if not response:
raise Exception("Send error")
- #if config.debug_socket: self.log.debug("Got response to: %s" % cmd)
if "error" in response:
self.log("%s error: %s" % (cmd, response["error"]))
self.onConnectionError()
- else: # Successful request, reset connection error num
+ else: # Successful request, reset connection error num
self.connection_error = 0
self.last_response = time.time()
return response
@@ -108,10 +110,11 @@ class Peer(object):
break
else:
self.onConnectionError()
- self.log("%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err),
- self.connection_error,
- self.hash_failed, retry))
- time.sleep(1*retry)
+ self.log(
+ "%s (connection_error: %s, hash_failed: %s, retry: %s)" %
+ (Debug.formatException(err), self.connection_error, self.hash_failed, retry)
+ )
+ time.sleep(1 * retry)
self.connect()
return None # Failed after 4 retry
@@ -121,7 +124,8 @@ class Peer(object):
buff = StringIO()
s = time.time()
while True: # Read in 512k parts
- back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location}) # Get file content from last location
+ back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location})
+
if not back or "body" not in back: # Error
return False
@@ -145,7 +149,7 @@ class Peer(object):
response = self.request("ping")
if response and "body" in response and response["body"] == "Pong!":
- response_time = time.time()-s
+ response_time = time.time() - s
break # All fine, exit from for loop
# Timeout reached or bad response
self.onConnectionError()
@@ -185,7 +189,8 @@ class Peer(object):
# Stop and remove from site
def remove(self):
self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed))
- if self.site and self.key in self.site.peers: del(self.site.peers[self.key])
+ if self.site and self.key in self.site.peers:
+ del(self.site.peers[self.key])
if self.connection:
self.connection.close()
diff --git a/src/Plugin/PluginManager.py b/src/Plugin/PluginManager.py
index 3f050e9c..c1069da7 100644
--- a/src/Plugin/PluginManager.py
+++ b/src/Plugin/PluginManager.py
@@ -1,99 +1,108 @@
-import logging, os, sys
+import logging
+import os
+import sys
+
from Debug import Debug
from Config import config
+
class PluginManager:
- def __init__(self):
- self.log = logging.getLogger("PluginManager")
- self.plugin_path = "plugins" # Plugin directory
- self.plugins = {} # Registered plugins (key: class name, value: list of plugins for class)
- self.plugin_names = [] # Loaded plugin names
- sys.path.append(self.plugin_path)
+ def __init__(self):
+ self.log = logging.getLogger("PluginManager")
+ self.plugin_path = "plugins" # Plugin directory
+ self.plugins = {} # Registered plugins (key: class name, value: list of plugins for class)
+ self.plugin_names = [] # Loaded plugin names
+
+ sys.path.append(self.plugin_path)
+
+ if config.debug: # Auto reload Plugins on file change
+ from Debug import DebugReloader
+ DebugReloader(self.reloadPlugins)
+
+ # -- Load / Unload --
+
+ # Load all plugins
+ def loadPlugins(self):
+ for dir_name in os.listdir(self.plugin_path):
+ dir_path = os.path.join(self.plugin_path, dir_name)
+ if dir_name.startswith("disabled"):
+ continue # Don't load if disabled
+ if not os.path.isdir(dir_path):
+ continue # Don't load if not a directory
+ if dir_name.startswith("Debug") and not config.debug:
+ continue # Only load in debug mode if module name starts with Debug
+ self.log.debug("Loading plugin: %s" % dir_name)
+ try:
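+ # Importing the plugin package runs its __init__.py, where the plugin classes register themselves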
+ __import__(dir_name)
+ except Exception, err:
+ self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err)))
+ if dir_name not in self.plugin_names:
+ self.plugin_names.append(dir_name)
+
+ # Reload all plugins
+ def reloadPlugins(self):
+ self.plugins = {} # Reset registered plugins
+ for module_name, module in sys.modules.items():
+ if module and "__file__" in dir(module) and self.plugin_path in module.__file__: # Module file within plugin_path
+ if "allow_reload" not in dir(module) or module.allow_reload: # Check if reload disabled
+ try:
+ reload(module)
+ except Exception, err:
+ self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err)))
+
+ self.loadPlugins() # Load new plugins
- if config.debug: # Auto reload Plugins on file change
- from Debug import DebugReloader
- DebugReloader(self.reloadPlugins)
-
-
- # -- Load / Unload --
-
- # Load all plugin
- def loadPlugins(self):
- for dir_name in os.listdir(self.plugin_path):
- dir_path = os.path.join(self.plugin_path, dir_name)
- if dir_name.startswith("disabled"): continue # Dont load if disabled
- if not os.path.isdir(dir_path): continue # Dont load if not dir
- if dir_name.startswith("Debug") and not config.debug: continue # Only load in debug mode if module name starts with Debug
- self.log.debug("Loading plugin: %s" % dir_name)
- try:
- __import__(dir_name)
- except Exception, err:
- self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err)))
- if dir_name not in self.plugin_names: self.plugin_names.append(dir_name)
-
-
- # Reload all plugins
- def reloadPlugins(self):
- self.plugins = {} # Reset registered plugins
- for module_name, module in sys.modules.items():
- if module and "__file__" in dir(module) and self.plugin_path in module.__file__: # Module file within plugin_path
- if "allow_reload" not in dir(module) or module.allow_reload: # Check if reload disabled
- try:
- reload(module)
- except Exception, err:
- self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err)))
-
- self.loadPlugins() # Load new plugins
-
-
-plugin_manager = PluginManager() # Singletone
+plugin_manager = PluginManager() # Singleton
# -- Decorators --
# Accept plugin to class decorator
+
+
def acceptPlugins(base_class):
- class_name = base_class.__name__
- if class_name in plugin_manager.plugins: # Has plugins
- classes = plugin_manager.plugins[class_name][:] # Copy the current plugins
- classes.reverse()
- classes.append(base_class) # Add the class itself to end of inherience line
- PluginedClass = type(class_name, tuple(classes), dict()) # Create the plugined class
- plugin_manager.log.debug("New class accepts plugins: %s (Loaded plugins: %s)" % (class_name, classes))
- else: # No plugins just use the original
- PluginedClass = base_class
- return PluginedClass
+ class_name = base_class.__name__
+ if class_name in plugin_manager.plugins: # Has plugins
+ classes = plugin_manager.plugins[class_name][:] # Copy the current plugins
+ classes.reverse()
+ classes.append(base_class) # Add the class itself to the end of the inheritance line
+ plugined_class = type(class_name, tuple(classes), dict()) # Create the plugined class
+ plugin_manager.log.debug("New class accepts plugins: %s (Loaded plugins: %s)" % (class_name, classes))
+ else: # No plugins just use the original
+ plugined_class = base_class
+ return plugined_class
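+
+# Example: with plugins [A, B] registered to "UiRequest", the created class is
+# type("UiRequest", (B, A, UiRequest), {}), so later-registered plugins come first in the MRO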
# Register plugin to class name decorator
def registerTo(class_name):
- plugin_manager.log.debug("New plugin registered to: %s" % class_name)
- if class_name not in plugin_manager.plugins: plugin_manager.plugins[class_name] = []
-
- def classDecorator(self):
- plugin_manager.plugins[class_name].append(self)
- return self
- return classDecorator
+ plugin_manager.log.debug("New plugin registered to: %s" % class_name)
+ if class_name not in plugin_manager.plugins:
+ plugin_manager.plugins[class_name] = []
+ def classDecorator(self):
+ plugin_manager.plugins[class_name].append(self)
+ return self
+ return classDecorator
# - Example usage -
if __name__ == "__main__":
- @registerTo("Request")
- class RequestPlugin(object):
- def actionMainPage(self, path):
- return "Hello MainPage!"
+ @registerTo("Request")
+ class RequestPlugin(object):
+ def actionMainPage(self, path):
+ return "Hello MainPage!"
- @accept
- class Request(object):
- def route(self, path):
- func = getattr(self, "action"+path, None)
- if func:
- return func(path)
- else:
- return "Can't route to", path
+ @acceptPlugins
+ class Request(object):
- print Request().route("MainPage")
+ def route(self, path):
+ func = getattr(self, "action" + path, None)
+ if func:
+ return func(path)
+ else:
+ return "Can't route to", path
+
+ print Request().route("MainPage")
diff --git a/src/Site/Site.py b/src/Site/Site.py
index 9ea474f3..3010dca3 100644
--- a/src/Site/Site.py
+++ b/src/Site/Site.py
@@ -1,670 +1,708 @@
-import os, json, logging, hashlib, re, time, string, random, sys, binascii, struct, socket, urllib, urllib2
-from lib.subtl.subtl import UdpTrackerClient
-from lib import bencode
+import os
+import json
+import logging
+import hashlib
+import re
+import time
+import string
+import random
+import sys
+import binascii
+import struct
+import socket
+import urllib
+import urllib2
+
import gevent
+
import util
+from lib import bencode
+from lib.subtl.subtl import UdpTrackerClient
from Config import config
from Peer import Peer
from Worker import WorkerManager
-from Crypt import CryptHash
from Debug import Debug
from Content import ContentManager
from SiteStorage import SiteStorage
import SiteManager
+
class Site:
- def __init__(self, address, allow_create=True):
- self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure its correct address
- self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging
- self.log = logging.getLogger("Site:%s" % self.address_short)
-
- self.content = None # Load content.json
- self.peers = {} # Key: ip:port, Value: Peer.Peer
- self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself)
- self.last_announce = 0 # Last announce time to tracker
- self.worker_manager = WorkerManager(self) # Handle site download from other peers
- self.bad_files = {} # SHA512 check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept)
- self.content_updated = None # Content.js update time
- self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout]
- self.page_requested = False # Page viewed in browser
-
- self.storage = SiteStorage(self, allow_create=allow_create) # Save and load site files
- self.loadSettings() # Load settings from sites.json
- self.content_manager = ContentManager(self) # Load contents
-
- if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed)
- self.settings["auth_key"] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(24))
- self.log.debug("New auth key: %s" % self.settings["auth_key"])
- self.saveSettings()
-
- if not self.settings.get("wrapper_key"): # To auth websocket permissions
- self.settings["wrapper_key"] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12))
- self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"])
- self.saveSettings()
-
- self.websockets = [] # Active site websocket connections
-
- # Add event listeners
- self.addEventListeners()
-
-
-
- def __str__(self):
- return "Site %s" % self.address_short
-
-
- def __repr__(self):
- return "<%s>" % self.__str__()
-
-
- # Load site settings from data/sites.json
- def loadSettings(self):
- sites_settings = json.load(open("%s/sites.json" % config.data_dir))
- if self.address in sites_settings:
- self.settings = sites_settings[self.address]
- else:
- if self.address == config.homepage: # Add admin permissions to homepage
- permissions = ["ADMIN"]
- else:
- permissions = []
- self.settings = { "own": False, "serving": True, "permissions": permissions } # Default
- return
-
-
- # Save site settings to data/sites.json
- def saveSettings(self):
- sites_settings = json.load(open("%s/sites.json" % config.data_dir))
- sites_settings[self.address] = self.settings
- open("%s/sites.json" % config.data_dir, "w").write(json.dumps(sites_settings, indent=2, sort_keys=True))
- return
-
-
- # Max site size in MB
- def getSizeLimit(self):
- return self.settings.get("size_limit", config.size_limit)
-
-
- # Next size limit based on current size
- def getNextSizeLimit(self):
- size_limits = [10,20,50,100,200,500,1000,2000,5000,10000,20000,50000,100000]
- size = self.settings.get("size", 0)
- for size_limit in size_limits:
- if size*1.2 < size_limit*1024*1024:
- return size_limit
- return 999999
-
-
-
- # Download all file from content.json
- def downloadContent(self, inner_path, download_files=True, peer=None):
- s = time.time()
- self.log.debug("Downloading %s..." % inner_path)
- found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
- content_inner_dir = self.content_manager.toDir(inner_path)
- if not found: return False # Could not download content.json
-
- self.log.debug("Got %s" % inner_path)
- changed = self.content_manager.loadContent(inner_path, load_includes=False)
-
- # Start download files
- file_threads = []
- if download_files:
- for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
- file_inner_path = content_inner_dir+file_relative_path
- res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer) # No waiting for finish, return the event
- if res != True: # Need downloading
- file_threads.append(res) # Append evt
-
- # Wait for includes download
- include_threads = []
- for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
- file_inner_path = content_inner_dir+file_relative_path
- include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
- include_threads.append(include_thread)
-
- self.log.debug("%s: Downloading %s includes..." % (inner_path, len(include_threads)))
- gevent.joinall(include_threads)
- self.log.debug("%s: Includes downloaded" % inner_path)
-
- self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed)))
- gevent.joinall(file_threads)
- self.log.debug("%s: All file downloaded in %.2fs" % (inner_path, time.time()-s))
-
- return True
-
-
- # Return bad files with less than 3 retry
- def getReachableBadFiles(self):
- if not self.bad_files: return False
- return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3]
-
-
- # Retry download bad files
- def retryBadFiles(self):
- for bad_file in self.bad_files.keys():
- self.needFile(bad_file, update=True, blocking=False)
-
-
- # Download all files of the site
- @util.Noparallel(blocking=False)
- def download(self, check_size=False):
- self.log.debug("Start downloading...%s" % self.bad_files)
- gevent.spawn(self.announce)
- if check_size: # Check the size first
- valid = downloadContent(download_files=False) # Just download content.json files
- if not valid: return False # Cant download content.jsons or size is not fits
-
- # Download everything
- found = self.downloadContent("content.json")
- self.checkModifications(0) # Download multiuser blind includes
-
- return found
-
-
- # Update worker, try to find client that supports listModifications command
- def updater(self, peers_try, queried, since):
- while 1:
- if not peers_try or len(queried) >= 3: # Stop after 3 successful query
- break
- peer = peers_try.pop(0)
- if not peer.connection and len(queried) < 2: peer.connect() # Only open new connection if less than 2 queried already
- if not peer.connection or peer.connection.handshake.get("rev",0) < 126: continue # Not compatible
- res = peer.listModified(since)
- if not res or not "modified_files" in res: continue # Failed query
-
- queried.append(peer)
- for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we
- content = self.content_manager.contents.get(inner_path)
- if not content or modified > content["modified"]: # We dont have this file or we have older
- self.bad_files[inner_path] = self.bad_files.get(inner_path, 0)+1 # Mark as bad file
- gevent.spawn(self.downloadContent, inner_path) # Download the content.json + the changed files
-
-
- # Check modified content.json files from peers and add modified files to bad_files
- # Return: Successfully queried peers [Peer, Peer...]
- def checkModifications(self, since=None):
- peers_try = [] # Try these peers
- queried = [] # Successfully queried from these peers
-
- peers = self.peers.values()
- random.shuffle(peers)
- for peer in peers: # Try to find connected good peers, but we must have at least 5 peers
- if peer.findConnection() and peer.connection.handshake.get("rev",0) > 125: # Add to the beginning if rev125
- peers_try.insert(0, peer)
- elif len(peers_try) < 5: # Backup peers, add to end of the try list
- peers_try.append(peer)
-
- if since == None: # No since definied, download from last modification time-1day
- since = self.settings.get("modified", 60*60*24)-60*60*24
- self.log.debug("Try to get listModifications from peers: %s since: %s" % (peers_try, since))
-
- updaters = []
- for i in range(3):
- updaters.append(gevent.spawn(self.updater, peers_try, queried, since))
-
- gevent.joinall(updaters, timeout=5) # Wait 5 sec to workers
- time.sleep(0.1)
- self.log.debug("Queried listModifications from: %s" % queried)
- return queried
-
-
- # Update content.json from peers and download changed files
- # Return: None
- @util.Noparallel()
- def update(self, announce=False):
- self.content_manager.loadContent("content.json") # Reload content.json
- self.content_updated = None # Reset content updated time
- self.updateWebsocket(updating=True)
- if announce: self.announce()
-
- queried = self.checkModifications()
-
- if not queried: # Not found any client that supports listModifications
- self.log.debug("Fallback to old-style update")
- self.redownloadContents()
-
- if not self.settings["own"]: self.storage.checkFiles(quick_check=True) # Quick check files based on file size
-
- changed = self.content_manager.loadContent("content.json")
- if changed:
- for changed_file in changed:
- self.bad_files[changed_file] = self.bad_files.get(changed_file, 0)+1
-
- if self.bad_files:
- self.download()
-
- self.settings["size"] = self.content_manager.getTotalSize() # Update site size
- self.updateWebsocket(updated=True)
-
-
- # Update site by redownload all content.json
- def redownloadContents(self):
-
- # Download all content.json again
- content_threads = []
- for inner_path in self.content_manager.contents.keys():
- content_threads.append(self.needFile(inner_path, update=True, blocking=False))
-
- self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
- gevent.joinall(content_threads)
-
-
- # Publish worker
- def publisher(self, inner_path, peers, published, limit, event_done=None):
- file_size = self.storage.getSize(inner_path)
- body = self.storage.read(inner_path)
- while 1:
- if not peers or len(published) >= limit:
- if event_done: event_done.set(True)
- break # All peers done, or published engouht
- peer = peers.pop(0)
- if peer.connection and peer.connection.last_ping_delay: # Peer connected
- timeout = timeout = 5+int(file_size/1024)+peer.connection.last_ping_delay # Timeout: 5sec + size in kb + last_ping
- else:
- timeout = timeout = 5+int(file_size/1024) # Timeout: 5sec + size in kb
- result = {"exception": "Timeout"}
-
- for retry in range(2):
- try:
- with gevent.Timeout(timeout, False):
- result = peer.request("update", {
- "site": self.address,
- "inner_path": inner_path,
- "body": body,
- "peer": (config.ip_external, config.fileserver_port)
- })
- if result: break
- except Exception, err:
- result = {"exception": Debug.formatException(err)}
-
- if result and "ok" in result:
- published.append(peer)
- self.log.info("[OK] %s: %s" % (peer.key, result["ok"]))
- else:
- if result == {"exception": "Timeout"}: peer.onConnectionError()
- self.log.info("[FAILED] %s: %s" % (peer.key, result))
-
-
- # Update content.json on peers
- @util.Noparallel()
- def publish(self, limit=5, inner_path="content.json"):
- self.log.info( "Publishing to %s/%s peers..." % (min(len(self.peers), limit), len(self.peers)) )
- published = [] # Successfully published (Peer)
- publishers = [] # Publisher threads
- peers = self.peers.values()
- if not peers: return 0 # No peers found
-
- random.shuffle(peers)
- event_done = gevent.event.AsyncResult()
- for i in range(min(len(self.peers), limit, 5)): # Max 5 thread
- publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, event_done)
- publishers.append(publisher)
-
- event_done.get() # Wait for done
- if len(published) < min(len(self.peers), limit): time.sleep(0.2) # If less than we need sleep a bit
- if len(published) == 0: gevent.joinall(publishers) # No successful publish, wait for all publisher
-
- # Make sure the connected passive peers got the update
- passive_peers = [peer for peer in peers if peer.connection and not peer.connection.closed and peer.key.endswith(":0") and peer not in published] # Every connected passive peer that we not published to
- for peer in passive_peers:
- gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=10)
-
- self.log.info("Successfuly published to %s peers, publishing to %s more passive peers" % (len(published), len(passive_peers)) )
- return len(published)
-
-
- # Copy this site
- def clone(self, address, privatekey=None, address_index=None, overwrite=False):
- import shutil
- new_site = SiteManager.site_manager.need(address, all_file=False)
- default_dirs = [] # Dont copy these directories (has -default version)
- for dir_name in os.listdir(self.storage.directory):
- if "-default" in dir_name:
- default_dirs.append(dir_name.replace("-default", ""))
-
- self.log.debug("Cloning to %s, ignore dirs: %s" % (address, default_dirs))
-
- # Copy root content.json
- if not new_site.storage.isFile("content.json") and not overwrite: # Content.json not exist yet, create a new one from source site
- content_json = self.storage.loadJson("content.json")
- if "domain" in content_json:
- del content_json["domain"]
- content_json["title"] = "my"+content_json["title"]
- content_json["cloned_from"] = self.address
- if address_index: content_json["address_index"] = address_index # Site owner's BIP32 index
- new_site.storage.writeJson("content.json", content_json)
- new_site.content_manager.loadContent("content.json", add_bad_files=False, load_includes=False)
-
- # Copy files
- for content_inner_path, content in self.content_manager.contents.items():
- for file_relative_path in sorted(content["files"].keys()):
- file_inner_path = self.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
- file_inner_path = file_inner_path.strip("/") # Strip leading /
- if file_inner_path.split("/")[0] in default_dirs: # Dont copy directories that has -default postfixed alternative
- self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path)
- continue
- file_path = self.storage.getPath(file_inner_path)
-
- # Copy the file normally to keep the -default postfixed dir and file to allow cloning later
- file_path_dest = new_site.storage.getPath(file_inner_path)
- self.log.debug("[COPY] %s to %s..." % (file_inner_path, file_path_dest))
- dest_dir = os.path.dirname(file_path_dest)
- if not os.path.isdir(dest_dir): os.makedirs(dest_dir)
- shutil.copy(file_path, file_path_dest)
-
- # If -default in path, create a -default less copy of the file
- if "-default" in file_inner_path:
- file_path_dest = new_site.storage.getPath(file_inner_path.replace("-default", ""))
- if new_site.storage.isFile(file_path_dest) and not overwrite: # Don't overwrite site files with default ones
- self.log.debug("[SKIP] Default file: %s (already exist)" % file_inner_path)
- continue
- self.log.debug("[COPY] Default file: %s to %s..." % (file_inner_path, file_path_dest))
- dest_dir = os.path.dirname(file_path_dest)
- if not os.path.isdir(dest_dir): os.makedirs(dest_dir)
- shutil.copy(file_path, file_path_dest)
- # Sign if content json
- if file_path_dest.endswith("/content.json"):
- new_site.storage.onUpdated(file_inner_path.replace("-default", ""))
- new_site.content_manager.loadContent(file_inner_path.replace("-default", ""), add_bad_files=False, load_includes=False)
- if privatekey: new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey)
-
- if privatekey: new_site.content_manager.sign("content.json", privatekey)
-
-
- # Rebuild DB
- if new_site.storage.isFile("dbschema.json"): new_site.storage.rebuildDb()
-
- return new_site
-
-
- # Check and download if file not exist
- def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0):
- if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything
- return True
- elif self.settings["serving"] == False: # Site not serving
- return False
- else: # Wait until file downloaded
- self.bad_files[inner_path] = True # Mark as bad file
- if not self.content_manager.contents.get("content.json"): # No content.json, download it first!
- self.log.debug("Need content.json first")
- gevent.spawn(self.announce)
- if inner_path != "content.json": # Prevent double download
- task = self.worker_manager.addTask("content.json", peer)
- task.get()
- self.content_manager.loadContent()
- if not self.content_manager.contents.get("content.json"): return False # Content.json download failed
-
- if not inner_path.endswith("content.json") and not self.content_manager.getFileInfo(inner_path): # No info for file, download all content.json first
- self.log.debug("No info for %s, waiting for all content.json" % inner_path)
- success = self.downloadContent("content.json", download_files=False)
- if not success: return False
- if not self.content_manager.getFileInfo(inner_path): return False # Still no info for file
-
-
- task = self.worker_manager.addTask(inner_path, peer, priority=priority)
- if blocking:
- return task.get()
- else:
- return task
-
-
- # Add or update a peer to site
- def addPeer(self, ip, port, return_peer = False):
- if not ip: return False
- if (ip, port) in self.peer_blacklist: return False # Ignore blacklist (eg. myself)
- key = "%s:%s" % (ip, port)
- if key in self.peers: # Already has this ip
- #self.peers[key].found()
- if return_peer: # Always return peer
- return self.peers[key]
- else:
- return False
- else: # New peer
- peer = Peer(ip, port, self)
- self.peers[key] = peer
- return peer
-
-
- # Gather peer from connected peers
- @util.Noparallel(blocking=False)
- def announcePex(self, query_num=2, need_num=5):
- peers = [peer for peer in self.peers.values() if peer.connection and peer.connection.connected] # Connected peers
- if len(peers) == 0: # Small number of connected peers for this site, connect to any
- self.log.debug("Small number of peers detected...query all of peers using pex")
- peers = self.peers.values()
- need_num = 10
-
- random.shuffle(peers)
- done = 0
- added = 0
- for peer in peers:
- if peer.connection: # Has connection
- if "port_opened" in peer.connection.handshake: # This field added recently, so probably has has peer exchange
- res = peer.pex(need_num=need_num)
- else:
- res = False
- else: # No connection
- res = peer.pex(need_num=need_num)
- if type(res) == int: # We have result
- done += 1
- added += res
- if res:
- self.worker_manager.onPeers()
- self.updateWebsocket(peers_added=res)
- if done == query_num: break
- self.log.debug("Queried pex from %s peers got %s new peers." % (done, added))
-
-
- # Gather peers from tracker
- # Return: Complete time or False on error
- def announceTracker(self, protocol, ip, port, fileserver_port, address_hash, my_peer_id):
- s = time.time()
- if protocol == "udp": # Udp tracker
- if config.disable_udp: return False # No udp supported
- tracker = UdpTrackerClient(ip, port)
- tracker.peer_port = fileserver_port
- try:
- tracker.connect()
- tracker.poll_once()
- tracker.announce(info_hash=address_hash, num_want=50)
- back = tracker.poll_once()
- peers = back["response"]["peers"]
- except Exception, err:
- return False
-
- else: # Http tracker
- params = {
- 'info_hash': binascii.a2b_hex(address_hash),
- 'peer_id': my_peer_id, 'port': fileserver_port,
- 'uploaded': 0, 'downloaded': 0, 'left': 0, 'compact': 1, 'numwant': 30,
- 'event': 'started'
- }
- req = None
- try:
- url = "http://"+ip+"?"+urllib.urlencode(params)
- # Load url
- with gevent.Timeout(10, False): # Make sure of timeout
- req = urllib2.urlopen(url, timeout=8)
- response = req.read()
- req.fp._sock.recv=None # Hacky avoidance of memory leak for older python versions
- req.close()
- req = None
- if not response:
- self.log.debug("Http tracker %s response error" % url)
- return False
- # Decode peers
- peer_data = bencode.decode(response)["peers"]
- response = None
- peer_count = len(peer_data) / 6
- peers = []
- for peer_offset in xrange(peer_count):
- off = 6 * peer_offset
- peer = peer_data[off:off + 6]
- addr, port = struct.unpack('!LH', peer)
- peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port})
- except Exception, err:
- self.log.debug("Http tracker %s error: %s" % (url, err))
- if req:
- req.close()
- req = None
- return False
-
- # Adding peers
- added = 0
- for peer in peers:
- if not peer["port"]: continue # Dont add peers with port 0
- if self.addPeer(peer["addr"], peer["port"]): added += 1
- if added:
- self.worker_manager.onPeers()
- self.updateWebsocket(peers_added=added)
- self.log.debug("Found %s peers, new: %s" % (len(peers), added))
- return time.time()-s
-
-
- # Add myself and get other peers from tracker
- def announce(self, force=False):
- if time.time() < self.last_announce+30 and not force: return # No reannouncing within 30 secs
- self.last_announce = time.time()
- errors = []
- slow = []
- address_hash = hashlib.sha1(self.address).hexdigest() # Site address hash
- my_peer_id = sys.modules["main"].file_server.peer_id
-
- if sys.modules["main"].file_server.port_opened:
- fileserver_port = config.fileserver_port
- else: # Port not opened, report port 0
- fileserver_port = 0
-
- s = time.time()
- announced = 0
- threads = []
-
- for protocol, ip, port in SiteManager.TRACKERS: # Start announce threads
- thread = gevent.spawn(self.announceTracker, protocol, ip, port, fileserver_port, address_hash, my_peer_id)
- threads.append(thread)
- thread.ip = ip
- thread.protocol = protocol
-
- gevent.joinall(threads) # Wait for announce finish
-
- for thread in threads:
- if thread.value:
- if thread.value > 1:
- slow.append("%.2fs %s://%s" % (thread.value, thread.protocol, thread.ip))
- announced += 1
- else:
- errors.append("%s://%s" % (thread.protocol, thread.ip))
-
- # Save peers num
- self.settings["peers"] = len(self.peers)
- self.saveSettings()
-
- if len(errors) < len(SiteManager.TRACKERS): # Less errors than total tracker nums
- self.log.debug("Announced port %s to %s trackers in %.3fs, errors: %s, slow: %s" % (fileserver_port, announced, time.time()-s, errors, slow))
- else:
- self.log.error("Announced to %s trackers in %.3fs, failed" % (announced, time.time()-s))
-
- if not [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]: # If no connected peer yet then wait for connections
- gevent.spawn_later(3, self.announcePex, need_num=10) # Spawn 3 secs later
- # self.onFileDone.once(lambda inner_path: self.announcePex(need_num=10), "announcePex_%s" % self.address) # After first file downloaded try to find more peers using pex
- else: # Else announce immediately
- self.announcePex()
-
-
- # Keep connections to get the updates (required for passive clients)
- def needConnections(self, num=3):
- need = min(len(self.peers), num) # Need 3 peer, but max total peers
-
- connected = 0
- for peer in self.peers.values(): # Check current connected number
- if peer.connection and peer.connection.connected:
- connected += 1
-
- self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers)))
-
- if connected < need: # Need more than we have
- for peer in self.peers.values():
- if not peer.connection or not peer.connection.connected: # No peer connection or disconnected
- peer.pex() # Initiate peer exchange
- if peer.connection and peer.connection.connected: connected += 1 # Successfully connected
- if connected >= need: break
- return connected
-
-
- # Return: Probably working, connectable Peers
- def getConnectablePeers(self, need_num=5, ignore=[]):
- peers = self.peers.values()
- random.shuffle(peers)
- found = []
- for peer in peers:
- if peer.key.endswith(":0"): continue # Not connectable
- if not peer.connection: continue # No connection
- if peer.key in ignore: continue # The requester has this peer
- if time.time() - peer.connection.last_recv_time > 60*60*2: # Last message more than 2 hours ago
- peer.connection = None # Cleanup: Dead connection
- continue
- found.append(peer)
- if len(found) >= need_num: break # Found requested number of peers
-
- if (not found and not ignore) or (need_num > 5 and need_num < 100 and len(found) < need_num): # Not found any peer and the requester dont have any, return not that good peers or Initial pex, but not /Stats page and we can't give enought peer
- found = [peer for peer in peers if not peer.key.endswith(":0") and peer.key not in ignore][0:need_num-len(found)]
-
- return found
-
-
-
- # - Events -
-
- # Add event listeners
- def addEventListeners(self):
- self.onFileStart = util.Event() # If WorkerManager added new task
- self.onFileDone = util.Event() # If WorkerManager successfuly downloaded a file
- self.onFileFail = util.Event() # If WorkerManager failed to download a file
- self.onComplete = util.Event() # All file finished
-
- self.onFileStart.append(lambda inner_path: self.fileStarted()) # No parameters to make Noparallel batching working
- self.onFileDone.append(lambda inner_path: self.fileDone(inner_path))
- self.onFileFail.append(lambda inner_path: self.fileFailed(inner_path))
-
-
- # Send site status update to websocket clients
- def updateWebsocket(self, **kwargs):
- if kwargs:
- param = {"event": kwargs.items()[0]}
- else:
- param = None
- for ws in self.websockets:
- ws.event("siteChanged", self, param)
-
-
- # File download started
- @util.Noparallel(blocking=False)
- def fileStarted(self):
- time.sleep(0.001) # Wait for other files adds
- self.updateWebsocket(file_started=True)
-
-
- # File downloaded successful
- def fileDone(self, inner_path):
- # File downloaded, remove it from bad files
- if inner_path in self.bad_files:
- self.log.debug("Bad file solved: %s" % inner_path)
- del(self.bad_files[inner_path])
-
- # Update content.json last downlad time
- if inner_path == "content.json":
- self.content_updated = time.time()
-
- self.updateWebsocket(file_done=inner_path)
-
-
- # File download failed
- def fileFailed(self, inner_path):
- if inner_path == "content.json":
- self.content_updated = False
- self.log.debug("Can't update content.json")
- if inner_path in self.bad_files:
- self.bad_files[inner_path] = self.bad_files.get(inner_path, 0)+1
-
- self.updateWebsocket(file_failed=inner_path)
+ def __init__(self, address, allow_create=True):
+ self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure it's a valid address
+ self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging
+ self.log = logging.getLogger("Site:%s" % self.address_short)
+
+ self.content = None # Load content.json
+ self.peers = {} # Key: ip:port, Value: Peer.Peer
+ self.peer_blacklist = SiteManager.peer_blacklist # Ignore these peers (eg. myself)
+ self.last_announce = 0 # Last announce time to tracker
+ self.worker_manager = WorkerManager(self) # Handle site download from other peers
+ self.bad_files = {} # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept)
+ self.content_updated = None # content.json update time
+ self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout]
+ self.page_requested = False # Page viewed in browser
+
+ self.storage = SiteStorage(self, allow_create=allow_create) # Save and load site files
+ self.loadSettings() # Load settings from sites.json
+ self.content_manager = ContentManager(self) # Load contents
+
+ if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed)
+ self.settings["auth_key"] = ''.join(
+ random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(24)
+ )
+ self.log.debug("New auth key: %s" % self.settings["auth_key"])
+ self.saveSettings()
+
+ if not self.settings.get("wrapper_key"): # To auth websocket permissions
+ self.settings["wrapper_key"] = ''.join(
+ random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12)
+ )
+ self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"])
+ self.saveSettings()
+
+ self.websockets = [] # Active site websocket connections
+
+ # Add event listeners
+ self.addEventListeners()
+
+ def __str__(self):
+ return "Site %s" % self.address_short
+
+ def __repr__(self):
+ return "<%s>" % self.__str__()
+
+ # Load site settings from data/sites.json
+ def loadSettings(self):
+ sites_settings = json.load(open("%s/sites.json" % config.data_dir))
+ if self.address in sites_settings:
+ self.settings = sites_settings[self.address]
+ else:
+ if self.address == config.homepage: # Add admin permissions to homepage
+ permissions = ["ADMIN"]
+ else:
+ permissions = []
+ self.settings = {"own": False, "serving": True, "permissions": permissions} # Default
+ return
+
+ # Save site settings to data/sites.json
+ def saveSettings(self):
+ sites_settings = json.load(open("%s/sites.json" % config.data_dir))
+ sites_settings[self.address] = self.settings
+ open("%s/sites.json" % config.data_dir, "w").write(json.dumps(sites_settings, indent=2, sort_keys=True))
+ return
+
+ # Max site size in MB
+ def getSizeLimit(self):
+ return self.settings.get("size_limit", config.size_limit)
+
+ # Next size limit based on current size
+ def getNextSizeLimit(self):
+ size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
+ size = self.settings.get("size", 0)
+ for size_limit in size_limits:
+ if size * 1.2 < size_limit * 1024 * 1024:
+ return size_limit
+ return 999999
+
+ # Download all files from content.json
+ def downloadContent(self, inner_path, download_files=True, peer=None):
+ s = time.time()
+ self.log.debug("Downloading %s..." % inner_path)
+ found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
+ content_inner_dir = self.content_manager.toDir(inner_path)
+ if not found:
+ return False # Could not download content.json
+
+ self.log.debug("Got %s" % inner_path)
+ changed = self.content_manager.loadContent(inner_path, load_includes=False)
+
+ # Start download files
+ file_threads = []
+ if download_files:
+ for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
+ file_inner_path = content_inner_dir + file_relative_path
+ # Start download and don't wait for it to finish, return the event
+ res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
+ if res is not True and res is not False: # Need downloading and file is allowed
+ file_threads.append(res) # Append evt
+
+ # Wait for includes download
+ include_threads = []
+ for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
+ file_inner_path = content_inner_dir + file_relative_path
+ include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
+ include_threads.append(include_thread)
+
+ self.log.debug("%s: Downloading %s includes..." % (inner_path, len(include_threads)))
+ gevent.joinall(include_threads)
+ self.log.debug("%s: Includes downloaded" % inner_path)
+
+ self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed)))
+ gevent.joinall(file_threads)
+ self.log.debug("%s: All file downloaded in %.2fs" % (inner_path, time.time() - s))
+
+ return True
+
+ # Return bad files with fewer than 3 retries
+ def getReachableBadFiles(self):
+ if not self.bad_files:
+ return False
+ return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3]
+
+ # Retry download bad files
+ def retryBadFiles(self):
+ for bad_file in self.bad_files.keys():
+ self.needFile(bad_file, update=True, blocking=False)
+
+ # Download all files of the site
+ @util.Noparallel(blocking=False)
+ def download(self, check_size=False):
+ self.log.debug("Start downloading...%s" % self.bad_files)
+ gevent.spawn(self.announce)
+ if check_size: # Check the size first
+ valid = self.downloadContent("content.json", download_files=False) # Just download content.json files
+ if not valid:
+ return False # Can't download content.json files or the size does not fit
+
+ # Download everything
+ found = self.downloadContent("content.json")
+ self.checkModifications(0) # Download multiuser blind includes
+
+ return found
+
+ # Update worker, try to find client that supports listModifications command
+ def updater(self, peers_try, queried, since):
+ while 1:
+ if not peers_try or len(queried) >= 3: # Stop after 3 successful queries
+ break
+ peer = peers_try.pop(0)
+ if not peer.connection and len(queried) < 2:
+ peer.connect() # Only open new connection if less than 2 queried already
+ if not peer.connection or peer.connection.handshake.get("rev", 0) < 126:
+ continue # Not compatible
+ res = peer.listModified(since)
+ if not res or "modified_files" not in res:
+ continue # Failed query
+
+ queried.append(peer)
+ for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we
+ content = self.content_manager.contents.get(inner_path)
+ if not content or modified > content["modified"]: # We dont have this file or we have older
+ self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file
+ gevent.spawn(self.downloadContent, inner_path) # Download the content.json + the changed files
+
+ # Check modified content.json files from peers and add modified files to bad_files
+ # Return: Successfully queried peers [Peer, Peer...]
+ def checkModifications(self, since=None):
+ peers_try = [] # Try these peers
+ queried = [] # Successfully queried from these peers
+
+ peers = self.peers.values()
+ random.shuffle(peers)
+ for peer in peers: # Try to find connected good peers, but we must have at least 5 peers
+ if peer.findConnection() and peer.connection.handshake.get("rev", 0) > 125: # Add to the beginning if rev125
+ peers_try.insert(0, peer)
+ elif len(peers_try) < 5: # Backup peers, add to end of the try list
+ peers_try.append(peer)
+
+ if since is None: # No since defined, download from last modification time - 1 day
+ since = self.settings.get("modified", 60 * 60 * 24) - 60 * 60 * 24
+ self.log.debug("Try to get listModifications from peers: %s since: %s" % (peers_try, since))
+
+ updaters = []
+ for i in range(3):
+ updaters.append(gevent.spawn(self.updater, peers_try, queried, since))
+
+ gevent.joinall(updaters, timeout=5) # Wait 5 sec to workers
+ time.sleep(0.1)
+ self.log.debug("Queried listModifications from: %s" % queried)
+ return queried
+
+ # Update content.json from peers and download changed files
+ # Return: None
+ @util.Noparallel()
+ def update(self, announce=False):
+ self.content_manager.loadContent("content.json") # Reload content.json
+ self.content_updated = None # Reset content updated time
+ self.updateWebsocket(updating=True)
+ if announce:
+ self.announce()
+
+ queried = self.checkModifications()
+
+ if not queried: # Not found any client that supports listModifications
+ self.log.debug("Fallback to old-style update")
+ self.redownloadContents()
+
+ if not self.settings["own"]:
+ self.storage.checkFiles(quick_check=True) # Quick check files based on file size
+
+ changed = self.content_manager.loadContent("content.json")
+ if changed:
+ for changed_file in changed:
+ self.bad_files[changed_file] = self.bad_files.get(changed_file, 0) + 1
+
+ if self.bad_files:
+ self.download()
+
+ self.settings["size"] = self.content_manager.getTotalSize() # Update site size
+ self.updateWebsocket(updated=True)
+
+ # Update site by redownload all content.json
+ def redownloadContents(self):
+
+ # Download all content.json again
+ content_threads = []
+ for inner_path in self.content_manager.contents.keys():
+ content_threads.append(self.needFile(inner_path, update=True, blocking=False))
+
+ self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
+ gevent.joinall(content_threads)
+
+ # Publish worker
+ def publisher(self, inner_path, peers, published, limit, event_done=None):
+ file_size = self.storage.getSize(inner_path)
+ body = self.storage.read(inner_path)
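+ # The peers and published lists are shared between the parallel publisher greenlets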
+ while 1:
+ if not peers or len(published) >= limit:
+ if event_done:
+ event_done.set(True)
+ break # All peers done, or published enough
+ peer = peers.pop(0)
+ if peer.connection and peer.connection.last_ping_delay: # Peer connected
+ # Timeout: 5sec + size in kb + last_ping
+ timeout = 5 + int(file_size / 1024) + peer.connection.last_ping_delay
+ else: # Peer not connected
+ # Timeout: 5sec + size in kb
+ timeout = 5 + int(file_size / 1024)
+ result = {"exception": "Timeout"}
+
+ for retry in range(2):
+ try:
+ with gevent.Timeout(timeout, False):
+ result = peer.request("update", {
+ "site": self.address,
+ "inner_path": inner_path,
+ "body": body,
+ "peer": (config.ip_external, config.fileserver_port)
+ })
+ if result:
+ break
+ except Exception, err:
+ result = {"exception": Debug.formatException(err)}
+
+ if result and "ok" in result:
+ published.append(peer)
+ self.log.info("[OK] %s: %s" % (peer.key, result["ok"]))
+ else:
+ if result == {"exception": "Timeout"}:
+ peer.onConnectionError()
+ self.log.info("[FAILED] %s: %s" % (peer.key, result))
+
+ # Update content.json on peers
+ @util.Noparallel()
+ def publish(self, limit=5, inner_path="content.json"):
+ self.log.info("Publishing to %s/%s peers..." % (min(len(self.peers), limit), len(self.peers)))
+ published = [] # Successfully published (Peer)
+ publishers = [] # Publisher threads
+ peers = self.peers.values()
+ if not peers:
+ return 0 # No peers found
+
+ random.shuffle(peers)
+ event_done = gevent.event.AsyncResult()
+ for i in range(min(len(self.peers), limit, 5)): # Max 5 threads
+ publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, event_done)
+ publishers.append(publisher)
+
+ event_done.get() # Wait for done
+ if len(published) < min(len(self.peers), limit):
+ time.sleep(0.2) # If less than we need sleep a bit
+ if len(published) == 0:
+ gevent.joinall(publishers) # No successful publish, wait for all publisher
+
+ # Make sure the connected passive peers got the update
+ passive_peers = [
+ peer for peer in peers
+ if peer.connection and not peer.connection.closed and peer.key.endswith(":0") and peer not in published
+ ] # Every connected passive peer that we have not published to yet
+ for peer in passive_peers:
+ gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=10)
+
+ self.log.info(
+ "Successfuly published to %s peers, publishing to %s more passive peers" %
+ (len(published), len(passive_peers))
+ )
+ return len(published)
+
+ # Copy this site
+ def clone(self, address, privatekey=None, address_index=None, overwrite=False):
+ import shutil
+ new_site = SiteManager.site_manager.need(address, all_file=False)
+ default_dirs = [] # Don't copy these directories (they have a -default version)
+ for dir_name in os.listdir(self.storage.directory):
+ if "-default" in dir_name:
+ default_dirs.append(dir_name.replace("-default", ""))
+
+ self.log.debug("Cloning to %s, ignore dirs: %s" % (address, default_dirs))
+
+ # Copy root content.json
+ if not new_site.storage.isFile("content.json") and not overwrite:
+ # content.json does not exist yet, create a new one from the source site
+ content_json = self.storage.loadJson("content.json")
+ if "domain" in content_json:
+ del content_json["domain"]
+ content_json["title"] = "my" + content_json["title"]
+ content_json["cloned_from"] = self.address
+ if address_index:
+ content_json["address_index"] = address_index # Site owner's BIP32 index
+ new_site.storage.writeJson("content.json", content_json)
+ new_site.content_manager.loadContent("content.json", add_bad_files=False, load_includes=False)
+
+ # Copy files
+ for content_inner_path, content in self.content_manager.contents.items():
+ for file_relative_path in sorted(content["files"].keys()):
+ file_inner_path = self.content_manager.toDir(content_inner_path) + file_relative_path # Relative to site dir
+ file_inner_path = file_inner_path.strip("/") # Strip leading /
+ if file_inner_path.split("/")[0] in default_dirs: # Don't copy directories that have a -default postfixed alternative
+ self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path)
+ continue
+ file_path = self.storage.getPath(file_inner_path)
+
+ # Copy the file normally, keeping the -default postfixed dir and file so the site can be cloned later
+ file_path_dest = new_site.storage.getPath(file_inner_path)
+ self.log.debug("[COPY] %s to %s..." % (file_inner_path, file_path_dest))
+ dest_dir = os.path.dirname(file_path_dest)
+ if not os.path.isdir(dest_dir):
+ os.makedirs(dest_dir)
+ shutil.copy(file_path, file_path_dest)
+
+ # If -default is in the path, create a copy of the file without the -default suffix
+ if "-default" in file_inner_path:
+ file_path_dest = new_site.storage.getPath(file_inner_path.replace("-default", ""))
+ if new_site.storage.isFile(file_path_dest) and not overwrite: # Don't overwrite site files with default ones
+ self.log.debug("[SKIP] Default file: %s (already exist)" % file_inner_path)
+ continue
+ self.log.debug("[COPY] Default file: %s to %s..." % (file_inner_path, file_path_dest))
+ dest_dir = os.path.dirname(file_path_dest)
+ if not os.path.isdir(dest_dir):
+ os.makedirs(dest_dir)
+ shutil.copy(file_path, file_path_dest)
+ # Sign if it's a content.json
+ if file_path_dest.endswith("/content.json"):
+ new_site.storage.onUpdated(file_inner_path.replace("-default", ""))
+ new_site.content_manager.loadContent(
+ file_inner_path.replace("-default", ""), add_bad_files=False, load_includes=False
+ )
+ if privatekey:
+ new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey)
+
+ if privatekey:
+ new_site.content_manager.sign("content.json", privatekey)
+
+ # Rebuild DB
+ if new_site.storage.isFile("dbschema.json"):
+ new_site.storage.rebuildDb()
+
+ return new_site
+
+ # Check and download the file if it does not exist
+ def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0):
+ if self.storage.isFile(inner_path) and not update: # File exists, no need to do anything
+ return True
+ elif self.settings["serving"] is False: # Site not serving
+ return False
+ else: # Wait until file downloaded
+ self.bad_files[inner_path] = True # Mark as bad file
+ if not self.content_manager.contents.get("content.json"): # No content.json, download it first!
+ self.log.debug("Need content.json first")
+ gevent.spawn(self.announce)
+ if inner_path != "content.json": # Prevent double download
+ task = self.worker_manager.addTask("content.json", peer)
+ task.get()
+ self.content_manager.loadContent()
+ if not self.content_manager.contents.get("content.json"):
+ return False # Content.json download failed
+
+ if not inner_path.endswith("content.json") and not self.content_manager.getFileInfo(inner_path):
+ # No info for file, download all content.json first
+ self.log.debug("No info for %s, waiting for all content.json" % inner_path)
+ success = self.downloadContent("content.json", download_files=False)
+ if not success:
+ return False
+ if not self.content_manager.getFileInfo(inner_path):
+ return False # Still no info for file
+
+ task = self.worker_manager.addTask(inner_path, peer, priority=priority)
+ if blocking:
+ return task.get()
+ else:
+ return task
+
+ # Add a new peer to the site, or update an existing one
+ def addPeer(self, ip, port, return_peer=False):
+ if not ip:
+ return False
+ if (ip, port) in self.peer_blacklist:
+ return False # Ignore blacklisted peers (e.g. myself)
+ key = "%s:%s" % (ip, port)
+ if key in self.peers: # Already has this ip
+ # self.peers[key].found()
+ if return_peer: # Always return peer
+ return self.peers[key]
+ else:
+ return False
+ else: # New peer
+ peer = Peer(ip, port, self)
+ self.peers[key] = peer
+ return peer
+
+ # Gather peers from connected peers using peer exchange
+ @util.Noparallel(blocking=False)
+ def announcePex(self, query_num=2, need_num=5):
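+ # query_num: stop after this many successful pex queries; need_num: number of peers to request from each peer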
+ peers = [peer for peer in self.peers.values() if peer.connection and peer.connection.connected] # Connected peers
+ if len(peers) == 0: # No connected peers for this site, query any known peer
+ self.log.debug("No connected peers found, querying all known peers using pex")
+ peers = self.peers.values()
+ need_num = 10
+
+ random.shuffle(peers)
+ done = 0
+ added = 0
+ for peer in peers:
+ if peer.connection: # Has connection
+ if "port_opened" in peer.connection.handshake: # This field added recently, so probably has has peer exchange
+ res = peer.pex(need_num=need_num)
+ else:
+ res = False
+ else: # No connection
+ res = peer.pex(need_num=need_num)
+ if type(res) == int: # We have a result
+ done += 1
+ added += res
+ if res:
+ self.worker_manager.onPeers()
+ self.updateWebsocket(peers_added=res)
+ if done == query_num:
+ break
+ self.log.debug("Queried pex from %s peers got %s new peers." % (done, added))
+
+ # Gather peers from tracker
+ # Return: Complete time or False on error
+ def announceTracker(self, protocol, ip, port, fileserver_port, address_hash, my_peer_id):
+ s = time.time()
+ if protocol == "udp": # Udp tracker
+ if config.disable_udp:
+ return False # Udp support disabled
+ tracker = UdpTrackerClient(ip, port)
+ tracker.peer_port = fileserver_port
+ try:
+ tracker.connect()
+ tracker.poll_once()
+ tracker.announce(info_hash=address_hash, num_want=50)
+ back = tracker.poll_once()
+ peers = back["response"]["peers"]
+ except Exception, err:
+ return False
+
+ else: # Http tracker
+ params = {
+ 'info_hash': binascii.a2b_hex(address_hash),
+ 'peer_id': my_peer_id, 'port': fileserver_port,
+ 'uploaded': 0, 'downloaded': 0, 'left': 0, 'compact': 1, 'numwant': 30,
+ 'event': 'started'
+ }
+ req = None
+ try:
+ url = "http://" + ip + "?" + urllib.urlencode(params)
+ # Load url
+ with gevent.Timeout(10, False): # Enforce an overall timeout
+ req = urllib2.urlopen(url, timeout=8)
+ response = req.read()
+ req.fp._sock.recv = None # Hacky avoidance of memory leak for older python versions
+ req.close()
+ req = None
+ if not response:
+ self.log.debug("Http tracker %s response error" % url)
+ return False
+ # Decode peers
+ peer_data = bencode.decode(response)["peers"]
+ response = None
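+ # Compact peer format: 6 bytes per peer (4-byte IPv4 address + 2-byte big-endian port)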
+ peer_count = len(peer_data) / 6
+ peers = []
+ for peer_offset in xrange(peer_count):
+ off = 6 * peer_offset
+ peer = peer_data[off:off + 6]
+ addr, port = struct.unpack('!LH', peer)
+ peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port})
+ except Exception, err:
+ self.log.debug("Http tracker %s error: %s" % (url, err))
+ if req:
+ req.close()
+ req = None
+ return False
+
+ # Adding peers
+ added = 0
+ for peer in peers:
+ if not peer["port"]:
+ continue # Don't add peers with port 0
+ if self.addPeer(peer["addr"], peer["port"]):
+ added += 1
+ if added:
+ self.worker_manager.onPeers()
+ self.updateWebsocket(peers_added=added)
+ self.log.debug("Found %s peers, new: %s" % (len(peers), added))
+ return time.time() - s
+
+ # Add myself and get other peers from tracker
+ def announce(self, force=False):
+ if time.time() < self.last_announce + 30 and not force:
+ return # No reannouncing within 30 secs
+ self.last_announce = time.time()
+ errors = []
+ slow = []
+ address_hash = hashlib.sha1(self.address).hexdigest() # Site address hash
+ my_peer_id = sys.modules["main"].file_server.peer_id
+
+ if sys.modules["main"].file_server.port_opened:
+ fileserver_port = config.fileserver_port
+ else: # Port not opened, report port 0
+ fileserver_port = 0
+
+ s = time.time()
+ announced = 0
+ threads = []
+
+ for protocol, ip, port in SiteManager.TRACKERS: # Start announce threads
+ thread = gevent.spawn(self.announceTracker, protocol, ip, port, fileserver_port, address_hash, my_peer_id)
+ threads.append(thread)
+ thread.ip = ip
+ thread.protocol = protocol
+
+ gevent.joinall(threads) # Wait for announce finish
+
+ for thread in threads:
+ if thread.value:
+ if thread.value > 1:
+ slow.append("%.2fs %s://%s" % (thread.value, thread.protocol, thread.ip))
+ announced += 1
+ else:
+ errors.append("%s://%s" % (thread.protocol, thread.ip))
+
+ # Save peers num
+ self.settings["peers"] = len(self.peers)
+ self.saveSettings()
+
+ if len(errors) < len(SiteManager.TRACKERS): # Less errors than total tracker nums
+ self.log.debug(
+ "Announced port %s to %s trackers in %.3fs, errors: %s, slow: %s" %
+ (fileserver_port, announced, time.time() - s, errors, slow)
+ )
+ else:
+ self.log.error("Announced to %s trackers in %.3fs, failed" % (announced, time.time() - s))
+
+ if not [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]:
+ # If no connected peer yet then wait for connections
+ gevent.spawn_later(3, self.announcePex, need_num=10) # Spawn 3 secs later
+ else: # Else announce immediately
+ self.announcePex()
+
+ # Keep connections to get the updates (required for passive clients)
+ def needConnections(self, num=3):
+ need = min(len(self.peers), num) # Need num peers, but no more than the total peer count
+
+ connected = 0
+ for peer in self.peers.values(): # Check current connected number
+ if peer.connection and peer.connection.connected:
+ connected += 1
+
+ self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers)))
+
+ if connected < need: # Need more than we have
+ for peer in self.peers.values():
+ if not peer.connection or not peer.connection.connected: # No peer connection or disconnected
+ peer.pex() # Initiate peer exchange
+ if peer.connection and peer.connection.connected:
+ connected += 1 # Successfully connected
+ if connected >= need:
+ break
+ return connected
+
+ # Return: Probably working, connectable Peers
+ def getConnectablePeers(self, need_num=5, ignore=[]):
+ peers = self.peers.values()
+ random.shuffle(peers)
+ found = []
+ for peer in peers:
+ if peer.key.endswith(":0"):
+ continue # Not connectable
+ if not peer.connection:
+ continue # No connection
+ if peer.key in ignore:
+ continue # The requester has this peer
+ if time.time() - peer.connection.last_recv_time > 60 * 60 * 2: # Last message more than 2 hours ago
+ peer.connection = None # Cleanup: Dead connection
+ continue
+ found.append(peer)
+ if len(found) >= need_num:
+ break # Found requested number of peers
+
+ if (not found and not ignore) or (need_num > 5 and need_num < 100 and len(found) < need_num):
+ # Fall back to less promising peers: we found none and the requester has none, or we can't supply enough
+ found = [peer for peer in peers if not peer.key.endswith(":0") and peer.key not in ignore][0:need_num - len(found)]
+
+ return found
+
+ # - Events -
+
+ # Add event listeners
+ def addEventListeners(self):
+ self.onFileStart = util.Event() # If WorkerManager added new task
+ self.onFileDone = util.Event() # If WorkerManager successfully downloaded a file
+ self.onFileFail = util.Event() # If WorkerManager failed to download a file
+ self.onComplete = util.Event() # All files finished
+
+ self.onFileStart.append(lambda inner_path: self.fileStarted()) # No parameters, so Noparallel batching works
+ self.onFileDone.append(lambda inner_path: self.fileDone(inner_path))
+ self.onFileFail.append(lambda inner_path: self.fileFailed(inner_path))
+
+ # Send site status update to websocket clients
+ def updateWebsocket(self, **kwargs):
+ if kwargs:
+ param = {"event": kwargs.items()[0]}
+ else:
+ param = None
+ for ws in self.websockets:
+ ws.event("siteChanged", self, param)
+
+ # File download started
+ @util.Noparallel(blocking=False)
+ def fileStarted(self):
+ time.sleep(0.001) # Wait briefly so other queued file adds get batched into one event
+ self.updateWebsocket(file_started=True)
+
+ # File downloaded successfully
+ def fileDone(self, inner_path):
+ # File downloaded, remove it from bad files
+ if inner_path in self.bad_files:
+ self.log.debug("Bad file solved: %s" % inner_path)
+ del(self.bad_files[inner_path])
+
+ # Update content.json last download time
+ if inner_path == "content.json":
+ self.content_updated = time.time()
+
+ self.updateWebsocket(file_done=inner_path)
+
+ # File download failed
+ def fileFailed(self, inner_path):
+ if inner_path == "content.json":
+ self.content_updated = False
+ self.log.debug("Can't update content.json")
+ if inner_path in self.bad_files:
+ self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1
+
+ self.updateWebsocket(file_failed=inner_path)
diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py
index fa863870..34ba0562 100644
--- a/src/Site/SiteManager.py
+++ b/src/Site/SiteManager.py
@@ -1,99 +1,99 @@
-import json, logging, time, re, os
-import gevent
+import json
+import logging
+import re
+import os
+
from Plugin import PluginManager
from Config import config
TRACKERS = [
- ("udp", "open.demonii.com", 1337),
- #("udp", "sugoi.pomf.se", 2710),
- #("udp", "tracker.coppersurfer.tk", 80),
- ("udp", "tracker.leechers-paradise.org", 6969),
- ("udp", "9.rarbg.com", 2710),
- #("udp", "www.eddie4.nl", 6969),
- #("udp", "trackr.sytes.net", 80),
- #("udp", "tracker4.piratux.com", 6969)
- #("http", "exodus.desync.com:80/announce", None), Off
- ("http", "tracker.aletorrenty.pl:2710/announce", None),
- #("http", "torrent.gresille.org/announce", None), # Slow
- #("http", "announce.torrentsmd.com:6969/announce", None), # Off
- #("http", "i.bandito.org/announce", None), # Off
- ("http", "retracker.telecom.kz/announce", None),
- ("http", "torrent.gresille.org/announce", None),
+ ("udp", "open.demonii.com", 1337),
+ # ("udp", "sugoi.pomf.se", 2710),
+ # ("udp", "tracker.coppersurfer.tk", 80),
+ ("udp", "tracker.leechers-paradise.org", 6969),
+ ("udp", "9.rarbg.com", 2710),
+ # ("udp", "www.eddie4.nl", 6969),
+ # ("udp", "trackr.sytes.net", 80),
+ # ("udp", "tracker4.piratux.com", 6969)
+ # ("http", "exodus.desync.com:80/announce", None), Off
+ ("http", "tracker.aletorrenty.pl:2710/announce", None),
+ # ("http", "torrent.gresille.org/announce", None), # Slow
+ # ("http", "announce.torrentsmd.com:6969/announce", None), # Off
+ # ("http", "i.bandito.org/announce", None), # Off
+ ("http", "retracker.telecom.kz/announce", None),
+ ("http", "torrent.gresille.org/announce", None),
]
@PluginManager.acceptPlugins
class SiteManager(object):
- def __init__(self):
- self.sites = None
- # Load all sites from data/sites.json
- def load(self):
- from Site import Site
- if not self.sites: self.sites = {}
- address_found = []
- added = 0
- # Load new adresses
- for address in json.load(open("%s/sites.json" % config.data_dir)):
- if address not in self.sites and os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
- self.sites[address] = Site(address)
- added += 1
- address_found.append(address)
+ def __init__(self):
+ self.sites = None
- # Remove deleted adresses
- for address in self.sites.keys():
- if address not in address_found:
- del(self.sites[address])
- logging.debug("Removed site: %s" % address)
+ # Load all sites from data/sites.json
+ def load(self):
+ from Site import Site
+ if not self.sites:
+ self.sites = {}
+ address_found = []
+ added = 0
+ # Load new addresses
+ for address in json.load(open("%s/sites.json" % config.data_dir)):
+ if address not in self.sites and os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
+ self.sites[address] = Site(address)
+ added += 1
+ address_found.append(address)
- if added: logging.debug("SiteManager added %s sites" % added)
+ # Remove deleted addresses
+ for address in self.sites.keys():
+ if address not in address_found:
+ del(self.sites[address])
+ logging.debug("Removed site: %s" % address)
+
+ if added:
+ logging.debug("SiteManager added %s sites" % added)
+
+ # Checks if it's a valid address
+ def isAddress(self, address):
+ return re.match("^[A-Za-z0-9]{26,35}$", address)
+
+ # Return: Site object or None if not found
+ def get(self, address):
+ if self.sites is None: # Not loaded yet
+ self.load()
+ return self.sites.get(address)
+
+ # Return the site; create it and start downloading its files if needed
+ def need(self, address, all_file=True):
+ from Site import Site
+ site = self.get(address)
+ if not site: # Site does not exist yet
+ if not self.isAddress(address):
+ return False # Not a valid address
+ logging.debug("Added new site: %s" % address)
+ site = Site(address)
+ self.sites[address] = site
+ if not site.settings["serving"]: # Maybe it was deleted before
+ site.settings["serving"] = True
+ site.saveSettings()
+
+ if all_file:
+ site.download()
+ return site
+
+ def delete(self, address):
+ logging.debug("SiteManager deleted site: %s" % address)
+ del(self.sites[address])
+
+ # Lazy load sites
+ def list(self):
+ if self.sites is None: # Not loaded yet
+ self.load()
+ return self.sites
- # Checks if its a valid address
- def isAddress(self, address):
- return re.match("^[A-Za-z0-9]{26,35}$", address)
+site_manager = SiteManager() # Singleton
-
- # Return: Site object or None if not found
- def get(self, address):
- if self.sites == None: # Not loaded yet
- self.load()
- return self.sites.get(address)
-
-
- # Return or create site and start download site files
- def need(self, address, all_file=True):
- from Site import Site
- new = False
- site = self.get(address)
- if not site: # Site not exist yet
- if not self.isAddress(address): return False # Not address: %s % address
- logging.debug("Added new site: %s" % address)
- site = Site(address)
- self.sites[address] = site
- if not site.settings["serving"]: # Maybe it was deleted before
- site.settings["serving"] = True
- site.saveSettings()
- new = True
-
- if all_file: site.download()
- return site
-
-
- def delete(self, address):
- logging.debug("SiteManager deleted site: %s" % address)
- del(self.sites[address])
-
-
- # Lazy load sites
- def list(self):
- if self.sites == None: # Not loaded yet
- self.load()
- return self.sites
-
-
-
-site_manager = SiteManager() # Singletone
-
-peer_blacklist = [] # Dont download from this peers
\ No newline at end of file
+peer_blacklist = [] # Don't download from these peers
diff --git a/src/Site/SiteStorage.py b/src/Site/SiteStorage.py
index 47452206..e6c03a3d 100644
--- a/src/Site/SiteStorage.py
+++ b/src/Site/SiteStorage.py
@@ -1,292 +1,297 @@
-import os, re, shutil, json, time, sqlite3
+import os
+import re
+import shutil
+import json
+import time
+
+import sqlite3
import gevent.event
+
from Db import Db
from Debug import Debug
from Config import config
class SiteStorage:
- def __init__(self, site, allow_create=True):
- self.site = site
- self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
- self.log = site.log
- self.db = None # Db class
- self.db_checked = False # Checked db tables since startup
- self.event_db_busy = None # Gevent AsyncResult if db is working on rebuild
- self.has_db = self.isFile("dbschema.json") # The site has schema
- if not os.path.isdir(self.directory):
- if allow_create:
- os.mkdir(self.directory) # Create directory if not found
- else:
- raise Exception("Directory not exists: %s" % self.directory)
+ def __init__(self, site, allow_create=True):
+ self.site = site
+ self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data directory
+ self.log = site.log
+ self.db = None # Db class
+ self.db_checked = False # Checked db tables since startup
+ self.event_db_busy = None # Gevent AsyncResult if db is working on rebuild
+ self.has_db = self.isFile("dbschema.json") # Whether the site has a db schema
+ if not os.path.isdir(self.directory):
+ if allow_create:
+ os.mkdir(self.directory) # Create directory if not found
+ else:
+ raise Exception("Directory not exists: %s" % self.directory)
- # Load db from dbschema.json
- def openDb(self, check=True):
- schema = self.loadJson("dbschema.json")
- db_path = self.getPath(schema["db_file"])
- if check:
- if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Not exist or null
- self.rebuildDb()
- self.db = Db(schema, db_path)
- if check and not self.db_checked:
- changed_tables = self.db.checkTables()
- if changed_tables: self.rebuildDb(delete_db=False) # Todo only update the changed table datas
+ # Load db from dbschema.json
+ def openDb(self, check=True):
+ schema = self.loadJson("dbschema.json")
+ db_path = self.getPath(schema["db_file"])
+ if check:
+ if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Doesn't exist or empty
+ self.rebuildDb()
+ self.db = Db(schema, db_path)
+ if check and not self.db_checked:
+ changed_tables = self.db.checkTables()
+ if changed_tables:
+ self.rebuildDb(delete_db=False) # TODO: only update the changed tables' data
+ def closeDb(self):
+ if self.db:
+ self.db.close()
+ self.event_db_busy = None
+ self.db = None
- def closeDb(self):
- if self.db: self.db.close()
- self.event_db_busy = None
- self.db = None
+ # Return db class
+ def getDb(self):
+ if not self.db:
+ self.log.debug("No database, waiting for dbschema.json...")
+ self.site.needFile("dbschema.json", priority=1)
+ self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exists
+ if self.has_db:
+ self.openDb()
+ return self.db
+ # Rebuild sql cache
+ def rebuildDb(self, delete_db=True):
+ self.has_db = self.isFile("dbschema.json")
+ if not self.has_db:
+ return False
+ self.event_db_busy = gevent.event.AsyncResult()
+ schema = self.loadJson("dbschema.json")
+ db_path = self.getPath(schema["db_file"])
+ if os.path.isfile(db_path) and delete_db:
+ if self.db:
+ self.db.close() # Close db if open
+ self.log.info("Deleting %s" % db_path)
+ try:
+ os.unlink(db_path)
+ except Exception, err:
+ self.log.error("Delete error: %s" % err)
+ self.openDb(check=False)
+ self.log.info("Creating tables...")
+ self.db.checkTables()
+ self.log.info("Importing data...")
+ cur = self.db.getCursor()
+ cur.execute("BEGIN")
+ cur.logging = False
+ found = 0
+ s = time.time()
+ for content_inner_path, content in self.site.content_manager.contents.items():
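+ # Import every known content.json plus each .json file it references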
+ content_path = self.getPath(content_inner_path)
+ if os.path.isfile(content_path): # Content.json file exists
+ if self.db.loadJson(content_path, cur=cur):
+ found += 1
+ else:
+ self.log.error("[MISSING] %s" % content_inner_path)
+ for file_relative_path in content["files"].keys():
+ if not file_relative_path.endswith(".json"):
+ continue # We're only interested in json files
+ content_inner_path_dir = self.site.content_manager.toDir(content_inner_path) # Content.json dir relative to site
+ file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
+ file_inner_path = file_inner_path.strip("/") # Strip leading /
+ file_path = self.getPath(file_inner_path)
+ if os.path.isfile(file_path):
+ if self.db.loadJson(file_path, cur=cur):
+ found += 1
+ else:
+ self.log.error("[MISSING] %s" % file_inner_path)
+ cur.execute("END")
+ self.log.info("Imported %s data file in %ss" % (found, time.time() - s))
+ self.event_db_busy.set(True) # Event done, notify waiters
+ self.event_db_busy = None # Clear event
- # Return db class
- def getDb(self):
- if not self.db:
- self.log.debug("No database, waiting for dbschema.json...")
- self.site.needFile("dbschema.json", priority=1)
- self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist
- if self.has_db: self.openDb()
- return self.db
+ # Execute sql query, or rebuild the db on database error
+ def query(self, query, params=None):
+ if self.event_db_busy: # Db not ready for queries
+ self.log.debug("Wating for db...")
+ self.event_db_busy.get() # Wait for event
+ try:
+ res = self.getDb().execute(query, params)
+ except sqlite3.DatabaseError, err:
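+ # Only the generic DatabaseError triggers a rebuild; subclasses (e.g. IntegrityError) are re-raised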
+ if err.__class__.__name__ == "DatabaseError":
+ self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
+ self.rebuildDb()
+ res = self.db.cur.execute(query, params)
+ else:
+ raise err
+ return res
+ # Open file object
+ def open(self, inner_path, mode="rb"):
+ return open(self.getPath(inner_path), mode)
- # Rebuild sql cache
- def rebuildDb(self, delete_db=True):
- self.has_db = self.isFile("dbschema.json")
- if not self.has_db: return False
- self.event_db_busy = gevent.event.AsyncResult()
- schema = self.loadJson("dbschema.json")
- db_path = self.getPath(schema["db_file"])
- if os.path.isfile(db_path) and delete_db:
- if self.db: self.db.close() # Close db if open
- self.log.info("Deleting %s" % db_path)
- try:
- os.unlink(db_path)
- except Exception, err:
- self.log.error("Delete error: %s" % err)
- self.openDb(check=False)
- self.log.info("Creating tables...")
- self.db.checkTables()
- self.log.info("Importing data...")
- cur = self.db.getCursor()
- cur.execute("BEGIN")
- cur.logging = False
- found = 0
- s = time.time()
- for content_inner_path, content in self.site.content_manager.contents.items():
- content_path = self.getPath(content_inner_path)
- if os.path.isfile(content_path): # Missing content.json file
- if self.db.loadJson(content_path, cur=cur): found += 1
- else:
- self.log.error("[MISSING] %s" % content_inner_path)
- for file_relative_path in content["files"].keys():
- if not file_relative_path.endswith(".json"): continue # We only interesed in json files
- file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
- file_inner_path = file_inner_path.strip("/") # Strip leading /
- file_path = self.getPath(file_inner_path)
- if os.path.isfile(file_path):
- if self.db.loadJson(file_path, cur=cur): found += 1
- else:
- self.log.error("[MISSING] %s" % file_inner_path)
- cur.execute("END")
- self.log.info("Imported %s data file in %ss" % (found, time.time()-s))
- self.event_db_busy.set(True) # Event done, notify waiters
- self.event_db_busy = None # Clear event
+ # Read and return the file's content
+ def read(self, inner_path, mode="r"):
+ return open(self.getPath(inner_path), mode).read()
+ # Write content to file
+ def write(self, inner_path, content):
+ file_path = self.getPath(inner_path)
+ # Create dir if not exist
+ file_dir = os.path.dirname(file_path)
+ if not os.path.isdir(file_dir):
+ os.makedirs(file_dir)
+ # Write file
+ if hasattr(content, 'read'): # File-like object
+ with open(file_path, "wb") as file:
+ shutil.copyfileobj(content, file) # Write buff to disk
+ else: # Simple string
+ with open(file_path, "wb") as file:
+ file.write(content)
+ del content
+ self.onUpdated(inner_path)
- # Execute sql query or rebuild on dberror
- def query(self, query, params=None):
- if self.event_db_busy: # Db not ready for queries
- self.log.debug("Wating for db...")
- self.event_db_busy.get() # Wait for event
- try:
- res = self.getDb().execute(query, params)
- except sqlite3.DatabaseError, err:
- if err.__class__.__name__ == "DatabaseError":
- self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
- self.rebuildDb()
- res = self.db.cur.execute(query, params)
- else:
- raise err
- return res
+ # Site content updated
+ def onUpdated(self, inner_path):
+ file_path = self.getPath(inner_path)
+ # Update Sql cache
+ if inner_path == "dbschema.json":
+ self.has_db = self.isFile("dbschema.json")
+ self.getDb().checkTables() # Check if any table schema changed
+ elif inner_path.endswith(".json") and self.has_db: # Load json file to db
+ self.log.debug("Loading json file to db: %s" % inner_path)
+ try:
+ self.getDb().loadJson(file_path)
+ except Exception, err:
+ self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
+ self.closeDb()
+ # Load and parse json file
+ def loadJson(self, inner_path):
+ with self.open(inner_path) as file:
+ return json.load(file)
- # Open file object
- def open(self, inner_path, mode="rb"):
- return open(self.getPath(inner_path), mode)
+ # Write formatted json file
+ def writeJson(self, inner_path, data):
+ content = json.dumps(data, indent=2, sort_keys=True)
+ # Make it a little more compact by removing unnecessary white space
+ def compact_list(match):
+ return "[ " + match.group(1).strip() + " ]"
- # Open file object
- def read(self, inner_path, mode="r"):
- return open(self.getPath(inner_path), mode).read()
+ def compact_dict(match):
+ return "{ " + match.group(1).strip() + " }"
+ content = re.sub("\[([^,\{\[]{10,100}?)\]", compact_list, content, flags=re.DOTALL)
+ content = re.sub("\{([^,\[\{]{10,100}?)\}", compact_dict, content, flags=re.DOTALL)
+ # Write to disk
+ self.write(inner_path, content)
- # Write content to file
- def write(self, inner_path, content):
- file_path = self.getPath(inner_path)
- # Create dir if not exist
- file_dir = os.path.dirname(file_path)
- if not os.path.isdir(file_dir):
- os.makedirs(file_dir)
- # Write file
- if hasattr(content, 'read'): # File-like object
- with open(file_path, "wb") as file:
- shutil.copyfileobj(content, file) # Write buff to disk
- else: # Simple string
- with open(file_path, "wb") as file:
- file.write(content)
- del content
- self.onUpdated(inner_path)
+ # Get file size
+ def getSize(self, inner_path):
+ path = self.getPath(inner_path)
+ if os.path.isfile(path):
+ return os.path.getsize(path)
+ else:
+ return 0
+ # File exists
+ def isFile(self, inner_path):
+ return os.path.isfile(self.getPath(inner_path))
- # Site content updated
- def onUpdated(self, inner_path):
- file_path = self.getPath(inner_path)
- # Update Sql cache
- if inner_path == "dbschema.json":
- self.has_db = self.isFile("dbschema.json")
- self.getDb().checkTables() # Check if any if table schema changed
- elif inner_path.endswith(".json") and self.has_db: # Load json file to db
- self.log.debug("Loading json file to db: %s" % inner_path)
- try:
- self.getDb().loadJson(file_path)
- except Exception, err:
- self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
- self.closeDb()
+ # Dir exists
+ def isDir(self, inner_path):
+ return os.path.isdir(self.getPath(inner_path))
+ # Security check and return path of site's file
+ def getPath(self, inner_path):
+ inner_path = inner_path.replace("\\", "/") # Windows separator fix
+ inner_path = re.sub("^%s/" % re.escape(self.directory), "", inner_path) # Remove site directory if begins with it
+ file_path = self.directory + "/" + inner_path
+ allowed_dir = os.path.abspath(self.directory) # Only files within this directory allowed
+ if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
+ raise Exception("File not allowed: %s" % file_path)
+ return file_path
- # Load and parse json file
- def loadJson(self, inner_path):
- with self.open(inner_path) as file:
- return json.load(file)
+ # Verify all files sha512sum using content.json
+ def verifyFiles(self, quick_check=False): # quick_check: compare file sizes instead of hashing
+ bad_files = []
+ if not self.site.content_manager.contents.get("content.json"): # No content.json, download it first
+ self.site.needFile("content.json", update=True) # Force update to fix corrupt file
+ self.site.content_manager.loadContent() # Reload content.json
+ for content_inner_path, content in self.site.content_manager.contents.items():
+ if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file
+ self.log.debug("[MISSING] %s" % content_inner_path)
+ bad_files.append(content_inner_path)
+ for file_relative_path in content["files"].keys():
+ file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path # Relative to site dir
+ file_inner_path = file_inner_path.strip("/") # Strip leading /
+ file_path = self.getPath(file_inner_path)
+ if not os.path.isfile(file_path):
+ self.log.debug("[MISSING] %s" % file_inner_path)
+ bad_files.append(file_inner_path)
+ continue
- # Write formatted json file
- def writeJson(self, inner_path, data):
- content = json.dumps(data, indent=2, sort_keys=True)
- # Make it a little more compact by removing unnecessary white space
- def compact_list(match):
- return "[ "+match.group(1).strip()+" ]"
+ if quick_check:
+ ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
+ else:
+ ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
- def compact_dict(match):
- return "{ "+match.group(1).strip()+" }"
+ if not ok:
+ self.log.debug("[CHANGED] %s" % file_inner_path)
+ bad_files.append(file_inner_path)
+ self.log.debug(
+ "%s verified: %s files, quick_check: %s, bad files: %s" %
+ (content_inner_path, len(content["files"]), quick_check, bad_files)
+ )
- content = re.sub("\[([^,\{\[]{10,100}?)\]", compact_list, content, flags=re.DOTALL)
- content = re.sub("\{([^,\[\{]{10,100}?)\}", compact_dict, content, flags=re.DOTALL)
- # Write to disk
- self.write(inner_path, content)
+ return bad_files
+ # Check and try to fix the site's file integrity
+ def checkFiles(self, quick_check=True):
+ s = time.time()
+ bad_files = self.verifyFiles(quick_check)
+ if bad_files:
+ for bad_file in bad_files:
+ self.site.bad_files[bad_file] = self.site.bad_files.get(bad_file, 0) + 1
+ self.log.debug("Checked files in %.2fs... Quick:%s" % (time.time() - s, quick_check))
- # Get file size
- def getSize(self, inner_path):
- path = self.getPath(inner_path)
- if os.path.isfile(path):
- return os.path.getsize(path)
- else:
- return 0
+ # Delete all of the site's files
+ def deleteFiles(self):
+ if self.has_db:
+ self.log.debug("Deleting db file...")
+ self.closeDb()
+ try:
+ schema = self.loadJson("dbschema.json")
+ db_path = self.getPath(schema["db_file"])
+ if os.path.isfile(db_path):
+ os.unlink(db_path)
+ except Exception, err:
+ self.log.error("Db file delete error: %s" % err)
+ self.log.debug("Deleting files from content.json...")
+ files = [] # Get filenames
+ for content_inner_path, content in self.site.content_manager.contents.items():
+ files.append(content_inner_path)
+ for file_relative_path in content["files"].keys():
+ file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path # Relative to site dir
+ files.append(file_inner_path)
- # File exist
- def isFile(self, inner_path):
- return os.path.isfile(self.getPath(inner_path))
+ for inner_path in files:
+ path = self.getPath(inner_path)
+ if os.path.isfile(path):
+ os.unlink(path)
+ self.log.debug("Deleting empty dirs...")
+ for root, dirs, files in os.walk(self.directory, topdown=False):
+ for dir in dirs:
+ path = os.path.join(root, dir)
+ if os.path.isdir(path) and os.listdir(path) == []:
+ os.removedirs(path)
+ self.log.debug("Removing %s" % path)
+ if os.path.isdir(self.directory) and os.listdir(self.directory) == []:
+ os.removedirs(self.directory) # Remove sites directory if empty
- # Dir exist
- def isDir(self, inner_path):
- return os.path.isdir(self.getPath(inner_path))
-
-
- # Security check and return path of site's file
- def getPath(self, inner_path):
- inner_path = inner_path.replace("\\", "/") # Windows separator fix
- inner_path = re.sub("^%s/" % re.escape(self.directory), "", inner_path) # Remove site directory if begins with it
- file_path = self.directory+"/"+inner_path
- allowed_dir = os.path.abspath(self.directory) # Only files within this directory allowed
- if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
- raise Exception("File not allowed: %s" % file_path)
- return file_path
-
-
-
- # Verify all files sha512sum using content.json
- def verifyFiles(self, quick_check=False): # Fast = using file size
- bad_files = []
- if not self.site.content_manager.contents.get("content.json"): # No content.json, download it first
- self.site.needFile("content.json", update=True) # Force update to fix corrupt file
- self.site.content_manager.loadContent() # Reload content.json
- for content_inner_path, content in self.site.content_manager.contents.items():
- if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file
- self.log.debug("[MISSING] %s" % content_inner_path)
- bad_files.append(content_inner_path)
- for file_relative_path in content["files"].keys():
- file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
- file_inner_path = file_inner_path.strip("/") # Strip leading /
- file_path = self.getPath(file_inner_path)
- if not os.path.isfile(file_path):
- self.log.debug("[MISSING] %s" % file_inner_path)
- bad_files.append(file_inner_path)
- continue
-
- if quick_check:
- ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
- else:
- ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
-
- if not ok:
- self.log.debug("[CHANGED] %s" % file_inner_path)
- bad_files.append(file_inner_path)
- self.log.debug("%s verified: %s files, quick_check: %s, bad files: %s" % (content_inner_path, len(content["files"]), quick_check, bad_files))
-
- return bad_files
-
-
- # Check and try to fix site files integrity
- def checkFiles(self, quick_check=True):
- s = time.time()
- bad_files = self.verifyFiles(quick_check)
- if bad_files:
- for bad_file in bad_files:
- self.site.bad_files[bad_file] = self.site.bad_files.get("bad_file", 0)+1
- self.log.debug("Checked files in %.2fs... Quick:%s" % (time.time()-s, quick_check))
-
-
- # Delete site's all file
- def deleteFiles(self):
- if self.has_db:
- self.log.debug("Deleting db file...")
- self.closeDb()
- try:
- schema = self.loadJson("dbschema.json")
- db_path = self.getPath(schema["db_file"])
- if os.path.isfile(db_path): os.unlink(db_path)
- except Exception, err:
- self.log.error("Db file delete error: %s" % err)
-
-
- self.log.debug("Deleting files from content.json...")
- files = [] # Get filenames
- for content_inner_path, content in self.site.content_manager.contents.items():
- files.append(content_inner_path)
- for file_relative_path in content["files"].keys():
- file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
- files.append(file_inner_path)
-
- for inner_path in files:
- path = self.getPath(inner_path)
- if os.path.isfile(path): os.unlink(path)
-
- self.log.debug("Deleting empty dirs...")
- for root, dirs, files in os.walk(self.directory, topdown=False):
- for dir in dirs:
- path = os.path.join(root,dir)
- if os.path.isdir(path) and os.listdir(path) == []:
- os.removedirs(path)
- self.log.debug("Removing %s" % path)
- if os.path.isdir(self.directory) and os.listdir(self.directory) == []: os.removedirs(self.directory) # Remove sites directory if empty
-
- if os.path.isdir(self.directory):
- self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory)
- return False # Some files not deleted
- else:
- self.log.debug("Site data directory deleted: %s..." % self.directory)
- return True # All clean
+ if os.path.isdir(self.directory):
+ self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory)
+ return False # Some files not deleted
+ else:
+ self.log.debug("Site data directory deleted: %s..." % self.directory)
+ return True # All clean
diff --git a/src/Test/BenchmarkConnection.py b/src/Test/BenchmarkConnection.py
new file mode 100644
index 00000000..28eae013
--- /dev/null
+++ b/src/Test/BenchmarkConnection.py
@@ -0,0 +1,140 @@
+import time
+import socket
+import msgpack
+
+
+print "Connecting..."
+sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+sock.connect(("localhost", 1234))
+
+
+print "1 Threaded: Send, receive 10000 ping request...",
+s = time.time()
+for i in range(10000):
+ sock.sendall(msgpack.packb({"cmd": "Ping"}))
+ req = sock.recv(16 * 1024)
+print repr(req), time.time() - s
+
+
+print "1 Threaded: Send, receive, decode 10000 ping request...",
+s = time.time()
+unpacker = msgpack.Unpacker()
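+# Streaming unpacker: responses may arrive split across or coalesced within recv() calls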
+reqs = 0
+for i in range(10000):
+ sock.sendall(msgpack.packb({"cmd": "Ping"}))
+ unpacker.feed(sock.recv(16 * 1024))
+ for req in unpacker:
+ reqs += 1
+print "Found:", req, "x", reqs, time.time() - s
+
+
+print "1 Threaded: Send, receive, decode, reconnect 1000 ping request...",
+s = time.time()
+unpacker = msgpack.Unpacker()
+reqs = 0
+for i in range(1000):
+ sock.sendall(msgpack.packb({"cmd": "Ping"}))
+ unpacker.feed(sock.recv(16 * 1024))
+ for req in unpacker:
+ reqs += 1
+ sock.close()
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.connect(("localhost", 1234))
+print "Found:", req, "x", reqs, time.time() - s
+
+
+print "1 Threaded: Request, receive, decode 10000 x 10k data request...",
+s = time.time()
+unpacker = msgpack.Unpacker()
+reqs = 0
+for i in range(10000):
+ sock.sendall(msgpack.packb({"cmd": "Bigdata"}))
+
+ """buff = StringIO()
+ data = sock.recv(16*1024)
+ buff.write(data)
+ if not data:
+ break
+ while not data.endswith("\n"):
+ data = sock.recv(16*1024)
+ if not data: break
+ buff.write(data)
+ req = msgpack.unpackb(buff.getvalue().strip("\n"))
+ reqs += 1"""
+
+ req_found = False
+ while not req_found:
+ buff = sock.recv(16 * 1024)
+ unpacker.feed(buff)
+ for req in unpacker:
+ reqs += 1
+ req_found = True
+ break # Only process one request
+print "Found:", len(req["res"]), "x", reqs, time.time() - s
+
+
+print "10 Threaded: Request, receive, decode 10000 x 10k data request...",
+import gevent
+s = time.time()
+reqs = 0
+req = None
+
+
+def requester():
+ global reqs, req
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.connect(("localhost", 1234))
+ unpacker = msgpack.Unpacker()
+ for i in range(1000):
+ sock.sendall(msgpack.packb({"cmd": "Bigdata"}))
+
+ req_found = False
+ while not req_found:
+ buff = sock.recv(16 * 1024)
+ unpacker.feed(buff)
+ for req in unpacker:
+ reqs += 1
+ req_found = True
+ break # Only process one request
+
+threads = []
+for i in range(10):
+ threads.append(gevent.spawn(requester))
+gevent.joinall(threads)
+print "Found:", len(req["res"]), "x", reqs, time.time() - s
+
+
+print "1 Threaded: ZeroMQ Send, receive 1000 ping request...",
+s = time.time()
+import zmq.green as zmq
+c = zmq.Context()
+zmq_sock = c.socket(zmq.REQ)
+zmq_sock.connect('tcp://127.0.0.1:1234')
+for i in range(1000):
+ zmq_sock.send(msgpack.packb({"cmd": "Ping"}))
+ req = zmq_sock.recv(16 * 1024)
+print "Found:", req, time.time() - s
+
+
+print "1 Threaded: ZeroMQ Send, receive 1000 x 10k data request...",
+s = time.time()
+import zmq.green as zmq
+c = zmq.Context()
+zmq_sock = c.socket(zmq.REQ)
+zmq_sock.connect('tcp://127.0.0.1:1234')
+for i in range(1000):
+ zmq_sock.send(msgpack.packb({"cmd": "Bigdata"}))
+ req = msgpack.unpackb(zmq_sock.recv(1024 * 1024))
+print "Found:", len(req["res"]), time.time() - s
+
+
+print "1 Threaded: direct ZeroMQ Send, receive 1000 x 10k data request...",
+s = time.time()
+import zmq.green as zmq
+c = zmq.Context()
+zmq_sock = c.socket(zmq.REQ)
+zmq_sock.connect('tcp://127.0.0.1:1233')
+for i in range(1000):
+ zmq_sock.send(msgpack.packb({"cmd": "Bigdata"}))
+ req = msgpack.unpackb(zmq_sock.recv(1024 * 1024))
+print "Found:", len(req["res"]), time.time() - s
diff --git a/src/Test/BenchmarkSsl.py b/src/Test/BenchmarkSsl.py
index 324beab4..fec19474 100644
--- a/src/Test/BenchmarkSsl.py
+++ b/src/Test/BenchmarkSsl.py
@@ -1,11 +1,16 @@
#!/usr/bin/python2
-from gevent import monkey; monkey.patch_all()
-import os, time, sys, socket, ssl
-sys.path.append(os.path.abspath("src")) # Imports relative to src dir
+from gevent import monkey
+monkey.patch_all()
+import os
+import time
+import sys
+import socket
+import ssl
+sys.path.append(os.path.abspath("src")) # Imports relative to src dir
import cStringIO as StringIO
import gevent
-from gevent.queue import Queue, Empty, JoinableQueue
+
from gevent.server import StreamServer
from gevent.pool import Pool
from util import SslPatch
@@ -13,39 +18,45 @@ from util import SslPatch
# Server
socks = []
-data = os.urandom(1024*100)
+data = os.urandom(1024 * 100)
data += "\n"
-def handle(sock_raw, addr):
- socks.append(sock_raw)
- sock = sock_raw
- #sock = ctx.wrap_socket(sock, server_side=True)
- #if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
- # sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem', certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
- #fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
- try:
- while True:
- line = sock.recv(16*1024)
- if not line: break
- if line == "bye\n":
- break
- elif line == "gotssl\n":
- sock.sendall("yes\n")
- sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='data/key-rsa.pem', certfile='data/cert-rsa.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
- else:
- sock.sendall(data)
- except Exception, err:
- print err
- try:
- sock.shutdown(gevent.socket.SHUT_WR)
- sock.close()
- except:
- pass
- socks.remove(sock_raw)
-pool = Pool(1000) # do not accept more than 10000 connections
-server = StreamServer(('127.0.0.1', 1234), handle) #
-server.start()
+def handle(sock_raw, addr):
+ socks.append(sock_raw)
+ sock = sock_raw
+ # sock = ctx.wrap_socket(sock, server_side=True)
+ # if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
+ # sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem',
+ # certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
+ # fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
+ try:
+ while True:
+ line = sock.recv(16 * 1024)
+ if not line:
+ break
+ if line == "bye\n":
+ break
+ elif line == "gotssl\n":
+ sock.sendall("yes\n")
+ sock = gevent.ssl.wrap_socket(
+ sock_raw, server_side=True, keyfile='data/key-rsa.pem', certfile='data/cert-rsa.pem',
+ ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1
+ )
+ else:
+ sock.sendall(data)
+ except Exception, err:
+ print err
+ try:
+ sock.shutdown(gevent.socket.SHUT_WR)
+ sock.close()
+ except:
+ pass
+ socks.remove(sock_raw)
+
+pool = Pool(1000) # do not accept more than 1000 connections
+server = StreamServer(('127.0.0.1', 1234), handle)
+server.start()
# Client
@@ -54,92 +65,97 @@ server.start()
total_num = 0
total_bytes = 0
clipher = None
-ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
+ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:" + \
+ "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
# ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+
def getData():
- global total_num, total_bytes, clipher
- data = None
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- #sock = socket.ssl(s)
- #sock = ssl.wrap_socket(sock)
- sock.connect(("127.0.0.1", 1234))
- #sock.do_handshake()
- #clipher = sock.cipher()
- sock.send("gotssl\n")
- if sock.recv(128) == "yes\n":
- sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
- sock.do_handshake()
- clipher = sock.cipher()
+ global total_num, total_bytes, clipher
+ data = None
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ # sock = socket.ssl(s)
+ # sock = ssl.wrap_socket(sock)
+ sock.connect(("127.0.0.1", 1234))
+ # sock.do_handshake()
+ # clipher = sock.cipher()
+ sock.send("gotssl\n")
+ if sock.recv(128) == "yes\n":
+ sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
+ sock.do_handshake()
+ clipher = sock.cipher()
+ for req in range(100):
+ sock.sendall("req\n")
+ buff = StringIO.StringIO()
+ data = sock.recv(16 * 1024)
+ buff.write(data)
+ if not data:
+ break
+ while not data.endswith("\n"):
+ data = sock.recv(16 * 1024)
+ if not data:
+ break
+ buff.write(data)
+ total_num += 1
+ total_bytes += buff.tell()
+ if not data:
+ print "No data"
- for req in range(100):
- sock.sendall("req\n")
- buff = StringIO.StringIO()
- data = sock.recv(16*1024)
- buff.write(data)
- if not data:
- break
- while not data.endswith("\n"):
- data = sock.recv(16*1024)
- if not data: break
- buff.write(data)
- total_num += 1
- total_bytes += buff.tell()
- if not data:
- print "No data"
-
- sock.shutdown(gevent.socket.SHUT_WR)
- sock.close()
+ sock.shutdown(gevent.socket.SHUT_WR)
+ sock.close()
s = time.time()
+
def info():
- import psutil, os
- process = psutil.Process(os.getpid())
- if "memory_info" in dir(process):
- memory_info = process.memory_info
- else:
- memory_info = process.get_memory_info
- while 1:
- print total_num, "req", (total_bytes/1024), "kbytes", "transfered in", time.time()-s, "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)
- time.sleep(1)
+ import psutil
+ import os
+ process = psutil.Process(os.getpid())
+ if "memory_info" in dir(process):
+ memory_info = process.memory_info
+ else:
+ memory_info = process.get_memory_info
+ while 1:
+ print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s,
+ print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)
+ time.sleep(1)
gevent.spawn(info)
for test in range(10):
- clients = []
- for i in range(10): # Thread
- clients.append(gevent.spawn(getData))
- gevent.joinall(clients)
+ clients = []
+ for i in range(10): # Thread
+ clients.append(gevent.spawn(getData))
+ gevent.joinall(clients)
-print total_num, "req", (total_bytes/1024), "kbytes", "transfered in", time.time()-s
+print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s
# Separate client/server process:
# 10*10*100:
-# Raw: 10000 req 1000009 kbytes transfered in 5.39999985695
+# Raw: 10000 req 1000009 kbytes transfered in 5.39999985695
# RSA 2048: 10000 req 1000009 kbytes transfered in 27.7890000343 using ('ECDHE-RSA-AES256-SHA', 'TLSv1/SSLv3', 256)
-# ECC: 10000 req 1000009 kbytes transfered in 26.1959998608 using ('ECDHE-ECDSA-AES256-SHA', 'TLSv1/SSLv3', 256)
-# ECC: 10000 req 1000009 kbytes transfered in 28.2410001755 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 13.3828125
+# ECC: 10000 req 1000009 kbytes transfered in 26.1959998608 using ('ECDHE-ECDSA-AES256-SHA', 'TLSv1/SSLv3', 256)
+# ECC: 10000 req 1000009 kbytes transfered in 28.2410001755 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 13.3828125
#
# 10*100*10:
-# Raw: 10000 req 1000009 kbytes transfered in 7.02700018883 Mem: 14.328125
-# RSA 2048: 10000 req 1000009 kbytes transfered in 44.8860001564 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.078125
-# ECC: 10000 req 1000009 kbytes transfered in 37.9430000782 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.0234375
+# Raw: 10000 req 1000009 kbytes transfered in 7.02700018883 Mem: 14.328125
+# RSA 2048: 10000 req 1000009 kbytes transfered in 44.8860001564 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.078125
+# ECC: 10000 req 1000009 kbytes transfered in 37.9430000782 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.0234375
#
# 1*100*100:
-# Raw: 10000 req 1000009 kbytes transfered in 4.64400005341 Mem: 14.06640625
-# RSA: 10000 req 1000009 kbytes transfered in 24.2300000191 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 19.7734375
-# ECC: 10000 req 1000009 kbytes transfered in 22.8849999905 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 17.8125
-# AES128: 10000 req 1000009 kbytes transfered in 21.2839999199 using ('AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.1328125
-# ECC+128: 10000 req 1000009 kbytes transfered in 20.496999979 using ('ECDHE-ECDSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.40234375
+# Raw: 10000 req 1000009 kbytes transfered in 4.64400005341 Mem: 14.06640625
+# RSA: 10000 req 1000009 kbytes transfered in 24.2300000191 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 19.7734375
+# ECC: 10000 req 1000009 kbytes transfered in 22.8849999905 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 17.8125
+# AES128: 10000 req 1000009 kbytes transfered in 21.2839999199 using ('AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.1328125
+# ECC+128: 10000 req 1000009 kbytes transfered in 20.496999979 using ('ECDHE-ECDSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.40234375
#
#
# Single process:
# 1*100*100
-# RSA: 10000 req 1000009 kbytes transfered in 41.7899999619 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 26.91015625
+# RSA: 10000 req 1000009 kbytes transfered in 41.7899999619 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 26.91015625
#
# 10*10*100
-# RSA: 10000 req 1000009 kbytes transfered in 40.1640000343 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.94921875
\ No newline at end of file
+# RSA: 10000 req 1000009 kbytes transfered in 40.1640000343 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.94921875
diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py
index 66c34ffc..be6abca5 100644
--- a/src/Ui/UiRequest.py
+++ b/src/Ui/UiRequest.py
@@ -1,4 +1,10 @@
-import time, re, os, mimetypes, json, cgi
+import time
+import re
+import os
+import mimetypes
+import json
+import cgi
+
from Config import config
from Site import SiteManager
from User import UserManager
@@ -6,400 +12,417 @@ from Plugin import PluginManager
from Ui.UiWebsocket import UiWebsocket
status_texts = {
- 200: "200 OK",
- 400: "400 Bad Request",
- 403: "403 Forbidden",
- 404: "404 Not Found",
- 500: "500 Internal Server Error",
+ 200: "200 OK",
+ 400: "400 Bad Request",
+ 403: "403 Forbidden",
+ 404: "404 Not Found",
+ 500: "500 Internal Server Error",
}
@PluginManager.acceptPlugins
class UiRequest(object):
- def __init__(self, server, get, env, start_response):
- if server:
- self.server = server
- self.log = server.log
- self.get = get # Get parameters
- self.env = env # Enviroment settings
- self.start_response = start_response # Start response function
- self.user = None
-
-
- # Call the request handler function base on path
- def route(self, path):
- if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: # Restict Ui access by ip
- return self.error403()
-
- path = re.sub("^http://zero[/]+", "/", path) # Remove begining http://zero/ for chrome extension
- path = re.sub("^http://", "/", path) # Remove begining http for chrome extension .bit access
-
- if path == "/":
- return self.actionIndex()
- elif path.endswith("favicon.ico"):
- return self.actionFile("src/Ui/media/img/favicon.ico")
- # Media
- elif path.startswith("/uimedia/"):
- return self.actionUiMedia(path)
- elif path.startswith("/media"):
- return self.actionSiteMedia(path)
- # Websocket
- elif path == "/Websocket":
- return self.actionWebsocket()
- # Debug
- elif path == "/Debug" and config.debug:
- return self.actionDebug()
- elif path == "/Console" and config.debug:
- return self.actionConsole()
- # Site media wrapper
- else:
- body = self.actionWrapper(path)
- if body:
- return body
- else:
- func = getattr(self, "action"+path.lstrip("/"), None) # Check if we have action+request_path function
- if func:
- return func()
- else:
- return self.error404(path)
-
-
- # The request is proxied by chrome extension
- def isProxyRequest(self):
- return self.env["PATH_INFO"].startswith("http://")
-
-
- def isAjaxRequest(self):
- return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"
-
-
- # Get mime by filename
- def getContentType(self, file_name):
- content_type = mimetypes.guess_type(file_name)[0]
- if not content_type:
- if file_name.endswith("json"): # Correct json header
- content_type = "application/json"
- else:
- content_type = "application/octet-stream"
- return content_type
-
-
- # Returns: Cookies based on self.env
- def getCookies(self):
- raw_cookies = self.env.get('HTTP_COOKIE')
- if raw_cookies:
- cookies = cgi.parse_qsl(raw_cookies)
- return {key.strip(): val for key, val in cookies}
- else:
- return {}
-
-
- def getCurrentUser(self):
- if self.user: return self.user # Cache
- self.user = UserManager.user_manager.get() # Get user
- if not self.user:
- self.user = UserManager.user_manager.create()
- return self.user
-
-
- # Send response headers
- def sendHeader(self, status=200, content_type="text/html", extra_headers=[]):
- if content_type == "text/html": content_type = "text/html; charset=utf-8"
- headers = []
- headers.append(("Version", "HTTP/1.1"))
- headers.append(("Access-Control-Allow-Origin", "*")) # Allow json access
- if self.env["REQUEST_METHOD"] == "OPTIONS":
- headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept")) # Allow json access
-
- if (self.env["REQUEST_METHOD"] == "OPTIONS" or not self.isAjaxRequest()) and status == 200 and (content_type == "text/css" or content_type.startswith("application") or self.env["REQUEST_METHOD"] == "OPTIONS" or content_type.startswith("image")): # Cache Css, Js, Image files for 10min
- headers.append(("Cache-Control", "public, max-age=600")) # Cache 10 min
- else: # Images, Css, Js
- headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all
- #headers.append(("Cache-Control", "public, max-age=604800")) # Cache 1 week
- headers.append(("Content-Type", content_type))
- for extra_header in extra_headers:
- headers.append(extra_header)
- return self.start_response(status_texts[status], headers)
-
-
- # Renders a template
- def render(self, template_path, *args, **kwargs):
- #template = SimpleTemplate(open(template_path), lookup=[os.path.dirname(template_path)])
- #yield str(template.render(*args, **kwargs).encode("utf8"))
- template = open(template_path).read().decode("utf8")
- return template.format(**kwargs).encode("utf8")
-
-
- # - Actions -
-
- # Redirect to an url
- def actionRedirect(self, url):
- self.start_response('301 Redirect', [('Location', url)])
- yield "Location changed: %s" % url
-
-
- def actionIndex(self):
- return self.actionRedirect("/"+config.homepage)
-
-
- # Render a file from media with iframe site wrapper
- def actionWrapper(self, path, extra_headers=None):
- if not extra_headers: extra_headers = []
- if self.get.get("wrapper") == "False": return self.actionSiteMedia("/media"+path) # Only serve html files with frame
-
- match = re.match("/(?P[A-Za-z0-9\._-]+)(?P/.*|$)", path)
- if match:
- address = match.group("address")
- inner_path = match.group("inner_path").lstrip("/")
- if "." in inner_path and not inner_path.endswith(".html"): return self.actionSiteMedia("/media"+path) # Only serve html files with frame
- if self.env.get("HTTP_X_REQUESTED_WITH"): return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper
-
- file_inner_path = inner_path
- if not file_inner_path: file_inner_path = "index.html" # If inner path defaults to index.html
-
- if not inner_path and not path.endswith("/"): inner_path = address+"/" # Fix relative resources loading if missing / end of site address
- inner_path = re.sub(".*/(.+)", "\\1", inner_path) # Load innerframe relative to current url
-
- site = SiteManager.site_manager.get(address)
-
- if site and site.content_manager.contents.get("content.json") and (not site.getReachableBadFiles() or site.settings["own"]): # Its downloaded or own
- title = site.content_manager.contents["content.json"]["title"]
- else:
- title = "Loading %s..." % address
- site = SiteManager.site_manager.need(address) # Start download site
-
- if not site: return False
-
- #extra_headers.append(("X-Frame-Options", "DENY"))
-
- self.sendHeader(extra_headers=extra_headers[:])
-
- # Wrapper variable inits
- query_string = ""
- body_style = ""
- meta_tags = ""
-
- if self.env.get("QUERY_STRING"): query_string = "?"+self.env["QUERY_STRING"]+"&wrapper=False"
- else: query_string = "?wrapper=False"
-
- if self.isProxyRequest(): # Its a remote proxy request
- if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1
- server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
- else: # Remote client, use SERVER_NAME as server's real address
- server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
- homepage = "http://zero/"+config.homepage
- else: # Use relative path
- server_url = ""
- homepage = "/"+config.homepage
-
- if site.content_manager.contents.get("content.json") : # Got content.json
- content = site.content_manager.contents["content.json"]
- if content.get("background-color"):
- body_style += "background-color: "+cgi.escape(site.content_manager.contents["content.json"]["background-color"], True)+";"
- if content.get("viewport"):
-            meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)
-
- return self.render("src/Ui/template/wrapper.html",
- server_url=server_url,
- inner_path=inner_path,
- file_inner_path=file_inner_path,
- address=address,
- title=title,
- body_style=body_style,
- meta_tags=meta_tags,
- query_string=query_string,
- wrapper_key=site.settings["wrapper_key"],
- permissions=json.dumps(site.settings["permissions"]),
- show_loadingscreen=json.dumps(not site.storage.isFile(file_inner_path)),
- rev=config.rev,
- homepage=homepage
- )
-
- else: # Bad url
- return False
-
-
- # Returns if media request allowed from that referer
- def isMediaRequestAllowed(self, site_address, referer):
- referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
- return referer_path.startswith("/"+site_address)
-
-
- # Serve a media for site
- def actionSiteMedia(self, path):
- path = path.replace("/index.html/", "/") # Base Backward compatibility fix
- if path.endswith("/"): path = path+"index.html"
-
- match = re.match("/media/(?P[A-Za-z0-9\._-]+)/(?P.*)", path)
-
- referer = self.env.get("HTTP_REFERER")
- if referer and match: # Only allow same site to receive media
- if not self.isMediaRequestAllowed(match.group("address"), referer):
- return self.error403("Media referrer error") # Referrer not starts same address as requested path
-
- if match: # Looks like a valid path
- address = match.group("address")
- file_path = "%s/%s/%s" % (config.data_dir, address, match.group("inner_path"))
- allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address)) # Only files within data/sitehash allowed
- data_dir = os.path.abspath("data") # No files from data/ allowed
- if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir) or allowed_dir == data_dir: # File not in allowed path
- return self.error403()
- else:
- if config.debug and file_path.split("/")[-1].startswith("all."): # When debugging merge *.css to all.css and *.js to all.js
- site = self.server.sites.get(address)
- if site.settings["own"]:
- from Debug import DebugMedia
- DebugMedia.merge(file_path)
- if os.path.isfile(file_path): # File exits
- #self.sendHeader(content_type=self.getContentType(file_path)) # ?? Get Exception without this
- return self.actionFile(file_path)
- else: # File not exits, try to download
- site = SiteManager.site_manager.need(address, all_file=False)
- result = site.needFile(match.group("inner_path"), priority=1) # Wait until file downloads
- if result:
- #self.sendHeader(content_type=self.getContentType(file_path))
- return self.actionFile(file_path)
- else:
- self.log.debug("File not found: %s" % match.group("inner_path"))
- return self.error404(match.group("inner_path"))
-
- else: # Bad url
- return self.error404(path)
-
-
- # Serve a media for ui
- def actionUiMedia(self, path):
- match = re.match("/uimedia/(?P.*)", path)
- if match: # Looks like a valid path
- file_path = "src/Ui/media/%s" % match.group("inner_path")
- allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed
- if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path
- return self.error403()
- else:
- if config.debug and match.group("inner_path").startswith("all."): # When debugging merge *.css to all.css and *.js to all.js
- from Debug import DebugMedia
- DebugMedia.merge(file_path)
- return self.actionFile(file_path)
- else: # Bad url
- return self.error400()
-
-
- # Stream a file to client
- def actionFile(self, file_path, block_size = 64*1024):
- if os.path.isfile(file_path):
- # Try to figure out content type by extension
- content_type = self.getContentType(file_path)
-
- self.sendHeader(content_type = content_type) # TODO: Dont allow external access: extra_headers=[("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")]
- if self.env["REQUEST_METHOD"] != "OPTIONS":
- file = open(file_path, "rb")
- while 1:
- try:
- block = file.read(block_size)
- if block:
- yield block
- else:
- raise StopIteration
- except StopIteration:
- file.close()
- break
- else: # File not exits
- yield self.error404(file_path)
-
-
- # On websocket connection
- def actionWebsocket(self):
- ws = self.env.get("wsgi.websocket")
- if ws:
- wrapper_key = self.get["wrapper_key"]
- # Find site by wrapper_key
- site = None
- for site_check in self.server.sites.values():
- if site_check.settings["wrapper_key"] == wrapper_key: site = site_check
-
- if site: # Correct wrapper key
- user = self.getCurrentUser()
- if not user:
- self.log.error("No user found")
- return self.error403()
- ui_websocket = UiWebsocket(ws, site, self.server, user)
- site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events
- ui_websocket.start()
- for site_check in self.server.sites.values(): # Remove websocket from every site (admin sites allowed to join other sites event channels)
- if ui_websocket in site_check.websockets:
- site_check.websockets.remove(ui_websocket)
- return "Bye."
- else: # No site found by wrapper key
- self.log.error("Wrapper key not found: %s" % wrapper_key)
- return self.error403()
- else:
- start_response("400 Bad Request", [])
- return "Not a websocket!"
-
-
- # Debug last error
- def actionDebug(self):
- # Raise last error from DebugHook
- import sys
- last_error = sys.modules["main"].DebugHook.last_error
- if last_error:
- raise last_error[0], last_error[1], last_error[2]
- else:
- self.sendHeader()
- return "No error! :)"
-
-
- # Just raise an error to get console
- def actionConsole(self):
- import sys
- sites = self.server.sites
- main = sys.modules["main"]
- raise Exception("Here is your console")
-
-
- # - Tests -
-
- def actionTestStream(self):
- self.sendHeader()
- yield " "*1080 # Overflow browser's buffer
- yield "He"
- time.sleep(1)
- yield "llo!"
- yield "Running websockets: %s" % len(self.server.websockets)
- self.server.sendMessage("Hello!")
-
-
- # - Errors -
-
- # Send bad request error
- def error400(self):
- self.sendHeader(400)
- return "Bad Request"
-
-
- # You are not allowed to access this
- def error403(self, message="Forbidden"):
- self.sendHeader(403)
- return message
-
-
- # Send file not found error
- def error404(self, path = None):
- self.sendHeader(404)
- return "Not Found: %s" % path.encode("utf8")
-
-
- # Internal server error
- def error500(self, message = ":("):
- self.sendHeader(500)
- return "Server error
%s" % cgi.escape(message)
+
+ def __init__(self, server, get, env, start_response):
+ if server:
+ self.server = server
+ self.log = server.log
+ self.get = get # Get parameters
+        self.env = env # Environment settings
+
+ self.start_response = start_response # Start response function
+ self.user = None
+
+ # Return posted variables as dict
+ def getPosted(self):
+ if self.env['REQUEST_METHOD'] == "POST":
+ return dict(cgi.parse_qsl(
+ self.env['wsgi.input'].readline().decode()
+ ))
+ else:
+ return {}
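+    # e.g. a request body of "key=val&flag=1" yields {"key": "val", "flag": "1"};
+    # note that only the first line of the body is read, so multi-line POST
+    # payloads are truncated here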
+
+    # Call the request handler function based on path
+ def route(self, path):
+        if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: # Restrict UI access by IP
+ return self.error403()
+
+ path = re.sub("^http://zero[/]+", "/", path) # Remove begining http://zero/ for chrome extension
+ path = re.sub("^http://", "/", path) # Remove begining http for chrome extension .bit access
+
+ if path == "/":
+ return self.actionIndex()
+ elif path.endswith("favicon.ico"):
+ return self.actionFile("src/Ui/media/img/favicon.ico")
+ # Media
+ elif path.startswith("/uimedia/"):
+ return self.actionUiMedia(path)
+ elif path.startswith("/media"):
+ return self.actionSiteMedia(path)
+ # Websocket
+ elif path == "/Websocket":
+ return self.actionWebsocket()
+ # Debug
+ elif path == "/Debug" and config.debug:
+ return self.actionDebug()
+ elif path == "/Console" and config.debug:
+ return self.actionConsole()
+ # Site media wrapper
+ else:
+ body = self.actionWrapper(path)
+ if body:
+ return body
+ else:
+ func = getattr(self, "action" + path.lstrip("/"), None) # Check if we have action+request_path function
+ if func:
+ return func()
+ else:
+ return self.error404(path)
+
+ # The request is proxied by chrome extension
+ def isProxyRequest(self):
+ return self.env["PATH_INFO"].startswith("http://")
+
+ def isAjaxRequest(self):
+ return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"
+
+ # Get mime by filename
+ def getContentType(self, file_name):
+ content_type = mimetypes.guess_type(file_name)[0]
+ if not content_type:
+ if file_name.endswith("json"): # Correct json header
+ content_type = "application/json"
+ else:
+ content_type = "application/octet-stream"
+ return content_type
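+    # e.g. "style.css" -> "text/css" via mimetypes; "content.json" ->
+    # "application/json" (via the endswith fallback when mimetypes does not
+    # know .json); unknown extensions -> "application/octet-stream"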
+
+ # Returns: Cookies based on self.env
+ def getCookies(self):
+ raw_cookies = self.env.get('HTTP_COOKIE')
+ if raw_cookies:
+ cookies = cgi.parse_qsl(raw_cookies)
+ return {key.strip(): val for key, val in cookies}
+ else:
+ return {}
+
+ def getCurrentUser(self):
+ if self.user:
+ return self.user # Cache
+ self.user = UserManager.user_manager.get() # Get user
+ if not self.user:
+ self.user = UserManager.user_manager.create()
+ return self.user
+
+ # Send response headers
+ def sendHeader(self, status=200, content_type="text/html", extra_headers=[]):
+ if content_type == "text/html":
+ content_type = "text/html; charset=utf-8"
+ headers = []
+ headers.append(("Version", "HTTP/1.1"))
+ headers.append(("Access-Control-Allow-Origin", "*")) # Allow json access
+ if self.env["REQUEST_METHOD"] == "OPTIONS":
+ # Allow json access
+ headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept"))
+
+ cacheable_type = (
+ content_type == "text/css" or content_type.startswith("image") or
+ self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript"
+ )
+
+ if status == 200 and cacheable_type: # Cache Css, Js, Image files for 10min
+ headers.append(("Cache-Control", "public, max-age=600")) # Cache 10 min
+        else: # Everything else (e.g. html): no caching
+ headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all
+ headers.append(("Content-Type", content_type))
+ for extra_header in extra_headers:
+ headers.append(extra_header)
+ return self.start_response(status_texts[status], headers)
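+    # e.g. a 200 response typed "text/css" or "image/png" is sent with
+    # "Cache-Control: public, max-age=600", while an HTML page falls into
+    # the no-cache branch above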
+
+ # Renders a template
+ def render(self, template_path, *args, **kwargs):
+ template = open(template_path).read().decode("utf8")
+ return template.format(**kwargs).encode("utf8")
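+    # Note: str.format() treats "{" and "}" as placeholders, so any literal
+    # braces in the template (e.g. inline JS) have to be escaped as "{{"/"}}"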
+
+ # - Actions -
+
+ # Redirect to an url
+ def actionRedirect(self, url):
+ self.start_response('301 Redirect', [('Location', url)])
+ yield "Location changed: %s" % url
+
+ def actionIndex(self):
+ return self.actionRedirect("/" + config.homepage)
+
+ # Render a file from media with iframe site wrapper
+ def actionWrapper(self, path, extra_headers=None):
+ if not extra_headers:
+ extra_headers = []
+ if self.get.get("wrapper") == "False":
+ return self.actionSiteMedia("/media" + path) # Only serve html files with frame
+
+ match = re.match("/(?P[A-Za-z0-9\._-]+)(?P/.*|$)", path)
+ if match:
+ address = match.group("address")
+ inner_path = match.group("inner_path").lstrip("/")
+ if "." in inner_path and not inner_path.endswith(".html"):
+ return self.actionSiteMedia("/media" + path) # Only serve html files with frame
+ if self.env.get("HTTP_X_REQUESTED_WITH"):
+ return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper
+
+ file_inner_path = inner_path
+ if not file_inner_path:
+ file_inner_path = "index.html" # If inner path defaults to index.html
+
+ if not inner_path and not path.endswith("/"):
+ inner_path = address + "/" # Fix relative resources loading if missing / end of site address
+ inner_path = re.sub(".*/(.+)", "\\1", inner_path) # Load innerframe relative to current url
+
+ site = SiteManager.site_manager.get(address)
+
+ if (
+ site and site.content_manager.contents.get("content.json") and
+ (not site.getReachableBadFiles() or site.settings["own"])
+        ): # It's downloaded or our own site
+ title = site.content_manager.contents["content.json"]["title"]
+ else:
+ title = "Loading %s..." % address
+ site = SiteManager.site_manager.need(address) # Start download site
+
+ if not site:
+ return False
+
+ self.sendHeader(extra_headers=extra_headers[:])
+
+ # Wrapper variable inits
+ query_string = ""
+ body_style = ""
+ meta_tags = ""
+
+ if self.env.get("QUERY_STRING"):
+ query_string = "?" + self.env["QUERY_STRING"] + "&wrapper=False"
+ else:
+ query_string = "?wrapper=False"
+
+        if self.isProxyRequest(): # It's a remote proxy request
+ if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1
+ server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
+ else: # Remote client, use SERVER_NAME as server's real address
+ server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
+ homepage = "http://zero/" + config.homepage
+ else: # Use relative path
+ server_url = ""
+ homepage = "/" + config.homepage
+
+ if site.content_manager.contents.get("content.json"): # Got content.json
+ content = site.content_manager.contents["content.json"]
+ if content.get("background-color"):
+ body_style += "background-color: %s;" % \
+ cgi.escape(site.content_manager.contents["content.json"]["background-color"], True)
+ if content.get("viewport"):
+                meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)
+
+ return self.render(
+ "src/Ui/template/wrapper.html",
+ server_url=server_url,
+ inner_path=inner_path,
+ file_inner_path=file_inner_path,
+ address=address,
+ title=title,
+ body_style=body_style,
+ meta_tags=meta_tags,
+ query_string=query_string,
+ wrapper_key=site.settings["wrapper_key"],
+ permissions=json.dumps(site.settings["permissions"]),
+ show_loadingscreen=json.dumps(not site.storage.isFile(file_inner_path)),
+ rev=config.rev,
+ homepage=homepage
+ )
+
+ else: # Bad url
+ return False
+
+ # Returns if media request allowed from that referer
+ def isMediaRequestAllowed(self, site_address, referer):
+ referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
+ return referer_path.startswith("/" + site_address)
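+    # e.g. a referer of "http://127.0.0.1:43110/media/1SiteAddr/css/all.css"
+    # (1SiteAddr is a placeholder address) reduces to "/1SiteAddr/css/all.css",
+    # which must start with "/" + site_address for the request to be allowed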
+
+ # Serve a media for site
+ def actionSiteMedia(self, path):
+ path = path.replace("/index.html/", "/") # Base Backward compatibility fix
+ if path.endswith("/"):
+ path = path + "index.html"
+
+ match = re.match("/media/(?P[A-Za-z0-9\._-]+)/(?P.*)", path)
+
+ referer = self.env.get("HTTP_REFERER")
+ if referer and match: # Only allow same site to receive media
+ if not self.isMediaRequestAllowed(match.group("address"), referer):
+ return self.error403("Media referrer error") # Referrer not starts same address as requested path
+
+ if match: # Looks like a valid path
+ address = match.group("address")
+ file_path = "%s/%s/%s" % (config.data_dir, address, match.group("inner_path"))
+ allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address)) # Only files within data/sitehash allowed
+ data_dir = os.path.abspath("data") # No files from data/ allowed
+ if (
+ ".." in file_path
+ or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir)
+ or allowed_dir == data_dir
+ ): # File not in allowed path
+ return self.error403()
+ else:
+ if config.debug and file_path.split("/")[-1].startswith("all."):
+ # If debugging merge *.css to all.css and *.js to all.js
+ site = self.server.sites.get(address)
+ if site.settings["own"]:
+ from Debug import DebugMedia
+ DebugMedia.merge(file_path)
+                if os.path.isfile(file_path): # File exists
+ # self.sendHeader(content_type=self.getContentType(file_path)) # ?? Get Exception without this
+ return self.actionFile(file_path)
+                else: # File does not exist, try to download
+ site = SiteManager.site_manager.need(address, all_file=False)
+ result = site.needFile(match.group("inner_path"), priority=1) # Wait until file downloads
+ if result:
+ # self.sendHeader(content_type=self.getContentType(file_path))
+ return self.actionFile(file_path)
+ else:
+ self.log.debug("File not found: %s" % match.group("inner_path"))
+ return self.error404(match.group("inner_path"))
+
+ else: # Bad url
+ return self.error404(path)
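+    # (illustration) "/media/1SiteAddr/../users.json" is refused above: the
+    # ".." substring check fails it before the allowed_dir prefix comparison
+    # ever runs, so nothing outside data/<address> can be streamed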
+
+ # Serve a media for ui
+ def actionUiMedia(self, path):
+ match = re.match("/uimedia/(?P.*)", path)
+ if match: # Looks like a valid path
+ file_path = "src/Ui/media/%s" % match.group("inner_path")
+ allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed
+ if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
+ # File not in allowed path
+ return self.error403()
+ else:
+ if config.debug and match.group("inner_path").startswith("all."):
+ # If debugging merge *.css to all.css and *.js to all.js
+ from Debug import DebugMedia
+ DebugMedia.merge(file_path)
+ return self.actionFile(file_path)
+ else: # Bad url
+ return self.error400()
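+    # (illustration) "/uimedia/../../zeronet.py" is rejected by the ".."
+    # check above, so only files under src/Ui/media are ever served here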
+
+ # Stream a file to client
+ def actionFile(self, file_path, block_size=64 * 1024):
+ if os.path.isfile(file_path):
+ # Try to figure out content type by extension
+ content_type = self.getContentType(file_path)
+
+            # TODO: Don't allow external access: extra_headers=
+ # [("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")]
+ self.sendHeader(content_type=content_type)
+ if self.env["REQUEST_METHOD"] != "OPTIONS":
+ file = open(file_path, "rb")
+ while 1:
+ try:
+ block = file.read(block_size)
+ if block:
+ yield block
+ else:
+ raise StopIteration
+ except StopIteration:
+ file.close()
+ break
+        else: # File does not exist
+ yield self.error404(file_path)
+
+ # On websocket connection
+ def actionWebsocket(self):
+ ws = self.env.get("wsgi.websocket")
+ if ws:
+ wrapper_key = self.get["wrapper_key"]
+ # Find site by wrapper_key
+ site = None
+ for site_check in self.server.sites.values():
+ if site_check.settings["wrapper_key"] == wrapper_key:
+ site = site_check
+
+ if site: # Correct wrapper key
+ user = self.getCurrentUser()
+ if not user:
+ self.log.error("No user found")
+ return self.error403()
+ ui_websocket = UiWebsocket(ws, site, self.server, user)
+ site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events
+ ui_websocket.start()
+ for site_check in self.server.sites.values():
+ # Remove websocket from every site (admin sites allowed to join other sites event channels)
+ if ui_websocket in site_check.websockets:
+ site_check.websockets.remove(ui_websocket)
+ return "Bye."
+ else: # No site found by wrapper key
+ self.log.error("Wrapper key not found: %s" % wrapper_key)
+ return self.error403()
+ else:
+ self.start_response("400 Bad Request", [])
+ return "Not a websocket!"
+
+ # Debug last error
+ def actionDebug(self):
+ # Raise last error from DebugHook
+ import sys
+ last_error = sys.modules["main"].DebugHook.last_error
+ if last_error:
+ raise last_error[0], last_error[1], last_error[2]
+ else:
+ self.sendHeader()
+ return "No error! :)"
+
+ # Just raise an error to get console
+ def actionConsole(self):
+ import sys
+ sites = self.server.sites
+ main = sys.modules["main"]
+ raise Exception("Here is your console")
+
+ # - Tests -
+
+ def actionTestStream(self):
+ self.sendHeader()
+ yield " " * 1080 # Overflow browser's buffer
+ yield "He"
+ time.sleep(1)
+ yield "llo!"
+ yield "Running websockets: %s" % len(self.server.websockets)
+ self.server.sendMessage("Hello!")
+
+ # - Errors -
+
+ # Send bad request error
+ def error400(self):
+ self.sendHeader(400)
+ return "Bad Request"
+
+ # You are not allowed to access this
+ def error403(self, message="Forbidden"):
+ self.sendHeader(403)
+ return message
+
+ # Send file not found error
+ def error404(self, path=None):
+ self.sendHeader(404)
+ return "Not Found: %s" % path.encode("utf8")
+
+ # Internal server error
+ def error500(self, message=":("):
+ self.sendHeader(500)
+ return "Server error
%s" % cgi.escape(message)
# - Reload for easier development -
-#def reload():
- #import imp, sys
- #global UiWebsocket
- #UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket
- #reload(sys.modules["User.UserManager"])
- #UserManager.reloadModule()
- #self.user = UserManager.user_manager.getCurrent()
+# def reload():
+ # import imp, sys
+ # global UiWebsocket
+ # UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket
+ # reload(sys.modules["User.UserManager"])
+ # UserManager.reloadModule()
+ # self.user = UserManager.user_manager.getCurrent()
diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py
index de757fb9..280304d6 100644
--- a/src/Ui/UiServer.py
+++ b/src/Ui/UiServer.py
@@ -1,7 +1,11 @@
-import logging, time, cgi, string, random
+import logging
+import time
+import cgi
+
from gevent.pywsgi import WSGIServer
from gevent.pywsgi import WSGIHandler
from lib.geventwebsocket.handler import WebSocketHandler
+
from UiRequest import UiRequest
from Site import SiteManager
from Config import config
@@ -10,134 +14,129 @@ from Debug import Debug
# Skip websocket handler if not necessary
class UiWSGIHandler(WSGIHandler):
- def __init__(self, *args, **kwargs):
- self.server = args[2]
- super(UiWSGIHandler, self).__init__(*args, **kwargs)
- self.args = args
- self.kwargs = kwargs
+ def __init__(self, *args, **kwargs):
+ self.server = args[2]
+ super(UiWSGIHandler, self).__init__(*args, **kwargs)
+ self.args = args
+ self.kwargs = kwargs
- def run_application(self):
- self.server.sockets[self.client_address] = self.socket
- if "HTTP_UPGRADE" in self.environ: # Websocket request
- try:
- ws_handler = WebSocketHandler(*self.args, **self.kwargs)
- ws_handler.__dict__ = self.__dict__ # Match class variables
- ws_handler.run_application()
- except Exception, err:
- logging.error("UiWSGIHandler websocket error: %s" % Debug.formatException(err))
- if config.debug: # Allow websocket errors to appear on /Debug
- import sys
- del self.server.sockets[self.client_address]
- sys.modules["main"].DebugHook.handleError()
- else: # Standard HTTP request
- #print self.application.__class__.__name__
- try:
- super(UiWSGIHandler, self).run_application()
- except Exception, err:
- logging.error("UiWSGIHandler error: %s" % Debug.formatException(err))
- if config.debug: # Allow websocket errors to appear on /Debug
- import sys
- del self.server.sockets[self.client_address]
- sys.modules["main"].DebugHook.handleError()
- if self.client_address in self.server.sockets:
- del self.server.sockets[self.client_address]
+ def run_application(self):
+ self.server.sockets[self.client_address] = self.socket
+ if "HTTP_UPGRADE" in self.environ: # Websocket request
+ try:
+ ws_handler = WebSocketHandler(*self.args, **self.kwargs)
+ ws_handler.__dict__ = self.__dict__ # Match class variables
+ ws_handler.run_application()
+ except Exception, err:
+ logging.error("UiWSGIHandler websocket error: %s" % Debug.formatException(err))
+ if config.debug: # Allow websocket errors to appear on /Debug
+ import sys
+ del self.server.sockets[self.client_address]
+ sys.modules["main"].DebugHook.handleError()
+ else: # Standard HTTP request
+ # print self.application.__class__.__name__
+ try:
+ super(UiWSGIHandler, self).run_application()
+ except Exception, err:
+ logging.error("UiWSGIHandler error: %s" % Debug.formatException(err))
+ if config.debug: # Allow websocket errors to appear on /Debug
+ import sys
+ del self.server.sockets[self.client_address]
+ sys.modules["main"].DebugHook.handleError()
+ if self.client_address in self.server.sockets:
+ del self.server.sockets[self.client_address]
class UiServer:
- def __init__(self):
- self.ip = config.ui_ip
- self.port = config.ui_port
- if self.ip == "*": self.ip = "" # Bind all
- #self.sidebar_websockets = [] # Sidebar websocket connections
- #self.auth_key = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) # Global admin auth key
- self.sites = SiteManager.site_manager.list()
- self.log = logging.getLogger(__name__)
-
+ def __init__(self):
+ self.ip = config.ui_ip
+ self.port = config.ui_port
+ if self.ip == "*":
+ self.ip = "" # Bind all
+ self.sites = SiteManager.site_manager.list()
+ self.log = logging.getLogger(__name__)
- # Handle WSGI request
- def handleRequest(self, env, start_response):
- path = env["PATH_INFO"]
- if env.get("QUERY_STRING"):
- get = dict(cgi.parse_qsl(env['QUERY_STRING']))
- else:
- get = {}
- ui_request = UiRequest(self, get, env, start_response)
- if config.debug: # Let the exception catched by werkezung
- return ui_request.route(path)
- else: # Catch and display the error
- try:
- return ui_request.route(path)
- except Exception, err:
- logging.debug("UiRequest error: %s" % Debug.formatException(err))
- return ui_request.error500("Err: %s" % Debug.formatException(err))
+ # Handle WSGI request
+ def handleRequest(self, env, start_response):
+ path = env["PATH_INFO"]
+ if env.get("QUERY_STRING"):
+ get = dict(cgi.parse_qsl(env['QUERY_STRING']))
+ else:
+ get = {}
+ ui_request = UiRequest(self, get, env, start_response)
+        if config.debug: # Let the exception be caught by werkzeug
+ return ui_request.route(path)
+ else: # Catch and display the error
+ try:
+ return ui_request.route(path)
+ except Exception, err:
+ logging.debug("UiRequest error: %s" % Debug.formatException(err))
+ return ui_request.error500("Err: %s" % Debug.formatException(err))
+ # Reload the UiRequest class to prevent restarts in debug mode
+ def reload(self):
+ global UiRequest
+ import imp
+ import sys
+ reload(sys.modules["User.UserManager"])
+ reload(sys.modules["Ui.UiWebsocket"])
+ UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest
+ # UiRequest.reload()
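+    # (note) imp.load_source re-executes src/Ui/UiRequest.py and rebinds the
+    # module-global UiRequest, so the next handleRequest() call constructs
+    # requests from the freshly loaded class without restarting the server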
- # Reload the UiRequest class to prevent restarts in debug mode
- def reload(self):
- global UiRequest
- import imp, sys
- reload(sys.modules["User.UserManager"])
- reload(sys.modules["Ui.UiWebsocket"])
- UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest
- #UiRequest.reload()
+ # Bind and run the server
+ def start(self):
+ handler = self.handleRequest
+ if config.debug:
+ # Auto reload UiRequest on change
+ from Debug import DebugReloader
+ DebugReloader(self.reload)
- # Bind and run the server
- def start(self):
- handler = self.handleRequest
+ # Werkzeug Debugger
+ try:
+ from werkzeug.debug import DebuggedApplication
+ handler = DebuggedApplication(self.handleRequest, evalex=True)
+ except Exception, err:
+ self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err)
+ from Debug import DebugReloader
+ self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log
+ self.log.info("--------------------------------------")
+ self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port))
+ self.log.info("--------------------------------------")
- if config.debug:
- # Auto reload UiRequest on change
- from Debug import DebugReloader
- DebugReloader(self.reload)
+ if config.open_browser:
+ logging.info("Opening browser: %s...", config.open_browser)
+ import webbrowser
+ if config.open_browser == "default_browser":
+ browser = webbrowser.get()
+ else:
+ browser = webbrowser.get(config.open_browser)
+ browser.open("http://%s:%s" % (config.ui_ip, config.ui_port), new=2)
- # Werkzeug Debugger
- try:
- from werkzeug.debug import DebuggedApplication
- handler = DebuggedApplication(self.handleRequest, evalex=True)
- except Exception, err:
- self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err)
- from Debug import DebugReloader
- self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log
- self.log.info("--------------------------------------")
- self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port))
- self.log.info("--------------------------------------")
+ self.server = WSGIServer((self.ip.replace("*", ""), self.port), handler, handler_class=UiWSGIHandler, log=self.log)
+ self.server.sockets = {}
+ self.server.serve_forever()
+ self.log.debug("Stopped.")
- if config.open_browser:
- logging.info("Opening browser: %s...", config.open_browser)
- import webbrowser
- if config.open_browser == "default_browser":
- browser = webbrowser.get()
- else:
- browser = webbrowser.get(config.open_browser)
- browser.open("http://%s:%s" % (config.ui_ip, config.ui_port), new=2)
-
- self.server = WSGIServer((self.ip.replace("*", ""), self.port), handler, handler_class=UiWSGIHandler, log=self.log)
- self.server.sockets = {}
- self.server.serve_forever()
- self.log.debug("Stopped.")
-
-
- def stop(self):
- self.log.debug("Stopping...")
- # Close WS sockets
- if "clients" in dir(self.server):
- for client in self.server.clients.values():
- client.ws.close()
- # Close http sockets
- sock_closed = 0
- for sock in self.server.sockets.values():
- try:
- sock._sock.close()
- sock.close()
- sock_closed += 1
- except Exception, err:
- pass
- self.log.debug("Socket closed: %s" % sock_closed)
-
- self.server.socket.close()
- self.server.stop()
- time.sleep(1)
+ def stop(self):
+ self.log.debug("Stopping...")
+ # Close WS sockets
+ if "clients" in dir(self.server):
+ for client in self.server.clients.values():
+ client.ws.close()
+ # Close http sockets
+ sock_closed = 0
+ for sock in self.server.sockets.values():
+ try:
+ sock._sock.close()
+ sock.close()
+ sock_closed += 1
+ except Exception:
+ pass
+ self.log.debug("Socket closed: %s" % sock_closed)
+ self.server.socket.close()
+ self.server.stop()
+ time.sleep(1)
diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py
index bb2298b7..3fb7531d 100644
--- a/src/Ui/UiWebsocket.py
+++ b/src/Ui/UiWebsocket.py
@@ -1,521 +1,535 @@
-import json, gevent, time, sys, hashlib
+import json
+import time
+import sys
+import hashlib
+
+import gevent
+
from Config import config
from Site import SiteManager
from Debug import Debug
from util import QueryJson, RateLimit
from Plugin import PluginManager
+
@PluginManager.acceptPlugins
class UiWebsocket(object):
- def __init__(self, ws, site, server, user):
- self.ws = ws
- self.site = site
- self.user = user
- self.log = site.log
- self.server = server
- self.next_message_id = 1
- self.waiting_cb = {} # Waiting for callback. Key: message_id, Value: function pointer
- self.channels = [] # Channels joined to
- self.sending = False # Currently sending to client
- self.send_queue = [] # Messages to send to client
-
-
- # Start listener loop
- def start(self):
- ws = self.ws
- if self.site.address == config.homepage and not self.site.page_requested: # Add open fileserver port message or closed port error to homepage at first request after start
- if sys.modules["main"].file_server.port_opened == True:
- self.site.notifications.append(["done", "Congratulation, your port "+str(config.fileserver_port)+" is opened.
You are full member of ZeroNet network!", 10000])
- elif sys.modules["main"].file_server.port_opened == False:
- self.site.notifications.append(["error", "Your network connection is restricted. Please, open "+str(config.fileserver_port)+" port
on your router to become full member of ZeroNet network.", 0])
- self.site.page_requested = True # Dont add connection notification anymore
-
- for notification in self.site.notifications: # Send pending notification messages
- self.cmd("notification", notification)
- self.site.notifications = []
- while True:
- try:
- message = ws.receive()
- if message:
- self.handleRequest(message)
- except Exception, err:
- if err.message != 'Connection is already closed':
- if config.debug: # Allow websocket errors to appear on /Debug
- sys.modules["main"].DebugHook.handleError()
- self.log.error("WebSocket error: %s" % Debug.formatException(err))
- return "Bye."
-
-
- # Event in a channel
- def event(self, channel, *params):
- if channel in self.channels: # We are joined to channel
- if channel == "siteChanged":
- site = params[0] # Triggerer site
- site_info = self.formatSiteInfo(site)
- if len(params) > 1 and params[1]: # Extra data
- site_info.update(params[1])
- self.cmd("setSiteInfo", site_info)
-
-
- # Send response to client (to = message.id)
- def response(self, to, result):
- self.send({"cmd": "response", "to": to, "result": result})
-
-
- # Send a command
- def cmd(self, cmd, params={}, cb = None):
- self.send({"cmd": cmd, "params": params}, cb)
-
-
- # Encode to json and send message
- def send(self, message, cb = None):
- message["id"] = self.next_message_id # Add message id to allow response
- self.next_message_id += 1
- if cb: # Callback after client responsed
- self.waiting_cb[message["id"]] = cb
- if self.sending: return # Already sending
- self.send_queue.append(message)
- try:
- while self.send_queue:
- self.sending = True
- message = self.send_queue.pop(0)
- self.ws.send(json.dumps(message))
- self.sending = False
- except Exception, err:
- self.log.debug("Websocket send error: %s" % Debug.formatException(err))
-
-
- # Handle incoming messages
- def handleRequest(self, data):
- req = json.loads(data)
-
- cmd = req.get("cmd")
- params = req.get("params")
- permissions = self.site.settings["permissions"]
- if req["id"] >= 1000000: # Its a wrapper command, allow admin commands
- permissions = permissions[:]
- permissions.append("ADMIN")
-
- admin_commands = ("sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteClone", "channelJoinAllsite", "serverUpdate", "certSet")
-
- if cmd == "response": # It's a response to a command
- return self.actionResponse(req["to"], req["result"])
- elif cmd in admin_commands and "ADMIN" not in permissions: # Admin commands
- return self.response(req["id"], "You don't have permission to run %s" % cmd)
- else: # Normal command
- func_name = "action" + cmd[0].upper() + cmd[1:]
- func = getattr(self, func_name, None)
- if not func: # Unknown command
- self.response(req["id"], "Unknown command: %s" % cmd)
- return
-
- # Support calling as named, unnamed paramters and raw first argument too
- if type(params) is dict:
- func(req["id"], **params)
- elif type(params) is list:
- func(req["id"], *params)
- else:
- func(req["id"], params)
-
-
- # Format site info
- def formatSiteInfo(self, site, create_user=True):
- content = site.content_manager.contents.get("content.json")
- if content: # Remove unnecessary data transfer
- content = content.copy()
- content["files"] = len(content.get("files", {}))
- content["includes"] = len(content.get("includes", {}))
- if "sign" in content: del(content["sign"])
- if "signs" in content: del(content["signs"])
- if "signers_sign" in content: del(content["signers_sign"])
-
- settings = site.settings.copy()
- del settings["wrapper_key"] # Dont expose wrapper key
- del settings["auth_key"] # Dont send auth key twice
-
- ret = {
- "auth_key": self.site.settings["auth_key"], # Obsolete, will be removed
- "auth_key_sha512": hashlib.sha512(self.site.settings["auth_key"]).hexdigest()[0:64], # Obsolete, will be removed
- "auth_address": self.user.getAuthAddress(site.address, create=create_user),
- "cert_user_id": self.user.getCertUserId(site.address),
- "address": site.address,
- "settings": settings,
- "content_updated": site.content_updated,
- "bad_files": len(site.bad_files),
- "size_limit": site.getSizeLimit(),
- "next_size_limit": site.getNextSizeLimit(),
- "peers": site.settings.get("peers", len(site.peers)),
- "started_task_num": site.worker_manager.started_task_num,
- "tasks": len(site.worker_manager.tasks),
- "workers": len(site.worker_manager.workers),
- "content": content
- }
- if site.settings["own"]: ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey"))
- if site.settings["serving"] and content: ret["peers"] += 1 # Add myself if serving
- return ret
-
-
- def formatServerInfo(self):
- return {
- "ip_external": bool(sys.modules["main"].file_server.port_opened),
- "platform": sys.platform,
- "fileserver_ip": config.fileserver_ip,
- "fileserver_port": config.fileserver_port,
- "ui_ip": config.ui_ip,
- "ui_port": config.ui_port,
- "version": config.version,
- "rev": config.rev,
- "debug": config.debug,
- "plugins": PluginManager.plugin_manager.plugin_names
- }
-
-
- # - Actions -
-
- # Do callback on response {"cmd": "response", "to": message_id, "result": result}
- def actionResponse(self, to, result):
- if to in self.waiting_cb:
- self.waiting_cb[to](result) # Call callback function
- else:
- self.log.error("Websocket callback not found: %s, %s" % (to, result))
-
-
- # Send a simple pong answer
- def actionPing(self, to):
- self.response(to, "pong")
-
-
- # Send site details
- def actionSiteInfo(self, to, file_status = None):
- ret = self.formatSiteInfo(self.site)
- if file_status: # Client queries file status
- if self.site.storage.isFile(file_status): # File exits, add event done
- ret["event"] = ("file_done", file_status)
- self.response(to, ret)
-
-
- # Join to an event channel
- def actionChannelJoin(self, to, channel):
- if channel not in self.channels:
- self.channels.append(channel)
-
-
- # Server variables
- def actionServerInfo(self, to):
- ret = self.formatServerInfo()
- self.response(to, ret)
-
-
- # Sign content.json
- def actionSiteSign(self, to, privatekey=None, inner_path="content.json"):
- site = self.site
- extend = {} # Extended info for signing
- if not inner_path.endswith("content.json"): # Find the content.json first
- file_info = site.content_manager.getFileInfo(inner_path)
- inner_path = file_info["content_inner_path"]
- if "cert_signers" in file_info: # Its an user dir file
- cert = self.user.getCert(self.site.address)
- extend["cert_auth_type"] = cert["auth_type"]
- extend["cert_user_id"] = self.user.getCertUserId(site.address)
- extend["cert_sign"] = cert["cert_sign"]
-
-
- if not site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path):
- return self.response(to, "Forbidden, you can only modify your own sites")
- if privatekey == "stored":
- privatekey = self.user.getSiteData(self.site.address).get("privatekey")
- if not privatekey: # Get privatekey from users.json auth_address
- privatekey = self.user.getAuthPrivatekey(self.site.address)
-
- # Signing
- site.content_manager.loadContent(add_bad_files=False) # Reload content.json, ignore errors to make it up-to-date
- signed = site.content_manager.sign(inner_path, privatekey, extend=extend) # Sign using private key sent by user
- if signed:
- #if inner_path == "content_json": self.cmd("notification", ["done", "Private key correct, content signed!", 5000]) # Display message for 5 sec
- pass
- else:
- self.cmd("notification", ["error", "Content sign failed: invalid private key."])
- self.response(to, "Site sign failed")
- return
-
- site.content_manager.loadContent(add_bad_files=False) # Load new content.json, ignore errors
- self.response(to, "ok")
-
- return inner_path
-
-
- # Sign and publish content.json
- def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True):
- if sign:
- inner_path = self.actionSiteSign(to, privatekey, inner_path)
- if not inner_path:
- return
-
- # Publishing
- if not self.site.settings["serving"]: # Enable site if paused
- self.site.settings["serving"] = True
- self.site.saveSettings()
- self.site.announce()
-
-
- event_name = "publish %s %s" % (self.site.address, inner_path)
- thread = RateLimit.callAsync(event_name, 7, self.site.publish, 5, inner_path) # Only publish once in 7 second to 5 peers
- notification = "linked" not in dir(thread) # Only display notification on first callback
- thread.linked = True
- thread.link(lambda thread: self.cbSitePublish(to, thread, notification)) # At the end callback with request id and thread
-
-
- # Callback of site publish
- def cbSitePublish(self, to, thread, notification=True):
- site = self.site
- published = thread.value
- if published>0: # Successfuly published
- if notification: self.cmd("notification", ["done", "Content published to %s peers." % published, 5000])
- self.response(to, "ok")
- if notification: site.updateWebsocket() # Send updated site data to local websocket clients
- else:
- if len(site.peers) == 0:
- if sys.modules["main"].file_server.port_opened:
- if notification: self.cmd("notification", ["info", "No peers found, but your content is ready to access."])
- self.response(to, "ok")
- else:
- if notification: self.cmd("notification", ["info", "Your network connection is restricted. Please, open "+str(config.fileserver_port)+" port
on your router to make your site accessible for everyone."])
- self.response(to, "Port not opened.")
-
- else:
- if notification: self.cmd("notification", ["error", "Content publish failed."])
- self.response(to, "Content publish failed.")
-
-
- # Write a file to disk
- def actionFileWrite(self, to, inner_path, content_base64):
- if not self.site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path):
- return self.response(to, "Forbidden, you can only modify your own files")
-
- try:
- import base64
- content = base64.b64decode(content_base64)
- self.site.storage.write(inner_path, content)
- except Exception, err:
- return self.response(to, "Write error: %s" % err)
-
- if inner_path.endswith("content.json"):
- self.site.content_manager.loadContent(inner_path, add_bad_files=False)
-
- self.response(to, "ok")
-
- # Send sitechanged to other local users
- for ws in self.site.websockets:
- if ws != self:
- ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]})
-
-
-
- # Find data in json files
- def actionFileQuery(self, to, dir_inner_path, query):
- # s = time.time()
- dir_path = self.site.storage.getPath(dir_inner_path)
- rows = list(QueryJson.query(dir_path, query))
- # self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s))
- return self.response(to, rows)
-
-
- # Sql query
- def actionDbQuery(self, to, query, params=None, wait_for=None):
- rows = []
- try:
- res = self.site.storage.query(query, params)
- except Exception, err: # Response the error to client
- return self.response(to, {"error": str(err)})
- # Convert result to dict
- for row in res:
- rows.append(dict(row))
- return self.response(to, rows)
-
-
- # Return file content
- def actionFileGet(self, to, inner_path, required=True):
- try:
- if required: self.site.needFile(inner_path, priority=1)
- body = self.site.storage.read(inner_path)
- except:
- body = None
- return self.response(to, body)
-
-
- def actionFileRules(self, to, inner_path):
- rules = self.site.content_manager.getRules(inner_path)
- if inner_path.endswith("content.json"):
- content = self.site.content_manager.contents.get(inner_path)
- if content:
- rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])
- else:
- rules["current_size"] = 0
- return self.response(to, rules)
-
-
- # Add certificate to user
- def actionCertAdd(self, to, domain, auth_type, auth_user_name, cert):
- try:
- res = self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert)
- if res == True:
- self.cmd("notification", ["done", "New certificate added: %s/%s@%s." % (auth_type, auth_user_name, domain)])
- self.response(to, "ok")
- else:
- self.response(to, "Not changed")
- except Exception, err:
- self.response(to, {"error": err.message})
-
-
- # Select certificate for site
- def actionCertSelect(self, to, accepted_domains=[]):
- accounts = []
- accounts.append(["", "Unique to site", ""]) # Default option
- active = "" # Make it active if no other option found
-
- # Add my certs
- auth_address = self.user.getAuthAddress(self.site.address) # Current auth address
- for domain, cert in self.user.certs.items():
- if auth_address == cert["auth_address"]:
- active = domain
- title = cert["auth_user_name"]+"@"+domain
- if domain in accepted_domains:
- accounts.append([domain, title, ""])
- else:
- accounts.append([domain, title, "disabled"])
-
-
-        # Render the html
-        body = "<span style='padding-bottom: 5px; display: inline-block'>Select account you want to use in this site:</span>"
-        # Accounts
-        for domain, account, css_class in accounts:
-            if domain == active:
-                css_class += " active" # Currently selected option
-                title = "<b>%s</b> <small>(currently selected)</small>" % account
-            else:
-                title = "<b>%s</b>" % account
-            body += "<a href='#Select+account' class='select select-close cert %s' title='%s'>%s</a>" % (css_class, domain, title)
-        # More available providers
-        more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domains we have not displayed yet
-        if more_domains:
-            # body+= "<small style='margin-top: 10px; display: block'>Accepted authorization providers by the site:</small>"
-            body+= "<div style='background-color: black; color: white; padding: 5px; margin-top: 5px'>"
-            for domain in more_domains:
-                body += "<a href='/%s' target='_top' class='select'><small style='float: right; margin-right: 40px; margin-top: -1px'>Register &raquo;</small>%s</a>" % (domain, domain)
-            body+= "</div>"
-
-        body += """
-            <script>
-                $(".notification .select.cert").on("click", function() {
-                    $(".notification .select").removeClass('active')
-                    wrapper.ws.cmd('certSet', [this.title])
-                    return false
-                })
-            </script>
-        """
-
- # Send the notification
- self.cmd("notification", ["ask", body])
-
-
- # Set certificate that used for authenticate user for site
- def actionCertSet(self, to, domain):
- self.user.setCert(self.site.address, domain)
- self.site.updateWebsocket(cert_changed=domain)
-
-
- # - Admin actions -
-
- # List all site info
- def actionSiteList(self, to):
- ret = []
- SiteManager.site_manager.load() # Reload sites
- for site in self.server.sites.values():
- if not site.content_manager.contents.get("content.json"): continue # Broken site
- ret.append(self.formatSiteInfo(site, create_user=False)) # Dont generate the auth_address on listing
- self.response(to, ret)
-
-
- # Join to an event channel on all sites
- def actionChannelJoinAllsite(self, to, channel):
- if channel not in self.channels: # Add channel to channels
- self.channels.append(channel)
-
- for site in self.server.sites.values(): # Add websocket to every channel
- if self not in site.websockets:
- site.websockets.append(self)
-
-
- # Update site content.json
- def actionSiteUpdate(self, to, address):
- site = self.server.sites.get(address)
- if site and (site.address == self.site.address or "ADMIN" in self.site.settings["permissions"]):
- gevent.spawn(site.update)
- else:
- self.response(to, {"error": "Unknown site: %s" % address})
-
-
- # Pause site serving
- def actionSitePause(self, to, address):
- site = self.server.sites.get(address)
- if site:
- site.settings["serving"] = False
- site.saveSettings()
- site.updateWebsocket()
- site.worker_manager.stopWorkers()
- else:
- self.response(to, {"error": "Unknown site: %s" % address})
-
-
- # Resume site serving
- def actionSiteResume(self, to, address):
- site = self.server.sites.get(address)
- if site:
- site.settings["serving"] = True
- site.saveSettings()
- gevent.spawn(site.update, announce=True)
- time.sleep(0.001) # Wait for update thread starting
- site.updateWebsocket()
- else:
- self.response(to, {"error": "Unknown site: %s" % address})
-
-
- def actionSiteDelete(self, to, address):
- site = self.server.sites.get(address)
- if site:
- site.settings["serving"] = False
- site.saveSettings()
- site.worker_manager.running = False
- site.worker_manager.stopWorkers()
- site.storage.deleteFiles()
- SiteManager.site_manager.delete(address)
- site.updateWebsocket()
- else:
- self.response(to, {"error": "Unknown site: %s" % address})
-
-
- def actionSiteClone(self, to, address):
- self.cmd("notification", ["info", "Cloning site..."])
- site = self.server.sites.get(address)
- # Generate a new site from user's bip32 seed
- new_address, new_address_index, new_site_data = self.user.getNewSiteData()
- new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index)
- new_site.settings["own"] = True
- new_site.saveSettings()
- self.cmd("notification", ["done", "Site cloned" % new_address])
- gevent.spawn(new_site.announce)
-
-
- def actionSiteSetLimit(self, to, size_limit):
- self.site.settings["size_limit"] = size_limit
- self.site.saveSettings()
- self.response(to, "Site size limit changed to %sMB" % size_limit)
- self.site.download()
-
-
- def actionServerUpdate(self, to):
- self.cmd("updating")
- sys.modules["main"].update_after_shutdown = True
- sys.modules["main"].file_server.stop()
- sys.modules["main"].ui_server.stop()
+ def __init__(self, ws, site, server, user):
+ self.ws = ws
+ self.site = site
+ self.user = user
+ self.log = site.log
+ self.server = server
+ self.next_message_id = 1
+ self.waiting_cb = {} # Waiting for callback. Key: message_id, Value: function pointer
+ self.channels = [] # Channels joined to
+ self.sending = False # Currently sending to client
+ self.send_queue = [] # Messages to send to client
+
+ # Start listener loop
+ def start(self):
+ ws = self.ws
+ if self.site.address == config.homepage and not self.site.page_requested:
+ # Add open fileserver port message or closed port error to homepage at first request after start
+ if sys.modules["main"].file_server.port_opened is True:
+ self.site.notifications.append([
+ "done",
+ "Congratulation, your port %s is opened.
You are full member of ZeroNet network!" %
+ config.fileserver_port,
+ 10000
+ ])
+ elif sys.modules["main"].file_server.port_opened is False:
+ self.site.notifications.append([
+ "error",
+ """
+ Your network connection is restricted. Please, open %s port
+ on your router to become full member of ZeroNet network.
+ """ % config.fileserver_port,
+ 0
+ ])
+            self.site.page_requested = True # Don't add connection notification anymore
+
+ for notification in self.site.notifications: # Send pending notification messages
+ self.cmd("notification", notification)
+ self.site.notifications = []
+ while True:
+ try:
+ message = ws.receive()
+ if message:
+ self.handleRequest(message)
+ except Exception, err:
+ if err.message != 'Connection is already closed':
+ if config.debug: # Allow websocket errors to appear on /Debug
+ sys.modules["main"].DebugHook.handleError()
+ self.log.error("WebSocket error: %s" % Debug.formatException(err))
+ return "Bye."
+
+ # Event in a channel
+ def event(self, channel, *params):
+        if channel in self.channels: # We have joined this channel
+ if channel == "siteChanged":
+                site = params[0] # Triggering site
+ site_info = self.formatSiteInfo(site)
+ if len(params) > 1 and params[1]: # Extra data
+ site_info.update(params[1])
+ self.cmd("setSiteInfo", site_info)
+
+ # Send response to client (to = message.id)
+ def response(self, to, result):
+ self.send({"cmd": "response", "to": to, "result": result})
+
+ # Send a command
+ def cmd(self, cmd, params={}, cb=None):
+ self.send({"cmd": cmd, "params": params}, cb)
+
+ # Encode to json and send message
+ def send(self, message, cb=None):
+ message["id"] = self.next_message_id # Add message id to allow response
+ self.next_message_id += 1
+        if cb: # Callback after client responded
+ self.waiting_cb[message["id"]] = cb
+ if self.sending:
+ return # Already sending
+ self.send_queue.append(message)
+ try:
+ while self.send_queue:
+ self.sending = True
+ message = self.send_queue.pop(0)
+ self.ws.send(json.dumps(message))
+ self.sending = False
+ except Exception, err:
+ self.log.debug("Websocket send error: %s" % Debug.formatException(err))
+
+ # Handle incoming messages
+ def handleRequest(self, data):
+ req = json.loads(data)
+
+ cmd = req.get("cmd")
+ params = req.get("params")
+ permissions = self.site.settings["permissions"]
+ if req["id"] >= 1000000: # Its a wrapper command, allow admin commands
+ permissions = permissions[:]
+ permissions.append("ADMIN")
+
+ admin_commands = (
+ "sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteClone",
+ "channelJoinAllsite",
+ "serverUpdate", "certSet"
+ )
+
+ if cmd == "response": # It's a response to a command
+ return self.actionResponse(req["to"], req["result"])
+ elif cmd in admin_commands and "ADMIN" not in permissions: # Admin commands
+ return self.response(req["id"], "You don't have permission to run %s" % cmd)
+ else: # Normal command
+ func_name = "action" + cmd[0].upper() + cmd[1:]
+ func = getattr(self, func_name, None)
+ if not func: # Unknown command
+ self.response(req["id"], "Unknown command: %s" % cmd)
+ return
+
+ # Support calling with named parameters, positional parameters, or a single raw argument
+ if type(params) is dict:
+ func(req["id"], **params)
+ elif type(params) is list:
+ func(req["id"], *params)
+ else:
+ func(req["id"], params)
+
+ # Format site info
+ def formatSiteInfo(self, site, create_user=True):
+ content = site.content_manager.contents.get("content.json")
+ if content: # Remove unnecessary data transfer
+ content = content.copy()
+ content["files"] = len(content.get("files", {}))
+ content["includes"] = len(content.get("includes", {}))
+ if "sign" in content:
+ del(content["sign"])
+ if "signs" in content:
+ del(content["signs"])
+ if "signers_sign" in content:
+ del(content["signers_sign"])
+
+ settings = site.settings.copy()
+ del settings["wrapper_key"] # Dont expose wrapper key
+ del settings["auth_key"] # Dont send auth key twice
+
+ ret = {
+ "auth_key": self.site.settings["auth_key"], # Obsolete, will be removed
+ "auth_key_sha512": hashlib.sha512(self.site.settings["auth_key"]).hexdigest()[0:64], # Obsolete, will be removed
+ "auth_address": self.user.getAuthAddress(site.address, create=create_user),
+ "cert_user_id": self.user.getCertUserId(site.address),
+ "address": site.address,
+ "settings": settings,
+ "content_updated": site.content_updated,
+ "bad_files": len(site.bad_files),
+ "size_limit": site.getSizeLimit(),
+ "next_size_limit": site.getNextSizeLimit(),
+ "peers": site.settings.get("peers", len(site.peers)),
+ "started_task_num": site.worker_manager.started_task_num,
+ "tasks": len(site.worker_manager.tasks),
+ "workers": len(site.worker_manager.workers),
+ "content": content
+ }
+ if site.settings["own"]:
+ ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey"))
+ if site.settings["serving"] and content:
+ ret["peers"] += 1 # Add myself if serving
+ return ret
+
+ def formatServerInfo(self):
+ return {
+ "ip_external": bool(sys.modules["main"].file_server.port_opened),
+ "platform": sys.platform,
+ "fileserver_ip": config.fileserver_ip,
+ "fileserver_port": config.fileserver_port,
+ "ui_ip": config.ui_ip,
+ "ui_port": config.ui_port,
+ "version": config.version,
+ "rev": config.rev,
+ "debug": config.debug,
+ "plugins": PluginManager.plugin_manager.plugin_names
+ }
+
+ # - Actions -
+
+ # Do callback on response {"cmd": "response", "to": message_id, "result": result}
+ def actionResponse(self, to, result):
+ if to in self.waiting_cb:
+ self.waiting_cb[to](result) # Call callback function
+ else:
+ self.log.error("Websocket callback not found: %s, %s" % (to, result))
+
+ # Send a simple pong answer
+ def actionPing(self, to):
+ self.response(to, "pong")
+
+ # Send site details
+ def actionSiteInfo(self, to, file_status=None):
+ ret = self.formatSiteInfo(self.site)
+ if file_status: # Client queries file status
+ if self.site.storage.isFile(file_status): # File exists, add file_done event
+ ret["event"] = ("file_done", file_status)
+ self.response(to, ret)
+
+ # Join an event channel
+ def actionChannelJoin(self, to, channel):
+ if channel not in self.channels:
+ self.channels.append(channel)
+
+ # Server variables
+ def actionServerInfo(self, to):
+ ret = self.formatServerInfo()
+ self.response(to, ret)
+
+ # Sign content.json
+ def actionSiteSign(self, to, privatekey=None, inner_path="content.json"):
+ site = self.site
+ extend = {} # Extended info for signing
+ if not inner_path.endswith("content.json"): # Find the content.json first
+ file_info = site.content_manager.getFileInfo(inner_path)
+ inner_path = file_info["content_inner_path"]
+ if "cert_signers" in file_info: # Its an user dir file
+ cert = self.user.getCert(self.site.address)
+ extend["cert_auth_type"] = cert["auth_type"]
+ extend["cert_user_id"] = self.user.getCertUserId(site.address)
+ extend["cert_sign"] = cert["cert_sign"]
+
+ if (
+ not site.settings["own"] and
+ self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path)
+ ):
+ return self.response(to, "Forbidden, you can only modify your own sites")
+ if privatekey == "stored":
+ privatekey = self.user.getSiteData(self.site.address).get("privatekey")
+ if not privatekey: # Get privatekey from users.json auth_address
+ privatekey = self.user.getAuthPrivatekey(self.site.address)
+
+ # Signing
+ site.content_manager.loadContent(add_bad_files=False) # Reload content.json, ignore errors to make it up-to-date
+ signed = site.content_manager.sign(inner_path, privatekey, extend=extend) # Sign using private key sent by user
+ if not signed:
+ self.cmd("notification", ["error", "Content sign failed: invalid private key."])
+ self.response(to, "Site sign failed")
+ return
+
+ site.content_manager.loadContent(add_bad_files=False) # Load new content.json, ignore errors
+ self.response(to, "ok")
+
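+ # Return the signed content.json inner_path so actionSitePublish can chain on it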
+ return inner_path
+
+ # Sign and publish content.json
+ def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True):
+ if sign:
+ inner_path = self.actionSiteSign(to, privatekey, inner_path)
+ if not inner_path:
+ return
+
+ # Publishing
+ if not self.site.settings["serving"]: # Enable site if paused
+ self.site.settings["serving"] = True
+ self.site.saveSettings()
+ self.site.announce()
+
+ event_name = "publish %s %s" % (self.site.address, inner_path)
+ thread = RateLimit.callAsync(event_name, 7, self.site.publish, 5, inner_path) # Publish at most once per 7 seconds, to 5 peers
+ notification = "linked" not in dir(thread) # Only display notification on first callback
+ thread.linked = True
+ thread.link(lambda thread: self.cbSitePublish(to, thread, notification)) # At the end callback with request id and thread
+
+ # Callback of site publish
+ def cbSitePublish(self, to, thread, notification=True):
+ site = self.site
+ published = thread.value
+ if published > 0: # Successfully published
+ if notification:
+ self.cmd("notification", ["done", "Content published to %s peers." % published, 5000])
+ self.response(to, "ok")
+ if notification:
+ site.updateWebsocket() # Send updated site data to local websocket clients
+ else:
+ if len(site.peers) == 0:
+ if sys.modules["main"].file_server.port_opened:
+ if notification:
+ self.cmd("notification", ["info", "No peers found, but your content is ready to access."])
+ self.response(to, "ok")
+ else:
+ if notification:
+ self.cmd("notification", [
+ "info",
+ """Your network connection is restricted. Please, open %s port
+ on your router to make your site accessible for everyone.""" % config.fileserver_port
+ ])
+ self.response(to, "Port not opened.")
+
+ else:
+ if notification:
+ self.cmd("notification", ["error", "Content publish failed."])
+ self.response(to, "Content publish failed.")
+
+ # Write a file to disk
+ def actionFileWrite(self, to, inner_path, content_base64):
+ if (
+ not self.site.settings["own"] and
+ self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path)
+ ):
+ return self.response(to, "Forbidden, you can only modify your own files")
+
+ try:
+ import base64
+ content = base64.b64decode(content_base64)
+ self.site.storage.write(inner_path, content)
+ except Exception, err:
+ return self.response(to, "Write error: %s" % err)
+
+ if inner_path.endswith("content.json"):
+ self.site.content_manager.loadContent(inner_path, add_bad_files=False)
+
+ self.response(to, "ok")
+
+ # Send sitechanged to other local users
+ for ws in self.site.websockets:
+ if ws != self:
+ ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]})
+
+ # Find data in json files
+ def actionFileQuery(self, to, dir_inner_path, query):
+ # s = time.time()
+ dir_path = self.site.storage.getPath(dir_inner_path)
+ rows = list(QueryJson.query(dir_path, query))
+ # self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s))
+ return self.response(to, rows)
+
+ # SQL query
+ def actionDbQuery(self, to, query, params=None, wait_for=None):
+ rows = []
+ try:
+ res = self.site.storage.query(query, params)
+ except Exception, err: # Return the error to the client
+ return self.response(to, {"error": str(err)})
+ # Convert result to dict
+ for row in res:
+ rows.append(dict(row))
+ return self.response(to, rows)
+
+ # Return file content
+ def actionFileGet(self, to, inner_path, required=True):
+ try:
+ if required:
+ self.site.needFile(inner_path, priority=1)
+ body = self.site.storage.read(inner_path)
+ except:
+ body = None
+ return self.response(to, body)
+
+ def actionFileRules(self, to, inner_path):
+ rules = self.site.content_manager.getRules(inner_path)
+ if inner_path.endswith("content.json"):
+ content = self.site.content_manager.contents.get(inner_path)
+ if content:
+ rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])
+ else:
+ rules["current_size"] = 0
+ return self.response(to, rules)
+
+ # Add certificate to user
+ def actionCertAdd(self, to, domain, auth_type, auth_user_name, cert):
+ try:
+ res = self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert)
+ if res is True:
+ self.cmd(
+ "notification",
+ ["done", "New certificate added: %s/%s@%s." % (auth_type, auth_user_name, domain)]
+ )
+ self.response(to, "ok")
+ else:
+ self.response(to, "Not changed")
+ except Exception, err:
+ self.response(to, {"error": err.message})
+
+ # Select certificate for site
+ def actionCertSelect(self, to, accepted_domains=[]):
+ accounts = []
+ accounts.append(["", "Unique to site", ""]) # Default option
+ active = "" # Make it active if no other option found
+
+ # Add my certs
+ auth_address = self.user.getAuthAddress(self.site.address) # Current auth address
+ for domain, cert in self.user.certs.items():
+ if auth_address == cert["auth_address"]:
+ active = domain
+ title = cert["auth_user_name"] + "@" + domain
+ if domain in accepted_domains:
+ accounts.append([domain, title, ""])
+ else:
+ accounts.append([domain, title, "disabled"])
+
+ # Render the html
+ body = "Select account you want to use in this site:"
+ # Accounts
+ for domain, account, css_class in accounts:
+ if domain == active:
+ css_class += " active" # Currently selected option
+ title = "%s (currently selected)" % account
+ else:
+ title = "%s" % account
+ body += "%s" % (css_class, domain, title)
+ # More available providers
+ more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domains not displayed yet
+ if more_domains:
+ # body+= "Accepted authorization providers by the site:"
+ body += ""
+ for domain in more_domains:
+ body += """
+
+ Register »%s
+
+ """ % (domain, domain)
+ body += "
"
+
+ body += """
+
+ """
+
+ # Send the notification
+ self.cmd("notification", ["ask", body])
+
+ # Set the certificate used to authenticate the user on this site
+ def actionCertSet(self, to, domain):
+ self.user.setCert(self.site.address, domain)
+ self.site.updateWebsocket(cert_changed=domain)
+
+ # - Admin actions -
+
+ # List all site info
+ def actionSiteList(self, to):
+ ret = []
+ SiteManager.site_manager.load() # Reload sites
+ for site in self.server.sites.values():
+ if not site.content_manager.contents.get("content.json"):
+ continue # Broken site
+ ret.append(self.formatSiteInfo(site, create_user=False)) # Don't generate the auth_address on listing
+ self.response(to, ret)
+
+ # Join an event channel on all sites
+ def actionChannelJoinAllsite(self, to, channel):
+ if channel not in self.channels: # Add channel to channels
+ self.channels.append(channel)
+
+ for site in self.server.sites.values(): # Add this websocket to every site
+ if self not in site.websockets:
+ site.websockets.append(self)
+
+ # Update site content.json
+ def actionSiteUpdate(self, to, address):
+ site = self.server.sites.get(address)
+ if site and (site.address == self.site.address or "ADMIN" in self.site.settings["permissions"]):
+ gevent.spawn(site.update)
+ else:
+ self.response(to, {"error": "Unknown site: %s" % address})
+
+ # Pause site serving
+ def actionSitePause(self, to, address):
+ site = self.server.sites.get(address)
+ if site:
+ site.settings["serving"] = False
+ site.saveSettings()
+ site.updateWebsocket()
+ site.worker_manager.stopWorkers()
+ else:
+ self.response(to, {"error": "Unknown site: %s" % address})
+
+ # Resume site serving
+ def actionSiteResume(self, to, address):
+ site = self.server.sites.get(address)
+ if site:
+ site.settings["serving"] = True
+ site.saveSettings()
+ gevent.spawn(site.update, announce=True)
+ time.sleep(0.001) # Give the update thread a chance to start
+ site.updateWebsocket()
+ else:
+ self.response(to, {"error": "Unknown site: %s" % address})
+
+ def actionSiteDelete(self, to, address):
+ site = self.server.sites.get(address)
+ if site:
+ site.settings["serving"] = False
+ site.saveSettings()
+ site.worker_manager.running = False
+ site.worker_manager.stopWorkers()
+ site.storage.deleteFiles()
+ SiteManager.site_manager.delete(address)
+ site.updateWebsocket()
+ else:
+ self.response(to, {"error": "Unknown site: %s" % address})
+
+ def actionSiteClone(self, to, address):
+ self.cmd("notification", ["info", "Cloning site..."])
+ site = self.server.sites.get(address)
+ # Generate a new site from user's bip32 seed
+ new_address, new_address_index, new_site_data = self.user.getNewSiteData()
+ new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index)
+ new_site.settings["own"] = True
+ new_site.saveSettings()
+ self.cmd("notification", ["done", "Site cloned" % new_address])
+ gevent.spawn(new_site.announce)
+
+ def actionSiteSetLimit(self, to, size_limit):
+ self.site.settings["size_limit"] = size_limit
+ self.site.saveSettings()
+ self.response(to, "Site size limit changed to %sMB" % size_limit)
+ self.site.download()
+
+ def actionServerUpdate(self, to):
+ self.cmd("updating")
+ sys.modules["main"].update_after_shutdown = True
+ sys.modules["main"].file_server.stop()
+ sys.modules["main"].ui_server.stop()
diff --git a/src/User/User.py b/src/User/User.py
index ec2e6b50..2514ded8 100644
--- a/src/User/User.py
+++ b/src/User/User.py
@@ -1,4 +1,7 @@
-import logging, json, time
+import logging
+import json
+import time
+
from Crypt import CryptBitcoin
from Plugin import PluginManager
from Config import config
@@ -6,6 +9,7 @@ from Config import config
@PluginManager.acceptPlugins
class User(object):
+
def __init__(self, master_address=None, master_seed=None, data={}):
if master_seed:
self.master_seed = master_seed
@@ -27,7 +31,8 @@ class User(object):
if self.master_address not in users:
users[self.master_address] = {} # Create if not exist
user_data = users[self.master_address]
- if self.master_seed: user_data["master_seed"] = self.master_seed
+ if self.master_seed:
+ user_data["master_seed"] = self.master_seed
user_data["sites"] = self.sites
user_data["certs"] = self.certs
open("%s/users.json" % config.data_dir, "w").write(json.dumps(users, indent=2, sort_keys=True))
@@ -40,26 +45,28 @@ class User(object):
# Return: {"auth_address": "xxx", "auth_privatekey": "xxx"}
def getSiteData(self, address, create=True):
if address not in self.sites: # Generate new BIP32 child key based on site address
- if not create: return {"auth_address": None, "auth_privatekey": None} # Dont create user yet
+ if not create:
+ return {"auth_address": None, "auth_privatekey": None} # Dont create user yet
s = time.time()
- address_id = self.getAddressAuthIndex(address) # Convert site address to int
+ address_id = self.getAddressAuthIndex(address) # Convert site address to int
auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
self.sites[address] = {
"auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey),
"auth_privatekey": auth_privatekey
}
self.save()
- self.log.debug("Added new site: %s in %.3fs" % (address, time.time()-s))
+ self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s))
return self.sites[address]
# Get data for a new, unique site
# Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}]
def getNewSiteData(self):
import random
- bip32_index = random.randrange(2**256) % 100000000
+ bip32_index = random.randrange(2 ** 256) % 100000000
site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index)
site_address = CryptBitcoin.privatekeyToAddress(site_privatekey)
- if site_address in self.sites: raise Exception("Random error: site exist!")
+ if site_address in self.sites:
+ raise Exception("Random error: site exist!")
# Save to sites
self.getSiteData(site_address)
self.sites[site_address]["privatekey"] = site_privatekey
@@ -85,7 +92,8 @@ class User(object):
# Add cert for the user
def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
domain = domain.lower()
- auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0] # Find privatekey by auth address
+ # Find privatekey by auth address
+ auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0]
cert_node = {
"auth_address": auth_address,
"auth_privatekey": auth_privatekey,
@@ -95,10 +103,13 @@ class User(object):
}
# Check if we already have a cert for this domain and it's not the same
if self.certs.get(domain) and self.certs[domain] != cert_node:
- raise Exception("You already have certificate for this domain: %s/%s@%s" % (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain))
- elif self.certs.get(domain) == cert_node: # Same, not updated
+ raise Exception(
+ "You already have certificate for this domain: %s/%s@%s" %
+ (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain)
+ )
+ elif self.certs.get(domain) == cert_node: # Same, not updated
return None
- else: # Not exist yet, add
+ else: # Doesn't exist yet, add it
self.certs[domain] = cert_node
self.save()
return True
@@ -113,17 +124,19 @@ class User(object):
return site_data
# Get cert for the site address
- # Return: { "auth_address": ..., "auth_privatekey":..., "auth_type": "web", "auth_user_name": "nofish", "cert_sign": ... } or None
+ # Return: { "auth_address":.., "auth_privatekey":.., "auth_type": "web", "auth_user_name": "nofish", "cert_sign":.. } or None
def getCert(self, address):
site_data = self.getSiteData(address, create=False)
- if not site_data or not "cert" in site_data: return None # Site dont have cert
+ if not site_data or "cert" not in site_data:
+ return None # Site doesn't have a cert
return self.certs.get(site_data["cert"])
# Get cert user name for the site address
# Return: user@certprovider.bit or None
def getCertUserId(self, address):
site_data = self.getSiteData(address, create=False)
- if not site_data or not "cert" in site_data: return None # Site dont have cert
+ if not site_data or "cert" not in site_data:
+ return None # Site doesn't have a cert
cert = self.certs.get(site_data["cert"])
if cert:
- return cert["auth_user_name"]+"@"+site_data["cert"]
\ No newline at end of file
+ return cert["auth_user_name"] + "@" + site_data["cert"]
diff --git a/src/User/UserManager.py b/src/User/UserManager.py
index 28c9c021..dff7ece1 100644
--- a/src/User/UserManager.py
+++ b/src/User/UserManager.py
@@ -1,5 +1,4 @@
# Included modules
-import os
import json
import logging
@@ -43,7 +42,7 @@ class UserManager(object):
def create(self, master_address=None, master_seed=None):
user = User(master_address, master_seed)
logging.debug("Created user: %s" % user.master_address)
- if user.master_address: # If successfully created
+ if user.master_address: # If successfully created
self.users[user.master_address] = user
user.save()
return user
@@ -74,10 +73,10 @@ def reloadModule():
import imp
global User, UserManager, user_manager
- User = imp.load_source("User", "src/User/User.py").User # Reload source
- #module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module
- #UserManager = module.UserManager
- #user_manager = module.user_manager
+ User = imp.load_source("User", "src/User/User.py").User # Reload source
+ # module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module
+ # UserManager = module.UserManager
+ # user_manager = module.user_manager
# Reload users
user_manager = UserManager()
user_manager.load()
diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py
index 242ca9ef..033b71e7 100644
--- a/src/Worker/Worker.py
+++ b/src/Worker/Worker.py
@@ -1,99 +1,102 @@
-import gevent, time, logging, shutil, os
-from Peer import Peer
+import time
+
+import gevent
+
from Debug import Debug
+
class Worker(object):
- def __init__(self, manager, peer):
- self.manager = manager
- self.peer = peer
- self.task = None
- self.key = None
- self.running = False
- self.thread = None
+ def __init__(self, manager, peer):
+ self.manager = manager
+ self.peer = peer
+ self.task = None
+ self.key = None
+ self.running = False
+ self.thread = None
- def __str__(self):
- return "Worker %s %s" % (self.manager.site.address_short, self.key)
+ def __str__(self):
+ return "Worker %s %s" % (self.manager.site.address_short, self.key)
+ def __repr__(self):
+ return "<%s>" % self.__str__()
- def __repr__(self):
- return "<%s>" % self.__str__()
+ # Downloader thread
+ def downloader(self):
+ self.peer.hash_failed = 0 # Reset hash error counter
+ while self.running:
+ # Try to pick up a free file download task
+ task = self.manager.getTask(self.peer)
+ if not task: # Die, no more tasks
+ self.manager.log.debug("%s: No task found, stopping" % self.key)
+ break
+ if not task["time_started"]:
+ task["time_started"] = time.time() # Task started now
+ if task["workers_num"] > 0: # Wait a bit if someone already working on it
+ self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." % (self.key, task["inner_path"]))
+ time.sleep(1)
+ self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"]))
- # Downloader thread
- def downloader(self):
- self.peer.hash_failed = 0 # Reset hash error counter
- while self.running:
- # Try to pickup free file download task
- task = self.manager.getTask(self.peer)
- if not task: # Die, no more task
- self.manager.log.debug("%s: No task found, stopping" % self.key)
- break
- if not task["time_started"]: task["time_started"] = time.time() # Task started now
+ if task["done"] is False:
+ self.task = task
+ site = task["site"]
+ task["workers_num"] += 1
+ try:
+ buff = self.peer.getFile(site.address, task["inner_path"])
+ except Exception, err:
+ self.manager.log.debug("%s: getFile error: err" % (self.key, err))
+ buff = None
+ if self.running is False: # Worker no longer needed or got killed
+ self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"]))
+ break
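+ # verifyFile: True = valid new file, None = same file we already have;
+ # anything else is treated as a hash failure below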
+ if buff: # Download ok
+ correct = site.content_manager.verifyFile(task["inner_path"], buff)
+ else: # Download error
+ correct = False
+ if correct is True or correct is None: # Hash ok or same file
+ self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"]))
+ if correct is True and task["done"] is False: # Save if changed and task not done yet
+ buff.seek(0)
+ site.storage.write(task["inner_path"], buff)
+ if task["done"] is False:
+ self.manager.doneTask(task)
+ task["workers_num"] -= 1
+ self.task = None
+ else: # Hash failed
+ self.manager.log.debug(
+ "%s: Hash failed: %s, failed peers: %s" %
+ (self.key, task["inner_path"], len(task["failed"]))
+ )
+ task["failed"].append(self.peer)
+ self.task = None
+ self.peer.hash_failed += 1
+ if self.peer.hash_failed >= max(len(self.manager.tasks), 3):
+ # Broken peer: more failures than the number of tasks, but at least 3
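+ # e.g. with 5 queued tasks the peer is dropped after its 5th hash failure;
+ # with a single task it still gets 3 attempts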
+ break
+ task["workers_num"] -= 1
+ time.sleep(1)
+ self.peer.onWorkerDone()
+ self.running = False
+ self.manager.removeWorker(self)
- if task["workers_num"] > 0: # Wait a bit if someone already working on it
- self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." % (self.key, task["inner_path"]))
- time.sleep(1)
- self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"]))
+ # Start the worker
+ def start(self):
+ self.running = True
+ self.thread = gevent.spawn(self.downloader)
- if task["done"] == False:
- self.task = task
- site = task["site"]
- task["workers_num"] += 1
- try:
- buff = self.peer.getFile(site.address, task["inner_path"])
- except Exception, err:
- self.manager.log.debug("%s: getFile error: err" % (self.key, err))
- buff = None
- if self.running == False: # Worker no longer needed or got killed
- self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"]))
- break
- if buff: # Download ok
- correct = site.content_manager.verifyFile(task["inner_path"], buff)
- else: # Download error
- correct = False
- if correct == True or correct == None: # Hash ok or same file
- self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"]))
- if correct == True and task["done"] == False: # Save if changed and task not done yet
- buff.seek(0)
- file_path = site.storage.getPath(task["inner_path"])
- site.storage.write(task["inner_path"], buff)
- if task["done"] == False: self.manager.doneTask(task)
- task["workers_num"] -= 1
- self.task = None
- else: # Hash failed
- self.manager.log.debug("%s: Hash failed: %s, failed peers: %s" % (self.key, task["inner_path"], len(task["failed"])))
- task["failed"].append(self.peer)
- self.task = None
- self.peer.hash_failed += 1
- if self.peer.hash_failed >= max(len(self.manager.tasks), 3): # More fails than tasks number but atleast 3: Broken peer
- break
- task["workers_num"] -= 1
- time.sleep(1)
- self.peer.onWorkerDone()
- self.running = False
- self.manager.removeWorker(self)
+ # Skip current task
+ def skip(self):
+ self.manager.log.debug("%s: Force skipping" % self.key)
+ if self.thread:
+ self.thread.kill(exception=Debug.Notify("Worker stopped"))
+ self.start()
-
- # Start the worker
- def start(self):
- self.running = True
- self.thread = gevent.spawn(self.downloader)
-
-
- # Skip current task
- def skip(self):
- self.manager.log.debug("%s: Force skipping" % self.key)
- if self.thread:
- self.thread.kill(exception=Debug.Notify("Worker stopped"))
- self.start()
-
-
- # Force stop the worker
- def stop(self):
- self.manager.log.debug("%s: Force stopping" % self.key)
- self.running = False
- if self.thread:
- self.thread.kill(exception=Debug.Notify("Worker stopped"))
- del self.thread
- self.manager.removeWorker(self)
+ # Force stop the worker
+ def stop(self):
+ self.manager.log.debug("%s: Force stopping" % self.key)
+ self.running = False
+ if self.thread:
+ self.thread.kill(exception=Debug.Notify("Worker stopped"))
+ del self.thread
+ self.manager.removeWorker(self)
diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py
index 8f2dde35..905e4a72 100644
--- a/src/Worker/WorkerManager.py
+++ b/src/Worker/WorkerManager.py
@@ -1,204 +1,211 @@
+import time
+import logging
+import random
+
+import gevent
+
from Worker import Worker
-import gevent, time, logging, random
-MAX_WORKERS = 10
+MAX_WORKERS = 10 # Max concurrent workers
+
-# Worker manager for site
class WorkerManager:
- def __init__(self, site):
- self.site = site
- self.workers = {} # Key: ip:port, Value: Worker.Worker
- self.tasks = [] # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids}
- self.started_task_num = 0 # Last added task num
- self.running = True
- self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short)
- self.process_taskchecker = gevent.spawn(self.checkTasks)
+ def __init__(self, site):
+ self.site = site
+ self.workers = {} # Key: ip:port, Value: Worker.Worker
+ self.tasks = []
+ # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False,
+ # "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids}
+ self.started_task_num = 0 # Last added task num
+ self.running = True
+ self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short)
+ self.process_taskchecker = gevent.spawn(self.checkTasks)
- def __str__(self):
- return "WorkerManager %s" % self.site.address_short
+ def __str__(self):
+ return "WorkerManager %s" % self.site.address_short
+ def __repr__(self):
+ return "<%s>" % self.__str__()
- def __repr__(self):
- return "<%s>" % self.__str__()
+ # Check expired tasks
+ def checkTasks(self):
+ while self.running:
+ tasks = task = worker = workers = None # Cleanup local variables
+ time.sleep(15) # Check every 15 sec
+ # Clean up workers
+ for worker in self.workers.values():
+ if worker.task and worker.task["done"]:
+ worker.skip() # Stop workers with task done
+ if not self.tasks:
+ continue
- # Check expired tasks
- def checkTasks(self):
- while self.running:
- tasks = task = worker = workers = None # Cleanup local variables
- time.sleep(15) # Check every 15 sec
+ tasks = self.tasks[:] # Copy it so removing elements won't cause problems
+ for task in tasks:
+ if task["time_started"] and time.time() >= task["time_started"] + 60: # Task taking too long time, skip it
+ self.log.debug("Timeout, Skipping: %s" % task)
+ # Skip to next file workers
+ workers = self.findWorkers(task)
+ if workers:
+ for worker in workers:
+ worker.skip()
+ else:
+ self.failTask(task)
+ elif time.time() >= task["time_added"] + 60 and not self.workers: # No workers left
+ self.log.debug("Timeout, Cleanup task: %s" % task)
+ # Remove task
+ self.failTask(task)
- # Clean up workers
- for worker in self.workers.values():
- if worker.task and worker.task["done"]: worker.skip() # Stop workers with task done
+ elif (task["time_started"] and time.time() >= task["time_started"] + 15) or not self.workers:
+ # Task started more than 15 sec ago or no workers
+ self.log.debug("Task taking more than 15 secs, find more peers: %s" % task["inner_path"])
+ task["site"].announce() # Find more peers
+ if task["peers"]: # Release the peer lock
+ self.log.debug("Task peer lock release: %s" % task["inner_path"])
+ task["peers"] = []
+ self.startWorkers()
+ break # One reannounce per loop
- if not self.tasks: continue
+ self.log.debug("checkTasks stopped running")
- tasks = self.tasks[:] # Copy it so removing elements wont cause any problem
- for task in tasks:
- if task["time_started"] and time.time() >= task["time_started"]+60: # Task taking too long time, skip it
- self.log.debug("Timeout, Skipping: %s" % task)
- # Skip to next file workers
- workers = self.findWorkers(task)
- if workers:
- for worker in workers:
- worker.skip()
- else:
- self.failTask(task)
- elif time.time() >= task["time_added"]+60 and not self.workers: # No workers left
- self.log.debug("Timeout, Cleanup task: %s" % task)
- # Remove task
- self.failTask(task)
+ # Tasks sorted by this
+ def taskSorter(self, task):
+ if task["inner_path"] == "content.json":
+ return 9999 # Content.json always prority
+ if task["inner_path"] == "index.html":
+ return 9998 # index.html also important
+ priority = task["priority"]
+ if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"):
+ priority += 1 # download js and css files first
+ return priority - task["workers_num"] # Prefer more priority and less workers
- elif (task["time_started"] and time.time() >= task["time_started"]+15) or not self.workers: # Task started more than 15 sec ago or no workers
- self.log.debug("Task taking more than 15 secs, find more peers: %s" % task["inner_path"])
- task["site"].announce() # Find more peers
- if task["peers"]: # Release the peer lock
- self.log.debug("Task peer lock release: %s" % task["inner_path"])
- task["peers"] = []
- self.startWorkers()
- break # One reannounce per loop
+ # Return the next free or least-worked task
+ def getTask(self, peer):
+ self.tasks.sort(key=self.taskSorter, reverse=True) # Sort tasks by priority and worker numbers
+ for task in self.tasks: # Find a task
+ if task["peers"] and peer not in task["peers"]:
+ continue # This peer is not allowed to pick this task
+ if peer in task["failed"]:
+ continue # Peer already tried to solve this, but failed
+ return task
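+ # Implicitly returns None when every remaining task is peer-locked or has
+ # already failed for this peer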
+ # New peers added to site
+ def onPeers(self):
+ self.startWorkers()
- self.log.debug("checkTasks stopped running")
+ # Add new worker
+ def addWorker(self, peer):
+ key = peer.key
+ if key not in self.workers and len(self.workers) < MAX_WORKERS:
+ # We don't have a worker for this peer and the worker count is below the max
+ worker = Worker(self, peer)
+ self.workers[key] = worker
+ worker.key = key
+ worker.start()
+ return worker
+ else: # We already have a worker for this peer, or it's over the limit
+ return False
+ # Start workers to process tasks
+ def startWorkers(self, peers=None):
+ if not self.tasks:
+ return False # No task for workers
+ if len(self.workers) >= MAX_WORKERS and not peers:
+ return False # Worker count already maxed and no starting peers defined
+ if not peers:
+ peers = self.site.peers.values() # No peers defined, use any from the site
+ random.shuffle(peers)
+ for peer in peers: # One worker for every peer
+ if peers and peer not in peers:
+ continue # If peers defined and peer not valid
+ worker = self.addWorker(peer)
+ if worker:
+ self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), MAX_WORKERS))
+ # Stop all workers
+ def stopWorkers(self):
+ for worker in self.workers.values():
+ worker.stop()
+ tasks = self.tasks[:] # Copy
+ for task in tasks: # Mark all current tasks as failed
+ self.failTask(task)
+ # Find workers by task
+ def findWorkers(self, task):
+ workers = []
+ for worker in self.workers.values():
+ if worker.task == task:
+ workers.append(worker)
+ return workers
- # Tasks sorted by this
- def taskSorter(self, task):
- if task["inner_path"] == "content.json": return 9999 # Content.json always prority
- if task["inner_path"] == "index.html": return 9998 # index.html also important
- priority = task["priority"]
- if task["inner_path"].endswith(".js") or task["inner_path"].endswith(".css"): priority += 1 # download js and css files first
- return priority-task["workers_num"] # Prefer more priority and less workers
+ # End and remove a worker
+ def removeWorker(self, worker):
+ worker.running = False
+ if worker.key in self.workers:
+ del(self.workers[worker.key])
+ self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS))
+ # Create a new task and return its AsyncResult
+ def addTask(self, inner_path, peer=None, priority=0):
+ self.site.onFileStart(inner_path) # First task, trigger site download started
+ task = self.findTask(inner_path)
+ if task: # Already has task for that file
+ if peer and task["peers"]: # This peer also has new version, add it to task possible peers
+ task["peers"].append(peer)
+ self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"]))
+ self.startWorkers([peer])
+ elif peer and peer in task["failed"]:
+ task["failed"].remove(peer) # New update arrived, remove the peer from failed peers
+ self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"]))
+ self.startWorkers([peer])
- # Returns the next free or less worked task
- def getTask(self, peer):
- self.tasks.sort(key=self.taskSorter, reverse=True) # Sort tasks by priority and worker numbers
- for task in self.tasks: # Find a task
- if task["peers"] and peer not in task["peers"]: continue # This peer not allowed to pick this task
- if peer in task["failed"]: continue # Peer already tried to solve this, but failed
- return task
+ if priority:
+ task["priority"] += priority # Boost on priority
+ return task["evt"]
+ else: # No task for that file yet
+ evt = gevent.event.AsyncResult()
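+ # Workers resolve this AsyncResult: doneTask() sets it to True, failTask() to False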
+ if peer:
+ peers = [peer] # Only download from this peer
+ else:
+ peers = None
+ task = {
+ "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False,
+ "time_added": time.time(), "time_started": None, "peers": peers, "priority": priority, "failed": []
+ }
+ self.tasks.append(task)
+ self.started_task_num += 1
+ self.log.debug(
+ "New task: %s, peer lock: %s, priority: %s, tasks: %s" %
+ (task["inner_path"], peers, priority, self.started_task_num)
+ )
+ self.startWorkers(peers)
+ return evt
+ # Find a task using inner_path
+ def findTask(self, inner_path):
+ for task in self.tasks:
+ if task["inner_path"] == inner_path:
+ return task
+ return None # Not found
- # New peers added to site
- def onPeers(self):
- self.startWorkers()
-
-
- # Add new worker
- def addWorker(self, peer):
- key = peer.key
- if key not in self.workers and len(self.workers) < MAX_WORKERS: # We dont have worker for that peer and workers num less than max
- worker = Worker(self, peer)
- self.workers[key] = worker
- worker.key = key
- worker.start()
- return worker
- else: # We have woker for this peer or its over the limit
- return False
-
-
- # Start workers to process tasks
- def startWorkers(self, peers=None):
- if not self.tasks: return False # No task for workers
- if len(self.workers) >= MAX_WORKERS and not peers: return False # Workers number already maxed and no starting peers definied
- if not peers: peers = self.site.peers.values() # No peers definied, use any from site
- random.shuffle(peers)
- for peer in peers: # One worker for every peer
- if peers and peer not in peers: continue # If peers definied and peer not valid
- worker = self.addWorker(peer)
- if worker: self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), MAX_WORKERS))
-
-
- # Stop all worker
- def stopWorkers(self):
- for worker in self.workers.values():
- worker.stop()
- tasks = self.tasks[:] # Copy
- for task in tasks: # Mark all current task as failed
- self.failTask(task)
-
-
-
- # Find workers by task
- def findWorkers(self, task):
- workers = []
- for worker in self.workers.values():
- if worker.task == task: workers.append(worker)
- return workers
-
-
- # Ends and remove a worker
- def removeWorker(self, worker):
- worker.running = False
- if worker.key in self.workers:
- del(self.workers[worker.key])
- self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS))
-
-
- # Create new task and return asyncresult
- def addTask(self, inner_path, peer=None, priority = 0):
- self.site.onFileStart(inner_path) # First task, trigger site download started
- task = self.findTask(inner_path)
- if task: # Already has task for that file
- if peer and task["peers"]: # This peer also has new version, add it to task possible peers
- task["peers"].append(peer)
- self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"]))
- self.startWorkers([peer])
- elif peer and peer in task["failed"]:
- task["failed"].remove(peer) # New update arrived, remove the peer from failed peers
- self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"]))
- self.startWorkers([peer])
-
-
- if priority:
- task["priority"] += priority # Boost on priority
- return task["evt"]
- else: # No task for that file yet
- evt = gevent.event.AsyncResult()
- if peer:
- peers = [peer] # Only download from this peer
- else:
- peers = None
- task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_added": time.time(), "time_started": None, "peers": peers, "priority": priority, "failed": []}
- self.tasks.append(task)
- self.started_task_num += 1
- self.log.debug("New task: %s, peer lock: %s, priority: %s, tasks: %s" % (task["inner_path"], peers, priority, self.started_task_num))
- self.startWorkers(peers)
- return evt
-
-
- # Find a task using inner_path
- def findTask(self, inner_path):
- for task in self.tasks:
- if task["inner_path"] == inner_path:
- return task
- return None # Not found
-
-
- # Mark a task failed
- def failTask(self, task):
- if task in self.tasks:
- task["done"] = True
- self.tasks.remove(task) # Remove from queue
- self.site.onFileFail(task["inner_path"])
- task["evt"].set(False)
- if not self.tasks:
- self.started_task_num = 0
-
-
- # Mark a task done
- def doneTask(self, task):
- task["done"] = True
- self.tasks.remove(task) # Remove from queue
- self.site.onFileDone(task["inner_path"])
- task["evt"].set(True)
- if not self.tasks:
- self.started_task_num = 0
- self.site.onComplete() # No more task trigger site complete
+ # Mark a task failed
+ def failTask(self, task):
+ if task in self.tasks:
+ task["done"] = True
+ self.tasks.remove(task) # Remove from queue
+ self.site.onFileFail(task["inner_path"])
+ task["evt"].set(False)
+ if not self.tasks:
+ self.started_task_num = 0
+ # Mark a task done
+ def doneTask(self, task):
+ task["done"] = True
+ self.tasks.remove(task) # Remove from queue
+ self.site.onFileDone(task["inner_path"])
+ task["evt"].set(True)
+ if not self.tasks:
+ self.started_task_num = 0
+ self.site.onComplete() # No more tasks, trigger site complete
diff --git a/src/lib/cssvendor/cssvendor.py b/src/lib/cssvendor/cssvendor.py
index 4dbf4f95..f7cb1ba5 100644
--- a/src/lib/cssvendor/cssvendor.py
+++ b/src/lib/cssvendor/cssvendor.py
@@ -1,29 +1,39 @@
import re
+
def prefix(content):
- content = re.sub("@keyframes (.*? {.*?[^ ]})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n", content, flags=re.DOTALL)
- content = re.sub('([^-\*])(border-radius|box-shadow|transition|animation|box-sizing|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])', '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content)
- content = re.sub('(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])',
- '\\1: -webkit-\\2(\\3);'+
- '\\1: -moz-\\2(\\3);'+
- '\\1: -o-\\2(\\3);'+
- '\\1: -ms-\\2(\\3);'+
- '\\1: \\2(\\3);', content)
- return content
+ content = re.sub(
+ "@keyframes (.*? {.*?[^ ]})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n",
+ content, flags=re.DOTALL
+ )
+ content = re.sub(
+ '([^-\*])(border-radius|box-shadow|transition|animation|box-sizing|' +
+ 'transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])',
+ '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content
+ )
+ content = re.sub(
+ '(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])',
+ '\\1: -webkit-\\2(\\3);' +
+ '\\1: -moz-\\2(\\3);' +
+ '\\1: -o-\\2(\\3);' +
+ '\\1: -ms-\\2(\\3);' +
+ '\\1: \\2(\\3);', content
+ )
+ return content
if __name__ == "__main__":
- print prefix("""
- .test {
- border-radius: 5px;
- background: linear-gradient(red, blue);
- }
+ print prefix("""
+ .test {
+ border-radius: 5px;
+ background: linear-gradient(red, blue);
+ }
- @keyframes flip {
- 0% { transform: perspective(120px) rotateX(0deg) rotateY(0deg); }
- 50% { transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
- 100% { transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); }
- }
+ @keyframes flip {
+ 0% { transform: perspective(120px) rotateX(0deg) rotateY(0deg); }
+ 50% { transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
+ 100% { transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); }
+ }
- """)
\ No newline at end of file
+ """)
diff --git a/src/main.py b/src/main.py
index b1580662..380e87ec 100644
--- a/src/main.py
+++ b/src/main.py
@@ -16,8 +16,10 @@ update_after_shutdown = False # If set True then update and restart zeronet aft
from Config import config
# Create necessary files and dirs
-if not os.path.isdir(config.log_dir): os.mkdir(config.log_dir)
-if not os.path.isdir(config.data_dir): os.mkdir(config.data_dir)
+if not os.path.isdir(config.log_dir):
+ os.mkdir(config.log_dir)
+if not os.path.isdir(config.data_dir):
+ os.mkdir(config.data_dir)
if not os.path.isfile("%s/sites.json" % config.data_dir):
open("%s/sites.json" % config.data_dir, "w").write("{}")
if not os.path.isfile("%s/users.json" % config.data_dir):
@@ -50,12 +52,11 @@ logging.getLogger('').name = "-" # Remove root prefix
# Debug dependent configuration
from Debug import DebugHook
if config.debug:
- console_log.setLevel(logging.DEBUG) # Display everything to console
+ console_log.setLevel(logging.DEBUG) # Display everything to console
else:
- console_log.setLevel(logging.INFO) # Display only important info to console
-
-monkey.patch_all(thread=False) # Make time, socket gevent compatible. Not thread: pyfilesystem and system tray icon not compatible
+ console_log.setLevel(logging.INFO) # Display only important info to console
+monkey.patch_all(thread=False) # No thread patching: pyfilesystem and system tray icon are not compatible
# Log current config
@@ -67,7 +68,7 @@ if config.proxy:
from util import SocksProxy
import urllib2
logging.info("Patching sockets to socks proxy: %s" % config.proxy)
- config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost
+ config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost
SocksProxy.monkeyPath(*config.proxy.split(":"))
@@ -81,6 +82,7 @@ PluginManager.plugin_manager.loadPlugins()
@PluginManager.acceptPlugins
class Actions(object):
# Default action: Start serving UiServer and FileServer
+
def main(self):
logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__))
global ui_server, file_server
@@ -113,8 +115,10 @@ class Actions(object):
logging.info("----------------------------------------------------------------------")
while True:
- if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break
- else: logging.info("Please, secure it now, you going to need it to modify your site!")
+ if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
+ break
+ else:
+ logging.info("Please, secure it now, you going to need it to modify your site!")
logging.info("Creating directory structure...")
from Site import Site
@@ -132,7 +136,7 @@ class Actions(object):
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
from Site import Site
logging.info("Signing site: %s..." % address)
- site = Site(address, allow_create = False)
+ site = Site(address, allow_create=False)
if not privatekey: # If no privatekey in args then ask it now
import getpass
@@ -151,7 +155,10 @@ class Actions(object):
for content_inner_path in site.content_manager.contents:
logging.info("Verifing %s signature..." % content_inner_path)
- if site.content_manager.verifyFile(content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False) == True:
+ file_correct = site.content_manager.verifyFile(
+ content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
+ )
+ if file_correct is True:
logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
else:
logging.error("[ERROR] %s: invalid file!" % content_inner_path)
@@ -160,7 +167,7 @@ class Actions(object):
logging.info("Verifying site files...")
bad_files += site.storage.verifyFiles()
if not bad_files:
- logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time()-s))
+ logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
else:
logging.error("[ERROR] Error during verifying site files!")
@@ -170,7 +177,7 @@ class Actions(object):
site = Site(address)
s = time.time()
site.storage.rebuildDb()
- logging.info("Done in %.3fs" % (time.time()-s))
+ logging.info("Done in %.3fs" % (time.time() - s))
def dbQuery(self, address, query):
from Site import Site
@@ -188,46 +195,44 @@ class Actions(object):
s = time.time()
site.announce()
- print "Response time: %.3fs" % (time.time()-s)
+ print "Response time: %.3fs" % (time.time() - s)
print site.peers
-
def siteNeedFile(self, address, inner_path):
from Site import Site
site = Site(address)
site.announce()
print site.needFile(inner_path, update=True)
-
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
global file_server
from Site import Site
- from File import FileServer # We need fileserver to handle incoming file requests
+ from File import FileServer # We need fileserver to handle incoming file requests
logging.info("Creating FileServer....")
file_server = FileServer()
- file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity
+ file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity
file_server.openport()
site = file_server.sites[address]
- site.settings["serving"] = True # Serving the site even if its disabled
- if peer_ip: # Announce ip specificed
+ site.settings["serving"] = True # Serving the site even if its disabled
+ if peer_ip: # Announce the specified ip
site.addPeer(peer_ip, peer_port)
- else: # Just ask the tracker
+ else: # Just ask the tracker
logging.info("Gathering peers from tracker")
- site.announce() # Gather peers
- published = site.publish(20, inner_path) # Push to 20 peers
+ site.announce() # Gather peers
+ published = site.publish(20, inner_path) # Push to 20 peers
if published > 0:
time.sleep(3)
logging.info("Serving files (max 60s)...")
gevent.joinall([file_server_thread], timeout=60)
logging.info("Done.")
else:
- logging.info("No peers found for this site, sitePublish command only works if you already have peers serving your site")
+ logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
# Crypto commands
def cryptPrivatekeyToAddress(self, privatekey=None):
from Crypt import CryptBitcoin
- if not privatekey: # If no privatekey in args then ask it now
+ if not privatekey: # If no privatekey in args then ask for it now
import getpass
privatekey = getpass.getpass("Private key (input hidden):")
@@ -252,7 +257,7 @@ class Actions(object):
for i in range(5):
s = time.time()
print peer.ping(),
- print "Response time: %.3fs (crypt: %s)" % (time.time()-s, peer.connection.crypt)
+ print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt)
time.sleep(1)
def peerGetFile(self, peer_ip, peer_port, site, filename):
@@ -266,7 +271,7 @@ class Actions(object):
peer = Peer(peer_ip, peer_port)
s = time.time()
print peer.getFile(site, filename).read()
- print "Response time: %.3fs" % (time.time()-s)
+ print "Response time: %.3fs" % (time.time() - s)
def peerCmd(self, peer_ip, peer_port, cmd, parameters):
logging.info("Opening a simple connection server")
@@ -284,9 +289,10 @@ class Actions(object):
logging.info("Response: %s" % peer.request(cmd, parameters))
-
actions = Actions()
# Starts here when running zeronet.py
+
+
def start():
# Call function
func = getattr(actions, config.action, None)
diff --git a/src/util/Event.py b/src/util/Event.py
index 0eab1c63..44c9837e 100644
--- a/src/util/Event.py
+++ b/src/util/Event.py
@@ -1,60 +1,57 @@
# Based on http://stackoverflow.com/a/2022629
+
class Event(list):
- def __call__(self, *args, **kwargs):
- for f in self[:]:
- if "once" in dir(f) and f in self:
- self.remove(f)
- f(*args, **kwargs)
+ def __call__(self, *args, **kwargs):
+ for f in self[:]:
+ if "once" in dir(f) and f in self:
+ self.remove(f)
+ f(*args, **kwargs)
- def __repr__(self):
- return "Event(%s)" % list.__repr__(self)
-
-
- def once(self, func, name=None):
- func.once = True
- func.name = None
- if name: # Dont function with same name twice
- names = [f.name for f in self if "once" in dir(f)]
- if name not in names:
- func.name = name
- self.append(func)
- else:
- self.append(func)
- return self
-
+ def __repr__(self):
+ return "Event(%s)" % list.__repr__(self)
+ def once(self, func, name=None):
+ func.once = True
+ func.name = None
+ if name: # Don't add a function with the same name twice
+ names = [f.name for f in self if "once" in dir(f)]
+ if name not in names:
+ func.name = name
+ self.append(func)
+ else:
+ self.append(func)
+ return self
def testBenchmark():
- def say(pre, text):
- print "%s Say: %s" % (pre, text)
-
- import time
- s = time.time()
- onChanged = Event()
- for i in range(1000):
- onChanged.once(lambda pre: say(pre, "once"), "once")
- print "Created 1000 once in %.3fs" % (time.time()-s)
- onChanged("#1")
+ def say(pre, text):
+ print "%s Say: %s" % (pre, text)
+ import time
+ s = time.time()
+ on_changed = Event()
+ for i in range(1000):
+ on_changed.once(lambda pre: say(pre, "once"), "once")
+ print "Created 1000 once in %.3fs" % (time.time() - s)
+ on_changed("#1")
def testUsage():
- def say(pre, text):
- print "%s Say: %s" % (pre, text)
-
- onChanged = Event()
- onChanged.once(lambda pre: say(pre, "once"))
- onChanged.once(lambda pre: say(pre, "once"))
- onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce")
- onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce")
- onChanged.append(lambda pre: say(pre, "always"))
- onChanged("#1")
- onChanged("#2")
- onChanged("#3")
+ def say(pre, text):
+ print "%s Say: %s" % (pre, text)
+
+ on_changed = Event()
+ on_changed.once(lambda pre: say(pre, "once"))
+ on_changed.once(lambda pre: say(pre, "once"))
+ on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
+ on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
+ on_changed.append(lambda pre: say(pre, "always"))
+ on_changed("#1")
+ on_changed("#2")
+ on_changed("#3")
if __name__ == "__main__":
- testBenchmark()
+ testBenchmark()
diff --git a/src/util/GeventSslPatch.py b/src/util/GeventSslPatch.py
index 5356b5f4..8eb8be80 100644
--- a/src/util/GeventSslPatch.py
+++ b/src/util/GeventSslPatch.py
@@ -1,19 +1,22 @@
# Re-add sslwrap to Python 2.7.9
# https://github.com/gevent/gevent/issues/477
-
+
import inspect
__ssl__ = __import__('ssl')
-
+
try:
_ssl = __ssl__._ssl
except AttributeError:
_ssl = __ssl__._ssl2
-
-
+
+
OldSSLSocket = __ssl__.SSLSocket
-
+
+
class NewSSLSocket(OldSSLSocket):
+
"""Fix SSLSocket constructor."""
+
def __init__(
self, sock, keyfile=None, certfile=None, server_side=False, cert_reqs=0,
ssl_version=2, ca_certs=None, do_handshake_on_connect=True,
@@ -25,8 +28,8 @@ class NewSSLSocket(OldSSLSocket):
ssl_version=2, ca_certs=None, do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None
)
-
-
+
+
def new_sslwrap(
sock, server_side=False, keyfile=None, certfile=None,
cert_reqs=__ssl__.CERT_NONE, ssl_version=__ssl__.PROTOCOL_SSLv23,
@@ -40,10 +43,10 @@ def new_sslwrap(
context.load_cert_chain(certfile, keyfile)
if ciphers:
context.set_ciphers(ciphers)
-
+
caller_self = inspect.currentframe().f_back.f_locals['self']
return context._wrap_socket(sock, server_side=server_side, ssl_sock=caller_self)
-
+
if not hasattr(_ssl, 'sslwrap'):
_ssl.sslwrap = new_sslwrap
- __ssl__.SSLSocket = NewSSLSocket
\ No newline at end of file
+ __ssl__.SSLSocket = NewSSLSocket
diff --git a/src/util/Http.py b/src/util/Http.py
index 62d4bc73..f8c62c72 100644
--- a/src/util/Http.py
+++ b/src/util/Http.py
@@ -1,11 +1,13 @@
-import urllib2, logging
+import urllib2
+import logging
+
import GeventSslPatch
from Config import config
-def get(url):
- logging.debug("Get %s" % url)
- req = urllib2.Request(url)
- req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version)
- req.add_header('Accept', 'application/json')
- return urllib2.urlopen(req)
+def get(url):
+ logging.debug("Get %s" % url)
+ req = urllib2.Request(url)
+ req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version)
+ req.add_header('Accept', 'application/json')
+ return urllib2.urlopen(req)
diff --git a/src/util/Noparallel.py b/src/util/Noparallel.py
index 45946789..a4862953 100644
--- a/src/util/Noparallel.py
+++ b/src/util/Noparallel.py
@@ -1,142 +1,145 @@
-import gevent, time
+import gevent
+import time
-class Noparallel(object): # Only allow function running once in same time
- def __init__(self,blocking=True):
- self.threads = {}
- self.blocking = blocking # Blocking: Acts like normal function else thread returned
+class Noparallel(object): # Only allow one run of a function at the same time
+ def __init__(self, blocking=True):
+ self.threads = {}
+ self.blocking = blocking # Blocking: acts like a normal function; otherwise the thread is returned
- def __call__(self, func):
- def wrapper(*args, **kwargs):
- key = (func, tuple(args), tuple(kwargs.items())) # Unique key for function including parameters
- if key in self.threads: # Thread already running (if using blocking mode)
- thread = self.threads[key]
- if self.blocking:
- thread.join() # Blocking until its finished
- return thread.value # Return the value
- else: # No blocking
- if thread.ready(): # Its finished, create a new
- thread = gevent.spawn(func, *args, **kwargs)
- self.threads[key] = thread
- return thread
- else: # Still running
- return thread
- else: # Thread not running
- thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread
- thread.link(lambda thread: self.cleanup(key, thread))
- self.threads[key] = thread
- if self.blocking: # Wait for finish
- thread.join()
- ret = thread.value
- return ret
- else: # No blocking just return the thread
- return thread
- wrapper.func_name = func.func_name
-
- return wrapper
+ def __call__(self, func):
+ def wrapper(*args, **kwargs):
+ key = (func, tuple(args), tuple(kwargs.items())) # Unique key for function including parameters
+ if key in self.threads: # Thread already running for this key
+ thread = self.threads[key]
+ if self.blocking:
+ thread.join() # Block until it's finished
+ return thread.value # Return the value
+ else: # No blocking
+ if thread.ready(): # It's finished, create a new one
+ thread = gevent.spawn(func, *args, **kwargs)
+ self.threads[key] = thread
+ return thread
+ else: # Still running
+ return thread
+ else: # Thread not running
+ thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread
+ thread.link(lambda thread: self.cleanup(key, thread))
+ self.threads[key] = thread
+ if self.blocking: # Wait for finish
+ thread.join()
+ ret = thread.value
+ return ret
+ else: # Not blocking, just return the thread
+ return thread
+ wrapper.func_name = func.func_name
- # Cleanup finished threads
- def cleanup(self, key, thread):
- if key in self.threads: del(self.threads[key])
+ return wrapper
+
+ # Cleanup finished threads
+ def cleanup(self, key, thread):
+ if key in self.threads:
+ del self.threads[key]
class Test():
- @Noparallel()
- def count(self, num=5):
- for i in range(num):
- print self, i
- time.sleep(1)
- return "%s return:%s" % (self, i)
+
+ @Noparallel()
+ def count(self, num=5):
+ for i in range(num):
+ print self, i
+ time.sleep(1)
+ return "%s return:%s" % (self, i)
class TestNoblock():
- @Noparallel(blocking=False)
- def count(self, num=5):
- for i in range(num):
- print self, i
- time.sleep(1)
- return "%s return:%s" % (self, i)
+
+ @Noparallel(blocking=False)
+ def count(self, num=5):
+ for i in range(num):
+ print self, i
+ time.sleep(1)
+ return "%s return:%s" % (self, i)
def testBlocking():
- test = Test()
- test2 = Test()
- print "Counting..."
- print "Creating class1/thread1"
- thread1 = gevent.spawn(test.count)
- print "Creating class1/thread2 (ignored)"
- thread2 = gevent.spawn(test.count)
- print "Creating class2/thread3"
- thread3 = gevent.spawn(test2.count)
+ test = Test()
+ test2 = Test()
+ print "Counting..."
+ print "Creating class1/thread1"
+ thread1 = gevent.spawn(test.count)
+ print "Creating class1/thread2 (ignored)"
+ thread2 = gevent.spawn(test.count)
+ print "Creating class2/thread3"
+ thread3 = gevent.spawn(test2.count)
- print "Joining class1/thread1"
- thread1.join()
- print "Joining class1/thread2"
- thread2.join()
- print "Joining class2/thread3"
- thread3.join()
+ print "Joining class1/thread1"
+ thread1.join()
+ print "Joining class1/thread2"
+ thread2.join()
+ print "Joining class2/thread3"
+ thread3.join()
- print "Creating class1/thread4 (its finished, allowed again)"
- thread4 = gevent.spawn(test.count)
- print "Joining thread4"
- thread4.join()
+ print "Creating class1/thread4 (its finished, allowed again)"
+ thread4 = gevent.spawn(test.count)
+ print "Joining thread4"
+ thread4.join()
+
+ print thread1.value, thread2.value, thread3.value, thread4.value
+ print "Done."
- print thread1.value, thread2.value, thread3.value, thread4.value
- print "Done."
def testNoblocking():
- test = TestNoblock()
- test2 = TestNoblock()
- print "Creating class1/thread1"
- thread1 = test.count()
- print "Creating class1/thread2 (ignored)"
- thread2 = test.count()
- print "Creating class2/thread3"
- thread3 = test2.count()
- print "Joining class1/thread1"
- thread1.join()
- print "Joining class1/thread2"
- thread2.join()
- print "Joining class2/thread3"
- thread3.join()
+ test = TestNoblock()
+ test2 = TestNoblock()
+ print "Creating class1/thread1"
+ thread1 = test.count()
+ print "Creating class1/thread2 (ignored)"
+ thread2 = test.count()
+ print "Creating class2/thread3"
+ thread3 = test2.count()
+ print "Joining class1/thread1"
+ thread1.join()
+ print "Joining class1/thread2"
+ thread2.join()
+ print "Joining class2/thread3"
+ thread3.join()
- print "Creating class1/thread4 (its finished, allowed again)"
- thread4 = test.count()
- print "Joining thread4"
- thread4.join()
+ print "Creating class1/thread4 (its finished, allowed again)"
+ thread4 = test.count()
+ print "Joining thread4"
+ thread4.join()
-
- print thread1.value, thread2.value, thread3.value, thread4.value
- print "Done."
+ print thread1.value, thread2.value, thread3.value, thread4.value
+ print "Done."
def testBenchmark():
- import time
- def printThreadNum():
- import gc
- from greenlet import greenlet
- objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
- print "Greenlets: %s" % len(objs)
+ import time
- printThreadNum()
- test = TestNoblock()
- s = time.time()
- for i in range(3):
- gevent.spawn(test.count, i+1)
- print "Created in %.3fs" % (time.time()-s)
- printThreadNum()
- time.sleep(5)
+ def printThreadNum():
+ import gc
+ from greenlet import greenlet
+ objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
+ print "Greenlets: %s" % len(objs)
+ printThreadNum()
+ test = TestNoblock()
+ s = time.time()
+ for i in range(3):
+ gevent.spawn(test.count, i + 1)
+ print "Created in %.3fs" % (time.time() - s)
+ printThreadNum()
+ time.sleep(5)
if __name__ == "__main__":
- from gevent import monkey
- monkey.patch_all()
+ from gevent import monkey
+ monkey.patch_all()
- testBenchmark()
- print "Testing blocking mode..."
- testBlocking()
- print "Testing noblocking mode..."
- testNoblocking()
- print [instance.threads for instance in registry]
+ testBenchmark()
+ print "Testing blocking mode..."
+ testBlocking()
+ print "Testing noblocking mode..."
+ testNoblocking()
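A usage sketch for the decorator; the class and URLs below are illustrative, and the import path is assumed:

    # Hypothetical sketch: calls are deduplicated per (function, args) key,
    # so a second call with the same arguments reuses the running greenlet.
    import gevent
    from Noparallel import Noparallel

    class Downloader(object):
        @Noparallel(blocking=False)
        def fetch(self, url):
            gevent.sleep(1)  # stand-in for real work
            return url

    downloader = Downloader()
    thread1 = downloader.fetch("http://example.com/a")
    thread2 = downloader.fetch("http://example.com/a")  # same key
    print thread1 is thread2  # True while the first call is still running

Because self is part of the key, separate instances still run in parallel.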
diff --git a/src/util/QueryJson.py b/src/util/QueryJson.py
index 80be85d3..0eb56633 100644
--- a/src/util/QueryJson.py
+++ b/src/util/QueryJson.py
@@ -1,61 +1,64 @@
-import json, re, os
+import json
+import re
+import os
-def queryFile(file_path, filter_path, filter_key = None, filter_val = None):
- back = []
- data = json.load(open(file_path))
- if filter_path == ['']: return [data]
- for key in filter_path: # Get to the point
- data = data.get(key)
- if not data: return
+def queryFile(file_path, filter_path, filter_key=None, filter_val=None):
+ back = []
+ data = json.load(open(file_path))
+ if filter_path == ['']:
+ return [data]
+ for key in filter_path: # Get to the point
+ data = data.get(key)
+ if not data:
+ return
- for row in data:
- if filter_val: # Filter by value
- if row[filter_key] == filter_val: back.append(row)
- else:
- back.append(row)
+ for row in data:
+ if filter_val: # Filter by value
+ if row[filter_key] == filter_val:
+ back.append(row)
+ else:
+ back.append(row)
- return back
+ return back
# Find in json files
-# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6', u'added': 1422740732, u'message_id': 1},...]
+# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1...beEp6', u'added': 1422740732, u'message_id': 1},...]
def query(path_pattern, filter):
- if "=" in filter: # Filter by value
- filter_path, filter_val = filter.split("=")
- filter_path = filter_path.split(".")
- filter_key = filter_path.pop() # Last element is the key
- filter_val = int(filter_val)
- else: # No filter
- filter_path = filter
- filter_path = filter_path.split(".")
- filter_key = None
- filter_val = None
+ if "=" in filter: # Filter by value
+ filter_path, filter_val = filter.split("=")
+ filter_path = filter_path.split(".")
+ filter_key = filter_path.pop() # Last element is the key
+ filter_val = int(filter_val)
+ else: # No filter
+ filter_path = filter
+ filter_path = filter_path.split(".")
+ filter_key = None
+ filter_val = None
- if "/*/" in path_pattern: # Wildcard search
- root_dir, file_pattern = path_pattern.replace("\\", "/").split("/*/")
- else: # No wildcard
- root_dir, file_pattern = re.match("(.*)/(.*?)$", path_pattern.replace("\\", "/")).groups()
- for root, dirs, files in os.walk(root_dir, topdown=False):
- root = root.replace("\\", "/")
- inner_path = root.replace(root_dir, "").strip("/")
- for file_name in files:
- if file_pattern != file_name: continue
-
- try:
- res = queryFile(root+"/"+file_name, filter_path, filter_key, filter_val)
- if not res: continue
- except Exception, err: # Json load error
- # print file_name, filter, err
- continue
- for row in res:
- row["inner_path"] = inner_path
- yield row
+ if "/*/" in path_pattern: # Wildcard search
+ root_dir, file_pattern = path_pattern.replace("\\", "/").split("/*/")
+ else: # No wildcard
+ root_dir, file_pattern = re.match("(.*)/(.*?)$", path_pattern.replace("\\", "/")).groups()
+ for root, dirs, files in os.walk(root_dir, topdown=False):
+ root = root.replace("\\", "/")
+ inner_path = root.replace(root_dir, "").strip("/")
+ for file_name in files:
+ if file_pattern != file_name:
+ continue
+ try:
+ res = queryFile(root + "/" + file_name, filter_path, filter_key, filter_val)
+ if not res:
+ continue
+ except Exception: # Json load error
+ continue
+ for row in res:
+ row["inner_path"] = inner_path
+ yield row
if __name__ == "__main__":
- #for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "topics")):
- # print row
- for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
- print row
\ No newline at end of file
+ for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
+ print row
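A sketch of the dotted filter syntax accepted by query(); the data path is illustrative and the module is assumed importable as QueryJson:

    # Hypothetical examples:
    # "topics"            -> yield every row under the "topics" key
    # "topics.topic_id=1" -> only rows where row["topic_id"] == 1
    from QueryJson import query

    for row in query("data/users/*/data.json", "topics.topic_id=1"):
        print row["inner_path"], row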
diff --git a/src/util/RateLimit.py b/src/util/RateLimit.py
index 330fde6d..55933e40 100644
--- a/src/util/RateLimit.py
+++ b/src/util/RateLimit.py
@@ -4,119 +4,120 @@ import logging
log = logging.getLogger("RateLimit")
-called_db = {} # Holds events last call time
-queue_db = {} # Commands queued to run
+called_db = {} # Holds events last call time
+queue_db = {} # Commands queued to run
# Register event as called
# Return: None
+
+
def called(event):
- called_db[event] = time.time()
+ called_db[event] = time.time()
# Check if calling event is allowed
# Return: True if allowed False if not
def isAllowed(event, allowed_again=10):
- last_called = called_db.get(event)
- if not last_called: # Its not called before
- return True
- elif time.time()-last_called >= allowed_again:
- del called_db[event] # Delete last call time to save memory
- return True
- else:
- return False
+ last_called = called_db.get(event)
+ if not last_called: # It hasn't been called before
+ return True
+ elif time.time() - last_called >= allowed_again:
+ del called_db[event] # Delete last call time to save memory
+ return True
+ else:
+ return False
def callQueue(event):
- func, args, kwargs, thread = queue_db[event]
- log.debug("Calling: %s" % event)
- del called_db[event]
- del queue_db[event]
- return func(*args, **kwargs)
+ func, args, kwargs, thread = queue_db[event]
+ log.debug("Calling: %s" % event)
+ del called_db[event]
+ del queue_db[event]
+ return func(*args, **kwargs)
-
-# Rate limit and delay function call if needed, If the function called again within the rate limit interval then previous queued call will be dropped
-# Return: Immedietly gevent thread
+# Rate limit and delay the function call if necessary
+# If the function is called again within the rate limit interval, the previously queued call is dropped
+# Return: gevent thread (returns immediately)
def callAsync(event, allowed_again=10, func=None, *args, **kwargs):
- if isAllowed(event, allowed_again): # Not called recently, call it now
- called(event)
- # print "Calling now"
- return gevent.spawn(func, *args, **kwargs)
- else: # Called recently, schedule it for later
- time_left = allowed_again-max(0, time.time()-called_db[event])
- log.debug("Added to queue (%.2fs left): %s " % (time_left, event))
- if not queue_db.get(event): # Function call not queued yet
- thread = gevent.spawn_later(time_left, lambda: callQueue(event)) # Call this function later
- queue_db[event] = (func, args, kwargs, thread)
- return thread
- else: # Function call already queued, just update the parameters
- thread = queue_db[event][3]
- queue_db[event] = (func, args, kwargs, thread)
- return thread
+ if isAllowed(event, allowed_again): # Not called recently, call it now
+ called(event)
+ # print "Calling now"
+ return gevent.spawn(func, *args, **kwargs)
+ else: # Called recently, schedule it for later
+ time_left = allowed_again - max(0, time.time() - called_db[event])
+ log.debug("Added to queue (%.2fs left): %s " % (time_left, event))
+ if not queue_db.get(event): # Function call not queued yet
+ thread = gevent.spawn_later(time_left, lambda: callQueue(event)) # Call this function later
+ queue_db[event] = (func, args, kwargs, thread)
+ return thread
+ else: # Function call already queued, just update the parameters
+ thread = queue_db[event][3]
+ queue_db[event] = (func, args, kwargs, thread)
+ return thread
# Rate limit and delay function call if needed
# Return: Wait for execution/delay then return value
def call(event, allowed_again=10, func=None, *args, **kwargs):
- if isAllowed(event): # Not called recently, call it now
- called(event)
- # print "Calling now"
- return func(*args, **kwargs)
+ if isAllowed(event): # Not called recently, call it now
+ called(event)
+ # print "Calling now"
+ return func(*args, **kwargs)
- else: # Called recently, schedule it for later
- time_left = max(0, allowed_again-(time.time()-called_db[event]))
- # print "Time left: %s" % time_left, args, kwargs
- log.debug("Calling sync (%.2fs left): %s" % (time_left, event))
- time.sleep(time_left)
- called(event)
- back = func(*args, **kwargs)
- if event in called_db:
- del called_db[event]
- return back
+ else: # Called recently, schedule it for later
+ time_left = max(0, allowed_again - (time.time() - called_db[event]))
+ # print "Time left: %s" % time_left, args, kwargs
+ log.debug("Calling sync (%.2fs left): %s" % (time_left, event))
+ time.sleep(time_left)
+ called(event)
+ back = func(*args, **kwargs)
+ if event in called_db:
+ del called_db[event]
+ return back
# Cleanup expired events every 3 minutes
def cleanup():
- while 1:
- expired = time.time()-60*2 # Cleanup if older than 2 minutes
- for event in called_db.keys():
- if called_db[event] < expired:
- del called_db[event]
- time.sleep(60*3) # Every 3 minutes
+ while 1:
+ expired = time.time() - 60 * 2 # Cleanup if older than 2 minutes
+ for event in called_db.keys():
+ if called_db[event] < expired:
+ del called_db[event]
+ time.sleep(60 * 3) # Every 3 minutes
gevent.spawn(cleanup)
if __name__ == "__main__":
- from gevent import monkey
- monkey.patch_all()
- import random
+ from gevent import monkey
+ monkey.patch_all()
+ import random
- def publish(inner_path):
- print "Publishing %s..." % inner_path
- return 1
+ def publish(inner_path):
+ print "Publishing %s..." % inner_path
+ return 1
- def cb(thread):
- print "Value:", thread.value
+ def cb(thread):
+ print "Value:", thread.value
- print "Testing async spam requests rate limit to 1/sec..."
- for i in range(3000):
- thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
- time.sleep(float(random.randint(1,20))/100000)
- print thread.link(cb)
- print "Done"
+ print "Testing async spam requests rate limit to 1/sec..."
+ for i in range(3000):
+ thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
+ time.sleep(float(random.randint(1, 20)) / 100000)
+ print thread.link(cb)
+ print "Done"
- time.sleep(2)
+ time.sleep(2)
- print "Testing sync spam requests rate limit to 1/sec..."
- for i in range(5):
- call("publish data.json", 1, publish, "data.json %s" % i)
- time.sleep(float(random.randint(1,100))/100)
- print "Done"
-
- print "Testing cleanup"
- thread = callAsync("publish content.json single", 1, publish, "content.json single")
- print "Needs to cleanup:", called_db, queue_db
- print "Waiting 3min for cleanup process..."
- time.sleep(60*3)
- print "Cleaned up:", called_db, queue_db
+ print "Testing sync spam requests rate limit to 1/sec..."
+ for i in range(5):
+ call("publish data.json", 1, publish, "data.json %s" % i)
+ time.sleep(float(random.randint(1, 100)) / 100)
+ print "Done"
+ print "Testing cleanup"
+ thread = callAsync("publish content.json single", 1, publish, "content.json single")
+ print "Needs to cleanup:", called_db, queue_db
+ print "Waiting 3min for cleanup process..."
+ time.sleep(60 * 3)
+ print "Cleaned up:", called_db, queue_db
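A compact sketch of the coalescing behaviour documented above; the event name and callback are illustrative:

    # Hypothetical sketch: under one event key, the first call runs
    # immediately and later calls collapse into a single queued call
    # that fires with the most recent arguments.
    import gevent
    import RateLimit

    def publish(inner_path):
        print "Publishing %s" % inner_path

    RateLimit.callAsync("publish content", 10, publish, "content.json v1")  # runs now
    RateLimit.callAsync("publish content", 10, publish, "content.json v2")  # queued
    RateLimit.callAsync("publish content", 10, publish, "content.json v3")  # replaces v2
    gevent.sleep(11)  # the queued call fires once, with "content.json v3"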
diff --git a/src/util/SocksProxy.py b/src/util/SocksProxy.py
index d8b0447d..a11a385d 100644
--- a/src/util/SocksProxy.py
+++ b/src/util/SocksProxy.py
@@ -1,22 +1,22 @@
-from lib.PySocks import socks
import socket
+from lib.PySocks import socks
+
def create_connection(address, timeout=None, source_address=None):
- sock = socks.socksocket()
- sock.connect(address)
- return sock
+ sock = socks.socksocket()
+ sock.connect(address)
+ return sock
# Dns queries using the proxy
def getaddrinfo(*args):
- return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
+ return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
def monkeyPath(proxy_ip, proxy_port):
- print proxy_ip, proxy_port
- socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
- socket.socket = socks.socksocket
- socket.create_connection = create_connection
- socket.getaddrinfo = getaddrinfo
-
+ print proxy_ip, proxy_port
+ socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
+ socket.socket = socks.socksocket
+ socket.create_connection = create_connection
+ socket.getaddrinfo = getaddrinfo
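A wiring sketch; the proxy address is illustrative (e.g. a local Tor SOCKS port):

    # Hypothetical usage: after monkeyPath() every new socket, plus DNS
    # resolution via the patched getaddrinfo, goes through the SOCKS5 proxy.
    import SocksProxy

    SocksProxy.monkeyPath("127.0.0.1", "9050")

    import urllib2
    print urllib2.urlopen("http://example.com").getcode()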
diff --git a/src/util/SslPatch.py b/src/util/SslPatch.py
index f25f33c9..408f4c76 100644
--- a/src/util/SslPatch.py
+++ b/src/util/SslPatch.py
@@ -2,6 +2,7 @@
# Disable SSL compression to save massive memory and cpu
import logging
+
from Config import config
@@ -9,7 +10,10 @@ def disableSSLCompression():
import ctypes
import ctypes.util
try:
- openssl = ctypes.CDLL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32', ctypes.RTLD_GLOBAL)
+ openssl = ctypes.CDLL(
+ ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32',
+ ctypes.RTLD_GLOBAL
+ )
openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p
except Exception, err:
logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err)
@@ -81,7 +85,7 @@ if not hasattr(_ssl, 'sslwrap'):
logging.debug("Missing SSLwrap, readded.")
-# Add SSLContext to gevent.ssl (Ubutunu 15 fix)
+# Add SSLContext to gevent.ssl (Ubuntu 15 fix)
try:
import gevent
if not hasattr(gevent.ssl, "SSLContext"):
diff --git a/src/util/StreamingMsgpack.py b/src/util/StreamingMsgpack.py
index b869d814..5ec868c4 100644
--- a/src/util/StreamingMsgpack.py
+++ b/src/util/StreamingMsgpack.py
@@ -1,36 +1,40 @@
-import msgpack, os, struct
+import os
+import struct
+
+import msgpack
def msgpackHeader(size):
- if size <= 2**8-1:
- return b"\xc4" + struct.pack("B", size)
- elif size <= 2**16-1:
- return b"\xc5" + struct.pack(">H", size)
- elif size <= 2**32-1:
- return b"\xc6" + struct.pack(">I", size)
- else:
- raise Exception("huge binary string")
+ if size <= 2 ** 8 - 1:
+ return b"\xc4" + struct.pack("B", size)
+ elif size <= 2 ** 16 - 1:
+ return b"\xc5" + struct.pack(">H", size)
+ elif size <= 2 ** 32 - 1:
+ return b"\xc6" + struct.pack(">I", size)
+ else:
+ raise Exception("huge binary string")
def stream(data, writer):
- packer = msgpack.Packer()
- writer(packer.pack_map_header(len(data)))
- for key, val in data.iteritems():
- writer(packer.pack(key))
- if issubclass(type(val), file): # File obj
- max_size = os.fstat(val.fileno()).st_size-val.tell()
- size = min(max_size, val.read_bytes)
- bytes_left = size
- writer(msgpackHeader(size))
- buff = 1024*64
- while 1:
- writer(val.read(min(bytes_left, buff)))
- bytes_left = bytes_left-buff
- if bytes_left <= 0: break
- else: # Simple
- writer(packer.pack(val))
- return size
+ packer = msgpack.Packer()
+ writer(packer.pack_map_header(len(data)))
+ for key, val in data.iteritems():
+ writer(packer.pack(key))
+ if issubclass(type(val), file): # File obj
+ max_size = os.fstat(val.fileno()).st_size - val.tell()
+ size = min(max_size, val.read_bytes)
+ bytes_left = size
+ writer(msgpackHeader(size))
+ buff = 1024 * 64
+ while 1:
+ writer(val.read(min(bytes_left, buff)))
+ bytes_left = bytes_left - buff
+ if bytes_left <= 0:
+ break
+ else: # Simple
+ writer(packer.pack(val))
+ return size
class FilePart(file):
- pass
\ No newline at end of file
+ pass
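A sketch of streaming a dict that contains a file; the file name is illustrative and the module is assumed importable as StreamingMsgpack:

    # Hypothetical usage: FilePart carries a read_bytes attribute telling
    # stream() how much of the file to inline as a msgpack bin header
    # (0xc4/0xc5/0xc6) plus raw bytes, without buffering it all in memory.
    import StreamingMsgpack

    part = StreamingMsgpack.FilePart("content.json", "rb")
    part.read_bytes = 1024 * 512

    with open("response.msgpack", "wb") as out:
        StreamingMsgpack.stream({"cmd": "response", "body": part}, out.write)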
diff --git a/src/util/UpnpPunch.py b/src/util/UpnpPunch.py
index a514659d..eb4b3f16 100644
--- a/src/util/UpnpPunch.py
+++ b/src/util/UpnpPunch.py
@@ -1,10 +1,13 @@
-import gevent
-from gevent import socket
-
-import re, urllib2, httplib, logging
+import re
+import urllib2
+import httplib
+import logging
from urlparse import urlparse
from xml.dom.minidom import parseString
+import gevent
+from gevent import socket
+
# Relevant UPnP spec: http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf
# General TODOs:
@@ -14,222 +17,222 @@ remove_whitespace = re.compile(r'>\s*<')
def _m_search_ssdp(local_ip):
- """
- Broadcast a UDP SSDP M-SEARCH packet and return response.
- """
- search_target = "urn:schemas-upnp-org:device:InternetGatewayDevice:1"
+ """
+ Broadcast a UDP SSDP M-SEARCH packet and return response.
+ """
+ search_target = "urn:schemas-upnp-org:device:InternetGatewayDevice:1"
- ssdp_request = ''.join(
- ['M-SEARCH * HTTP/1.1\r\n',
- 'HOST: 239.255.255.250:1900\r\n',
- 'MAN: "ssdp:discover"\r\n',
- 'MX: 2\r\n',
- 'ST: {0}\r\n'.format(search_target),
- '\r\n']
- )
+ ssdp_request = ''.join(
+ ['M-SEARCH * HTTP/1.1\r\n',
+ 'HOST: 239.255.255.250:1900\r\n',
+ 'MAN: "ssdp:discover"\r\n',
+ 'MX: 2\r\n',
+ 'ST: {0}\r\n'.format(search_target),
+ '\r\n']
+ )
- sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- sock.bind((local_ip, 10000))
+ sock.bind((local_ip, 10000))
- sock.sendto(ssdp_request, ('239.255.255.250', 1900))
- sock.settimeout(5)
+ sock.sendto(ssdp_request, ('239.255.255.250', 1900))
+ sock.settimeout(5)
- try:
- return sock.recv(2048)
- except socket.error, err:
- # no reply from IGD, possibly no IGD on LAN
- logging.debug("UDP SSDP M-SEARCH send error using ip %s: %s" % (local_ip, err))
- return False
+ try:
+ return sock.recv(2048)
+ except socket.error, err:
+ # no reply from IGD, possibly no IGD on LAN
+ logging.debug("UDP SSDP M-SEARCH send error using ip %s: %s" % (local_ip, err))
+ return False
def _retrieve_location_from_ssdp(response):
- """
- Parse raw HTTP response to retrieve the UPnP location header
- and return a ParseResult object.
- """
- parsed = re.findall(r'(?P<name>.*?): (?P<value>.*?)\r\n', response)
- location_header = filter(lambda x: x[0].lower() == 'location', parsed)
+ """
+ Parse raw HTTP response to retrieve the UPnP location header
+ and return a ParseResult object.
+ """
+ parsed = re.findall(r'(?P<name>.*?): (?P<value>.*?)\r\n', response)
+ location_header = filter(lambda x: x[0].lower() == 'location', parsed)
- if not len(location_header):
- # no location header returned :(
- return False
+ if not len(location_header):
+ # no location header returned :(
+ return False
- return urlparse(location_header[0][1])
+ return urlparse(location_header[0][1])
def _retrieve_igd_profile(url):
- """
- Retrieve the device's UPnP profile.
- """
- return urllib2.urlopen(url.geturl()).read()
+ """
+ Retrieve the device's UPnP profile.
+ """
+ return urllib2.urlopen(url.geturl()).read()
def _node_val(node):
- """
- Get the text value of the first child text node of a node.
- """
- return node.childNodes[0].data
+ """
+ Get the text value of the first child text node of a node.
+ """
+ return node.childNodes[0].data
def _parse_igd_profile(profile_xml):
- """
- Traverse the profile xml DOM looking for either
- WANIPConnection or WANPPPConnection and return
- the value found as well as the 'controlURL'.
- """
- dom = parseString(profile_xml)
+ """
+ Traverse the profile xml DOM looking for either
+ WANIPConnection or WANPPPConnection and return
+ the value found as well as the 'controlURL'.
+ """
+ dom = parseString(profile_xml)
- service_types = dom.getElementsByTagName('serviceType')
- for service in service_types:
- if _node_val(service).find('WANIPConnection') > 0 or \
- _node_val(service).find('WANPPPConnection') > 0:
- control_url = service.parentNode.getElementsByTagName(
- 'controlURL'
- )[0].childNodes[0].data
- upnp_schema = _node_val(service).split(':')[-2]
- return control_url, upnp_schema
+ service_types = dom.getElementsByTagName('serviceType')
+ for service in service_types:
+ if _node_val(service).find('WANIPConnection') > 0 or \
+ _node_val(service).find('WANPPPConnection') > 0:
+ control_url = service.parentNode.getElementsByTagName(
+ 'controlURL'
+ )[0].childNodes[0].data
+ upnp_schema = _node_val(service).split(':')[-2]
+ return control_url, upnp_schema
- return False
+ return False
def _get_local_ip():
- s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
- # not using <broadcast> because gevents getaddrinfo doesn't like that
- # using port 1 as per hobbldygoop's comment about port 0 not working on osx:
- # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928
- s.connect(('239.255.255.250', 1))
- return s.getsockname()[0]
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
+ # not using <broadcast> because gevent's getaddrinfo doesn't like that
+ # using port 1 as per hobbldygoop's comment about port 0 not working on osx:
+ # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928
+ s.connect(('239.255.255.250', 1))
+ return s.getsockname()[0]
def _create_soap_message(local_ip, port, description="UPnPPunch", protocol="TCP",
- upnp_schema='WANIPConnection'):
- """
- Build a SOAP AddPortMapping message.
- """
+ upnp_schema='WANIPConnection'):
+ """
+ Build a SOAP AddPortMapping message.
+ """
- soap_message = """<?xml version="1.0"?>
-<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
-    <s:Body>
-        <u:AddPortMapping xmlns:u="urn:schemas-upnp-org:service:{upnp_schema}:1">
-            <NewRemoteHost></NewRemoteHost>
-            <NewExternalPort>{port}</NewExternalPort>
-            <NewProtocol>{protocol}</NewProtocol>
-            <NewInternalPort>{port}</NewInternalPort>
-            <NewInternalClient>{host_ip}</NewInternalClient>
-            <NewEnabled>1</NewEnabled>
-            <NewPortMappingDescription>{description}</NewPortMappingDescription>
-            <NewLeaseDuration>0</NewLeaseDuration>
-        </u:AddPortMapping>
-    </s:Body>
-</s:Envelope>
+ soap_message = """<?xml version="1.0"?>
+<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
+    <s:Body>
+        <u:AddPortMapping xmlns:u="urn:schemas-upnp-org:service:{upnp_schema}:1">
+            <NewRemoteHost></NewRemoteHost>
+            <NewExternalPort>{port}</NewExternalPort>
+            <NewProtocol>{protocol}</NewProtocol>
+            <NewInternalPort>{port}</NewInternalPort>
+            <NewInternalClient>{host_ip}</NewInternalClient>
+            <NewEnabled>1</NewEnabled>
+            <NewPortMappingDescription>{description}</NewPortMappingDescription>
+            <NewLeaseDuration>0</NewLeaseDuration>
+        </u:AddPortMapping>
+    </s:Body>
+</s:Envelope>
""".format(port=port,
- protocol=protocol,
- host_ip=local_ip,
- description=description,
- upnp_schema=upnp_schema)
- return remove_whitespace.sub('><', soap_message)
+ protocol=protocol,
+ host_ip=local_ip,
+ description=description,
+ upnp_schema=upnp_schema)
+ return remove_whitespace.sub('><', soap_message)
def _parse_for_errors(soap_response):
- if soap_response.status == 500:
- err_dom = parseString(soap_response.read())
- err_code = _node_val(err_dom.getElementsByTagName('errorCode')[0])
- err_msg = _node_val(
- err_dom.getElementsByTagName('errorDescription')[0]
- )
- logging.error('SOAP request error: {0} - {1}'.format(err_code, err_msg))
- raise Exception(
- 'SOAP request error: {0} - {1}'.format(err_code, err_msg)
- )
+ if soap_response.status == 500:
+ err_dom = parseString(soap_response.read())
+ err_code = _node_val(err_dom.getElementsByTagName('errorCode')[0])
+ err_msg = _node_val(
+ err_dom.getElementsByTagName('errorDescription')[0]
+ )
+ logging.error('SOAP request error: {0} - {1}'.format(err_code, err_msg))
+ raise Exception(
+ 'SOAP request error: {0} - {1}'.format(err_code, err_msg)
+ )
- return False
- else:
- return True
+ return False
+ else:
+ return True
def _send_soap_request(location, upnp_schema, control_url, soap_message):
- """
- Send out SOAP request to UPnP device and return a response.
- """
- headers = {
- 'SOAPAction': (
- '"urn:schemas-upnp-org:service:{schema}:'
- '1#AddPortMapping"'.format(schema=upnp_schema)
- ),
- 'Content-Type': 'text/xml'
- }
- conn = httplib.HTTPConnection(location.hostname, location.port)
- conn.request('POST', control_url, soap_message, headers)
+ """
+ Send out SOAP request to UPnP device and return a response.
+ """
+ headers = {
+ 'SOAPAction': (
+ '"urn:schemas-upnp-org:service:{schema}:'
+ '1#AddPortMapping"'.format(schema=upnp_schema)
+ ),
+ 'Content-Type': 'text/xml'
+ }
+ conn = httplib.HTTPConnection(location.hostname, location.port)
+ conn.request('POST', control_url, soap_message, headers)
- response = conn.getresponse()
- conn.close()
+ response = conn.getresponse()
+ conn.close()
- return _parse_for_errors(response)
+ return _parse_for_errors(response)
def open_port(port=15441, desc="UpnpPunch"):
- """
- Attempt to forward a port using UPnP.
- """
+ """
+ Attempt to forward a port using UPnP.
+ """
- local_ips = [_get_local_ip()]
- try:
- local_ips += socket.gethostbyname_ex('')[2] # Get ip by '' hostname not supported on all platform
- except:
- pass
+ local_ips = [_get_local_ip()]
+ try:
+ local_ips += socket.gethostbyname_ex('')[2] # Getting ips by '' hostname is not supported on all platforms
+ except:
+ pass
- try:
- s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- s.connect(('8.8.8.8', 0)) # Using google dns route
- local_ips.append(s.getsockname()[0])
- except:
- pass
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ s.connect(('8.8.8.8', 0)) # Using google dns route
+ local_ips.append(s.getsockname()[0])
+ except:
+ pass
- local_ips = list(set(local_ips)) # Delete duplicates
- logging.debug("Found local ips: %s" % local_ips)
- local_ips = local_ips*3 # Retry every ip 3 times
+ local_ips = list(set(local_ips)) # Delete duplicates
+ logging.debug("Found local ips: %s" % local_ips)
+ local_ips = local_ips * 3 # Retry every ip 3 times
- for local_ip in local_ips:
- logging.debug("Trying using local ip: %s" % local_ip)
- idg_response = _m_search_ssdp(local_ip)
+ for local_ip in local_ips:
+ logging.debug("Trying using local ip: %s" % local_ip)
+ idg_response = _m_search_ssdp(local_ip)
- if not idg_response:
- logging.debug("No IGD response")
- continue
+ if not idg_response:
+ logging.debug("No IGD response")
+ continue
- location = _retrieve_location_from_ssdp(idg_response)
+ location = _retrieve_location_from_ssdp(idg_response)
- if not location:
- logging.debug("No location")
- continue
+ if not location:
+ logging.debug("No location")
+ continue
- parsed = _parse_igd_profile(
- _retrieve_igd_profile(location)
- )
+ parsed = _parse_igd_profile(
+ _retrieve_igd_profile(location)
+ )
- if not parsed:
- logging.debug("IGD parse error using location %s" % repr(location))
- continue
+ if not parsed:
+ logging.debug("IGD parse error using location %s" % repr(location))
+ continue
- control_url, upnp_schema = parsed
+ control_url, upnp_schema = parsed
- soap_messages = [_create_soap_message(local_ip, port, desc, proto, upnp_schema)
- for proto in ['TCP', 'UDP']]
+ soap_messages = [_create_soap_message(local_ip, port, desc, proto, upnp_schema)
+ for proto in ['TCP', 'UDP']]
- requests = [gevent.spawn(
- _send_soap_request, location, upnp_schema, control_url, message
- ) for message in soap_messages]
+ requests = [gevent.spawn(
+ _send_soap_request, location, upnp_schema, control_url, message
+ ) for message in soap_messages]
- gevent.joinall(requests, timeout=3)
+ gevent.joinall(requests, timeout=3)
- if all([request.value for request in requests]):
- return True
- return False
+ if all([request.value for request in requests]):
+ return True
+ return False
if __name__ == "__main__":
- from gevent import monkey
- monkey.patch_socket()
+ from gevent import monkey
+ monkey.patch_socket()
- logging.getLogger().setLevel(logging.DEBUG)
- print open_port(15441, "ZeroNet")
+ logging.getLogger().setLevel(logging.DEBUG)
+ print open_port(15441, "ZeroNet")
diff --git a/update.py b/update.py
index 9b5f9793..3830dd37 100644
--- a/update.py
+++ b/update.py
@@ -1,72 +1,81 @@
-from gevent import monkey; monkey.patch_all()
-import urllib, zipfile, os, ssl, httplib, socket, re
+import urllib
+import zipfile
+import os
+import ssl
+import httplib
+import socket
+import re
import cStringIO as StringIO
+from gevent import monkey
+monkey.patch_all()
+
+
def update():
- # Gevent https bug workaround (https://github.com/gevent/gevent/issues/477)
- reload(socket)
- reload(httplib)
- reload(ssl)
+ # Gevent https bug workaround (https://github.com/gevent/gevent/issues/477)
+ reload(socket)
+ reload(httplib)
+ reload(ssl)
- print "Downloading.",
- file = urllib.urlopen("https://github.com/HelloZeroNet/ZeroNet/archive/master.zip")
- data = StringIO.StringIO()
- while True:
- buff = file.read(1024*16)
- if not buff: break
- data.write(buff)
- print ".",
- print "Downloaded."
+ print "Downloading.",
+ file = urllib.urlopen("https://github.com/HelloZeroNet/ZeroNet/archive/master.zip")
+ data = StringIO.StringIO()
+ while True:
+ buff = file.read(1024 * 16)
+ if not buff:
+ break
+ data.write(buff)
+ print ".",
+ print "Downloaded."
- # Checking plugins
- plugins_enabled = []
- plugins_disabled = []
- if os.path.isdir("plugins"):
- for dir in os.listdir("plugins"):
- if dir.startswith("disabled-"):
- plugins_disabled.append(dir.replace("disabled-", ""))
- else:
- plugins_enabled.append(dir)
- print "Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled
+ # Checking plugins
+ plugins_enabled = []
+ plugins_disabled = []
+ if os.path.isdir("plugins"):
+ for dir in os.listdir("plugins"):
+ if dir.startswith("disabled-"):
+ plugins_disabled.append(dir.replace("disabled-", ""))
+ else:
+ plugins_enabled.append(dir)
+ print "Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled
+ print "Extracting...",
+ zip = zipfile.ZipFile(data)
+ for inner_path in zip.namelist():
+ inner_path = inner_path.replace("\\", "/") # Make sure we have a unix path
+ print ".",
+ dest_path = inner_path.replace("ZeroNet-master/", "")
+ if not dest_path:
+ continue
- print "Extracting...",
- zip = zipfile.ZipFile(data)
- for inner_path in zip.namelist():
- inner_path = inner_path.replace("\\", "/") # Make sure we have unix path
- print ".",
- dest_path = inner_path.replace("ZeroNet-master/", "")
- if not dest_path: continue
+ # Keep plugin disabled/enabled status
+ match = re.match("plugins/([^/]+)", dest_path)
+ if match:
+ plugin_name = match.group(1).replace("disabled-", "")
+ if plugin_name in plugins_enabled: # Plugin was enabled
+ dest_path = dest_path.replace("plugins/disabled-" + plugin_name, "plugins/" + plugin_name)
+ elif plugin_name in plugins_disabled: # Plugin was disabled
+ dest_path = dest_path.replace("plugins/" + plugin_name, "plugins/disabled-" + plugin_name)
+ print "P",
+ dest_dir = os.path.dirname(dest_path)
- # Keep plugin disabled/enabled status
- match = re.match("plugins/([^/]+)", dest_path)
- if match:
- plugin_name = match.group(1).replace("disabled-","")
- if plugin_name in plugins_enabled: # Plugin was enabled
- dest_path = dest_path.replace("plugins/disabled-"+plugin_name, "plugins/"+plugin_name)
- elif plugin_name in plugins_disabled: # Plugin was disabled
- dest_path = dest_path.replace("plugins/"+plugin_name, "plugins/disabled-"+plugin_name)
- print "P",
+ if dest_dir and not os.path.isdir(dest_dir):
+ os.makedirs(dest_dir)
- dest_dir = os.path.dirname(dest_path)
+ if dest_dir != dest_path.strip("/"):
+ data = zip.read(inner_path)
+ try:
+ open(dest_path, 'wb').write(data)
+ except Exception, err:
+ print dest_path, err
- if dest_dir and not os.path.isdir(dest_dir):
- os.makedirs(dest_dir)
-
- if dest_dir != dest_path.strip("/"):
- data = zip.read(inner_path)
- try:
- open(dest_path, 'wb').write(data)
- except Exception, err:
- print dest_path, err
-
- print "Done."
+ print "Done."
if __name__ == "__main__":
- try:
- update()
- except Exception, err:
- print "Update error: %s" % err
- raw_input("Press enter to exit")
\ No newline at end of file
+ try:
+ update()
+ except Exception, err:
+ print "Update error: %s" % err
+ raw_input("Press enter to exit")