First release, remove not used lines from gitignore
This commit is contained in:
parent
c0bfb3b062
commit
d28e1cb4a6
85 changed files with 7205 additions and 50 deletions
52
.gitignore
vendored
52
.gitignore
vendored
|
@ -2,53 +2,5 @@
|
||||||
__pycache__/
|
__pycache__/
|
||||||
*.py[cod]
|
*.py[cod]
|
||||||
|
|
||||||
# C extensions
|
# Log files
|
||||||
*.so
|
*.log
|
||||||
|
|
||||||
# Distribution / packaging
|
|
||||||
.Python
|
|
||||||
env/
|
|
||||||
build/
|
|
||||||
develop-eggs/
|
|
||||||
dist/
|
|
||||||
downloads/
|
|
||||||
eggs/
|
|
||||||
lib/
|
|
||||||
lib64/
|
|
||||||
parts/
|
|
||||||
sdist/
|
|
||||||
var/
|
|
||||||
*.egg-info/
|
|
||||||
.installed.cfg
|
|
||||||
*.egg
|
|
||||||
|
|
||||||
# PyInstaller
|
|
||||||
# Usually these files are written by a python script from a template
|
|
||||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
|
||||||
*.manifest
|
|
||||||
*.spec
|
|
||||||
|
|
||||||
# Installer logs
|
|
||||||
pip-log.txt
|
|
||||||
pip-delete-this-directory.txt
|
|
||||||
|
|
||||||
# Unit test / coverage reports
|
|
||||||
htmlcov/
|
|
||||||
.tox/
|
|
||||||
.coverage
|
|
||||||
.cache
|
|
||||||
nosetests.xml
|
|
||||||
coverage.xml
|
|
||||||
|
|
||||||
# Translations
|
|
||||||
*.mo
|
|
||||||
*.pot
|
|
||||||
|
|
||||||
# Django stuff:
|
|
||||||
*.log
|
|
||||||
|
|
||||||
# Sphinx documentation
|
|
||||||
docs/_build/
|
|
||||||
|
|
||||||
# PyBuilder
|
|
||||||
target/
|
|
2
data/sites.json
Normal file
2
data/sites.json
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
{
|
||||||
|
}
|
1
log/empty.txt
Normal file
1
log/empty.txt
Normal file
|
@ -0,0 +1 @@
|
||||||
|
Place for log files.
|
129
src/Config.py
Normal file
129
src/Config.py
Normal file
|
@ -0,0 +1,129 @@
|
||||||
|
import argparse, sys, os, time
|
||||||
|
import ConfigParser
|
||||||
|
|
||||||
|
class Config(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.version = "0.1"
|
||||||
|
self.parser = self.createArguments()
|
||||||
|
argv = sys.argv[:] # Copy command line arguments
|
||||||
|
argv = self.parseConfig(argv) # Add arguments from config file
|
||||||
|
self.parseCommandline(argv) # Parse argv
|
||||||
|
self.setAttributes()
|
||||||
|
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
|
||||||
|
|
||||||
|
|
||||||
|
# Create command line arguments
|
||||||
|
def createArguments(self):
|
||||||
|
# Platform specific
|
||||||
|
if sys.platform.startswith("win"):
|
||||||
|
upnpc = "tools\\upnpc\\upnpc-static.exe"
|
||||||
|
coffeescript = "type %s | tools\\coffee\\coffee.cmd"
|
||||||
|
else:
|
||||||
|
upnpc = None
|
||||||
|
coffeescript = None
|
||||||
|
|
||||||
|
# Create parser
|
||||||
|
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||||
|
subparsers = parser.add_subparsers(title="Action to perform", dest="action")
|
||||||
|
|
||||||
|
# Main
|
||||||
|
action = subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
|
||||||
|
|
||||||
|
# SiteCreate
|
||||||
|
action = subparsers.add_parser("siteCreate", help='Create a new site')
|
||||||
|
|
||||||
|
# SiteSign
|
||||||
|
action = subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
|
||||||
|
action.add_argument('address', help='Site to sign')
|
||||||
|
action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
|
||||||
|
|
||||||
|
# SitePublish
|
||||||
|
action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
|
||||||
|
action.add_argument('address', help='Site to publish')
|
||||||
|
|
||||||
|
# SiteVerify
|
||||||
|
action = subparsers.add_parser("siteVerify", help='Verify site files using md5: address')
|
||||||
|
action.add_argument('address', help='Site to verify')
|
||||||
|
|
||||||
|
|
||||||
|
# Config parameters
|
||||||
|
parser.add_argument('--debug', help='Debug mode', action='store_true')
|
||||||
|
|
||||||
|
parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='host')
|
||||||
|
parser.add_argument('--ui_port', help='Web interface bind port', default=43110, metavar='port')
|
||||||
|
parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip')
|
||||||
|
parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr', metavar='address')
|
||||||
|
|
||||||
|
parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='host')
|
||||||
|
parser.add_argument('--fileserver_port',help='FileServer bind port', default=15441, metavar='port')
|
||||||
|
|
||||||
|
parser.add_argument('--ip_external', help='External ip (tested on start if None)', metavar='ip')
|
||||||
|
parser.add_argument('--upnpc', help='MiniUPnP binary for open port on router', default=upnpc, metavar='executable_path')
|
||||||
|
|
||||||
|
parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript, metavar='executable_path')
|
||||||
|
|
||||||
|
parser.add_argument('--version', action='version', version='ZeroNet %s' % self.version)
|
||||||
|
|
||||||
|
return parser
|
||||||
|
|
||||||
|
|
||||||
|
# Find arguments specificed for current action
|
||||||
|
def getActionArguments(self):
|
||||||
|
back = {}
|
||||||
|
arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:] # First is --version
|
||||||
|
for argument in arguments:
|
||||||
|
back[argument.dest] = getattr(self, argument.dest)
|
||||||
|
return back
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Try to find action from sys.argv
|
||||||
|
def getAction(self, argv):
|
||||||
|
actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
|
||||||
|
found_action = False
|
||||||
|
for action in actions: # See if any in sys.argv
|
||||||
|
if action in argv:
|
||||||
|
found_action = action
|
||||||
|
break
|
||||||
|
return found_action
|
||||||
|
|
||||||
|
|
||||||
|
# Parse command line arguments
|
||||||
|
def parseCommandline(self, argv):
|
||||||
|
# Find out if action is specificed on start
|
||||||
|
action = self.getAction(argv)
|
||||||
|
if len(argv) == 1 or not action: # If no action specificed set the main action
|
||||||
|
argv.append("main")
|
||||||
|
if "zeronet.py" in argv[0]:
|
||||||
|
self.arguments = self.parser.parse_args(argv[1:])
|
||||||
|
else: # Silent errors if not started with zeronet.py
|
||||||
|
self.arguments = self.parser.parse_args(argv[1:])
|
||||||
|
|
||||||
|
|
||||||
|
# Parse config file
|
||||||
|
def parseConfig(self, argv):
|
||||||
|
if os.path.isfile("zeronet.conf"):
|
||||||
|
config = ConfigParser.ConfigParser(allow_no_value=True)
|
||||||
|
config.read('zeronet.conf')
|
||||||
|
for section in config.sections():
|
||||||
|
for key, val in config.items(section):
|
||||||
|
if section != "global": # If not global prefix key with section
|
||||||
|
key = section+"_"+key
|
||||||
|
if val: argv.insert(1, val)
|
||||||
|
argv.insert(1, "--%s" % key)
|
||||||
|
return argv
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Expose arguments as class attributes
|
||||||
|
def setAttributes(self):
|
||||||
|
# Set attributes from arguments
|
||||||
|
args = vars(self.arguments)
|
||||||
|
for key, val in args.items():
|
||||||
|
setattr(self, key, val)
|
||||||
|
|
||||||
|
|
||||||
|
config = Config()
|
26
src/Crypt/CryptBitcoin.py
Normal file
26
src/Crypt/CryptBitcoin.py
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
from src.lib.BitcoinECC import BitcoinECC
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
|
||||||
|
def newPrivatekey(): # Return new private key
|
||||||
|
bitcoin = BitcoinECC.Bitcoin()
|
||||||
|
bitcoin.GeneratePrivateKey()
|
||||||
|
return bitcoin.PrivateEncoding()
|
||||||
|
|
||||||
|
|
||||||
|
def privatekeyToAddress(privatekey): # Return address from private key
|
||||||
|
bitcoin = BitcoinECC.Bitcoin()
|
||||||
|
bitcoin.BitcoinAddressFromPrivate(privatekey)
|
||||||
|
return bitcoin.BitcoinAddresFromPublicKey()
|
||||||
|
|
||||||
|
|
||||||
|
def sign(data, privatekey): # Return sign to data using private key
|
||||||
|
bitcoin = BitcoinECC.Bitcoin()
|
||||||
|
bitcoin.BitcoinAddressFromPrivate(privatekey)
|
||||||
|
sign = bitcoin.SignECDSA(data)
|
||||||
|
return sign
|
||||||
|
|
||||||
|
|
||||||
|
def verify(data, address, sign): # Verify data using address and sign
|
||||||
|
bitcoin = BitcoinECC.Bitcoin()
|
||||||
|
return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign)
|
18
src/Crypt/CryptHash.py
Normal file
18
src/Crypt/CryptHash.py
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
def sha1sum(file, blocksize=65536):
|
||||||
|
if hasattr(file, "endswith"): # Its a string open it
|
||||||
|
file = open(file, "rb")
|
||||||
|
hash = hashlib.sha1()
|
||||||
|
for block in iter(lambda: file.read(blocksize), ""):
|
||||||
|
hash.update(block)
|
||||||
|
return hash.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
import cStringIO as StringIO
|
||||||
|
a = StringIO.StringIO()
|
||||||
|
a.write("hello!")
|
||||||
|
a.seek(0)
|
||||||
|
print hashlib.sha1("hello!").hexdigest()
|
||||||
|
print sha1sum(a)
|
0
src/Crypt/__init__.py
Normal file
0
src/Crypt/__init__.py
Normal file
22
src/Debug/DebugHook.py
Normal file
22
src/Debug/DebugHook.py
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
import gevent, sys
|
||||||
|
|
||||||
|
last_error = None
|
||||||
|
def handleError(*args):
|
||||||
|
global last_error
|
||||||
|
if not args: # Get last error
|
||||||
|
args = sys.exc_info()
|
||||||
|
silent = True
|
||||||
|
else:
|
||||||
|
silent = False
|
||||||
|
print "Error catched", args
|
||||||
|
last_error = args
|
||||||
|
if not silent: sys.__excepthook__(*args)
|
||||||
|
|
||||||
|
OriginalGreenlet = gevent.Greenlet
|
||||||
|
class ErrorhookedGreenlet(OriginalGreenlet):
|
||||||
|
def _report_error(self, exc_info):
|
||||||
|
handleError(exc_info[0], exc_info[1], exc_info[2])
|
||||||
|
|
||||||
|
sys.excepthook = handleError
|
||||||
|
gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
|
||||||
|
reload(gevent)
|
60
src/Debug/DebugMedia.py
Normal file
60
src/Debug/DebugMedia.py
Normal file
|
@ -0,0 +1,60 @@
|
||||||
|
import os, subprocess, re, logging, time
|
||||||
|
from Config import config
|
||||||
|
|
||||||
|
# Find files with extension in path
|
||||||
|
def findfiles(path, find_ext):
|
||||||
|
for root, dirs, files in os.walk(path, topdown = False):
|
||||||
|
for file in sorted(files):
|
||||||
|
file_path = root+"/"+file
|
||||||
|
file_ext = file.split(".")[-1]
|
||||||
|
if file_ext in find_ext and not file.startswith("all."): yield file_path
|
||||||
|
|
||||||
|
|
||||||
|
# Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features
|
||||||
|
def merge(merged_path):
|
||||||
|
merge_dir = os.path.dirname(merged_path)
|
||||||
|
s = time.time()
|
||||||
|
ext = merged_path.split(".")[-1]
|
||||||
|
if ext == "js": # If merging .js find .coffee too
|
||||||
|
find_ext = ["js", "coffee"]
|
||||||
|
else:
|
||||||
|
find_ext = [ext]
|
||||||
|
|
||||||
|
# If exits check the other files modification date
|
||||||
|
if os.path.isfile(merged_path):
|
||||||
|
merged_mtime = os.path.getmtime(merged_path)
|
||||||
|
changed = False
|
||||||
|
for file_path in findfiles(merge_dir, find_ext):
|
||||||
|
if os.path.getmtime(file_path) > merged_mtime:
|
||||||
|
changed = True
|
||||||
|
break
|
||||||
|
if not changed: return # Assets not changed, nothing to do
|
||||||
|
|
||||||
|
# Merge files
|
||||||
|
parts = []
|
||||||
|
for file_path in findfiles(merge_dir, find_ext):
|
||||||
|
parts.append("\n\n/* ---- %s ---- */\n\n" % file_path)
|
||||||
|
if file_path.endswith(".coffee"): # Compile coffee script
|
||||||
|
if not config.coffeescript_compiler:
|
||||||
|
logging.error("No coffeescript compiler definied, skipping compiling %s" % merged_path)
|
||||||
|
return False # No coffeescript compiler, skip this file
|
||||||
|
command = config.coffeescript_compiler % file_path.replace("/", "\\")
|
||||||
|
s = time.time()
|
||||||
|
compiler = subprocess.Popen(command, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
|
||||||
|
logging.debug("Running: %s (Done in %.2fs)" % (command, time.time()-s))
|
||||||
|
source = compiler.stdout.read()
|
||||||
|
if source:
|
||||||
|
parts.append(source)
|
||||||
|
else:
|
||||||
|
error = compiler.stderr.read()
|
||||||
|
parts.append("alert('%s compile error: %s');" % (file_path, re.escape(error)) )
|
||||||
|
else: # Add to parts
|
||||||
|
parts.append(open(file_path).read())
|
||||||
|
|
||||||
|
merged = "\n".join(parts)
|
||||||
|
if ext == "css": # Vendor prefix css
|
||||||
|
from lib.cssvendor import cssvendor
|
||||||
|
merged = cssvendor.prefix(merged)
|
||||||
|
merged = merged.replace("\r", "")
|
||||||
|
open(merged_path, "wb").write(merged)
|
||||||
|
logging.debug("Merged %s (%.2fs)" % (merged_path, time.time()-s))
|
35
src/Debug/DebugReloader.py
Normal file
35
src/Debug/DebugReloader.py
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
import logging, os, sys, time
|
||||||
|
import threading
|
||||||
|
|
||||||
|
try:
|
||||||
|
from fs.osfs import OSFS
|
||||||
|
pyfilesystem = OSFS("src")
|
||||||
|
except Exception, err:
|
||||||
|
logging.info("%s: For autoreload please download pyfilesystem (https://code.google.com/p/pyfilesystem/)" % err)
|
||||||
|
pyfilesystem = False
|
||||||
|
|
||||||
|
|
||||||
|
class DebugReloader:
|
||||||
|
def __init__ (self, callback, directory = "/"):
|
||||||
|
if pyfilesystem:
|
||||||
|
self.directory = directory
|
||||||
|
self.callback = callback
|
||||||
|
logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback))
|
||||||
|
thread = threading.Thread(target=self.addWatcher)
|
||||||
|
thread.daemon = True
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
|
||||||
|
def addWatcher(self, recursive=True):
|
||||||
|
try:
|
||||||
|
time.sleep(1) # Wait for .pyc compiles
|
||||||
|
pyfilesystem.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive)
|
||||||
|
except Exception, err:
|
||||||
|
print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err
|
||||||
|
|
||||||
|
|
||||||
|
def changed(self, evt):
|
||||||
|
if not evt.path or evt.path.endswith("pyc"): return False # Ignore *.pyc changes
|
||||||
|
#logging.debug("Changed: %s" % evt)
|
||||||
|
time.sleep(0.1) # Wait for lock release
|
||||||
|
self.callback()
|
1
src/Debug/__init__.py
Normal file
1
src/Debug/__init__.py
Normal file
|
@ -0,0 +1 @@
|
||||||
|
from DebugReloader import DebugReloader
|
98
src/File/FileRequest.py
Normal file
98
src/File/FileRequest.py
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
import os, msgpack, shutil
|
||||||
|
from Site import SiteManager
|
||||||
|
from cStringIO import StringIO
|
||||||
|
|
||||||
|
FILE_BUFF = 1024*512
|
||||||
|
|
||||||
|
# Request from me
|
||||||
|
class FileRequest:
|
||||||
|
def __init__(self, server = None):
|
||||||
|
if server:
|
||||||
|
self.server = server
|
||||||
|
self.log = server.log
|
||||||
|
self.sites = SiteManager.list()
|
||||||
|
|
||||||
|
|
||||||
|
def send(self, msg):
|
||||||
|
if not isinstance(msg, dict): # If msg not a dict create a {"body": msg}
|
||||||
|
msg = {"body": msg}
|
||||||
|
self.server.socket.send(msgpack.packb(msg, use_bin_type=True))
|
||||||
|
|
||||||
|
|
||||||
|
# Route file requests
|
||||||
|
def route(self, cmd, params):
|
||||||
|
if cmd == "getFile":
|
||||||
|
self.actionGetFile(params)
|
||||||
|
elif cmd == "update":
|
||||||
|
self.actionUpdate(params)
|
||||||
|
elif cmd == "ping":
|
||||||
|
self.actionPing()
|
||||||
|
else:
|
||||||
|
self.actionUnknown(cmd, params)
|
||||||
|
|
||||||
|
|
||||||
|
# Update a site file request
|
||||||
|
def actionUpdate(self, params):
|
||||||
|
site = self.sites.get(params["site"])
|
||||||
|
if not site or not site.settings["serving"]: # Site unknown or not serving
|
||||||
|
self.send({"error": "Unknown site"})
|
||||||
|
return False
|
||||||
|
if site.settings["own"]:
|
||||||
|
self.log.debug("Someone trying to push a file to own site %s, reload local content.json first" % site.address)
|
||||||
|
site.loadContent()
|
||||||
|
buff = StringIO(params["body"])
|
||||||
|
valid = site.verifyFile(params["inner_path"], buff)
|
||||||
|
if valid == True: # Valid and changed
|
||||||
|
buff.seek(0)
|
||||||
|
file = open(site.getPath(params["inner_path"]), "wb")
|
||||||
|
shutil.copyfileobj(buff, file) # Write buff to disk
|
||||||
|
file.close()
|
||||||
|
|
||||||
|
if params["inner_path"] == "content.json": # Download every changed file from peer
|
||||||
|
changed = site.loadContent() # Get changed files
|
||||||
|
peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer
|
||||||
|
self.log.info("%s changed files: %s" % (site.address_short, changed))
|
||||||
|
for inner_path in changed: # Updated files in content.json
|
||||||
|
site.needFile(inner_path, peer=peer, update=True, blocking=False) # Download file from peer
|
||||||
|
site.onComplete.once(lambda: site.publish()) # On complete publish to other peers
|
||||||
|
|
||||||
|
self.send({"ok": "Thanks, file %s updated!" % params["inner_path"]})
|
||||||
|
|
||||||
|
elif valid == None: # Not changed
|
||||||
|
peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer
|
||||||
|
for task in site.worker_manager.tasks: # New peer add to every ongoing task
|
||||||
|
site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from peer
|
||||||
|
|
||||||
|
self.send({"ok": "File file not changed"})
|
||||||
|
|
||||||
|
else: # Invalid sign or sha1 hash
|
||||||
|
self.send({"error": "File invalid"})
|
||||||
|
|
||||||
|
|
||||||
|
# Send file content request
|
||||||
|
def actionGetFile(self, params):
|
||||||
|
site = self.sites.get(params["site"])
|
||||||
|
if not site or not site.settings["serving"]: # Site unknown or not serving
|
||||||
|
self.send({"error": "Unknown site"})
|
||||||
|
return False
|
||||||
|
try:
|
||||||
|
file = open(site.getPath(params["inner_path"]), "rb")
|
||||||
|
file.seek(params["location"])
|
||||||
|
back = {}
|
||||||
|
back["body"] = file.read(FILE_BUFF)
|
||||||
|
back["location"] = file.tell()
|
||||||
|
back["size"] = os.fstat(file.fileno()).st_size
|
||||||
|
self.send(back)
|
||||||
|
except Exception, err:
|
||||||
|
self.send({"error": "File read error: %s" % err})
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
# Send a simple Pong! answer
|
||||||
|
def actionPing(self):
|
||||||
|
self.send("Pong!")
|
||||||
|
|
||||||
|
|
||||||
|
# Unknown command
|
||||||
|
def actionUnknown(self, cmd, params):
|
||||||
|
self.send({"error": "Unknown command: %s" % cmd})
|
165
src/File/FileServer.py
Normal file
165
src/File/FileServer.py
Normal file
|
@ -0,0 +1,165 @@
|
||||||
|
import os, logging, urllib2, urllib, re, time
|
||||||
|
import gevent, msgpack
|
||||||
|
import zmq.green as zmq
|
||||||
|
from Config import config
|
||||||
|
from FileRequest import FileRequest
|
||||||
|
from Site import SiteManager
|
||||||
|
|
||||||
|
|
||||||
|
class FileServer:
|
||||||
|
def __init__(self):
|
||||||
|
self.ip = config.fileserver_ip
|
||||||
|
self.port = config.fileserver_port
|
||||||
|
self.log = logging.getLogger(__name__)
|
||||||
|
if config.ip_external: # Ip external definied in arguments
|
||||||
|
self.port_opened = True
|
||||||
|
SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
|
||||||
|
else:
|
||||||
|
self.port_opened = None # Is file server opened on router
|
||||||
|
self.sites = SiteManager.list()
|
||||||
|
|
||||||
|
|
||||||
|
# Handle request to fileserver
|
||||||
|
def handleRequest(self, msg):
|
||||||
|
if "params" in msg:
|
||||||
|
self.log.debug("FileRequest: %s %s %s" % (msg["cmd"], msg["params"].get("site"), msg["params"].get("inner_path")))
|
||||||
|
else:
|
||||||
|
self.log.debug("FileRequest: %s" % msg["cmd"])
|
||||||
|
req = FileRequest(self)
|
||||||
|
req.route(msg["cmd"], msg.get("params"))
|
||||||
|
|
||||||
|
|
||||||
|
# Reload the FileRequest class to prevent restarts in debug mode
|
||||||
|
def reload(self):
|
||||||
|
global FileRequest
|
||||||
|
import imp
|
||||||
|
FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest
|
||||||
|
|
||||||
|
|
||||||
|
# Try to open the port using upnp
|
||||||
|
def openport(self, port=None, check=True):
|
||||||
|
if not port: port = self.port
|
||||||
|
if self.port_opened: return True # Port already opened
|
||||||
|
if check: # Check first if its already opened
|
||||||
|
if self.testOpenport(port)["result"] == True:
|
||||||
|
return True # Port already opened
|
||||||
|
|
||||||
|
if config.upnpc: # If we have upnpc util, try to use it to puch port on our router
|
||||||
|
self.log.info("Try to open port using upnpc...")
|
||||||
|
try:
|
||||||
|
exit = os.system("%s -e ZeroNet -r %s tcp" % (config.upnpc, self.port))
|
||||||
|
if exit == 0:
|
||||||
|
upnpc_success = True
|
||||||
|
else:
|
||||||
|
upnpc_success = False
|
||||||
|
except Exception, err:
|
||||||
|
self.log.error("Upnpc run error: %s" % err)
|
||||||
|
upnpc_success = False
|
||||||
|
|
||||||
|
if upnpc_success and self.testOpenport(port)["result"] == True:
|
||||||
|
return True
|
||||||
|
|
||||||
|
self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port)
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
# Test if the port is open
|
||||||
|
def testOpenport(self, port = None):
|
||||||
|
time.sleep(1) # Wait for port open
|
||||||
|
if not port: port = self.port
|
||||||
|
self.log.info("Checking port %s using canyouseeme.org..." % port)
|
||||||
|
try:
|
||||||
|
data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
|
||||||
|
message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
|
||||||
|
message = re.sub("<.*?>", "", message.replace("<br>", " ").replace(" ", " ")) # Strip http tags
|
||||||
|
except Exception, err:
|
||||||
|
message = "Error: %s" % err
|
||||||
|
if "Error" in message:
|
||||||
|
self.log.info("[BAD :(] Port closed: %s" % message)
|
||||||
|
if port == self.port:
|
||||||
|
self.port_opened = False # Self port, update port_opened status
|
||||||
|
config.ip_external = False
|
||||||
|
return {"result": False, "message": message}
|
||||||
|
else:
|
||||||
|
self.log.info("[OK :)] Port open: %s" % message)
|
||||||
|
if port == self.port: # Self port, update port_opened status
|
||||||
|
self.port_opened = True
|
||||||
|
match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message
|
||||||
|
if match: # Found my ip in message
|
||||||
|
config.ip_external = match.group(1)
|
||||||
|
SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
|
||||||
|
else:
|
||||||
|
config.ip_external = False
|
||||||
|
return {"result": True, "message": message}
|
||||||
|
|
||||||
|
|
||||||
|
# Set external ip without testing
|
||||||
|
def setIpExternal(self, ip_external):
|
||||||
|
logging.info("Setting external ip without testing: %s..." % ip_external)
|
||||||
|
config.ip_external = ip_external
|
||||||
|
self.port_opened = True
|
||||||
|
|
||||||
|
|
||||||
|
# Check site file integrity
|
||||||
|
def checkSite(self, site):
|
||||||
|
if site.settings["serving"]:
|
||||||
|
site.announce() # Announce site to tracker
|
||||||
|
site.update() # Update site's content.json and download changed files
|
||||||
|
|
||||||
|
|
||||||
|
# Check sites integrity
|
||||||
|
def checkSites(self):
|
||||||
|
if self.port_opened == None: # Test and open port if not tested yet
|
||||||
|
self.openport()
|
||||||
|
|
||||||
|
self.log.debug("Checking sites integrity..")
|
||||||
|
for address, site in self.sites.items(): # Check sites integrity
|
||||||
|
gevent.spawn(self.checkSite, site) # Check in new thread
|
||||||
|
time.sleep(2) # Prevent too quick request
|
||||||
|
|
||||||
|
|
||||||
|
# Announce sites every 10 min
|
||||||
|
def announceSites(self):
|
||||||
|
while 1:
|
||||||
|
time.sleep(10*60) # Announce sites every 10 min
|
||||||
|
for address, site in self.sites.items():
|
||||||
|
if site.settings["serving"]:
|
||||||
|
site.announce() # Announce site to tracker
|
||||||
|
time.sleep(2) # Prevent too quick request
|
||||||
|
|
||||||
|
|
||||||
|
# Bind and start serving sites
|
||||||
|
def start(self, check_sites = True):
|
||||||
|
self.log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
if config.debug:
|
||||||
|
# Auto reload FileRequest on change
|
||||||
|
from Debug import DebugReloader
|
||||||
|
DebugReloader(self.reload)
|
||||||
|
|
||||||
|
self.context = zmq.Context()
|
||||||
|
socket = self.context.socket(zmq.REP)
|
||||||
|
self.socket = socket
|
||||||
|
self.socket.setsockopt(zmq.RCVTIMEO, 5000) # Wait for data receive
|
||||||
|
self.log.info("Binding to tcp://%s:%s" % (self.ip, self.port))
|
||||||
|
try:
|
||||||
|
self.socket.bind('tcp://%s:%s' % (self.ip, self.port))
|
||||||
|
except Exception, err:
|
||||||
|
self.log.error("Can't bind, FileServer must be running already")
|
||||||
|
return
|
||||||
|
if check_sites: # Open port, Update sites, Check files integrity
|
||||||
|
gevent.spawn(self.checkSites)
|
||||||
|
|
||||||
|
gevent.spawn(self.announceSites)
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
ret = {}
|
||||||
|
req = msgpack.unpackb(socket.recv())
|
||||||
|
self.handleRequest(req)
|
||||||
|
except Exception, err:
|
||||||
|
self.log.error(err)
|
||||||
|
self.socket.send(msgpack.packb({"error": "%s" % err}, use_bin_type=True))
|
||||||
|
if config.debug: # Raise exception
|
||||||
|
import sys
|
||||||
|
sys.excepthook(*sys.exc_info())
|
2
src/File/__init__.py
Normal file
2
src/File/__init__.py
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
from FileServer import FileServer
|
||||||
|
from FileRequest import FileRequest
|
84
src/Peer/Peer.py
Normal file
84
src/Peer/Peer.py
Normal file
|
@ -0,0 +1,84 @@
|
||||||
|
import os, logging, gevent, time, msgpack
|
||||||
|
import zmq.green as zmq
|
||||||
|
from cStringIO import StringIO
|
||||||
|
from Config import config
|
||||||
|
|
||||||
|
context = zmq.Context()
|
||||||
|
|
||||||
|
# Communicate remote peers
|
||||||
|
class Peer:
|
||||||
|
def __init__(self, ip, port):
|
||||||
|
self.ip = ip
|
||||||
|
self.port = port
|
||||||
|
self.socket = None
|
||||||
|
self.last_found = None
|
||||||
|
self.added = time.time()
|
||||||
|
|
||||||
|
self.hash_failed = 0
|
||||||
|
self.download_bytes = 0
|
||||||
|
self.download_time = 0
|
||||||
|
|
||||||
|
|
||||||
|
# Connect to host
|
||||||
|
def connect(self):
|
||||||
|
self.log = logging.getLogger("Peer:%s:%s" % (self.ip, self.port))
|
||||||
|
self.socket = context.socket(zmq.REQ)
|
||||||
|
self.socket.setsockopt(zmq.SNDTIMEO, 5000) # Wait for data send
|
||||||
|
self.socket.setsockopt(zmq.LINGER, 500) # Wait for socket close
|
||||||
|
self.socket.connect('tcp://%s:%s' % (self.ip, self.port))
|
||||||
|
|
||||||
|
|
||||||
|
# Done working with peer
|
||||||
|
def disconnect(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
# Found a peer on tracker
|
||||||
|
def found(self):
|
||||||
|
self.last_found = time.time()
|
||||||
|
|
||||||
|
|
||||||
|
# Send a command to peer
|
||||||
|
def sendCmd(self, cmd, params = {}):
|
||||||
|
if not self.socket: self.connect()
|
||||||
|
try:
|
||||||
|
self.socket.send(msgpack.packb({"cmd": cmd, "params": params}, use_bin_type=True))
|
||||||
|
response = msgpack.unpackb(self.socket.recv())
|
||||||
|
if "error" in response:
|
||||||
|
self.log.error("%s %s error: %s" % (cmd, params, response["error"]))
|
||||||
|
return response
|
||||||
|
except Exception, err:
|
||||||
|
self.log.error("%s" % err)
|
||||||
|
if config.debug:
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
|
self.socket.close()
|
||||||
|
time.sleep(1)
|
||||||
|
self.connect()
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
# Get a file content from peer
|
||||||
|
def getFile(self, site, inner_path):
|
||||||
|
location = 0
|
||||||
|
buff = StringIO()
|
||||||
|
s = time.time()
|
||||||
|
while 1: # Read in 512k parts
|
||||||
|
back = self.sendCmd("getFile", {"site": site, "inner_path": inner_path, "location": location}) # Get file content from last location
|
||||||
|
if "body" not in back: # Error
|
||||||
|
return False
|
||||||
|
|
||||||
|
buff.write(back["body"])
|
||||||
|
if back["location"] == back["size"]: # End of file
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
location = back["location"]
|
||||||
|
self.download_bytes += back["location"]
|
||||||
|
self.download_time += (time.time() - s)
|
||||||
|
buff.seek(0)
|
||||||
|
return buff
|
||||||
|
|
||||||
|
|
||||||
|
# Send a ping request
|
||||||
|
def ping(self):
|
||||||
|
return self.sendCmd("ping")
|
1
src/Peer/__init__.py
Normal file
1
src/Peer/__init__.py
Normal file
|
@ -0,0 +1 @@
|
||||||
|
from Peer import Peer
|
432
src/Site/Site.py
Normal file
432
src/Site/Site.py
Normal file
|
@ -0,0 +1,432 @@
|
||||||
|
import os, json, logging, hashlib, re, time, string, random
|
||||||
|
from lib.subtl.subtl import UdpTrackerClient
|
||||||
|
import gevent
|
||||||
|
import util
|
||||||
|
from Config import config
|
||||||
|
from Peer import Peer
|
||||||
|
from Worker import WorkerManager
|
||||||
|
from Crypt import CryptHash
|
||||||
|
import SiteManager
|
||||||
|
|
||||||
|
class Site:
|
||||||
|
def __init__(self, address, allow_create=True):
|
||||||
|
|
||||||
|
self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure its correct address
|
||||||
|
self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging
|
||||||
|
self.directory = "data/%s" % self.address # Site data diretory
|
||||||
|
self.log = logging.getLogger("Site:%s" % self.address_short)
|
||||||
|
|
||||||
|
if not os.path.isdir(self.directory):
|
||||||
|
if allow_create:
|
||||||
|
os.mkdir(self.directory) # Create directory if not found
|
||||||
|
else:
|
||||||
|
raise Exception("Directory not exists: %s" % self.directory)
|
||||||
|
self.content = None # Load content.json
|
||||||
|
self.peers = {} # Key: ip:port, Value: Peer.Peer
|
||||||
|
self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself)
|
||||||
|
self.last_announce = 0 # Last announce time to tracker
|
||||||
|
self.worker_manager = WorkerManager(self) # Handle site download from other peers
|
||||||
|
self.bad_files = {} # SHA1 check failed files, need to redownload
|
||||||
|
self.content_updated = None # Content.js update time
|
||||||
|
self.last_downloads = [] # Files downloaded in run of self.download()
|
||||||
|
self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout]
|
||||||
|
self.page_requested = False # Page viewed in browser
|
||||||
|
|
||||||
|
self.loadContent(init=True) # Load content.json
|
||||||
|
self.loadSettings() # Load settings from sites.json
|
||||||
|
|
||||||
|
if not self.settings.get("auth_key"):
|
||||||
|
self.settings["auth_key"] = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) # To auth websocket
|
||||||
|
self.log.debug("New auth key: %s" % self.settings["auth_key"])
|
||||||
|
self.saveSettings()
|
||||||
|
self.websockets = [] # Active site websocket connections
|
||||||
|
|
||||||
|
# Add event listeners
|
||||||
|
self.addEventListeners()
|
||||||
|
|
||||||
|
|
||||||
|
# Load content.json to self.content
|
||||||
|
def loadContent(self, init=False):
    """Load and parse <site dir>/content.json into self.content.

    init -- True on first load; suppresses marking changed files as bad.
    Returns the list of inner paths whose sha1 changed since the previous
    content, or None on load/parse error or when content.json is missing.
    """
    old_content = self.content  # keep previous content to diff against
    content_path = "%s/content.json" % self.directory
    if os.path.isfile(content_path):
        try:
            new_content = json.load(open(content_path))
        except Exception, err:
            self.log.error("Content.json load error: %s" % err)
            return None
    else:
        return None # Content.json not exits

    try:
        changed = []
        # Collect files whose sha1 differs from the previously loaded content
        for inner_path, details in new_content["files"].items():
            new_sha1 = details["sha1"]
            if old_content and old_content["files"].get(inner_path):
                old_sha1 = old_content["files"][inner_path]["sha1"]
            else:
                old_sha1 = None  # file not present before
            if old_sha1 != new_sha1: changed.append(inner_path)
        self.content = new_content
    except Exception, err:
        self.log.error("Content.json parse error: %s" % err)
        return None # Content.json parse error

    # Queue changed files for redownload, except on initial load
    if not init:
        for inner_path in changed:
            self.bad_files[inner_path] = True

    return changed
|
||||||
|
|
||||||
|
|
||||||
|
# Load site settings from data/sites.json
|
||||||
|
# Load site settings from data/sites.json
def loadSettings(self):
    """Load this site's settings from data/sites.json into self.settings.

    Falls back to a default settings dict when the site is not registered
    yet; the configured homepage additionally gets ADMIN permission.
    """
    # Use a with-block so the registry file handle is closed (was leaked)
    with open("data/sites.json") as settings_file:
        sites_settings = json.load(settings_file)
    if self.address in sites_settings:
        self.settings = sites_settings[self.address]
    else:
        if self.address == config.homepage: # Add admin permissions to homepage
            permissions = ["ADMIN"]
        else:
            permissions = []
        self.settings = { "own": False, "serving": True, "permissions": permissions } # Default
    return
|
||||||
|
|
||||||
|
|
||||||
|
# Save site settings to data/sites.json
|
||||||
|
# Save site settings to data/sites.json
def saveSettings(self):
    """Persist self.settings under self.address in data/sites.json.

    Reads the whole registry, replaces this site's entry and rewrites the
    file pretty-printed with sorted keys.
    """
    # Use with-blocks so both file handles are closed (were leaked)
    with open("data/sites.json") as settings_file:
        sites_settings = json.load(settings_file)
    sites_settings[self.address] = self.settings
    with open("data/sites.json", "w") as settings_file:
        settings_file.write(json.dumps(sites_settings, indent=4, sort_keys=True))
    return
|
||||||
|
|
||||||
|
|
||||||
|
# Sercurity check and return path of site's file
|
||||||
|
# Sercurity check and return path of site's file
def getPath(self, inner_path):
    """Return the filesystem path of a site file, confined to the site dir.

    Raises Exception when the resolved path would escape self.directory.
    """
    normalized = inner_path.replace("\\", "/")  # Windows separator fix
    # Strip a leading "<site dir>/" prefix if the caller passed a full path
    normalized = re.sub("^%s/" % re.escape(self.directory), "", normalized)
    file_path = "%s/%s" % (self.directory, normalized)
    allowed_dir = os.path.abspath(self.directory)  # files must stay inside this dir
    parent_inside = os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir)
    if ".." in file_path or not parent_inside:
        raise Exception("File not allowed: %s" % file_path)
    return file_path
|
||||||
|
|
||||||
|
|
||||||
|
# Start downloading site
|
||||||
|
# Start downloading site
@util.Noparallel(blocking=False)
def download(self):
    """Download all site files from peers (runs as a background greenlet).

    Fetches content.json first, then queues every listed file through the
    worker manager and waits for all download events to finish.
    Returns False if content.json could not be retrieved.
    """
    self.log.debug("Start downloading...")
    self.announce()  # refresh the peer list from trackers first
    found = self.needFile("content.json", update=self.bad_files.get("content.json"))
    if not found: return False # Could not download content.json
    self.loadContent() # Load the content.json
    self.log.debug("Got content.json")
    evts = []
    self.last_downloads = ["content.json"] # Files downloaded in this run
    for inner_path in self.content["files"].keys():
        res = self.needFile(inner_path, blocking=False, update=self.bad_files.get(inner_path)) # No waiting for finish, return the event
        if res != True: # Need downloading
            self.last_downloads.append(inner_path)
            evts.append(res) # Append evt
    self.log.debug("Downloading %s files..." % len(evts))
    s = time.time()
    gevent.joinall(evts)  # block until every queued download event fires
    self.log.debug("All file downloaded in %.2fs" % (time.time()-s))
|
||||||
|
|
||||||
|
|
||||||
|
# Update content.json from peers and download changed files
|
||||||
|
# Update content.json from peers and download changed files
@util.Noparallel()
def update(self):
    """Re-fetch content.json from peers and redownload changed files.

    Returns the list of changed inner paths (may be None or empty when
    nothing changed or content.json failed to parse).
    """
    self.loadContent() # Reload content.json
    self.content_updated = None  # reset so a fresh fetch is forced
    self.needFile("content.json", update=True)
    changed_files = self.loadContent()
    if changed_files:
        for changed_file in changed_files:
            self.bad_files[changed_file] = True  # queue for redownload
    self.checkFiles(quick_check=True) # Quick check files based on file size
    if self.bad_files:
        self.download()
    return changed_files
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Update content.json on peers
|
||||||
|
# Update content.json on peers
def publish(self, limit=3):
    """Push our content.json to up to `limit` peers.

    Sends an "update" command (with the content.json body) to each known
    peer under a 2 second timeout, stopping after `limit` successes.
    Returns the number of peers that acknowledged the update.
    """
    self.log.info("Publishing to %s/%s peers..." % (limit, len(self.peers)))
    published = 0
    for key, peer in self.peers.items(): # Send update command to each peer
        result = {"exception": "Timeout"}  # default result if the timeout fires
        try:
            with gevent.Timeout(2, False): # 2 sec timeout
                result = peer.sendCmd("update", {
                    "site": self.address,
                    "inner_path": "content.json",
                    "body": open(self.getPath("content.json")).read(),
                    "peer": (config.ip_external, config.fileserver_port)
                })
        except Exception, err:
            result = {"exception": err}

        if result and "ok" in result:
            published += 1
            self.log.info("[OK] %s: %s" % (key, result["ok"]))
        else:
            self.log.info("[ERROR] %s: %s" % (key, result))

        if published >= limit: break
    self.log.info("Successfuly published to %s peers" % published)
    return published
|
||||||
|
|
||||||
|
|
||||||
|
# Check and download if file not exits
|
||||||
|
# Check and download if file not exits
def needFile(self, inner_path, update=False, blocking=True, peer=None):
    """Ensure a site file is present, downloading it when necessary.

    update -- force a redownload even if the file already exists.
    blocking -- wait for the download; if False, return the worker task.
    peer -- optional preferred peer to download from.
    Returns True if already present, False if the site is not serving or
    content.json is unavailable, otherwise the task result (or the task
    itself when blocking is False).
    """
    if os.path.isfile(self.getPath(inner_path)) and not update: # File exits, no need to do anything
        return True
    elif self.settings["serving"] == False: # Site not serving
        return False
    else: # Wait until file downloaded
        if not self.content: # No content.json, download it first!
            self.log.debug("Need content.json first")
            self.announce()
            if inner_path != "content.json": # Prevent double download
                task = self.worker_manager.addTask("content.json", peer)
                task.get()  # block until content.json arrives
                self.loadContent()
                if not self.content: return False

        task = self.worker_manager.addTask(inner_path, peer)
        if blocking:
            return task.get()
        else:
            return task
|
||||||
|
|
||||||
|
|
||||||
|
# Add or update a peer to site
|
||||||
|
# Add or update a peer to site
def addPeer(self, ip, port, return_peer = False):
    """Register a peer for this site keyed by "ip:port".

    For a new peer, creates and returns the Peer object. For a known
    peer, refreshes its last-seen time and returns it only when
    return_peer is True, otherwise returns False.
    """
    key = "%s:%s" % (ip, port)
    existing = self.peers.get(key)
    if existing is not None: # Already has this ip
        existing.found()  # refresh last-seen
        return existing if return_peer else False
    new_peer = Peer(ip, port) # New peer
    self.peers[key] = new_peer
    return new_peer
|
||||||
|
|
||||||
|
|
||||||
|
# Add myself and get other peers from tracker
|
||||||
|
# Add myself and get other peers from tracker
def announce(self, force=False):
    """Announce this site to the trackers and register returned peers.

    Rate-limited to one announce per 15 seconds unless force is True.
    Tries SiteManager.TRACKERS in order and stops after the first
    successful UDP announce; HTTP trackers are not supported yet.
    """
    if time.time() < self.last_announce+15 and not force: return # No reannouncing within 15 secs
    self.last_announce = time.time()

    for protocol, ip, port in SiteManager.TRACKERS:
        if protocol == "udp":
            self.log.debug("Announing to %s://%s:%s..." % (protocol, ip, port))
            tracker = UdpTrackerClient(ip, port)
            tracker.peer_port = config.fileserver_port
            try:
                tracker.connect()
                tracker.poll_once()
                # The site address is sha1-hashed so the tracker sees a standard info hash
                tracker.announce(info_hash=hashlib.sha1(self.address).hexdigest())
                back = tracker.poll_once()
            except Exception, err:
                self.log.error("Tracker error: %s" % err)
                continue
            if back: # Tracker announce success
                peers = back["response"]["peers"]
                added = 0
                for peer in peers:
                    if (peer["addr"], peer["port"]) in self.peer_blacklist: # Ignore blacklist (eg. myself)
                        continue
                    if self.addPeer(peer["addr"], peer["port"]): added += 1
                if added:
                    self.worker_manager.onPeers()  # wake workers waiting for peers
                    self.updateWebsocket(peers_added=added)
                self.log.debug("Found %s peers, new: %s" % (len(peers), added))
                break # Successful announcing, break the list
            else:
                self.log.error("Tracker bad response, trying next in list...") # Failed to announce, go to next
                time.sleep(1)
        else:
            pass # TODO: http tracker support
|
||||||
|
|
||||||
|
|
||||||
|
# Check and try to fix site files integrity
|
||||||
|
# Check and try to fix site files integrity
def checkFiles(self, quick_check=True):
    """Verify the site's files and queue every failure for redownload.

    quick_check -- when True, compare file sizes instead of hashes.
    """
    self.log.debug("Checking files... Quick:%s" % quick_check)
    failed = self.verifyFiles(quick_check)
    if failed:
        # Mark every failed inner path as bad so the downloader retries it
        self.bad_files.update(dict.fromkeys(failed, True))
|
||||||
|
|
||||||
|
|
||||||
|
# - Events -
|
||||||
|
|
||||||
|
# Add event listeners
|
||||||
|
# Add event listeners
def addEventListeners(self):
    """Create the site's event hooks and wire them to the file callbacks."""
    self.onFileStart = util.Event() # If WorkerManager added new task
    self.onFileDone = util.Event() # If WorkerManager successfuly downloaded a file
    self.onFileFail = util.Event() # If WorkerManager failed to download a file
    self.onComplete = util.Event() # All file finished

    self.onFileStart.append(lambda inner_path: self.fileStarted()) # No parameters to make Noparallel batching working
    self.onFileDone.append(lambda inner_path: self.fileDone(inner_path))
    self.onFileFail.append(lambda inner_path: self.fileFailed(inner_path))
|
||||||
|
|
||||||
|
|
||||||
|
# Send site status update to websocket clients
|
||||||
|
# Send site status update to websocket clients
def updateWebsocket(self, **kwargs):
    """Notify every connected websocket client that the site changed.

    An optional single keyword argument is forwarded as the event, e.g.
    updateWebsocket(file_done="index.html").
    """
    if kwargs:
        # next(iter(...)) instead of kwargs.items()[0]: works on both
        # list-returning (Py2) and view-returning (Py3) items()
        param = {"event": next(iter(kwargs.items()))}
    else:
        param = None
    for ws in self.websockets:
        ws.event("siteChanged", self, param)
|
||||||
|
|
||||||
|
|
||||||
|
# File download started
|
||||||
|
# File download started
@util.Noparallel(blocking=False)
def fileStarted(self):
    """Notify websocket clients that file download(s) started.

    Noparallel(blocking=False) plus the short sleep batch several
    nearly-simultaneous start events into a single notification.
    """
    time.sleep(0.001) # Wait for other files adds
    self.updateWebsocket(file_started=True)
|
||||||
|
|
||||||
|
|
||||||
|
# File downloaded successful
|
||||||
|
# File downloaded successful
def fileDone(self, inner_path):
    """Handle a successfully downloaded file: clear it from bad_files,
    note content.json update time and notify websocket clients."""
    # File downloaded, remove it from bad files (values are always True,
    # so a non-None pop result means it was present)
    if self.bad_files.pop(inner_path, None) is not None:
        self.log.debug("Bad file solved: %s" % inner_path)

    # Remember when content.json was last downloaded
    if inner_path == "content.json":
        self.content_updated = time.time()

    self.updateWebsocket(file_done=inner_path)
|
||||||
|
|
||||||
|
|
||||||
|
# File download failed
|
||||||
|
# File download failed
def fileFailed(self, inner_path):
    """Handle a failed download: flag a content.json failure and notify
    websocket clients."""
    content_failed = (inner_path == "content.json")
    if content_failed:
        self.content_updated = False  # mark the update attempt as failed
        self.log.error("Can't update content.json")

    self.updateWebsocket(file_failed=inner_path)
|
||||||
|
|
||||||
|
|
||||||
|
# - Sign and verify -
|
||||||
|
|
||||||
|
|
||||||
|
# Verify fileobj using sha1 in content.json
|
||||||
|
# Verify fileobj using sha1 in content.json
def verifyFile(self, inner_path, file, force=False):
    """Verify a file object against content.json.

    content.json itself is checked by its bitcoin signature; every other
    file is checked by the sha1 recorded in content.json.
    Returns True/False, or None when the offered content.json is the
    same version we already have.
    """
    if inner_path == "content.json": # Check using sign
        from Crypt import CryptBitcoin

        try:
            content = json.load(file)
            if self.content and not force:
                if self.content["modified"] == content["modified"]: # Ignore, have the same content.json
                    return None
                elif self.content["modified"] > content["modified"]: # We have newer
                    return False
            if content["modified"] > time.time()+60*60*24: # Content modified in the far future (allow 1 day window)
                self.log.error("Content.json modify is in the future!")
                return False
            # Check sign
            sign = content["sign"]
            del(content["sign"]) # The file signed without the sign
            sign_content = json.dumps(content, sort_keys=True) # Dump the json to string to remove whitepsace

            return CryptBitcoin.verify(sign_content, self.address, sign)
        except Exception, err:
            self.log.error("Verify sign error: %s" % err)
            return False

    else: # Check using sha1 hash
        if self.content and inner_path in self.content["files"]:
            return CryptHash.sha1sum(file) == self.content["files"][inner_path]["sha1"]
        else: # File not in content.json
            self.log.error("File not in content.json: %s" % inner_path)
            return False
|
||||||
|
|
||||||
|
|
||||||
|
# Verify all files sha1sum using content.json
|
||||||
|
# Verify all files sha1sum using content.json
def verifyFiles(self, quick_check=False): # Fast = using file size
    """Check every file listed in content.json and return the bad ones.

    quick_check -- compare file sizes only, instead of sha1/signature.
    Returns the list of inner paths that are missing or fail the check.
    """
    bad_files = []
    if not self.content: # No content.json, download it first
        self.needFile("content.json", update=True) # Force update to fix corrupt file
        self.loadContent() # Reload content.json
    for inner_path in self.content["files"].keys():
        file_path = self.getPath(inner_path)
        if not os.path.isfile(file_path):
            self.log.error("[MISSING] %s" % inner_path)
            bad_files.append(inner_path)
            continue

        if quick_check:
            ok = os.path.getsize(file_path) == self.content["files"][inner_path]["size"]
        else:
            ok = self.verifyFile(inner_path, open(file_path, "rb"))

        if ok:
            self.log.debug("[OK] %s" % inner_path)
        else:
            self.log.error("[ERROR] %s" % inner_path)
            bad_files.append(inner_path)

    return bad_files
|
||||||
|
|
||||||
|
|
||||||
|
# Create and sign content.json using private key
|
||||||
|
# Create and sign content.json using private key
def signContent(self, privatekey=None):
    """Rebuild content.json (file list with sha1 sums and sizes), sign it
    with the given private key and write it to the site directory.

    The private key must belong to the site address; otherwise only an
    error is logged and nothing is written.
    """
    if not self.content: # New site
        self.log.info("Site not exits yet, loading default content.json values...")
        self.content = {"files": {}, "title": "%s - ZeroNet_" % self.address, "sign": "", "modified": 0.0, "description": "", "address": self.address, "ignore": ""} # Default content.json

    self.log.info("Opening site data directory: %s..." % self.directory)

    hashed_files = {}

    # Hash every file under the site directory, skipping content.json
    # itself and anything matching the "ignore" regexp
    for root, dirs, files in os.walk(self.directory):
        for file_name in files:
            file_path = self.getPath("%s/%s" % (root, file_name))

            if file_name == "content.json" or (self.content["ignore"] and re.match(self.content["ignore"], file_path.replace(self.directory+"/", "") )): # Dont add content.json and ignore regexp pattern definied in content.json
                self.log.info("- [SKIPPED] %s" % file_path)
            else:
                sha1sum = CryptHash.sha1sum(file_path) # Calculate sha sum of file
                inner_path = re.sub("^%s/" % re.escape(self.directory), "", file_path)
                self.log.info("- %s (SHA1: %s)" % (file_path, sha1sum))
                hashed_files[inner_path] = {"sha1": sha1sum, "size": os.path.getsize(file_path)}

    # Generate new content.json
    self.log.info("Adding timestamp and sha1sums to new content.json...")
    import datetime, time

    content = self.content.copy() # Create a copy of current content.json
    content["address"] = self.address # Add files sha1 hash
    content["files"] = hashed_files # Add files sha1 hash
    content["modified"] = time.mktime(datetime.datetime.utcnow().utctimetuple()) # Add timestamp
    del(content["sign"]) # Delete old site

    # Signing content
    from Crypt import CryptBitcoin

    self.log.info("Verifying private key...")
    privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
    if self.address != privatekey_address:
        return self.log.error("Private key invalid! Site address: %s, Private key address: %s" % (self.address, privatekey_address))

    self.log.info("Signing modified content.json...")
    # Sign the canonical (sorted-keys) dump, matching verifyFile()
    sign_content = json.dumps(content, sort_keys=True)
    self.log.debug("Content: %s" % sign_content)
    sign = CryptBitcoin.sign(sign_content, privatekey)
    content["sign"] = sign

    # Saving modified content.json
    self.log.info("Saving to %s/content.json..." % self.directory)
    open("%s/content.json" % self.directory, "w").write(json.dumps(content, indent=4, sort_keys=True))

    self.log.info("Site signed!")
|
62
src/Site/SiteManager.py
Normal file
62
src/Site/SiteManager.py
Normal file
|
@ -0,0 +1,62 @@
|
||||||
|
import json, logging, time, re, os
|
||||||
|
import gevent
|
||||||
|
|
||||||
|
# (protocol, host, port) trackers tried in order by Site.announce();
# announcing stops at the first success, so repeated entries act as retries
TRACKERS = [
    ("udp", "sugoi.pomf.se", 2710),
    ("udp", "open.demonii.com", 1337), # Retry 3 times
    ("udp", "open.demonii.com", 1337),
    ("udp", "open.demonii.com", 1337),
    ("udp", "bigfoot1942.sektori.org", 6969),
    ("udp", "tracker.coppersurfer.tk", 80),
    ("udp", "tracker.leechers-paradise.org", 6969),
    ("udp", "tracker.blazing.de", 80),
]
|
||||||
|
|
||||||
|
# Load all sites from data/sites.json
|
||||||
|
# Load all sites from data/sites.json
def load():
    """Sync the global `sites` dict with data/sites.json.

    Creates Site objects for new addresses that already have a downloaded
    content.json and drops entries that disappeared from the registry.
    """
    from Site import Site
    global sites
    if not sites: sites = {}
    address_found = []
    added = 0
    # Load new adresses
    for address in json.load(open("data/sites.json")):
        if address not in sites and os.path.isfile("data/%s/content.json" % address):
            sites[address] = Site(address)
            added += 1
        address_found.append(address)

    # Remove deleted adresses
    for address in sites.keys():
        if address not in address_found:
            del(sites[address])
            logging.debug("Removed site: %s" % address)

    if added: logging.debug("SiteManager added %s sites" % added)
|
||||||
|
|
||||||
|
|
||||||
|
# Checks if its a valid address
|
||||||
|
# Checks if its a valid address
def isAddress(address):
    """Return a truthy match object when `address` looks like a site
    address (exactly 34 alphanumeric characters), otherwise None."""
    address_pattern = r"^[A-Za-z0-9]{34}$"
    return re.match(address_pattern, address)
|
||||||
|
|
||||||
|
|
||||||
|
# Return site and start download site files
|
||||||
|
# Return site and start download site files
def need(address, all_file=True):
    """Return the Site for `address`, creating it on first use.

    all_file -- also start downloading the whole site in the background.
    Raises Exception when the address is not a valid site address.
    """
    from Site import Site
    if address not in sites: # Site not exits yet
        if not isAddress(address): raise Exception("Not address: %s" % address)
        sites[address] = Site(address)
    site = sites[address]
    if all_file: site.download()
    return site
|
||||||
|
|
||||||
|
|
||||||
|
# Lazy load sites
|
||||||
|
# Lazy load sites
def list():
    """Return the global sites dict, loading it from disk on first access."""
    if sites == None: # Not loaded yet
        load()
    return sites
|
||||||
|
|
||||||
|
|
||||||
|
sites = None # Address -> Site cache; stays None until list()/load() runs
peer_blacklist = [] # Dont download from this peers
|
1
src/Site/__init__.py
Normal file
1
src/Site/__init__.py
Normal file
|
@ -0,0 +1 @@
|
||||||
|
from Site import Site
|
0
src/Test/__init__.py
Normal file
0
src/Test/__init__.py
Normal file
46
src/Test/test.py
Normal file
46
src/Test/test.py
Normal file
|
@ -0,0 +1,46 @@
|
||||||
|
import sys, os, unittest, urllib, time
|
||||||
|
sys.path.append(os.path.abspath("src")) # Imports relative to src dir
|
||||||
|
|
||||||
|
from Crypt import CryptBitcoin
|
||||||
|
from Ui import UiRequest
|
||||||
|
|
||||||
|
class TestCase(unittest.TestCase):
    """Integration tests: media-path security of a running UiServer on
    127.0.0.1:43110 and bitcoin sign/verify round trips."""

    def testMediaRoute(self):
        """Path traversal attempts via /media must be rejected."""
        # Requires a locally running instance; skip when it is not up
        try:
            urllib.urlopen("http://127.0.0.1:43110").read()
        except Exception, err:
            raise unittest.SkipTest(err)
        self.assertIn("Not Found", urllib.urlopen("http://127.0.0.1:43110/media//sites.json").read())
        self.assertIn("Not Found", urllib.urlopen("http://127.0.0.1:43110/media/./sites.json").read())
        self.assertIn("Not Found", urllib.urlopen("http://127.0.0.1:43110/media/../config.py").read())
        self.assertIn("Forbidden", urllib.urlopen("http://127.0.0.1:43110/media/1P2rJhkQjYSHdHpWDDwxfRGYXaoWE8u1vV/../sites.json").read())
        self.assertIn("Forbidden", urllib.urlopen("http://127.0.0.1:43110/media/1P2rJhkQjYSHdHpWDDwxfRGYXaoWE8u1vV/..//sites.json").read())
        self.assertIn("Forbidden", urllib.urlopen("http://127.0.0.1:43110/media/1P2rJhkQjYSHdHpWDDwxfRGYXaoWE8u1vV/../../config.py").read())


    def testBitcoinSign(self):
        """Sign/verify must succeed with the matching key and fail otherwise."""
        s = time.time()
        privatekey = "23DKQpDz7bXM7w5KN5Wnmz7bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf"
        # Same key with one character changed -> different address, bad signs
        privatekey_bad = "23DKQpDz7bXM7w5KN5Wnmz6bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf"

        address = CryptBitcoin.privatekeyToAddress(privatekey)
        self.assertEqual(address, "12vTsjscg4hYPewUL2onma5pgQmWPMs3ez")

        address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad)
        self.assertNotEqual(address_bad, "12vTsjscg4hYPewUL2onma5pgQmWPMs3ez")

        sign = CryptBitcoin.sign("hello", privatekey)

        self.assertTrue(CryptBitcoin.verify("hello", address, sign))
        self.assertFalse(CryptBitcoin.verify("not hello", address, sign))

        sign_bad = CryptBitcoin.sign("hello", privatekey_bad)
        self.assertFalse(CryptBitcoin.verify("hello", address, sign_bad))

        print "Taken: %.3fs, " % (time.time()-s),


if __name__ == "__main__":
    unittest.main(verbosity=2)
|
||||||
|
|
286
src/Ui/UiRequest.py
Normal file
286
src/Ui/UiRequest.py
Normal file
|
@ -0,0 +1,286 @@
|
||||||
|
import time, re, os, mimetypes, json
|
||||||
|
from Config import config
|
||||||
|
from Site import SiteManager
|
||||||
|
from Ui.UiWebsocket import UiWebsocket
|
||||||
|
|
||||||
|
# HTTP status line for each status code used by UiRequest.sendHeader()
status_texts = {
    200: "200 OK",
    400: "400 Bad Request",
    403: "403 Forbidden",
    404: "404 Not Found",
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class UiRequest:
|
||||||
|
def __init__(self, server = None):
|
||||||
|
if server:
|
||||||
|
self.server = server
|
||||||
|
self.log = server.log
|
||||||
|
self.get = {} # Get parameters
|
||||||
|
self.env = {} # Enviroment settings
|
||||||
|
self.start_response = None # Start response function
|
||||||
|
|
||||||
|
|
||||||
|
# Call the request handler function base on path
|
||||||
|
def route(self, path):
|
||||||
|
if config.ui_restrict and self.env['REMOTE_ADDR'] != config.ui_restrict: # Restict Ui access by ip
|
||||||
|
return self.error403()
|
||||||
|
|
||||||
|
if path == "/":
|
||||||
|
return self.actionIndex()
|
||||||
|
elif path == "/favicon.ico":
|
||||||
|
return self.actionFile("src/Ui/media/img/favicon.ico")
|
||||||
|
# Media
|
||||||
|
elif path.startswith("/uimedia/"):
|
||||||
|
return self.actionUiMedia(path)
|
||||||
|
elif path.startswith("/media"):
|
||||||
|
return self.actionSiteMedia(path)
|
||||||
|
# Websocket
|
||||||
|
elif path == "/Websocket":
|
||||||
|
return self.actionWebsocket()
|
||||||
|
# Debug
|
||||||
|
elif path == "/Debug" and config.debug:
|
||||||
|
return self.actionDebug()
|
||||||
|
elif path == "/Console" and config.debug:
|
||||||
|
return self.actionConsole()
|
||||||
|
# Test
|
||||||
|
elif path == "/Test/Websocket":
|
||||||
|
return self.actionFile("Data/temp/ws_test.html")
|
||||||
|
elif path == "/Test/Stream":
|
||||||
|
return self.actionTestStream()
|
||||||
|
# Site media wrapper
|
||||||
|
else:
|
||||||
|
return self.actionWrapper(path)
|
||||||
|
|
||||||
|
|
||||||
|
# Get mime by filename
|
||||||
|
def getContentType(self, file_name):
|
||||||
|
content_type = mimetypes.guess_type(file_name)[0]
|
||||||
|
if not content_type:
|
||||||
|
if file_name.endswith("json"): # Correct json header
|
||||||
|
content_type = "application/json"
|
||||||
|
else:
|
||||||
|
content_type = "application/octet-stream"
|
||||||
|
return content_type
|
||||||
|
|
||||||
|
|
||||||
|
# Send response headers
|
||||||
|
def sendHeader(self, status=200, content_type="text/html; charset=utf-8", extra_headers=[]):
|
||||||
|
headers = []
|
||||||
|
headers.append(("Version", "HTTP/1.1"))
|
||||||
|
headers.append(("Access-Control-Allow-Origin", "*")) # Allow json access
|
||||||
|
headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept")) # Allow json access
|
||||||
|
headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all
|
||||||
|
#headers.append(("Cache-Control", "public, max-age=604800")) # Cache 1 week
|
||||||
|
headers.append(("Content-Type", content_type))
|
||||||
|
for extra_header in extra_headers:
|
||||||
|
headers.append(extra_header)
|
||||||
|
self.start_response(status_texts[status], headers)
|
||||||
|
|
||||||
|
|
||||||
|
# Renders a template
|
||||||
|
def render(self, template_path, *args, **kwargs):
|
||||||
|
#template = SimpleTemplate(open(template_path), lookup=[os.path.dirname(template_path)])
|
||||||
|
#yield str(template.render(*args, **kwargs).encode("utf8"))
|
||||||
|
template = open(template_path).read()
|
||||||
|
yield template.format(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
# - Actions -
|
||||||
|
|
||||||
|
# Redirect to an url
|
||||||
|
def actionRedirect(self, url):
|
||||||
|
self.start_response('301 Redirect', [('Location', url)])
|
||||||
|
yield "Location changed: %s" % url
|
||||||
|
|
||||||
|
|
||||||
|
def actionIndex(self):
|
||||||
|
return self.actionRedirect("/"+config.homepage)
|
||||||
|
|
||||||
|
|
||||||
|
# Render a file from media with iframe site wrapper
|
||||||
|
def actionWrapper(self, path):
|
||||||
|
if self.env.get("HTTP_X_REQUESTED_WITH"): return self.error403() # No ajax allowed on wrapper
|
||||||
|
|
||||||
|
match = re.match("/(?P<site>[A-Za-z0-9]+)(?P<inner_path>/.*|$)", path)
|
||||||
|
if match:
|
||||||
|
inner_path = match.group("inner_path").lstrip("/")
|
||||||
|
if not inner_path: inner_path = "index.html" # If inner path defaults to index.html
|
||||||
|
|
||||||
|
site = self.server.sites.get(match.group("site"))
|
||||||
|
if site and site.content and not site.bad_files: # Its downloaded
|
||||||
|
title = site.content["title"]
|
||||||
|
else:
|
||||||
|
title = "Loading %s..." % match.group("site")
|
||||||
|
site = SiteManager.need(match.group("site")) # Start download site
|
||||||
|
if not site: self.error404()
|
||||||
|
|
||||||
|
|
||||||
|
self.sendHeader(extra_headers=[("X-Frame-Options", "DENY")])
|
||||||
|
return self.render("src/Ui/template/wrapper.html",
|
||||||
|
inner_path=inner_path,
|
||||||
|
address=match.group("site"),
|
||||||
|
title=title,
|
||||||
|
auth_key=site.settings["auth_key"],
|
||||||
|
permissions=json.dumps(site.settings["permissions"]),
|
||||||
|
show_loadingscreen=json.dumps(not os.path.isfile(site.getPath(inner_path))),
|
||||||
|
homepage=config.homepage
|
||||||
|
)
|
||||||
|
|
||||||
|
else: # Bad url
|
||||||
|
return self.error404(path)
|
||||||
|
|
||||||
|
|
||||||
|
# Serve a media for site
|
||||||
|
def actionSiteMedia(self, path):
|
||||||
|
match = re.match("/media/(?P<site>[A-Za-z0-9]+)/(?P<inner_path>.*)", path)
|
||||||
|
|
||||||
|
referer = self.env.get("HTTP_REFERER")
|
||||||
|
if referer: # Only allow same site to receive media
|
||||||
|
referer = re.sub("http://.*?/", "/", referer) # Remove server address
|
||||||
|
referer = referer.replace("/media", "") # Media
|
||||||
|
if not referer.startswith("/"+match.group("site")): return self.error403() # Referer not starts same address as requested path
|
||||||
|
|
||||||
|
if match: # Looks like a valid path
|
||||||
|
file_path = "data/%s/%s" % (match.group("site"), match.group("inner_path"))
|
||||||
|
allowed_dir = os.path.abspath("data/%s" % match.group("site")) # Only files within data/sitehash allowed
|
||||||
|
if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path
|
||||||
|
return self.error403()
|
||||||
|
else:
|
||||||
|
if config.debug and file_path.split("/")[-1].startswith("all."): # When debugging merge *.css to all.css and *.js to all.js
|
||||||
|
site = self.server.sites.get(match.group("site"))
|
||||||
|
if site.settings["own"]:
|
||||||
|
from Debug import DebugMedia
|
||||||
|
DebugMedia.merge(file_path)
|
||||||
|
if os.path.isfile(file_path): # File exits
|
||||||
|
return self.actionFile(file_path)
|
||||||
|
else: # File not exits, try to download
|
||||||
|
site = SiteManager.need(match.group("site"), all_file=False)
|
||||||
|
self.sendHeader(content_type=self.getContentType(file_path)) # ?? Get Exception without this
|
||||||
|
result = site.needFile(match.group("inner_path")) # Wait until file downloads
|
||||||
|
return self.actionFile(file_path)
|
||||||
|
|
||||||
|
else: # Bad url
|
||||||
|
return self.error404(path)
|
||||||
|
|
||||||
|
|
||||||
|
# Serve a media for ui
|
||||||
|
def actionUiMedia(self, path):
|
||||||
|
match = re.match("/uimedia/(?P<inner_path>.*)", path)
|
||||||
|
if match: # Looks like a valid path
|
||||||
|
file_path = "src/Ui/media/%s" % match.group("inner_path")
|
||||||
|
allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed
|
||||||
|
if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path
|
||||||
|
return self.error403()
|
||||||
|
else:
|
||||||
|
if config.debug and match.group("inner_path").startswith("all."): # When debugging merge *.css to all.css and *.js to all.js
|
||||||
|
from Debug import DebugMedia
|
||||||
|
DebugMedia.merge(file_path)
|
||||||
|
return self.actionFile(file_path)
|
||||||
|
else: # Bad url
|
||||||
|
return self.error400()
|
||||||
|
|
||||||
|
|
||||||
|
# Stream a file to client
|
||||||
|
def actionFile(self, file_path, block_size = 64*1024):
|
||||||
|
if os.path.isfile(file_path):
|
||||||
|
# Try to figure out content type by extension
|
||||||
|
content_type = self.getContentType(file_path)
|
||||||
|
|
||||||
|
self.sendHeader(content_type = content_type) # TODO: Dont allow external access: extra_headers=[("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")]
|
||||||
|
if self.env["REQUEST_METHOD"] != "OPTIONS":
|
||||||
|
file = open(file_path, "rb")
|
||||||
|
while 1:
|
||||||
|
try:
|
||||||
|
block = file.read(block_size)
|
||||||
|
if block:
|
||||||
|
yield block
|
||||||
|
else:
|
||||||
|
raise StopIteration
|
||||||
|
except StopIteration:
|
||||||
|
file.close()
|
||||||
|
break
|
||||||
|
else: # File not exits
|
||||||
|
yield self.error404(file_path)
|
||||||
|
|
||||||
|
|
||||||
|
# On websocket connection
|
||||||
|
def actionWebsocket(self):
|
||||||
|
ws = self.env.get("wsgi.websocket")
|
||||||
|
if ws:
|
||||||
|
auth_key = self.get["auth_key"]
|
||||||
|
# Find site by auth_key
|
||||||
|
site = None
|
||||||
|
for site_check in self.server.sites.values():
|
||||||
|
if site_check.settings["auth_key"] == auth_key: site = site_check
|
||||||
|
|
||||||
|
if site: # Correct auth key
|
||||||
|
ui_websocket = UiWebsocket(ws, site, self.server)
|
||||||
|
site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events
|
||||||
|
ui_websocket.start()
|
||||||
|
for site_check in self.server.sites.values(): # Remove websocket from every site (admin sites allowed to join other sites event channels)
|
||||||
|
if ui_websocket in site_check.websockets:
|
||||||
|
site_check.websockets.remove(ui_websocket)
|
||||||
|
return "Bye."
|
||||||
|
else: # No site found by auth key
|
||||||
|
self.log.error("Auth key not found: %s" % auth_key)
|
||||||
|
return self.error403()
|
||||||
|
else:
|
||||||
|
start_response("400 Bad Request", [])
|
||||||
|
return "Not a websocket!"
|
||||||
|
|
||||||
|
|
||||||
|
# Debug last error
|
||||||
|
def actionDebug(self):
|
||||||
|
# Raise last error from DebugHook
|
||||||
|
import sys
|
||||||
|
last_error = sys.modules["src.main"].DebugHook.last_error
|
||||||
|
if last_error:
|
||||||
|
raise last_error[0], last_error[1], last_error[2]
|
||||||
|
else:
|
||||||
|
self.sendHeader()
|
||||||
|
yield "No error! :)"
|
||||||
|
|
||||||
|
|
||||||
|
# Just raise an error to get console
def actionConsole(self):
    # Intentional crash: the Werkzeug debugger turns this traceback into an
    # interactive console in the browser.
    console_error = Exception("Here is your console")
    raise console_error
|
||||||
|
|
||||||
|
|
||||||
|
# - Tests -

def actionTestStream(self):
    # Stream the response in several chunks with a deliberate pause, to
    # verify chunked transfer and the browser's render buffering.
    self.sendHeader()
    padding = " " * 1080
    yield padding # Overflow browser's buffer so rendering starts immediately
    yield "He"
    time.sleep(1) # Pause between chunks to make streaming visible
    yield "llo!"
    yield "Running websockets: %s" % len(self.server.websockets)
    self.server.sendMessage("Hello!")
|
||||||
|
|
||||||
|
|
||||||
|
# - Errors -
|
||||||
|
|
||||||
|
# Send bad request error
def error400(self):
    # Emit the 400 header, then return the body text for the WSGI response
    status = 400
    self.sendHeader(status)
    return "Bad Request"
|
||||||
|
|
||||||
|
|
||||||
|
# You are not allowed to access this
def error403(self):
    # Emit the 403 header, then return the body text for the WSGI response
    status = 403
    self.sendHeader(status)
    return "Forbidden"
|
||||||
|
|
||||||
|
|
||||||
|
# Send file not found error
def error404(self, path = None):
    # Include the missing path in the body to aid debugging
    self.sendHeader(404)
    body = "Not Found: %s" % path
    return body
|
||||||
|
|
||||||
|
# - Reload for easier developing -
def reload(self):
    # Re-import UiWebsocket from disk so code edits take effect without a
    # server restart (used by debug-mode auto reload).
    import imp
    global UiWebsocket
    module = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py")
    UiWebsocket = module.UiWebsocket
|
93
src/Ui/UiServer.py
Normal file
93
src/Ui/UiServer.py
Normal file
|
@ -0,0 +1,93 @@
|
||||||
|
from gevent import monkey; monkey.patch_all(thread = False)
|
||||||
|
import logging, time, cgi, string, random
|
||||||
|
from gevent.pywsgi import WSGIServer
|
||||||
|
from gevent.pywsgi import WSGIHandler
|
||||||
|
from lib.geventwebsocket.handler import WebSocketHandler
|
||||||
|
from Ui import UiRequest
|
||||||
|
from Site import SiteManager
|
||||||
|
from Config import config
|
||||||
|
|
||||||
|
# Skip websocket handler if not necessary
class UiWSGIHandler(WSGIHandler):
    def __init__(self, *args, **kwargs):
        super(UiWSGIHandler, self).__init__(*args, **kwargs)
        # Companion handler, used only when the client requests an upgrade
        self.ws_handler = WebSocketHandler(*args, **kwargs)

    def run_application(self):
        # Presence of an Upgrade header marks a websocket handshake —
        # TODO confirm this is a sufficient test for all clients
        is_websocket = "HTTP_UPGRADE" in self.environ
        if is_websocket: # Websocket request
            self.ws_handler.__dict__ = self.__dict__ # Match class variables
            self.ws_handler.run_application()
        else: # Standard HTTP request
            return super(UiWSGIHandler, self).run_application()
|
||||||
|
|
||||||
|
|
||||||
|
class UiServer:
    """Gevent WSGI server that serves the ZeroNet web UI and its websockets."""

    def __init__(self):
        self.ip = config.ui_ip
        self.port = config.ui_port
        if self.ip == "*": self.ip = "" # Bind all interfaces
        # FIX: sendMessage() iterated self.websockets but nothing ever
        # initialized it — start with an empty connection list.
        self.websockets = []
        self.sites = SiteManager.list()
        self.log = logging.getLogger(__name__)

        self.ui_request = UiRequest(self)

    # Handle a WSGI request by delegating to the shared UiRequest router
    def handleRequest(self, env, start_response):
        path = env["PATH_INFO"]
        self.ui_request.env = env
        self.ui_request.start_response = start_response
        if env.get("QUERY_STRING"):
            self.ui_request.get = dict(cgi.parse_qsl(env['QUERY_STRING']))
        else:
            self.ui_request.get = {}
        return self.ui_request.route(path)

    # Send a message to all connected clients; returns the number delivered
    def sendMessage(self, message):
        sent = 0
        for ws in list(self.websockets): # FIX: iterate a copy — we may remove entries below
            try:
                ws.send(message)
                sent += 1
            except Exception as err: # was py2-only `except X, err`; `as` works on 2.6+ and 3
                self.log.error("addMessage error: %s" % err)
                # FIX: was `self.server.websockets.remove(ws)` — UiServer has
                # no .server attribute; drop the dead socket from our own list.
                self.websockets.remove(ws)
        return sent

    # Reload the UiRequest class to prevent restarts in debug mode
    def reload(self):
        import imp
        self.ui_request = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest(self)
        self.ui_request.reload()

    # Bind and run the server (blocks forever)
    def start(self):
        handler = self.handleRequest

        if config.debug:
            # Auto reload UiRequest on change
            from Debug import DebugReloader
            DebugReloader(self.reload)

            # Werkzeug Debugger (optional dependency, best-effort)
            try:
                from werkzeug.debug import DebuggedApplication
                handler = DebuggedApplication(self.handleRequest, evalex=True)
            except Exception as err:
                self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err)
                from Debug import DebugReloader
        self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log
        self.log.info("--------------------------------------")
        self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port))
        self.log.info("--------------------------------------")

        WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log).serve_forever()
|
217
src/Ui/UiWebsocket.py
Normal file
217
src/Ui/UiWebsocket.py
Normal file
|
@ -0,0 +1,217 @@
|
||||||
|
import json, gevent, time, sys, hashlib
|
||||||
|
from Config import config
|
||||||
|
from Site import SiteManager
|
||||||
|
|
||||||
|
class UiWebsocket:
    """One websocket client session bound to a site.

    Implements the JSON command protocol between the browser wrapper and
    the UiServer, dispatching commands according to the site's permissions.
    """

    def __init__(self, ws, site, server):
        self.ws = ws
        self.site = site
        self.server = server
        self.next_message_id = 1
        self.waiting_cb = {} # Waiting for callback. Key: message_id, Value: function pointer
        self.channels = [] # Channels joined to

    # Start listener loop; blocks until the client disconnects
    def start(self):
        ws = self.ws
        if self.site.address == config.homepage and not self.site.page_requested: # Add open fileserver port message or closed port error to homepage at first request after start
            if config.ip_external:
                self.site.notifications.append(["done", "Congratulation, your port <b>"+str(config.fileserver_port)+"</b> is opened. <br>You are full member of ZeroNet network!", 10000])
            elif config.ip_external == False:
                self.site.notifications.append(["error", "Your network connection is restricted. Please, open <b>"+str(config.fileserver_port)+"</b> port <br>on your router to become full member of ZeroNet network.", 0])
            self.site.page_requested = True # Dont add connection notification anymore

        for notification in self.site.notifications: # Send pending notification messages
            self.cmd("notification", notification)
        self.site.notifications = []
        while True:
            try:
                message = ws.receive()
                if message:
                    self.handleRequest(message)
            except Exception as err: # was py2-only `except X, err`; `as` works on 2.6+ and 3
                # NOTE(review): err.message is Python-2-only — verify before porting
                if err.message != 'Connection is already closed':
                    if config.debug: # Allow websocket errors to appear on /Debug
                        import sys
                        sys.modules["src.main"].DebugHook.handleError()
                    self.site.log.error("WebSocket error: %s" % err)
                return "Bye."

    # Event in a channel: forward site changes to joined clients
    def event(self, channel, *params):
        if channel in self.channels: # We are joined to channel
            if channel == "siteChanged":
                site = params[0] # Triggerer site
                site_info = self.siteInfo(site)
                if len(params) > 1 and params[1]: # Extra data
                    site_info.update(params[1])
                self.cmd("setSiteInfo", site_info)

    # Send response to client (to = message.id)
    def response(self, to, result):
        self.send({"cmd": "response", "to": to, "result": result})

    # Send a command
    def cmd(self, cmd, params=None, cb=None):
        # FIX: mutable default argument {} replaced with a None sentinel
        self.send({"cmd": cmd, "params": params if params is not None else {}}, cb)

    # Encode to json and send message
    def send(self, message, cb=None):
        message["id"] = self.next_message_id # Add message id to allow response
        self.next_message_id += 1
        self.ws.send(json.dumps(message))
        if cb: # Callback after client responded
            self.waiting_cb[message["id"]] = cb

    # Handle an incoming JSON message and dispatch by command name
    def handleRequest(self, data):
        req = json.loads(data)
        cmd = req["cmd"]
        permissions = self.site.settings["permissions"]
        if cmd == "response":
            self.actionResponse(req)
        elif cmd == "ping":
            self.actionPing(req["id"])
        elif cmd == "channelJoin":
            self.actionChannelJoin(req["id"], req["params"])
        elif cmd == "siteInfo":
            self.actionSiteInfo(req["id"], req["params"])
        elif cmd == "serverInfo":
            self.actionServerInfo(req["id"], req["params"])
        elif cmd == "siteUpdate":
            self.actionSiteUpdate(req["id"], req["params"])
        # Admin commands
        elif cmd == "sitePause" and "ADMIN" in permissions:
            self.actionSitePause(req["id"], req["params"])
        elif cmd == "siteResume" and "ADMIN" in permissions:
            self.actionSiteResume(req["id"], req["params"])
        elif cmd == "siteList" and "ADMIN" in permissions:
            self.actionSiteList(req["id"], req["params"])
        elif cmd == "channelJoinAllsite" and "ADMIN" in permissions:
            self.actionChannelJoinAllsite(req["id"], req["params"])
        # Unknown command
        else:
            self.response(req["id"], "Unknown command: %s" % cmd)

    # - Actions -

    # Do callback on response {"cmd": "response", "to": message_id, "result": result}
    def actionResponse(self, req):
        if req["to"] in self.waiting_cb:
            # FIX: the original called the dict itself (self.waiting_cb(...)),
            # a TypeError on every response. Look up the stored callback,
            # invoke it, and forget it so the map does not grow forever.
            callback = self.waiting_cb.pop(req["to"])
            callback(req["result"])
        else:
            self.site.log.error("Websocket callback not found: %s" % req)

    # Send a simple pong answer
    def actionPing(self, to):
        self.response(to, "pong")

    # Format site info for the client
    def siteInfo(self, site):
        ret = {
            "auth_id": self.site.settings["auth_key"][0:10],
            "auth_id_md5": hashlib.md5(self.site.settings["auth_key"][0:10]).hexdigest(),
            "address": site.address,
            "settings": site.settings,
            "content_updated": site.content_updated,
            "bad_files": site.bad_files.keys(),
            "last_downloads": site.last_downloads,
            "peers": len(site.peers),
            "tasks": [task["inner_path"] for task in site.worker_manager.tasks],
            "content": site.content
        }
        if site.settings["serving"] and site.content: ret["peers"] += 1 # Add myself if serving
        return ret

    # Send site details
    def actionSiteInfo(self, to, params):
        ret = self.siteInfo(self.site)
        self.response(to, ret)

    # Join to an event channel
    def actionChannelJoin(self, to, params):
        if params["channel"] not in self.channels:
            self.channels.append(params["channel"])

    # Server variables
    def actionServerInfo(self, to, params):
        ret = {
            "ip_external": config.ip_external,
            "platform": sys.platform,
            "fileserver_ip": config.fileserver_ip,
            "fileserver_port": config.fileserver_port,
            "ui_ip": config.ui_ip,
            "ui_port": config.ui_port,
            "debug": config.debug
        }
        self.response(to, ret)

    # - Admin actions -

    # List all site info
    def actionSiteList(self, to, params):
        ret = []
        SiteManager.load() # Reload sites
        for site in self.server.sites.values():
            if not site.content: continue # Broken site
            ret.append(self.siteInfo(site))
        self.response(to, ret)

    # Join to an event channel on all sites
    def actionChannelJoinAllsite(self, to, params):
        if params["channel"] not in self.channels: # Add channel to channels
            self.channels.append(params["channel"])

        for site in self.server.sites.values(): # Add websocket to every channel
            if self not in site.websockets:
                site.websockets.append(self)

    # Update site content.json
    def actionSiteUpdate(self, to, params):
        address = params.get("address")
        site = self.server.sites.get(address)
        if site and (site.address == self.site.address or "ADMIN" in self.site.settings["permissions"]):
            gevent.spawn(site.update)
        else:
            self.response(to, {"error": "Unknown site: %s" % address})

    # Pause site serving
    def actionSitePause(self, to, params):
        address = params.get("address")
        site = self.server.sites.get(address)
        if site:
            site.settings["serving"] = False
            site.saveSettings()
            site.updateWebsocket()
        else:
            self.response(to, {"error": "Unknown site: %s" % address})

    # Resume site serving
    def actionSiteResume(self, to, params):
        address = params.get("address")
        site = self.server.sites.get(address)
        if site:
            site.settings["serving"] = True
            site.saveSettings()
            gevent.spawn(site.update)
            time.sleep(0.001) # Wait for update thread starting
            site.updateWebsocket()
        else:
            self.response(to, {"error": "Unknown site: %s" % address})
|
3
src/Ui/__init__.py
Normal file
3
src/Ui/__init__.py
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
from UiServer import UiServer
|
||||||
|
from UiRequest import UiRequest
|
||||||
|
from UiWebsocket import UiWebsocket
|
40
src/Ui/media/Loading.coffee
Normal file
40
src/Ui/media/Loading.coffee
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
# Loading screen shown while the site's files are being fetched.
class Loading
    constructor: ->
        @showScreen() if window.show_loadingscreen

    showScreen: ->
        $(".loadingscreen").css("display", "block").addClassLater("ready")
        @screen_visible = true
        @printLine " Connecting..."

    # The loading screen is no longer needed
    hideScreen: ->
        if @screen_visible # Hide with animation
            $(".loadingscreen").addClass("done").removeLater(2000)
        else # Not visible, just remove
            $(".loadingscreen").remove()
        @screen_visible = false

    # Append text to the last console line of the loading screen
    print: (text, type="normal") ->
        return false if not @screen_visible
        $(".loadingscreen .console .cursor").remove() # Remove previous cursor
        last_line = $(".loadingscreen .console .console-line:last-child")
        text = "<span class='console-error'>#{text}</span>" if type == "error"
        last_line.html(last_line.html()+text)

    # Add a new console line to the loading screen
    printLine: (text, type="normal") ->
        return false if not @screen_visible
        $(".loadingscreen .console .cursor").remove() # Remove previous cursor
        if type == "error"
            text = "<span class='console-error'>#{text}</span>"
        else
            text = text+"<span class='cursor'> </span>"
        $(".loadingscreen .console").append("<div class='console-line'>#{text}</div>")

window.Loading = Loading
|
68
src/Ui/media/Notifications.coffee
Normal file
68
src/Ui/media/Notifications.coffee
Normal file
|
@ -0,0 +1,68 @@
|
||||||
|
# Toast-style notifications rendered into the wrapper's notification area.
class Notifications
    constructor: (@elem) ->
        @

    # Manual smoke test: shows and replaces a couple of sample notifications
    test: ->
        setTimeout (=>
            @add("connection", "error", "Connection lost to <b>UiServer</b> on <b>localhost</b>!")
            @add("message-Anyone", "info", "New from <b>Anyone</b>.")
        ), 1000
        setTimeout (=>
            @add("connection", "done", "<b>UiServer</b> connection recovered.", 5000)
        ), 3000

    # Display a notification; timeout=0 keeps it until manually closed
    add: (id, type, body, timeout=0) ->
        @log id, type, body, timeout
        # Close any existing notification with the same id
        for existing in $(".notification-#{id}")
            @close $(existing)

        # Clone the hidden template and tag it with type and id classes
        elem = $(".notification.template", @elem).clone().removeClass("template")
        elem.addClass("notification-#{type}").addClass("notification-#{id}")

        # Pick the icon by type
        icon = switch type
            when "error" then "!"
            when "done" then "<div class='icon-success'></div>"
            else "i"
        $(".notification-icon", elem).html(icon)

        $(".body", elem).html(body)

        elem.appendTo(@elem)

        # Auto-close after timeout (no close button needed in that case)
        if timeout
            $(".close", elem).remove() # No need of close button
            setTimeout (=>
                @close elem
            ), timeout

        # Entry animation
        width = elem.outerWidth()
        if not timeout then width += 20 # Add space for close button
        elem.css({"width": "50px", "transform": "scale(0.01)"})
        elem.animate({"scale": 1}, 800, "easeOutElastic")
        elem.animate({"width": width}, 700, "easeInOutCubic")

        # Close button
        $(".close", elem).on "click", =>
            @close elem
            return false

        @

    close: (elem) ->
        elem.stop().animate {"width": 0, "opacity": 0}, 700, "easeInOutCubic"
        elem.slideUp 300, (-> elem.remove())

    log: (args...) ->
        console.log "[Notifications]", args...

window.Notifications = Notifications
|
33
src/Ui/media/Sidebar.coffee
Normal file
33
src/Ui/media/Sidebar.coffee
Normal file
|
@ -0,0 +1,33 @@
|
||||||
|
# Hover/press animations for the fixed corner button.
class Sidebar
    constructor: ->
        @initFixbutton()

    initFixbutton: ->
        button_bg = $(".fixbutton-bg")

        button_bg.on "mouseover", ->
            $(@).stop().animate({"scale": 0.7}, 800, "easeOutElastic")
            $(".fixbutton-burger").stop().animate({"opacity": 1.5, "left": 0}, 800, "easeOutElastic")
            $(".fixbutton-text").stop().animate({"opacity": 0, "left": 20}, 300, "easeOutCubic")

        button_bg.on "mouseout", ->
            $(@).stop().animate({"scale": 0.6}, 300, "easeOutCubic")
            $(".fixbutton-burger").stop().animate({"opacity": 0, "left": -20}, 300, "easeOutCubic")
            $(".fixbutton-text").stop().animate({"opacity": 1, "left": 0}, 300, "easeOutBack")

        button_bg.on "mousedown", ->
            $(".fixbutton-burger").stop().animate({"scale": 0.7, "left": 0}, 300, "easeOutCubic")

        button_bg.on "mouseup", ->
            $(".fixbutton-burger").stop().animate({"scale": 1, "left": 0}, 600, "easeOutElastic")

window.Sidebar = Sidebar
|
152
src/Ui/media/Wrapper.coffee
Normal file
152
src/Ui/media/Wrapper.coffee
Normal file
|
@ -0,0 +1,152 @@
|
||||||
|
# Bridges the inner site iframe and the UiServer websocket, and drives the
# loading screen and notifications.
class Wrapper
    constructor: (ws_url) ->
        @log "Created!"

        @loading = new Loading()
        @notifications = new Notifications($(".notifications"))
        @sidebar = new Sidebar()

        window.addEventListener("message", @onMessageInner, false)
        @inner = document.getElementById("inner-iframe").contentWindow
        @ws = new ZeroWebsocket(ws_url)
        @ws.next_message_id = 1000000 # Avoid messageid collision :)
        @ws.onOpen = @onOpenWebsocket
        @ws.onClose = @onCloseWebsocket
        @ws.onMessage = @onMessageWebsocket
        @ws.connect()
        @ws_error = null # Ws error message

        @site_info = null # Hold latest site info
        @inner_loaded = false # If iframe loaded or not
        @inner_ready = false # Inner frame ready to receive messages
        @wrapperWsInited = false # Wrapper notified on websocket open
        @site_error = null # Latest failed file download

        window.onload = @onLoad # On iframe loaded
        @

    # Incoming message from UiServer websocket
    onMessageWebsocket: (e) =>
        message = JSON.parse(e.data)
        cmd = message.cmd
        if cmd == "response"
            if @ws.waiting_cb[message.to]? # We are waiting for this response
                @ws.waiting_cb[message.to](message.result)
            else
                @sendInner message # Pass message to inner frame
        else if cmd == "notification" # Display notification
            @notifications.add("notification-#{message.id}", message.params[0], message.params[1], message.params[2])
        else if cmd == "setSiteInfo"
            @sendInner message # Pass to inner frame
            if message.params.address == window.address # Current page
                @setSiteInfo message.params
        else
            @sendInner message # Pass message to inner frame

    # Incoming message from inner frame
    onMessageInner: (e) =>
        message = e.data
        cmd = message.cmd
        if cmd == "innerReady"
            @inner_ready = true
            @log "innerReady", @ws.ws.readyState, @wrapperWsInited
            if @ws.ws.readyState == 1 and not @wrapperWsInited # Ws already opened
                @sendInner {"cmd": "wrapperOpenedWebsocket"}
                @wrapperWsInited = true
        else if cmd == "wrapperNotification"
            @notifications.add("notification-#{message.id}", message.params[0], message.params[1], message.params[2])
        else # Everything else goes to the websocket
            @ws.send(message)

    onOpenWebsocket: (e) =>
        @ws.cmd "channelJoin", {"channel": "siteChanged"} # Get info on modifications
        @log "onOpenWebsocket", @inner_ready, @wrapperWsInited
        if not @wrapperWsInited and @inner_ready
            @sendInner {"cmd": "wrapperOpenedWebsocket"} # Send to inner frame
            @wrapperWsInited = true
        if @inner_loaded # Update site info
            @reloadSiteInfo()

        # If inner frame not loaded within 2s, show peer info on the loading screen
        setTimeout (=>
            if not @site_info then @reloadSiteInfo()
        ), 2000

        if @ws_error
            @notifications.add("connection", "done", "Connection with <b>UiServer Websocket</b> recovered.", 6000)
            @ws_error = null

    onCloseWebsocket: (e) =>
        @wrapperWsInited = false
        setTimeout (=> # Wait a bit, maybe its page closing
            @sendInner {"cmd": "wrapperClosedWebsocket"} # Send to inner frame
            if e.code == 1000 # Server error, please reload page
                @ws_error = @notifications.add("connection", "error", "UiServer Websocket error, please reload the page.")
            else if not @ws_error
                @ws_error = @notifications.add("connection", "error", "Connection with <b>UiServer Websocket</b> was lost. Reconnecting...")
        ), 500

    # Iframe loaded
    onLoad: (e) =>
        @log "onLoad", e
        @inner_loaded = true
        if not @inner_ready then @sendInner {"cmd": "wrapperReady"} # Inner frame loaded before wrapper
        if not @site_error then @loading.hideScreen() # Hide loading screen
        if @ws.ws.readyState == 1 and not @site_info # Ws opened
            @reloadSiteInfo()

    # Send message to inner frame
    sendInner: (message) ->
        @inner.postMessage(message, '*')

    # Get site info from UiServer
    reloadSiteInfo: ->
        @ws.cmd "siteInfo", {}, (site_info) =>
            @setSiteInfo site_info
            window.document.title = site_info.content.title+" - ZeroNet"
            @log "Setting title to", window.document.title

    # Got setSiteInfo from websocket UiServer
    setSiteInfo: (site_info) ->
        if site_info.event? # If loading screen visible, add event to it
            if site_info.event[0] == "file_added" and site_info.bad_files.length
                @loading.printLine("#{site_info.bad_files.length} files needs to be downloaded")
            else if site_info.event[0] == "file_done"
                @loading.printLine("#{site_info.event[1]} downloaded")
                if site_info.event[1] == window.inner_path # File we are currently on finished
                    @loading.hideScreen()
                    if not $(".loadingscreen").length # Already removed (loaded +2sec)
                        @notifications.add("modified", "info", "New version of this page has just released.<br>Reload to see the modified content.")
            else if site_info.event[0] == "file_failed"
                @site_error = site_info.event[1]
                @loading.printLine("#{site_info.event[1]} download failed", "error")
            else if site_info.event[0] == "peers_added"
                @loading.printLine("Peers found: #{site_info.peers}")

        if @loading.screen_visible and not @site_info # First site info: show current peers
            if site_info.peers > 1
                @loading.printLine "Peers found: #{site_info.peers}"
            else
                @site_error = "No peers found"
                @loading.printLine "No peers found"
        @site_info = site_info

    log: (args...) ->
        console.log "[Wrapper]", args...

ws_url = "ws://#{window.location.hostname}:#{window.location.port}/Websocket?auth_key=#{window.auth_key}"
window.wrapper = new Wrapper(ws_url)
|
102
src/Ui/media/Wrapper.css
Normal file
102
src/Ui/media/Wrapper.css
Normal file
|
@ -0,0 +1,102 @@
|
||||||
|
body { margin: 0px; padding: 0px; height: 100%; background-color: #D2CECD; overflow: hidden }
|
||||||
|
body.back { background-color: #090909 }
|
||||||
|
a { color: black }
|
||||||
|
|
||||||
|
.template { display: none !important }
|
||||||
|
|
||||||
|
#inner-iframe { width: 100%; height: 100%; position: absolute; border: 0px; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out }
|
||||||
|
#inner-iframe.back { transform: scale(0.95) translate(-300px, 0px); opacity: 0.4 }
|
||||||
|
|
||||||
|
|
||||||
|
/* Fixbutton */
|
||||||
|
|
||||||
|
.fixbutton {
|
||||||
|
position: absolute; right: 35px; top: 15px; width: 40px; z-index: 999;
|
||||||
|
text-align: center; color: white; font-family: Consolas; font-size: 25px; line-height: 40px;
|
||||||
|
}
|
||||||
|
.fixbutton-bg {
|
||||||
|
border-radius: 80px; background-color: rgba(180, 180, 180, 0.5); cursor: pointer;
|
||||||
|
display: block; width: 80px; height: 80px; transition: background-color 0.2s, box-shadow 0.5s; transform: scale(0.6); margin-left: -20px; margin-top: -20px; /* 2x size to prevent blur on anim */
|
||||||
|
/*box-shadow: inset 105px 260px 0px -200px rgba(0,0,0,0.1);*/ /* box-shadow: inset -75px 183px 0px -200px rgba(0,0,0,0.1); */
|
||||||
|
}
|
||||||
|
.fixbutton-text { pointer-events: none; position: absolute; z-index: 999; width: 40px; backface-visibility: hidden; perspective: 1000px }
|
||||||
|
.fixbutton-burger { pointer-events: none; position: absolute; z-index: 999; width: 40px; opacity: 0; left: -20px }
|
||||||
|
.fixbutton-bg:hover { background-color: #AF3BFF }
|
||||||
|
|
||||||
|
|
||||||
|
/* Notification */
|
||||||
|
|
||||||
|
.notifications { position: absolute; top: 0px; right: 85px; display: inline-block; z-index: 999; white-space: nowrap }
|
||||||
|
.notification {
|
||||||
|
position: relative; float: right; clear: both; margin: 10px; height: 50px; box-sizing: border-box; overflow: hidden; backface-visibility: hidden; perspective: 1000px;
|
||||||
|
background-color: white; color: #4F4F4F; font-family: 'Helvetica Neue', 'Segoe UI', Helvetica, Arial, sans-serif; font-size: 14px; line-height: 20px
|
||||||
|
}
|
||||||
|
.notification-icon {
|
||||||
|
display: block; width: 50px; height: 50px; position: absolute; float: left; z-index: 1;
|
||||||
|
text-align: center; background-color: #e74c3c; line-height: 45px; vertical-align: bottom; font-size: 40px; color: white;
|
||||||
|
}
|
||||||
|
.notification .body { max-width: 420px; padding-left: 68px; padding-right: 17px; height: 50px; vertical-align: middle; display: table-cell }
|
||||||
|
.notification.visible { max-width: 350px }
|
||||||
|
|
||||||
|
.notification .close { position: absolute; top: 0px; right: 0px; font-size: 19px; line-height: 13px; color: #DDD; padding: 7px; text-decoration: none }
|
||||||
|
.notification .close:hover { color: black }
|
||||||
|
.notification .close:active, .notification .close:focus { color: #AF3BFF }
|
||||||
|
|
||||||
|
/* Notification types */
|
||||||
|
.notification-info .notification-icon { font-size: 22px; font-weight: bold; background-color: #2980b9; line-height: 48px }
|
||||||
|
.notification-done .notification-icon { font-size: 22px; background-color: #27ae60 }
|
||||||
|
|
||||||
|
|
||||||
|
/* Icons (based on http://nicolasgallagher.com/pure-css-gui-icons/demo/) */
|
||||||
|
|
||||||
|
.icon-success { left:6px; width:5px; height:12px; border-width:0 5px 5px 0; border-style:solid; border-color:white; margin-left: 20px; margin-top: 15px; transform:rotate(45deg) }
|
||||||
|
|
||||||
|
|
||||||
|
/* Loading screen */
|
||||||
|
|
||||||
|
.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: hidden; display: none }
|
||||||
|
.loading-text { text-align: center; vertical-align: middle; top: 50%; position: absolute; margin-top: 39px; width: 100% }
|
||||||
|
|
||||||
|
/* Console */
|
||||||
|
.console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; transform: translateY(-20px); }
|
||||||
|
.console-line:last-child { color: #6C6767 }
|
||||||
|
.console .cursor {
|
||||||
|
background-color: #999; color: #999; animation: pulse 1.5s infinite ease-in-out; margin-right: -9px;
|
||||||
|
display: inline-block; width: 9px; height: 19px; vertical-align: -4px;
|
||||||
|
}
|
||||||
|
.console .console-error { color: #e74c3c; font-weight: bold; animation: pulse 2s infinite linear }
|
||||||
|
|
||||||
|
/* Flipper loading anim */
|
||||||
|
.flipper-container { width: 40px; height: 40px; position: absolute; top: 0%; left: 50%; transform: translate3d(-50%, -50%, 0); perspective: 1200; opacity: 0 }
|
||||||
|
.flipper { position: relative; display: block; height: inherit; width: inherit; animation: flip 1.2s infinite ease-in-out; -webkit-transform-style: preserve-3d; }
|
||||||
|
.flipper .front, .flipper .back {
|
||||||
|
position: absolute; top: 0; left: 0; backface-visibility: hidden; /*transform-style: preserve-3d;*/ display: block;
|
||||||
|
background-color: #d50000; height: 100%; width: 100%; /*outline: 1px solid transparent; /* FF AA fix */
|
||||||
|
}
|
||||||
|
.flipper .back { background-color: white; z-index: 800; transform: rotateY(-180deg) }
|
||||||
|
|
||||||
|
/* Loading ready */
|
||||||
|
.loadingscreen.ready .console { opacity: 1; transform: translateY(0px); transition: all 0.3s }
|
||||||
|
.loadingscreen.ready .flipper-container { top: 50%; opacity: 1; transition: all 1s cubic-bezier(1, 0, 0, 1); }
|
||||||
|
|
||||||
|
|
||||||
|
/* Loading done */
|
||||||
|
.loadingscreen.done { height: 0%; transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); }
|
||||||
|
.loadingscreen.done .console { transform: translateY(300px); opacity: 0; transition: all 1.5s }
|
||||||
|
.loadingscreen.done .flipper-container { opacity: 0; transition: all 1.5s }
|
||||||
|
|
||||||
|
/* Animations */
|
||||||
|
|
||||||
|
@keyframes flip {
|
||||||
|
0% { transform: perspective(120px) rotateX(0deg) rotateY(0deg); }
|
||||||
|
50% { transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
|
||||||
|
100% { transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); }
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes pulse {
|
||||||
|
0% { opacity: 0 }
|
||||||
|
5% { opacity: 1 }
|
||||||
|
30% { opacity: 1 }
|
||||||
|
70% { opacity: 0 }
|
||||||
|
100% { opacity: 0 }
|
||||||
|
}
|
133
src/Ui/media/all.css
Normal file
133
src/Ui/media/all.css
Normal file
|
@ -0,0 +1,133 @@
|
||||||
|
|
||||||
|
|
||||||
|
/* ---- src/Ui/media/Wrapper.css ---- */
|
||||||
|
|
||||||
|
|
||||||
|
body { margin: 0px; padding: 0px; height: 100%; background-color: #D2CECD; overflow: hidden }
|
||||||
|
body.back { background-color: #090909 }
|
||||||
|
a { color: black }
|
||||||
|
|
||||||
|
.template { display: none !important }
|
||||||
|
|
||||||
|
#inner-iframe { width: 100%; height: 100%; position: absolute; border: 0px; -webkit-transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out ; -moz-transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out ; -o-transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out ; -ms-transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out ; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out }
|
||||||
|
#inner-iframe.back { -webkit-transform: scale(0.95) translate(-300px, 0px); -moz-transform: scale(0.95) translate(-300px, 0px); -o-transform: scale(0.95) translate(-300px, 0px); -ms-transform: scale(0.95) translate(-300px, 0px); transform: scale(0.95) translate(-300px, 0px) ; opacity: 0.4 }
|
||||||
|
|
||||||
|
|
||||||
|
/* Fixbutton */
|
||||||
|
|
||||||
|
.fixbutton {
|
||||||
|
position: absolute; right: 35px; top: 15px; width: 40px; z-index: 999;
|
||||||
|
text-align: center; color: white; font-family: Consolas; font-size: 25px; line-height: 40px;
|
||||||
|
}
|
||||||
|
.fixbutton-bg {
|
||||||
|
-webkit-border-radius: 80px; -moz-border-radius: 80px; -o-border-radius: 80px; -ms-border-radius: 80px; border-radius: 80px ; background-color: rgba(180, 180, 180, 0.5); cursor: pointer;
|
||||||
|
display: block; width: 80px; height: 80px; -webkit-transition: background-color 0.2s, box-shadow 0.5s; -moz-transition: background-color 0.2s, box-shadow 0.5s; -o-transition: background-color 0.2s, box-shadow 0.5s; -ms-transition: background-color 0.2s, box-shadow 0.5s; transition: background-color 0.2s, box-shadow 0.5s ; -webkit-transform: scale(0.6); -moz-transform: scale(0.6); -o-transform: scale(0.6); -ms-transform: scale(0.6); transform: scale(0.6) ; margin-left: -20px; margin-top: -20px; /* 2x size to prevent blur on anim */
|
||||||
|
/*box-shadow: inset 105px 260px 0px -200px rgba(0,0,0,0.1);*/ /* -webkit-box-shadow: inset -75px 183px 0px -200px rgba(0,0,0,0.1); -moz-box-shadow: inset -75px 183px 0px -200px rgba(0,0,0,0.1); -o-box-shadow: inset -75px 183px 0px -200px rgba(0,0,0,0.1); -ms-box-shadow: inset -75px 183px 0px -200px rgba(0,0,0,0.1); box-shadow: inset -75px 183px 0px -200px rgba(0,0,0,0.1) ; */
|
||||||
|
}
|
||||||
|
.fixbutton-text { pointer-events: none; position: absolute; z-index: 999; width: 40px; backface-visibility: hidden; -webkit-perspective: 1000px ; -moz-perspective: 1000px ; -o-perspective: 1000px ; -ms-perspective: 1000px ; perspective: 1000px }
|
||||||
|
.fixbutton-burger { pointer-events: none; position: absolute; z-index: 999; width: 40px; opacity: 0; left: -20px }
|
||||||
|
.fixbutton-bg:hover { background-color: #AF3BFF }
|
||||||
|
|
||||||
|
|
||||||
|
/* Notification */
|
||||||
|
|
||||||
|
.notifications { position: absolute; top: 0px; right: 85px; display: inline-block; z-index: 999; white-space: nowrap }
|
||||||
|
.notification {
|
||||||
|
position: relative; float: right; clear: both; margin: 10px; height: 50px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; overflow: hidden; backface-visibility: hidden; -webkit-perspective: 1000px; -moz-perspective: 1000px; -o-perspective: 1000px; -ms-perspective: 1000px; perspective: 1000px ;
|
||||||
|
background-color: white; color: #4F4F4F; font-family: 'Helvetica Neue', 'Segoe UI', Helvetica, Arial, sans-serif; font-size: 14px; line-height: 20px
|
||||||
|
}
|
||||||
|
.notification-icon {
|
||||||
|
display: block; width: 50px; height: 50px; position: absolute; float: left; z-index: 1;
|
||||||
|
text-align: center; background-color: #e74c3c; line-height: 45px; vertical-align: bottom; font-size: 40px; color: white;
|
||||||
|
}
|
||||||
|
.notification .body { max-width: 420px; padding-left: 68px; padding-right: 17px; height: 50px; vertical-align: middle; display: table-cell }
|
||||||
|
.notification.visible { max-width: 350px }
|
||||||
|
|
||||||
|
.notification .close { position: absolute; top: 0px; right: 0px; font-size: 19px; line-height: 13px; color: #DDD; padding: 7px; text-decoration: none }
|
||||||
|
.notification .close:hover { color: black }
|
||||||
|
.notification .close:active, .notification .close:focus { color: #AF3BFF }
|
||||||
|
|
||||||
|
/* Notification types */
|
||||||
|
.notification-info .notification-icon { font-size: 22px; font-weight: bold; background-color: #2980b9; line-height: 48px }
|
||||||
|
.notification-done .notification-icon { font-size: 22px; background-color: #27ae60 }
|
||||||
|
|
||||||
|
|
||||||
|
/* Icons (based on http://nicolasgallagher.com/pure-css-gui-icons/demo/) */
|
||||||
|
|
||||||
|
.icon-success { left:6px; width:5px; height:12px; border-width:0 5px 5px 0; border-style:solid; border-color:white; margin-left: 20px; margin-top: 15px; transform:rotate(45deg) }
|
||||||
|
|
||||||
|
|
||||||
|
/* Loading screen */
|
||||||
|
|
||||||
|
.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: hidden; display: none }
|
||||||
|
.loading-text { text-align: center; vertical-align: middle; top: 50%; position: absolute; margin-top: 39px; width: 100% }
|
||||||
|
|
||||||
|
/* Console */
|
||||||
|
.console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; -webkit-transform: translateY(-20px); -moz-transform: translateY(-20px); -o-transform: translateY(-20px); -ms-transform: translateY(-20px); transform: translateY(-20px) ; }
|
||||||
|
.console-line:last-child { color: #6C6767 }
|
||||||
|
.console .cursor {
|
||||||
|
background-color: #999; color: #999; -webkit-animation: pulse 1.5s infinite ease-in-out; -moz-animation: pulse 1.5s infinite ease-in-out; -o-animation: pulse 1.5s infinite ease-in-out; -ms-animation: pulse 1.5s infinite ease-in-out; animation: pulse 1.5s infinite ease-in-out ; margin-right: -9px;
|
||||||
|
display: inline-block; width: 9px; height: 19px; vertical-align: -4px;
|
||||||
|
}
|
||||||
|
.console .console-error { color: #e74c3c; font-weight: bold; -webkit-animation: pulse 2s infinite linear ; -moz-animation: pulse 2s infinite linear ; -o-animation: pulse 2s infinite linear ; -ms-animation: pulse 2s infinite linear ; animation: pulse 2s infinite linear }
|
||||||
|
|
||||||
|
/* Flipper loading anim */
|
||||||
|
.flipper-container { width: 40px; height: 40px; position: absolute; top: 0%; left: 50%; -webkit-transform: translate3d(-50%, -50%, 0); -moz-transform: translate3d(-50%, -50%, 0); -o-transform: translate3d(-50%, -50%, 0); -ms-transform: translate3d(-50%, -50%, 0); transform: translate3d(-50%, -50%, 0) ; -webkit-perspective: 1200; -moz-perspective: 1200; -o-perspective: 1200; -ms-perspective: 1200; perspective: 1200 ; opacity: 0 }
|
||||||
|
.flipper { position: relative; display: block; height: inherit; width: inherit; -webkit-animation: flip 1.2s infinite ease-in-out; -moz-animation: flip 1.2s infinite ease-in-out; -o-animation: flip 1.2s infinite ease-in-out; -ms-animation: flip 1.2s infinite ease-in-out; animation: flip 1.2s infinite ease-in-out ; -webkit-transform-style: preserve-3d; }
|
||||||
|
.flipper .front, .flipper .back {
|
||||||
|
position: absolute; top: 0; left: 0; backface-visibility: hidden; /*transform-style: preserve-3d;*/ display: block;
|
||||||
|
background-color: #d50000; height: 100%; width: 100%; /*outline: 1px solid transparent; /* FF AA fix */
|
||||||
|
}
|
||||||
|
.flipper .back { background-color: white; z-index: 800; -webkit-transform: rotateY(-180deg) ; -moz-transform: rotateY(-180deg) ; -o-transform: rotateY(-180deg) ; -ms-transform: rotateY(-180deg) ; transform: rotateY(-180deg) }
|
||||||
|
|
||||||
|
/* Loading ready */
|
||||||
|
.loadingscreen.ready .console { opacity: 1; -webkit-transform: translateY(0px); -moz-transform: translateY(0px); -o-transform: translateY(0px); -ms-transform: translateY(0px); transform: translateY(0px) ; -webkit-transition: all 0.3s ; -moz-transition: all 0.3s ; -o-transition: all 0.3s ; -ms-transition: all 0.3s ; transition: all 0.3s }
|
||||||
|
.loadingscreen.ready .flipper-container { top: 50%; opacity: 1; -webkit-transition: all 1s cubic-bezier(1, 0, 0, 1); -moz-transition: all 1s cubic-bezier(1, 0, 0, 1); -o-transition: all 1s cubic-bezier(1, 0, 0, 1); -ms-transition: all 1s cubic-bezier(1, 0, 0, 1); transition: all 1s cubic-bezier(1, 0, 0, 1) ; }
|
||||||
|
|
||||||
|
|
||||||
|
/* Loading done */
|
||||||
|
.loadingscreen.done { height: 0%; -webkit-transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); -moz-transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); -o-transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); -ms-transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045); transition: all 1s cubic-bezier(0.6, -0.28, 0.735, 0.045) ; }
|
||||||
|
.loadingscreen.done .console { -webkit-transform: translateY(300px); -moz-transform: translateY(300px); -o-transform: translateY(300px); -ms-transform: translateY(300px); transform: translateY(300px) ; opacity: 0; -webkit-transition: all 1.5s ; -moz-transition: all 1.5s ; -o-transition: all 1.5s ; -ms-transition: all 1.5s ; transition: all 1.5s }
|
||||||
|
.loadingscreen.done .flipper-container { opacity: 0; -webkit-transition: all 1.5s ; -moz-transition: all 1.5s ; -o-transition: all 1.5s ; -ms-transition: all 1.5s ; transition: all 1.5s }
|
||||||
|
|
||||||
|
/* Animations */
|
||||||
|
|
||||||
|
@keyframes flip {
|
||||||
|
0% { -webkit-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -moz-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -o-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -ms-transform: perspective(120px) rotateX(0deg) rotateY(0deg); transform: perspective(120px) rotateX(0deg) rotateY(0deg) ; }
|
||||||
|
50% { -webkit-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -moz-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -o-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -ms-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
|
||||||
|
100% { -webkit-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -moz-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -o-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -ms-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg) ; }
|
||||||
|
}
|
||||||
|
@-webkit-keyframes flip {
|
||||||
|
0% { -webkit-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -moz-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -o-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -ms-transform: perspective(120px) rotateX(0deg) rotateY(0deg); transform: perspective(120px) rotateX(0deg) rotateY(0deg) ; }
|
||||||
|
50% { -webkit-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -moz-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -o-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -ms-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
|
||||||
|
100% { -webkit-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -moz-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -o-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -ms-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg) ; }
|
||||||
|
}
|
||||||
|
@-moz-keyframes flip {
|
||||||
|
0% { -webkit-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -moz-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -o-transform: perspective(120px) rotateX(0deg) rotateY(0deg); -ms-transform: perspective(120px) rotateX(0deg) rotateY(0deg); transform: perspective(120px) rotateX(0deg) rotateY(0deg) ; }
|
||||||
|
50% { -webkit-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -moz-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -o-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; -ms-transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) ; transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
|
||||||
|
100% { -webkit-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -moz-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -o-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); -ms-transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg) ; }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@keyframes pulse {
|
||||||
|
0% { opacity: 0 }
|
||||||
|
5% { opacity: 1 }
|
||||||
|
30% { opacity: 1 }
|
||||||
|
70% { opacity: 0 }
|
||||||
|
100% { opacity: 0 }
|
||||||
|
}
|
||||||
|
@-webkit-keyframes pulse {
|
||||||
|
0% { opacity: 0 }
|
||||||
|
5% { opacity: 1 }
|
||||||
|
30% { opacity: 1 }
|
||||||
|
70% { opacity: 0 }
|
||||||
|
100% { opacity: 0 }
|
||||||
|
}
|
||||||
|
@-moz-keyframes pulse {
|
||||||
|
0% { opacity: 0 }
|
||||||
|
5% { opacity: 1 }
|
||||||
|
30% { opacity: 1 }
|
||||||
|
70% { opacity: 0 }
|
||||||
|
100% { opacity: 0 }
|
||||||
|
}
|
||||||
|
|
895
src/Ui/media/all.js
Normal file
895
src/Ui/media/all.js
Normal file
File diff suppressed because one or more lines are too long
BIN
src/Ui/media/img/favicon.ico
Normal file
BIN
src/Ui/media/img/favicon.ico
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.1 KiB |
BIN
src/Ui/media/img/favicon.psd
Normal file
BIN
src/Ui/media/img/favicon.psd
Normal file
Binary file not shown.
4
src/Ui/media/lib/00-jquery.min.js
vendored
Normal file
4
src/Ui/media/lib/00-jquery.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
81
src/Ui/media/lib/ZeroWebsocket.coffee
Normal file
81
src/Ui/media/lib/ZeroWebsocket.coffee
Normal file
|
@ -0,0 +1,81 @@
|
||||||
|
class ZeroWebsocket
|
||||||
|
constructor: (url) ->
|
||||||
|
@url = url
|
||||||
|
@next_message_id = 1
|
||||||
|
@waiting_cb = {}
|
||||||
|
@init()
|
||||||
|
|
||||||
|
|
||||||
|
init: ->
|
||||||
|
@
|
||||||
|
|
||||||
|
|
||||||
|
connect: ->
|
||||||
|
@ws = new WebSocket(@url)
|
||||||
|
@ws.onmessage = @onMessage
|
||||||
|
@ws.onopen = @onOpenWebsocket
|
||||||
|
@ws.onerror = @onErrorWebsocket
|
||||||
|
@ws.onclose = @onCloseWebsocket
|
||||||
|
|
||||||
|
|
||||||
|
onMessage: (e) =>
|
||||||
|
message = JSON.parse(e.data)
|
||||||
|
cmd = message.cmd
|
||||||
|
if cmd == "response"
|
||||||
|
if @waiting_cb[message.to]?
|
||||||
|
@waiting_cb[message.to](message.result)
|
||||||
|
else
|
||||||
|
@log "Websocket callback not found:", message
|
||||||
|
else if cmd == "ping"
|
||||||
|
@response message.id, "pong"
|
||||||
|
else
|
||||||
|
@route cmd, message
|
||||||
|
|
||||||
|
route: (cmd, message) =>
|
||||||
|
@log "Unknown command", message
|
||||||
|
|
||||||
|
|
||||||
|
response: (to, result) ->
|
||||||
|
@send {"cmd": "response", "to": to, "result": result}
|
||||||
|
|
||||||
|
|
||||||
|
cmd: (cmd, params={}, cb=null) ->
|
||||||
|
@send {"cmd": cmd, "params": params}, cb
|
||||||
|
|
||||||
|
|
||||||
|
send: (message, cb=null) ->
|
||||||
|
if not message.id?
|
||||||
|
message.id = @next_message_id
|
||||||
|
@next_message_id += 1
|
||||||
|
@ws.send(JSON.stringify(message))
|
||||||
|
if cb
|
||||||
|
@waiting_cb[message.id] = cb
|
||||||
|
|
||||||
|
|
||||||
|
log: (args...) =>
|
||||||
|
console.log "[ZeroWebsocket]", args...
|
||||||
|
|
||||||
|
|
||||||
|
onOpenWebsocket: (e) =>
|
||||||
|
@log "Open", e
|
||||||
|
if @onOpen? then @onOpen(e)
|
||||||
|
|
||||||
|
|
||||||
|
onErrorWebsocket: (e) =>
|
||||||
|
@log "Error", e
|
||||||
|
if @onError? then @onError(e)
|
||||||
|
|
||||||
|
|
||||||
|
onCloseWebsocket: (e) =>
|
||||||
|
@log "Closed", e
|
||||||
|
if e.code == 1000
|
||||||
|
@log "Server error, please reload the page"
|
||||||
|
else # Connection error
|
||||||
|
setTimeout (=>
|
||||||
|
@log "Reconnecting..."
|
||||||
|
@connect()
|
||||||
|
), 10000
|
||||||
|
if @onClose? then @onClose(e)
|
||||||
|
|
||||||
|
|
||||||
|
window.ZeroWebsocket = ZeroWebsocket
|
27
src/Ui/media/lib/jquery.cssanim.js
Normal file
27
src/Ui/media/lib/jquery.cssanim.js
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
jQuery.cssHooks['scale'] = {
|
||||||
|
get: function(elem, computed, extra) {
|
||||||
|
var match = window.getComputedStyle(elem).transform.match("[0-9\.]+")
|
||||||
|
if (match) {
|
||||||
|
var scale = parseFloat(match[0])
|
||||||
|
return scale
|
||||||
|
} else {
|
||||||
|
return 1.0
|
||||||
|
}
|
||||||
|
},
|
||||||
|
set: function(elem, val) {
|
||||||
|
//var transforms = $(elem).css("transform").match(/[0-9\.]+/g)
|
||||||
|
var transforms = window.getComputedStyle(elem).transform.match(/[0-9\.]+/g)
|
||||||
|
if (transforms) {
|
||||||
|
transforms[0] = val
|
||||||
|
transforms[3] = val
|
||||||
|
//$(elem).css("transform", 'matrix('+transforms.join(", ")+")")
|
||||||
|
elem.style.transform = 'matrix('+transforms.join(", ")+')'
|
||||||
|
} else {
|
||||||
|
elem.style.transform = "scale("+val+")"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
jQuery.fx.step.scale = function(fx) {
|
||||||
|
jQuery.cssHooks['scale'].set(fx.elem, fx.now)
|
||||||
|
};
|
35
src/Ui/media/lib/jquery.csslater.coffee
Normal file
35
src/Ui/media/lib/jquery.csslater.coffee
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
jQuery.fn.readdClass = (class_name) ->
|
||||||
|
elem = @
|
||||||
|
elem.removeClass class_name
|
||||||
|
setTimeout ( ->
|
||||||
|
elem.addClass class_name
|
||||||
|
), 1
|
||||||
|
return @
|
||||||
|
|
||||||
|
jQuery.fn.removeLater = (time = 500) ->
|
||||||
|
elem = @
|
||||||
|
setTimeout ( ->
|
||||||
|
elem.remove()
|
||||||
|
), time
|
||||||
|
return @
|
||||||
|
|
||||||
|
jQuery.fn.hideLater = (time = 500) ->
|
||||||
|
elem = @
|
||||||
|
setTimeout ( ->
|
||||||
|
elem.css("display", "none")
|
||||||
|
), time
|
||||||
|
return @
|
||||||
|
|
||||||
|
jQuery.fn.addClassLater = (class_name, time = 5) ->
|
||||||
|
elem = @
|
||||||
|
setTimeout ( ->
|
||||||
|
elem.addClass(class_name)
|
||||||
|
), time
|
||||||
|
return @
|
||||||
|
|
||||||
|
jQuery.fn.cssLater = (name, val, time = 500) ->
|
||||||
|
elem = @
|
||||||
|
setTimeout ( ->
|
||||||
|
elem.css name, val
|
||||||
|
), time
|
||||||
|
return @
|
205
src/Ui/media/lib/jquery.easing.1.3.js
Normal file
205
src/Ui/media/lib/jquery.easing.1.3.js
Normal file
|
@ -0,0 +1,205 @@
|
||||||
|
/*
|
||||||
|
* jQuery Easing v1.3 - http://gsgd.co.uk/sandbox/jquery/easing/
|
||||||
|
*
|
||||||
|
* Uses the built in easing capabilities added In jQuery 1.1
|
||||||
|
* to offer multiple easing options
|
||||||
|
*
|
||||||
|
* TERMS OF USE - jQuery Easing
|
||||||
|
*
|
||||||
|
* Open source under the BSD License.
|
||||||
|
*
|
||||||
|
* Copyright © 2008 George McGinley Smith
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
* are permitted provided that the following conditions are met:
|
||||||
|
*
|
||||||
|
* Redistributions of source code must retain the above copyright notice, this list of
|
||||||
|
* conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above copyright notice, this list
|
||||||
|
* of conditions and the following disclaimer in the documentation and/or other materials
|
||||||
|
* provided with the distribution.
|
||||||
|
*
|
||||||
|
* Neither the name of the author nor the names of contributors may be used to endorse
|
||||||
|
* or promote products derived from this software without specific prior written permission.
|
||||||
|
*
|
||||||
|
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
|
||||||
|
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||||
|
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||||
|
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||||
|
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
|
||||||
|
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
|
||||||
|
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||||
|
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
|
||||||
|
* OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
// t: current time, b: begInnIng value, c: change In value, d: duration
|
||||||
|
jQuery.easing['jswing'] = jQuery.easing['swing'];
|
||||||
|
|
||||||
|
jQuery.extend( jQuery.easing,
|
||||||
|
{
|
||||||
|
def: 'easeOutQuad',
|
||||||
|
swing: function (x, t, b, c, d) {
|
||||||
|
//alert(jQuery.easing.default);
|
||||||
|
return jQuery.easing[jQuery.easing.def](x, t, b, c, d);
|
||||||
|
},
|
||||||
|
easeInQuad: function (x, t, b, c, d) {
|
||||||
|
return c*(t/=d)*t + b;
|
||||||
|
},
|
||||||
|
easeOutQuad: function (x, t, b, c, d) {
|
||||||
|
return -c *(t/=d)*(t-2) + b;
|
||||||
|
},
|
||||||
|
easeInOutQuad: function (x, t, b, c, d) {
|
||||||
|
if ((t/=d/2) < 1) return c/2*t*t + b;
|
||||||
|
return -c/2 * ((--t)*(t-2) - 1) + b;
|
||||||
|
},
|
||||||
|
easeInCubic: function (x, t, b, c, d) {
|
||||||
|
return c*(t/=d)*t*t + b;
|
||||||
|
},
|
||||||
|
easeOutCubic: function (x, t, b, c, d) {
|
||||||
|
return c*((t=t/d-1)*t*t + 1) + b;
|
||||||
|
},
|
||||||
|
easeInOutCubic: function (x, t, b, c, d) {
|
||||||
|
if ((t/=d/2) < 1) return c/2*t*t*t + b;
|
||||||
|
return c/2*((t-=2)*t*t + 2) + b;
|
||||||
|
},
|
||||||
|
easeInQuart: function (x, t, b, c, d) {
|
||||||
|
return c*(t/=d)*t*t*t + b;
|
||||||
|
},
|
||||||
|
easeOutQuart: function (x, t, b, c, d) {
|
||||||
|
return -c * ((t=t/d-1)*t*t*t - 1) + b;
|
||||||
|
},
|
||||||
|
easeInOutQuart: function (x, t, b, c, d) {
|
||||||
|
if ((t/=d/2) < 1) return c/2*t*t*t*t + b;
|
||||||
|
return -c/2 * ((t-=2)*t*t*t - 2) + b;
|
||||||
|
},
|
||||||
|
easeInQuint: function (x, t, b, c, d) {
|
||||||
|
return c*(t/=d)*t*t*t*t + b;
|
||||||
|
},
|
||||||
|
easeOutQuint: function (x, t, b, c, d) {
|
||||||
|
return c*((t=t/d-1)*t*t*t*t + 1) + b;
|
||||||
|
},
|
||||||
|
easeInOutQuint: function (x, t, b, c, d) {
|
||||||
|
if ((t/=d/2) < 1) return c/2*t*t*t*t*t + b;
|
||||||
|
return c/2*((t-=2)*t*t*t*t + 2) + b;
|
||||||
|
},
|
||||||
|
easeInSine: function (x, t, b, c, d) {
|
||||||
|
return -c * Math.cos(t/d * (Math.PI/2)) + c + b;
|
||||||
|
},
|
||||||
|
easeOutSine: function (x, t, b, c, d) {
|
||||||
|
return c * Math.sin(t/d * (Math.PI/2)) + b;
|
||||||
|
},
|
||||||
|
easeInOutSine: function (x, t, b, c, d) {
|
||||||
|
return -c/2 * (Math.cos(Math.PI*t/d) - 1) + b;
|
||||||
|
},
|
||||||
|
easeInExpo: function (x, t, b, c, d) {
|
||||||
|
return (t==0) ? b : c * Math.pow(2, 10 * (t/d - 1)) + b;
|
||||||
|
},
|
||||||
|
easeOutExpo: function (x, t, b, c, d) {
|
||||||
|
return (t==d) ? b+c : c * (-Math.pow(2, -10 * t/d) + 1) + b;
|
||||||
|
},
|
||||||
|
easeInOutExpo: function (x, t, b, c, d) {
|
||||||
|
if (t==0) return b;
|
||||||
|
if (t==d) return b+c;
|
||||||
|
if ((t/=d/2) < 1) return c/2 * Math.pow(2, 10 * (t - 1)) + b;
|
||||||
|
return c/2 * (-Math.pow(2, -10 * --t) + 2) + b;
|
||||||
|
},
|
||||||
|
easeInCirc: function (x, t, b, c, d) {
|
||||||
|
return -c * (Math.sqrt(1 - (t/=d)*t) - 1) + b;
|
||||||
|
},
|
||||||
|
easeOutCirc: function (x, t, b, c, d) {
|
||||||
|
return c * Math.sqrt(1 - (t=t/d-1)*t) + b;
|
||||||
|
},
|
||||||
|
easeInOutCirc: function (x, t, b, c, d) {
|
||||||
|
if ((t/=d/2) < 1) return -c/2 * (Math.sqrt(1 - t*t) - 1) + b;
|
||||||
|
return c/2 * (Math.sqrt(1 - (t-=2)*t) + 1) + b;
|
||||||
|
},
|
||||||
|
easeInElastic: function (x, t, b, c, d) {
|
||||||
|
var s=1.70158;var p=0;var a=c;
|
||||||
|
if (t==0) return b; if ((t/=d)==1) return b+c; if (!p) p=d*.3;
|
||||||
|
if (a < Math.abs(c)) { a=c; var s=p/4; }
|
||||||
|
else var s = p/(2*Math.PI) * Math.asin (c/a);
|
||||||
|
return -(a*Math.pow(2,10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )) + b;
|
||||||
|
},
|
||||||
|
easeOutElastic: function (x, t, b, c, d) {
|
||||||
|
var s=1.70158;var p=0;var a=c;
|
||||||
|
if (t==0) return b; if ((t/=d)==1) return b+c; if (!p) p=d*.3;
|
||||||
|
if (a < Math.abs(c)) { a=c; var s=p/4; }
|
||||||
|
else var s = p/(2*Math.PI) * Math.asin (c/a);
|
||||||
|
return a*Math.pow(2,-10*t) * Math.sin( (t*d-s)*(2*Math.PI)/p ) + c + b;
|
||||||
|
},
|
||||||
|
easeInOutElastic: function (x, t, b, c, d) {
|
||||||
|
var s=1.70158;var p=0;var a=c;
|
||||||
|
if (t==0) return b; if ((t/=d/2)==2) return b+c; if (!p) p=d*(.3*1.5);
|
||||||
|
if (a < Math.abs(c)) { a=c; var s=p/4; }
|
||||||
|
else var s = p/(2*Math.PI) * Math.asin (c/a);
|
||||||
|
if (t < 1) return -.5*(a*Math.pow(2,10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )) + b;
|
||||||
|
return a*Math.pow(2,-10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )*.5 + c + b;
|
||||||
|
},
|
||||||
|
easeInBack: function (x, t, b, c, d, s) {
|
||||||
|
if (s == undefined) s = 1.70158;
|
||||||
|
return c*(t/=d)*t*((s+1)*t - s) + b;
|
||||||
|
},
|
||||||
|
easeOutBack: function (x, t, b, c, d, s) {
|
||||||
|
if (s == undefined) s = 1.70158;
|
||||||
|
return c*((t=t/d-1)*t*((s+1)*t + s) + 1) + b;
|
||||||
|
},
|
||||||
|
easeInOutBack: function (x, t, b, c, d, s) {
|
||||||
|
if (s == undefined) s = 1.70158;
|
||||||
|
if ((t/=d/2) < 1) return c/2*(t*t*(((s*=(1.525))+1)*t - s)) + b;
|
||||||
|
return c/2*((t-=2)*t*(((s*=(1.525))+1)*t + s) + 2) + b;
|
||||||
|
},
|
||||||
|
easeInBounce: function (x, t, b, c, d) {
|
||||||
|
return c - jQuery.easing.easeOutBounce (x, d-t, 0, c, d) + b;
|
||||||
|
},
|
||||||
|
easeOutBounce: function (x, t, b, c, d) {
|
||||||
|
if ((t/=d) < (1/2.75)) {
|
||||||
|
return c*(7.5625*t*t) + b;
|
||||||
|
} else if (t < (2/2.75)) {
|
||||||
|
return c*(7.5625*(t-=(1.5/2.75))*t + .75) + b;
|
||||||
|
} else if (t < (2.5/2.75)) {
|
||||||
|
return c*(7.5625*(t-=(2.25/2.75))*t + .9375) + b;
|
||||||
|
} else {
|
||||||
|
return c*(7.5625*(t-=(2.625/2.75))*t + .984375) + b;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
easeInOutBounce: function (x, t, b, c, d) {
|
||||||
|
if (t < d/2) return jQuery.easing.easeInBounce (x, t*2, 0, c, d) * .5 + b;
|
||||||
|
return jQuery.easing.easeOutBounce (x, t*2-d, 0, c, d) * .5 + c*.5 + b;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
/*
|
||||||
|
*
|
||||||
|
* TERMS OF USE - EASING EQUATIONS
|
||||||
|
*
|
||||||
|
* Open source under the BSD License.
|
||||||
|
*
|
||||||
|
* Copyright © 2001 Robert Penner
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
* are permitted provided that the following conditions are met:
|
||||||
|
*
|
||||||
|
* Redistributions of source code must retain the above copyright notice, this list of
|
||||||
|
* conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above copyright notice, this list
|
||||||
|
* of conditions and the following disclaimer in the documentation and/or other materials
|
||||||
|
* provided with the distribution.
|
||||||
|
*
|
||||||
|
* Neither the name of the author nor the names of contributors may be used to endorse
|
||||||
|
* or promote products derived from this software without specific prior written permission.
|
||||||
|
*
|
||||||
|
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
|
||||||
|
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||||
|
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||||
|
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||||
|
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
|
||||||
|
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
|
||||||
|
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||||
|
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
|
||||||
|
* OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
*
|
||||||
|
*/
|
50
src/Ui/template/wrapper.html
Normal file
50
src/Ui/template/wrapper.html
Normal file
|
@ -0,0 +1,50 @@
|
||||||
|
|
||||||
|
<!DOCTYPE html>
|
||||||
|
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<title>{title} - ZeroNet</title>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
|
||||||
|
<link rel="stylesheet" href="/uimedia/all.css" />
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
|
||||||
|
<!-- Fixed button -->
|
||||||
|
<div class='fixbutton'>
|
||||||
|
<div class='fixbutton-text'>0</div>
|
||||||
|
<div class='fixbutton-burger'>☰</div>
|
||||||
|
<a class='fixbutton-bg' href="/{homepage}"></a>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Notifications -->
|
||||||
|
<div class='notifications'>
|
||||||
|
<div class='notification template'><span class='notification-icon'>!</span> <span class='body'>Test notification</span><a class="close" href="#Close">×</a><div style="clear: both"></div></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Loadingscreen -->
|
||||||
|
<div class='loadingscreen'>
|
||||||
|
<div class='loading-text console'>
|
||||||
|
</div>
|
||||||
|
<div class="flipper-container">
|
||||||
|
<div class="flipper"> <div class="front"></div><div class="back"></div> </div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Site Iframe -->
|
||||||
|
<iframe src='/media/{address}/{inner_path}#auth_key={auth_key}' id='inner-iframe' sandbox="allow-forms allow-scripts allow-top-navigation"></iframe>
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Site info -->
|
||||||
|
<script>address = "{address}"</script>
|
||||||
|
<script>auth_key = "{auth_key}"</script>
|
||||||
|
<script>inner_path = "{inner_path}"</script>
|
||||||
|
<script>permissions = {permissions}</script>
|
||||||
|
<script>show_loadingscreen = {show_loadingscreen}</script>
|
||||||
|
<script type="text/javascript" src="/uimedia/all.js" asyc></script>
|
||||||
|
|
||||||
|
</body>
|
||||||
|
</html>
|
68
src/Worker/Worker.py
Normal file
68
src/Worker/Worker.py
Normal file
|
@ -0,0 +1,68 @@
|
||||||
|
import gevent, time, logging, shutil, os
|
||||||
|
from Peer import Peer
|
||||||
|
|
||||||
|
class Worker:
    """Downloads files for one site from a single peer; driven by WorkerManager."""

    def __init__(self, manager, peer):
        self.manager = manager  # Owning WorkerManager
        self.peer = peer        # Peer this worker downloads from
        self.task = None        # Task dict currently processed (None when idle)
        self.key = None         # "ip:port" key of the peer, assigned by WorkerManager
        self.running = False    # Loop flag for downloader()
        self.thread = None      # Greenlet running downloader()


    # Downloader thread
    def downloader(self):
        # Main loop: pick tasks from the manager, download the file from our
        # peer, verify its hash and write it to disk on success.
        while self.running:
            # Try to pickup free file download task
            task = self.manager.getTask(self.peer)
            if not task:  # Die, no more task
                self.manager.log.debug("%s: No task found, stopping" % self.key)
                break

            if task["workers_num"] > 0:  # Wait a bit if someone already working on it
                self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." % (self.key, task["inner_path"]))
                time.sleep(1)

            if task["done"] == False:
                self.task = task
                task["workers_num"] += 1
                buff = self.peer.getFile(task["site"].address, task["inner_path"])
                if buff:  # Download ok
                    correct = task["site"].verifyFile(task["inner_path"], buff)
                else:  # Download error
                    correct = False
                if correct == True or correct == None:  # Hash ok or same file
                    self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"]))
                    if task["done"] == False:  # Task not done yet
                        buff.seek(0)
                        file_path = task["site"].getPath(task["inner_path"])
                        file_dir = os.path.dirname(file_path)
                        if not os.path.isdir(file_dir): os.makedirs(file_dir)  # Make directory for files
                        file = open(file_path, "wb")
                        shutil.copyfileobj(buff, file)  # Write buff to disk
                        file.close()
                        task["workers_num"] -= 1
                        self.manager.doneTask(task)
                    self.task = None
                else:  # Hash failed
                    self.task = None
                    self.peer.hash_failed += 1
                    if self.peer.hash_failed > 5:  # Broken peer
                        break
                    task["workers_num"] -= 1
                    self.manager.log.error("%s: Hash failed: %s" % (self.key, task["inner_path"]))
                    time.sleep(1)
        self.running = False
        self.peer.disconnect()
        self.manager.removeWorker(self)


    # Start the worker
    def start(self):
        self.running = True
        self.thread = gevent.spawn(self.downloader)

    # Stop the worker and unregister it from the manager
    def stop(self):
        self.running = False
        self.manager.removeWorker(self)
|
131
src/Worker/WorkerManager.py
Normal file
131
src/Worker/WorkerManager.py
Normal file
|
@ -0,0 +1,131 @@
|
||||||
|
from Worker import Worker
|
||||||
|
import gevent, time, logging
|
||||||
|
|
||||||
|
MAX_WORKERS = 10  # Maximum number of parallel workers (one worker per peer)


# Worker manager for site
class WorkerManager:
    def __init__(self, site):
        self.site = site
        self.workers = {}  # Key: ip:port, Value: Worker.Worker
        # Task dict format:
        # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path,
        #  "done": False, "time_start": time.time(), "peers": peers}
        self.tasks = []
        self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short)
        self.process_taskchecker = gevent.spawn(self.checkTasks)  # Background watchdog greenlet


    # Check expired tasks
    def checkTasks(self):
        while 1:
            time.sleep(15)  # Check every 15 sec
            if not self.tasks: continue
            tasks = self.tasks[:]  # Copy it so removing elements wont cause any problem
            for task in tasks:
                if time.time() >= task["time_start"]+60:  # Task timed out
                    self.log.debug("Cleaning up task: %s" % task)

                    # Clean up workers
                    workers = self.findWorkers(task)
                    for worker in workers:
                        worker.stop()

                    # Remove task
                    self.failTask(task)
                elif time.time() >= task["time_start"]+15:  # Task taking long time
                    self.log.debug("Task taking long time, find more peers: %s" % task["inner_path"])
                    task["site"].announce()  # Find more peers
                    if task["peers"]:  # Release the peer lock
                        self.log.debug("Task peer lock release: %s" % task["inner_path"])
                        task["peers"] = []
                        self.startWorkers()
                    continue  # One reannounce per loop


    # Returns the next free or less worked task
    # NOTE(review): the only_free parameter is currently unused — confirm intent.
    def getTask(self, peer, only_free=False):
        best_task = None
        for task in self.tasks:  # Find out the task with lowest worker number
            if task["peers"] and peer not in task["peers"]: continue  # This peer not allowed to pick this task
            if task["inner_path"] == "content.json": return task  # content.json always priority
            if not best_task or task["workers_num"] < best_task["workers_num"]:  # If task has lower worker number then its better
                best_task = task
        return best_task


    # New peers added to site
    def onPeers(self):
        self.startWorkers()


    # Start workers to process tasks
    def startWorkers(self):
        if len(self.workers) >= MAX_WORKERS: return False  # Workers number already maxed
        if not self.tasks: return False  # No task for workers
        for key, peer in self.site.peers.iteritems():  # One worker for every peer
            if key not in self.workers and len(self.workers) < MAX_WORKERS:  # We dont have worker for that peer and workers num less than max
                worker = Worker(self, peer)
                self.workers[key] = worker
                worker.key = key
                worker.start()
                self.log.debug("Added worker: %s, workers: %s/%s" % (key, len(self.workers), MAX_WORKERS))


    # Find workers by task
    def findWorkers(self, task):
        workers = []
        for worker in self.workers.values():
            if worker.task == task: workers.append(worker)
        return workers

    # Ends and remove a worker
    def removeWorker(self, worker):
        worker.running = False
        del(self.workers[worker.key])
        self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), MAX_WORKERS))


    # Create new task and return asyncresult
    def addTask(self, inner_path, peer=None):
        self.site.onFileStart(inner_path)  # First task, trigger site download started
        task = self.findTask(inner_path)
        if task:  # Already has task for that file
            if peer and task["peers"]:  # This peer has new version too
                task["peers"].append(peer)
                self.startWorkers()
            return task["evt"]
        else:  # No task for that file yet
            evt = gevent.event.AsyncResult()
            if peer:
                peers = [peer]  # Only download from this peer
            else:
                peers = None
            task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_start": time.time(), "peers": peers}
            self.tasks.append(task)
            self.log.debug("New task: %s" % task)
            self.startWorkers()
            return evt


    # Find a task using inner_path
    def findTask(self, inner_path):
        for task in self.tasks:
            if task["inner_path"] == inner_path:
                return task
        return None  # Not found


    # Mark a task failed
    def failTask(self, task):
        task["done"] = True
        self.tasks.remove(task)  # Remove from queue
        self.site.onFileFail(task["inner_path"])
        task["evt"].set(False)


    # Mark a task done
    def doneTask(self, task):
        task["done"] = True
        self.tasks.remove(task)  # Remove from queue
        self.site.onFileDone(task["inner_path"])
        task["evt"].set(True)
        if not self.tasks: self.site.onComplete()  # No more task: trigger site complete
|
||||||
|
|
2
src/Worker/__init__.py
Normal file
2
src/Worker/__init__.py
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
from Worker import Worker
|
||||||
|
from WorkerManager import WorkerManager
|
0
src/__init__.py
Normal file
0
src/__init__.py
Normal file
466
src/lib/BitcoinECC/BitcoinECC.py
Normal file
466
src/lib/BitcoinECC/BitcoinECC.py
Normal file
|
@ -0,0 +1,466 @@
|
||||||
|
# By: HurlSly
|
||||||
|
# Source: https://github.com/HurlSly/Python/blob/master/BitcoinECC.py
|
||||||
|
# Modified: random number generator in def GeneratePrivateKey(self):
|
||||||
|
|
||||||
|
import random
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
|
||||||
|
class GaussInt:
    #A class for the Gauss integers of the form a + b sqrt(n) where a,b are integers.
    #n can be positive or negative.
    #If p is given, all arithmetic is performed modulo p.
    def __init__(self,x,y,n,p=0):
        if p:
            # Reduce all components modulo p
            self.x=x%p
            self.y=y%p
            self.n=n%p
        else:
            self.x=x
            self.y=y
            self.n=n

        self.p=p

    def __add__(self,b):
        # Componentwise addition
        return GaussInt(self.x+b.x,self.y+b.y,self.n,self.p)

    def __sub__(self,b):
        # Componentwise subtraction
        return GaussInt(self.x-b.x,self.y-b.y,self.n,self.p)

    def __mul__(self,b):
        # (x1 + y1*sqrt(n)) * (x2 + y2*sqrt(n)) = (x1*x2 + n*y1*y2) + (x1*y2 + y1*x2)*sqrt(n)
        return GaussInt(self.x*b.x+self.n*self.y*b.y,self.x*b.y+self.y*b.x,self.n,self.p)

    def __div__(self,b):
        # Division by multiplying with the conjugate; Python 2 `/` semantics.
        # NOTE(review): under Python 3 this method is never invoked by `/` (would need __truediv__).
        return GaussInt((self.x*b.x-self.n*self.y*b.y)/(b.x*b.x-self.n*b.y*b.y),(-self.x*b.y+self.y*b.x)/(b.x*b.x-self.n*b.y*b.y),self.n,self.p)

    def __eq__(self,b):
        return self.x==b.x and self.y==b.y

    def __repr__(self):
        if self.p:
            return "%s+%s (%d,%d)"%(self.x,self.y,self.n,self.p)
        else:
            return "%s+%s (%d)"%(self.x,self.y,self.n)

    def __pow__(self,n):
        # Fast exponentiation by squaring, using the binary digits of n
        # (Base is the module-level helper defined below).
        b=Base(n,2)
        t=GaussInt(1,0,self.n)
        while b:
            t=t*t
            if b.pop():
                t=self*t

        return t

    def Inv(self):
        # Multiplicative inverse via the conjugate over the norm
        return GaussInt(self.x/(self.x*self.x-self.n*self.y*self.y),-self.y/(self.x*self.x-self.n*self.y*self.y),self.n,self.p)
|
||||||
|
|
||||||
|
def Cipolla(a, p):
    """Find a square root of a modulo p using the algorithm of Cipolla.

    p must be an odd prime and a a quadratic residue mod p, otherwise the
    result is meaningless. Exponents use floor division (//) — identical to
    the original `/` on Python 2 ints, but also correct under Python 3
    (where `/` would produce a float and break pow()).
    """
    # Find b such that b^2 - a is a quadratic non-residue mod p
    b = 0
    while pow((b * b - a) % p, (p - 1) // 2, p) == 1:
        b += 1

    # (b + sqrt(b^2 - a))^((p+1)/2) computed in GF(p^2) lands in GF(p)
    return (GaussInt(b, 1, b ** 2 - a, p) ** ((p + 1) // 2)).x
|
||||||
|
|
||||||
|
def Base(n, b):
    """Decompose n in base b; returns the digits, least-significant first.

    Uses floor division (//) — identical to the original `/` on Python 2
    ints, but also correct under Python 3 where `/` would turn n into a
    float and corrupt the digit list.
    """
    l = []
    while n:
        l.append(n % b)
        n //= b

    return l
|
||||||
|
|
||||||
|
def InvMod(a, n):
    """Find the inverse of a mod n via the extended Euclidean algorithm.

    Quotients use floor division (//) — identical to the original `/` on
    Python 2 ints, but also correct under Python 3.
    """
    m = []  # Quotients produced by the Euclidean algorithm

    s = n
    while n:
        m.append(a // n)
        (a, n) = (n, a % n)

    # Back-substitute the quotients to recover the Bezout coefficient
    u = 1
    v = 0
    while m:
        (u, v) = (v, u - m.pop() * v)

    return u % s
|
||||||
|
|
||||||
|
def b58encode(v):
    """Encode a byte string to Base58 using the Bitcoin alphabet."""
    alphabet = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
    radix = len(alphabet)

    # Interpret the byte string as one big base-256 integer
    num = 0
    for ch in v:
        num = num * 256 + ord(ch)

    # Peel off base-58 digits, building the string most-significant first
    encoded = ""
    while num:
        num, rem = divmod(num, radix)
        encoded = alphabet[rem] + encoded

    # Each leading zero byte maps to one leading '1' character
    zeros = 0
    for ch in v:
        if ch != "\0":
            break
        zeros += 1

    return alphabet[0] * zeros + encoded
|
||||||
|
|
||||||
|
def b58decode(v):
    """Decode a Base58 string (Bitcoin alphabet) back to a byte string."""
    alphabet = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
    radix = len(alphabet)

    # Interpret the Base58 text as one big integer
    num = 0
    for ch in v:
        num = num * radix + alphabet.find(ch)

    # Convert the integer back to bytes, most-significant first
    decoded = ""
    while num:
        num, rem = divmod(num, 256)
        decoded = chr(rem) + decoded

    # Each leading '1' character maps to one leading zero byte
    zeros = 0
    for ch in v:
        if ch != alphabet[0]:
            break
        zeros += 1

    return "\0" * zeros + decoded
|
||||||
|
|
||||||
|
def Byte2Hex(b):
    """Convert a byte string to its hex representation (two digits per byte)."""
    pieces = []
    for ch in b:
        digits = hex(ord(ch))[2:]
        # Zero-pad single-digit bytes to two hex characters
        if len(digits) == 1:
            digits = "0" + digits
        pieces.append("%2s" % digits)

    return "".join(pieces)
|
||||||
|
|
||||||
|
def Int2Byte(n, b):
    """Convert integer n to a big-endian byte string of exactly b bytes."""
    chars = []
    # Collect b least-significant bytes (little-endian order), then reverse
    for _ in range(b):
        n, rem = divmod(n, 256)
        chars.append(chr(rem))

    return "".join(reversed(chars))
|
||||||
|
|
||||||
|
class EllipticCurvePoint:
|
||||||
|
#Main class
|
||||||
|
#It is an point on an Elliptic Curve
|
||||||
|
|
||||||
|
def __init__(self,x,a,b,p,n=0):
|
||||||
|
#We store the coordinate in x and the elliptic curbe parameter.
|
||||||
|
#x is of length 3. This is the 3 projective coordinates of the point.
|
||||||
|
self.x=x[:]
|
||||||
|
self.a=a
|
||||||
|
self.b=b
|
||||||
|
self.p=p
|
||||||
|
self.n=n
|
||||||
|
|
||||||
|
def EqualProj(self,y):
|
||||||
|
#Does y equals self ?
|
||||||
|
#It computes self cross product with y and check if the result is 0.
|
||||||
|
return self.x[0]*y.x[1]==self.x[1]*y.x[0] and self.x[1]*y.x[2]==self.x[2]*y.x[1] and self.x[2]*y.x[0]==self.x[0]*y.x[2]
|
||||||
|
|
||||||
|
def __add__(self,y):
|
||||||
|
#The main function to add self and y
|
||||||
|
#It uses the formulas I derived in projective coordinates.
|
||||||
|
#Projectives coordinates are more performant than the usual (x,y) coordinates
|
||||||
|
#because it we don't need to compute inverse mod p, which is faster.
|
||||||
|
z=EllipticCurvePoint([0,0,0],self.a,self.b,self.p)
|
||||||
|
|
||||||
|
if self.EqualProj(y):
|
||||||
|
d=(2*self.x[1]*self.x[2])%self.p
|
||||||
|
d3=pow(d,3,self.p)
|
||||||
|
n=(3*pow(self.x[0],2,self.p)+self.a*pow(self.x[2],2,self.p))%self.p
|
||||||
|
|
||||||
|
z.x[0]=(pow(n,2,self.p)*d*self.x[2]-2*d3*self.x[0])%self.p
|
||||||
|
z.x[1]=(3*self.x[0]*n*pow(d,2,self.p)-pow(n,3,self.p)*self.x[2]-self.x[1]*d3)%self.p
|
||||||
|
z.x[2]=(self.x[2]*d3)%self.p
|
||||||
|
else:
|
||||||
|
d=(y.x[0]*self.x[2]-y.x[2]*self.x[0])%self.p
|
||||||
|
d3=pow(d,3,self.p)
|
||||||
|
n=(y.x[1]*self.x[2]-self.x[1]*y.x[2])%self.p
|
||||||
|
|
||||||
|
z.x[0]=(y.x[2]*self.x[2]*pow(n,2,self.p)*d-d3*(y.x[2]*self.x[0]+y.x[0]*self.x[2]))%self.p
|
||||||
|
z.x[1]=(pow(d,2,self.p)*n*(2*self.x[0]*y.x[2]+y.x[0]*self.x[2])-pow(n,3,self.p)*self.x[2]*y.x[2]-self.x[1]*d3*y.x[2])%self.p
|
||||||
|
z.x[2]=(self.x[2]*d3*y.x[2])%self.p
|
||||||
|
|
||||||
|
return z
|
||||||
|
|
||||||
|
def __mul__(self,n):
|
||||||
|
#The fast multiplication of point n times by itself.
|
||||||
|
b=Base(n,2)
|
||||||
|
t=EllipticCurvePoint(self.x,self.a,self.b,self.p)
|
||||||
|
b.pop()
|
||||||
|
while b:
|
||||||
|
t+=t
|
||||||
|
if b.pop():
|
||||||
|
t+=self
|
||||||
|
|
||||||
|
return t
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
#print a point in (x,y) coordinate.
|
||||||
|
return "x=%d\ny=%d\n"%((self.x[0]*InvMod(self.x[2],self.p))%self.p,(self.x[1]*InvMod(self.x[2],self.p))%self.p)
|
||||||
|
|
||||||
|
def __eq__(self,x):
|
||||||
|
#Does self==x ?
|
||||||
|
return self.x==x.x and self.a==x.a and self.b==x.b and self.p==x.p
|
||||||
|
|
||||||
|
def __ne__(self,x):
|
||||||
|
#Does self!=x ?
|
||||||
|
return self.x!=x.x or self.a!=x.a or self.b!=x.b or self.p!=x.p
|
||||||
|
|
||||||
|
def Check(self):
|
||||||
|
#Is self on the curve ?
|
||||||
|
return (self.x[0]**3+self.a*self.x[0]*self.x[2]**2+self.b*self.x[2]**3-self.x[1]**2*self.x[2])%self.p==0
|
||||||
|
|
||||||
|
def GeneratePrivateKey(self):
|
||||||
|
#Generate a private key. It's just a random number between 1 and n-1.
|
||||||
|
#Of course, this function isn't cryptographically secure.
|
||||||
|
#Don't use it to generate your key. Use a cryptographically secure source of randomness instead.
|
||||||
|
#self.d = random.randint(1,self.n-1)
|
||||||
|
self.d = int(os.urandom(32).encode("hex"), 16) # Better random fix
|
||||||
|
|
||||||
|
def SignECDSA(self,m):
|
||||||
|
#Sign a message. The private key is self.d .
|
||||||
|
h=hashlib.new("SHA256")
|
||||||
|
h.update(m)
|
||||||
|
z=int(h.hexdigest(),16)
|
||||||
|
|
||||||
|
r=0
|
||||||
|
s=0
|
||||||
|
while not r or not s:
|
||||||
|
k=random.randint(1,self.n-1)
|
||||||
|
R=self*k
|
||||||
|
R.Normalize()
|
||||||
|
r=R.x[0]%self.n
|
||||||
|
s=(InvMod(k,self.n)*(z+r*self.d))%self.n
|
||||||
|
|
||||||
|
return (r,s)
|
||||||
|
|
||||||
|
def CheckECDSA(self,sig,m):
|
||||||
|
#Check a signature (r,s) of the message m using the public key self.Q
|
||||||
|
# and the generator which is self.
|
||||||
|
#This is not the one used by Bitcoin because the public key isn't known;
|
||||||
|
# only a hash of the public key is known. See the next function.
|
||||||
|
(r,s)=sig
|
||||||
|
|
||||||
|
h=hashlib.new("SHA256")
|
||||||
|
h.update(m)
|
||||||
|
z=int(h.hexdigest(),16)
|
||||||
|
|
||||||
|
if self.Q.x[2]==0:
|
||||||
|
return False
|
||||||
|
if not self.Q.Check():
|
||||||
|
return False
|
||||||
|
if (self.Q*self.n).x[2]!=0:
|
||||||
|
return False
|
||||||
|
if r<1 or r>self.n-1 or s<1 or s>self.n-1:
|
||||||
|
return False
|
||||||
|
|
||||||
|
w=InvMod(s,self.n)
|
||||||
|
u1=(z*w)%self.n
|
||||||
|
u2=(r*w)%self.n
|
||||||
|
R=self*u1+self.Q*u2
|
||||||
|
R.Normalize()
|
||||||
|
|
||||||
|
return (R.x[0]-r)%self.n==0
|
||||||
|
|
||||||
|
def VerifyMessageFromBitcoinAddress(self,adresse,m,sig):
|
||||||
|
#Check a signature (r,s) for the message m signed by the Bitcoin
|
||||||
|
# address "addresse".
|
||||||
|
h=hashlib.new("SHA256")
|
||||||
|
h.update(m)
|
||||||
|
z=int(h.hexdigest(),16)
|
||||||
|
|
||||||
|
(r,s)=sig
|
||||||
|
x=r
|
||||||
|
y2=(pow(x,3,self.p)+self.a*x+self.b)%self.p
|
||||||
|
y=Cipolla(y2,self.p)
|
||||||
|
|
||||||
|
for i in range(2):
|
||||||
|
kG=EllipticCurvePoint([x,y,1],self.a,self.b,self.p,self.n)
|
||||||
|
mzG=self*((-z)%self.n)
|
||||||
|
self.Q=(kG*s+mzG)*InvMod(r,self.n)
|
||||||
|
|
||||||
|
adr=self.BitcoinAddresFromPublicKey()
|
||||||
|
if adr==adresse:
|
||||||
|
break
|
||||||
|
y=(-y)%self.p
|
||||||
|
|
||||||
|
if adr!=adresse:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def BitcoinAddressFromPrivate(self,pri=None):
|
||||||
|
#Transform a private key in base58 encoding to a bitcoin address.
|
||||||
|
#normal means "uncompressed".
|
||||||
|
if not pri:
|
||||||
|
print "Private Key :",
|
||||||
|
pri=raw_input()
|
||||||
|
|
||||||
|
normal=(len(pri)==51)
|
||||||
|
pri=b58decode(pri)
|
||||||
|
|
||||||
|
if normal:
|
||||||
|
pri=pri[1:-4]
|
||||||
|
else:
|
||||||
|
pri=pri[1:-5]
|
||||||
|
|
||||||
|
self.d=int(Byte2Hex(pri),16)
|
||||||
|
|
||||||
|
return self.BitcoinAddress(normal)
|
||||||
|
|
||||||
|
def PrivateEncoding(self,normal=True):
|
||||||
|
#Encode a private key self.d to base58 encoding.
|
||||||
|
p=Int2Byte(self.d,32)
|
||||||
|
p="\80"+p
|
||||||
|
|
||||||
|
if not normal:
|
||||||
|
p+=chr(1)
|
||||||
|
|
||||||
|
h=hashlib.new("SHA256")
|
||||||
|
h.update(p)
|
||||||
|
s=h.digest()
|
||||||
|
|
||||||
|
h=hashlib.new("SHA256")
|
||||||
|
h.update(s)
|
||||||
|
s=h.digest()
|
||||||
|
|
||||||
|
cs=s[:4]
|
||||||
|
|
||||||
|
p+=cs
|
||||||
|
p=b58encode(p)
|
||||||
|
|
||||||
|
return p
|
||||||
|
|
||||||
|
def BitcoinAddresFromPublicKey(self,normal=True):
|
||||||
|
#Find the bitcoin address from the public key self.Q
|
||||||
|
#We do normalization to go from the projective coordinates to the usual
|
||||||
|
# (x,y) coordinates.
|
||||||
|
self.Q.Normalize()
|
||||||
|
if normal:
|
||||||
|
pk=chr(4)+Int2Byte(self.Q.x[0],32)+Int2Byte((self.Q.x[1])%self.p,32)
|
||||||
|
else:
|
||||||
|
if self.Q.x[1]%2==0:
|
||||||
|
pk=chr(2)+Int2Byte(self.Q.x[0],32)
|
||||||
|
else:
|
||||||
|
pk=chr(3)+Int2Byte(self.Q.x[0],32)
|
||||||
|
|
||||||
|
version=chr(0)
|
||||||
|
|
||||||
|
h=hashlib.new("SHA256")
|
||||||
|
h.update(pk)
|
||||||
|
s=h.digest()
|
||||||
|
|
||||||
|
h=hashlib.new("RIPEMD160")
|
||||||
|
h.update(s)
|
||||||
|
kh=version+h.digest()
|
||||||
|
|
||||||
|
h=hashlib.new("SHA256")
|
||||||
|
h.update(kh)
|
||||||
|
cs=h.digest()
|
||||||
|
|
||||||
|
h=hashlib.new("SHA256")
|
||||||
|
h.update(cs)
|
||||||
|
cs=h.digest()[:4]
|
||||||
|
|
||||||
|
adr=b58encode(kh+cs)
|
||||||
|
|
||||||
|
return adr
|
||||||
|
|
||||||
|
def BitcoinAddress(self,normal=True):
|
||||||
|
#Computes a bitcoin address given the private key self.d.
|
||||||
|
self.Q=self*self.d
|
||||||
|
|
||||||
|
return self.BitcoinAddresFromPublicKey(normal)
|
||||||
|
|
||||||
|
def BitcoinAddressGenerator(self,k,filename):
|
||||||
|
#Generate Bitcoin address and write them in the filename in the multibit format.
|
||||||
|
#Change the date as you like.
|
||||||
|
f=open(filename,"w")
|
||||||
|
for i in range(k):
|
||||||
|
self.GeneratePrivateKey()
|
||||||
|
adr=self.BitcoinAddress()
|
||||||
|
p=self.PrivateEncoding()
|
||||||
|
f.write("#%s\n%s 2014-01-30T12:00:00Z\n"%(adr,p))
|
||||||
|
|
||||||
|
#print hex(self.d)
|
||||||
|
print adr,p
|
||||||
|
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
def TestSign(self):
|
||||||
|
#Test signature
|
||||||
|
self.GeneratePrivateKey()
|
||||||
|
self.Q=self*self.d
|
||||||
|
m="Hello World"
|
||||||
|
adresse=self.BitcoinAddresFromPublicKey()
|
||||||
|
(r,s)=self.SignECDSA(m)
|
||||||
|
|
||||||
|
m="Hello World"
|
||||||
|
print self.VerifyMessageFromBitcoinAddress(adresse,m,r,s)
|
||||||
|
|
||||||
|
def Normalize(self):
|
||||||
|
#Transform projective coordinates of self to the usual (x,y) coordinates.
|
||||||
|
if self.x[2]:
|
||||||
|
self.x[0]=(self.x[0]*InvMod(self.x[2],self.p))%self.p
|
||||||
|
self.x[1]=(self.x[1]*InvMod(self.x[2],self.p))%self.p
|
||||||
|
self.x[2]=1
|
||||||
|
elif self.x[1]:
|
||||||
|
self.x[0]=(self.x[0]*InvMod(self.x[1],self.p))%self.p
|
||||||
|
self.x[1]=1
|
||||||
|
elif self.x[0]:
|
||||||
|
self.x[0]=1
|
||||||
|
else:
|
||||||
|
raise Exception
|
||||||
|
|
||||||
|
def Bitcoin():
    """Build Bitcoin's secp256k1 generator point.

    Returns an EllipticCurvePoint at the generator G carrying the curve
    parameters (a, b, p) and the group order n.
    """
    # secp256k1: y^2 = x^3 + 7 over F_p
    curve_a = 0
    curve_b = 7
    field_p = 2**256-2**32-2**9-2**8-2**7-2**6-2**4-1

    # Generator coordinates and its order n
    gen_x = int("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",16)
    gen_y = int("483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8",16)
    order = int("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",16)

    return EllipticCurvePoint([gen_x, gen_y, 1], curve_a, curve_b, field_p, order)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Demo / smoke test of the Bitcoin elliptic curve helpers
    bitcoin=Bitcoin()

    #Generate the public key from the private one
    print bitcoin.BitcoinAddressFromPrivate("23DKRBLkeDbcSaddsMYLAHXhanPmGwkWAhSPVGbspAkc72Hw9BdrDF")
    print bitcoin.BitcoinAddress()

    #Print the bitcoin address of the public key generated at the previous line
    adr=bitcoin.BitcoinAddresFromPublicKey()
    print adr

    #Sign a message with the current address
    m="Hello World"
    sig=bitcoin.SignECDSA("Hello World")
    #Verify the message using only the bitcoin adress, the signature and the message.
    #Not using the public key as it is not needed.
    print bitcoin.VerifyMessageFromBitcoinAddress(adr,m,sig)
|
0
src/lib/BitcoinECC/__init__.py
Normal file
0
src/lib/BitcoinECC/__init__.py
Normal file
0
src/lib/__init__.py
Normal file
0
src/lib/__init__.py
Normal file
0
src/lib/cssvendor/__init__.py
Normal file
0
src/lib/cssvendor/__init__.py
Normal file
29
src/lib/cssvendor/cssvendor.py
Normal file
29
src/lib/cssvendor/cssvendor.py
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
import re
|
||||||
|
|
||||||
|
def prefix(content):
    """Return *content* with vendor-prefixed variants added for common CSS3 rules."""
    # Duplicate every @keyframes block with -webkit- and -moz- variants.
    keyframes_pattern = "@keyframes (.*? {.*?[^ ]})"
    content = re.sub(keyframes_pattern,
                     "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n",
                     content, flags=re.DOTALL)

    # Expand common CSS3 properties with -webkit-/-moz-/-o-/-ms- prefixes.
    property_pattern = '([^-\*])(border-radius|box-shadow|transition|animation|box-sizing|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])'
    content = re.sub(property_pattern,
                     '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4',
                     content)

    # Expand linear-gradient() values with vendor-prefixed copies.
    gradient_pattern = '(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])'
    gradient_replacement = ('\\1: -webkit-\\2(\\3);'
                            '\\1: -moz-\\2(\\3);'
                            '\\1: -o-\\2(\\3);'
                            '\\1: -ms-\\2(\\3);'
                            '\\1: \\2(\\3);')
    content = re.sub(gradient_pattern, gradient_replacement, content)

    return content
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Quick manual test: print the prefixed version of a small stylesheet
    print prefix("""
    .test {
        border-radius: 5px;
        background: linear-gradient(red, blue);
    }


    @keyframes flip {
        0% { transform: perspective(120px) rotateX(0deg) rotateY(0deg); }
        50% { transform: perspective(120px) rotateX(-180.1deg) rotateY(0deg) }
        100% { transform: perspective(120px) rotateX(-180deg) rotateY(-179.9deg); }
    }


    """)
|
21
src/lib/geventwebsocket/__init__.py
Normal file
21
src/lib/geventwebsocket/__init__.py
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
# Package version as (major, minor, micro, releaselevel, serial)
VERSION = (0, 9, 3, 'final', 0)

# Public API of the geventwebsocket package
__all__ = [
    'WebSocketApplication',
    'Resource',
    'WebSocketServer',
    'WebSocketError',
    'get_version'
]


def get_version(*args, **kwargs):
    # Delegate to utils.get_version; imported lazily to avoid an import cycle
    from .utils import get_version
    return get_version(*args, **kwargs)

try:
    from .resource import WebSocketApplication, Resource
    from .server import WebSocketServer
    from .exceptions import WebSocketError
except ImportError:
    # Allow importing the bare package even when submodule deps are missing
    pass
|
19
src/lib/geventwebsocket/exceptions.py
Normal file
19
src/lib/geventwebsocket/exceptions.py
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
from socket import error as socket_error
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketError(socket_error):
    """
    Base class for all websocket errors.
    """


class ProtocolError(WebSocketError):
    """
    Raised if an error occurs when de/encoding the websocket protocol.
    """


class FrameTooLargeException(ProtocolError):
    """
    Raised if a frame is received that is too large.
    """
|
0
src/lib/geventwebsocket/gunicorn/__init__.py
Normal file
0
src/lib/geventwebsocket/gunicorn/__init__.py
Normal file
6
src/lib/geventwebsocket/gunicorn/workers.py
Normal file
6
src/lib/geventwebsocket/gunicorn/workers.py
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
from geventwebsocket.handler import WebSocketHandler
|
||||||
|
from gunicorn.workers.ggevent import GeventPyWSGIWorker
|
||||||
|
|
||||||
|
|
||||||
|
class GeventWebSocketWorker(GeventPyWSGIWorker):
    """Gunicorn gevent worker that uses the websocket-capable WSGI handler."""
    wsgi_handler = WebSocketHandler  # Replace the default handler class
|
283
src/lib/geventwebsocket/handler.py
Normal file
283
src/lib/geventwebsocket/handler.py
Normal file
|
@ -0,0 +1,283 @@
|
||||||
|
# Modified: Werkzeug Debugger workaround in run_websocket(self):
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from gevent.pywsgi import WSGIHandler
|
||||||
|
from .websocket import WebSocket, Stream
|
||||||
|
from .logging import create_logger
|
||||||
|
|
||||||
|
|
||||||
|
class Client(object):
    """Simple record of one connected websocket client."""
    def __init__(self, address, ws):
        self.address = address  # Remote client address (as given by the handler)
        self.ws = ws            # The WebSocket object for this connection
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketHandler(WSGIHandler):
|
||||||
|
"""
|
||||||
|
Automatically upgrades the connection to a websocket.
|
||||||
|
|
||||||
|
To prevent the WebSocketHandler to call the underlying WSGI application,
|
||||||
|
but only setup the WebSocket negotiations, do:
|
||||||
|
|
||||||
|
mywebsockethandler.prevent_wsgi_call = True
|
||||||
|
|
||||||
|
before calling run_application(). This is useful if you want to do more
|
||||||
|
things before calling the app, and want to off-load the WebSocket
|
||||||
|
negotiations to this library. Socket.IO needs this for example, to send
|
||||||
|
the 'ack' before yielding the control to your WSGI app.
|
||||||
|
"""
|
||||||
|
|
||||||
|
SUPPORTED_VERSIONS = ('13', '8', '7')
|
||||||
|
GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
|
||||||
|
|
||||||
|
    def run_websocket(self):
        """
        Called when a websocket has been created successfully.

        Registers the client on the server, runs the WSGI application for the
        lifetime of the connection, and always deregisters/closes on exit.
        """

        if getattr(self, 'prevent_wsgi_call', False):
            return

        # In case WebSocketServer is not used
        if not hasattr(self.server, 'clients'):
            self.server.clients = {}

        # Since we're now a websocket connection, we don't care what the
        # application actually responds with for the http response

        try:
            self.server.clients[self.client_address] = Client(
                self.client_address, self.websocket)
            if self.application.__class__.__name__ == "DebuggedApplication":  # Modified: Werkzeug Debugger workaround (https://bitbucket.org/Jeffrey/gevent-websocket/issue/53/if-the-application-returns-a-generator-we)
                # Force-consume the generator the debugger wraps the app in
                list(self.application(self.environ, lambda s, h: []))
            else:
                self.application(self.environ, lambda s, h: [])
        finally:
            # Always deregister the client and close the socket
            del self.server.clients[self.client_address]
            if not self.websocket.closed:
                self.websocket.close()
            self.environ.update({
                'wsgi.websocket': None
            })
            self.websocket = None
|
||||||
|
|
||||||
|
    def run_application(self):
        # Give the server's pre-start hook a chance to claim the request;
        # a truthy return falls back to plain WSGI handling.
        if (hasattr(self.server, 'pre_start_hook')
                and self.server.pre_start_hook):
            self.logger.debug("Calling pre-start hook")
            if self.server.pre_start_hook(self):
                return super(WebSocketHandler, self).run_application()

        self.logger.debug("Initializing WebSocket")
        self.result = self.upgrade_websocket()

        if hasattr(self, 'websocket'):
            # Upgrade succeeded: flush the (empty) handshake response, then
            # hand control to the websocket application loop.
            if self.status and not self.headers_sent:
                self.write('')

            self.run_websocket()
        else:
            if self.status:
                # A status was set, likely an error so just send the response
                if not self.result:
                    self.result = []

                self.process_result()
                return

            # This handler did not handle the request, so defer it to the
            # underlying application object
            return super(WebSocketHandler, self).run_application()
|
||||||
|
|
||||||
|
def upgrade_websocket(self):
    """
    Attempt to upgrade the current environ into a websocket enabled
    connection. If successful, the environ dict will be updated with two
    new entries, `wsgi.websocket` and `wsgi.websocket_version`.

    :returns: Whether the upgrade was successful (a falsy value on a
        non-websocket request; an error-body list when the request was a
        malformed upgrade attempt).
    """
    # Some basic sanity checks first
    self.logger.debug("Validating WebSocket request")

    if self.environ.get('REQUEST_METHOD', '') != 'GET':
        # This is not a websocket request, so we must not handle it
        self.logger.debug('Can only upgrade connection if using GET method.')
        return

    upgrade = self.environ.get('HTTP_UPGRADE', '').lower()

    if upgrade == 'websocket':
        connection = self.environ.get('HTTP_CONNECTION', '').lower()

        if 'upgrade' not in connection:
            # This is not a websocket request, so we must not handle it
            self.logger.warning("Client didn't ask for a connection "
                                "upgrade")
            return
    else:
        # This is not a websocket request, so we must not handle it
        return

    if self.request_version != 'HTTP/1.1':
        # Bug fix: this used to send '402 Bad Request' -- 402 is
        # "Payment Required"; the Bad Request status code is 400.
        self.start_response('400 Bad Request', [])
        self.logger.warning("Bad server protocol in headers")

        return ['Bad protocol version']

    if self.environ.get('HTTP_SEC_WEBSOCKET_VERSION'):
        return self.upgrade_connection()
    else:
        self.logger.warning("No protocol defined")
        self.start_response('426 Upgrade Required', [
            ('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))])

        return ['No Websocket protocol version defined']
||||||
|
def upgrade_connection(self):
    """
    Validate and 'upgrade' the HTTP request to a WebSocket request.

    If the upgrade succeeds the handler will have called `start_response`
    with a status of `101`, and the environ will have been updated with
    `wsgi.websocket` and `wsgi.websocket_version` keys.

    :return: A WSGI response iterator if something went awry, otherwise
        None (the 101 response has been started).
    """
    self.logger.debug("Attempting to upgrade connection")

    version = self.environ.get("HTTP_SEC_WEBSOCKET_VERSION")

    if version not in self.SUPPORTED_VERSIONS:
        msg = "Unsupported WebSocket Version: {0}".format(version)

        self.logger.warning(msg)
        # Advertise the versions we do speak, per RFC 6455 4.4.
        self.start_response('400 Bad Request', [
            ('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))
        ])

        return [msg]

    key = self.environ.get("HTTP_SEC_WEBSOCKET_KEY", '').strip()

    if not key:
        # 5.2.1 (3)
        msg = "Sec-WebSocket-Key header is missing/empty"

        self.logger.warning(msg)
        self.start_response('400 Bad Request', [])

        return [msg]

    try:
        # The key must be 16 bytes of base64-encoded random data.
        # NOTE(review): b64decode raising TypeError on bad input is
        # Python 2 behaviour (py3 raises binascii.Error) -- confirm.
        key_len = len(base64.b64decode(key))
    except TypeError:
        msg = "Invalid key: {0}".format(key)

        self.logger.warning(msg)
        self.start_response('400 Bad Request', [])

        return [msg]

    if key_len != 16:
        # 5.2.1 (3)
        msg = "Invalid key: {0}".format(key)

        self.logger.warning(msg)
        self.start_response('400 Bad Request', [])

        return [msg]

    # Check for WebSocket Protocols
    requested_protocols = self.environ.get(
        'HTTP_SEC_WEBSOCKET_PROTOCOL', '')
    protocol = None

    if hasattr(self.application, 'app_protocol'):
        # Ask the application which subprotocol it speaks for this path,
        # and accept it only if the client actually requested it.
        allowed_protocol = self.application.app_protocol(
            self.environ['PATH_INFO'])

        if allowed_protocol and allowed_protocol in requested_protocols:
            protocol = allowed_protocol
            self.logger.debug("Protocol allowed: {0}".format(protocol))

    self.websocket = WebSocket(self.environ, Stream(self), self)
    self.environ.update({
        'wsgi.websocket_version': version,
        'wsgi.websocket': self.websocket
    })

    # Sec-WebSocket-Accept = base64(sha1(key + GUID)), RFC 6455 4.2.2.
    headers = [
        ("Upgrade", "websocket"),
        ("Connection", "Upgrade"),
        ("Sec-WebSocket-Accept", base64.b64encode(
            hashlib.sha1(key + self.GUID).digest())),
    ]

    if protocol:
        headers.append(("Sec-WebSocket-Protocol", protocol))

    self.logger.debug("WebSocket request accepted, switching protocols")
    self.start_response("101 Switching Protocols", headers)
||||||
|
@property
def logger(self):
    """Logger shared with the owning server, created on first access."""
    server = self.server
    if not hasattr(server, 'logger'):
        server.logger = create_logger(__name__)
    return server.logger
|
||||||
|
def log_request(self):
    """Log the request line, but keep 101 protocol switches quiet."""
    if '101' in self.status:
        return
    self.logger.info(self.format_request())
||||||
|
@property
def active_client(self):
    """The client record the server registered for this connection."""
    registry = self.server.clients
    return registry[self.client_address]
|
||||||
|
def start_response(self, status, headers, exc_info=None):
    """
    Begin the HTTP response via the parent pywsgi handler, then adjust
    the handler's response flags for a websocket connection.

    Called when the handler is ready to send a response back to the
    remote endpoint; a websocket connection may not have been created.
    """
    write = super(WebSocketHandler, self).start_response(
        status, headers, exc_info=exc_info)

    # No-op unless 'wsgi.websocket' is set in the environ.
    self._prepare_response()

    return write
|
||||||
|
def _prepare_response(self):
    """
    Sets up the ``pywsgi.Handler`` to work with a websocket response.

    This is used by other projects that need to support WebSocket
    connections as part of a larger effort.  Does nothing unless the
    environ shows an established websocket connection.
    """
    # Must run before any headers hit the wire.
    assert not self.headers_sent

    if not self.environ.get('wsgi.websocket'):
        # a WebSocket connection is not established, do nothing
        return

    # So that `finalize_headers` doesn't write a Content-Length header
    self.provided_content_length = False

    # The websocket is now controlling the response
    self.response_use_chunked = False

    # Once the request is over, the connection must be closed
    self.close_connection = True

    # Prevents the Date header from being written
    self.provided_date = True
31
src/lib/geventwebsocket/logging.py
Normal file
31
src/lib/geventwebsocket/logging.py
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
from logging import getLogger, StreamHandler, getLoggerClass, Formatter, DEBUG
|
||||||
|
|
||||||
|
|
||||||
|
def create_logger(name, debug=False, format=None):
    """
    Return the logger *name*, reconfigured with a single stream handler.

    When *debug* is true the logger reports DEBUG as its effective level
    (unless a level was explicitly set) and its handler emits records;
    with *debug* false the handler silently swallows everything.
    """
    base_cls = getLoggerClass()

    class DebugLogger(base_cls):
        def getEffectiveLevel(self):
            # An unset level while debugging behaves as DEBUG.
            if self.level == 0 and debug:
                return DEBUG
            return base_cls.getEffectiveLevel(self)

    class DebugHandler(StreamHandler):
        def emit(self, record):
            # Records only reach the stream in debug mode.
            if debug:
                StreamHandler.emit(self, record)

    stream_handler = DebugHandler()
    stream_handler.setLevel(DEBUG)

    if format:
        stream_handler.setFormatter(Formatter(format))

    logger = getLogger(name)
    del logger.handlers[:]            # drop handlers from any prior call
    logger.__class__ = DebugLogger    # swap in the debug-aware class
    logger.addHandler(stream_handler)

    return logger
0
src/lib/geventwebsocket/protocols/__init__.py
Normal file
0
src/lib/geventwebsocket/protocols/__init__.py
Normal file
35
src/lib/geventwebsocket/protocols/base.py
Normal file
35
src/lib/geventwebsocket/protocols/base.py
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
class BaseProtocol(object):
    """Default pass-through protocol: every event is forwarded verbatim
    to the coupled application."""

    PROTOCOL_NAME = ''

    def __init__(self, app):
        self._app = app

    def on_open(self):
        """Forward the connection-opened event."""
        self.app.on_open()

    def on_message(self, message):
        """Forward one incoming message."""
        self.app.on_message(message)

    def on_close(self, reason=None):
        """Forward the connection-closed event."""
        self.app.on_close(reason)

    @property
    def app(self):
        """The coupled application; raises when none was attached."""
        if not self._app:
            raise Exception("No application coupled")
        return self._app

    @property
    def server(self):
        """The server behind the app's websocket, or None when the app
        has no socket yet."""
        application = self.app
        if hasattr(application, 'ws'):
            return application.ws.handler.server
        return None

    @property
    def handler(self):
        """The request handler behind the app's websocket, or None when
        the app has no socket yet."""
        application = self.app
        if hasattr(application, 'ws'):
            return application.ws.handler
        return None
234
src/lib/geventwebsocket/protocols/wamp.py
Normal file
234
src/lib/geventwebsocket/protocols/wamp.py
Normal file
|
@ -0,0 +1,234 @@
|
||||||
|
import inspect
|
||||||
|
import random
|
||||||
|
import string
|
||||||
|
import types
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ujson as json
|
||||||
|
except ImportError:
|
||||||
|
try:
|
||||||
|
import simplejson as json
|
||||||
|
except ImportError:
|
||||||
|
import json
|
||||||
|
|
||||||
|
from ..exceptions import WebSocketError
|
||||||
|
from .base import BaseProtocol
|
||||||
|
|
||||||
|
|
||||||
|
def export_rpc(arg=None):
    """
    Mark a function as remotely callable (picked up by
    ``RemoteProcedures.register_object``).

    Usable as a bare decorator (``@export_rpc``), where the function's
    own name becomes the RPC name, or with an explicit name
    (``@export_rpc("name")``).

    Bug fix: the original returned ``None`` for any non-function
    argument, so ``@export_rpc("name")`` silently replaced the decorated
    function with ``None``.  Bare-decorator behaviour is unchanged.
    """
    if isinstance(arg, types.FunctionType):
        # Bare decorator usage: @export_rpc
        arg._rpc = arg.__name__
        return arg

    def decorator(func):
        # Parameterized usage: @export_rpc("custom_name")
        func._rpc = arg if arg is not None else func.__name__
        return func

    return decorator
|
|
||||||
|
|
||||||
|
def serialize(data):
    # Single choke-point for encoding outgoing WAMP frames; `json` is
    # whichever of ujson / simplejson / stdlib json imported above.
    return json.dumps(data)
|
class Prefixes(object):
    """Registry of CURIE prefixes and their expansions to full URIs
    (WAMP PREFIX messages)."""

    def __init__(self):
        self.prefixes = {}

    def add(self, prefix, uri):
        """Register *prefix* as shorthand for *uri*."""
        self.prefixes[prefix] = uri

    def resolve(self, curie_or_uri):
        """
        Expand a CURIE (``"prefix:procedure"``) to a full URI.

        Full http URIs pass through untouched; anything else (no colon,
        or an unknown prefix) raises.
        """
        if "http://" in curie_or_uri:
            return curie_or_uri
        if ':' in curie_or_uri:
            prefix, proc = curie_or_uri.split(':', 1)
            return self.prefixes[prefix] + proc
        raise Exception(curie_or_uri)
|
class RemoteProcedures(object):
    """Registry mapping URIs to callables, or to (owner, method) pairs
    registered via ``register_object``."""

    def __init__(self):
        self.calls = {}

    def register_procedure(self, uri, proc):
        """Expose the callable *proc* under *uri*."""
        self.calls[uri] = proc

    def register_object(self, uri, obj):
        """Expose every ``@export_rpc``-marked method of *obj* under
        *uri* + its rpc name."""
        for _, method in inspect.getmembers(obj, inspect.ismethod):
            if '_rpc' in method.__dict__:
                self.calls[uri + method._rpc] = (obj, method)

    def call(self, uri, args):
        """Invoke the procedure registered at *uri* with the argument
        sequence *args*; raises when *uri* is unknown."""
        if uri not in self.calls:
            raise Exception("no such uri '{}'".format(uri))

        proc = self.calls[uri]

        if not isinstance(proc, tuple):
            # Plain function registered via register_procedure.
            return self.calls[uri](*args)

        owner, method = proc
        if method.__self__ is None:
            # Unbound method: create an owner instance, then call.
            return method(owner(), *args)
        # Bound method: the instance is already baked in.
        return method(*args)
|
class Channels(object):
    """PubSub channel registry: URI -> list of subscribed clients."""

    def __init__(self):
        self.channels = {}

    def create(self, uri, prefix_matching=False):
        """Ensure a channel exists for *uri* (idempotent)."""
        if uri not in self.channels:
            self.channels[uri] = []

        # TODO: implement prefix matching

    def subscribe(self, uri, client):
        """Add *client* to the channel's subscriber list, if the channel
        exists (unknown channels are silently ignored)."""
        if uri in self.channels:
            self.channels[uri].append(client)

    def unsubscribe(self, uri, client):
        """Remove *client*; the channel is dropped once it is empty.

        Raises ValueError when the client was never subscribed (same as
        the original's ``list.index`` behaviour)."""
        if uri not in self.channels:
            return

        client_index = self.channels[uri].index(client)
        self.channels[uri].pop(client_index)

        if len(self.channels[uri]) == 0:
            del self.channels[uri]

    def publish(self, uri, event, exclude=None, eligible=None):
        """Send an EVENT frame to every subscriber of *uri*.

        Dead subscribers (send raises WebSocketError) are pruned."""
        if uri not in self.channels:
            return

        # TODO: exclude & eligible

        msg = [WampProtocol.MSG_EVENT, uri, event]

        # Bug fix: iterate over a snapshot -- the loop body removes dead
        # clients from the same list, and mutating a list while
        # iterating it skips the element after each removal.
        for client in list(self.channels[uri]):
            try:
                client.ws.send(serialize(msg))
            except WebSocketError:
                # Seems someone didn't unsubscribe before disconnecting
                self.channels[uri].remove(client)
|
class WampProtocol(BaseProtocol):
    """
    WAMP v1 subprotocol: routes PREFIX/CALL/SUBSCRIBE/UNSUBSCRIBE/PUBLISH
    frames to the prefix registry, the RPC registry and the server-wide
    pubsub channels.

    NOTE: this module is Python-2-only as written (``string.letters``,
    ``xrange``, ``unicode``, ``except Exception, e``).
    """

    # WAMP v1 message type ids (first element of every frame).
    MSG_WELCOME = 0
    MSG_PREFIX = 1
    MSG_CALL = 2
    MSG_CALL_RESULT = 3
    MSG_CALL_ERROR = 4
    MSG_SUBSCRIBE = 5
    MSG_UNSUBSCRIBE = 6
    MSG_PUBLISH = 7
    MSG_EVENT = 8

    PROTOCOL_NAME = "wamp"

    def __init__(self, *args, **kwargs):
        self.procedures = RemoteProcedures()
        self.prefixes = Prefixes()
        # 16 random alphanumeric chars identifying this session.
        self.session_id = ''.join(
            [random.choice(string.digits + string.letters)
             for i in xrange(16)])

        super(WampProtocol, self).__init__(*args, **kwargs)

    def register_procedure(self, *args, **kwargs):
        # Convenience passthrough to the RPC registry.
        self.procedures.register_procedure(*args, **kwargs)

    def register_object(self, *args, **kwargs):
        # Convenience passthrough to the RPC registry.
        self.procedures.register_object(*args, **kwargs)

    def register_pubsub(self, *args, **kwargs):
        # Channels live on the server so they are shared by all clients.
        if not hasattr(self.server, 'channels'):
            self.server.channels = Channels()

        self.server.channels.create(*args, **kwargs)

    def do_handshake(self):
        """Send the WELCOME frame that opens a WAMP session."""
        from geventwebsocket import get_version

        welcome = [
            self.MSG_WELCOME,
            self.session_id,
            1,  # WAMP protocol version
            'gevent-websocket/' + get_version()
        ]
        self.app.ws.send(serialize(welcome))

    def _get_exception_info(self, e):
        # [error uri, description, details] triple for a CALL_ERROR frame.
        uri = 'http://TODO#generic'
        desc = str(type(e))
        details = str(e)
        return [uri, desc, details]

    def rpc_call(self, data):
        """Handle a CALL frame: [MSG_CALL, call_id, curie_or_uri, args...]."""
        call_id, curie_or_uri = data[1:3]
        args = data[3:]

        if not isinstance(call_id, (str, unicode)):
            raise Exception()
        if not isinstance(curie_or_uri, (str, unicode)):
            raise Exception()

        uri = self.prefixes.resolve(curie_or_uri)

        try:
            result = self.procedures.call(uri, args)
            result_msg = [self.MSG_CALL_RESULT, call_id, result]
        except Exception, e:
            # Any failure is reported back as a CALL_ERROR frame.
            result_msg = [self.MSG_CALL_ERROR,
                          call_id] + self._get_exception_info(e)

        # NOTE(review): the result goes through app.on_message rather
        # than straight to the socket -- presumably so the application
        # can observe/echo it; confirm against WebSocketApplication.
        self.app.on_message(serialize(result_msg))

    def pubsub_action(self, data):
        """Handle SUBSCRIBE/UNSUBSCRIBE/PUBLISH frames."""
        action = data[0]
        curie_or_uri = data[1]

        if not isinstance(action, int):
            raise Exception()
        if not isinstance(curie_or_uri, (str, unicode)):
            raise Exception()

        uri = self.prefixes.resolve(curie_or_uri)

        # NOTE(review): subscribe/unsubscribe key the channel on the raw
        # data[1] while publish uses the resolved uri -- a CURIE
        # subscriber would never see published events; confirm intent.
        if action == self.MSG_SUBSCRIBE and len(data) == 2:
            self.server.channels.subscribe(data[1], self.handler.active_client)

        elif action == self.MSG_UNSUBSCRIBE and len(data) == 2:
            self.server.channels.unsubscribe(
                data[1], self.handler.active_client)

        elif action == self.MSG_PUBLISH and len(data) >= 3:
            payload = data[2] if len(data) >= 3 else None
            exclude = data[3] if len(data) >= 4 else None
            eligible = data[4] if len(data) >= 5 else None

            self.server.channels.publish(uri, payload, exclude, eligible)

    def on_open(self):
        self.app.on_open()
        self.do_handshake()

    def on_message(self, message):
        """Decode one incoming frame and dispatch it by message type."""
        data = json.loads(message)

        if not isinstance(data, list):
            raise Exception('incoming data is no list')

        if data[0] == self.MSG_PREFIX and len(data) == 3:
            prefix, uri = data[1:3]
            self.prefixes.add(prefix, uri)

        elif data[0] == self.MSG_CALL and len(data) >= 3:
            return self.rpc_call(data)

        elif data[0] in (self.MSG_SUBSCRIBE, self.MSG_UNSUBSCRIBE,
                         self.MSG_PUBLISH):
            return self.pubsub_action(data)
        else:
            raise Exception("Unknown call")
74
src/lib/geventwebsocket/resource.py
Normal file
74
src/lib/geventwebsocket/resource.py
Normal file
|
@ -0,0 +1,74 @@
|
||||||
|
import re
|
||||||
|
|
||||||
|
from .protocols.base import BaseProtocol
|
||||||
|
from .exceptions import WebSocketError
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketApplication(object):
    """
    Base class for an application served over a websocket.

    Subclasses override on_open / on_message / on_close; all events are
    routed through ``protocol_class`` (a BaseProtocol by default, which
    simply forwards them back here).
    """

    protocol_class = BaseProtocol

    def __init__(self, ws):
        self.protocol = self.protocol_class(self)
        self.ws = ws

    def handle(self):
        """Pump frames from the socket into the protocol until the
        connection dies."""
        self.protocol.on_open()

        while True:
            try:
                incoming = self.ws.receive()
            except WebSocketError:
                # Peer is gone: notify the protocol and stop pumping.
                self.protocol.on_close()
                break
            self.protocol.on_message(incoming)

    def on_open(self, *args, **kwargs):
        """Connection-opened hook; default is a no-op."""
        pass

    def on_close(self, *args, **kwargs):
        """Connection-closed hook; default is a no-op."""
        pass

    def on_message(self, message, *args, **kwargs):
        """Message hook; the default application is an echo server."""
        self.ws.send(message, **kwargs)

    @classmethod
    def protocol_name(cls):
        """Name of the websocket subprotocol this application speaks."""
        return cls.protocol_class.PROTOCOL_NAME
|
class Resource(object):
    """
    WSGI dispatcher that maps path regexes to WebSocketApplication
    classes (websocket requests) or plain WSGI apps (everything else).
    """

    def __init__(self, apps=None):
        # Bug fix: the default used to be a list (``[]``), but
        # _app_by_path iterates self.apps with .iteritems(), which
        # requires a mapping -- an empty dict is the correct default.
        self.apps = apps if apps else {}

    def _app_by_path(self, environ_path):
        # Which app matched the current path?  First regex match wins;
        # returns None when nothing matches.
        for path, app in self.apps.iteritems():
            if re.match(path, environ_path):
                return app

    def app_protocol(self, path):
        """Subprotocol name advertised by the app serving *path*
        ('' when the app declares none)."""
        app = self._app_by_path(path)

        if hasattr(app, 'protocol_name'):
            return app.protocol_name()
        else:
            return ''

    def __call__(self, environ, start_response):
        current_app = self._app_by_path(environ['PATH_INFO'])

        if current_app is None:
            raise Exception("No apps defined")

        if 'wsgi.websocket' in environ:
            # Upgraded request: instantiate the websocket app and hand
            # it the socket; handle() blocks for the connection's life.
            ws = environ['wsgi.websocket']
            current_app = current_app(ws)
            current_app.ws = ws  # TODO: needed?
            current_app.handle()

            return None
        else:
            # Plain HTTP request: delegate to the wrapped WSGI app.
            return current_app(environ, start_response)
34
src/lib/geventwebsocket/server.py
Normal file
34
src/lib/geventwebsocket/server.py
Normal file
|
@ -0,0 +1,34 @@
|
||||||
|
from gevent.pywsgi import WSGIServer
|
||||||
|
|
||||||
|
from .handler import WebSocketHandler
|
||||||
|
from .logging import create_logger
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketServer(WSGIServer):
    """gevent WSGIServer that serves websockets via WebSocketHandler."""

    debug_log_format = (
        '-' * 80 + '\n' +
        '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' +
        '%(message)s\n' +
        '-' * 80
    )

    def __init__(self, *args, **kwargs):
        # Pop our own options before WSGIServer inspects the kwargs.
        self.debug = kwargs.pop('debug', False)
        self.pre_start_hook = kwargs.pop('pre_start_hook', None)
        self._logger = None
        self.clients = {}  # address -> Client, maintained by the handler

        kwargs['handler_class'] = WebSocketHandler
        super(WebSocketServer, self).__init__(*args, **kwargs)

    def handle(self, socket, address):
        """Serve one incoming connection with the websocket handler."""
        self.handler_class(socket, address, self).handle()

    @property
    def logger(self):
        """Server-wide logger, created lazily on first access."""
        if not self._logger:
            self._logger = create_logger(
                __name__, self.debug, self.debug_log_format)

        return self._logger
128
src/lib/geventwebsocket/utf8validator.py
Normal file
128
src/lib/geventwebsocket/utf8validator.py
Normal file
|
@ -0,0 +1,128 @@
|
||||||
|
###############################################################################
|
||||||
|
##
|
||||||
|
## Copyright 2011-2013 Tavendo GmbH
|
||||||
|
##
|
||||||
|
## Note:
|
||||||
|
##
|
||||||
|
## This code is a Python implementation of the algorithm
|
||||||
|
##
|
||||||
|
## "Flexible and Economical UTF-8 Decoder"
|
||||||
|
##
|
||||||
|
## by Bjoern Hoehrmann
|
||||||
|
##
|
||||||
|
## bjoern@hoehrmann.de
|
||||||
|
## http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
|
||||||
|
##
|
||||||
|
## Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
## you may not use this file except in compliance with the License.
|
||||||
|
## You may obtain a copy of the License at
|
||||||
|
##
|
||||||
|
## http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
##
|
||||||
|
## Unless required by applicable law or agreed to in writing, software
|
||||||
|
## distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
## See the License for the specific language governing permissions and
|
||||||
|
## limitations under the License.
|
||||||
|
##
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
|
||||||
|
## use Cython implementation of UTF8 validator if available
|
||||||
|
##
|
||||||
|
try:
    # Prefer the Cython implementation when wsaccel is installed.
    from wsaccel.utf8validator import Utf8Validator
except:
    ## fallback to pure Python implementation

    class Utf8Validator:
        """
        Incremental UTF-8 validator with constant memory consumption (minimal
        state).

        Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
        Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/).
        """

        ## DFA transitions: the first 256 entries map a byte to its
        ## character class, the rest map (state * 16 + class) to the
        ## next state.  Do not edit -- table taken from Hoehrmann.
        UTF8VALIDATOR_DFA = [
            0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 00..1f
            0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 20..3f
            0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 40..5f
            0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 60..7f
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, # 80..9f
            7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, # a0..bf
            8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, # c0..df
            0xa,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x4,0x3,0x3, # e0..ef
            0xb,0x6,0x6,0x6,0x5,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8, # f0..ff
            0x0,0x1,0x2,0x3,0x5,0x8,0x7,0x1,0x1,0x1,0x4,0x6,0x1,0x1,0x1,0x1, # s0..s0
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1, # s1..s2
            1,2,1,1,1,1,1,2,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1, # s3..s4
            1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,3,1,3,1,1,1,1,1,1, # s5..s6
            1,3,1,1,1,1,1,3,1,3,1,1,1,1,1,1,1,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1, # s7..s8
        ]

        UTF8_ACCEPT = 0   # sequence complete / valid so far
        UTF8_REJECT = 1   # invalid sequence encountered

        def __init__(self):
            self.reset()

        def decode(self, b):
            """
            Eat one UTF-8 octet, and validate on the fly.

            Returns UTF8_ACCEPT when enough octets have been consumed, in which case
            self.codepoint contains the decoded Unicode code point.

            Returns UTF8_REJECT when invalid UTF-8 was encountered.

            Returns some other positive integer when more octets need to be eaten.
            """
            # NOTE: `type` here is the byte's DFA character class (and
            # shadows the builtin -- kept for fidelity).
            type = Utf8Validator.UTF8VALIDATOR_DFA[b]

            if self.state != Utf8Validator.UTF8_ACCEPT:
                # Continuation byte: accumulate the low 6 bits.
                self.codepoint = (b & 0x3f) | (self.codepoint << 6)
            else:
                # Lead byte: mask off the length-marker bits.
                self.codepoint = (0xff >> type) & b

            self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + self.state * 16 + type]

            return self.state

        def reset(self):
            """
            Reset validator to start new incremental UTF-8 decode/validation.
            """
            self.state = Utf8Validator.UTF8_ACCEPT
            self.codepoint = 0
            self.i = 0   # total octets consumed across validate() calls

        def validate(self, ba):
            """
            Incrementally validate a chunk of bytes provided as string.

            Will return a quad (valid?, endsOnCodePoint?, currentIndex, totalIndex).

            As soon as an octet is encountered which renders the octet sequence
            invalid, a quad with valid? == False is returned. currentIndex returns
            the index within the currently consumed chunk, and totalIndex the
            index within the total consumed sequence that was the point of bail out.
            When valid? == True, currentIndex will be len(ba) and totalIndex the
            total amount of consumed bytes.

            NOTE(review): `xrange` and `ord(ba[i])` assume a Python 2
            byte string -- confirm before running on Python 3.
            """

            l = len(ba)

            for i in xrange(l):
                ## optimized version of decode(), since we are not interested in actual code points
                # (state << 4 is the same offset as state * 16 above)

                self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + (self.state << 4) + Utf8Validator.UTF8VALIDATOR_DFA[ord(ba[i])]]

                if self.state == Utf8Validator.UTF8_REJECT:
                    self.i += i
                    return False, False, i, self.i

            self.i += l

            return True, self.state == Utf8Validator.UTF8_ACCEPT, l, self.i
45
src/lib/geventwebsocket/utils.py
Normal file
45
src/lib/geventwebsocket/utils.py
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
|
||||||
|
def get_version(version=None):
    """Return a PEP 386-compliant version string from a 5-tuple VERSION.

    The tuple is (major, minor, micro, release, serial) with release in
    ('alpha', 'beta', 'rc', 'final').  ``None`` loads the package's own
    VERSION constant.
    """
    if version is None:
        from geventwebsocket import VERSION as version
    else:
        assert len(version) == 5
        assert version[3] in ('alpha', 'beta', 'rc', 'final')

    # main = X.Y, or X.Y.Z when the micro version is non-zero
    piece_count = 2 if version[2] == 0 else 3
    main = '.'.join(str(piece) for piece in version[:piece_count])

    # sub = .devN for pre-alpha snapshots, {a|b|c}N for pre-releases
    sub = ''
    if version[3] == 'alpha' and version[4] == 0:
        hg_changeset = get_hg_changeset()
        if hg_changeset:
            sub = '.dev{0}'.format(hg_changeset)
    elif version[3] != 'final':
        sub = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}[version[3]] + str(version[4])

    return str(main + sub)
|
def get_hg_changeset():
    """Return the working copy's Mercurial changeset id (stripped of the
    dirty-marker '+'), or None when `hg` fails or is unavailable."""
    process = subprocess.Popen(
        'hg id -i',
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )
    rev, err = process.communicate()

    # Any stderr output (no hg, not a repository, ...) means no id.
    if err:
        return None
    return rev.strip().replace('+', '')
543
src/lib/geventwebsocket/websocket.py
Normal file
543
src/lib/geventwebsocket/websocket.py
Normal file
|
@ -0,0 +1,543 @@
|
||||||
|
import struct
|
||||||
|
|
||||||
|
from socket import error
|
||||||
|
|
||||||
|
from .exceptions import ProtocolError
|
||||||
|
from .exceptions import WebSocketError
|
||||||
|
from .exceptions import FrameTooLargeException
|
||||||
|
|
||||||
|
from .utf8validator import Utf8Validator
|
||||||
|
|
||||||
|
|
||||||
|
# Human-readable reasons attached to WebSocketError exceptions raised
# by the WebSocket class below.
MSG_SOCKET_DEAD = "Socket is dead"
MSG_ALREADY_CLOSED = "Connection is already closed"
MSG_CLOSED = "Connection closed"
||||||
|
|
||||||
|
class WebSocket(object):
|
||||||
|
"""
|
||||||
|
Base class for supporting websocket operations.
|
||||||
|
|
||||||
|
:ivar environ: The http environment referenced by this connection.
|
||||||
|
:ivar closed: Whether this connection is closed/closing.
|
||||||
|
:ivar stream: The underlying file like object that will be read from /
|
||||||
|
written to by this WebSocket object.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__slots__ = ('utf8validator', 'utf8validate_last', 'environ', 'closed',
|
||||||
|
'stream', 'raw_write', 'raw_read', 'handler')
|
||||||
|
|
||||||
|
OPCODE_CONTINUATION = 0x00
|
||||||
|
OPCODE_TEXT = 0x01
|
||||||
|
OPCODE_BINARY = 0x02
|
||||||
|
OPCODE_CLOSE = 0x08
|
||||||
|
OPCODE_PING = 0x09
|
||||||
|
OPCODE_PONG = 0x0a
|
||||||
|
|
||||||
|
def __init__(self, environ, stream, handler):
    self.environ = environ  # WSGI environ of the upgrade request
    self.closed = False     # flipped once the connection is closing/closed

    self.stream = stream

    # Cache the stream's methods; all frame I/O goes through these.
    self.raw_write = stream.write
    self.raw_read = stream.read

    self.utf8validator = Utf8Validator()  # incremental text-frame validation
    self.handler = handler                # owning WebSocketHandler
|
def __del__(self):
    # Best-effort close when the object is garbage collected.
    try:
        self.close()
    except:
        # close() may fail if __init__ didn't complete
        pass
|
def _decode_bytes(self, bytestring):
|
||||||
|
"""
|
||||||
|
Internal method used to convert the utf-8 encoded bytestring into
|
||||||
|
unicode.
|
||||||
|
|
||||||
|
If the conversion fails, the socket will be closed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not bytestring:
|
||||||
|
return u''
|
||||||
|
|
||||||
|
try:
|
||||||
|
return bytestring.decode('utf-8')
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
self.close(1007)
|
||||||
|
|
||||||
|
raise
|
||||||
|
|
||||||
|
def _encode_bytes(self, text):
    """
    :returns: The utf-8 byte string equivalent of `text`.
    """
    # Already a (Python 2) byte string: pass through untouched.
    if isinstance(text, str):
        return text

    # Coerce other values (None, numbers, ...) to unicode first.
    # NOTE: `unicode` makes this method Python-2-only as written.
    if not isinstance(text, unicode):
        text = unicode(text or '')

    return text.encode('utf-8')
|
def _is_valid_close_code(self, code):
|
||||||
|
"""
|
||||||
|
:returns: Whether the returned close code is a valid hybi return code.
|
||||||
|
"""
|
||||||
|
if code < 1000:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if 1004 <= code <= 1006:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if 1012 <= code <= 1016:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if code == 1100:
|
||||||
|
# not sure about this one but the autobahn fuzzer requires it.
|
||||||
|
return False
|
||||||
|
|
||||||
|
if 2000 <= code <= 2999:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
@property
def current_app(self):
    """
    The application object serving this connection.

    Falls back to a do-nothing stand-in (only ``on_close``) when the
    server's application does not expose ``current_app`` -- kept for
    backwards compatibility with plain WSGI applications.
    """
    if hasattr(self.handler.server.application, 'current_app'):
        return self.handler.server.application.current_app
    else:
        # For backwards compatibility reasons
        class MockApp():
            def on_close(self, *args):
                pass

        return MockApp()
|
@property
|
||||||
|
def origin(self):
|
||||||
|
if not self.environ:
|
||||||
|
return
|
||||||
|
|
||||||
|
return self.environ.get('HTTP_ORIGIN')
|
||||||
|
|
||||||
|
@property
|
||||||
|
def protocol(self):
|
||||||
|
if not self.environ:
|
||||||
|
return
|
||||||
|
|
||||||
|
return self.environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL')
|
||||||
|
|
||||||
|
@property
|
||||||
|
def version(self):
|
||||||
|
if not self.environ:
|
||||||
|
return
|
||||||
|
|
||||||
|
return self.environ.get('HTTP_SEC_WEBSOCKET_VERSION')
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path(self):
|
||||||
|
if not self.environ:
|
||||||
|
return
|
||||||
|
|
||||||
|
return self.environ.get('PATH_INFO')
|
||||||
|
|
||||||
|
    @property
    def logger(self):
        # Delegate to the handler's logger so websocket messages share the
        # server's logging configuration.
        return self.handler.logger
|
||||||
|
|
||||||
|
    def handle_close(self, header, payload):
        """
        Called when a close frame has been decoded from the stream.

        An empty payload means a normal close (code 1000). Otherwise the
        payload must be at least 2 bytes: a big-endian close code followed by
        an optional UTF-8 reason, both of which are validated before the
        close is echoed back.

        :param header: The decoded `Header`.
        :param payload: The bytestring payload associated with the close frame.
        :raises ProtocolError: for a 1-byte payload or an invalid close code.
        :raises UnicodeError: if the close reason is not valid UTF-8.
        """
        if not payload:
            self.close(1000, None)

            return

        if len(payload) < 2:
            raise ProtocolError('Invalid close frame: {0} {1}'.format(
                header, payload))

        # First two bytes are the unsigned 16-bit close code.
        code = struct.unpack('!H', str(payload[:2]))[0]
        payload = payload[2:]

        if payload:
            # A fresh validator: the close reason is independent of any
            # fragmented text message currently in flight.
            validator = Utf8Validator()
            val = validator.validate(payload)

            if not val[0]:
                raise UnicodeError

        if not self._is_valid_close_code(code):
            raise ProtocolError('Invalid close code {0}'.format(code))

        self.close(code, payload)
|
||||||
|
|
||||||
|
    def handle_ping(self, header, payload):
        # A Ping must be answered with a Pong carrying the same payload.
        self.send_frame(payload, self.OPCODE_PONG)
|
||||||
|
|
||||||
|
    def handle_pong(self, header, payload):
        # Unsolicited/solicited Pongs are deliberately ignored.
        pass
|
||||||
|
|
||||||
|
    def read_frame(self):
        """
        Block until a full frame has been read from the socket.

        This is an internal method as calling this will not cleanup correctly
        if an exception is called. Use `receive` instead.

        :return: The header and payload as a tuple.
        :raises ProtocolError: if any RSV flag bit is set (no extensions
            are negotiated, so flags must be zero).
        :raises WebSocketError: if the payload is shorter than the header
            announced (unexpected EOF).
        """

        header = Header.decode_header(self.stream)

        if header.flags:
            raise ProtocolError

        if not header.length:
            return header, ''

        try:
            payload = self.raw_read(header.length)
        except error:
            # Socket-level failure: fall through to the length check below,
            # which raises a WebSocketError for the short read.
            payload = ''
        except Exception:
            # TODO log out this exception
            payload = ''

        if len(payload) != header.length:
            raise WebSocketError('Unexpected EOF reading frame payload')

        if header.mask:
            # Client frames are masked; undo the XOR mask before returning.
            payload = header.unmask_payload(payload)

        return header, payload
|
||||||
|
|
||||||
|
def validate_utf8(self, payload):
|
||||||
|
# Make sure the frames are decodable independently
|
||||||
|
self.utf8validate_last = self.utf8validator.validate(payload)
|
||||||
|
|
||||||
|
if not self.utf8validate_last[0]:
|
||||||
|
raise UnicodeError("Encountered invalid UTF-8 while processing "
|
||||||
|
"text message at payload octet index "
|
||||||
|
"{0:d}".format(self.utf8validate_last[3]))
|
||||||
|
|
||||||
|
    def read_message(self):
        """
        Return the next text or binary message from the socket.

        Reassembles fragmented messages frame by frame, answering control
        frames (ping/pong/close) as they interleave with data frames.

        This is an internal method as calling this will not cleanup correctly
        if an exception is called. Use `receive` instead.

        :returns: the decoded text message, a ``bytearray`` for a binary
            message, or ``None`` when a close frame ended the connection.
        """
        # ``opcode`` remembers the type of the in-flight fragmented message;
        # None means no message has been started yet.
        opcode = None
        message = ""

        while True:
            header, payload = self.read_frame()
            f_opcode = header.opcode

            if f_opcode in (self.OPCODE_TEXT, self.OPCODE_BINARY):
                # a new frame
                if opcode:
                    # Continuation frames must carry opcode 0.
                    raise ProtocolError("The opcode in non-fin frame is "
                                        "expected to be zero, got "
                                        "{0!r}".format(f_opcode))

                # Start reading a new message, reset the validator
                self.utf8validator.reset()
                self.utf8validate_last = (True, True, 0, 0)

                opcode = f_opcode

            elif f_opcode == self.OPCODE_CONTINUATION:
                if not opcode:
                    # Continuation with no message in flight.
                    raise ProtocolError("Unexpected frame with opcode=0")

            elif f_opcode == self.OPCODE_PING:
                self.handle_ping(header, payload)
                continue

            elif f_opcode == self.OPCODE_PONG:
                self.handle_pong(header, payload)
                continue

            elif f_opcode == self.OPCODE_CLOSE:
                self.handle_close(header, payload)
                return

            else:
                raise ProtocolError("Unexpected opcode={0!r}".format(f_opcode))

            if opcode == self.OPCODE_TEXT:
                # Validate each fragment as it arrives (streaming validator).
                self.validate_utf8(payload)

            message += payload

            if header.fin:
                break

        if opcode == self.OPCODE_TEXT:
            # Final pass ensures no multi-byte sequence was left dangling.
            self.validate_utf8(message)
            return message
        else:
            return bytearray(message)
|
||||||
|
|
||||||
|
    def receive(self):
        """
        Read and return a message from the stream. If `None` is returned, then
        the socket is considered closed/errored.

        Protocol failures are mapped to the appropriate close codes:
        1007 for invalid UTF-8, 1002 for protocol errors; socket errors
        close without a code and notify the application.

        :raises WebSocketError: if called after the socket was closed.
        """

        if self.closed:
            self.current_app.on_close(MSG_ALREADY_CLOSED)
            raise WebSocketError(MSG_ALREADY_CLOSED)

        try:
            return self.read_message()
        except UnicodeError:
            self.close(1007)
        except ProtocolError:
            self.close(1002)
        except error:
            self.close()
            self.current_app.on_close(MSG_CLOSED)

        # Any handled failure above falls through to the "closed" sentinel.
        return None
|
||||||
|
|
||||||
|
    def send_frame(self, message, opcode):
        """
        Send a frame over the websocket with message as its payload

        Text payloads are UTF-8 encoded first; binary payloads are coerced
        with ``str``. Server frames are never masked (mask argument '').

        :param message: The payload to send.
        :param opcode: One of the ``OPCODE_*`` constants.
        :raises WebSocketError: if the socket is already closed or the
            write fails.
        """
        if self.closed:
            self.current_app.on_close(MSG_ALREADY_CLOSED)
            raise WebSocketError(MSG_ALREADY_CLOSED)

        if opcode == self.OPCODE_TEXT:
            message = self._encode_bytes(message)
        elif opcode == self.OPCODE_BINARY:
            message = str(message)

        header = Header.encode_header(True, opcode, '', len(message), 0)

        try:
            # Single write keeps header + payload contiguous on the wire.
            self.raw_write(header + message)
        except error:
            raise WebSocketError(MSG_SOCKET_DEAD)
|
||||||
|
|
||||||
|
    def send(self, message, binary=None):
        """
        Send a frame over the websocket with message as its payload

        :param message: The payload.
        :param binary: Force a binary (True) or text (False) frame; when
            ``None``, anything that is not a str/unicode is sent as binary.
        :raises WebSocketError: if the underlying socket is dead; the
            application's ``on_close`` is notified first.
        """
        if binary is None:
            binary = not isinstance(message, (str, unicode))

        opcode = self.OPCODE_BINARY if binary else self.OPCODE_TEXT

        try:
            self.send_frame(message, opcode)
        except WebSocketError:
            self.current_app.on_close(MSG_SOCKET_DEAD)
            raise WebSocketError(MSG_SOCKET_DEAD)
|
||||||
|
|
||||||
|
    def close(self, code=1000, message=''):
        """
        Close the websocket and connection, sending the specified code and
        message. The underlying socket object is _not_ closed, that is the
        responsibility of the initiator.

        :param code: Close code to send (default 1000, normal closure).
        :param message: Optional close reason; encoded to UTF-8.
        """

        if self.closed:
            self.current_app.on_close(MSG_ALREADY_CLOSED)

        try:
            message = self._encode_bytes(message)

            # Close frame payload: 16-bit code followed by the reason bytes.
            self.send_frame(
                struct.pack('!H%ds' % len(message), code, message),
                opcode=self.OPCODE_CLOSE)
        except WebSocketError:
            # Failed to write the closing frame but it's ok because we're
            # closing the socket anyway.
            self.logger.debug("Failed to write closing frame -> closing socket")
        finally:
            self.logger.debug("Closed WebSocket")
            # Mark closed and drop stream references even if sending the
            # close frame failed, so later calls fail fast.
            self.closed = True

            self.stream = None
            self.raw_write = None
            self.raw_read = None

            self.environ = None

            #self.current_app.on_close(MSG_ALREADY_CLOSED)
|
||||||
|
|
||||||
|
|
||||||
|
class Stream(object):
    """
    Thin adapter exposing a handler's transport as a file-like object:
    ``read`` pulls bytes from the buffered request file while ``write``
    pushes bytes straight onto the underlying socket — exactly the surface
    the lower level websocket api needs.
    """

    __slots__ = ('handler', 'read', 'write')

    def __init__(self, handler):
        # Bind the bound methods once so the hot read/write paths avoid
        # repeated attribute lookups on handler.rfile / handler.socket.
        self.read = handler.rfile.read
        self.write = handler.socket.sendall
        self.handler = handler
|
||||||
|
|
||||||
|
|
||||||
|
class Header(object):
    """A single hybi websocket frame header: decoding from and encoding to
    the wire format described in RFC 6455 section 5.2."""

    __slots__ = ('fin', 'mask', 'opcode', 'flags', 'length')

    # Bit masks for the first two header bytes.
    FIN_MASK = 0x80
    OPCODE_MASK = 0x0f
    MASK_MASK = 0x80
    LENGTH_MASK = 0x7f

    RSV0_MASK = 0x40
    RSV1_MASK = 0x20
    RSV2_MASK = 0x10

    # bitwise mask that will determine the reserved bits for a frame header
    HEADER_FLAG_MASK = RSV0_MASK | RSV1_MASK | RSV2_MASK

    def __init__(self, fin=0, opcode=0, flags=0, length=0):
        # mask stays '' until decode_header reads a masking key off the wire.
        self.mask = ''
        self.fin = fin
        self.opcode = opcode
        self.flags = flags
        self.length = length

    def mask_payload(self, payload):
        # XOR each payload byte with the 4-byte masking key (RFC 6455 5.3).
        payload = bytearray(payload)
        mask = bytearray(self.mask)

        for i in xrange(self.length):
            payload[i] ^= mask[i % 4]

        return str(payload)

    # it's the same operation
    unmask_payload = mask_payload

    def __repr__(self):
        return ("<Header fin={0} opcode={1} length={2} flags={3} at "
                "0x{4:x}>").format(self.fin, self.opcode, self.length,
                                   self.flags, id(self))

    @classmethod
    def decode_header(cls, stream):
        """
        Decode a WebSocket header.

        Reads the fixed 2-byte prefix, then the extended 16/64-bit length
        and the 4-byte masking key when their flag bits demand it.

        :param stream: A file like object that can be 'read' from.
        :returns: A `Header` instance.
        :raises WebSocketError: on a short read at any point.
        :raises ProtocolError: for a fragmented control frame.
        :raises FrameTooLargeException: for a control frame over 125 bytes.
        """
        read = stream.read
        data = read(2)

        if len(data) != 2:
            raise WebSocketError("Unexpected EOF while decoding header")

        first_byte, second_byte = struct.unpack('!BB', data)

        header = cls(
            fin=first_byte & cls.FIN_MASK == cls.FIN_MASK,
            opcode=first_byte & cls.OPCODE_MASK,
            flags=first_byte & cls.HEADER_FLAG_MASK,
            length=second_byte & cls.LENGTH_MASK)

        has_mask = second_byte & cls.MASK_MASK == cls.MASK_MASK

        if header.opcode > 0x07:
            # Opcodes above 0x07 are control frames: they may not be
            # fragmented and carry at most 125 payload bytes.
            if not header.fin:
                raise ProtocolError(
                    "Received fragmented control frame: {0!r}".format(data))

            # Control frames MUST have a payload length of 125 bytes or less
            if header.length > 125:
                raise FrameTooLargeException(
                    "Control frame cannot be larger than 125 bytes: "
                    "{0!r}".format(data))

        if header.length == 126:
            # 16 bit length
            data = read(2)

            if len(data) != 2:
                raise WebSocketError('Unexpected EOF while decoding header')

            header.length = struct.unpack('!H', data)[0]
        elif header.length == 127:
            # 64 bit length
            data = read(8)

            if len(data) != 8:
                raise WebSocketError('Unexpected EOF while decoding header')

            header.length = struct.unpack('!Q', data)[0]

        if has_mask:
            mask = read(4)

            if len(mask) != 4:
                raise WebSocketError('Unexpected EOF while decoding header')

            header.mask = mask

        return header

    @classmethod
    def encode_header(cls, fin, opcode, mask, length, flags):
        """
        Encodes a WebSocket header.

        :param fin: Whether this is the final frame for this opcode.
        :param opcode: The opcode of the payload, see `OPCODE_*`
        :param mask: Whether the payload is masked.
        :param length: The length of the frame.
        :param flags: The RSV* flags.
        :return: A bytestring encoded header.
        :raises FrameTooLargeException: if the length exceeds 64 bits.
        """
        first_byte = opcode
        second_byte = 0
        extra = ''

        if fin:
            first_byte |= cls.FIN_MASK

        if flags & cls.RSV0_MASK:
            first_byte |= cls.RSV0_MASK

        if flags & cls.RSV1_MASK:
            first_byte |= cls.RSV1_MASK

        if flags & cls.RSV2_MASK:
            first_byte |= cls.RSV2_MASK

        # now deal with length complexities
        if length < 126:
            second_byte += length
        elif length <= 0xffff:
            second_byte += 126
            extra = struct.pack('!H', length)
        elif length <= 0xffffffffffffffff:
            second_byte += 127
            extra = struct.pack('!Q', length)
        else:
            raise FrameTooLargeException

        if mask:
            second_byte |= cls.MASK_MASK

            extra += mask

        return chr(first_byte) + chr(second_byte) + extra
|
23
src/lib/subtl/LICENCE
Normal file
23
src/lib/subtl/LICENCE
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
Copyright (c) 2012, Packetloop. All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are met:
|
||||||
|
* Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer in the
|
||||||
|
documentation and/or other materials provided with the distribution.
|
||||||
|
* Neither the name of Packetloop nor the names of its contributors may be
|
||||||
|
used to endorse or promote products derived from this software without
|
||||||
|
specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY
|
||||||
|
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||||
|
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||||
|
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||||
|
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
28
src/lib/subtl/README.md
Normal file
28
src/lib/subtl/README.md
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
# subtl
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
SUBTL is a **s**imple **U**DP **B**itTorrent **t**racker **l**ibrary for Python, licensed under the modified BSD license.
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
This short example will list a few IP Addresses from a certain hash:
|
||||||
|
|
||||||
|
from subtl import UdpTrackerClient
|
||||||
|
utc = UdpTrackerClient('tracker.openbittorrent.com', 80)
|
||||||
|
utc.connect()
|
||||||
|
if not utc.poll_once():
|
||||||
|
raise Exception('Could not connect')
|
||||||
|
print('Success!')
|
||||||
|
|
||||||
|
utc.announce(info_hash='089184ED52AA37F71801391C451C5D5ADD0D9501')
|
||||||
|
data = utc.poll_once()
|
||||||
|
if not data:
|
||||||
|
raise Exception('Could not announce')
|
||||||
|
for a in data['response']['peers']:
|
||||||
|
print(a)
|
||||||
|
|
||||||
|
## Caveats
|
||||||
|
|
||||||
|
* There is no automatic retrying of sending packets yet.
|
||||||
|
* This library won't download torrent files--it is simply a tracker client.
|
0
src/lib/subtl/__init__.py
Normal file
0
src/lib/subtl/__init__.py
Normal file
220
src/lib/subtl/subtl.py
Normal file
220
src/lib/subtl/subtl.py
Normal file
|
@ -0,0 +1,220 @@
|
||||||
|
'''
|
||||||
|
Based on the specification at http://bittorrent.org/beps/bep_0015.html
|
||||||
|
'''
|
||||||
|
import random
|
||||||
|
import struct
|
||||||
|
import time
|
||||||
|
import socket
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
|
||||||
|
__version__ = '0.0.1'
|
||||||
|
|
||||||
|
CONNECT = 0
|
||||||
|
ANNOUNCE = 1
|
||||||
|
SCRAPE = 2
|
||||||
|
ERROR = 3
|
||||||
|
|
||||||
|
|
||||||
|
def norm_info_hash(info_hash):
    """Return *info_hash* as a raw 20-byte string.

    A 40-character hex digest is converted to binary first; anything that
    is not 20 bytes after that is rejected.
    """
    raw = info_hash.decode('hex') if len(info_hash) == 40 else info_hash

    if len(raw) != 20:
        raise UdpTrackerClientException(
            'info_hash length is not 20: {}'.format(len(raw)))

    return raw
|
||||||
|
|
||||||
|
|
||||||
|
def info_hash_to_str(info_hash):
    """Return the hex representation of a raw *info_hash*.

    Fix: ``binascii`` was referenced without ever being imported anywhere
    in this module, so every call raised ``NameError``.
    """
    import binascii  # local import: the module header does not provide it

    return binascii.hexlify(info_hash)
|
||||||
|
|
||||||
|
|
||||||
|
class UdpTrackerClientException(Exception):
    """Raised for any client-side UDP tracker error: bad arguments,
    malformed info hashes, or unknown tracker responses."""
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class UdpTrackerClient:
    """Minimal client for the UDP BitTorrent tracker protocol (BEP 15).

    Requests are asynchronous: each call registers a transaction dict and
    fires a datagram; call :meth:`poll_once` to receive and complete one
    pending transaction.
    """

    def __init__(self, host, port):
        self.host = host
        self.port = port
        self.peer_port = 6881  # port advertised to the tracker for peers
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.conn_id = 0x41727101980  # BEP 15 magic initial connection id
        self.transactions = {}  # transaction_id -> pending request state
        self.peer_id = self._generate_peer_id()
        self.timeout = 2  # seconds to wait in poll_once()

    def connect(self):
        """Start the connect handshake; the reply carries the connection id
        used by all subsequent requests."""
        return self._send(CONNECT)

    def announce(self, **kwargs):
        """Announce for ``info_hash`` (required kwarg) and request peers.

        Any other BEP 15 announce field may be overridden via kwargs;
        sensible defaults are supplied for the rest.
        """
        if not kwargs:
            raise UdpTrackerClientException('arguments missing')
        args = {
            'peer_id': self.peer_id,
            'downloaded': 0,
            'left': 0,
            'uploaded': 0,
            'event': 0,
            'key': 0,
            'num_want': 10,
            'ip_address': 0,
            'port': self.peer_port,
        }
        args.update(kwargs)

        fields = 'info_hash peer_id downloaded left uploaded event ' \
            'ip_address key num_want port'

        # Check and raise if missing fields
        self._check_fields(args, fields)

        # Humans tend to use hex representations of the hash. Wasteful humans.
        args['info_hash'] = norm_info_hash(args['info_hash'])

        values = [args[a] for a in fields.split()]
        payload = struct.pack('!20s20sQQQLLLLH', *values)
        return self._send(ANNOUNCE, payload)

    def scrape(self, info_hash_list):
        """Request seeder/completed/leecher counts for up to 74 hashes."""
        if len(info_hash_list) > 74:
            raise UdpTrackerClientException('Max info_hashes is 74')

        payload = ''
        for info_hash in info_hash_list:
            info_hash = norm_info_hash(info_hash)
            payload += info_hash

        trans = self._send(SCRAPE, payload)
        # Remember the request order so the reply can be matched up.
        trans['sent_hashes'] = info_hash_list
        return trans

    def poll_once(self):
        """Receive and complete one pending transaction.

        :returns: the completed transaction dict, or ``None`` on timeout or
            an unknown transaction id.
        """
        self.sock.settimeout(self.timeout)
        try:
            response = self.sock.recv(10240)
        except socket.timeout:
            return

        # Reply layout: 4-byte action, 4-byte transaction id, then payload.
        header = response[:8]
        payload = response[8:]
        action, trans_id = struct.unpack('!LL', header)
        try:
            trans = self.transactions[trans_id]
        except KeyError:
            self.error('transaction_id not found')
            return
        trans['response'] = self._process_response(action, payload, trans)
        trans['completed'] = True
        del self.transactions[trans_id]
        return trans

    def error(self, message):
        # Minimal error reporting hook; override/replace as needed.
        print('error: {}'.format(message))

    def _send(self, action, payload=None):
        # Register the transaction before sending so a fast reply cannot
        # race the bookkeeping.
        if not payload:
            payload = ''
        trans_id, header = self._request_header(action)
        self.transactions[trans_id] = trans = {
            'action': action,
            'time': time.time(),
            'payload': payload,
            'completed': False,
        }
        self.sock.sendto(header + payload, (self.host, self.port))
        return trans

    def _request_header(self, action):
        # Random 32-bit transaction id + (conn_id, action, trans_id) header.
        trans_id = random.randint(0, (1 << 32) - 1)
        return trans_id, struct.pack('!QLL', self.conn_id, action, trans_id)

    def _process_response(self, action, payload, trans):
        """Dispatch a raw tracker reply to the per-action decoder."""
        if action == CONNECT:
            return self._process_connect(payload, trans)
        elif action == ANNOUNCE:
            return self._process_announce(payload, trans)
        elif action == SCRAPE:
            return self._process_scrape(payload, trans)
        elif action == ERROR:
            # Bug fix: this previously called the misspelled
            # self._proecss_error, which raised AttributeError on every
            # ERROR reply.
            return self._process_error(payload, trans)
        else:
            raise UdpTrackerClientException(
                'Unknown action response: {}'.format(action))

    def _process_connect(self, payload, trans):
        # The connect reply's 64-bit payload replaces the magic conn_id.
        self.conn_id = struct.unpack('!Q', payload)[0]
        return self.conn_id

    def _process_announce(self, payload, trans):
        # Fixed prefix: interval, leechers, seeders; then (ip, port) pairs.
        info_struct = '!LLL'
        info_size = struct.calcsize(info_struct)
        info = payload[:info_size]
        interval, leechers, seeders = struct.unpack(info_struct, info)

        peer_data = payload[info_size:]
        peer_struct = '!LH'
        peer_size = struct.calcsize(peer_struct)
        peer_count = len(peer_data) / peer_size
        peers = []

        for peer_offset in xrange(peer_count):
            off = peer_size * peer_offset
            peer = peer_data[off:off + peer_size]
            addr, port = struct.unpack(peer_struct, peer)
            peers.append({
                'addr': socket.inet_ntoa(struct.pack('!L', addr)),
                'port': port,
            })

        return {
            'interval': interval,
            'leechers': leechers,
            'seeders': seeders,
            'peers': peers,
        }

    def _process_scrape(self, payload, trans):
        # One (seeders, completed, leechers) triple per hash, in the order
        # the hashes were sent (recorded on the transaction by scrape()).
        info_struct = '!LLL'
        info_size = struct.calcsize(info_struct)
        info_count = len(payload) / info_size
        hashes = trans['sent_hashes']
        response = {}
        for info_offset in xrange(info_count):
            off = info_size * info_offset
            info = payload[off:off + info_size]
            seeders, completed, leechers = struct.unpack(info_struct, info)
            response[hashes[info_offset]] = {
                'seeders': seeders,
                'completed': completed,
                'leechers': leechers,
            }
        return response

    def _process_error(self, payload, trans):
        '''
        I haven't seen this action type be sent from a tracker, but I've left
        it here for the possibility.
        '''
        self.error(payload)
        return payload

    def _generate_peer_id(self):
        '''http://www.bittorrent.org/beps/bep_0020.html'''
        peer_id = '-PU' + __version__.replace('.', '-') + '-'
        remaining = 20 - len(peer_id)
        numbers = [str(random.randint(0, 9)) for _ in xrange(remaining)]
        peer_id += ''.join(numbers)
        assert(len(peer_id) == 20)
        return peer_id

    def _check_fields(self, args, fields):
        # Bug fix: the old loop iterated the *characters* of the fields
        # string and used dict.get(), which never raises KeyError — so the
        # check validated nothing. Check each whitespace-separated field
        # name for membership instead.
        for f in fields.split():
            if f not in args:
                raise UdpTrackerClientException('field missing: {}'.format(f))
|
||||||
|
|
175
src/main.py
Normal file
175
src/main.py
Normal file
|
@ -0,0 +1,175 @@
|
||||||
|
import os, sys
sys.path.append(os.path.dirname(__file__)) # Imports relative to main.py

# Load config
from Config import config

# Init logging
import logging
if config.action == "main":
    # Only the long-running "main" action writes a debug log file.
    if os.path.isfile("log/debug.log"): # Simple logrotate
        if os.path.isfile("log/debug-last.log"): os.unlink("log/debug-last.log")
        os.rename("log/debug.log", "log/debug-last.log")
    logging.basicConfig(format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s', level=logging.DEBUG, filename="log/debug.log")
else:
    logging.basicConfig(level=logging.DEBUG, stream=open(os.devnull,"w")) # No file logging if action is not main

# Console output is always enabled, with a compact format.
console_log = logging.StreamHandler()
console_log.setFormatter(logging.Formatter('%(name)s %(message)s', "%H:%M:%S"))
logging.getLogger('').addHandler(console_log) # Add console logger
logging.getLogger('').name = "-" # Remove root prefix

# Debug dependent configuration
if config.debug:
    console_log.setLevel(logging.DEBUG)
    from Debug import DebugHook
    from gevent import monkey; monkey.patch_all(thread=False) # thread=False because of pyfilesystem
else:
    console_log.setLevel(logging.INFO)
    from gevent import monkey; monkey.patch_all()

# Import gevent/time only after monkey patching so they pick up the patches.
import gevent
import time


logging.debug("Starting... %s" % config)
|
||||||
|
|
||||||
|
|
||||||
|
# Start serving UiServer and PeerServer
|
||||||
|
def main():
    """Start the UI server and the peer file server and block until both
    greenlets finish."""
    # Imports are deferred so other CLI actions don't pay their startup cost.
    from File import FileServer
    from Ui import UiServer
    logging.info("Creating UiServer....")
    ui_server = UiServer()

    logging.info("Creating FileServer....")
    file_server = FileServer()

    logging.info("Starting servers....")
    gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
|
||||||
|
|
||||||
|
|
||||||
|
# Site commands
|
||||||
|
|
||||||
|
def siteCreate():
    """Generate a new site: fresh keypair, data directory with a stub
    index.html, and a signed content.json."""
    logging.info("Generating new privatekey...")
    from src.Crypt import CryptBitcoin
    privatekey = CryptBitcoin.newPrivatekey()
    logging.info("-----------------------------------------------------------")
    logging.info("Site private key: %s (save it, required to modify the site)" % privatekey)
    # The site address is derived from the private key.
    address = CryptBitcoin.privatekeyToAddress(privatekey)
    logging.info("Site address: %s" % address)
    logging.info("-----------------------------------------------------------")

    logging.info("Creating directory structure...")
    from Site import Site
    os.mkdir("data/%s" % address)
    open("data/%s/index.html" % address, "w").write("Hello %s!" % address)

    logging.info("Creating content.json...")
    site = Site(address)
    site.signContent(privatekey)

    logging.info("Site created!")
|
||||||
|
|
||||||
|
|
||||||
|
def siteSign(address, privatekey=None):
    """Re-sign an existing site's content.json.

    :param address: site address (the site must already exist).
    :param privatekey: signing key; prompted for interactively if omitted.
    """
    from Site import Site
    logging.info("Signing site: %s..." % address)
    site = Site(address, allow_create = False)

    if not privatekey: # If no privatekey in args then ask it now
        import getpass
        privatekey = getpass.getpass("Private key (input hidden):")
    site.signContent(privatekey)
|
||||||
|
|
||||||
|
|
||||||
|
def siteVerify(address):
    """Verify a site's content.json signature and the checksums of all its
    files, logging an OK/ERROR line for each stage.

    Fix: corrected the "Verifing" typo in two log messages.
    """
    from Site import Site
    logging.info("Verifying site: %s..." % address)
    site = Site(address)

    logging.info("Verifying content.json signature...")
    if site.verifyFile("content.json", open(site.getPath("content.json"), "rb"), force=True) != False: # Force check the sign
        logging.info("[OK] content.json signed by address %s!" % address)
    else:
        logging.error("[ERROR] Content.json not signed by address %s!" % address)

    logging.info("Verifying site files...")
    bad_files = site.verifyFiles()
    if not bad_files:
        logging.info("[OK] All file sha1sum matches!")
    else:
        logging.error("[ERROR] Error during verifying site files!")
|
||||||
|
|
||||||
|
|
||||||
|
def siteAnnounce(address):
    """Announce a site to its tracker and print the peers found, with the
    round-trip time of the announce call."""
    # NOTE(review): this imports Site.Site while siblings use `from Site
    # import Site` — presumably both resolve to the same class; confirm.
    from Site.Site import Site
    logging.info("Announcing site %s to tracker..." % address)
    site = Site(address)

    s = time.time()
    site.announce()
    print "Response time: %.3fs" % (time.time()-s)
    print site.peers
|
||||||
|
|
||||||
|
|
||||||
|
def siteNeedFile(address, inner_path):
    """Fetch (or update) a single file of a site from its peers and print
    the result of the download attempt."""
    from Site import Site
    site = Site(address)
    # Gather peers first; needFile downloads from them.
    site.announce()
    print site.needFile(inner_path, update=True)
|
||||||
|
|
||||||
|
|
||||||
|
def sitePublish(address):
    """Push the latest content of a site to its peers, then keep serving
    file requests so peers can download the update."""
    from Site import Site
    from File import FileServer # We need fileserver to handle incoming file requests
    logging.info("Creating FileServer....")
    file_server = FileServer()
    file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity
    file_server.openport()
    if file_server.port_opened == False:
        # Without an open port peers can't connect back to fetch the update.
        logging.info("Port not opened, passive publishing not supported yet :(")
        return
    site = file_server.sites[address]
    site.settings["serving"] = True # Serving the site even if its disabled
    site.announce() # Gather peers
    site.publish(10) # Push to 10 peers
    logging.info("Serving files....")
    # Block on the file server so peers can pull the published files.
    gevent.joinall([file_server_thread])
|
||||||
|
|
||||||
|
|
||||||
|
# Crypto commands
|
||||||
|
|
||||||
|
def cryptoPrivatekeyToAddress(privatekey=None):
    """Print the site address derived from a private key.

    :param privatekey: the key; prompted for interactively if omitted.
    """
    from src.Crypt import CryptBitcoin
    if not privatekey: # If no privatekey in args then ask it now
        import getpass
        privatekey = getpass.getpass("Private key (input hidden):")

    print CryptBitcoin.privatekeyToAddress(privatekey)
|
||||||
|
|
||||||
|
|
||||||
|
# Peer
|
||||||
|
|
||||||
|
def peerPing(ip, port):
    """Ping a peer five times, printing each reply and its round-trip
    time, with a one second pause between attempts."""
    from Peer import Peer
    logging.info("Pinging 5 times peer: %s:%s..." % (ip, port))
    peer = Peer(ip, port)
    for i in range(5):
        s = time.time()
        print peer.ping(),
        print "Response time: %.3fs" % (time.time()-s)
        time.sleep(1)
|
||||||
|
|
||||||
|
|
||||||
|
def peerGetFile(ip, port, site, filename=None):
    """Download one file directly from a given peer and print its content
    along with the transfer time.

    :param site: site address; defaults to the configured homepage.
    :param filename: file to fetch; defaults to content.json.
    """
    from Peer import Peer
    if not site: site = config.homepage
    if not filename: filename = "content.json"
    logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, ip, port))
    peer = Peer(ip, port)
    s = time.time()
    print peer.getFile(site, filename).read()
    print "Response time: %.3fs" % (time.time()-s)
|
||||||
|
|
29
src/util/Event.py
Normal file
29
src/util/Event.py
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
# Based on http://stackoverflow.com/a/2022629
|
||||||
|
|
||||||
|
class Event(list):
    """A list of callbacks that can be fired like a function.

    Handlers added with append() run on every firing; handlers added
    with once() are removed right before their first (and only) run.
    """

    def __call__(self, *args, **kwargs):
        # Iterate over a copy so one-shot handlers can remove themselves
        # from the live list without disturbing iteration.
        for handler in self[:]:
            if hasattr(handler, "once"):  # One-shot handler: drop it before calling
                self.remove(handler)
            handler(*args, **kwargs)

    def __repr__(self):
        return "Event(%s)" % list.__repr__(self)

    def once(self, func):
        """Register *func* to fire only on the next event; returns self for chaining."""
        func.once = True
        self.append(func)
        return self
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":  # Manual demo: run this module directly to see handler semantics
    def say(pre, text):
        print "%s Say: %s" % (pre, text)

    onChanged = Event()
    onChanged.once(lambda pre: say(pre, "once"))  # Fires only for "#1", then is removed
    onChanged.append(lambda pre: say(pre, "always"))  # Fires for every event
    onChanged("#1")
    onChanged("#2")
    onChanged("#3")
|
114
src/util/Noparallel.py
Normal file
114
src/util/Noparallel.py
Normal file
|
@ -0,0 +1,114 @@
|
||||||
|
import gevent, time
|
||||||
|
|
||||||
|
class Noparallel(object): # Only allow function running once in same time
    """Decorator that serializes identical calls of the wrapped function.

    While a call with a given (function, args, kwargs) combination is
    running, further identical calls either wait for its result
    (blocking=True, the default) or get the running greenlet itself
    (blocking=False) instead of starting a second one.
    """

    def __init__(self, blocking=True):
        self.threads = {}  # Call key -> currently running greenlet
        self.blocking = blocking # Blocking: Acts like normal function else thread returned

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            # The key must include keyword *values* (sorted for a stable
            # order), not just the names: tuple(kwargs) yields only the key
            # names, which made calls differing solely in keyword values
            # wrongly share one thread.
            key = (func, tuple(args), tuple(sorted(kwargs.items()))) # Unique key for function including parameters
            if key in self.threads: # Thread already running (if using blocking mode)
                thread = self.threads[key]
                if self.blocking:
                    thread.join() # Blocking until its finished
                    return thread.value # Return the value
                else: # No blocking
                    if thread.ready(): # Its finished, create a new
                        thread = gevent.spawn(func, *args, **kwargs)
                        self.threads[key] = thread
                        return thread
                    else: # Still running
                        return thread
            else: # Thread not running
                thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread
                self.threads[key] = thread
                if self.blocking: # Wait for finish
                    thread.join()
                    ret = thread.value
                    if key in self.threads: del(self.threads[key]) # Allowing it to run again
                    return ret
                else: # No blocking just return the thread
                    return thread
        wrapper.func_name = func.func_name  # Preserve the wrapped name (Python 2)
        return wrapper
|
||||||
|
|
||||||
|
class Test():
|
||||||
|
@Noparallel()
|
||||||
|
def count(self):
|
||||||
|
for i in range(5):
|
||||||
|
print self, i
|
||||||
|
time.sleep(1)
|
||||||
|
return "%s return:%s" % (self, i)
|
||||||
|
|
||||||
|
|
||||||
|
class TestNoblock():
|
||||||
|
@Noparallel(blocking=False)
|
||||||
|
def count(self):
|
||||||
|
for i in range(5):
|
||||||
|
print self, i
|
||||||
|
time.sleep(1)
|
||||||
|
return "%s return:%s" % (self, i)
|
||||||
|
|
||||||
|
|
||||||
|
def testBlocking():  # Manual smoke test for Noparallel blocking mode
    test = Test()
    test2 = Test()
    print "Counting..."
    print "Creating class1/thread1"
    thread1 = gevent.spawn(test.count)
    print "Creating class1/thread2 (ignored)"
    thread2 = gevent.spawn(test.count)  # Same instance: should piggyback on thread1's run
    print "Creating class2/thread3"
    thread3 = gevent.spawn(test2.count)  # Different instance: counts independently

    print "Joining class1/thread1"
    thread1.join()
    print "Joining class1/thread2"
    thread2.join()
    print "Joining class2/thread3"
    thread3.join()

    print "Creating class1/thread4 (its finished, allowed again)"
    thread4 = gevent.spawn(test.count)  # Previous run finished, so a new one may start
    print "Joining thread4"
    thread4.join()

    print thread1.value, thread2.value, thread3.value, thread4.value
    print "Done."
|
||||||
|
|
||||||
|
def testNoblocking():  # Manual smoke test for Noparallel noblocking mode
    test = TestNoblock()
    test2 = TestNoblock()
    print "Creating class1/thread1"
    thread1 = test.count()  # Non-blocking: returns the greenlet immediately
    print "Creating class1/thread2 (ignored)"
    thread2 = test.count()  # Same call still running: expected to be the same greenlet
    print "Creating class2/thread3"
    thread3 = test2.count()  # Different instance: separate greenlet
    print "Joining class1/thread1"
    thread1.join()
    print "Joining class1/thread2"
    thread2.join()
    print "Joining class2/thread3"
    thread3.join()

    print "Creating class1/thread4 (its finished, allowed again)"
    thread4 = test.count()  # Prior run done: a fresh greenlet is spawned
    print "Joining thread4"
    thread4.join()

    print thread1.value, thread2.value, thread3.value, thread4.value
    print "Done."
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Monkey-patch blocking stdlib calls (time.sleep etc.) so gevent can
    # switch between greenlets during the demo runs.
    from gevent import monkey
    monkey.patch_all()

    print "Testing blocking mode..."
    testBlocking()
    print "Testing noblocking mode..."
    testNoblocking()
|
2
src/util/__init__.py
Normal file
2
src/util/__init__.py
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
from Event import Event
|
||||||
|
from Noparallel import Noparallel
|
19
tools/coffee/README.md
Normal file
19
tools/coffee/README.md
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
# CoffeeScript compiler for Windows
|
||||||
|
|
||||||
|
A simple command-line utility for Windows that will compile `*.coffee` files to JavaScript `*.js` files using [CoffeeScript](http://jashkenas.github.com/coffee-script/) and the venerable Windows Script Host, ubiquitous on Windows since the 90s.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
To use it, invoke `coffee.cmd` like so:
|
||||||
|
|
||||||
|
coffee input.coffee output.js
|
||||||
|
|
||||||
|
If an output is not specified, it is written to `stdout`. If neither an input nor an output is specified, then data is assumed to be on `stdin`. For example:
|
||||||
|
|
||||||
|
type input.coffee | coffee > output.js
|
||||||
|
|
||||||
|
Errors are written to `stderr`.
|
||||||
|
|
||||||
|
In the `test` directory there's a version of the standard CoffeeScript tests which can be kicked off using `test.cmd`. The test just attempts to compile the *.coffee files but doesn't execute them.
|
||||||
|
|
||||||
|
To upgrade to the latest CoffeeScript simply replace `coffee-script.js` from the upstream https://github.com/jashkenas/coffee-script/blob/master/extras/coffee-script.js (the tests will likely need updating as well, if you want to run them).
|
12
tools/coffee/coffee-script.js
Normal file
12
tools/coffee/coffee-script.js
Normal file
File diff suppressed because one or more lines are too long
2
tools/coffee/coffee.cmd
Normal file
2
tools/coffee/coffee.cmd
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
::For convenience
:: Forward all arguments to the WSH-based compiler; //nologo suppresses the cscript banner.
@cscript //nologo "%~dp0coffee.wsf" %*
|
93
tools/coffee/coffee.wsf
Normal file
93
tools/coffee/coffee.wsf
Normal file
|
@ -0,0 +1,93 @@
|
||||||
|
<job>
|
||||||
|
<!-- https://github.com/jashkenas/coffee-script/raw/master/extras/coffee-script.js -->
|
||||||
|
<script src="coffee-script.js" language="JScript" />
|
||||||
|
<script language="JScript">
|
||||||
|
(function() {
    // Entry point: collect command-line arguments (input path, output path).
    var args = [];
    for (var i = 0; i < WScript.Arguments.Length; i++) {
        args.push(WScript.Arguments.Item(i));
    }

    // FileSystemObject: http://msdn.microsoft.com/en-us/library/bkx696eh.aspx
    var fso = new ActiveXObject("Scripting.FileSystemObject");

    var isfolder = (args[0] && fso.folderExists(args[0]));

    if (isfolder) {
        // Batch mode: compile every *.coffee file directly inside the folder
        // (NOTE(review): lastIndexOf only checks the extension appears somewhere
        // in the path, not that it is the suffix).
        f = fso.getFolder(args[0]);
        e = new Enumerator(f.files);
        for (; !e.atEnd(); e.moveNext()) {
            if (e.item().path.toLowerCase().lastIndexOf('.coffee') != -1) {
                convert(e.item(), args[1]);
            }
        }
    }
    else {
        // Single-file mode (or stdin when args[0] is absent)
        convert(args[0], args[1])
    }

})();
|
||||||
|
|
||||||
|
|
||||||
|
// Compile one CoffeeScript source to JavaScript.
//   input  - path of a .coffee file, or falsy to read from stdin
//   output - path of a .js file or an existing folder; falsy writes to stdout
function convert(input, output) {

    var fso = new ActiveXObject("Scripting.FileSystemObject");

    if (output) {
        // if output specifies a folder name, output filename is same as input filename with .coffee extension
        if (fso.folderExists(output)) {
            // Use an anchored, case-insensitive regex: the original string
            // pattern '\.coffee' drops the backslash and replaced the first
            // literal ".coffee" anywhere in the name, not the extension.
            output = output + '\\' + fso.getFile(input).name.replace(/\.coffee$/i, '.js')
        }
    }

    var coffee;
    if (!input) {
        // Read all input data from STDIN
        var chunks = [];
        while (!WScript.StdIn.AtEndOfStream)
            chunks.push(WScript.StdIn.ReadAll());
        coffee = chunks.join('');
    }
    else {
        coffee = readUtf8(input);
    }

    try {
        var js = CoffeeScript.compile(coffee);

        if (!output) {
            WScript.StdOut.Write(js);
        }
        else {
            writeUtf8(output, js);
        }
    }
    catch (err) {
        // Report compile errors on stderr and signal failure via exit code 1
        WScript.StdErr.WriteLine(err.message);
        WScript.Quit(1);
    }
}
|
||||||
|
|
||||||
|
// Read an entire file as UTF-8 text via an ADODB.Stream
// (WSH has no native UTF-8 file I/O). Call order matters:
// Open -> configure -> load -> read -> close.
function readUtf8(filename) {
    var stream = new ActiveXObject("ADODB.Stream");
    stream.Open();
    stream.Type = 2; // Text
    stream.Charset = 'utf-8';
    stream.LoadFromFile(filename);
    var text = stream.ReadText();
    stream.Close();
    return text;
}
|
||||||
|
|
||||||
|
// Write text to a file as UTF-8 via an ADODB.Stream.
// SaveToFile's second argument 2 = adSaveCreateOverWrite (replace existing file).
function writeUtf8(filename, text) {
    var stream = new ActiveXObject("ADODB.Stream");
    stream.Open();
    stream.Type = 2; // Text
    stream.Charset = 'utf-8';
    stream.WriteText(text);
    stream.SaveToFile(filename, 2);
    stream.Close();
}
|
||||||
|
</script>
|
||||||
|
</job>
|
585
tools/upnpc/Changelog.txt
Normal file
585
tools/upnpc/Changelog.txt
Normal file
|
@ -0,0 +1,585 @@
|
||||||
|
$Id: Changelog.txt,v 1.193 2014/02/05 17:26:45 nanard Exp $
|
||||||
|
miniUPnP client Changelog.
|
||||||
|
|
||||||
|
2014/02/05:
|
||||||
|
handle EINPROGRESS after connect()
|
||||||
|
|
||||||
|
2014/02/03:
|
||||||
|
minixml now handle XML comments
|
||||||
|
|
||||||
|
VERSION 1.9 : released 2014/01/31
|
||||||
|
|
||||||
|
2014/01/31:
|
||||||
|
added argument remoteHost to UPNP_GetSpecificPortMappingEntry()
|
||||||
|
increment API_VERSION to 10
|
||||||
|
|
||||||
|
2013/12/09:
|
||||||
|
--help and -h arguments in upnpc.c
|
||||||
|
|
||||||
|
2013/10/07:
|
||||||
|
fixed potential buffer overrun in miniwget.c
|
||||||
|
Modified UPNP_GetValidIGD() to check for ExternalIpAddress
|
||||||
|
|
||||||
|
2013/08/01:
|
||||||
|
define MAXHOSTNAMELEN if not already done
|
||||||
|
|
||||||
|
2013/06/06:
|
||||||
|
update upnpreplyparse to allow larger values (128 chars instead of 64)
|
||||||
|
|
||||||
|
2013/05/14:
|
||||||
|
Update upnpreplyparse to take into account "empty" elements
|
||||||
|
validate upnpreplyparse.c code with "make check"
|
||||||
|
|
||||||
|
2013/05/03:
|
||||||
|
Fix Solaris build thanks to Maciej Małecki
|
||||||
|
|
||||||
|
2013/04/27:
|
||||||
|
Fix testminiwget.sh for BSD
|
||||||
|
|
||||||
|
2013/03/23:
|
||||||
|
Fixed Makefile for *BSD
|
||||||
|
|
||||||
|
2013/03/11:
|
||||||
|
Update Makefile to use JNAerator version 0.11
|
||||||
|
|
||||||
|
2013/02/11:
|
||||||
|
Fix testminiwget.sh for use with dash
|
||||||
|
Use $(DESTDIR) in Makefile
|
||||||
|
|
||||||
|
VERSION 1.8 : released 2013/02/06
|
||||||
|
|
||||||
|
2012/10/16:
|
||||||
|
fix testminiwget with no IPv6 support
|
||||||
|
|
||||||
|
2012/09/27:
|
||||||
|
Rename all include guards to not clash with C99
|
||||||
|
(7.1.3 Reserved identifiers).
|
||||||
|
|
||||||
|
2012/08/30:
|
||||||
|
Added -e option to upnpc program (set description for port mappings)
|
||||||
|
|
||||||
|
2012/08/29:
|
||||||
|
Python 3 support (thanks to Christopher Foo)
|
||||||
|
|
||||||
|
2012/08/11:
|
||||||
|
Fix a memory leak in UPNP_GetValidIGD()
|
||||||
|
Try to handle scope id in link local IPv6 URL under MS Windows
|
||||||
|
|
||||||
|
2012/07/20:
|
||||||
|
Disable HAS_IP_MREQN on DragonFly BSD
|
||||||
|
|
||||||
|
2012/06/28:
|
||||||
|
GetUPNPUrls() now inserts scope into link-local IPv6 addresses
|
||||||
|
|
||||||
|
2012/06/23:
|
||||||
|
More error return checks in upnpc.c
|
||||||
|
#define MINIUPNPC_GET_SRC_ADDR enables receivedata() to get scope_id
|
||||||
|
parseURL() now parses IPv6 addresses scope
|
||||||
|
new parameter for miniwget() : IPv6 address scope
|
||||||
|
increment API_VERSION to 9
|
||||||
|
|
||||||
|
2012/06/20:
|
||||||
|
fixed CMakeLists.txt
|
||||||
|
|
||||||
|
2012/05/29
|
||||||
|
Improvements in testminiwget.sh
|
||||||
|
|
||||||
|
VERSION 1.7 : released 2012/05/24
|
||||||
|
|
||||||
|
2012/05/01:
|
||||||
|
Cleanup settings of CFLAGS in Makefile
|
||||||
|
Fix signed/unsigned integer comparisons
|
||||||
|
|
||||||
|
2012/04/20:
|
||||||
|
Allow to specify protocol with TCP or UDP for -A option
|
||||||
|
|
||||||
|
2012/04/09:
|
||||||
|
Only try to fetch XML description once in UPNP_GetValidIGD()
|
||||||
|
Added -ansi flag to compilation, and fixed C++ comments to ANSI C comments.
|
||||||
|
|
||||||
|
2012/04/05:
|
||||||
|
minor improvements to minihttptestserver.c
|
||||||
|
|
||||||
|
2012/03/15:
|
||||||
|
upnperrors.c returns valid error string for unrecognized error codes
|
||||||
|
|
||||||
|
2012/03/08:
|
||||||
|
make minihttptestserver listen on loopback interface instead of 0.0.0.0
|
||||||
|
|
||||||
|
2012/01/25:
|
||||||
|
Maven installation thanks to Alexey Kuznetsov
|
||||||
|
|
||||||
|
2012/01/21:
|
||||||
|
Replace WIN32 macro by _WIN32
|
||||||
|
|
||||||
|
2012/01/19:
|
||||||
|
Fixes in java wrappers thanks to Alexey Kuznetsov :
|
||||||
|
https://github.com/axet/miniupnp/tree/fix-javatest/miniupnpc
|
||||||
|
Make and install .deb packages (python) thanks to Alexey Kuznetsov :
|
||||||
|
https://github.com/axet/miniupnp/tree/feature-debbuild/miniupnpc
|
||||||
|
|
||||||
|
2012/01/07:
|
||||||
|
The multicast interface can now be specified by name with IPv4.
|
||||||
|
|
||||||
|
2012/01/02:
|
||||||
|
Install man page
|
||||||
|
|
||||||
|
2011/11/25:
|
||||||
|
added header to Port Mappings list in upnpc.c
|
||||||
|
|
||||||
|
2011/10/09:
|
||||||
|
Makefile : make clean now removes jnaerator generated files.
|
||||||
|
MINIUPNPC_VERSION in miniupnpc.h (updated by make)
|
||||||
|
|
||||||
|
2011/09/12:
|
||||||
|
added rootdescURL to UPNPUrls structure.
|
||||||
|
|
||||||
|
VERSION 1.6 : released 2011/07/25
|
||||||
|
|
||||||
|
2011/07/25:
|
||||||
|
Update doc for version 1.6 release
|
||||||
|
|
||||||
|
2011/06/18:
|
||||||
|
Fix for windows in miniwget.c
|
||||||
|
|
||||||
|
2011/06/04:
|
||||||
|
display remote host in port mapping listing
|
||||||
|
|
||||||
|
2011/06/03:
|
||||||
|
Fix in make install : there were missing headers
|
||||||
|
|
||||||
|
2011/05/26:
|
||||||
|
Fix the socket leak in miniwget thanks to Richard Marsh.
|
||||||
|
Permit to add leaseduration in -a command. Display lease duration.
|
||||||
|
|
||||||
|
2011/05/15:
|
||||||
|
Try both LinkLocal and SiteLocal multicast address for SSDP in IPv6
|
||||||
|
|
||||||
|
2011/05/09:
|
||||||
|
add a test in testminiwget.sh.
|
||||||
|
more error checking in miniwget.c
|
||||||
|
|
||||||
|
2011/05/06:
|
||||||
|
Adding some tool to test and validate miniwget.c
|
||||||
|
simplified and debugged miniwget.c
|
||||||
|
|
||||||
|
2011/04/11:
|
||||||
|
moving ReceiveData() to a receivedata.c file.
|
||||||
|
parsing presentation url
|
||||||
|
adding IGD v2 WANIPv6FirewallControl commands
|
||||||
|
|
||||||
|
2011/04/10:
|
||||||
|
update of miniupnpcmodule.c
|
||||||
|
comments in miniwget.c, update in testminiwget
|
||||||
|
Adding errors codes from IGD v2
|
||||||
|
new functions in upnpc.c for IGD v2
|
||||||
|
|
||||||
|
2011/04/09:
|
||||||
|
Support for litteral ip v6 address in miniwget
|
||||||
|
|
||||||
|
2011/04/08:
|
||||||
|
Adding support for urn:schemas-upnp-org:service:WANIPv6FirewallControl:1
|
||||||
|
Updating APIVERSION
|
||||||
|
Supporting IPV6 in upnpDiscover()
|
||||||
|
Adding a -6 option to upnpc command line tool
|
||||||
|
|
||||||
|
2011/03/18:
|
||||||
|
miniwget/parseURL() : return an error when url param is null.
|
||||||
|
fixing GetListOfPortMappings()
|
||||||
|
|
||||||
|
2011/03/14:
|
||||||
|
upnpDiscover() now reporting an error code.
|
||||||
|
improvements in comments.
|
||||||
|
|
||||||
|
2011/03/11:
|
||||||
|
adding miniupnpcstrings.h.cmake and CMakeLists.txt files.
|
||||||
|
|
||||||
|
2011/02/15:
|
||||||
|
Implementation of GetListOfPortMappings()
|
||||||
|
|
||||||
|
2011/02/07:
|
||||||
|
updates to minixml to support character data starting with spaces
|
||||||
|
minixml now support CDATA
|
||||||
|
upnpreplyparse treats <NewPortListing> specifically
|
||||||
|
change in simpleUPnPcommand to return the buffer (simplification)
|
||||||
|
|
||||||
|
2011/02/06:
|
||||||
|
Added leaseDuration argument to AddPortMapping()
|
||||||
|
Starting to implement GetListOfPortMappings()
|
||||||
|
|
||||||
|
2011/01/11:
|
||||||
|
updating wingenminiupnpcstrings.c
|
||||||
|
|
||||||
|
2011/01/04:
|
||||||
|
improving updateminiupnpcstrings.sh
|
||||||
|
|
||||||
|
VERSION 1.5 : released 2011/01/01
|
||||||
|
|
||||||
|
2010/12/21:
|
||||||
|
use NO_GETADDRINFO macro to disable the use of getaddrinfo/freeaddrinfo
|
||||||
|
|
||||||
|
2010/12/11:
|
||||||
|
Improvements on getHTTPResponse() code.
|
||||||
|
|
||||||
|
2010/12/09:
|
||||||
|
new code for miniwget that handle Chunked transfer encoding
|
||||||
|
using getHTTPResponse() in SOAP call code
|
||||||
|
Adding MANIFEST.in for 'python setup.py bdist_rpm'
|
||||||
|
|
||||||
|
2010/11/25:
|
||||||
|
changes to minissdpc.c to compile under Win32.
|
||||||
|
see http://miniupnp.tuxfamily.org/forum/viewtopic.php?t=729
|
||||||
|
|
||||||
|
2010/09/17:
|
||||||
|
Various improvement to Makefile from Michał Górny
|
||||||
|
|
||||||
|
2010/08/05:
|
||||||
|
Adding the script "external-ip.sh" from Reuben Hawkins
|
||||||
|
|
||||||
|
2010/06/09:
|
||||||
|
update to python module to match modification made on 2010/04/05
|
||||||
|
update to Java test code to match modification made on 2010/04/05
|
||||||
|
all UPNP_* function now return an error if the SOAP request failed
|
||||||
|
at HTTP level.
|
||||||
|
|
||||||
|
2010/04/17:
|
||||||
|
Using GetBestRoute() under win32 in order to find the
|
||||||
|
right interface to use.
|
||||||
|
|
||||||
|
2010/04/12:
|
||||||
|
Retrying with HTTP/1.1 if HTTP/1.0 failed. see
|
||||||
|
http://miniupnp.tuxfamily.org/forum/viewtopic.php?p=1703
|
||||||
|
|
||||||
|
2010/04/07:
|
||||||
|
avoid returning duplicates in upnpDiscover()
|
||||||
|
|
||||||
|
2010/04/05:
|
||||||
|
Create a connecthostport.h/.c with connecthostport() function
|
||||||
|
and use it in miniwget and miniupnpc.
|
||||||
|
Use getnameinfo() instead of inet_ntop or inet_ntoa
|
||||||
|
Work to make miniupnpc IPV6 compatible...
|
||||||
|
Add java test code.
|
||||||
|
Big changes in order to support device having both WANIPConnection
|
||||||
|
and WANPPPConnection.
|
||||||
|
|
||||||
|
2010/04/04:
|
||||||
|
Use getaddrinfo() instead of gethostbyname() in miniwget.
|
||||||
|
|
||||||
|
2010/01/06:
|
||||||
|
#define _DARWIN_C_SOURCE for Mac OS X
|
||||||
|
|
||||||
|
2009/12/19:
|
||||||
|
Improve MinGW32 build
|
||||||
|
|
||||||
|
2009/12/11:
|
||||||
|
adding a MSVC9 project to build the static library and executable
|
||||||
|
|
||||||
|
2009/12/10:
|
||||||
|
Fixing some compilation stuff for Windows/MinGW
|
||||||
|
|
||||||
|
2009/12/07:
|
||||||
|
adaptations in Makefile and updateminiupnpcstring.sh for AmigaOS
|
||||||
|
some fixes for Windows when using virtual ethernet adapters (it is the
|
||||||
|
case with VMWare installed).
|
||||||
|
|
||||||
|
2009/12/04:
|
||||||
|
some fixes for AmigaOS compilation
|
||||||
|
Changed HTTP version to HTTP/1.0 for Soap too (to prevent chunked
|
||||||
|
transfer encoding)
|
||||||
|
|
||||||
|
2009/12/03:
|
||||||
|
updating printIDG and testigddescparse.c for debug.
|
||||||
|
modifications to compile under AmigaOS
|
||||||
|
adding a testminiwget program
|
||||||
|
Changed miniwget to advertise itself as HTTP/1.0 to prevent chunked
|
||||||
|
transfer encoding
|
||||||
|
|
||||||
|
2009/11/26:
|
||||||
|
fixing updateminiupnpcstrings.sh to take into account
|
||||||
|
which command that does not return an error code.
|
||||||
|
|
||||||
|
VERSION 1.4 : released 2009/10/30
|
||||||
|
|
||||||
|
2009/10/16:
|
||||||
|
using Py_BEGIN_ALLOW_THREADS and Py_END_ALLOW_THREADS in python module.
|
||||||
|
|
||||||
|
2009/10/10:
|
||||||
|
Some fixes for compilation under Solaris
|
||||||
|
compilation fixes : http://miniupnp.tuxfamily.org/forum/viewtopic.php?p=1464
|
||||||
|
|
||||||
|
2009/09/21:
|
||||||
|
fixing the code to ignore EINTR during connect() calls.
|
||||||
|
|
||||||
|
2009/08/07:
|
||||||
|
Set socket timeout for connect()
|
||||||
|
Some cleanup in miniwget.c
|
||||||
|
|
||||||
|
2009/08/04:
|
||||||
|
remove multiple redirections with -d in upnpc.c
|
||||||
|
Print textual error code in upnpc.c
|
||||||
|
Ignore EINTR during the connect() and poll() calls.
|
||||||
|
|
||||||
|
2009/07/29:
|
||||||
|
fix in updateminiupnpcstrings.sh if OS name contains "/"
|
||||||
|
Sending a correct value for MX: field in SSDP request
|
||||||
|
|
||||||
|
2009/07/20:
|
||||||
|
Change the Makefile to compile under Mac OS X
|
||||||
|
Fixed a stackoverflow in getDevicesFromMiniSSDPD()
|
||||||
|
|
||||||
|
2009/07/09:
|
||||||
|
Compile under Haiku
|
||||||
|
generate miniupnpcstrings.h.in from miniupnpcstrings.h
|
||||||
|
|
||||||
|
2009/06/04:
|
||||||
|
patching to compile under CygWin and cross compile for minGW
|
||||||
|
|
||||||
|
VERSION 1.3 :
|
||||||
|
|
||||||
|
2009/04/17:
|
||||||
|
updating python module
|
||||||
|
Use strtoull() when using C99
|
||||||
|
|
||||||
|
2009/02/28:
|
||||||
|
Fixed miniwget.c for compiling under sun
|
||||||
|
|
||||||
|
2008/12/18:
|
||||||
|
cleanup in Makefile (thanks to Paul de Weerd)
|
||||||
|
minissdpc.c : win32 compatibility
|
||||||
|
miniupnpc.c : changed xmlns prefix from 'm' to 'u'
|
||||||
|
Removed NDEBUG (using DEBUG)
|
||||||
|
|
||||||
|
2008/10/14:
|
||||||
|
Added the ExternalHost argument to DeletePortMapping()
|
||||||
|
|
||||||
|
2008/10/11:
|
||||||
|
Added the ExternalHost argument to AddPortMapping()
|
||||||
|
Put a correct User-Agent: header in HTTP requests.
|
||||||
|
|
||||||
|
VERSION 1.2 :
|
||||||
|
|
||||||
|
2008/10/07:
|
||||||
|
Update docs
|
||||||
|
|
||||||
|
2008/09/25:
|
||||||
|
Integrated sameport patch from Dario Meloni : Added a "sameport"
|
||||||
|
argument to upnpDiscover().
|
||||||
|
|
||||||
|
2008/07/18:
|
||||||
|
small modif to make Clang happy :)
|
||||||
|
|
||||||
|
2008/07/17:
|
||||||
|
#define SOAPPREFIX "s" in miniupnpc.c in order to remove SOAP-ENV...
|
||||||
|
|
||||||
|
2008/07/14:
|
||||||
|
include declspec.h in installation (to /usr/include/miniupnpc)
|
||||||
|
|
||||||
|
VERSION 1.1 :
|
||||||
|
|
||||||
|
2008/07/04:
|
||||||
|
standard options for install/ln instead of gnu-specific stuff.
|
||||||
|
|
||||||
|
2008/07/03:
|
||||||
|
now builds a .dll and .lib with win32. (mingw32)
|
||||||
|
|
||||||
|
2008/04/28:
|
||||||
|
make install now install the binary of the upnpc tool
|
||||||
|
|
||||||
|
2008/04/27:
|
||||||
|
added testupnpigd.py
|
||||||
|
added error strings for miniupnpc "internal" errors
|
||||||
|
improved python module error/exception reporting.
|
||||||
|
|
||||||
|
2008/04/23:
|
||||||
|
Completely rewrite igd_desc_parse.c in order to be compatible with
|
||||||
|
Linksys WAG200G
|
||||||
|
Added testigddescparse
|
||||||
|
updated python module
|
||||||
|
|
||||||
|
VERSION 1.0 :
|
||||||
|
|
||||||
|
2008/02/21:
|
||||||
|
put some #ifdef DEBUG around DisplayNameValueList()
|
||||||
|
|
||||||
|
2008/02/18:
|
||||||
|
Improved error reporting in upnpcommands.c
|
||||||
|
UPNP_GetStatusInfo() returns LastConnectionError
|
||||||
|
|
||||||
|
2008/02/16:
|
||||||
|
better error handling in minisoap.c
|
||||||
|
improving display of "valid IGD found" in upnpc.c
|
||||||
|
|
||||||
|
2008/02/03:
|
||||||
|
Fixing UPNP_GetValidIGD()
|
||||||
|
improved make install :)
|
||||||
|
|
||||||
|
2007/12/22:
|
||||||
|
Adding upnperrors.c/h to provide a strupnperror() function
|
||||||
|
used to translate UPnP error codes to string.
|
||||||
|
|
||||||
|
2007/12/19:
|
||||||
|
Fixing getDevicesFromMiniSSDPD()
|
||||||
|
improved error reporting of UPnP functions
|
||||||
|
|
||||||
|
2007/12/18:
|
||||||
|
It is now possible to specify a different location for MiniSSDPd socket.
|
||||||
|
working with MiniSSDPd is now more efficient.
|
||||||
|
python module improved.
|
||||||
|
|
||||||
|
2007/12/16:
|
||||||
|
improving error reporting
|
||||||
|
|
||||||
|
2007/12/13:
|
||||||
|
Try to improve compatibility by using HTTP/1.0 instead of 1.1 and
|
||||||
|
XML a bit different for SOAP.
|
||||||
|
|
||||||
|
2007/11/25:
|
||||||
|
fixed select() call for linux
|
||||||
|
|
||||||
|
2007/11/15:
|
||||||
|
Added -fPIC to CFLAG for better shared library code.
|
||||||
|
|
||||||
|
2007/11/02:
|
||||||
|
Fixed a potential socket leak in miniwget2()
|
||||||
|
|
||||||
|
2007/10/16:
|
||||||
|
added a parameter to upnpDiscover() in order to allow the use of another
|
||||||
|
interface than the default multicast interface.
|
||||||
|
|
||||||
|
2007/10/12:
|
||||||
|
Fixed the creation of symbolic link in Makefile
|
||||||
|
|
||||||
|
2007/10/08:
|
||||||
|
Added man page
|
||||||
|
|
||||||
|
2007/10/02:
|
||||||
|
fixed memory bug in GetUPNPUrls()
|
||||||
|
|
||||||
|
2007/10/01:
|
||||||
|
fixes in the Makefile
|
||||||
|
Added UPNP_GetIGDFromUrl() and adapted the sample program accordingly.
|
||||||
|
Added SONAME in the shared library to please debian :)
|
||||||
|
fixed MS Windows compilation (minissdpd is not available under MS Windows).
|
||||||
|
|
||||||
|
2007/09/25:
|
||||||
|
small change to Makefile to be able to install in a different location
|
||||||
|
(default is /usr)
|
||||||
|
|
||||||
|
2007/09/24:
|
||||||
|
now compiling both shared and static library
|
||||||
|
|
||||||
|
2007/09/19:
|
||||||
|
Cosmetic changes on upnpc.c
|
||||||
|
|
||||||
|
2007/09/02:
|
||||||
|
adapting to new miniSSDPd (release version ?)
|
||||||
|
|
||||||
|
2007/08/31:
|
||||||
|
Usage of miniSSDPd to skip discovery process.
|
||||||
|
|
||||||
|
2007/08/27:
|
||||||
|
fixed python module to allow compilation with Python older than Python 2.4
|
||||||
|
|
||||||
|
2007/06/12:
|
||||||
|
Added a python module.
|
||||||
|
|
||||||
|
2007/05/19:
|
||||||
|
Fixed compilation under MinGW
|
||||||
|
|
||||||
|
2007/05/15:
|
||||||
|
fixed a memory leak in AddPortMapping()
|
||||||
|
Added testupnpreplyparse executable to check the parsing of
|
||||||
|
upnp soap messages
|
||||||
|
minixml now ignore namespace prefixes.
|
||||||
|
|
||||||
|
2007/04/26:
|
||||||
|
upnpc now displays external ip address with -s or -l
|
||||||
|
|
||||||
|
2007/04/11:
|
||||||
|
changed MINIUPNPC_URL_MAXSIZE to 128 to accommodate the "BT Voyager 210"
|
||||||
|
|
||||||
|
2007/03/19:
|
||||||
|
cleanup in miniwget.c
|
||||||
|
|
||||||
|
2007/03/01:
|
||||||
|
Small typo fix...
|
||||||
|
|
||||||
|
2007/01/30:
|
||||||
|
Now parsing the HTTP header from SOAP responses in order to
|
||||||
|
get content-length value.
|
||||||
|
|
||||||
|
2007/01/29:
|
||||||
|
Fixed the Soap Query to speedup the HTTP request.
|
||||||
|
added some Win32 DLL stuff...
|
||||||
|
|
||||||
|
2007/01/27:
|
||||||
|
Fixed some WIN32 compatibility issues
|
||||||
|
|
||||||
|
2006/12/14:
|
||||||
|
Added UPNPIGD_IsConnected() function in miniupnp.c/.h
|
||||||
|
Added UPNP_GetValidIGD() in miniupnp.c/.h
|
||||||
|
cleaned upnpc.c main(). now using UPNP_GetValidIGD()
|
||||||
|
|
||||||
|
2006/12/07:
|
||||||
|
Version 1.0-RC1 released
|
||||||
|
|
||||||
|
2006/12/03:
|
||||||
|
Minor changes to compile under SunOS/Solaris
|
||||||
|
|
||||||
|
2006/11/30:
|
||||||
|
made a minixml parser validator program
|
||||||
|
updated minixml to handle attributes correctly
|
||||||
|
|
||||||
|
2006/11/22:
|
||||||
|
Added a -r option to the upnpc sample thanks to Alexander Hubmann.
|
||||||
|
|
||||||
|
2006/11/19:
|
||||||
|
Cleanup code to make it more ANSI C compliant
|
||||||
|
|
||||||
|
2006/11/10:
|
||||||
|
detect and display local lan address.
|
||||||
|
|
||||||
|
2006/11/04:
|
||||||
|
Packets and Bytes Sent/Received are now unsigned int.
|
||||||
|
|
||||||
|
2006/11/01:
|
||||||
|
Bug fix thanks to Giuseppe D'Angelo
|
||||||
|
|
||||||
|
2006/10/31:
|
||||||
|
C++ compatibility for .h files.
|
||||||
|
Added a way to get ip Address on the LAN used to reach the IGD.
|
||||||
|
|
||||||
|
2006/10/25:
|
||||||
|
Added M-SEARCH to the services in the discovery process.
|
||||||
|
|
||||||
|
2006/10/22:
|
||||||
|
updated the Makefile to use makedepend, added a "make install"
|
||||||
|
update Makefile
|
||||||
|
|
||||||
|
2006/10/20:
|
||||||
|
fixing the description url parsing thanks to patch sent by
|
||||||
|
Wayne Dawe.
|
||||||
|
Fixed/translated some comments.
|
||||||
|
Implemented a better discover process, first looking
|
||||||
|
for IGD then for root devices (as some devices only reply to
|
||||||
|
M-SEARCH for root devices).
|
||||||
|
|
||||||
|
2006/09/02:
|
||||||
|
added freeUPNPDevlist() function.
|
||||||
|
|
||||||
|
2006/08/04:
|
||||||
|
More command line arguments checking
|
||||||
|
|
||||||
|
2006/08/01:
|
||||||
|
Added the .bat file to compile under Win32 with minGW32
|
||||||
|
|
||||||
|
2006/07/31:
|
||||||
|
Fixed the rootdesc parser (igd_desc_parse.c)
|
||||||
|
|
||||||
|
2006/07/20:
|
||||||
|
parseMSEARCHReply() is now returning the ST: line as well
|
||||||
|
starting changes to detect several UPnP devices on the network
|
||||||
|
|
||||||
|
2006/07/19:
|
||||||
|
using GetCommonLinkProperties to get down/upload bitrate
|
||||||
|
|
27
tools/upnpc/LICENSE
Normal file
27
tools/upnpc/LICENSE
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
MiniUPnPc
|
||||||
|
Copyright (c) 2005-2011, Thomas BERNARD
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer in the documentation
|
||||||
|
and/or other materials provided with the distribution.
|
||||||
|
* The name of the author may not be used to endorse or promote products
|
||||||
|
derived from this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
||||||
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
66
tools/upnpc/README
Normal file
66
tools/upnpc/README
Normal file
|
@ -0,0 +1,66 @@
|
||||||
|
Project: miniupnp
|
||||||
|
Project web page: http://miniupnp.free.fr/ or http://miniupnp.tuxfamily.org/
|
||||||
|
github: https://github.com/miniupnp/miniupnp
|
||||||
|
freecode: http://freecode.com/projects/miniupnp
|
||||||
|
Author: Thomas Bernard
|
||||||
|
Copyright (c) 2005-2012 Thomas Bernard
|
||||||
|
This software is subject to the conditions detailed in the
|
||||||
|
LICENSE file provided within this distribution.
|
||||||
|
|
||||||
|
|
||||||
|
For the comfort of Win32 users, bsdqueue.h is included in the distribution.
|
||||||
|
Its licence is included in the header of the file.
|
||||||
|
bsdqueue.h is a copy of the sys/queue.h of an OpenBSD system.
|
||||||
|
|
||||||
|
|
||||||
|
* miniUPnP Client - miniUPnPc *
|
||||||
|
|
||||||
|
To compile, simply run 'gmake' (could be 'make' on your system).
|
||||||
|
Under win32, to compile with MinGW, type "mingw32make.bat".
|
||||||
|
MS Visual C solution and project files are supplied in the msvc/ subdirectory.
|
||||||
|
|
||||||
|
The compilation is known to work under linux, FreeBSD,
|
||||||
|
OpenBSD, MacOS X, AmigaOS and cygwin.
|
||||||
|
The official AmigaOS4.1 SDK was used for AmigaOS4 and GeekGadgets for AmigaOS3.
|
||||||
|
upx (http://upx.sourceforge.net) is used to compress the win32 .exe files.
|
||||||
|
|
||||||
|
To install the library and headers on the system use :
|
||||||
|
> su
|
||||||
|
> make install
|
||||||
|
> exit
|
||||||
|
|
||||||
|
alternatively, to install into a specific location, use :
|
||||||
|
> INSTALLPREFIX=/usr/local make install
|
||||||
|
|
||||||
|
upnpc.c is a sample client using the libminiupnpc.
|
||||||
|
To use the libminiupnpc in your application, link it with
|
||||||
|
libminiupnpc.a (or .so) and use the following functions found in miniupnpc.h,
|
||||||
|
upnpcommands.h and miniwget.h :
|
||||||
|
- upnpDiscover()
|
||||||
|
- miniwget()
|
||||||
|
- parserootdesc()
|
||||||
|
- GetUPNPUrls()
|
||||||
|
- UPNP_* (calling UPNP methods)
|
||||||
|
|
||||||
|
Note : use #include <miniupnpc/miniupnpc.h> etc... for the includes
|
||||||
|
and -lminiupnpc for the link
|
||||||
|
|
||||||
|
Discovery process is speeded up when MiniSSDPd is running on the machine.
|
||||||
|
|
||||||
|
|
||||||
|
* Python module *
|
||||||
|
|
||||||
|
you can build a python module with 'make pythonmodule'
|
||||||
|
and install it with 'make installpythonmodule'.
|
||||||
|
setup.py (and setupmingw32.py) are included in the distribution.
|
||||||
|
|
||||||
|
|
||||||
|
Feel free to contact me if you have any problem :
|
||||||
|
e-mail : miniupnp@free.fr
|
||||||
|
|
||||||
|
If you are using libminiupnpc in your application, please
|
||||||
|
send me an email !
|
||||||
|
|
||||||
|
For any question, you can use the web forum :
|
||||||
|
http://miniupnp.tuxfamily.org/forum/
|
||||||
|
|
BIN
tools/upnpc/libminiupnpc.a
Normal file
BIN
tools/upnpc/libminiupnpc.a
Normal file
Binary file not shown.
43
tools/upnpc/miniupnpc.def
Normal file
43
tools/upnpc/miniupnpc.def
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
LIBRARY
|
||||||
|
; miniupnpc library
|
||||||
|
miniupnpc
|
||||||
|
|
||||||
|
EXPORTS
|
||||||
|
; miniupnpc
|
||||||
|
upnpDiscover
|
||||||
|
freeUPNPDevlist
|
||||||
|
parserootdesc
|
||||||
|
UPNP_GetValidIGD
|
||||||
|
UPNP_GetIGDFromUrl
|
||||||
|
GetUPNPUrls
|
||||||
|
FreeUPNPUrls
|
||||||
|
; miniwget
|
||||||
|
miniwget
|
||||||
|
miniwget_getaddr
|
||||||
|
; upnpcommands
|
||||||
|
UPNP_GetTotalBytesSent
|
||||||
|
UPNP_GetTotalBytesReceived
|
||||||
|
UPNP_GetTotalPacketsSent
|
||||||
|
UPNP_GetTotalPacketsReceived
|
||||||
|
UPNP_GetStatusInfo
|
||||||
|
UPNP_GetConnectionTypeInfo
|
||||||
|
UPNP_GetExternalIPAddress
|
||||||
|
UPNP_GetLinkLayerMaxBitRates
|
||||||
|
UPNP_AddPortMapping
|
||||||
|
UPNP_DeletePortMapping
|
||||||
|
UPNP_GetPortMappingNumberOfEntries
|
||||||
|
UPNP_GetSpecificPortMappingEntry
|
||||||
|
UPNP_GetGenericPortMappingEntry
|
||||||
|
UPNP_GetListOfPortMappings
|
||||||
|
UPNP_AddPinhole
|
||||||
|
UPNP_CheckPinholeWorking
|
||||||
|
UPNP_UpdatePinhole
|
||||||
|
UPNP_GetPinholePackets
|
||||||
|
UPNP_DeletePinhole
|
||||||
|
UPNP_GetFirewallStatus
|
||||||
|
UPNP_GetOutboundPinholeTimeout
|
||||||
|
; upnperrors
|
||||||
|
strupnperror
|
||||||
|
; portlistingparse
|
||||||
|
ParsePortListing
|
||||||
|
FreePortListing
|
BIN
tools/upnpc/miniupnpc.dll
Normal file
BIN
tools/upnpc/miniupnpc.dll
Normal file
Binary file not shown.
BIN
tools/upnpc/miniupnpc.lib
Normal file
BIN
tools/upnpc/miniupnpc.lib
Normal file
Binary file not shown.
BIN
tools/upnpc/upnpc-shared.exe
Normal file
BIN
tools/upnpc/upnpc-shared.exe
Normal file
Binary file not shown.
BIN
tools/upnpc/upnpc-static.exe
Normal file
BIN
tools/upnpc/upnpc-static.exe
Normal file
Binary file not shown.
7
zeronet.py
Normal file
7
zeronet.py
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
from src import main
|
||||||
|
|
||||||
|
action_func = getattr(main, main.config.action)
|
||||||
|
action_kwargs = main.config.getActionArguments()
|
||||||
|
|
||||||
|
action_func(**action_kwargs)
|
||||||
|
|
Loading…
Reference in a new issue