rev280, The whole project reformatted to PEP8, UiRequest getPosted to query posted variables

This commit is contained in:
HelloZeroNet 2015-07-12 20:36:46 +02:00
parent a5741704e4
commit b5ecb62bc6
49 changed files with 5704 additions and 5205 deletions

View file

@ -1,60 +1,57 @@
# Based on http://stackoverflow.com/a/2022629
class Event(list):
    """A list of callbacks that can be fired like a function.

    Calling the event invokes every registered callback with the given
    arguments. Callbacks registered via once() are removed after their
    first invocation; callbacks added with plain list.append() stay
    registered and fire every time.

    NOTE(review): the original span contained each method twice (diff
    residue of the PEP8 reformat); the duplicate definitions were
    byte-identical, so keeping a single copy preserves behavior.
    """

    def __call__(self, *args, **kwargs):
        # Iterate over a snapshot so one-shot handlers can be removed
        # from the live list while we are firing.
        for f in self[:]:
            if hasattr(f, "once") and f in self:
                self.remove(f)
            f(*args, **kwargs)

    def __repr__(self):
        return "Event(%s)" % list.__repr__(self)

    def once(self, func, name=None):
        """Register func to fire only on the next call of the event.

        If name is given, a later once() registration with the same name
        is silently ignored, so a named one-shot is added at most once.
        Returns self so registrations can be chained.
        """
        func.once = True
        func.name = None
        if name:  # Don't add a function with the same name twice
            names = [f.name for f in self if hasattr(f, "once")]
            if name not in names:
                func.name = name
                self.append(func)
        else:
            self.append(func)
        return self
# Benchmark: register 1000 once() callbacks under the same name "once"
# (only the first registration is kept) and fire the event once.
# NOTE(review): diff residue - the pre-reformat (camelCase) and
# post-reformat (snake_case) bodies appear back to back below.
def testBenchmark():
def say(pre, text):
print "%s Say: %s" % (pre, text)
import time
s = time.time()
onChanged = Event()
for i in range(1000):
onChanged.once(lambda pre: say(pre, "once"), "once")
print "Created 1000 once in %.3fs" % (time.time()-s)
onChanged("#1")
def say(pre, text):
print "%s Say: %s" % (pre, text)
import time
s = time.time()
on_changed = Event()
for i in range(1000):
on_changed.once(lambda pre: say(pre, "once"), "once")
print "Created 1000 once in %.3fs" % (time.time() - s)
on_changed("#1")
# Usage demo: anonymous and named one-shot callbacks fire only on the
# first call; a plain append()-ed callback fires on every call.
# NOTE(review): diff residue - old and new bodies appear back to back.
def testUsage():
def say(pre, text):
print "%s Say: %s" % (pre, text)
onChanged = Event()
onChanged.once(lambda pre: say(pre, "once"))
onChanged.once(lambda pre: say(pre, "once"))
onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce")
onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce")
onChanged.append(lambda pre: say(pre, "always"))
onChanged("#1")
onChanged("#2")
onChanged("#3")
def say(pre, text):
print "%s Say: %s" % (pre, text)
on_changed = Event()
on_changed.once(lambda pre: say(pre, "once"))
on_changed.once(lambda pre: say(pre, "once"))
on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce")
on_changed.append(lambda pre: say(pre, "always"))
on_changed("#1")
on_changed("#2")
on_changed("#3")
# Script entry point: run the benchmark.
# NOTE(review): the duplicated call below is diff residue (the old and
# the new version of the same line), not an intentional double run.
if __name__ == "__main__":
testBenchmark()
testBenchmark()

View file

@ -1,19 +1,22 @@
# Re-add sslwrap to Python 2.7.9
# https://github.com/gevent/gevent/issues/477
import inspect
__ssl__ = __import__('ssl')
try:
_ssl = __ssl__._ssl
except AttributeError:
_ssl = __ssl__._ssl2
OldSSLSocket = __ssl__.SSLSocket
class NewSSLSocket(OldSSLSocket):
"""Fix SSLSocket constructor."""
def __init__(
self, sock, keyfile=None, certfile=None, server_side=False, cert_reqs=0,
ssl_version=2, ca_certs=None, do_handshake_on_connect=True,
@ -25,8 +28,8 @@ class NewSSLSocket(OldSSLSocket):
ssl_version=2, ca_certs=None, do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None
)
def new_sslwrap(
sock, server_side=False, keyfile=None, certfile=None,
cert_reqs=__ssl__.CERT_NONE, ssl_version=__ssl__.PROTOCOL_SSLv23,
@ -40,10 +43,10 @@ def new_sslwrap(
context.load_cert_chain(certfile, keyfile)
if ciphers:
context.set_ciphers(ciphers)
caller_self = inspect.currentframe().f_back.f_locals['self']
return context._wrap_socket(sock, server_side=server_side, ssl_sock=caller_self)
if not hasattr(_ssl, 'sslwrap'):
_ssl.sslwrap = new_sslwrap
__ssl__.SSLSocket = NewSSLSocket
__ssl__.SSLSocket = NewSSLSocket

View file

@ -1,11 +1,13 @@
import urllib2, logging
import urllib2
import logging
import GeventSslPatch
from Config import config
# Fetch `url` with a ZeroNet User-Agent header, requesting JSON.
# Returns the open urllib2 response object; the caller reads/closes it.
# NOTE(review): diff residue - the same function appears twice below.
def get(url):
logging.debug("Get %s" % url)
req = urllib2.Request(url)
req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version)
req.add_header('Accept', 'application/json')
return urllib2.urlopen(req)
def get(url):
logging.debug("Get %s" % url)
req = urllib2.Request(url)
req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version)
req.add_header('Accept', 'application/json')
return urllib2.urlopen(req)

View file

@ -1,142 +1,145 @@
import gevent, time
import gevent
import time
# Decorator: deduplicates concurrent calls keyed by (func, args, kwargs).
# blocking=True joins the running greenlet and returns its value;
# blocking=False returns the greenlet itself.
# NOTE(review): the whole section below is diff residue - the old and
# the new (PEP8) bodies of each method/class appear back to back.
class Noparallel(object): # Only allow function running once in same time
def __init__(self,blocking=True):
self.threads = {}
self.blocking = blocking # Blocking: Acts like normal function else thread returned
class Noparallel(object):  # Only allow function running once in same time
def __init__(self, blocking=True):
self.threads = {}
self.blocking = blocking # Blocking: Acts like normal function else thread returned
# Wrap func: reuse the already-running greenlet for identical calls.
def __call__(self, func):
def wrapper(*args, **kwargs):
key = (func, tuple(args), tuple(kwargs.items())) # Unique key for function including parameters
if key in self.threads: # Thread already running (if using blocking mode)
thread = self.threads[key]
if self.blocking:
thread.join() # Blocking until its finished
return thread.value # Return the value
else: # No blocking
if thread.ready(): # Its finished, create a new
thread = gevent.spawn(func, *args, **kwargs)
self.threads[key] = thread
return thread
else: # Still running
return thread
else: # Thread not running
thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread
thread.link(lambda thread: self.cleanup(key, thread))
self.threads[key] = thread
if self.blocking: # Wait for finish
thread.join()
ret = thread.value
return ret
else: # No blocking just return the thread
return thread
wrapper.func_name = func.func_name
return wrapper
def __call__(self, func):
def wrapper(*args, **kwargs):
key = (func, tuple(args), tuple(kwargs.items())) # Unique key for function including parameters
if key in self.threads: # Thread already running (if using blocking mode)
thread = self.threads[key]
if self.blocking:
thread.join() # Blocking until its finished
return thread.value # Return the value
else: # No blocking
if thread.ready(): # Its finished, create a new
thread = gevent.spawn(func, *args, **kwargs)
self.threads[key] = thread
return thread
else: # Still running
return thread
else: # Thread not running
thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread
thread.link(lambda thread: self.cleanup(key, thread))
self.threads[key] = thread
if self.blocking: # Wait for finish
thread.join()
ret = thread.value
return ret
else: # No blocking just return the thread
return thread
wrapper.func_name = func.func_name
# Cleanup finished threads
def cleanup(self, key, thread):
if key in self.threads: del(self.threads[key])
return wrapper
# Cleanup finished threads
def cleanup(self, key, thread):
if key in self.threads:
del(self.threads[key])
# Test fixture: blocking decorated method (counts with 1s sleeps).
class Test():
@Noparallel()
def count(self, num=5):
for i in range(num):
print self, i
time.sleep(1)
return "%s return:%s" % (self, i)
@Noparallel()
def count(self, num=5):
for i in range(num):
print self, i
time.sleep(1)
return "%s return:%s" % (self, i)
# Test fixture: non-blocking variant of the same counter.
class TestNoblock():
@Noparallel(blocking=False)
def count(self, num=5):
for i in range(num):
print self, i
time.sleep(1)
return "%s return:%s" % (self, i)
@Noparallel(blocking=False)
def count(self, num=5):
for i in range(num):
print self, i
time.sleep(1)
return "%s return:%s" % (self, i)
# Manual test of blocking mode: two spawns on the same instance share one
# greenlet (thread2 is deduplicated); a later call after completion runs anew.
# NOTE(review): diff residue - old and new bodies appear back to back.
def testBlocking():
test = Test()
test2 = Test()
print "Counting..."
print "Creating class1/thread1"
thread1 = gevent.spawn(test.count)
print "Creating class1/thread2 (ignored)"
thread2 = gevent.spawn(test.count)
print "Creating class2/thread3"
thread3 = gevent.spawn(test2.count)
test = Test()
test2 = Test()
print "Counting..."
print "Creating class1/thread1"
thread1 = gevent.spawn(test.count)
print "Creating class1/thread2 (ignored)"
thread2 = gevent.spawn(test.count)
print "Creating class2/thread3"
thread3 = gevent.spawn(test2.count)
print "Joining class1/thread1"
thread1.join()
print "Joining class1/thread2"
thread2.join()
print "Joining class2/thread3"
thread3.join()
print "Joining class1/thread1"
thread1.join()
print "Joining class1/thread2"
thread2.join()
print "Joining class2/thread3"
thread3.join()
print "Creating class1/thread4 (its finished, allowed again)"
thread4 = gevent.spawn(test.count)
print "Joining thread4"
thread4.join()
print "Creating class1/thread4 (its finished, allowed again)"
thread4 = gevent.spawn(test.count)
print "Joining thread4"
thread4.join()
print thread1.value, thread2.value, thread3.value, thread4.value
print "Done."
print thread1.value, thread2.value, thread3.value, thread4.value
print "Done."
# Manual test of non-blocking mode: count() returns greenlets directly;
# identical calls on one instance return the same greenlet.
# NOTE(review): diff residue - old and new bodies appear back to back.
def testNoblocking():
test = TestNoblock()
test2 = TestNoblock()
print "Creating class1/thread1"
thread1 = test.count()
print "Creating class1/thread2 (ignored)"
thread2 = test.count()
print "Creating class2/thread3"
thread3 = test2.count()
print "Joining class1/thread1"
thread1.join()
print "Joining class1/thread2"
thread2.join()
print "Joining class2/thread3"
thread3.join()
test = TestNoblock()
test2 = TestNoblock()
print "Creating class1/thread1"
thread1 = test.count()
print "Creating class1/thread2 (ignored)"
thread2 = test.count()
print "Creating class2/thread3"
thread3 = test2.count()
print "Joining class1/thread1"
thread1.join()
print "Joining class1/thread2"
thread2.join()
print "Joining class2/thread3"
thread3.join()
print "Creating class1/thread4 (its finished, allowed again)"
thread4 = test.count()
print "Joining thread4"
thread4.join()
print "Creating class1/thread4 (its finished, allowed again)"
thread4 = test.count()
print "Joining thread4"
thread4.join()
print thread1.value, thread2.value, thread3.value, thread4.value
print "Done."
print thread1.value, thread2.value, thread3.value, thread4.value
print "Done."
# Benchmark: spawn 3 deduplicated counters and report greenlet counts
# (via gc) before and after, then sleep so they can run.
# NOTE(review): diff residue - old and new bodies appear back to back.
def testBenchmark():
import time
def printThreadNum():
import gc
from greenlet import greenlet
objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
print "Greenlets: %s" % len(objs)
import time
printThreadNum()
test = TestNoblock()
s = time.time()
for i in range(3):
gevent.spawn(test.count, i+1)
print "Created in %.3fs" % (time.time()-s)
printThreadNum()
time.sleep(5)
def printThreadNum():
import gc
from greenlet import greenlet
objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
print "Greenlets: %s" % len(objs)
printThreadNum()
test = TestNoblock()
s = time.time()
for i in range(3):
gevent.spawn(test.count, i + 1)
print "Created in %.3fs" % (time.time() - s)
printThreadNum()
time.sleep(5)
# Script entry point: monkey-patch gevent, then run all manual tests.
# NOTE(review): diff residue - the old and the new main body follow each
# other; `registry` on the old line below is not defined anywhere in this
# file (the reformat removed that line) - TODO confirm it was dead code.
if __name__ == "__main__":
from gevent import monkey
monkey.patch_all()
from gevent import monkey
monkey.patch_all()
testBenchmark()
print "Testing blocking mode..."
testBlocking()
print "Testing noblocking mode..."
testNoblocking()
print [instance.threads for instance in registry]
testBenchmark()
print "Testing blocking mode..."
testBlocking()
print "Testing noblocking mode..."
testNoblocking()

View file

@ -1,61 +1,64 @@
import json, re, os
import json
import re
import os
def queryFile(file_path, filter_path, filter_key=None, filter_val=None):
    """Load a JSON file and return the rows found under filter_path.

    file_path: path of the JSON file to read.
    filter_path: list of keys to walk down into the document; [''] means
        "return the whole document" (wrapped in a one-element list).
    filter_key/filter_val: when filter_val is truthy, keep only the rows
        whose row[filter_key] == filter_val.

    Returns a list of rows, or None when a key on the path is missing
    or maps to a falsy value. Propagates json/IO errors to the caller
    (query() catches them).

    NOTE(review): the original opened the file via json.load(open(...))
    and never closed the handle; the `with` block fixes that leak. The
    original span also contained the body twice (diff residue).
    """
    back = []
    with open(file_path) as f:
        data = json.load(f)
    if filter_path == ['']:
        return [data]
    for key in filter_path:  # Walk down to the requested node
        data = data.get(key)
        if not data:
            return None
    for row in data:
        if filter_val:  # Filter by value
            if row[filter_key] == filter_val:
                back.append(row)
        else:
            back.append(row)
    return back
# Find in json files
# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1...beEp6', u'added': 1422740732, u'message_id': 1},...]
def query(path_pattern, filter):
    """Yield rows matching `filter` from JSON files selected by path_pattern.

    path_pattern: either "root/*/file.json" (wildcard: every file.json in
        any subdirectory of root) or a plain "dir/file.json".
    filter: dotted key path, optionally with "=<int>" to keep only rows
        whose last key equals that integer ("" returns whole documents).
        (The parameter name shadows the builtin but is kept for
        backward compatibility with keyword callers.)

    Each yielded row gets an "inner_path" key with the file's directory
    relative to the root. Files that fail to parse are skipped silently.

    NOTE(review): the original span contained the body twice (diff
    residue); the old copy used the Python-2-only `except Exception, err`
    form, normalized here to `except Exception:` (valid in both 2 and 3).
    """
    if "=" in filter:  # Filter by value, e.g. "topics.topic_id=1"
        filter_path, filter_val = filter.split("=")
        filter_path = filter_path.split(".")
        filter_key = filter_path.pop()  # Last element is the key
        filter_val = int(filter_val)
    else:  # No filter
        filter_path = filter.split(".")
        filter_key = None
        filter_val = None
    if "/*/" in path_pattern:  # Wildcard search
        root_dir, file_pattern = path_pattern.replace("\\", "/").split("/*/")
    else:  # No wildcard
        root_dir, file_pattern = re.match("(.*)/(.*?)$", path_pattern.replace("\\", "/")).groups()
    for root, dirs, files in os.walk(root_dir, topdown=False):
        root = root.replace("\\", "/")
        inner_path = root.replace(root_dir, "").strip("/")
        for file_name in files:
            if file_pattern != file_name:
                continue
            try:
                res = queryFile(root + "/" + file_name, filter_path, filter_key, filter_val)
                if not res:
                    continue
            except Exception:  # JSON load error: skip the broken file
                continue
            for row in res:
                row["inner_path"] = inner_path
                yield row
# Script entry point: dump every user data.json document under the sample
# site directory (presumably a local ZeroNet data dir - TODO confirm path).
# NOTE(review): the duplicated loop is diff residue (old + new lines).
if __name__ == "__main__":
#for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "topics")):
# print row
for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
print row
for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
print row

View file

@ -4,119 +4,120 @@ import logging
log = logging.getLogger("RateLimit")
# Module-level state shared by all rate-limit helpers below.
# NOTE(review): diff residue throughout this section - old and new lines
# of the same code appear back to back.
called_db = {} # Holds events last call time
queue_db = {} # Commands queued to run
called_db = {}  # Holds events last call time
queue_db = {}  # Commands queued to run
# Register event as called
# Return: None
def called(event):
called_db[event] = time.time()
called_db[event] = time.time()
# Check if calling event is allowed
# Return: True if allowed False if not
# Also deletes the stored timestamp once the cooldown has expired.
def isAllowed(event, allowed_again=10):
last_called = called_db.get(event)
if not last_called: # Its not called before
return True
elif time.time()-last_called >= allowed_again:
del called_db[event] # Delete last call time to save memory
return True
else:
return False
last_called = called_db.get(event)
if not last_called:  # Its not called before
return True
elif time.time() - last_called >= allowed_again:
del called_db[event]  # Delete last call time to save memory
return True
else:
return False
# Run the queued call for `event` and clear its bookkeeping entries.
def callQueue(event):
func, args, kwargs, thread = queue_db[event]
log.debug("Calling: %s" % event)
del called_db[event]
del queue_db[event]
return func(*args, **kwargs)
func, args, kwargs, thread = queue_db[event]
log.debug("Calling: %s" % event)
del called_db[event]
del queue_db[event]
return func(*args, **kwargs)
# Rate limit and delay function call if needed, If the function called again within the rate limit interval then previous queued call will be dropped
# Return: Immediately gevent thread
# Rate limit and delay function call if necessary
# If the function called again within the rate limit interval then previous queued call will be dropped
# Return: Immediately gevent thread
# NOTE(review): diff residue below - old and new bodies back to back.
def callAsync(event, allowed_again=10, func=None, *args, **kwargs):
if isAllowed(event, allowed_again): # Not called recently, call it now
called(event)
# print "Calling now"
return gevent.spawn(func, *args, **kwargs)
else: # Called recently, schedule it for later
time_left = allowed_again-max(0, time.time()-called_db[event])
log.debug("Added to queue (%.2fs left): %s " % (time_left, event))
if not queue_db.get(event): # Function call not queued yet
thread = gevent.spawn_later(time_left, lambda: callQueue(event)) # Call this function later
queue_db[event] = (func, args, kwargs, thread)
return thread
else: # Function call already queued, just update the parameters
thread = queue_db[event][3]
queue_db[event] = (func, args, kwargs, thread)
return thread
if isAllowed(event, allowed_again):  # Not called recently, call it now
called(event)
# print "Calling now"
return gevent.spawn(func, *args, **kwargs)
else:  # Called recently, schedule it for later
time_left = allowed_again - max(0, time.time() - called_db[event])
log.debug("Added to queue (%.2fs left): %s " % (time_left, event))
if not queue_db.get(event):  # Function call not queued yet
thread = gevent.spawn_later(time_left, lambda: callQueue(event))  # Call this function later
queue_db[event] = (func, args, kwargs, thread)
return thread
else:  # Function call already queued, just update the parameters
thread = queue_db[event][3]
queue_db[event] = (func, args, kwargs, thread)
return thread
# Rate limit and delay function call if needed
# Return: Wait for execution/delay then return value
# Synchronous variant: sleeps out the remaining cooldown, then calls func.
def call(event, allowed_again=10, func=None, *args, **kwargs):
if isAllowed(event): # Not called recently, call it now
called(event)
# print "Calling now"
return func(*args, **kwargs)
if isAllowed(event):  # Not called recently, call it now
called(event)
# print "Calling now"
return func(*args, **kwargs)
else: # Called recently, schedule it for later
time_left = max(0, allowed_again-(time.time()-called_db[event]))
# print "Time left: %s" % time_left, args, kwargs
log.debug("Calling sync (%.2fs left): %s" % (time_left, event))
time.sleep(time_left)
called(event)
back = func(*args, **kwargs)
if event in called_db:
del called_db[event]
return back
else:  # Called recently, schedule it for later
time_left = max(0, allowed_again - (time.time() - called_db[event]))
# print "Time left: %s" % time_left, args, kwargs
log.debug("Calling sync (%.2fs left): %s" % (time_left, event))
time.sleep(time_left)
called(event)
back = func(*args, **kwargs)
if event in called_db:
del called_db[event]
return back
# Cleanup expired events every 3 minutes
# Runs forever in a background greenlet (spawned below).
def cleanup():
while 1:
expired = time.time()-60*2 # Cleanup if older than 2 minutes
for event in called_db.keys():
if called_db[event] < expired:
del called_db[event]
time.sleep(60*3) # Every 3 minutes
while 1:
expired = time.time() - 60 * 2  # Cleanup if older than 2 minutes
for event in called_db.keys():
if called_db[event] < expired:
del called_db[event]
time.sleep(60 * 3)  # Every 3 minutes
# Start the background cleanup greenlet at import time.
gevent.spawn(cleanup)
# Manual stress test of the async and sync rate limiters.
# NOTE(review): diff residue - old and new main bodies back to back.
if __name__ == "__main__":
from gevent import monkey
monkey.patch_all()
import random
from gevent import monkey
monkey.patch_all()
import random
def publish(inner_path):
print "Publishing %s..." % inner_path
return 1
def publish(inner_path):
print "Publishing %s..." % inner_path
return 1
def cb(thread):
print "Value:", thread.value
def cb(thread):
print "Value:", thread.value
print "Testing async spam requests rate limit to 1/sec..."
for i in range(3000):
thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
time.sleep(float(random.randint(1,20))/100000)
print thread.link(cb)
print "Done"
print "Testing async spam requests rate limit to 1/sec..."
for i in range(3000):
thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
time.sleep(float(random.randint(1, 20)) / 100000)
print thread.link(cb)
print "Done"
time.sleep(2)
time.sleep(2)
print "Testing sync spam requests rate limit to 1/sec..."
for i in range(5):
call("publish data.json", 1, publish, "data.json %s" % i)
time.sleep(float(random.randint(1,100))/100)
print "Done"
print "Testing cleanup"
thread = callAsync("publish content.json single", 1, publish, "content.json single")
print "Needs to cleanup:", called_db, queue_db
print "Waiting 3min for cleanup process..."
time.sleep(60*3)
print "Cleaned up:", called_db, queue_db
print "Testing sync spam requests rate limit to 1/sec..."
for i in range(5):
call("publish data.json", 1, publish, "data.json %s" % i)
time.sleep(float(random.randint(1, 100)) / 100)
print "Done"
print "Testing cleanup"
thread = callAsync("publish content.json single", 1, publish, "content.json single")
print "Needs to cleanup:", called_db, queue_db
print "Waiting 3min for cleanup process..."
time.sleep(60 * 3)
print "Cleaned up:", called_db, queue_db

View file

@ -1,22 +1,22 @@
from lib.PySocks import socks
import socket
from lib.PySocks import socks
# socket.create_connection replacement that routes through the SOCKS proxy
# (timeout/source_address are accepted for API compatibility but ignored).
# NOTE(review): diff residue - each function's old and new body follow
# each other below.
def create_connection(address, timeout=None, source_address=None):
sock = socks.socksocket()
sock.connect(address)
return sock
sock = socks.socksocket()
sock.connect(address)
return sock
# Dns queries using the proxy
# Returns a fake getaddrinfo result so name resolution is deferred to the
# proxy instead of resolving locally.
def getaddrinfo(*args):
return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
# Globally monkey-patch the socket module to use the SOCKS5 proxy.
# NOTE(review): name looks like a typo for "monkeyPatch" but is the public
# API, so it must stay; the bare print is leftover debug output.
def monkeyPath(proxy_ip, proxy_port):
print proxy_ip, proxy_port
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
socket.socket = socks.socksocket
socket.create_connection = create_connection
socket.getaddrinfo = getaddrinfo
print proxy_ip, proxy_port
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
socket.socket = socks.socksocket
socket.create_connection = create_connection
socket.getaddrinfo = getaddrinfo

View file

@ -2,6 +2,7 @@
# Disable SSL compression to save massive memory and cpu
import logging
from Config import config
@ -9,7 +10,10 @@ def disableSSLCompression():
import ctypes
import ctypes.util
try:
openssl = ctypes.CDLL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32', ctypes.RTLD_GLOBAL)
openssl = ctypes.CDLL(
ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32',
ctypes.RTLD_GLOBAL
)
openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p
except Exception, err:
logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err)
@ -81,7 +85,7 @@ if not hasattr(_ssl, 'sslwrap'):
logging.debug("Missing SSLwrap, readded.")
# Add SSLContext to gevent.ssl (Ubutunu 15 fix)
# Add SSLContext to gevent.ssl (Ubuntu 15 fix)
try:
import gevent
if not hasattr(gevent.ssl, "SSLContext"):

View file

@ -1,36 +1,40 @@
import msgpack, os, struct
import os
import struct
import msgpack
def msgpackHeader(size):
    """Return the msgpack binary-format header for a blob of `size` bytes.

    Emits the bin 8 (0xc4), bin 16 (0xc5) or bin 32 (0xc6) marker followed
    by the length encoded in the matching unsigned width, per the msgpack
    specification. Raises Exception for sizes that do not fit in an
    unsigned 32-bit length.

    NOTE(review): the original span contained the body twice (diff
    residue of the PEP8 reformat); a single copy preserves behavior.
    """
    if size <= 2 ** 8 - 1:
        return b"\xc4" + struct.pack("B", size)
    elif size <= 2 ** 16 - 1:
        return b"\xc5" + struct.pack(">H", size)
    elif size <= 2 ** 32 - 1:
        return b"\xc6" + struct.pack(">I", size)
    else:
        raise Exception("huge binary string")
# Serialize dict `data` to msgpack via writer(), streaming any file-object
# values in 64 KiB chunks (each file reads at most val.read_bytes bytes).
# Returns the size of the last streamed file.
# NOTE(review): diff residue - old and new bodies appear back to back.
# Python 2 only: relies on the `file` builtin.
def stream(data, writer):
packer = msgpack.Packer()
writer(packer.pack_map_header(len(data)))
for key, val in data.iteritems():
writer(packer.pack(key))
if issubclass(type(val), file): # File obj
max_size = os.fstat(val.fileno()).st_size-val.tell()
size = min(max_size, val.read_bytes)
bytes_left = size
writer(msgpackHeader(size))
buff = 1024*64
while 1:
writer(val.read(min(bytes_left, buff)))
bytes_left = bytes_left-buff
if bytes_left <= 0: break
else: # Simple
writer(packer.pack(val))
return size
packer = msgpack.Packer()
writer(packer.pack_map_header(len(data)))
for key, val in data.iteritems():
writer(packer.pack(key))
if issubclass(type(val), file):  # File obj
max_size = os.fstat(val.fileno()).st_size - val.tell()
size = min(max_size, val.read_bytes)
bytes_left = size
writer(msgpackHeader(size))
buff = 1024 * 64
while 1:
writer(val.read(min(bytes_left, buff)))
bytes_left = bytes_left - buff
if bytes_left <= 0:
break
else:  # Simple
writer(packer.pack(val))
return size
# Marker subclass of the py2 `file` builtin used to tag streamable files.
class FilePart(file):
pass
pass

View file

@ -1,10 +1,13 @@
import gevent
from gevent import socket
import re, urllib2, httplib, logging
import re
import urllib2
import httplib
import logging
from urlparse import urlparse
from xml.dom.minidom import parseString
import gevent
from gevent import socket
# Relevant UPnP spec: http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf
# General TODOs:
@ -14,222 +17,222 @@ remove_whitespace = re.compile(r'>\s*<')
# Broadcast an SSDP M-SEARCH from local_ip and return the raw response,
# or False when no IGD answers within the 5s timeout.
# NOTE(review): diff residue throughout this section - old and new bodies
# of each function appear back to back.
def _m_search_ssdp(local_ip):
"""
Broadcast a UDP SSDP M-SEARCH packet and return response.
"""
search_target = "urn:schemas-upnp-org:device:InternetGatewayDevice:1"
"""
Broadcast a UDP SSDP M-SEARCH packet and return response.
"""
search_target = "urn:schemas-upnp-org:device:InternetGatewayDevice:1"
ssdp_request = ''.join(
['M-SEARCH * HTTP/1.1\r\n',
'HOST: 239.255.255.250:1900\r\n',
'MAN: "ssdp:discover"\r\n',
'MX: 2\r\n',
'ST: {0}\r\n'.format(search_target),
'\r\n']
)
ssdp_request = ''.join(
['M-SEARCH * HTTP/1.1\r\n',
'HOST: 239.255.255.250:1900\r\n',
'MAN: "ssdp:discover"\r\n',
'MX: 2\r\n',
'ST: {0}\r\n'.format(search_target),
'\r\n']
)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((local_ip, 10000))
sock.bind((local_ip, 10000))
sock.sendto(ssdp_request, ('239.255.255.250', 1900))
sock.settimeout(5)
sock.sendto(ssdp_request, ('239.255.255.250', 1900))
sock.settimeout(5)
try:
return sock.recv(2048)
except socket.error, err:
# no reply from IGD, possibly no IGD on LAN
logging.debug("UDP SSDP M-SEARCH send error using ip %s: %s" % (local_ip, err))
return False
try:
return sock.recv(2048)
except socket.error, err:
# no reply from IGD, possibly no IGD on LAN
logging.debug("UDP SSDP M-SEARCH send error using ip %s: %s" % (local_ip, err))
return False
# Parse the SSDP reply headers and return the Location URL parsed by
# urlparse, or False when no location header was returned.
def _retrieve_location_from_ssdp(response):
"""
Parse raw HTTP response to retrieve the UPnP location header
and return a ParseResult object.
"""
parsed = re.findall(r'(?P<name>.*?): (?P<value>.*?)\r\n', response)
location_header = filter(lambda x: x[0].lower() == 'location', parsed)
"""
Parse raw HTTP response to retrieve the UPnP location header
and return a ParseResult object.
"""
parsed = re.findall(r'(?P<name>.*?): (?P<value>.*?)\r\n', response)
location_header = filter(lambda x: x[0].lower() == 'location', parsed)
if not len(location_header):
# no location header returned :(
return False
if not len(location_header):
# no location header returned :(
return False
return urlparse(location_header[0][1])
return urlparse(location_header[0][1])
# Download the device description XML from the location URL.
def _retrieve_igd_profile(url):
"""
Retrieve the device's UPnP profile.
"""
return urllib2.urlopen(url.geturl()).read()
"""
Retrieve the device's UPnP profile.
"""
return urllib2.urlopen(url.geturl()).read()
# Return the text of a DOM node's first child text node.
def _node_val(node):
"""
Get the text value of the first child text node of a node.
"""
return node.childNodes[0].data
"""
Get the text value of the first child text node of a node.
"""
return node.childNodes[0].data
# Find the WANIPConnection/WANPPPConnection service in the IGD profile and
# return (control_url, upnp_schema), or False when neither is present.
# NOTE(review): diff residue - old and new bodies appear back to back.
def _parse_igd_profile(profile_xml):
"""
Traverse the profile xml DOM looking for either
WANIPConnection or WANPPPConnection and return
the value found as well as the 'controlURL'.
"""
dom = parseString(profile_xml)
"""
Traverse the profile xml DOM looking for either
WANIPConnection or WANPPPConnection and return
the value found as well as the 'controlURL'.
"""
dom = parseString(profile_xml)
service_types = dom.getElementsByTagName('serviceType')
for service in service_types:
if _node_val(service).find('WANIPConnection') > 0 or \
_node_val(service).find('WANPPPConnection') > 0:
control_url = service.parentNode.getElementsByTagName(
'controlURL'
)[0].childNodes[0].data
upnp_schema = _node_val(service).split(':')[-2]
return control_url, upnp_schema
service_types = dom.getElementsByTagName('serviceType')
for service in service_types:
if _node_val(service).find('WANIPConnection') > 0 or \
_node_val(service).find('WANPPPConnection') > 0:
control_url = service.parentNode.getElementsByTagName(
'controlURL'
)[0].childNodes[0].data
upnp_schema = _node_val(service).split(':')[-2]
return control_url, upnp_schema
return False
return False
# Determine the LAN-facing local IP by "connecting" a UDP socket to the
# SSDP multicast address (no packet is actually sent for UDP connect).
def _get_local_ip():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
# not using <broadcast> because gevents getaddrinfo doesn't like that
# using port 1 as per hobbldygoop's comment about port 0 not working on osx:
# https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928
s.connect(('239.255.255.250', 1))
return s.getsockname()[0]
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
# not using <broadcast> because gevents getaddrinfo doesn't like that
# using port 1 as per hobbldygoop's comment about port 0 not working on osx:
# https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928
s.connect(('239.255.255.250', 1))
return s.getsockname()[0]
# Build the AddPortMapping SOAP envelope (whitespace between tags stripped
# via the module-level remove_whitespace regex).
# NOTE(review): diff residue - old and new lines are interleaved below,
# including the duplicated fragments of the SOAP XML template; the string
# literal content is left byte-for-byte untouched.
def _create_soap_message(local_ip, port, description="UPnPPunch", protocol="TCP",
upnp_schema='WANIPConnection'):
"""
Build a SOAP AddPortMapping message.
"""
upnp_schema='WANIPConnection'):
"""
Build a SOAP AddPortMapping message.
"""
soap_message = """<?xml version="1.0"?>
soap_message = """<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
<s:Body>
<u:AddPortMapping xmlns:u="urn:schemas-upnp-org:service:{upnp_schema}:1">
<NewRemoteHost></NewRemoteHost>
<NewExternalPort>{port}</NewExternalPort>
<NewProtocol>{protocol}</NewProtocol>
<NewInternalPort>{port}</NewInternalPort>
<NewInternalClient>{host_ip}</NewInternalClient>
<NewEnabled>1</NewEnabled>
<NewPortMappingDescription>{description}</NewPortMappingDescription>
<NewLeaseDuration>0</NewLeaseDuration>
</u:AddPortMapping>
</s:Body>
<s:Body>
<u:AddPortMapping xmlns:u="urn:schemas-upnp-org:service:{upnp_schema}:1">
<NewRemoteHost></NewRemoteHost>
<NewExternalPort>{port}</NewExternalPort>
<NewProtocol>{protocol}</NewProtocol>
<NewInternalPort>{port}</NewInternalPort>
<NewInternalClient>{host_ip}</NewInternalClient>
<NewEnabled>1</NewEnabled>
<NewPortMappingDescription>{description}</NewPortMappingDescription>
<NewLeaseDuration>0</NewLeaseDuration>
</u:AddPortMapping>
</s:Body>
</s:Envelope>""".format(port=port,
protocol=protocol,
host_ip=local_ip,
description=description,
upnp_schema=upnp_schema)
return remove_whitespace.sub('><', soap_message)
protocol=protocol,
host_ip=local_ip,
description=description,
upnp_schema=upnp_schema)
return remove_whitespace.sub('><', soap_message)
# Raise on a SOAP 500 fault (logging errorCode/errorDescription);
# return True for any non-500 response.
def _parse_for_errors(soap_response):
if soap_response.status == 500:
err_dom = parseString(soap_response.read())
err_code = _node_val(err_dom.getElementsByTagName('errorCode')[0])
err_msg = _node_val(
err_dom.getElementsByTagName('errorDescription')[0]
)
logging.error('SOAP request error: {0} - {1}'.format(err_code, err_msg))
raise Exception(
'SOAP request error: {0} - {1}'.format(err_code, err_msg)
)
if soap_response.status == 500:
err_dom = parseString(soap_response.read())
err_code = _node_val(err_dom.getElementsByTagName('errorCode')[0])
err_msg = _node_val(
err_dom.getElementsByTagName('errorDescription')[0]
)
logging.error('SOAP request error: {0} - {1}'.format(err_code, err_msg))
raise Exception(
'SOAP request error: {0} - {1}'.format(err_code, err_msg)
)
return False
else:
return True
return False
else:
return True
# POST the SOAP message to the IGD's control URL and return the result of
# _parse_for_errors on the HTTP response.
# NOTE(review): diff residue - old and new bodies appear back to back.
def _send_soap_request(location, upnp_schema, control_url, soap_message):
"""
Send out SOAP request to UPnP device and return a response.
"""
headers = {
'SOAPAction': (
'"urn:schemas-upnp-org:service:{schema}:'
'1#AddPortMapping"'.format(schema=upnp_schema)
),
'Content-Type': 'text/xml'
}
conn = httplib.HTTPConnection(location.hostname, location.port)
conn.request('POST', control_url, soap_message, headers)
"""
Send out SOAP request to UPnP device and return a response.
"""
headers = {
'SOAPAction': (
'"urn:schemas-upnp-org:service:{schema}:'
'1#AddPortMapping"'.format(schema=upnp_schema)
),
'Content-Type': 'text/xml'
}
conn = httplib.HTTPConnection(location.hostname, location.port)
conn.request('POST', control_url, soap_message, headers)
response = conn.getresponse()
conn.close()
response = conn.getresponse()
conn.close()
return _parse_for_errors(response)
return _parse_for_errors(response)
# Try to forward `port` (TCP and UDP) via UPnP: collect candidate local
# IPs, discover an IGD over SSDP, then send AddPortMapping SOAP requests.
# Returns True only if both protocol mappings succeed for some local IP.
# NOTE(review): diff residue - old and new lines interleaved throughout.
def open_port(port=15441, desc="UpnpPunch"):
"""
Attempt to forward a port using UPnP.
"""
"""
Attempt to forward a port using UPnP.
"""
local_ips = [_get_local_ip()]
try:
local_ips += socket.gethostbyname_ex('')[2] # Get ip by '' hostname not supported on all platform
except:
pass
local_ips = [_get_local_ip()]
try:
local_ips += socket.gethostbyname_ex('')[2]  # Get ip by '' hostname not supported on all platform
except:
pass
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('8.8.8.8', 0)) # Using google dns route
local_ips.append(s.getsockname()[0])
except:
pass
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('8.8.8.8', 0))  # Using google dns route
local_ips.append(s.getsockname()[0])
except:
pass
local_ips = list(set(local_ips)) # Delete duplicates
logging.debug("Found local ips: %s" % local_ips)
local_ips = local_ips*3 # Retry every ip 3 times
local_ips = list(set(local_ips))  # Delete duplicates
logging.debug("Found local ips: %s" % local_ips)
local_ips = local_ips * 3  # Retry every ip 3 times
for local_ip in local_ips:
logging.debug("Trying using local ip: %s" % local_ip)
idg_response = _m_search_ssdp(local_ip)
for local_ip in local_ips:
logging.debug("Trying using local ip: %s" % local_ip)
idg_response = _m_search_ssdp(local_ip)
if not idg_response:
logging.debug("No IGD response")
continue
if not idg_response:
logging.debug("No IGD response")
continue
location = _retrieve_location_from_ssdp(idg_response)
location = _retrieve_location_from_ssdp(idg_response)
if not location:
logging.debug("No location")
continue
if not location:
logging.debug("No location")
continue
parsed = _parse_igd_profile(
_retrieve_igd_profile(location)
)
parsed = _parse_igd_profile(
_retrieve_igd_profile(location)
)
if not parsed:
logging.debug("IGD parse error using location %s" % repr(location))
continue
if not parsed:
logging.debug("IGD parse error using location %s" % repr(location))
continue
control_url, upnp_schema = parsed
control_url, upnp_schema = parsed
soap_messages = [_create_soap_message(local_ip, port, desc, proto, upnp_schema)
for proto in ['TCP', 'UDP']]
soap_messages = [_create_soap_message(local_ip, port, desc, proto, upnp_schema)
for proto in ['TCP', 'UDP']]
requests = [gevent.spawn(
_send_soap_request, location, upnp_schema, control_url, message
) for message in soap_messages]
requests = [gevent.spawn(
_send_soap_request, location, upnp_schema, control_url, message
) for message in soap_messages]
gevent.joinall(requests, timeout=3)
gevent.joinall(requests, timeout=3)
if all([request.value for request in requests]):
return True
return False
if all([request.value for request in requests]):
return True
return False
# Script entry point: monkey-patch sockets for gevent and try to forward
# the default ZeroNet port with debug logging enabled.
# NOTE(review): the duplicated lines below are diff residue (old + new).
if __name__ == "__main__":
from gevent import monkey
monkey.patch_socket()
from gevent import monkey
monkey.patch_socket()
logging.getLogger().setLevel(logging.DEBUG)
print open_port(15441, "ZeroNet")
logging.getLogger().setLevel(logging.DEBUG)
print open_port(15441, "ZeroNet")