Change to Python3 coding style

shortcutme 2019-03-15 21:06:59 +01:00
parent fc0fe0557b
commit b0b9a4d33c
137 changed files with 910 additions and 913 deletions

View file

@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io
 import difflib
@@ -31,7 +31,7 @@ def diff(old, new, limit=False):
 def patch(old_f, actions):
-    new_f = StringIO.StringIO()
+    new_f = io.BytesIO()
     for action, param in actions:
         if action == "=": # Same lines
             new_f.write(old_f.read(param))
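
The hunk above replaces cStringIO.StringIO with io.BytesIO, which accepts only bytes. A minimal sketch of that pattern (the helper name and sample data are made up, not part of this commit):

import io

def copy_same(old_f, new_f, length):
    # "=" action: copy `length` unchanged bytes from the old file into the new buffer
    new_f.write(old_f.read(length))

old_f = io.BytesIO(b"hello world")
new_f = io.BytesIO()
copy_same(old_f, new_f, 5)
print(new_f.getvalue())  # b'hello'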

View file

@@ -28,19 +28,19 @@ class Event(list):
 if __name__ == "__main__":
     def testBenchmark():
         def say(pre, text):
-            print "%s Say: %s" % (pre, text)
+            print("%s Say: %s" % (pre, text))
         import time
         s = time.time()
         on_changed = Event()
         for i in range(1000):
             on_changed.once(lambda pre: say(pre, "once"), "once")
-        print "Created 1000 once in %.3fs" % (time.time() - s)
+        print("Created 1000 once in %.3fs" % (time.time() - s))
         on_changed("#1")
     def testUsage():
         def say(pre, text):
-            print "%s Say: %s" % (pre, text)
+            print("%s Say: %s" % (pre, text))
         on_changed = Event()
         on_changed.once(lambda pre: say(pre, "once"))
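
Most of this commit is the mechanical change from the Python 2 print statement to the Python 3 print() function. A short sketch of the equivalences relied on above (the values are made up):

pre, text = "#1", "once"
elapsed = 0.003

print("%s Say: %s" % (pre, text))              # %-formatting is unchanged
print("Created 1000 once in %.3fs" % elapsed)  # same output as the old print statement
print(pre, elapsed)                            # multiple args print space-separated, like `print a, b` did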

View file

@@ -49,7 +49,7 @@ class Noparallel(object): # Only allow function running once in same time
                 return ret
             else: # No blocking just return the thread
                 return thread
-        wrapper.func_name = func.func_name
+        wrapper.__name__ = func.__name__
         return wrapper
@@ -65,7 +65,7 @@ if __name__ == "__main__":
         @Noparallel()
         def count(self, num=5):
             for i in range(num):
-                print self, i
+                print(self, i)
                 time.sleep(1)
             return "%s return:%s" % (self, i)
@@ -74,59 +74,59 @@ if __name__ == "__main__":
         @Noparallel(blocking=False)
         def count(self, num=5):
             for i in range(num):
-                print self, i
+                print(self, i)
                 time.sleep(1)
             return "%s return:%s" % (self, i)
     def testBlocking():
         test = Test()
         test2 = Test()
-        print "Counting..."
-        print "Creating class1/thread1"
+        print("Counting...")
+        print("Creating class1/thread1")
         thread1 = gevent.spawn(test.count)
-        print "Creating class1/thread2 (ignored)"
+        print("Creating class1/thread2 (ignored)")
         thread2 = gevent.spawn(test.count)
-        print "Creating class2/thread3"
+        print("Creating class2/thread3")
         thread3 = gevent.spawn(test2.count)
-        print "Joining class1/thread1"
+        print("Joining class1/thread1")
         thread1.join()
-        print "Joining class1/thread2"
+        print("Joining class1/thread2")
         thread2.join()
-        print "Joining class2/thread3"
+        print("Joining class2/thread3")
         thread3.join()
-        print "Creating class1/thread4 (its finished, allowed again)"
+        print("Creating class1/thread4 (its finished, allowed again)")
         thread4 = gevent.spawn(test.count)
-        print "Joining thread4"
+        print("Joining thread4")
         thread4.join()
-        print thread1.value, thread2.value, thread3.value, thread4.value
-        print "Done."
+        print(thread1.value, thread2.value, thread3.value, thread4.value)
+        print("Done.")
     def testNoblocking():
         test = TestNoblock()
         test2 = TestNoblock()
-        print "Creating class1/thread1"
+        print("Creating class1/thread1")
         thread1 = test.count()
-        print "Creating class1/thread2 (ignored)"
+        print("Creating class1/thread2 (ignored)")
         thread2 = test.count()
-        print "Creating class2/thread3"
+        print("Creating class2/thread3")
         thread3 = test2.count()
-        print "Joining class1/thread1"
+        print("Joining class1/thread1")
         thread1.join()
-        print "Joining class1/thread2"
+        print("Joining class1/thread2")
         thread2.join()
-        print "Joining class2/thread3"
+        print("Joining class2/thread3")
         thread3.join()
-        print "Creating class1/thread4 (its finished, allowed again)"
+        print("Creating class1/thread4 (its finished, allowed again)")
         thread4 = test.count()
-        print "Joining thread4"
+        print("Joining thread4")
         thread4.join()
-        print thread1.value, thread2.value, thread3.value, thread4.value
-        print "Done."
+        print(thread1.value, thread2.value, thread3.value, thread4.value)
+        print("Done.")
     def testBenchmark():
         import time
@@ -135,21 +135,21 @@ if __name__ == "__main__":
             import gc
             from greenlet import greenlet
             objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
-            print "Greenlets: %s" % len(objs)
+            print("Greenlets: %s" % len(objs))
         printThreadNum()
         test = TestNoblock()
         s = time.time()
         for i in range(3):
             gevent.spawn(test.count, i + 1)
-        print "Created in %.3fs" % (time.time() - s)
+        print("Created in %.3fs" % (time.time() - s))
         printThreadNum()
         time.sleep(5)
     from gevent import monkey
     monkey.patch_all()
     testBenchmark()
-    print "Testing blocking mode..."
+    print("Testing blocking mode...")
     testBlocking()
-    print "Testing noblocking mode..."
+    print("Testing noblocking mode...")
     testNoblocking()
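
func.func_name no longer exists in Python 3, so the decorator now copies func.__name__ instead. An alternative sketch using functools.wraps (my suggestion, not what this commit does), which also carries over the docstring and other metadata:

import functools

def noparallel_like(func):
    @functools.wraps(func)  # copies __name__, __doc__, __module__, __wrapped__, ...
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

@noparallel_like
def count(num=5):
    """Count to num."""
    return num

print(count.__name__, count.__doc__)  # count Count to num.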

View file

@@ -19,6 +19,6 @@ def setMaxfilesopened(limit):
         resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard))
         return True
-    except Exception, err:
+    except Exception as err:
         logging.error("Failed to modify max files open limit: %s" % err)
         return False
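
except Exception, err: is a syntax error under Python 3; only the as form parses. A minimal, self-contained sketch of the pattern used above (the raised error is invented for illustration):

import logging

try:
    raise OSError("setrlimit refused")  # stand-in for a real resource.setrlimit() failure
except Exception as err:  # Python 3 spelling; `except Exception, err:` no longer parses
    logging.error("Failed to modify max files open limit: %s" % err)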

View file

@@ -29,7 +29,7 @@ class Pooled(object):
                 self.pooler_running = True
                 gevent.spawn(self.pooler)
             return evt
-        wrapper.func_name = func.func_name
+        wrapper.__name__ = func.__name__
         self.func = func
         return wrapper
@@ -62,4 +62,4 @@ if __name__ == "__main__":
     s = time.time()
     gevent.joinall(threads) # Should take 10 second
-    print time.time() - s
+    print(time.time() - s)

View file

@@ -64,4 +64,4 @@ def query(path_pattern, filter):
 if __name__ == "__main__":
     for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
-        print row
+        print(row)

View file

@@ -86,7 +86,7 @@ def call(event, allowed_again=10, func=None, *args, **kwargs):
 def rateLimitCleanup():
     while 1:
         expired = time.time() - 60 * 2 # Cleanup if older than 2 minutes
-        for event in called_db.keys():
+        for event in list(called_db.keys()):
             if called_db[event] < expired:
                 del called_db[event]
         time.sleep(60 * 3) # Every 3 minutes
@@ -99,30 +99,30 @@ if __name__ == "__main__":
     import random
     def publish(inner_path):
-        print "Publishing %s..." % inner_path
+        print("Publishing %s..." % inner_path)
         return 1
     def cb(thread):
-        print "Value:", thread.value
+        print("Value:", thread.value)
-    print "Testing async spam requests rate limit to 1/sec..."
+    print("Testing async spam requests rate limit to 1/sec...")
     for i in range(3000):
         thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
         time.sleep(float(random.randint(1, 20)) / 100000)
-    print thread.link(cb)
-    print "Done"
+    print(thread.link(cb))
+    print("Done")
     time.sleep(2)
-    print "Testing sync spam requests rate limit to 1/sec..."
+    print("Testing sync spam requests rate limit to 1/sec...")
     for i in range(5):
         call("publish data.json", 1, publish, "data.json %s" % i)
         time.sleep(float(random.randint(1, 100)) / 100)
-    print "Done"
+    print("Done")
-    print "Testing cleanup"
+    print("Testing cleanup")
     thread = callAsync("publish content.json single", 1, publish, "content.json single")
-    print "Needs to cleanup:", called_db, queue_db
-    print "Waiting 3min for cleanup process..."
+    print("Needs to cleanup:", called_db, queue_db)
+    print("Waiting 3min for cleanup process...")
     time.sleep(60 * 3)
-    print "Cleaned up:", called_db, queue_db
+    print("Cleaned up:", called_db, queue_db)

View file

@@ -57,7 +57,7 @@ def disableSSLCompression():
     try:
         openssl = openLibrary()
         openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p
-    except Exception, err:
+    except Exception as err:
         logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err)
         return False
@@ -69,7 +69,7 @@ def disableSSLCompression():
 if config.disable_sslcompression:
     try:
         disableSSLCompression()
-    except Exception, err:
+    except Exception as err:
         logging.debug("Error disabling SSL compression: %s" % err)

View file

@@ -1,8 +1,8 @@
 import re
-import urllib2
-import httplib
+import urllib.request
+import http.client
 import logging
-from urlparse import urlparse
+from urllib.parse import urlparse
 from xml.dom.minidom import parseString
 from xml.parsers.expat import ExpatError
@@ -84,7 +84,7 @@ def _retrieve_igd_profile(url):
     Retrieve the device's UPnP profile.
     """
     try:
-        return urllib2.urlopen(url.geturl(), timeout=5).read().decode('utf-8')
+        return urllib.request.urlopen(url.geturl(), timeout=5).read().decode('utf-8')
     except socket.error:
         raise IGDError('IGD profile query timed out')
@@ -251,7 +251,7 @@ def _send_soap_request(location, upnp_schema, control_path, soap_fn,
     }
     logging.debug("Sending UPnP request to {0}:{1}...".format(
         location.hostname, location.port))
-    conn = httplib.HTTPConnection(location.hostname, location.port)
+    conn = http.client.HTTPConnection(location.hostname, location.port)
     conn.request('POST', control_path, soap_message, headers)
     response = conn.getresponse()
@@ -366,10 +366,12 @@ if __name__ == "__main__":
     import time
     s = time.time()
-    print "Opening port..."
-    print ask_to_open_port(15443, "ZeroNet", protos=["TCP"])
-    print "Done in", time.time() - s
+    print("Opening port...")
+    print(ask_to_open_port(15443, "ZeroNet", protos=["TCP"]))
+    print("Done in", time.time() - s)
-    print "Closing port..."
-    print ask_to_close_port(15443, "ZeroNet", protos=["TCP"])
-    print "Done in", time.time() - s
+    """
+    print("Closing port...")
+    print(ask_to_close_port(15443, "ZeroNet", protos=["TCP"]))
+    print("Done in", time.time() - s)
+    """

View file

@@ -1,3 +1,3 @@
-from Event import Event
-from Noparallel import Noparallel
-from Pooled import Pooled
+from .Event import Event
+from .Noparallel import Noparallel
+from .Pooled import Pooled
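
Python 3 removed implicit relative imports, so a package __init__.py has to use the explicit dotted form. Assuming a layout like util/Event.py, util/Noparallel.py and util/Pooled.py (the package name is inferred, not stated in this diff), the re-exports look like:

# util/__init__.py
from .Event import Event            # resolved relative to this package
from .Noparallel import Noparallel  # without the dot this would be a top-level sys.path lookup
from .Pooled import Pooled

# Callers are unchanged:
#   from util import Event, Noparallel, Pooled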

View file

@@ -67,7 +67,7 @@ def getFreeSpace():
                 ctypes.c_wchar_p(config.data_dir), None, None, ctypes.pointer(free_space_pointer)
             )
             free_space = free_space_pointer.value
-        except Exception, err:
+        except Exception as err:
             logging.error("GetFreeSpace error: %s" % err)
     return free_space
@@ -153,7 +153,7 @@ def toHashId(hash):
 def mergeDicts(dicts):
     back = collections.defaultdict(set)
     for d in dicts:
-        for key, val in d.iteritems():
+        for key, val in d.items():
             back[key].update(val)
     return dict(back)
@@ -161,16 +161,16 @@ def mergeDicts(dicts):
 # Request https url using gevent SSL error workaround
 def httpRequest(url, as_file=False):
     if url.startswith("http://"):
-        import urllib
-        response = urllib.urlopen(url)
+        import urllib.request
+        response = urllib.request.urlopen(url)
     else: # Hack to avoid Python gevent ssl errors
         import socket
-        import httplib
+        import http.client
         import ssl
         host, request = re.match("https://(.*?)(/.*?)$", url).groups()
-        conn = httplib.HTTPSConnection(host)
+        conn = http.client.HTTPSConnection(host)
         sock = socket.create_connection((conn.host, conn.port), conn.timeout, conn.source_address)
         conn.sock = ssl.wrap_socket(sock, conn.key_file, conn.cert_file)
         conn.request("GET", request)
@@ -180,8 +180,8 @@ def httpRequest(url, as_file=False):
             response = httpRequest(response.getheader('Location'))
     if as_file:
-        import cStringIO as StringIO
-        data = StringIO.StringIO()
+        import io
+        data = io.BytesIO()
         while True:
             buff = response.read(1024 * 16)
             if not buff:
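
dict.iteritems() is gone in Python 3; dict.items() now returns a lazy view, so the mergeDicts hunk above only needs the rename. A standalone version of that loop with made-up input:

import collections

def mergeDicts(dicts):
    back = collections.defaultdict(set)
    for d in dicts:
        for key, val in d.items():  # items() is a view in Python 3, no iteritems() needed
            back[key].update(val)
    return dict(back)

print(mergeDicts([{"a": {1, 2}}, {"a": {3}, "b": {4}}]))  # {'a': {1, 2, 3}, 'b': {4}}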