Change to Python3 coding style

parent fc0fe0557b
commit b0b9a4d33c

137 changed files with 910 additions and 913 deletions
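The changes are almost entirely mechanical Python 2 to Python 3 conversions. As a rough orientation, a runnable sketch (not part of the commit itself) of the recurring patterns:

    # Illustrative only: the Python 2 -> 3 idioms converted throughout this commit.
    settings = {"a": 1, "b": 2}

    # print statement -> print() function
    print("loaded %s keys" % len(settings))

    # "except Exception, err" -> "except Exception as err"
    try:
        1 / 0
    except Exception as err:
        print("error: %s" % err)

    # dict.iteritems() is gone; items() returns a live view, so take a
    # list() snapshot whenever the dict is mutated during iteration
    for key, val in list(settings.items()):
        del settings[key]

    # str/bytes split: binary payloads are compared as bytes
    assert b"Pong!" == "Pong!".encode("ascii")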
@@ -3,7 +3,7 @@ import sys
 import os
 import locale
 import re
-import ConfigParser
+import configparser
 import logging
 import logging.handlers
 import stat
@@ -304,7 +304,7 @@ class Config(object):
                 if "://" in tracker and tracker not in self.trackers:
                     self.trackers.append(tracker)
         except Exception as err:
-            print "Error loading trackers file: %s" % err
+            print("Error loading trackers file: %s" % err)
 
     # Find arguments specified for current action
     def getActionArguments(self):
@@ -316,7 +316,7 @@ class Config(object):
 
     # Try to find action from argv
    def getAction(self, argv):
-        actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0]  # Valid actions
+        actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0]  # Valid actions
         found_action = False
         for action in actions:  # See if any in argv
             if action in argv:
@@ -404,7 +404,7 @@ class Config(object):
             self.config_file = argv[argv.index("--config_file") + 1]
         # Load config file
         if os.path.isfile(self.config_file):
-            config = ConfigParser.ConfigParser(allow_no_value=True)
+            config = configparser.ConfigParser(allow_no_value=True)
             config.read(self.config_file)
             for section in config.sections():
                 for key, val in config.items(section):
@@ -570,7 +570,7 @@ class Config(object):
             try:
                 os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
             except Exception as err:
-                print "Can't change permission of %s: %s" % (self.log_dir, err)
+                print("Can't change permission of %s: %s" % (self.log_dir, err))
 
         # Make warning hidden from console
         logging.WARNING = 15  # Don't display warnings if not in debug mode
 
@@ -314,7 +314,7 @@ class Connection(object):
                 self.incomplete_buff_recv += 1
                 self.bytes_recv += buff_len
                 self.server.bytes_recv += buff_len
-        except Exception, err:
+        except Exception as err:
             self.log("Stream read error: %s" % Debug.formatException(err))
 
         if config.debug_socket:
@@ -328,7 +328,7 @@ class Connection(object):
         if unpacker_stream_bytes:
             return buff[buff_stream_start + unpacker_stream_bytes:]
         else:
-            return ""
+            return b""
 
     # My handshake info
     def getHandshakeInfo(self):
@@ -476,7 +476,7 @@ class Connection(object):
             try:
                 self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
                 self.sock_wrapped = True
-            except Exception, err:
+            except Exception as err:
                 if not config.force_encryption:
                     self.log("Crypt connection error: %s, adding ip %s as broken ssl." % (err, self.ip))
                     self.server.broken_ssl_ips[self.ip] = True
@@ -526,7 +526,7 @@ class Connection(object):
             message = None
             with self.send_lock:
                 self.sock.sendall(data)
-        except Exception, err:
+        except Exception as err:
             self.close("Send error: %s (cmd: %s)" % (err, stat_key))
             return False
         self.last_sent_time = time.time()
@@ -577,9 +577,9 @@ class Connection(object):
         with gevent.Timeout(10.0, False):
             try:
                 response = self.request("ping")
-            except Exception, err:
+            except Exception as err:
                 self.log("Ping error: %s" % Debug.formatException(err))
-        if response and "body" in response and response["body"] == "Pong!":
+        if response and "body" in response and response["body"] == b"Pong!":
             self.last_ping_delay = time.time() - s
             return True
         else:
@@ -608,7 +608,7 @@ class Connection(object):
             if self.sock:
                 self.sock.shutdown(gevent.socket.SHUT_WR)
                 self.sock.close()
-        except Exception, err:
+        except Exception as err:
             if config.debug_socket:
                 self.log("Close error: %s" % err)
 
@@ -12,7 +12,7 @@ from gevent.pool import Pool
 import util
 from util import helper
 from Debug import Debug
-from Connection import Connection
+from .Connection import Connection
 from Config import config
 from Crypt import CryptConnection
 from Crypt import CryptHash
@@ -94,7 +94,7 @@ class ConnectionServer(object):
             self.stream_server = StreamServer(
                 (self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
             )
-        except Exception, err:
+        except Exception as err:
             self.log.info("StreamServer create error: %s" % Debug.formatException(err))
 
     def listen(self):
@@ -102,7 +102,7 @@ class ConnectionServer(object):
             gevent.spawn(self.listenProxy)
         try:
             self.stream_server.serve_forever()
-        except Exception, err:
+        except Exception as err:
             self.log.info("StreamServer listen error: %s" % err)
 
     def stop(self):
@@ -199,7 +199,7 @@ class ConnectionServer(object):
                     connection.close("Connection event return error")
                     raise Exception("Connection event return error")
 
-            except Exception, err:
+            except Exception as err:
                 connection.close("%s Connect error: %s" % (ip, Debug.formatException(err)))
                 raise err
 
@@ -346,6 +346,6 @@ class ConnectionServer(object):
         ])
         if len(corrections) < 6:
             return 0.0
-        mid = len(corrections) / 2 - 1
+        mid = int(len(corrections) / 2 - 1)
         median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
         return median
 
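Note on the int() wrapper above: in Python 3, / on integers is true division and returns a float, which can no longer be used as a list index. A minimal sketch of the same computation:

    corrections = [1, 2, 3, 4, 5, 6]
    mid = int(len(corrections) / 2 - 1)      # style used by this commit
    assert mid == 2
    assert mid == len(corrections) // 2 - 1  # floor division gives the same result here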
@@ -1,2 +1,2 @@
-from ConnectionServer import ConnectionServer
-from Connection import Connection
+from .ConnectionServer import ConnectionServer
+from .Connection import Connection
 
@@ -19,7 +19,7 @@ class ContentDb(Db):
             foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone()
             if foreign_key_error:
                 raise Exception("Database foreign key error: %s" % foreign_key_error)
-        except Exception, err:
+        except Exception as err:
             self.log.error("Error loading content.db: %s, rebuilding..." % Debug.formatException(err))
             self.close()
             os.unlink(path)  # Remove and try again
@@ -95,8 +95,8 @@ class ContentDb(Db):
     def setContent(self, site, inner_path, content, size=0):
         self.insertOrUpdate("content", {
             "size": size,
-            "size_files": sum([val["size"] for key, val in content.get("files", {}).iteritems()]),
-            "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).iteritems()]),
+            "size_files": sum([val["size"] for key, val in content.get("files", {}).items()]),
+            "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]),
             "modified": int(content.get("modified", 0))
         }, {
             "site_id": self.site_ids.get(site.address, 0),
 
@@ -1,7 +1,7 @@
 import time
 import os
 
-import ContentDb
+from . import ContentDb
 from Debug import Debug
 from Config import config
 
@@ -127,29 +127,29 @@ if __name__ == "__main__":
     s_mem = process.memory_info()[0] / float(2 ** 20)
     root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27"
     contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root)
-    print "Init len", len(contents)
+    print("Init len", len(contents))
 
     s = time.time()
     for dir_name in os.listdir(root + "/data/users/")[0:8000]:
         contents["data/users/%s/content.json" % dir_name]
-    print "Load: %.3fs" % (time.time() - s)
+    print("Load: %.3fs" % (time.time() - s))
 
     s = time.time()
     found = 0
-    for key, val in contents.iteritems():
+    for key, val in contents.items():
         found += 1
         assert key
         assert val
-    print "Found:", found
-    print "Iteritem: %.3fs" % (time.time() - s)
+    print("Found:", found)
+    print("Iteritem: %.3fs" % (time.time() - s))
 
     s = time.time()
    found = 0
-    for key in contents.keys():
+    for key in list(contents.keys()):
         found += 1
         assert key in contents
-    print "In: %.3fs" % (time.time() - s)
+    print("In: %.3fs" % (time.time() - s))
 
-    print "Len:", len(contents.values()), len(contents.keys())
+    print("Len:", len(list(contents.values())), len(list(contents.keys())))
 
-    print "Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem
+    print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem)
 
@@ -3,6 +3,7 @@ import time
 import re
 import os
 import copy
+import base64
 
 import gevent
 
@@ -13,7 +14,7 @@ from util import helper
 from util import Diff
 from util import SafeRe
 from Peer import PeerHashfield
-from ContentDbDict import ContentDbDict
+from .ContentDbDict import ContentDbDict
 from Plugin import PluginManager
 
 
@@ -44,7 +45,7 @@ class ContentManager(object):
 
         # Load hashfield cache
         if "hashfield" in self.site.settings.get("cache", {}):
-            self.hashfield.fromstring(self.site.settings["cache"]["hashfield"].decode("base64"))
+            self.hashfield.frombytes(base64.b64decode(self.site.settings["cache"]["hashfield"]))
             del self.site.settings["cache"]["hashfield"]
         elif self.contents.get("content.json") and self.site.settings["size_optional"] > 0:
             self.site.storage.updateBadFiles()  # No hashfield cache created yet
@@ -74,7 +75,7 @@ class ContentManager(object):
                     return [], []
 
                 new_content = json.load(open(content_path))
-            except Exception, err:
+            except Exception as err:
                 self.log.warning("%s load error: %s" % (content_path, Debug.formatException(err)))
                 return [], []
         else:
@@ -86,7 +87,7 @@ class ContentManager(object):
         changed = []
         deleted = []
         # Check changed
-        for relative_path, info in new_content.get("files", {}).iteritems():
+        for relative_path, info in new_content.get("files", {}).items():
             if "sha512" in info:
                 hash_type = "sha512"
             else:  # Backward compatibility
@@ -101,7 +102,7 @@ class ContentManager(object):
                 changed.append(content_inner_dir + relative_path)
 
         # Check changed optional files
-        for relative_path, info in new_content.get("files_optional", {}).iteritems():
+        for relative_path, info in new_content.get("files_optional", {}).items():
             file_inner_path = content_inner_dir + relative_path
             new_hash = info["sha512"]
             if old_content and old_content.get("files_optional", {}).get(relative_path):
@@ -115,7 +116,7 @@ class ContentManager(object):
                         self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][relative_path]["size"])
                         self.optionalDelete(file_inner_path)
                         self.log.debug("Deleted changed optional file: %s" % file_inner_path)
-                    except Exception, err:
+                    except Exception as err:
                         self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))
             else:  # The file is not in the old content
                 if self.site.isDownloadable(file_inner_path):
@@ -151,7 +152,7 @@ class ContentManager(object):
                     self.site.storage.delete(file_inner_path)
 
                     self.log.debug("Deleted file: %s" % file_inner_path)
-                except Exception, err:
+                except Exception as err:
                     self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))
 
         # Cleanup empty dirs
@@ -165,7 +166,7 @@ class ContentManager(object):
                     self.site.storage.deleteDir(root_inner_path)
                     # Remove from tree dict to reflect changed state
                     tree[os.path.dirname(root)][0].remove(os.path.basename(root))
-                except Exception, err:
+                except Exception as err:
                     self.log.debug("Error deleting empty directory %s: %s" % (root_inner_path, err))
 
         # Check archived
@@ -175,12 +176,12 @@ class ContentManager(object):
             self.log.debug("old archived: %s, new archived: %s" % (len(old_archived), len(new_archived)))
             archived_changed = {
                 key: date_archived
-                for key, date_archived in new_archived.iteritems()
+                for key, date_archived in new_archived.items()
                 if old_archived.get(key) != new_archived[key]
             }
             if archived_changed:
                 self.log.debug("Archived changed: %s" % archived_changed)
-                for archived_dirname, date_archived in archived_changed.iteritems():
+                for archived_dirname, date_archived in archived_changed.items():
                     archived_inner_path = content_inner_dir + archived_dirname + "/content.json"
                     if self.contents.get(archived_inner_path, {}).get("modified", 0) < date_archived:
                         self.removeContent(archived_inner_path)
@@ -204,7 +205,7 @@ class ContentManager(object):
 
             # Remove archived files from download queue
             num_removed_bad_files = 0
-            for bad_file in self.site.bad_files.keys():
+            for bad_file in list(self.site.bad_files.keys()):
                 if bad_file.endswith("content.json"):
                     del self.site.bad_files[bad_file]
                     num_removed_bad_files += 1
@@ -217,7 +218,7 @@ class ContentManager(object):
 
         # Load includes
         if load_includes and "includes" in new_content:
-            for relative_path, info in new_content["includes"].items():
+            for relative_path, info in list(new_content["includes"].items()):
                 include_inner_path = content_inner_dir + relative_path
                 if self.site.storage.isFile(include_inner_path):  # Content.json exists, load it
                     include_changed, include_deleted = self.loadContent(
@@ -255,7 +256,7 @@ class ContentManager(object):
                 self.has_optional_files = True
             # Update the content
             self.contents[content_inner_path] = new_content
-        except Exception, err:
+        except Exception as err:
             self.log.warning("%s parse error: %s" % (content_inner_path, Debug.formatException(err)))
             return [], []  # Content.json parse error
 
@@ -282,7 +283,7 @@ class ContentManager(object):
                 content.get("files", {}),
                 **content.get("files_optional", {})
             )
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Error loading %s for removeContent: %s" % (inner_path, Debug.formatException(err)))
             files = {}
         files["content.json"] = True
@@ -292,16 +293,16 @@ class ContentManager(object):
             try:
                 self.site.storage.delete(file_inner_path)
                 self.log.debug("Deleted file: %s" % file_inner_path)
-            except Exception, err:
+            except Exception as err:
                 self.log.debug("Error deleting file %s: %s" % (file_inner_path, err))
         try:
             self.site.storage.deleteDir(inner_dir)
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Error deleting dir %s: %s" % (inner_dir, err))
 
         try:
             del self.contents[inner_path]
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Error key from contents: %s" % inner_path)
 
     # Get total size of site
@@ -317,7 +318,7 @@ class ContentManager(object):
             return []
         back = [inner_path]
         content_inner_dir = helper.getDirname(inner_path)
-        for relative_path in self.contents[inner_path].get("includes", {}).keys():
+        for relative_path in list(self.contents[inner_path].get("includes", {}).keys()):
             include_inner_path = content_inner_dir + relative_path
             back += self.listContents(include_inner_path)
         return back
@@ -333,7 +334,7 @@ class ContentManager(object):
         file_info = self.getFileInfo(user_contents_inner_path)
         if file_info:
             time_archived_before = file_info.get("archived_before", 0)
-            time_directory_archived = file_info.get("archived", {}).get(relative_directory)
+            time_directory_archived = file_info.get("archived", {}).get(relative_directory, 0)
             if modified <= time_archived_before or modified <= time_directory_archived:
                 return True
             else:
@@ -493,11 +494,11 @@ class ContentManager(object):
         banned = False
         if "signers" in rules:
             rules["signers"] = rules["signers"][:]  # Make copy of the signers
-        for permission_pattern, permission_rules in user_contents["permission_rules"].items():  # Regexp rules
+        for permission_pattern, permission_rules in list(user_contents["permission_rules"].items()):  # Regexp rules
             if not SafeRe.match(permission_pattern, user_urn):
                 continue  # Rule is not valid for user
             # Update rules if its better than current recorded ones
-            for key, val in permission_rules.iteritems():
+            for key, val in permission_rules.items():
                 if key not in rules:
                     if type(val) is list:
                         rules[key] = val[:]  # Make copy
@@ -649,7 +650,7 @@ class ContentManager(object):
 
         if extend:
             # Add extend keys if not exists
-            for key, val in extend.items():
+            for key, val in list(extend.items()):
                 if not content.get(key):
                     content[key] = val
                     self.log.info("Extending content.json with: %s" % key)
@@ -664,14 +665,14 @@ class ContentManager(object):
         )
 
         if not remove_missing_optional:
-            for file_inner_path, file_details in content.get("files_optional", {}).iteritems():
+            for file_inner_path, file_details in content.get("files_optional", {}).items():
                 if file_inner_path not in files_optional_node:
                     files_optional_node[file_inner_path] = file_details
 
         # Find changed files
         files_merged = files_node.copy()
         files_merged.update(files_optional_node)
-        for file_relative_path, file_details in files_merged.iteritems():
+        for file_relative_path, file_details in files_merged.items():
             old_hash = content.get("files", {}).get(file_relative_path, {}).get("sha512")
             new_hash = files_merged[file_relative_path]["sha512"]
             if old_hash != new_hash:
@@ -795,19 +796,19 @@ class ContentManager(object):
         try:
             cert_subject = "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name)
             result = CryptBitcoin.verify(cert_subject, cert_address, content["cert_sign"])
-        except Exception, err:
+        except Exception as err:
             raise VerifyError("Certificate verify error: %s" % err)
         return result
 
     # Checks if the content.json content is valid
     # Return: True or False
     def verifyContent(self, inner_path, content):
-        content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in content["files"].values() if file["size"] >= 0])  # Size of new content
+        content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in list(content["files"].values()) if file["size"] >= 0])  # Size of new content
         # Calculate old content size
         old_content = self.contents.get(inner_path)
         if old_content:
-            old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in old_content.get("files", {}).values()])
-            old_content_size_optional = sum([file["size"] for file in old_content.get("files_optional", {}).values()])
+            old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in list(old_content.get("files", {}).values())])
+            old_content_size_optional = sum([file["size"] for file in list(old_content.get("files_optional", {}).values())])
         else:
             old_content_size = 0
             old_content_size_optional = 0
@@ -816,7 +817,7 @@ class ContentManager(object):
         if not old_content and inner_path == "content.json":
             self.site.settings["size"] = 0
 
-        content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values() if file["size"] >= 0])
+        content_size_optional = sum([file["size"] for file in list(content.get("files_optional", {}).values()) if file["size"] >= 0])
         site_size = self.site.settings["size"] - old_content_size + content_size  # Site size without old content plus the new
         site_size_optional = self.site.settings["size_optional"] - old_content_size_optional + content_size_optional  # Site size without old content plus the new
 
@@ -841,7 +842,7 @@ class ContentManager(object):
             raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit))
 
         # Verify valid filenames
-        for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
+        for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
             if not self.isValidRelativePath(file_relative_path):
                 raise VerifyError("Invalid relative path: %s" % file_relative_path)
 
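The list() calls around the two keys() views above are required, not stylistic: Python 3 dict views cannot be concatenated with "+". A minimal sketch:

    files = {"index.html": {}, "style.css": {}}
    files_optional = {"video.mp4": {}}
    all_paths = list(files.keys()) + list(files_optional.keys())
    assert sorted(all_paths) == ["index.html", "style.css", "video.mp4"]
    # files.keys() + files_optional.keys() raises TypeError in Python 3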
@@ -876,12 +877,12 @@ class ContentManager(object):
 
         # Filename limit
         if rules.get("files_allowed"):
-            for file_inner_path in content["files"].keys():
+            for file_inner_path in list(content["files"].keys()):
                 if not SafeRe.match("^%s$" % rules["files_allowed"], file_inner_path):
                     raise VerifyError("File not allowed: %s" % file_inner_path)
 
         if rules.get("files_allowed_optional"):
-            for file_inner_path in content.get("files_optional", {}).keys():
+            for file_inner_path in list(content.get("files_optional", {}).keys()):
                 if not SafeRe.match("^%s$" % rules["files_allowed_optional"], file_inner_path):
                     raise VerifyError("Optional file not allowed: %s" % file_inner_path)
 
@@ -964,7 +965,7 @@ class ContentManager(object):
             else:
                 raise VerifyError("Invalid old-style sign")
 
-        except Exception, err:
+        except Exception as err:
             self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err)))
             raise err
 
@@ -1 +1 @@
-from ContentManager import ContentManager
+from .ContentManager import ContentManager
 
@@ -13,10 +13,10 @@ def sha1sum(file, blocksize=65536):
 
 
 def sha512sum(file, blocksize=65536, format="hexdigest"):
-    if hasattr(file, "endswith"):  # Its a string open it
+    if type(file) is str:  # Filename specified
         file = open(file, "rb")
     hash = hashlib.sha512()
-    for block in iter(lambda: file.read(blocksize), ""):
+    for block in iter(lambda: file.read(blocksize), b""):
         hash.update(block)
 
     # Truncate to 256bits is good enough
@@ -31,7 +31,7 @@ def sha256sum(file, blocksize=65536):
     if hasattr(file, "endswith"):  # Its a string open it
         file = open(file, "rb")
     hash = hashlib.sha256()
-    for block in iter(lambda: file.read(blocksize), ""):
+    for block in iter(lambda: file.read(blocksize), b""):
         hash.update(block)
     return hash.hexdigest()
 
@@ -39,7 +39,7 @@ def sha256sum(file, blocksize=65536):
 def random(length=64, encoding="hex"):
     if encoding == "base64":  # Characters: A-Za-z0-9
         hash = hashlib.sha512(os.urandom(256)).digest()
-        return base64.standard_b64encode(hash).replace("+", "").replace("/", "").replace("=", "")[0:length]
+        return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length]
     else:  # Characters: a-f0-9 (faster)
         return hashlib.sha512(os.urandom(256)).hexdigest()[0:length]
 
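The b"" sentinel change above matters for correctness: a file opened in "rb" mode yields bytes and returns b"" at EOF, so with the old "" sentinel the two-argument iter() would never terminate, because b"" != "" in Python 3. A runnable sketch:

    import hashlib, io

    f = io.BytesIO(b"hello world")   # stands in for a file opened with open(path, "rb")
    h = hashlib.sha512()
    for block in iter(lambda: f.read(4), b""):  # stops when read() returns b""
        h.update(block)
    print(h.hexdigest()[:64])  # truncated, as the surrounding code does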
@@ -35,4 +35,4 @@ def privatekeyToPublickey(privatekey):
     return pub.save_pkcs1("DER")
 
 def publickeyToOnion(publickey):
-    return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower()
+    return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii")
 
src/Db/Db.py
@@ -7,7 +7,7 @@ import os
 import gevent
 
 from Debug import Debug
-from DbCursor import DbCursor
+from .DbCursor import DbCursor
 from Config import config
 from util import SafeRe
 from util import helper
@@ -149,8 +149,8 @@ class Db(object):
         if not self.db_keyvalues:  # Get db keyvalues
             try:
                 res = self.execute("SELECT * FROM keyvalue WHERE json_id=0")  # json_id = 0 is internal keyvalues
-            except sqlite3.OperationalError, err:  # Table not exist
-                self.log.debug("Query error: %s" % err)
+            except sqlite3.OperationalError as err:  # Table not exist
+                self.log.debug("Query table version error: %s" % err)
                 return False
 
             for row in res:
@@ -260,7 +260,7 @@ class Db(object):
                 data = json.load(helper.limitedGzipFile(fileobj=file))
             else:
                 data = json.load(file)
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Json file %s load error: %s" % (file_path, err))
             data = {}
 
@@ -274,7 +274,7 @@ class Db(object):
             commit_after_done = False
 
         # Row for current json file if required
-        if not data or filter(lambda dbmap: "to_keyvalue" in dbmap or "to_table" in dbmap, matched_maps):
+        if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]:
             json_row = cur.getJsonRow(relative_path)
 
         # Check matched mappings in schema
@@ -311,7 +311,7 @@ class Db(object):
                     changed = True
             if changed:
                 # Add the custom col values
-                data_json_row.update({key: val for key, val in data.iteritems() if key in dbmap["to_json_table"]})
+                data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]})
                 cur.execute("INSERT OR REPLACE INTO json ?", data_json_row)
 
         # Insert data to tables
@@ -333,7 +333,7 @@ class Db(object):
 
             # Fill import cols from table cols
             if not import_cols:
-                import_cols = set(map(lambda item: item[0], self.schema["tables"][table_name]["cols"]))
+                import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]])
 
             cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
 
@@ -341,7 +341,7 @@ class Db(object):
                 continue
 
             if key_col:  # Map as dict
-                for key, val in data[node].iteritems():
+                for key, val in data[node].items():
                     if val_col:  # Single value
                         cur.execute(
                             "INSERT OR REPLACE INTO %s ?" % table_name,
@@ -355,9 +355,9 @@ class Db(object):
                         row[key_col] = key
                         # Replace in value if necessary
                         if replaces:
-                            for replace_key, replace in replaces.iteritems():
+                            for replace_key, replace in replaces.items():
                                 if replace_key in row:
-                                    for replace_from, replace_to in replace.iteritems():
+                                    for replace_from, replace_to in replace.items():
                                         row[replace_key] = row[replace_key].replace(replace_from, replace_to)
 
                         row["json_id"] = json_row["json_id"]
@@ -402,7 +402,6 @@ if __name__ == "__main__":
         dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur)
-        # print ".",
         cur.logging = True
     cur.execute("COMMIT")
-    print "Done in %.3fs" % (time.time() - s)
+    print("Done in %.3fs" % (time.time() - s))
     for query, stats in sorted(dbjson.query_stats.items()):
-        print "-", query, stats
+        print("-", query, stats)
 
@@ -9,9 +9,9 @@ class DbQuery:
     # Split main parts of query
     def parseParts(self, query):
         parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query)
-        parts = filter(None, parts)  # Remove empty parts
-        parts = map(lambda s: s.strip(), parts)  # Remove whitespace
-        return dict(zip(parts[0::2], parts[1::2]))
+        parts = [_f for _f in parts if _f]  # Remove empty parts
+        parts = [s.strip() for s in parts]  # Remove whitespace
+        return dict(list(zip(parts[0::2], parts[1::2])))
 
     # Parse selected fields SELECT ... FROM
     def parseFields(self, query_select):
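The rewrite of parseParts() is needed because filter(), map() and zip() return lazy iterators in Python 3 and can no longer be indexed or sliced; 2to3 mechanically converts them to comprehensions. A runnable sketch of the same logic:

    import re

    parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", "SELECT * FROM json")
    parts = [_f for _f in parts if _f]          # was: filter(None, parts)
    parts = [s.strip() for s in parts]          # was: map(lambda s: s.strip(), parts)
    print(dict(list(zip(parts[0::2], parts[1::2]))))  # {'SELECT': '*', 'FROM': 'json'}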
@@ -1,3 +1,3 @@
-from Db import Db
-from DbQuery import DbQuery
-from DbCursor import DbCursor
+from .Db import Db
+from .DbQuery import DbQuery
+from .DbCursor import DbCursor
 
@@ -63,10 +63,10 @@ gevent.spawn(testBlock)
 
 if __name__ == "__main__":
     try:
-        print 1 / 0
-    except Exception, err:
-        print type(err).__name__
-        print "1/0 error: %s" % formatException(err)
+        print(1 / 0)
+    except Exception as err:
+        print(type(err).__name__)
+        print("1/0 error: %s" % formatException(err))
 
     def loadJson():
         json.loads("Errr")
@@ -74,13 +74,13 @@ if __name__ == "__main__":
     import json
     try:
         loadJson()
-    except Exception, err:
-        print err
-        print "Json load error: %s" % formatException(err)
+    except Exception as err:
+        print(err)
+        print("Json load error: %s" % formatException(err))
 
     try:
         raise Notify("nothing...")
-    except Exception, err:
-        print "Notify: %s" % formatException(err)
+    except Exception as err:
+        print("Notify: %s" % formatException(err))
 
     loadJson()
 
@@ -5,19 +5,20 @@ import gevent
 import gevent.hub
 
 from Config import config
+import importlib
 
 last_error = None
 
 def shutdown():
-    print "Shutting down..."
+    print("Shutting down...")
     if "file_server" in dir(sys.modules["main"]) and sys.modules["main"].file_server.running:
         try:
             if "file_server" in dir(sys.modules["main"]):
                 gevent.spawn(sys.modules["main"].file_server.stop)
             if "ui_server" in dir(sys.modules["main"]):
                 gevent.spawn(sys.modules["main"].ui_server.stop)
-        except Exception, err:
-            print "Proper shutdown error: %s" % err
+        except Exception as err:
+            print("Proper shutdown error: %s" % err)
             sys.exit(0)
     else:
         sys.exit(0)
@@ -67,7 +68,7 @@ else:
         sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
 
 gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
-reload(gevent)
+importlib.reload(gevent)
 
 def handleGreenletError(self, context, type, value, tb):
     if isinstance(value, str):
@@ -83,18 +84,18 @@ if __name__ == "__main__":
     import time
     from gevent import monkey
     monkey.patch_all(thread=False, ssl=False)
-    import Debug
+    from . import Debug
 
     def sleeper(num):
-        print "started", num
+        print("started", num)
         time.sleep(3)
         raise Exception("Error")
-        print "stopped", num
+        print("stopped", num)
     thread1 = gevent.spawn(sleeper, 1)
     thread2 = gevent.spawn(sleeper, 2)
     time.sleep(1)
-    print "killing..."
+    print("killing...")
     thread1.kill(exception=Debug.Notify("Worker stopped"))
     #thread2.throw(Debug.Notify("Throw"))
-    print "killed"
+    print("killed")
     gevent.joinall([thread1,thread2])
 
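The reload() builtin used above was removed in Python 3; the equivalent lives in importlib. A minimal sketch:

    import importlib
    import json

    importlib.reload(json)  # re-executes the module in place; same module object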
@@ -3,6 +3,7 @@ import subprocess
 import re
 import logging
 import time
+import functools
 
 from Config import config
 from util import helper
@@ -18,9 +19,9 @@ def findfiles(path, find_ext):
         elif f2 == "":
             return -1
         else:
-            return cmp(f1.lower(), f2.lower())
+            return helper.cmp(f1.lower(), f2.lower())
 
-    for root, dirs, files in sorted(os.walk(path, topdown=False), cmp=sorter):
+    for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)):
         for file in sorted(files):
             file_path = root + "/" + file
             file_ext = file.split(".")[-1]
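Python 3 removed both the cmp() builtin and the cmp= parameter of sorted(); an old-style three-way comparison function is adapted with functools.cmp_to_key(), as above (helper.cmp is assumed here to be a project-provided shim for the removed builtin). A runnable sketch:

    import functools

    def sorter(f1, f2):
        # old-style comparator: negative, zero or positive
        return (f1.lower() > f2.lower()) - (f1.lower() < f2.lower())

    print(sorted(["B.css", "a.css", "C.css"], key=functools.cmp_to_key(sorter)))
    # ['a.css', 'B.css', 'C.css']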
@@ -66,16 +67,16 @@ def merge(merged_path):
         return  # Assets not changed, nothing to do
 
     if os.path.isfile(merged_path):  # Find old parts to avoid unncessary recompile
-        merged_old = open(merged_path, "rb").read().decode("utf8")
+        merged_old = open(merged_path, "rb").read()
         old_parts = {}
-        for match in re.findall(r"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
-            old_parts[match[1]] = match[2].strip("\n\r")
+        for match in re.findall(rb"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
+            old_parts[match[1]] = match[2].strip(rb"\n\r")
 
     # Merge files
     parts = []
     s_total = time.time()
     for file_path in findfiles(merge_dir, find_ext):
-        parts.append("\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, ""))
+        parts.append(b"\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, "").encode("utf8"))
         if file_path.endswith(".coffee"):  # Compile coffee script
             if file_path in changed or file_path.replace(config.data_dir, "") not in old_parts:  # Only recompile if changed or its not compiled before
                 if config.coffeescript_compiler is None:
@@ -95,31 +96,31 @@ def merge(merged_path):
                 # Start compiling
                 s = time.time()
                 compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
-                out = compiler.stdout.read().decode("utf8")
+                out = compiler.stdout.read()
                 compiler.wait()
                 logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
 
                 # Check errors
-                if out and out.startswith("("):  # No error found
+                if out and out.startswith(b"("):  # No error found
                     parts.append(out)
                 else:  # Put error message in place of source code
                     error = out
                     logging.error("%s Compile error: %s" % (file_path, error))
                     parts.append(
-                        "alert('%s compile error: %s');" %
-                        (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n"))
+                        b"alert('%s compile error: %s');" %
+                        (file_path, re.escape(error).replace(b"\n", b"\\n").replace(r"\\n", r"\n"))
                     )
             else:  # Not changed use the old_part
                 parts.append(old_parts[file_path.replace(config.data_dir, "")])
         else:  # Add to parts
-            parts.append(open(file_path).read().decode("utf8"))
+            parts.append(open(file_path, "rb").read())
 
-    merged = u"\n".join(parts)
+    merged = b"\n".join(parts)
     if ext == "css":  # Vendor prefix css
         from lib.cssvendor import cssvendor
         merged = cssvendor.prefix(merged)
-    merged = merged.replace("\r", "")
-    open(merged_path, "wb").write(merged.encode("utf8"))
+    merged = merged.replace(b"\r", b"")
+    open(merged_path, "wb").write(merged)
     logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
 
@@ -1 +0,0 @@
-from DebugReloader import DebugReloader
 
@@ -118,7 +118,7 @@ class FileRequest(object):
 
         try:
             content = json.loads(params["body"])
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err))
             self.response({"error": "File invalid JSON"})
             self.connection.badAction(5)
@@ -131,7 +131,7 @@ class FileRequest(object):
         else:
             try:
                 valid = site.content_manager.verifyFile(inner_path, content)
-            except Exception, err:
+            except Exception as err:
                 self.log.debug("Update for %s is invalid: %s" % (inner_path, err))
                 valid = False
 
@@ -251,10 +251,10 @@ class FileRequest(object):
 
             return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]}
 
-        except RequestError, err:
+        except RequestError as err:
             self.log.debug("GetFile %s %s request error: %s" % (self.connection, params["inner_path"], Debug.formatException(err)))
             self.response({"error": "File read error: %s" % err})
-        except Exception, err:
+        except Exception as err:
             if config.verbose:
                 self.log.debug("GetFile read error: %s" % Debug.formatException(err))
             self.response({"error": "File read error"})
@@ -306,7 +306,7 @@ class FileRequest(object):
         if config.verbose:
             self.log.debug(
                 "Added %s peers to %s using pex, sending back %s" %
-                (added, site, {key: len(val) for key, val in packed_peers.iteritems()})
+                (added, site, {key: len(val) for key, val in packed_peers.items()})
             )
 
         back = {
@@ -353,7 +353,7 @@ class FileRequest(object):
         back = collections.defaultdict(lambda: collections.defaultdict(list))
         found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit)
 
-        for hash_id, peers in found.iteritems():
+        for hash_id, peers in found.items():
             for peer in peers:
                 ip_type = helper.getIpType(peer.ip)
                 if len(back[ip_type][hash_id]) < 20:
@@ -385,7 +385,7 @@ class FileRequest(object):
         if config.verbose:
             self.log.debug(
                 "Found: %s for %s hashids in %.3fs" %
-                ({key: len(val) for key, val in back.iteritems()}, len(params["hash_ids"]), time.time() - s)
+                ({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s)
             )
         self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes})
 
@@ -405,7 +405,7 @@ class FileRequest(object):
 
     # Send a simple Pong! answer
     def actionPing(self, params):
-        self.response("Pong!")
+        self.response(b"Pong!")
 
     # Check requested port of the other peer
     def actionCheckport(self, params):
 
@@ -10,7 +10,7 @@ from gevent.server import StreamServer
 import util
 from util import helper
 from Config import config
-from FileRequest import FileRequest
+from .FileRequest import FileRequest
 from Peer import PeerPortchecker
 from Site import SiteManager
 from Connection import ConnectionServer
@@ -41,7 +41,7 @@ class FileServer(ConnectionServer):
             port = config.tor_hs_port
             config.fileserver_port = port
         elif port == 0:  # Use random port
-            port_range_from, port_range_to = map(int, config.fileserver_port_range.split("-"))
+            port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-")))
             port = self.getRandomPort(ip, port_range_from, port_range_to)
             config.fileserver_port = port
             if not port:
@@ -59,7 +59,7 @@ class FileServer(ConnectionServer):
             self.stream_server_proxy = StreamServer(
                 ("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
             )
-        except Exception, err:
+        except Exception as err:
             self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err))
 
         self.port_opened = {}
@@ -117,7 +117,7 @@ class FileServer(ConnectionServer):
     def listenProxy(self):
         try:
             self.stream_server_proxy.serve_forever()
-        except Exception, err:
+        except Exception as err:
             if err.errno == 98:  # Address already in use error
                 self.log.debug("StreamServer proxy listen error: %s" % err)
             else:
@@ -231,7 +231,7 @@ class FileServer(ConnectionServer):
         if not self.port_opened or force_port_check:  # Test and open port if not tested yet
             if len(self.sites) <= 2:  # Don't wait port opening on first startup
                 sites_checking = True
-                for address, site in self.sites.items():
+                for address, site in list(self.sites.items()):
                     gevent.spawn(self.checkSite, site, check_files)
 
            self.portCheck()
@@ -242,7 +242,7 @@ class FileServer(ConnectionServer):
         if not sites_checking:
             check_pool = gevent.pool.Pool(5)
             # Check sites integrity
-            for site in sorted(self.sites.values(), key=lambda site: site.settings.get("modified", 0), reverse=True):
+            for site in sorted(list(self.sites.values()), key=lambda site: site.settings.get("modified", 0), reverse=True):
                 if not site.settings["serving"]:
                     continue
                 check_thread = check_pool.spawn(self.checkSite, site, check_files)  # Check in new thread
@@ -263,7 +263,7 @@ class FileServer(ConnectionServer):
             (len(self.connections), self.has_internet, len(peers_protected))
         )
 
-        for address, site in self.sites.items():
+        for address, site in list(self.sites.items()):
             if not site.settings["serving"]:
                 continue
 
@@ -273,7 +273,7 @@ class FileServer(ConnectionServer):
             time.sleep(1)  # Prevent too quick request
 
         peers_protected = set([])
-        for address, site in self.sites.items():
+        for address, site in list(self.sites.items()):
             if not site.settings["serving"]:
                 continue
 
@@ -313,7 +313,7 @@ class FileServer(ConnectionServer):
         while 1:
             config.loadTrackersFile()
            s = time.time()
-            for address, site in self.sites.items():
+            for address, site in list(self.sites.items()):
                 if not site.settings["serving"]:
                     continue
                 gevent.spawn(self.announceSite, site).join(timeout=10)
 
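The repeated list(self.sites.items()) pattern above is defensive: in Python 3, items(), keys() and values() return live views, and if another greenlet adds or removes a site while the loop runs, iteration raises RuntimeError ("dictionary changed size during iteration"). A minimal sketch:

    sites = {"1abc": "site1", "1def": "site2"}
    for address, site in list(sites.items()):  # snapshot, safe to mutate below
        sites.pop(address)
    print(sites)  # {}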
@@ -1,2 +1,2 @@
-from FileServer import FileServer
-from FileRequest import FileRequest
+from .FileServer import FileServer
+from .FileRequest import FileRequest
 
@@ -6,11 +6,11 @@ import collections
 
 import gevent
 
-from cStringIO import StringIO
+import io
 from Debug import Debug
 from Config import config
 from util import helper
-from PeerHashfield import PeerHashfield
+from .PeerHashfield import PeerHashfield
 from Plugin import PluginManager
 
 if config.use_tempfiles:
@@ -95,7 +95,7 @@ class Peer(object):
             self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection)
             self.reputation += 1
             self.connection.sites += 1
-        except Exception, err:
+        except Exception as err:
             self.onConnectionError("Getting connection error")
             self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
                      (Debug.formatException(err), self.connection_error, self.hash_failed))
@@ -164,7 +164,7 @@ class Peer(object):
                     return res
                 else:
                     raise Exception("Invalid response: %s" % res)
-            except Exception, err:
+            except Exception as err:
                 if type(err).__name__ == "Notify":  # Greenlet killed by worker
                     self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd))
                     break
@@ -195,7 +195,7 @@ class Peer(object):
         if config.use_tempfiles:
             buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b')
         else:
-            buff = StringIO()
+            buff = io.BytesIO()
 
         s = time.time()
         while True:  # Read in smaller parts
@@ -240,7 +240,7 @@ class Peer(object):
             with gevent.Timeout(10.0, False):  # 10 sec timeout, don't raise exception
                 res = self.request("ping")
 
-                if res and "body" in res and res["body"] == "Pong!":
+                if res and "body" in res and res["body"] == b"Pong!":
                     response_time = time.time() - s
                     break  # All fine, exit from for loop
             # Timeout reached or bad response
@@ -267,12 +267,9 @@ class Peer(object):
             request["peers_onion"] = packed_peers["onion"]
         if packed_peers["ipv6"]:
             request["peers_ipv6"] = packed_peers["ipv6"]
-
         res = self.request("pex", request)
-
         if not res or "error" in res:
             return False
-
         added = 0
 
         # Remove unsupported peer types
@@ -331,13 +328,13 @@ class Peer(object):
                 key = "peers"
             else:
                 key = "peers_%s" % ip_type
-            for hash, peers in res.get(key, {}).items()[0:30]:
+            for hash, peers in list(res.get(key, {}).items())[0:30]:
                 if ip_type == "onion":
                     unpacker_func = helper.unpackOnionAddress
                 else:
                     unpacker_func = helper.unpackAddress
 
-                back[hash] += map(unpacker_func, peers)
+                back[hash] += list(map(unpacker_func, peers))
 
         for hash in res.get("my", []):
             back[hash].append((self.connection.ip, self.connection.port))
 
@@ -68,8 +68,8 @@ if __name__ == "__main__":
     s = time.time()
     for i in range(10000):
         field.appendHashId(i)
-    print time.time()-s
+    print(time.time()-s)
     s = time.time()
     for i in range(10000):
         field.hasHash("AABB")
-    print time.time()-s
+    print(time.time()-s)
@@ -1,6 +1,6 @@
 import logging
-import urllib
-import urllib2
+import urllib.request
+import urllib.parse
 import re
 import time
 
@@ -16,10 +16,10 @@ class PeerPortchecker(object):
 
     def requestUrl(self, url, post_data=None):
         if type(post_data) is dict:
-            post_data = urllib.urlencode(post_data)
-        req = urllib2.Request(url, post_data)
+            post_data = urllib.parse.urlencode(post_data).encode("utf8")
+        req = urllib.request.Request(url, post_data)
         req.add_header('Referer', url)
-        return urllib2.urlopen(req, timeout=20.0)
+        return urllib.request.urlopen(req, timeout=20.0)
 
     def portOpen(self, port):
         self.log.info("Trying to open port using UpnpPunch...")
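In Python 3, urllib and urllib2 were merged into the urllib package: urlencode() moved to urllib.parse, Request and urlopen to urllib.request, and POST bodies must be bytes, hence the .encode("utf8") above. A sketch of the converted call (the URL is a placeholder):

    import urllib.request
    import urllib.parse

    post_data = urllib.parse.urlencode({"port": 15441}).encode("utf8")
    req = urllib.request.Request("http://example.com/check", post_data)
    req.add_header("Referer", "http://example.com/check")
    # urllib.request.urlopen(req, timeout=20.0)  # actual network call, not run here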
@@ -67,7 +67,7 @@ class PeerPortchecker(object):
         return res
 
     def checkCanyouseeme(self, port):
-        data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
+        data = urllib.request.urlopen("http://www.canyouseeme.org/", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8")
         message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
         message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " "))  # Strip http tags
 
@@ -85,7 +85,7 @@ class PeerPortchecker(object):
             raise Exception("Invalid response: %s" % message)
 
     def checkPortchecker(self, port):
-        data = urllib2.urlopen("https://portchecker.co/check", "port=%s" % port, timeout=20.0).read()
+        data = urllib.request.urlopen("https://portchecker.co/check", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8")
         message = re.match('.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1)
         message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip())  # Strip http tags
 
@@ -109,7 +109,6 @@ class PeerPortchecker(object):
 
         ip = re.match('.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1)
         token = re.match('.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1)
-        print ip
 
         post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."}
         data = self.requestUrl(url, post_data).read()
@@ -168,4 +167,4 @@ if __name__ == "__main__":
     peer_portchecker = PeerPortchecker()
     for func_name in ["checkIpv6scanner", "checkMyaddr", "checkPortchecker", "checkCanyouseeme"]:
         s = time.time()
-        print(func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s))
+        print((func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s)))
 
@@ -1,2 +1,2 @@
-from Peer import Peer
-from PeerHashfield import PeerHashfield
+from .Peer import Peer
+from .PeerHashfield import PeerHashfield
 
@@ -7,6 +7,7 @@ from collections import defaultdict
 
 from Debug import Debug
 from Config import config
+import importlib
 
 
 class PluginManager:
@@ -48,7 +49,7 @@ class PluginManager:
             self.log.debug("Loading plugin: %s" % dir_name)
             try:
                 __import__(dir_name)
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err)))
             if dir_name not in self.plugin_names:
                 self.plugin_names.append(dir_name)
@@ -62,19 +63,19 @@ class PluginManager:
         self.after_load = []
         self.plugins_before = self.plugins
         self.plugins = defaultdict(list)  # Reset registered plugins
-        for module_name, module in sys.modules.items():
-            if module and "__file__" in dir(module) and self.plugin_path in module.__file__:  # Module file within plugin_path
+        for module_name, module in list(sys.modules.items()):
+            if module and getattr(module, "__file__", None) and self.plugin_path in module.__file__:  # Module file in plugin_path
                 if "allow_reload" in dir(module) and not module.allow_reload:  # Reload disabled
                     # Re-add non-reloadable plugins
-                    for class_name, classes in self.plugins_before.iteritems():
+                    for class_name, classes in self.plugins_before.items():
                         for c in classes:
                             if c.__module__ != module.__name__:
                                 continue
                             self.plugins[class_name].append(c)
                 else:
                     try:
-                        reload(module)
-                    except Exception, err:
+                        importlib.reload(module)
+                    except Exception as err:
                         self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err)))
 
         self.loadPlugins()  # Load new plugins
@@ -82,7 +83,7 @@ class PluginManager:
         # Change current classes in memory
         import gc
         patched = {}
-        for class_name, classes in self.plugins.iteritems():
+        for class_name, classes in self.plugins.items():
             classes = classes[:]  # Copy the current plugins
             classes.reverse()
             base_class = self.pluggable[class_name]  # Original class
@@ -96,8 +97,8 @@ class PluginManager:
 
         # Change classes in modules
         patched = {}
-        for class_name, classes in self.plugins.iteritems():
-            for module_name, module in sys.modules.iteritems():
+        for class_name, classes in self.plugins.items():
+            for module_name, module in list(sys.modules.items()):
                 if class_name in dir(module):
                     if "__class__" not in dir(getattr(module, class_name)):  # Not a class
                         continue
@@ -134,7 +135,7 @@ def acceptPlugins(base_class):
             if str(key) in plugin_manager.subclass_order[class_name]
            else 9999
         )
-        plugin_manager.subclass_order[class_name] = map(str, classes)
+        plugin_manager.subclass_order[class_name] = list(map(str, classes))
 
         classes.reverse()
         classes.append(base_class)  # Add the class itself to end of inherience line
@@ -181,4 +182,4 @@ if __name__ == "__main__":
         else:
             return "Can't route to", path
 
-    print Request().route("MainPage")
+    print(Request().route("MainPage"))
 
@@ -7,6 +7,7 @@ import random
 import sys
 import hashlib
 import collections
+import base64
 
 import gevent
 import gevent.pool
@@ -17,14 +18,14 @@ from Peer import Peer
 from Worker import WorkerManager
 from Debug import Debug
 from Content import ContentManager
-from SiteStorage import SiteStorage
+from .SiteStorage import SiteStorage
 from Crypt import CryptHash
 from util import helper
 from util import Diff
 from Plugin import PluginManager
 from File import FileServer
-from SiteAnnouncer import SiteAnnouncer
-import SiteManager
+from .SiteAnnouncer import SiteAnnouncer
+from . import SiteManager
 
 
 @PluginManager.acceptPlugins
@@ -32,7 +33,8 @@ class Site(object):
 
     def __init__(self, address, allow_create=True, settings=None):
         self.address = str(re.sub("[^A-Za-z0-9]", "", address))  # Make sure its correct address
-        self.address_hash = hashlib.sha256(self.address).digest()
+        self.address_hash = hashlib.sha256(self.address.encode("ascii")).digest()
+        self.address_sha1 = hashlib.sha1(self.address.encode("ascii")).digest()
         self.address_short = "%s..%s" % (self.address[:6], self.address[-4:])  # Short address for logging
         self.log = logging.getLogger("Site:%s" % self.address_short)
         self.addEventListeners()
@@ -127,7 +129,7 @@ class Site(object):
     def getSettingsCache(self):
         back = {}
         back["bad_files"] = self.bad_files
-        back["hashfield"] = self.content_manager.hashfield.tostring().encode("base64")
+        back["hashfield"] = base64.b64encode(self.content_manager.hashfield.tobytes()).decode("ascii")
         return back
 
     # Max site size in MB
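The getSettingsCache() change above reflects Python 3 dropping the "base64" str codec: encoding now goes through the base64 module, returns bytes, and needs a .decode("ascii") to become a JSON-safe string (the matching decode path appears in ContentManager's hashfield loading). A runnable sketch:

    import base64

    hashfield_bytes = bytes([1, 2, 3, 255])
    cached = base64.b64encode(hashfield_bytes).decode("ascii")  # 'AQID/w=='
    assert base64.b64decode(cached) == hashfield_bytes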
@@ -173,7 +175,7 @@ class Site(object):
         # Start download files
         file_threads = []
         if download_files:
-            for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
+            for file_relative_path in list(self.content_manager.contents[inner_path].get("files", {}).keys()):
                 file_inner_path = content_inner_dir + file_relative_path
 
                 # Try to diff first
@@ -204,7 +206,7 @@ class Site(object):
                             "Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" %
                             (file_inner_path, time_diff, time_verify, time_write, time_on_done)
                         )
-                    except Exception, err:
+                    except Exception as err:
                         self.log.debug("Failed to patch %s: %s" % (file_inner_path, err))
                         diff_success = False
 
@@ -218,7 +220,7 @@ class Site(object):
         if inner_path == "content.json":
             gevent.spawn(self.updateHashfield)
 
-        for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys():
+        for file_relative_path in list(self.content_manager.contents[inner_path].get("files_optional", {}).keys()):
             file_inner_path = content_inner_dir + file_relative_path
             if file_inner_path not in changed and not self.bad_files.get(file_inner_path):
                 continue
@@ -233,7 +235,7 @@ class Site(object):
 
         # Wait for includes download
         include_threads = []
-        for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
+        for file_relative_path in list(self.content_manager.contents[inner_path].get("includes", {}).keys()):
             file_inner_path = content_inner_dir + file_relative_path
             include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
             include_threads.append(include_thread)
@@ -262,7 +264,7 @@ class Site(object):
     def getReachableBadFiles(self):
         if not self.bad_files:
             return False
-        return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3]
+        return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3]
 
     # Retry download bad files
     def retryBadFiles(self, force=False):
@@ -272,7 +274,7 @@ class Site(object):
         content_inner_paths = []
         file_inner_paths = []
 
-        for bad_file, tries in self.bad_files.items():
+        for bad_file, tries in list(self.bad_files.items()):
             if force or random.randint(0, min(40, tries)) < 4:  # Larger number tries = less likely to check every 15min
                 if bad_file.endswith("content.json"):
                     content_inner_paths.append(bad_file)
@@ -286,7 +288,7 @@ class Site(object):
             self.pooledDownloadFile(file_inner_paths, only_if_bad=True)
 
     def checkBadFiles(self):
-        for bad_file in self.bad_files.keys():
+        for bad_file in list(self.bad_files.keys()):
            file_info = self.content_manager.getFileInfo(bad_file)
             if bad_file.endswith("content.json"):
                 if file_info is False and bad_file != "content.json":
@@ -374,7 +376,7 @@ class Site(object):
         queried.append(peer)
         modified_contents = []
         my_modified = self.content_manager.listModified(since)
-        for inner_path, modified in res["modified_files"].iteritems():  # Check if the peer has newer files than we
+        for inner_path, modified in res["modified_files"].items():  # Check if the peer has newer files than we
             has_newer = int(modified) > my_modified.get(inner_path, 0)
             has_older = int(modified) < my_modified.get(inner_path, 0)
             if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified):
@@ -480,7 +482,7 @@ class Site(object):
     def redownloadContents(self):
         # Download all content.json again
         content_threads = []
-        for inner_path in self.content_manager.contents.keys():
+        for inner_path in list(self.content_manager.contents.keys()):
            content_threads.append(self.needFile(inner_path, update=True, blocking=False))
 
         self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
@@ -523,7 +525,7 @@ class Site(object):
                 })
                 if result:
                     break
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Publish error: %s" % Debug.formatException(err))
                 result = {"exception": Debug.formatException(err)}
 
@@ -563,7 +565,7 @@ class Site(object):
         peers = set(peers)
 
         self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." % (
-            inner_path, limit, len(self.peers), num_connected_peers, diffs.keys(), float(len(str(diffs))) / 1024
+            inner_path, limit, len(self.peers), num_connected_peers, list(diffs.keys()), float(len(str(diffs))) / 1024
        ))
 
         if not peers:
@@ -631,8 +633,8 @@ class Site(object):
         )
 
         # Copy files
-        for content_inner_path, content in self.content_manager.contents.items():
-            file_relative_paths = content.get("files", {}).keys()
+        for content_inner_path, content in list(self.content_manager.contents.items()):
+            file_relative_paths = list(content.get("files", {}).keys())
 
             # Sign content.json at the end to make sure every file is included
             file_relative_paths.sort()
@@ -812,7 +814,7 @@ class Site(object):
         self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers)))
 
         if connected < need:  # Need more than we have
-            for peer in self.peers.values():
+            for peer in list(self.peers.values()):
                 if not peer.connection or not peer.connection.connected:  # No peer connection or disconnected
                     peer.pex()  # Initiate peer exchange
                     if peer.connection and peer.connection.connected:
@@ -831,7 +833,7 @@ class Site(object):
 
     # Return: Probably peers verified to be connectable recently
     def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True):
-        peers = self.peers.values()
+        peers = list(self.peers.values())
         found = []
         for peer in peers:
             if peer.key.endswith(":0"):
@@ -874,7 +876,7 @@ class Site(object):
         # Add random peers
         need_more = need_num - len(found)
         found_more = sorted(
-            self.peers.values()[0:need_more * 50],
+            list(self.peers.values())[0:need_more * 50],
             key=lambda peer: peer.reputation,
             reverse=True
         )[0:need_more * 2]
@@ -906,7 +908,7 @@ class Site(object):
 
     # Cleanup probably dead peers and close connection if too much
     def cleanupPeers(self, peers_protected=[]):
-        peers = self.peers.values()
+        peers = list(self.peers.values())
         if len(peers) > 20:
             # Cleanup old peers
             removed = 0
@@ -1019,7 +1021,7 @@
@ -1019,7 +1021,7 @@ class Site(object):
|
|||
# Send site status update to websocket clients
|
||||
def updateWebsocket(self, **kwargs):
|
||||
if kwargs:
|
||||
param = {"event": kwargs.items()[0]}
|
||||
param = {"event": list(kwargs.items())[0]}
|
||||
else:
|
||||
param = None
|
||||
for ws in self.websockets:
|
||||
|
|
|
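Note on the recurring list(...) wrappers above: in Python 3, dict.keys(), dict.values() and dict.items() return live view objects instead of lists, so a view cannot be indexed (kwargs.items()[0]) and mutating the dict while iterating a view raises RuntimeError. A minimal sketch of the pattern, with a hypothetical sites dict that is not from this commit:

    sites = {"1abc": 1, "1def": 2}
    for address in list(sites.keys()):  # snapshot first; deleting while iterating the live view raises RuntimeError
        if sites[address] == 1:
            del sites[address]
    first_event = list({"a": 1}.items())[0]  # views are not indexable, lists are
    assert sites == {"1def": 2} and first_event == ("a", 1)
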
@@ -1,17 +1,16 @@
 import random
 import time
 import hashlib
-import urllib
-import urllib2
+import urllib.request
 import struct
 import socket
 import re
 import collections

-from lib import bencode
+import bencode
 from lib.subtl.subtl import UdpTrackerClient
-from lib.PySocks import socks
-from lib.PySocks import sockshandler
+import socks
+import sockshandler
 import gevent

 from Plugin import PluginManager
@@ -69,7 +68,7 @@ class SiteAnnouncer(object):
 back = []
 # Type of addresses they can reach me
 if config.trackers_proxy == "disable":
-for ip_type, opened in self.site.connection_server.port_opened.items():
+for ip_type, opened in list(self.site.connection_server.port_opened.items()):
 if opened:
 back.append(ip_type)
 if self.site.connection_server.tor_manager.start_onions:
@@ -221,7 +220,7 @@ class SiteAnnouncer(object):
 if error:
 self.stats[tracker]["status"] = "error"
 self.stats[tracker]["time_status"] = time.time()
-self.stats[tracker]["last_error"] = str(err).decode("utf8", "ignore")
+self.stats[tracker]["last_error"] = str(error)
 self.stats[tracker]["time_last_error"] = time.time()
 self.stats[tracker]["num_error"] += 1
 self.stats[tracker]["num_request"] += 1
@@ -359,9 +358,9 @@ class SiteAnnouncer(object):
 try:
 peer_data = bencode.decode(response)["peers"]
 response = None
-peer_count = len(peer_data) / 6
+peer_count = int(len(peer_data) / 6)
 peers = []
-for peer_offset in xrange(peer_count):
+for peer_offset in range(peer_count):
 off = 6 * peer_offset
 peer = peer_data[off:off + 6]
 addr, port = struct.unpack('!LH', peer)
@@ -379,7 +378,7 @@ class SiteAnnouncer(object):
 peers = self.site.getConnectedPeers()

 if len(peers) == 0: # Small number of connected peers for this site, connect to any
-peers = self.site.peers.values()
+peers = list(self.site.peers.values())
 need_num = 10

 random.shuffle(peers)
@@ -399,7 +398,7 @@ class SiteAnnouncer(object):

 def updateWebsocket(self, **kwargs):
 if kwargs:
-param = {"event": kwargs.items()[0]}
+param = {"event": list(kwargs.items())[0]}
 else:
 param = None

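Note on int(len(peer_data) / 6) and range above: Python 3 made / true division (it returns a float even for two ints) and removed xrange; range is now the lazy variant. A small sketch with made-up tracker data:

    peer_data = b"\x7f\x00\x00\x01\x06\x07" * 3   # three 6-byte compact peer entries (hypothetical values)
    assert len(peer_data) / 6 == 3.0               # float in Python 3
    peer_count = int(len(peer_data) / 6)           # or len(peer_data) // 6 for floor division
    for peer_offset in range(peer_count):          # xrange() no longer exists
        pass
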
@@ -28,11 +28,11 @@ class SiteManager(object):
 def load(self, cleanup=True, startup=False):
 self.log.debug("Loading sites...")
 self.loaded = False
-from Site import Site
+from .Site import Site
 address_found = []
 added = 0
 # Load new adresses
-for address, settings in json.load(open("%s/sites.json" % config.data_dir)).iteritems():
+for address, settings in json.load(open("%s/sites.json" % config.data_dir)).items():
 if address not in self.sites:
 if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
 # Root content.json exists, try load site
@@ -40,7 +40,7 @@ class SiteManager(object):
 try:
 site = Site(address, settings=settings)
 site.content_manager.contents.get("content.json")
-except Exception, err:
+except Exception as err:
 self.log.debug("Error loading site %s: %s" % (address, err))
 continue
 self.sites[address] = site
@@ -56,7 +56,7 @@ class SiteManager(object):

 # Remove deleted adresses
 if cleanup:
-for address in self.sites.keys():
+for address in list(self.sites.keys()):
 if address not in address_found:
 del(self.sites[address])
 self.log.debug("Removed site: %s" % address)
@@ -93,7 +93,7 @@ class SiteManager(object):
 data = {}
 # Generate data file
 s = time.time()
-for address, site in self.list().iteritems():
+for address, site in self.list().items():
 if recalculate_size:
 site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site size
 data[address] = site.settings
@@ -108,7 +108,7 @@ class SiteManager(object):
 time_write = time.time() - s

 # Remove cache from site settings
-for address, site in self.list().iteritems():
+for address, site in self.list().items():
 site.settings["cache"] = {}

 self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write))
@@ -134,12 +134,12 @@ class SiteManager(object):

 # Return or create site and start download site files
 def need(self, address, all_file=True, settings=None):
-from Site import Site
+from .Site import Site
 site = self.get(address)
 if not site: # Site not exist yet
 self.sites_changed = int(time.time())
 # Try to find site with differect case
-for recover_address, recover_site in self.sites.items():
+for recover_address, recover_site in list(self.sites.items()):
 if recover_address.lower() == address.lower():
 return recover_site

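Note on "from Site import Site" becoming "from .Site import Site": Python 3 dropped implicit relative imports (PEP 328), so a module inside a package must name the package explicitly with a leading dot. A throwaway, runnable sketch; the package layout here is illustrative, not the real ZeroNet tree:

    import os
    import sys
    import tempfile

    root = tempfile.mkdtemp()
    pkg = os.path.join(root, "Site")
    os.mkdir(pkg)
    open(os.path.join(pkg, "Site.py"), "w").write("class Site:\n    pass\n")
    open(os.path.join(pkg, "__init__.py"), "w").write("from .Site import Site\n")  # explicit relative import
    sys.path.insert(0, root)
    from Site import Site  # resolved through the package's re-export
    print(Site)
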
@@ -23,7 +23,7 @@ from Translate import translate as _
 class SiteStorage(object):
 def __init__(self, site, allow_create=True):
 self.site = site
-self.directory = u"%s/%s" % (config.data_dir, self.site.address) # Site data diretory
+self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
 self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir
 self.log = site.log
 self.db = None # Db class
@@ -59,7 +59,7 @@ class SiteStorage(object):
 def getDbSchema(self):
 try:
 schema = self.loadJson("dbschema.json")
-except Exception, err:
+except Exception as err:
 raise Exception("dbschema.json is not a valid JSON: %s" % err)
 return schema

@@ -92,7 +92,7 @@ class SiteStorage(object):
 # Return possible db files for the site
 def getDbFiles(self):
 found = 0
-for content_inner_path, content in self.site.content_manager.contents.iteritems():
+for content_inner_path, content in self.site.content_manager.contents.items():
 # content.json file itself
 if self.isFile(content_inner_path):
 yield content_inner_path, self.getPath(content_inner_path)
@@ -100,7 +100,7 @@ class SiteStorage(object):
 self.log.error("[MISSING] %s" % content_inner_path)
 # Data files in content.json
 content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
-for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
+for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
 if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"):
 continue # We only interesed in json files
 file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
@@ -181,7 +181,7 @@ class SiteStorage(object):
 self.event_db_busy.get() # Wait for event
 try:
 res = self.getDb().execute(query, params)
-except sqlite3.DatabaseError, err:
+except sqlite3.DatabaseError as err:
 if err.__class__.__name__ == "DatabaseError":
 self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
 self.rebuildDb()
@@ -240,7 +240,7 @@ class SiteStorage(object):
 os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after))
 err = None
 break
-except Exception, err:
+except Exception as err:
 self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry))
 time.sleep(0.1 + retry)
 if err:
@@ -297,7 +297,7 @@ class SiteStorage(object):
 self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file))
 try:
 self.updateDbFile(inner_path, file)
-except Exception, err:
+except Exception as err:
 self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
 self.closeDb()

@@ -363,9 +363,9 @@ class SiteStorage(object):
 return self.directory

 if ".." in inner_path:
-raise Exception(u"File not allowed: %s" % inner_path)
+raise Exception("File not allowed: %s" % inner_path)

-return u"%s/%s" % (self.directory, inner_path)
+return "%s/%s" % (self.directory, inner_path)

 # Get site dir relative path
 def getInnerPath(self, path):
@@ -375,7 +375,7 @@ class SiteStorage(object):
 if path.startswith(self.directory):
 inner_path = path[len(self.directory) + 1:]
 else:
-raise Exception(u"File not allowed: %s" % path)
+raise Exception("File not allowed: %s" % path)
 return inner_path

 # Verify all files sha512sum using content.json
@@ -390,7 +390,7 @@ class SiteStorage(object):
 self.log.debug("VerifyFile content.json not exists")
 self.site.needFile("content.json", update=True) # Force update to fix corrupt file
 self.site.content_manager.loadContent() # Reload content.json
-for content_inner_path, content in self.site.content_manager.contents.items():
+for content_inner_path, content in list(self.site.content_manager.contents.items()):
 back["num_content"] += 1
 i += 1
 if i % 50 == 0:
@@ -400,7 +400,7 @@ class SiteStorage(object):
 self.log.debug("[MISSING] %s" % content_inner_path)
 bad_files.append(content_inner_path)

-for file_relative_path in content.get("files", {}).keys():
+for file_relative_path in list(content.get("files", {}).keys()):
 back["num_file"] += 1
 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
 file_inner_path = file_inner_path.strip("/") # Strip leading /
@@ -418,7 +418,7 @@ class SiteStorage(object):
 else:
 try:
 ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
-except Exception, err:
+except Exception as err:
 ok = False

 if not ok:
@@ -430,7 +430,7 @@ class SiteStorage(object):
 # Optional files
 optional_added = 0
 optional_removed = 0
-for file_relative_path in content.get("files_optional", {}).keys():
+for file_relative_path in list(content.get("files_optional", {}).keys()):
 back["num_optional"] += 1
 file_node = content["files_optional"][file_relative_path]
 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
@@ -451,7 +451,7 @@ class SiteStorage(object):
 else:
 try:
 ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
-except Exception, err:
+except Exception as err:
 ok = False

 if ok:
@@ -475,7 +475,7 @@ class SiteStorage(object):
 )

 self.site.content_manager.contents.db.processDelayed()
-time.sleep(0.0001) # Context switch to avoid gevent hangs
+time.sleep(0.001) # Context switch to avoid gevent hangs
 return back

 # Check and try to fix site files integrity
@@ -497,15 +497,15 @@ class SiteStorage(object):
 def deleteFiles(self):
 self.log.debug("Deleting files from content.json...")
 files = [] # Get filenames
-for content_inner_path in self.site.content_manager.contents.keys():
+for content_inner_path in list(self.site.content_manager.contents.keys()):
 content = self.site.content_manager.contents.get(content_inner_path, {})
 files.append(content_inner_path)
 # Add normal files
-for file_relative_path in content.get("files", {}).keys():
+for file_relative_path in list(content.get("files", {}).keys()):
 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
 files.append(file_inner_path)
 # Add optional files
-for file_relative_path in content.get("files_optional", {}).keys():
+for file_relative_path in list(content.get("files_optional", {}).keys()):
 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
 files.append(file_inner_path)

@@ -518,7 +518,7 @@ class SiteStorage(object):
 db_path = self.getPath(schema["db_file"])
 if os.path.isfile(db_path):
 os.unlink(db_path)
-except Exception, err:
+except Exception as err:
 self.log.error("Db file delete error: %s" % err)

 for inner_path in files:
@@ -528,8 +528,8 @@ class SiteStorage(object):
 try:
 os.unlink(path)
 break
-except Exception, err:
-self.log.error(u"Error removing %s: %s, try #%s" % (inner_path, err, retry))
+except Exception as err:
+self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry))
 time.sleep(float(retry) / 10)
 self.onUpdated(inner_path, False)

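Note on the recurring "except Exception, err:" fixes throughout this commit: the comma form is a SyntaxError in Python 3; only the "as" form remains, and the bound name is deleted when the block ends. A minimal sketch:

    try:
        raise ValueError("boom")
    except Exception as err:
        print("caught:", err)
    # "err" is unbound here in Python 3; copy it inside the block if needed later
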
@@ -1,3 +1,3 @@
-from Site import Site
-from SiteStorage import SiteStorage
-from SiteAnnouncer import SiteAnnouncer
+from .Site import Site
+from .SiteStorage import SiteStorage
+from .SiteAnnouncer import SiteAnnouncer

@@ -8,7 +8,7 @@ import socket
 import ssl
 sys.path.append(os.path.abspath("..")) # Imports relative to src dir

-import cStringIO as StringIO
+import io as StringIO
 import gevent

 from gevent.server import StreamServer
@@ -46,8 +46,8 @@ def handle(sock_raw, addr):
 )
 else:
 sock.sendall(data)
-except Exception, err:
-print err
+except Exception as err:
+print(err)
 try:
 sock.shutdown(gevent.socket.SHUT_WR)
 sock.close()
@@ -102,7 +102,7 @@ def getData():
 total_num += 1
 total_bytes += buff.tell()
 if not data:
-print "No data"
+print("No data")

 sock.shutdown(gevent.socket.SHUT_WR)
 sock.close()
@@ -119,8 +119,8 @@ def info():
 else:
 memory_info = process.get_memory_info
 while 1:
-print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s,
-print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)
+print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ')
+print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20))
 time.sleep(1)

 gevent.spawn(info)
@@ -132,7 +132,7 @@ for test in range(1):
 gevent.joinall(clients)


-print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s
+print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s)

 # Separate client/server process:
 # 10*10*100:

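Note on the print conversions above: Python 3's print() is a function, and the Python 2 trailing comma (which suppressed the newline) becomes the end=' ' keyword argument. A small sketch with made-up counters:

    total_num, total_bytes = 10, 2048  # hypothetical values
    print(total_num, "req", total_bytes / 1024, "kbytes", end=' ')  # stays on one line
    print("done")
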
@@ -1,7 +1,7 @@
 class Spy:
 def __init__(self, obj, func_name):
 self.obj = obj
-self.func_name = func_name
+self.__name__ = func_name
 self.func_original = getattr(self.obj, func_name)
 self.calls = []

@@ -10,11 +10,11 @@ class Spy:
 call = dict(enumerate(args, 1))
 call[0] = cls
 call.update(kwargs)
-print "Logging", call
+print("Logging", call)
 self.calls.append(call)
 return self.func_original(cls, *args, **kwargs)
-setattr(self.obj, self.func_name, loggedFunc)
+setattr(self.obj, self.__name__, loggedFunc)
 return self.calls

 def __exit__(self, *args, **kwargs):
-setattr(self.obj, self.func_name, self.func_original)
+setattr(self.obj, self.__name__, self.func_original)

@@ -1,6 +1,6 @@
 import json
 import time
-from cStringIO import StringIO
+import io

 import pytest

@@ -52,7 +52,7 @@ class TestContent:

 # Normal data
 data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
-data = StringIO(json.dumps(data_dict))
+data = io.StringIO(json.dumps(data_dict))
 assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
 # Reset
 del data_dict["signs"]
@@ -60,7 +60,7 @@ class TestContent:
 # Too large
 data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json
 data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
-data = StringIO(json.dumps(data_dict))
+data = io.StringIO(json.dumps(data_dict))
 with pytest.raises(VerifyError) as err:
 site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
 assert "Include too large" in str(err)
@@ -72,7 +72,7 @@ class TestContent:
 # Not allowed file
 data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"]
 data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
-data = StringIO(json.dumps(data_dict))
+data = io.StringIO(json.dumps(data_dict))
 with pytest.raises(VerifyError) as err:
 site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
 assert "File not allowed" in str(err)
@@ -83,7 +83,7 @@ class TestContent:

 # Should work again
 data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
-data = StringIO(json.dumps(data_dict))
+data = io.StringIO(json.dumps(data_dict))
 assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)

 @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])
@@ -166,7 +166,7 @@ class TestContent:
 data_dict["signs"] = {
 "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
 }
-data = StringIO(json.dumps(data_dict))
+data = io.StringIO(json.dumps(data_dict))
 with pytest.raises(VerifyError) as err:
 site.content_manager.verifyFile(inner_path, data, ignore_same=False)
 assert "Wrong site address" in str(err)
@@ -178,7 +178,7 @@ class TestContent:
 data_dict["signs"] = {
 "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
 }
-data = StringIO(json.dumps(data_dict))
+data = io.StringIO(json.dumps(data_dict))
 with pytest.raises(VerifyError) as err:
 site.content_manager.verifyFile(inner_path, data, ignore_same=False)
 assert "Wrong inner_path" in str(err)
@@ -190,7 +190,7 @@ class TestContent:
 data_dict["signs"] = {
 "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
 }
-data = StringIO(json.dumps(data_dict))
+data = io.StringIO(json.dumps(data_dict))
 assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)

 def testVerifyInnerPath(self, site):
@@ -206,7 +206,7 @@ class TestContent:
 data_dict["signs"] = {
 "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
 }
-data = StringIO(json.dumps(data_dict))
+data = io.StringIO(json.dumps(data_dict))
 assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)

 for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg"]:
@@ -218,7 +218,7 @@ class TestContent:
 data_dict["signs"] = {
 "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
 }
-data = StringIO(json.dumps(data_dict))
+data = io.StringIO(json.dumps(data_dict))
 with pytest.raises(VerifyError) as err:
 site.content_manager.verifyFile(inner_path, data, ignore_same=False)
 assert "Invalid relative path" in str(err)

@@ -1,5 +1,5 @@
 import json
-from cStringIO import StringIO
+from io import StringIO

 import pytest

@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io


 class TestDb:
@@ -63,11 +63,11 @@ class TestDb:
 # Large ammount of IN values
 assert db.execute(
 "SELECT COUNT(*) AS num FROM test WHERE ?",
-{"not__test_id": range(2, 3000)}
+{"not__test_id": list(range(2, 3000))}
 ).fetchone()["num"] == 2
 assert db.execute(
 "SELECT COUNT(*) AS num FROM test WHERE ?",
-{"test_id": range(50, 3000)}
+{"test_id": list(range(50, 3000))}
 ).fetchone()["num"] == 50

 assert db.execute(
@@ -103,7 +103,7 @@ class TestDb:


 def testUpdateJson(self, db):
-f = StringIO.StringIO()
+f = io.StringIO()
 f.write("""
 {
 "test": [
@@ -118,7 +118,7 @@ class TestDb:

 def testUnsafePattern(self, db):
 db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]} # Only repetition of . supported
-f = StringIO.StringIO()
+f = io.StringIO()
 f.write("""
 {
 "test": [
@@ -129,4 +129,4 @@ class TestDb:
 f.seek(0)
 assert db.updateJson(db.db_dir + "data.json", f) is False
 assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 0
-assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0
+assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0

@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io

 from util import Diff

@@ -31,19 +31,19 @@ class TestDiff:
 ) == [("-", 11)]

 def testDiffLimit(self):
-old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix")
-new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix")
+old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
+new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
 actions = Diff.diff(list(old_f), list(new_f), limit=1024)
 assert actions

-old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix")
-new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix"*1024)
+old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
+new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix"*1024)
 actions = Diff.diff(list(old_f), list(new_f), limit=1024)
 assert actions is False

 def testPatch(self):
-old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix")
-new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix")
+old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
+new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
 actions = Diff.diff(
 list(old_f),
 list(new_f)

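Note on the cStringIO replacements: Python 3's io module splits in-memory files into a text flavor and a binary flavor, so every call site has to pick one; the diff helpers above now operate on bytes, hence io.BytesIO with b"" literals. A minimal sketch:

    import io

    assert io.StringIO("text").read() == "text"   # str in, str out
    assert io.BytesIO(b"raw").read() == b"raw"    # bytes in, bytes out
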
@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io

 import pytest
 import time
@@ -20,10 +20,10 @@ class TestFileRequest:

 # Normal request
 response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
-assert "sign" in response["body"]
+assert b"sign" in response["body"]

 response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")})
-assert "sign" in response["body"]
+assert b"sign" in response["body"]

 # Invalid file
 response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
@@ -57,25 +57,25 @@ class TestFileRequest:
 connection = client.getConnection(file_server.ip, 1544)
 file_server.sites[site.address] = site

-buff = StringIO.StringIO()
+buff = io.BytesIO()
 response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
 assert "stream_bytes" in response
-assert "sign" in buff.getvalue()
+assert b"sign" in buff.getvalue()

 # Invalid file
-buff = StringIO.StringIO()
+buff = io.BytesIO()
 response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
 assert "File read error" in response["error"]

 # Location over size
-buff = StringIO.StringIO()
+buff = io.BytesIO()
 response = connection.request(
 "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
 )
 assert "File read error" in response["error"]

 # Stream from parent dir
-buff = StringIO.StringIO()
+buff = io.BytesIO()
 response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
 assert "File read error" in response["error"]

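Note on the b"sign" assertions: in Python 3, socket payloads are bytes, and a bytes containment check requires a bytes needle; a str needle raises TypeError rather than silently failing. A sketch with a made-up payload:

    body = b'{"sign": "..."}'  # hypothetical response body
    assert b"sign" in body
    # "sign" in body  ->  TypeError: a bytes-like object is required, not 'str'
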
@@ -1,12 +1,12 @@
 import time
-from cStringIO import StringIO
+import io

 import pytest

 from File import FileServer
 from File import FileRequest
 from Crypt import CryptHash
-import Spy
+from . import Spy


 @pytest.mark.usefixtures("resetSettings")
@@ -43,17 +43,17 @@ class TestPeer:

 # Testing streamFile
 buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True)
-assert "sign" in buff.getvalue()
+assert b"sign" in buff.getvalue()

 # Testing getFile
 buff = peer_file_server.getFile(site_temp.address, "content.json")
-assert "sign" in buff.getvalue()
+assert b"sign" in buff.getvalue()

 connection.close()
 client.stop()

 def testHashfield(self, site):
-sample_hash = site.content_manager.contents["content.json"]["files_optional"].values()[0]["sha512"]
+sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"]

 site.storage.verifyFiles(quick_check=True) # Find what optional files we have

@@ -65,7 +65,7 @@ class TestPeer:
 assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield

 # Add new hash
-new_hash = CryptHash.sha512sum(StringIO("hello"))
+new_hash = CryptHash.sha512sum(io.BytesIO(b"hello"))
 assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
 assert site.content_manager.hashfield.appendHash(new_hash)
 assert not site.content_manager.hashfield.appendHash(new_hash) # Don't add second time

@@ -36,8 +36,8 @@ class TestSite:
 # Test re-cloning (updating)

 # Changes in non-data files should be overwritten
-new_site.storage.write("index.html", "this will be overwritten")
-assert new_site.storage.read("index.html") == "this will be overwritten"
+new_site.storage.write("index.html", b"this will be overwritten")
+assert new_site.storage.read("index.html") == b"this will be overwritten"

 # Changes in data file should be kept after re-cloning
 changed_contentjson = new_site.storage.loadJson("content.json")

@@ -9,7 +9,7 @@ from Config import config
 from File import FileRequest
 from File import FileServer
 from Site import Site
-import Spy
+from . import Spy


 @pytest.mark.usefixtures("resetTempSettings")
@@ -289,7 +289,7 @@ class TestSiteDownload:

 # Update file
 data_original = site.storage.open("data/data.json").read()
-data_new = data_original.replace('"ZeroBlog"', '"UpdatedZeroBlog"')
+data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"')
 assert data_original != data_new

 site.storage.open("data/data.json", "wb").write(data_new)
@@ -309,13 +309,13 @@ class TestSiteDownload:
 assert site_temp.storage.open("data/data.json").read() == data_new

 # Close connection to avoid update spam limit
-site.peers.values()[0].remove()
+list(site.peers.values())[0].remove()
 site.addPeer(file_server.ip, 1545)
-site_temp.peers.values()[0].ping() # Connect back
+list(site_temp.peers.values())[0].ping() # Connect back
 time.sleep(0.1)

 # Update with patch
-data_new = data_original.replace('"ZeroBlog"', '"PatchedZeroBlog"')
+data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
 assert data_original != data_new

 site.storage.open("data/data.json-new", "wb").write(data_new)
@@ -328,7 +328,7 @@ class TestSiteDownload:
 assert not site.storage.isFile("data/data.json-new") # New data file removed
 assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
 assert "data/data.json" in diffs
-assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
+assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]

 # Publish with patch
 site.log.info("Publish new data.json with patch")

@@ -34,14 +34,14 @@ class TestTor:
 address = tor_manager.addOnion()

 # Sign
-sign = CryptRsa.sign("hello", tor_manager.getPrivatekey(address))
+sign = CryptRsa.sign(b"hello", tor_manager.getPrivatekey(address))
 assert len(sign) == 128

 # Verify
 publickey = CryptRsa.privatekeyToPublickey(tor_manager.getPrivatekey(address))
 assert len(publickey) == 140
-assert CryptRsa.verify("hello", publickey, sign)
-assert not CryptRsa.verify("not hello", publickey, sign)
+assert CryptRsa.verify(b"hello", publickey, sign)
+assert not CryptRsa.verify(b"not hello", publickey, sign)

 # Pub to address
 assert CryptRsa.publickeyToOnion(publickey) == address
@@ -54,7 +54,7 @@ class TestTor:
 file_server.tor_manager.start_onions = True
 address = file_server.tor_manager.getOnion(site.address)
 assert address
-print "Connecting to", address
+print("Connecting to", address)
 for retry in range(5): # Wait for hidden service creation
 time.sleep(10)
 try:

@@ -1,5 +1,3 @@
-import os
-
 from Translate import Translate

 class TestTranslate:
@@ -13,7 +11,6 @@ class TestTranslate:
 assert 'translated = _("translated")' in data_translated
 assert 'not_translated = "original"' in data_translated
-

 def testTranslateStrictNamed(self):
 translate = Translate()
 data = """

@@ -1,5 +1,5 @@
 import socket
-from urlparse import urlparse
+from urllib.parse import urlparse

 import pytest
 import mock
@@ -10,7 +10,7 @@ from util import UpnpPunch as upnp
 @pytest.fixture
 def mock_socket():
 mock_socket = mock.MagicMock()
-mock_socket.recv = mock.MagicMock(return_value='Hello')
+mock_socket.recv = mock.MagicMock(return_value=b'Hello')
 mock_socket.bind = mock.MagicMock()
 mock_socket.send_to = mock.MagicMock()

@@ -79,12 +79,12 @@ class TestUpnpPunch(object):
 upnp._retrieve_location_from_ssdp(rsp)

 def test_retrieve_igd_profile(self, url_obj):
-with mock.patch('urllib2.urlopen') as mock_urlopen:
+with mock.patch('urllib.request.urlopen') as mock_urlopen:
 upnp._retrieve_igd_profile(url_obj)
 mock_urlopen.assert_called_with(url_obj.geturl(), timeout=5)

 def test_retrieve_igd_profile_timeout(self, url_obj):
-with mock.patch('urllib2.urlopen') as mock_urlopen:
+with mock.patch('urllib.request.urlopen') as mock_urlopen:
 mock_urlopen.side_effect = socket.error('Timeout error')
 with pytest.raises(upnp.IGDError):
 upnp._retrieve_igd_profile(url_obj)

@@ -7,7 +7,7 @@ from Crypt import CryptBitcoin
 class TestUser:
 def testAddress(self, user):
 assert user.master_address == "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc"
-address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811L
+address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811
 assert user.getAddressAuthIndex("15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc") == address_index

 # Re-generate privatekey based on address_index

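Note on the dropped L suffix above: Python 3 has a single arbitrary-precision int type, and the Python 2 long literal suffix is now a SyntaxError. A sketch:

    n = 10 ** 80            # plain int, unbounded precision
    print(n.bit_length())   # 266; no "L" anywhere
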
@@ -1,4 +1,4 @@
-import urllib
+import urllib.request

 import pytest

@@ -26,7 +26,7 @@ def getContextUrl(browser):


 def getUrl(url):
-content = urllib.urlopen(url).read()
+content = urllib.request.urlopen(url).read()
 assert "server error" not in content.lower(), "Got a server error! " + repr(url)
 return content

@@ -1,6 +1,6 @@
 import os
 import sys
-import urllib
+import urllib.request
 import time
 import logging
 import json
@@ -15,9 +15,11 @@ import gevent
 from gevent import monkey
 monkey.patch_all(thread=False, subprocess=False)

+
 def pytest_addoption(parser):
 parser.addoption("--slow", action='store_true', default=False, help="Also run slow tests")

+
 def pytest_collection_modifyitems(config, items):
 if config.getoption("--slow"):
 # --runslow given in cli: do not skip slow tests
@@ -44,18 +46,18 @@ config.action = "test"

 logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)


 # Set custom formatter with realative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log)
 class TimeFilter(logging.Filter):

 def filter(self, record):
 try:
-last = self.last
+last = self.last
 except AttributeError:
-last = record.relativeCreated
+last = record.relativeCreated

 delta = datetime.datetime.fromtimestamp(record.relativeCreated/1000.0) - datetime.datetime.fromtimestamp(last/1000.0)

-record.relative = '{0:.3f}'.format(delta.seconds + delta.microseconds/1000000.0)
+record.relative = '{0:.3f}'.format(delta.seconds + delta.microseconds / 1000000.0)

 self.last = record.relativeCreated
 return True
@@ -204,7 +206,7 @@ def user():
 def browser(request):
 try:
 from selenium import webdriver
-print "Starting chromedriver..."
+print("Starting chromedriver...")
 options = webdriver.chrome.options.Options()
 options.add_argument("--headless")
 options.add_argument("--window-size=1920x1080")
@@ -214,7 +216,7 @@ def browser(request):
 def quit():
 browser.quit()
 request.addfinalizer(quit)
-except Exception, err:
+except Exception as err:
 raise pytest.skip("Test requires selenium + chromedriver: %s" % err)
 return browser

@@ -222,8 +224,8 @@ def browser(request):
 @pytest.fixture(scope="session")
 def site_url():
 try:
-urllib.urlopen(SITE_URL).read()
-except Exception, err:
+urllib.request.urlopen(SITE_URL).read()
+except Exception as err:
 raise pytest.skip("Test requires zeronet client running: %s" % err)
 return SITE_URL

@@ -253,8 +255,8 @@ def file_server4(request):
 conn = file_server.getConnection("127.0.0.1", 1544)
 conn.close()
 break
-except Exception, err:
-print err
+except Exception as err:
+print(err)
 assert file_server.running
 file_server.ip_incoming = {} # Reset flood protection

@@ -263,6 +265,7 @@ def file_server4(request):
 request.addfinalizer(stop)
 return file_server

+
 @pytest.fixture
 def file_server6(request):
 file_server6 = FileServer("::1", 1544)
@@ -280,8 +283,8 @@ def file_server6(request):
 conn = file_server6.getConnection("::1", 1544)
 conn.close()
 break
-except Exception, err:
-print err
+except Exception as err:
+print(err)
 assert file_server6.running
 file_server6.ip_incoming = {} # Reset flood protection

@@ -318,10 +321,11 @@ def tor_manager():
 tor_manager.start()
 assert tor_manager.conn
 tor_manager.startOnions()
-except Exception, err:
+except Exception as err:
 raise pytest.skip("Test requires Tor with ControlPort: %s, %s" % (config.tor_controller, err))
 return tor_manager

+
 @pytest.fixture()
 def db(request):
 db_path = "%s/zeronet.db" % config.data_dir

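Note on the urllib changes in this file: Python 2's urllib, urllib2 and urlparse were merged into a single urllib package, so urlopen now lives in urllib.request and the query parsers in urllib.parse. A sketch (the URL is a placeholder):

    import urllib.parse
    import urllib.request

    print(urllib.parse.parse_qsl("a=1&b=2"))                  # was urlparse/cgi.parse_qsl
    req = urllib.request.Request("http://127.0.0.1:43110/")   # was urllib2.Request
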
@@ -110,8 +110,8 @@ class TorManager(object):
 break
 # Terminate on exit
 atexit.register(self.stopTor)
-except Exception, err:
-self.log.error(u"Error starting Tor client: %s" % Debug.formatException(str(err).decode("utf8", "ignore")))
+except Exception as err:
+self.log.error("Error starting Tor client: %s" % Debug.formatException(str(err)))
 self.enabled = False
 self.starting = False
 self.event_started.set(False)
@@ -125,7 +125,7 @@ class TorManager(object):
 try:
 if self.isSubprocessRunning():
 self.request("SIGNAL SHUTDOWN")
-except Exception, err:
+except Exception as err:
 self.log.error("Error stopping Tor: %s" % err)

 def downloadTor(self):
@@ -235,18 +235,18 @@ class TorManager(object):
 def resetCircuits(self):
 res = self.request("SIGNAL NEWNYM")
 if "250 OK" not in res:
-self.setStatus(u"Reset circuits error (%s)" % res)
+self.setStatus("Reset circuits error (%s)" % res)
 self.log.error("Tor reset circuits error: %s" % res)

 def addOnion(self):
 if len(self.privatekeys) >= config.tor_hs_limit:
-return random.choice([key for key in self.privatekeys.keys() if key != self.site_onions.get("global")])
+return random.choice([key for key in list(self.privatekeys.keys()) if key != self.site_onions.get("global")])

 result = self.makeOnionAndKey()
 if result:
 onion_address, onion_privatekey = result
 self.privatekeys[onion_address] = onion_privatekey
-self.setStatus(u"OK (%s onions running)" % len(self.privatekeys))
+self.setStatus("OK (%s onions running)" % len(self.privatekeys))
 SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port))
 return onion_address
 else:
@@ -259,7 +259,7 @@ class TorManager(object):
 onion_address, onion_privatekey = match.groups()
 return (onion_address, onion_privatekey)
 else:
-self.setStatus(u"AddOnion error (%s)" % res)
+self.setStatus("AddOnion error (%s)" % res)
 self.log.error("Tor addOnion error: %s" % res)
 return False

@@ -270,7 +270,7 @@ class TorManager(object):
 self.setStatus("OK (%s onion running)" % len(self.privatekeys))
 return True
 else:
-self.setStatus(u"DelOnion error (%s)" % res)
+self.setStatus("DelOnion error (%s)" % res)
 self.log.error("Tor delOnion error: %s" % res)
 self.disconnect()
 return False
@@ -291,11 +291,11 @@ class TorManager(object):
 back = ""
 for retry in range(2):
 try:
-conn.sendall("%s\r\n" % cmd)
+conn.sendall(b"%s\r\n" % cmd.encode("utf8"))
 while not back.endswith("250 OK\r\n"):
 back += conn.recv(1024 * 64).decode("utf8", "ignore")
 break
-except Exception, err:
+except Exception as err:
 self.log.error("Tor send error: %s, reconnecting..." % err)
 self.disconnect()
 time.sleep(1)

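Note on conn.sendall(b"%s\r\n" % cmd.encode("utf8")) above: Python 3 sockets send and receive bytes only, and %-formatting of bytes (PEP 461) requires Python 3.5+. A sketch that needs no socket:

    cmd = "SIGNAL NEWNYM"
    wire = b"%s\r\n" % cmd.encode("utf8")   # bytes %-formatting, Python 3.5+
    assert wire == b"SIGNAL NEWNYM\r\n"
    text = wire.decode("utf8", "ignore")    # mirror of the recv() side above
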
@@ -1 +1 @@
-from TorManager import TorManager
+from .TorManager import TorManager

@@ -3,7 +3,7 @@ import json
 import logging
 import inspect
 import re
-import cgi
+import html
 import string

 from Config import config
@@ -15,8 +15,8 @@ class EscapeProxy(dict):
 # Automatically escape the accessed string values
 def __getitem__(self, key):
 val = dict.__getitem__(self, key)
-if type(val) in (str, unicode):
-return cgi.escape(val, quote=True)
+if type(val) in (str, str):
+return html.escape(val)
 elif type(val) is dict:
 return EscapeProxy(val)
 elif type(val) is list:
@@ -105,7 +105,7 @@ class Translate(dict):
 data = data.decode("utf8")

 patterns = []
-for key, val in translate_table.items():
+for key, val in list(translate_table.items()):
 if key.startswith("_("): # Problematic string: only match if called between _(" ") function
 key = key.replace("_(", "").replace(")", "").replace(", ", '", "')
 translate_table[key] = "|" + val
@@ -128,6 +128,6 @@ class Translate(dict):
 else:
 pattern = '"(' + "|".join(patterns) + ')"'
 data = re.sub(pattern, replacer, data)
-return data.encode("utf8")
+return data

 translate = Translate()

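Note on cgi.escape becoming html.escape: cgi.escape was deprecated and later removed in Python 3.8; html.escape escapes quotes by default, so the quote=True arguments disappear. The (str, str) tuple in the new code above looks like a mechanical 2to3 leftover from (str, unicode) and is redundant in Python 3. A sketch:

    import html

    assert html.escape('<b>"hi"</b>') == '&lt;b&gt;&quot;hi&quot;&lt;/b&gt;'
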
@@ -1 +1 @@
-from Translate import *
+from .Translate import *

@@ -3,7 +3,8 @@ import re
 import os
 import mimetypes
 import json
-import cgi
+import html
+import urllib

 import gevent

@@ -157,7 +158,8 @@ class UiRequest(object):
 if func:
 return func()
 else:
-return self.error404(path)
+ret = self.error404(path)
+return ret

 # The request is proxied by chrome extension or a transparent proxy
 def isProxyRequest(self):
@@ -190,7 +192,7 @@ class UiRequest(object):
 # Return: <dict> Posted variables
 def getPosted(self):
 if self.env['REQUEST_METHOD'] == "POST":
-return dict(cgi.parse_qsl(
+return dict(urllib.parse.parse_qsl(
 self.env['wsgi.input'].readline().decode()
 ))
 else:
@@ -200,7 +202,7 @@ class UiRequest(object):
 def getCookies(self):
 raw_cookies = self.env.get('HTTP_COOKIE')
 if raw_cookies:
-cookies = cgi.parse_qsl(raw_cookies)
+cookies = urllib.parse.parse_qsl(raw_cookies)
 return {key.strip(): val for key, val in cookies}
 else:
 return {}
@@ -282,12 +284,12 @@ class UiRequest(object):
 headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0" # No caching at all
 headers["Content-Type"] = content_type
 headers.update(extra_headers)
-return self.start_response(status_texts[status], headers.items())
+return self.start_response(status_texts[status], list(headers.items()))

 # Renders a template
 def render(self, template_path, *args, **kwargs):
 template = open(template_path).read()
-for key, val in kwargs.items():
+for key, val in list(kwargs.items()):
 template = template.replace("{%s}" % key, "%s" % val)
 return template.encode("utf8")

@@ -296,7 +298,7 @@ class UiRequest(object):
 # Redirect to an url
 def actionRedirect(self, url):
 self.start_response('301 Redirect', [('Location', str(url))])
-yield "Location changed: %s" % url
+yield b"Location changed: %s" % url.encode("utf8")

 def actionIndex(self):
 return self.actionRedirect("/" + config.homepage)
@@ -447,11 +449,11 @@ class UiRequest(object):
 content = site.content_manager.contents["content.json"]
 if content.get("background-color"):
 background_color = content.get("background-color-%s" % theme, content["background-color"])
-body_style += "background-color: %s;" % cgi.escape(background_color, True)
+body_style += "background-color: %s;" % html.escape(background_color)
 if content.get("viewport"):
-meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)
+meta_tags += '<meta name="viewport" id="viewport" content="%s">' % html.escape(content["viewport"])
 if content.get("favicon"):
-meta_tags += '<link rel="icon" href="%s%s">' % (root_url, cgi.escape(content["favicon"], True))
+meta_tags += '<link rel="icon" href="%s%s">' % (root_url, html.escape(content["favicon"]))
 if content.get("postmessage_nonce_security"):
 postmessage_nonce_security = "true"

@@ -470,7 +472,7 @@ class UiRequest(object):
 file_url=re.escape(file_url),
 file_inner_path=re.escape(file_inner_path),
 address=site.address,
-title=cgi.escape(title, True),
+title=html.escape(title),
 body_style=body_style,
 meta_tags=meta_tags,
 query_string=re.escape(inner_query_string),
@@ -612,7 +614,7 @@ class UiRequest(object):
 return self.error400()

 def actionSiteAdd(self):
-post = dict(cgi.parse_qsl(self.env["wsgi.input"].read()))
+post = dict(urllib.parse.parse_qsl(self.env["wsgi.input"].read()))
 if post["add_nonce"] not in self.server.add_nonces:
 return self.error403("Add nonce error.")
 self.server.add_nonces.remove(post["add_nonce"])
@@ -626,7 +628,7 @@ class UiRequest(object):

 self.sendHeader(200, "text/html", noscript=True)
 template = open("src/Ui/template/site_add.html").read()
-template = template.replace("{url}", cgi.escape(self.env["PATH_INFO"], True))
+template = template.replace("{url}", html.escape(self.env["PATH_INFO"]))
 template = template.replace("{address}", path_parts["address"])
 template = template.replace("{add_nonce}", self.getAddNonce())
 return template
@@ -634,7 +636,7 @@ class UiRequest(object):
 def replaceHtmlVariables(self, block, path_parts):
 user = self.getCurrentUser()
 themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
-block = block.replace("{themeclass}", themeclass.encode("utf8"))
+block = block.replace(b"{themeclass}", themeclass.encode("utf8"))

 if path_parts:
 site = self.server.sites.get(path_parts.get("address"))
@@ -642,7 +644,7 @@ class UiRequest(object):
 modified = int(time.time())
 else:
 modified = int(site.content_manager.contents["content.json"]["modified"])
-block = block.replace("{site_modified}", str(modified))
+block = block.replace(b"{site_modified}", str(modified).encode("utf8"))

 return block

@@ -708,14 +710,14 @@ class UiRequest(object):
 wrapper_key = self.get["wrapper_key"]
 # Find site by wrapper_key
 site = None
-for site_check in self.server.sites.values():
+for site_check in list(self.server.sites.values()):
 if site_check.settings["wrapper_key"] == wrapper_key:
 site = site_check

 if site: # Correct wrapper key
 try:
 user = self.getCurrentUser()
-except Exception, err:
+except Exception as err:
 self.log.error("Error in data/user.json: %s" % err)
 return self.error500()
 if not user:
@@ -726,7 +728,7 @@ class UiRequest(object):
 self.server.websockets.append(ui_websocket)
 ui_websocket.start()
 self.server.websockets.remove(ui_websocket)
-for site_check in self.server.sites.values():
+for site_check in list(self.server.sites.values()):
 # Remove websocket from every site (admin sites allowed to join other sites event channels)
 if ui_websocket in site_check.websockets:
 site_check.websockets.remove(ui_websocket)
@@ -744,10 +746,10 @@ class UiRequest(object):
 import sys
 last_error = sys.modules["main"].DebugHook.last_error
 if last_error:
-raise last_error[0], last_error[1], last_error[2]
+raise last_error[0](last_error[1]).with_traceback(last_error[2])
 else:
 self.sendHeader()
-return "No error! :)"
+return [b"No error! :)"]

 # Just raise an error to get console
 def actionConsole(self):
@@ -793,19 +795,19 @@ class UiRequest(object):
 # Send file not found error
 def error404(self, path=""):
 self.sendHeader(404)
-return self.formatError("Not Found", cgi.escape(path.encode("utf8")), details=False)
+return self.formatError("Not Found", html.escape(path), details=False)

 # Internal server error
 def error500(self, message=":("):
 self.sendHeader(500)
-return self.formatError("Server error", cgi.escape(message))
+return self.formatError("Server error", html.escape(message))

 def formatError(self, title, message, details=True):
 import sys
 import gevent

 if details:
-details = {key: val for key, val in self.env.items() if hasattr(val, "endswith") and "COOKIE" not in key}
+details = {key: val for key, val in list(self.env.items()) if hasattr(val, "endswith") and "COOKIE" not in key}
 details["version_zeronet"] = "%s r%s" % (config.version, config.rev)
 details["version_python"] = sys.version
 details["version_gevent"] = gevent.__version__

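Note on the raise rewrite above: Python 2's three-argument raise statement is gone; Python 3 attaches a saved traceback with .with_traceback(). A sketch mirroring how last_error is used above (the tuple shape matches sys.exc_info()):

    import sys

    try:
        raise ValueError("boom")
    except ValueError:
        last_error = sys.exc_info()  # (type, value, traceback)

    try:
        raise last_error[0](last_error[1]).with_traceback(last_error[2])
    except ValueError as err:
        print("re-raised with original traceback:", err)
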
@@ -7,12 +7,13 @@ import gevent
 from gevent.pywsgi import WSGIServer
 from gevent.pywsgi import WSGIHandler
-from lib.geventwebsocket.handler import WebSocketHandler
+from geventwebsocket.handler import WebSocketHandler

-from UiRequest import UiRequest
+from .UiRequest import UiRequest
 from Site import SiteManager
 from Config import config
 from Debug import Debug
+import importlib

 # Skip websocket handler if not necessary
@@ -30,7 +31,7 @@ class UiWSGIHandler(WSGIHandler):
             ws_handler = WebSocketHandler(*self.args, **self.kwargs)
             ws_handler.__dict__ = self.__dict__  # Match class variables
             ws_handler.run_application()
-        except Exception, err:
+        except Exception as err:
             logging.error("UiWSGIHandler websocket error: %s" % Debug.formatException(err))
             if config.debug:  # Allow websocket errors to appear on /Debug
                 import sys
@@ -38,7 +39,7 @@ class UiWSGIHandler(WSGIHandler):
         else:  # Standard HTTP request
             try:
                 super(UiWSGIHandler, self).run_application()
-            except Exception, err:
+            except Exception as err:
                 logging.error("UiWSGIHandler error: %s" % Debug.formatException(err))
                 if config.debug:  # Allow websocket errors to appear on /Debug
                     import sys
@@ -101,7 +102,7 @@ class UiServer:
         else:  # Catch and display the error
             try:
                 return ui_request.route(path)
-            except Exception, err:
+            except Exception as err:
                 logging.debug("UiRequest error: %s" % Debug.formatException(err))
                 return ui_request.error500("Err: %s" % Debug.formatException(err))

@@ -110,8 +111,8 @@ class UiServer:
         global UiRequest
         import imp
         import sys
-        reload(sys.modules["User.UserManager"])
-        reload(sys.modules["Ui.UiWebsocket"])
+        importlib.reload(sys.modules["User.UserManager"])
+        importlib.reload(sys.modules["Ui.UiWebsocket"])
         UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest
         # UiRequest.reload()

@@ -128,7 +129,7 @@ class UiServer:
             try:
                 from werkzeug.debug import DebuggedApplication
                 handler = DebuggedApplication(self.handleRequest, evalex=True)
-            except Exception, err:
+            except Exception as err:
                 self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err)
                 from Debug import DebugReloader
         self.log.write = lambda msg: self.log.debug(msg.strip())  # For Wsgi access.log
@@ -147,14 +148,14 @@ class UiServer:
                 url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage)
                 gevent.spawn_later(0.3, browser.open, url, new=2)
             except Exception as err:
-                print "Error starting browser: %s" % err
+                print("Error starting browser: %s" % err)

         self.server = WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log)
         self.server.sockets = {}
         self.afterStarted()
         try:
             self.server.serve_forever()
-        except Exception, err:
+        except Exception as err:
             self.log.error("Web interface bind error, must be running already, exiting.... %s" % err)
             sys.modules["main"].file_server.stop()
         self.log.debug("Stopped.")
@@ -163,18 +164,18 @@ class UiServer:
         self.log.debug("Stopping...")
         # Close WS sockets
         if "clients" in dir(self.server):
-            for client in self.server.clients.values():
+            for client in list(self.server.clients.values()):
                 client.ws.close()
         # Close http sockets
         sock_closed = 0
-        for sock in self.server.sockets.values():
+        for sock in list(self.server.sockets.values()):
             try:
-                sock.send("bye")
+                sock.send(b"bye")
                 sock.shutdown(socket.SHUT_RDWR)
                 # sock._sock.close()
                 # sock.close()
                 sock_closed += 1
-            except Exception, err:
+            except Exception as err:
                 self.log.debug("Http connection close error: %s" % err)
         self.log.debug("Socket closed: %s" % sock_closed)
         time.sleep(0.1)
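Two patterns repeat through this file: the Python 2-only `except Exception, err:` becomes `except Exception as err:` (the comma form is a SyntaxError in Python 3), and raw sockets now require `bytes`, hence `sock.send(b"bye")`. A small self-contained sketch of both:

    import socket

    def close_gracefully(sock):
        try:
            sock.send(b"bye")            # Python 3 sockets accept bytes, not str
            sock.shutdown(socket.SHUT_RDWR)
        except Exception as err:         # "except Exception, err:" no longer parses
            print("Close error: %s" % err)

    a, b = socket.socketpair()
    close_gracefully(a)
    print(b.recv(3))                     # b'bye'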
@@ -55,7 +55,7 @@ class UiWebsocket(object):
         else:
             try:
                 self.addHomepageNotifications()
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Uncaught Exception: " + Debug.formatException(err))

         for notification in self.site.notifications:  # Send pending notification messages
@@ -73,7 +73,7 @@ class UiWebsocket(object):
                     break
                 else:
                     message = ws.receive()
-            except Exception, err:
+            except Exception as err:
                 self.log.error("WebSocket receive error: %s" % Debug.formatException(err))
                 break

@@ -81,7 +81,7 @@ class UiWebsocket(object):
             try:
                 req = json.loads(message)
                 self.handleRequest(req)
-            except Exception, err:
+            except Exception as err:
                 if config.debug:  # Allow websocket errors to appear on /Debug
                     sys.modules["main"].DebugHook.handleError()
                 self.log.error("WebSocket handleRequest error: %s \n %s" % (Debug.formatException(err), message))
@@ -99,7 +99,7 @@ class UiWebsocket(object):
         if ("0.0.0.0" == bind_ip or "*" == bind_ip) and (not whitelist):
             self.site.notifications.append([
                 "error",
-                _(u"You are not going to set up a public gateway. However, <b>your Web UI is<br>" +
+                _("You are not going to set up a public gateway. However, <b>your Web UI is<br>" +
                   "open to the whole Internet.</b> " +
                   "Please check your configuration.")
             ])
@@ -114,7 +114,7 @@ class UiWebsocket(object):
         elif config.tor == "always" and file_server.tor_manager.start_onions:
             self.site.notifications.append([
                 "done",
-                _(u"""
+                _("""
                 {_[Tor mode active, every connection using Onion route.]}<br>
                 {_[Successfully started Tor onion hidden services.]}
                 """),
@@ -123,7 +123,7 @@ class UiWebsocket(object):
         elif config.tor == "always" and file_server.tor_manager.start_onions is not False:
             self.site.notifications.append([
                 "error",
-                _(u"""
+                _("""
                 {_[Tor mode active, every connection using Onion route.]}<br>
                 {_[Unable to start hidden services, please check your config.]}
                 """),
@@ -132,7 +132,7 @@ class UiWebsocket(object):
         elif file_server.tor_manager.start_onions:
             self.site.notifications.append([
                 "done",
-                _(u"""
+                _("""
                 {_[Successfully started Tor onion hidden services.]}<br>
                 {_[For faster connections open <b>{0}</b> port on your router.]}
                 """).format(config.fileserver_port),
@@ -141,7 +141,7 @@ class UiWebsocket(object):
         else:
             self.site.notifications.append([
                 "error",
-                _(u"""
+                _("""
                 {_[Your connection is restricted. Please, open <b>{0}</b> port on your router]}<br>
                 {_[or configure Tor to become a full member of the ZeroNet network.]}
                 """).format(config.fileserver_port),
@@ -213,7 +213,7 @@ class UiWebsocket(object):
                 message = self.send_queue.pop(0)
                 self.ws.send(json.dumps(message))
                 self.state["sending"] = False
-            except Exception, err:
+            except Exception as err:
                 self.log.debug("Websocket send error: %s" % Debug.formatException(err))
                 self.state["sending"] = False

@@ -230,7 +230,7 @@ class UiWebsocket(object):
             result = func(*args, **kwargs)
             if result is not None:
                 self.response(args[0], result)
-        except Exception, err:
+        except Exception as err:
             if config.debug:  # Allow websocket errors to appear on /Debug
                 sys.modules["main"].DebugHook.handleError()
             self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err))
@@ -403,14 +403,14 @@ class UiWebsocket(object):
     def actionAnnouncerStats(self, to):
         back = {}
         trackers = self.site.announcer.getTrackers()
-        for site in self.server.sites.values():
-            for tracker, stats in site.announcer.stats.iteritems():
+        for site in list(self.server.sites.values()):
+            for tracker, stats in site.announcer.stats.items():
                 if tracker not in trackers:
                     continue
                 if tracker not in back:
                     back[tracker] = {}
                 is_latest_data = bool(stats["time_request"] > back[tracker].get("time_request", 0) and stats["status"])
-                for key, val in stats.iteritems():
+                for key, val in stats.items():
                     if key.startswith("num_"):
                         back[tracker][key] = back[tracker].get(key, 0) + val
                     elif is_latest_data:
@@ -548,7 +548,7 @@ class UiWebsocket(object):
         if notification:
             self.cmd("notification", [
                 "info",
-                _(u"""{_[Your network connection is restricted. Please, open <b>{0}</b> port]}<br>
+                _("""{_[Your network connection is restricted. Please, open <b>{0}</b> port]}<br>
                 {_[on your router to make your site accessible for everyone.]}""").format(config.fileserver_port)
             ])
         if callback:
@@ -580,7 +580,7 @@ class UiWebsocket(object):
             self.cmd(
                 "confirm",
                 [_["This file still in sync, if you write it now, then the previous content may be lost."], _["Write content anyway"]],
-                lambda (res): self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True)
+                lambda res: self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True)
             )
             return False

@@ -601,7 +601,7 @@ class UiWebsocket(object):
                     shutil.copyfileobj(f_old, f_new)

             self.site.storage.write(inner_path, content)
-        except Exception, err:
+        except Exception as err:
             self.log.error("File write error: %s" % Debug.formatException(err))
             return self.response(to, {"error": "Write error: %s" % Debug.formatException(err)})

@@ -636,7 +636,7 @@ class UiWebsocket(object):
         if need_delete:
             try:
                 self.site.storage.delete(inner_path)
-            except Exception, err:
+            except Exception as err:
                 self.log.error("File delete error: %s" % err)
                 return self.response(to, {"error": "Delete error: %s" % err})

@@ -676,7 +676,7 @@ class UiWebsocket(object):
         rows = []
         try:
             res = self.site.storage.query(query, params)
-        except Exception, err:  # Response the error to client
+        except Exception as err:  # Response the error to client
             self.log.error("DbQuery error: %s" % err)
             return self.response(to, {"error": str(err)})
         # Convert result to dict
@@ -693,7 +693,7 @@ class UiWebsocket(object):
             with gevent.Timeout(timeout):
                 self.site.needFile(inner_path, priority=6)
                 body = self.site.storage.read(inner_path, "rb")
-        except Exception, err:
+        except Exception as err:
             self.log.error("%s fileGet error: %s" % (inner_path, err))
             body = None
         if body and format == "base64":
@@ -705,7 +705,7 @@ class UiWebsocket(object):
         try:
             with gevent.Timeout(timeout):
                 self.site.needFile(inner_path, priority=6)
-        except Exception, err:
+        except Exception as err:
             return self.response(to, {"error": str(err)})
         return self.response(to, "ok")

@@ -725,7 +725,7 @@ class UiWebsocket(object):
         rules = self.site.content_manager.getRules(inner_path, content)
         if inner_path.endswith("content.json") and rules:
             if content:
-                rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content.get("files", {}).values()])
+                rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in list(content.get("files", {}).values())])
             else:
                 rules["current_size"] = 0
         return self.response(to, rules)
@@ -749,11 +749,11 @@ class UiWebsocket(object):
                 self.cmd(
                     "confirm",
                     [body, _("Change it to {auth_type}/{auth_user_name}@{domain}")],
-                    lambda (res): self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert)
+                    lambda res: self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert)
                 )
             else:
                 self.response(to, "Not changed")
-        except Exception, err:
+        except Exception as err:
             self.log.error("CertAdd error: Exception - %s (%s)" % (err.message, Debug.formatException(err)))
             self.response(to, {"error": err.message})

@@ -781,7 +781,7 @@ class UiWebsocket(object):
         if not accepted_domains and not accepted_pattern:  # Accept any if no filter defined
             accept_any = True

-        for domain, cert in self.user.certs.items():
+        for domain, cert in list(self.user.certs.items()):
             if auth_address == cert["auth_address"] and domain == site_data.get("cert"):
                 active = domain
             title = cert["auth_user_name"] + "@" + domain
@@ -797,7 +797,7 @@ class UiWebsocket(object):
         for domain, account, css_class in accounts:
             if domain == active:
                 css_class += " active"  # Currently selected option
-                title = _(u"<b>%s</b> <small>({_[currently selected]})</small>") % account
+                title = _("<b>%s</b> <small>({_[currently selected]})</small>") % account
             else:
                 title = "<b>%s</b>" % account
             body += "<a href='#Select+account' class='select select-close cert %s' title='%s'>%s</a>" % (css_class, domain, title)
@@ -807,7 +807,7 @@ class UiWebsocket(object):
         # body+= "<small style='margin-top: 10px; display: block'>Accepted authorization providers by the site:</small>"
         body += "<div style='background-color: #F7F7F7; margin-right: -30px'>"
         for domain in more_domains:
-            body += _(u"""
+            body += _("""
             <a href='/{domain}' target='_top' class='select'>
                 <small style='float: right; margin-right: 40px; margin-top: -1px'>{_[Register]} &raquo;</small>{domain}
             </a>
@@ -858,7 +858,7 @@ class UiWebsocket(object):
     def actionCertList(self, to):
         back = []
         auth_address = self.user.getAuthAddress(self.site.address)
-        for domain, cert in self.user.certs.items():
+        for domain, cert in list(self.user.certs.items()):
             back.append({
                 "auth_address": cert["auth_address"],
                 "auth_type": cert["auth_type"],
@@ -872,7 +872,7 @@ class UiWebsocket(object):
     def actionSiteList(self, to, connecting_sites=False):
         ret = []
         SiteManager.site_manager.load()  # Reload sites
-        for site in self.server.sites.values():
+        for site in list(self.server.sites.values()):
             if not site.content_manager.contents.get("content.json") and not connecting_sites:
                 continue  # Incomplete site
             ret.append(self.formatSiteInfo(site, create_user=False))  # Dont generate the auth_address on listing
@@ -883,7 +883,7 @@ class UiWebsocket(object):
         if channel not in self.channels:  # Add channel to channels
             self.channels.append(channel)

-        for site in self.server.sites.values():  # Add websocket to every channel
+        for site in list(self.server.sites.values()):  # Add websocket to every channel
             if self not in site.websockets:
                 site.websockets.append(self)

@@ -970,7 +970,7 @@ class UiWebsocket(object):

         site = self.server.sites.get(address)
         if site.bad_files:
-            for bad_inner_path in site.bad_files.keys():
+            for bad_inner_path in list(site.bad_files.keys()):
                 is_user_file = "cert_signers" in site.content_manager.getRules(bad_inner_path)
                 if not is_user_file:
                     self.cmd("notification", ["error", _["Clone error: Site still in sync"]])
@@ -982,7 +982,7 @@ class UiWebsocket(object):
             self.cmd(
                 "confirm",
                 [_["Clone site <b>%s</b>?"] % address, _["Clone"]],
-                lambda (res): self.cbSiteClone(to, address, root_inner_path, target_address)
+                lambda res: self.cbSiteClone(to, address, root_inner_path, target_address)
             )

     def actionSiteSetLimit(self, to, size_limit):
@@ -1013,7 +1013,7 @@ class UiWebsocket(object):
         min_mtime = self.site.settings["cache"].get("time_modified_files_check")
         modified_files = self.site.settings["cache"].get("modified_files", [])

-        inner_paths = [content_inner_path] + content.get("includes", {}).keys() + content.get("files", {}).keys()
+        inner_paths = [content_inner_path] + list(content.get("includes", {}).keys()) + list(content.get("files", {}).keys())

         for relative_inner_path in inner_paths:
             inner_path = helper.getDirname(content_inner_path) + relative_inner_path
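The `lambda (res): ...` rewrites above are forced by PEP 3113: Python 3 removed tuple parameter unpacking from function signatures, so the parentheses go and `res` becomes an ordinary parameter. A standalone illustration:

    # Python 2 allowed: lambda (res): ...   -- SyntaxError in Python 3
    callback = lambda res: print("Confirmed:", res)
    callback(True)

    # If the argument really is a tuple, unpack it in the body instead:
    point_x = lambda pt: pt[0]
    print(point_x((3, 4)))  # 3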
@@ -1,3 +1,3 @@
-from UiServer import UiServer
-from UiRequest import UiRequest
-from UiWebsocket import UiWebsocket
+from .UiServer import UiServer
+from .UiRequest import UiRequest
+from .UiWebsocket import UiWebsocket
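Background for the `__init__.py` changes here and below: Python 3 resolves bare imports absolutely (PEP 328), so `from UiServer import UiServer` no longer finds a sibling module; the leading dot marks an explicit relative import. The converted file, exactly as in the diff:

    # Ui/__init__.py
    from .UiServer import UiServer       # Python 2 form: from UiServer import UiServer
    from .UiRequest import UiRequest
    from .UiWebsocket import UiWebsocket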
@@ -9,6 +9,7 @@ from Crypt import CryptBitcoin
 from Plugin import PluginManager
 from Config import config
 from util import helper
+from Debug import Debug


 @PluginManager.acceptPlugins
@@ -52,7 +53,7 @@ class User(object):
         self.delayed_save_thread = gevent.spawn_later(5, self.save)

     def getAddressAuthIndex(self, address):
-        return int(address.encode("hex"), 16)
+        return int(address.encode("ascii").hex(), 16)

     @util.Noparallel()
     def generateAuthAddress(self, address):
@@ -122,7 +123,7 @@ class User(object):
     # Add cert for the user
     def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
         # Find privatekey by auth address
-        auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0]
+        auth_privatekey = [site["auth_privatekey"] for site in list(self.sites.values()) if site["auth_address"] == auth_address][0]
         cert_node = {
             "auth_address": auth_address,
             "auth_privatekey": auth_privatekey,
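`str.encode("hex")` relied on a Python 2 codec that is gone in Python 3; the replacement goes str -> bytes explicitly and then uses `bytes.hex()` (available since 3.5). A standalone sketch of the converted helper:

    def get_address_auth_index(address):
        # Python 2: int(address.encode("hex"), 16)
        return int(address.encode("ascii").hex(), 16)

    print(hex(get_address_auth_index("1A2b")))  # 0x31413262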
@@ -4,7 +4,7 @@ import logging
 import time

 # ZeroNet Modules
-from User import User
+from .User import User
 from Plugin import PluginManager
 from Config import config

@@ -24,7 +24,7 @@ class UserManager(object):
         added = 0
         s = time.time()
         # Load new users
-        for master_address, data in json.load(open("%s/users.json" % config.data_dir)).items():
+        for master_address, data in list(json.load(open("%s/users.json" % config.data_dir)).items()):
             if master_address not in self.users:
                 user = User(master_address, data=data)
                 self.users[master_address] = user
@@ -32,7 +32,7 @@ class UserManager(object):
                 user_found.append(master_address)

         # Remove deleted adresses
-        for master_address in self.users.keys():
+        for master_address in list(self.users.keys()):
             if master_address not in user_found:
                 del(self.users[master_address])
                 self.log.debug("Removed user: %s" % master_address)
@@ -63,7 +63,7 @@ class UserManager(object):
     def get(self, master_address=None):
         users = self.list()
         if users:
-            return users.values()[0]  # Single user mode, always return the first
+            return list(users.values())[0]  # Single user mode, always return the first
         else:
             return None
@@ -1 +1 @@
-from User import User
+from .User import User
@@ -78,10 +78,12 @@ class Worker(object):
         self.task = task
         site = task["site"]
         task["workers_num"] += 1
+        error_message = "Unknown error"
         try:
             buff = self.peer.getFile(site.address, task["inner_path"], task["size"])
-        except Exception, err:
+        except Exception as err:
             self.manager.log.debug("%s: getFile error: %s" % (self.key, err))
+            error_message = str(err)
             buff = None
         if self.running is False:  # Worker no longer needed or got killed
             self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"]))
@@ -91,10 +93,11 @@ class Worker(object):
         if buff:  # Download ok
             try:
                 correct = site.content_manager.verifyFile(task["inner_path"], buff)
-            except Exception, err:
+            except Exception as err:
+                error_message = str(err)
                 correct = False
         else:  # Download error
-            err = "Download failed"
+            error_message = "Download failed"
             correct = False
         if correct is True or correct is None:  # Verify ok or same file
             self.manager.log.debug("%s: Verify correct: %s" % (self.key, task["inner_path"]))
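A guess at why `error_message` appears in this hunk rather than reusing `err`: Python 3 unbinds the `except ... as err` target when the block ends (PEP 3110), so the old pattern of reading `err` after the `try` no longer works. A hypothetical sketch of the shape (names are illustrative, not ZeroNet's):

    def download(get_file):
        error_message = "Unknown error"   # survives past the except block
        try:
            buff = get_file()
        except Exception as err:
            error_message = str(err)      # err itself is unbound after this block
            buff = None
        if buff is None:
            return "failed: %s" % error_message
        return buff

    def fail():
        raise IOError("timeout")

    print(download(fail))  # failed: timeout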
@@ -4,7 +4,7 @@ import collections

 import gevent

-from Worker import Worker
+from .Worker import Worker
 from Config import config
 from util import helper
 from Plugin import PluginManager
@@ -41,7 +41,7 @@ class WorkerManager(object):
             time.sleep(15)  # Check every 15 sec

             # Clean up workers
-            for worker in self.workers.values():
+            for worker in list(self.workers.values()):
                 if worker.task and worker.task["done"]:
                     worker.skip()  # Stop workers with task done

@@ -205,7 +205,7 @@ class WorkerManager(object):
     def findOptionalTasks(self, optional_tasks, reset_task=False):
         found = collections.defaultdict(list)  # { found_hash: [peer1, peer2...], ...}

-        for peer in self.site.peers.values():
+        for peer in list(self.site.peers.values()):
             if not peer.has_hashfield:
                 continue

@@ -226,7 +226,7 @@ class WorkerManager(object):
     def findOptionalHashIds(self, optional_hash_ids, limit=0):
         found = collections.defaultdict(list)  # { found_hash_id: [peer1, peer2...], ...}

-        for peer in self.site.peers.values():
+        for peer in list(self.site.peers.values()):
             if not peer.has_hashfield:
                 continue

@@ -242,7 +242,7 @@ class WorkerManager(object):
     # Add peers to tasks from found result
     def addOptionalPeers(self, found_ips):
         found = collections.defaultdict(list)
-        for hash_id, peer_ips in found_ips.iteritems():
+        for hash_id, peer_ips in found_ips.items():
             task = [task for task in self.tasks if task["optional_hash_id"] == hash_id]
             if task:  # Found task, lets take the first
                 task = task[0]
@@ -283,10 +283,10 @@ class WorkerManager(object):
         found = self.findOptionalTasks(optional_tasks, reset_task=reset_task)

         if found:
-            found_peers = set([peer for peers in found.values() for peer in peers])
+            found_peers = set([peer for peers in list(found.values()) for peer in peers])
             self.startWorkers(found_peers, force_num=3)

-        if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.itervalues())):
+        if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.values())):
             self.log.debug("No local result for optional files: %s" % (optional_hash_ids - set(found)))

             # Query hashfield from connected peers
@@ -308,7 +308,7 @@ class WorkerManager(object):
             ))

             if found:
-                found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers])
+                found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers])
                 self.startWorkers(found_peers, force_num=3)

         if len(found) < len(optional_hash_ids) or find_more:
@@ -344,7 +344,7 @@ class WorkerManager(object):
             ))

             if found:
-                found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers])
+                found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers])
                 self.startWorkers(found_peers, force_num=3)

             if len(thread_values) == len(threads):
@@ -376,7 +376,7 @@ class WorkerManager(object):
             self.log.debug("Found optional files after findhash random peers: %s/%s" % (len(found), len(optional_hash_ids)))

             if found:
-                found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers])
+                found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers])
                 self.startWorkers(found_peers, force_num=3)

         if len(found) < len(optional_hash_ids):
@@ -390,7 +390,7 @@ class WorkerManager(object):

     # Stop all worker
     def stopWorkers(self):
-        for worker in self.workers.values():
+        for worker in list(self.workers.values()):
             worker.stop()
         tasks = self.tasks[:]  # Copy
         for task in tasks:  # Mark all current task as failed
@@ -399,7 +399,7 @@ class WorkerManager(object):
     # Find workers by task
     def findWorkers(self, task):
         workers = []
-        for worker in self.workers.values():
+        for worker in list(self.workers.values()):
             if worker.task == task:
                 workers.append(worker)
         return workers
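On the `list(...)` wrappers above: Python 3's `keys()/values()/items()` return live views and `iteritems()/itervalues()` are gone. The snapshot via `list()` matters only when the dict can change while iterating (workers stopping, peers dropping); read-only loops such as the `stats.items()` conversion can use the view directly. A minimal demonstration:

    workers = {"w1": "idle", "w2": "busy"}

    # Snapshot first when the loop mutates the dict:
    for key in list(workers.keys()):
        if workers[key] == "idle":
            del workers[key]   # deleting during view iteration raises RuntimeError

    # A plain read-only loop needs no copy:
    for key, state in workers.items():
        print(key, state)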
@@ -1,2 +1,2 @@
-from Worker import Worker
-from WorkerManager import WorkerManager
+from .Worker import Worker
+from .WorkerManager import WorkerManager
@@ -3,26 +3,26 @@ import re

 def prefix(content):
     content = re.sub(
-        "@keyframes (.*? {.*?}\s*})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n",
+        b"@keyframes (.*? {.*?}\s*})", b"@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n",
         content, flags=re.DOTALL
     )
     content = re.sub(
-        '([^-\*])(border-radius|box-shadow|appearance|transition|animation|box-sizing|' +
-        'backface-visibility|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])',
-        '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content
+        b'([^-\*])(border-radius|box-shadow|appearance|transition|animation|box-sizing|' +
+        b'backface-visibility|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])',
+        b'\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content
     )
     content = re.sub(
-        '(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])',
-        '\\1: -webkit-\\2(\\3);' +
-        '\\1: -moz-\\2(\\3);' +
-        '\\1: -o-\\2(\\3);' +
-        '\\1: -ms-\\2(\\3);' +
-        '\\1: \\2(\\3);', content
+        b'(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])',
+        b'\\1: -webkit-\\2(\\3);' +
+        b'\\1: -moz-\\2(\\3);' +
+        b'\\1: -o-\\2(\\3);' +
+        b'\\1: -ms-\\2(\\3);' +
+        b'\\1: \\2(\\3);', content
     )
     return content

 if __name__ == "__main__":
-    print prefix("""
+    print(prefix(b"""
     .test {
         border-radius: 5px;
         background: linear-gradient(red, blue);
@@ -36,4 +36,4 @@ if __name__ == "__main__":
     }


-    """)
+    """).decode("utf8"))
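cssvendor now receives raw file contents as `bytes`, and Python 3's `re` refuses to mix `str` patterns with `bytes` input, so every pattern and replacement gains a `b` prefix. A reduced sketch of the same idea:

    import re

    css = b".test { border-radius: 5px; }"
    # Pattern, replacement and subject must all be bytes (or all str):
    out = re.sub(b"(border-radius): (.*?);", b"-webkit-\\1: \\2; \\1: \\2;", css)
    print(out.decode("utf8"))
    # .test { -webkit-border-radius: 5px; border-radius: 5px; }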
src/main.py
@@ -35,7 +35,7 @@ if not os.path.isdir(config.data_dir):
     try:
         os.chmod(config.data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
     except Exception as err:
-        print "Can't change permission of %s: %s" % (config.data_dir, err)
+        print("Can't change permission of %s: %s" % (config.data_dir, err))

 if not os.path.isfile("%s/sites.json" % config.data_dir):
     open("%s/sites.json" % config.data_dir, "w").write("{}")
@@ -48,9 +48,9 @@ if config.action == "main":
         lock = helper.openLocked("%s/lock.pid" % config.data_dir, "w")
         lock.write("%s" % os.getpid())
     except IOError as err:
-        print "Can't open lock file, your ZeroNet client is probably already running, exiting... (%s)" % err
+        print("Can't open lock file, your ZeroNet client is probably already running, exiting... (%s)" % err)
         if config.open_browser and config.open_browser != "False":
-            print "Opening browser: %s...", config.open_browser
+            print("Opening browser: %s...", config.open_browser)
             import webbrowser
             try:
                 if config.open_browser == "default_browser":
@@ -59,7 +59,7 @@ if config.action == "main":
                     browser = webbrowser.get(config.open_browser)
                 browser.open("http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage), new=2)
             except Exception as err:
-                print "Error starting browser: %s" % err
+                print("Error starting browser: %s" % err)
             sys.exit()


@@ -87,14 +87,14 @@ if config.msgpack_purepython:
 # Socket monkey patch
 if config.proxy:
     from util import SocksProxy
-    import urllib2
+    import urllib.request
     logging.info("Patching sockets to socks proxy: %s" % config.proxy)
     if config.fileserver_ip == "*":
         config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
     SocksProxy.monkeyPatch(*config.proxy.split(":"))
 elif config.tor == "always":
     from util import SocksProxy
-    import urllib2
+    import urllib.request
     logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy)
     if config.fileserver_ip == "*":
         config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
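Most of main.py is the mechanical `print x` -> `print(x)` rewrite. One converted line above keeps its Python 2 comma, so it prints two space-separated arguments instead of interpolating; `%` would be needed for interpolation:

    open_browser = "firefox"
    print("Opening browser: %s...", open_browser)   # Opening browser: %s... firefox
    print("Opening browser: %s..." % open_browser)  # Opening browser: firefox...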
@@ -118,7 +118,7 @@ class Actions(object):
         func = getattr(self, function_name, None)
         back = func(**kwargs)
         if back:
-            print back
+            print(back)

     # Default action: Start serving UiServer and FileServer
     def main(self):
@@ -153,7 +153,7 @@ class Actions(object):
         logging.info("----------------------------------------------------------------------")

         while True and not config.batch:
-            if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
+            if input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
                 break
             else:
                 logging.info("Please, secure it now, you going to need it to modify your site!")
@@ -196,7 +196,7 @@ class Actions(object):
         privatekey = getpass.getpass("Private key (input hidden):")
         try:
             succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True, remove_missing_optional=remove_missing_optional)
-        except Exception, err:
+        except Exception as err:
             logging.error("Sign error: %s" % Debug.formatException(err))
             succ = False
         if succ and publish:
@@ -220,14 +220,14 @@ class Actions(object):
                 file_correct = site.content_manager.verifyFile(
                     content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
                 )
-            except Exception, err:
+            except Exception as err:
                 file_correct = False

             if file_correct is True:
                 logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s))
             else:
                 logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, err))
-                raw_input("Continue?")
+                input("Continue?")
                 bad_files += content_inner_path

         logging.info("Verifying site files...")
@@ -258,7 +258,7 @@ class Actions(object):
         result = []
         for row in site.storage.query(query):
             result.append(dict(row))
-        print json.dumps(result, indent=4)
+        print(json.dumps(result, indent=4))

     def siteAnnounce(self, address):
         from Site.Site import Site
@@ -276,8 +276,8 @@ class Actions(object):

         s = time.time()
         site.announce()
-        print "Response time: %.3fs" % (time.time() - s)
-        print site.peers
+        print("Response time: %.3fs" % (time.time() - s))
+        print(site.peers)

     def siteDownload(self, address):
         from Site import Site
@@ -298,14 +298,14 @@ class Actions(object):
             evt.set(True)

         site.onComplete.once(lambda: onComplete(on_completed))
-        print "Announcing..."
+        print("Announcing...")
         site.announce()

         s = time.time()
-        print "Downloading..."
+        print("Downloading...")
         site.downloadContent("content.json", check_modifications=True)

-        print "Downloaded in %.3fs" % (time.time()-s)
+        print("Downloaded in %.3fs" % (time.time()-s))


     def siteNeedFile(self, address, inner_path):
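The prompts above switch from `raw_input()` to `input()`: Python 3 renamed `raw_input` to `input`, and the old Python 2 `input()` (which evaluated the reply) is gone. A sketch with the prompt stubbed out so it runs non-interactively:

    def confirm_secured(ask=input):
        # input() always returns str in Python 3 and never evaluates it
        return ask("? Have you secured your private key? (yes, no) > ").lower() == "yes"

    print(confirm_secured(lambda prompt: "YES"))  # True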
@@ -317,7 +317,7 @@ class Actions(object):
             while 1:
                 s = time.time()
                 time.sleep(1)
-                print "Switch time:", time.time() - s
+                print("Switch time:", time.time() - s)
         gevent.spawn(checker)

         logging.info("Opening a simple connection server")
@@ -328,7 +328,7 @@ class Actions(object):

         site = Site(address)
         site.announce()
-        print site.needFile(inner_path, update=True)
+        print(site.needFile(inner_path, update=True))

     def siteCmd(self, address, cmd, parameters):
         import json
@@ -398,15 +398,15 @@ class Actions(object):
         import getpass
         privatekey = getpass.getpass("Private key (input hidden):")

-        print CryptBitcoin.privatekeyToAddress(privatekey)
+        print(CryptBitcoin.privatekeyToAddress(privatekey))

     def cryptSign(self, message, privatekey):
         from Crypt import CryptBitcoin
-        print CryptBitcoin.sign(message, privatekey)
+        print(CryptBitcoin.sign(message, privatekey))

     def cryptVerify(self, message, sign, address):
         from Crypt import CryptBitcoin
-        print CryptBitcoin.verify(message, address, sign)
+        print(CryptBitcoin.verify(message, address, sign))

     def cryptGetPrivatekey(self, master_seed, site_address_index=None):
         from Crypt import CryptBitcoin
@@ -414,7 +414,7 @@ class Actions(object):
             logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed))
             return False
         privatekey = CryptBitcoin.hdPrivatekey(master_seed, site_address_index)
-        print "Requested private key: %s" % privatekey
+        print("Requested private key: %s" % privatekey)

     # Peer
     def peerPing(self, peer_ip, peer_port=None):
@@ -435,18 +435,18 @@ class Actions(object):
         peer.connect()

         if not peer.connection:
-            print "Error: Can't connect to peer (connection error: %s)" % peer.connection_error
+            print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error)
             return False
-        print "Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error)
+        print("Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error))

         for i in range(5):
-            print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt)
+            print("Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt))
             time.sleep(1)
         peer.remove()
-        print "Reconnect test..."
+        print("Reconnect test...")
         peer = Peer(peer_ip, peer_port)
         for i in range(5):
-            print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt)
+            print("Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt))
             time.sleep(1)

     def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False):
@@ -465,10 +465,10 @@ class Actions(object):
         if benchmark:
             for i in range(10):
                 peer.getFile(site, filename),
-            print "Response time: %.3fs" % (time.time() - s)
-            raw_input("Check memory")
+            print("Response time: %.3fs" % (time.time() - s))
+            input("Check memory")
         else:
-            print peer.getFile(site, filename).read()
+            print(peer.getFile(site, filename).read())

     def peerCmd(self, peer_ip, peer_port, cmd, parameters):
         logging.info("Opening a simple connection server")
@@ -489,13 +489,14 @@ class Actions(object):
             parameters = {}
         try:
             res = peer.request(cmd, parameters)
-            print json.dumps(res, indent=2, ensure_ascii=False)
-        except Exception, err:
-            print "Unknown response (%s): %s" % (err, res)
+            print(json.dumps(res, indent=2, ensure_ascii=False))
+        except Exception as err:
+            print("Unknown response (%s): %s" % (err, res))

     def getConfig(self):
         import json
-        print json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False)
+        print(json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False))


 actions = Actions()
@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io

 import difflib

@@ -31,7 +31,7 @@ def diff(old, new, limit=False):


 def patch(old_f, actions):
-    new_f = StringIO.StringIO()
+    new_f = io.BytesIO()
     for action, param in actions:
         if action == "=":  # Same lines
             new_f.write(old_f.read(param))
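`cStringIO`/`StringIO` were merged into `io` in Python 3, split by payload type: `io.BytesIO` for bytes (as the patcher needs) and `io.StringIO` for text. A quick demonstration:

    import io

    buf = io.BytesIO()          # Python 2: StringIO.StringIO() via cStringIO
    buf.write(b"new content")
    buf.seek(0)
    print(buf.read())           # b'new content'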
@@ -28,19 +28,19 @@ class Event(list):
 if __name__ == "__main__":
     def testBenchmark():
         def say(pre, text):
-            print "%s Say: %s" % (pre, text)
+            print("%s Say: %s" % (pre, text))

         import time
         s = time.time()
         on_changed = Event()
         for i in range(1000):
             on_changed.once(lambda pre: say(pre, "once"), "once")
-        print "Created 1000 once in %.3fs" % (time.time() - s)
+        print("Created 1000 once in %.3fs" % (time.time() - s))
         on_changed("#1")

     def testUsage():
         def say(pre, text):
-            print "%s Say: %s" % (pre, text)
+            print("%s Say: %s" % (pre, text))

         on_changed = Event()
         on_changed.once(lambda pre: say(pre, "once"))
@@ -49,7 +49,7 @@ class Noparallel(object):  # Only allow function running once in same time
                 return ret
             else:  # No blocking just return the thread
                 return thread
-        wrapper.func_name = func.func_name
+        wrapper.__name__ = func.__name__

         return wrapper

@@ -65,7 +65,7 @@ if __name__ == "__main__":
         @Noparallel()
         def count(self, num=5):
             for i in range(num):
-                print self, i
+                print(self, i)
                 time.sleep(1)
             return "%s return:%s" % (self, i)

@@ -74,59 +74,59 @@ if __name__ == "__main__":
         @Noparallel(blocking=False)
         def count(self, num=5):
             for i in range(num):
-                print self, i
+                print(self, i)
                 time.sleep(1)
             return "%s return:%s" % (self, i)

     def testBlocking():
         test = Test()
         test2 = Test()
-        print "Counting..."
-        print "Creating class1/thread1"
+        print("Counting...")
+        print("Creating class1/thread1")
         thread1 = gevent.spawn(test.count)
-        print "Creating class1/thread2 (ignored)"
+        print("Creating class1/thread2 (ignored)")
         thread2 = gevent.spawn(test.count)
-        print "Creating class2/thread3"
+        print("Creating class2/thread3")
         thread3 = gevent.spawn(test2.count)

-        print "Joining class1/thread1"
+        print("Joining class1/thread1")
         thread1.join()
-        print "Joining class1/thread2"
+        print("Joining class1/thread2")
         thread2.join()
-        print "Joining class2/thread3"
+        print("Joining class2/thread3")
         thread3.join()

-        print "Creating class1/thread4 (its finished, allowed again)"
+        print("Creating class1/thread4 (its finished, allowed again)")
         thread4 = gevent.spawn(test.count)
-        print "Joining thread4"
+        print("Joining thread4")
         thread4.join()

-        print thread1.value, thread2.value, thread3.value, thread4.value
-        print "Done."
+        print(thread1.value, thread2.value, thread3.value, thread4.value)
+        print("Done.")

     def testNoblocking():
         test = TestNoblock()
         test2 = TestNoblock()
-        print "Creating class1/thread1"
+        print("Creating class1/thread1")
         thread1 = test.count()
-        print "Creating class1/thread2 (ignored)"
+        print("Creating class1/thread2 (ignored)")
         thread2 = test.count()
-        print "Creating class2/thread3"
+        print("Creating class2/thread3")
         thread3 = test2.count()
-        print "Joining class1/thread1"
+        print("Joining class1/thread1")
         thread1.join()
-        print "Joining class1/thread2"
+        print("Joining class1/thread2")
         thread2.join()
-        print "Joining class2/thread3"
+        print("Joining class2/thread3")
         thread3.join()

-        print "Creating class1/thread4 (its finished, allowed again)"
+        print("Creating class1/thread4 (its finished, allowed again)")
         thread4 = test.count()
-        print "Joining thread4"
+        print("Joining thread4")
         thread4.join()

-        print thread1.value, thread2.value, thread3.value, thread4.value
-        print "Done."
+        print(thread1.value, thread2.value, thread3.value, thread4.value)
+        print("Done.")

     def testBenchmark():
         import time
|
@ -135,21 +135,21 @@ if __name__ == "__main__":
|
|||
import gc
|
||||
from greenlet import greenlet
|
||||
objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
|
||||
print "Greenlets: %s" % len(objs)
|
||||
print("Greenlets: %s" % len(objs))
|
||||
|
||||
printThreadNum()
|
||||
test = TestNoblock()
|
||||
s = time.time()
|
||||
for i in range(3):
|
||||
gevent.spawn(test.count, i + 1)
|
||||
print "Created in %.3fs" % (time.time() - s)
|
||||
print("Created in %.3fs" % (time.time() - s))
|
||||
printThreadNum()
|
||||
time.sleep(5)
|
||||
from gevent import monkey
|
||||
monkey.patch_all()
|
||||
|
||||
testBenchmark()
|
||||
print "Testing blocking mode..."
|
||||
print("Testing blocking mode...")
|
||||
testBlocking()
|
||||
print "Testing noblocking mode..."
|
||||
print("Testing noblocking mode...")
|
||||
testNoblocking()
|
||||
|
|
|
@@ -19,6 +19,6 @@ def setMaxfilesopened(limit):
             resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard))
             return True

-    except Exception, err:
+    except Exception as err:
         logging.error("Failed to modify max files open limit: %s" % err)
         return False
@@ -29,7 +29,7 @@ class Pooled(object):
                 self.pooler_running = True
                 gevent.spawn(self.pooler)
             return evt
-        wrapper.func_name = func.func_name
+        wrapper.__name__ = func.__name__
         self.func = func

         return wrapper
@@ -62,4 +62,4 @@ if __name__ == "__main__":

     s = time.time()
     gevent.joinall(threads)  # Should take 10 second
-    print time.time() - s
+    print(time.time() - s)
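`func_name` was a Python 2 alias that Python 3 dropped; `__name__` works in both, and `functools.wraps` is the broader alternative (it also carries `__doc__` and `__module__`). A sketch of the decorator pattern, with a made-up decorator name:

    import functools

    def pooled(func):
        @functools.wraps(func)            # copies __name__, __doc__, __module__, ...
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        wrapper.__name__ = func.__name__  # the diff's minimal, manual equivalent
        return wrapper

    @pooled
    def poolTest():
        pass

    print(poolTest.__name__)  # poolTest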
@@ -64,4 +64,4 @@ def query(path_pattern, filter):

 if __name__ == "__main__":
     for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
-        print row
+        print(row)
@@ -86,7 +86,7 @@ def call(event, allowed_again=10, func=None, *args, **kwargs):
 def rateLimitCleanup():
     while 1:
         expired = time.time() - 60 * 2  # Cleanup if older than 2 minutes
-        for event in called_db.keys():
+        for event in list(called_db.keys()):
             if called_db[event] < expired:
                 del called_db[event]
         time.sleep(60 * 3)  # Every 3 minutes
@@ -99,30 +99,30 @@ if __name__ == "__main__":
     import random

     def publish(inner_path):
-        print "Publishing %s..." % inner_path
+        print("Publishing %s..." % inner_path)
         return 1

     def cb(thread):
-        print "Value:", thread.value
+        print("Value:", thread.value)

-    print "Testing async spam requests rate limit to 1/sec..."
+    print("Testing async spam requests rate limit to 1/sec...")
     for i in range(3000):
         thread = callAsync("publish content.json", 1, publish, "content.json %s" % i)
         time.sleep(float(random.randint(1, 20)) / 100000)
-    print thread.link(cb)
-    print "Done"
+    print(thread.link(cb))
+    print("Done")

     time.sleep(2)

-    print "Testing sync spam requests rate limit to 1/sec..."
+    print("Testing sync spam requests rate limit to 1/sec...")
     for i in range(5):
         call("publish data.json", 1, publish, "data.json %s" % i)
         time.sleep(float(random.randint(1, 100)) / 100)
-    print "Done"
+    print("Done")

-    print "Testing cleanup"
+    print("Testing cleanup")
     thread = callAsync("publish content.json single", 1, publish, "content.json single")
-    print "Needs to cleanup:", called_db, queue_db
-    print "Waiting 3min for cleanup process..."
+    print("Needs to cleanup:", called_db, queue_db)
+    print("Waiting 3min for cleanup process...")
     time.sleep(60 * 3)
-    print "Cleaned up:", called_db, queue_db
+    print("Cleaned up:", called_db, queue_db)
@@ -57,7 +57,7 @@ def disableSSLCompression():
     try:
         openssl = openLibrary()
         openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p
-    except Exception, err:
+    except Exception as err:
         logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err)
         return False

@@ -69,7 +69,7 @@ def disableSSLCompression():
 if config.disable_sslcompression:
     try:
         disableSSLCompression()
-    except Exception, err:
+    except Exception as err:
         logging.debug("Error disabling SSL compression: %s" % err)

@@ -1,8 +1,8 @@
 import re
-import urllib2
-import httplib
+import urllib.request
+import http.client
 import logging
-from urlparse import urlparse
+from urllib.parse import urlparse
 from xml.dom.minidom import parseString
 from xml.parsers.expat import ExpatError

@@ -84,7 +84,7 @@ def _retrieve_igd_profile(url):
     Retrieve the device's UPnP profile.
     """
     try:
-        return urllib2.urlopen(url.geturl(), timeout=5).read().decode('utf-8')
+        return urllib.request.urlopen(url.geturl(), timeout=5).read().decode('utf-8')
     except socket.error:
         raise IGDError('IGD profile query timed out')

@@ -251,7 +251,7 @@ def _send_soap_request(location, upnp_schema, control_path, soap_fn,
     }
     logging.debug("Sending UPnP request to {0}:{1}...".format(
         location.hostname, location.port))
-    conn = httplib.HTTPConnection(location.hostname, location.port)
+    conn = http.client.HTTPConnection(location.hostname, location.port)
     conn.request('POST', control_path, soap_message, headers)

     response = conn.getresponse()
@@ -366,10 +366,12 @@ if __name__ == "__main__":
     import time

     s = time.time()
-    print "Opening port..."
-    print ask_to_open_port(15443, "ZeroNet", protos=["TCP"])
-    print "Done in", time.time() - s
+    print("Opening port...")
+    print(ask_to_open_port(15443, "ZeroNet", protos=["TCP"]))
+    print("Done in", time.time() - s)

-    print "Closing port..."
-    print ask_to_close_port(15443, "ZeroNet", protos=["TCP"])
-    print "Done in", time.time() - s
-    """
+    print("Closing port...")
+    print(ask_to_close_port(15443, "ZeroNet", protos=["TCP"]))
+    print("Done in", time.time() - s)
+    """
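The UPnP module maps three renamed stdlib modules: `urllib2` -> `urllib.request`, `httplib` -> `http.client`, `urlparse` -> `urllib.parse`. A sketch of the equivalents (the gateway address is a made-up example):

    import urllib.request                  # Python 2: urllib2
    import http.client                     # Python 2: httplib
    from urllib.parse import urlparse      # Python 2: from urlparse import urlparse

    location = urlparse("http://192.168.0.1:1900/igd.xml")
    conn = http.client.HTTPConnection(location.hostname, location.port)
    # conn.request("GET", location.path); use urllib.request.urlopen(...) for simple GETs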
@@ -1,3 +1,3 @@
-from Event import Event
-from Noparallel import Noparallel
-from Pooled import Pooled
+from .Event import Event
+from .Noparallel import Noparallel
+from .Pooled import Pooled
@@ -67,7 +67,7 @@ def getFreeSpace():
             ctypes.c_wchar_p(config.data_dir), None, None, ctypes.pointer(free_space_pointer)
         )
         free_space = free_space_pointer.value
-    except Exception, err:
+    except Exception as err:
         logging.error("GetFreeSpace error: %s" % err)
     return free_space

@@ -153,7 +153,7 @@ def toHashId(hash):
 def mergeDicts(dicts):
     back = collections.defaultdict(set)
     for d in dicts:
-        for key, val in d.iteritems():
+        for key, val in d.items():
             back[key].update(val)
     return dict(back)

@@ -161,16 +161,16 @@ def mergeDicts(dicts):
 # Request https url using gevent SSL error workaround
 def httpRequest(url, as_file=False):
     if url.startswith("http://"):
-        import urllib
-        response = urllib.urlopen(url)
+        import urllib.request
+        response = urllib.request.urlopen(url)
     else:  # Hack to avoid Python gevent ssl errors
         import socket
-        import httplib
+        import http.client
         import ssl

         host, request = re.match("https://(.*?)(/.*?)$", url).groups()

-        conn = httplib.HTTPSConnection(host)
+        conn = http.client.HTTPSConnection(host)
         sock = socket.create_connection((conn.host, conn.port), conn.timeout, conn.source_address)
         conn.sock = ssl.wrap_socket(sock, conn.key_file, conn.cert_file)
         conn.request("GET", request)
@@ -180,8 +180,8 @@ def httpRequest(url, as_file=False):
         response = httpRequest(response.getheader('Location'))

     if as_file:
-        import cStringIO as StringIO
-        data = StringIO.StringIO()
+        import io
+        data = io.BytesIO()
         while True:
             buff = response.read(1024 * 16)
             if not buff: