Merge branch 'master' into disable_plugins_in_data

Commit 497ad0f4b1
35 changed files with 342 additions and 777 deletions
src/Config.py (195 lines changed)
@@ -9,185 +9,13 @@ import logging.handlers
 import stat
 import time

-trackers = [
-    'zero://188.242.242.224:26474',
-    'zero://2001:19f0:8001:1d2f:5400:2ff:fe83:5bf7:23141',
-    'zero://200:1e7a:5100:ef7c:6fa4:d8ae:b91c:a74:15441',
-    'zero://23.184.48.134:15441',
-    'zero://57hzgtu62yzxqgbvgxs7g3lfck3za4zrda7qkskar3tlak5recxcebyd.onion:15445',
-    'zero://6i54dd5th73oelv636ivix6sjnwfgk2qsltnyvswagwphub375t3xcad.onion:15441',
-    'zero://f2hnjbggc3c2u2apvxdugirnk6bral54ibdoul3hhvu7pd4fso5fq3yd.onion:15441',
-    'zero://gugt43coc5tkyrhrc3esf6t6aeycvcqzw7qafxrjpqbwt4ssz5czgzyd.onion:15441',
-    'zero://k5w77dozo3hy5zualyhni6vrh73iwfkaofa64abbilwyhhd3wgenbjqd.onion:15441',
-    'zero://ow7in4ftwsix5klcbdfqvfqjvimqshbm2o75rhtpdnsderrcbx74wbad.onion:15441',
-    'zero://pn4q2zzt2pw4nk7yidxvsxmydko7dfibuzxdswi6gu6ninjpofvqs2id.onion:15441',
-    'zero://skdeywpgm5xncpxbbr4cuiip6ey4dkambpanog6nruvmef4f3e7o47qd.onion:15441',
-    'zero://wlxav3szbrdhest4j7dib2vgbrd7uj7u7rnuzg22cxbih7yxyg2hsmid.onion:15441',
-    'zero://zy7wttvjtsijt5uwmlar4yguvjc2gppzbdj4v6bujng6xwjmkdg7uvqd.onion:15441',
-    'http://bt.okmp3.ru:2710/announce',
-    'http://fxtt.ru:80/announce',
-    'http://incine.ru:6969/announce',
-    'http://moeweb.pw:6969/announce',
-    'http://open.acgnxtracker.com:80/announce',
-    'http://t.acg.rip:6699/announce',
-    'http://t.nyaatracker.com:80/announce',
-    'http://t.overflow.biz:6969/announce',
-    'http://tracker.files.fm:6969/announce',
-    'http://tracker.mywaifu.best:6969/announce',
-    'http://tracker.vrpnet.org:6969/announce',
-    'http://vps02.net.orel.ru:80/announce',
-    'udp://960303.xyz:6969/announce',
-    'udp://aarsen.me:6969/announce',
-    'udp://astrr.ru:6969/announce',
-    'udp://ben.kerbertools.xyz:6969/announce',
-    'udp://bt1.archive.org:6969/announce',
-    'udp://bt2.archive.org:6969/announce',
-    'udp://bt.ktrackers.com:6666/announce',
-    'udp://bubu.mapfactor.com:6969/announce',
-    'udp://c.ns.cluefone.com:6969/announce',
-    'udp://cutscloud.duckdns.org:6969/announce',
-    'udp://download.nerocloud.me:6969/announce',
-    'udp://epider.me:6969/announce',
-    'udp://exodus.desync.com:6969/announce',
-    'udp://htz3.noho.st:6969/announce',
-    'udp://ipv4.tracker.harry.lu:80/announce',
-    'udp://laze.cc:6969/announce',
-    'udp://mail.artixlinux.org:6969/announce',
-    'udp://mirror.aptus.co.tz:6969/announce',
-    'udp://moonburrow.club:6969/announce',
-    'udp://movies.zsw.ca:6969/announce',
-    'udp://mts.tvbit.co:6969/announce',
-    'udp://new-line.net:6969/announce',
-    'udp://open.demonii.com:1337/announce',
-    'udp://open.stealth.si:80/announce',
-    'udp://opentracker.i2p.rocks:6969/announce',
-    'udp://p4p.arenabg.com:1337/announce',
-    'udp://psyco.fr:6969/announce',
-    'udp://public.publictracker.xyz:6969/announce',
-    'udp://rep-art.ynh.fr:6969/announce',
-    'udp://run.publictracker.xyz:6969/announce',
-    'udp://sanincode.com:6969/announce',
-    'udp://slicie.icon256.com:8000/announce',
-    'udp://tamas3.ynh.fr:6969/announce',
-    'udp://thouvenin.cloud:6969/announce',
-    'udp://torrentclub.space:6969/announce',
-    'udp://tracker.0x.tf:6969/announce',
-    'udp://tracker1.bt.moack.co.kr:80/announce',
-    'udp://tracker.4.babico.name.tr:3131/announce',
-    'udp://tracker.altrosky.nl:6969/announce',
-    'udp://tracker.artixlinux.org:6969/announce',
-    'udp://tracker.farted.net:6969/announce',
-    'udp://tracker.jonaslsa.com:6969/announce',
-    'udp://tracker.joybomb.tw:6969/announce',
-    'udp://tracker.monitorit4.me:6969/announce',
-    'udp://tracker.opentrackr.org:1337/announce',
-    'udp://tracker.pomf.se:80/announce',
-    'udp://tracker.publictracker.xyz:6969/announce',
-    'udp://tracker.srv00.com:6969/announce',
-    'udp://tracker.tcp.exchange:6969/announce',
-    'udp://tracker.theoks.net:6969/announce',
-    'udp://transkaroo.joustasie.net:6969/announce',
-    'udp://uploads.gamecoast.net:6969/announce',
-    'udp://v2.iperson.xyz:6969/announce',
-    'udp://vibe.sleepyinternetfun.xyz:1738/announce',
-    'udp://www.skynetcenter.me:6969/announce',
-    'udp://www.torrent.eu.org:451/announce',
-    'zero://194.5.98.39:15441',
-    'zero://145.239.95.38:15441',
-    'zero://178.128.34.249:26117',
-    'zero://217.18.217.143:39288',
-    'zero://83.246.141.203:22207',
-    'zero://syncronite.loki:15441',
-    'zero://2a05:dfc1:4000:1e00::a:15441',
-    'zero://2400:6180:100:d0::8fd:8001:21697',
-    'zero://2001:19f0:8001:1d2f:5400:2ff:fe83:5bf7:30530',
-    'zero://73pyhfwfwsrhfw76knkjfnw6o3lk53zfo7hlxdmxbj75sjcnol5cioad.onion:15442',
-    'zero://fzlzmxuz2bust72cuy5g4w6d62tx624xcjaupf2kp7ffuitbiniy2hqd.onion:15441',
-    'zero://rlcjomszyitxpwv7kzopmqgzk3bdpsxeull4c3s6goszkk6h2sotfoad.onion:15441',
-    'zero://tqmo2nffqo4qc5jgmz3me5eri3zpgf3v2zciufzmhnvznjve5c3argad.onion:15441',
-    'http://107.189.31.134:6969/announce',
-    'http://119.28.71.45:8080/announce',
-    'http://129.146.193.240:6699/announce',
-    'http://159.69.65.157:6969/announce',
-    'http://163.172.29.130:80/announce',
-    'http://185.130.47.2:6969/announce',
-    'http://45.67.35.111:6969/announce',
-    'http://61.222.178.254:6969/announce',
-    'http://83.31.30.182:6969/announce',
-    'http://93.158.213.92:1337/announce',
-    'http://95.217.167.10:6969/announce',
-    'udp://102.223.180.235:6969/announce',
-    'udp://103.122.21.50:6969/announce',
-    'udp://104.131.98.232:6969/announce',
-    'udp://104.244.77.87:6969/announce',
-    'udp://107.189.11.58:6969/announce',
-    'udp://107.189.31.134:6969/announce',
-    'udp://139.144.68.88:6969/announce',
-    'udp://149.28.239.70:6969/announce',
-    'udp://15.204.205.14:6969/announce',
-    'udp://156.234.201.18:80/announce',
-    'udp://158.101.161.60:3131/announce',
-    'udp://163.172.29.130:80/announce',
-    'udp://167.99.185.219:6969/announce',
-    'udp://176.31.250.174:6969/announce',
-    'udp://176.56.4.238:6969/announce',
-    'udp://178.32.222.98:3391/announce',
-    'udp://184.105.151.166:6969/announce',
-    'udp://185.102.219.163:6969/announce',
-    'udp://185.181.60.155:80/announce',
-    'udp://185.217.199.21:6969/announce',
-    'udp://185.44.82.25:1337/announce',
-    'udp://185.68.21.244:6969/announce',
-    'udp://192.3.165.191:6969/announce',
-    'udp://192.3.165.198:6969/announce',
-    'udp://192.95.46.115:6969/announce',
-    'udp://193.176.158.162:6969/announce',
-    'udp://193.37.214.12:6969/announce',
-    'udp://193.42.111.57:9337/announce',
-    'udp://198.100.149.66:6969/announce',
-    'udp://20.100.205.229:6969/announce',
-    'udp://207.241.226.111:6969/announce',
-    'udp://207.241.231.226:6969/announce',
-    'udp://209.141.59.16:6969/announce',
-    'udp://212.237.53.230:6969/announce',
-    'udp://23.153.248.2:6969/announce',
-    'udp://23.254.228.89:6969/announce',
-    'udp://37.187.111.136:6969/announce',
-    'udp://37.27.4.53:6969/announce',
-    'udp://38.7.201.142:6969/announce',
-    'udp://45.154.253.6:6969/announce',
-    'udp://45.63.30.114:6969/announce',
-    'udp://45.9.60.30:6969/announce',
-    'udp://46.38.238.105:6969/announce',
-    'udp://49.12.76.8:8080/announce',
-    'udp://5.102.159.190:6969/announce',
-    'udp://5.196.89.204:6969/announce',
-    'udp://51.15.79.209:6969/announce',
-    'udp://51.159.54.68:6666/announce',
-    'udp://51.68.174.87:6969/announce',
-    'udp://51.81.222.188:6969/announce',
-    'udp://52.58.128.163:6969/announce',
-    'udp://61.222.178.254:6969/announce',
-    'udp://77.73.69.230:6969/announce',
-    'udp://83.102.180.21:80/announce',
-    'udp://83.31.30.182:6969/announce',
-    'udp://85.206.172.159:6969/announce',
-    'udp://85.239.33.28:6969/announce',
-    'udp://86.57.161.157:6969/announce',
-    'udp://91.216.110.52:451/announce',
-    'udp://93.158.213.92:1337/announce',
-    'udp://94.103.87.87:6969/announce',
-    'udp://95.216.74.39:6969/announce',
-    'udp://95.31.11.224:6969/announce',
-]

 class Config(object):

     def __init__(self, argv):
-        self.version = "0.7.9+"
+        self.version = "0.7.10+"
         self.user_agent = "conservancy"
         # DEPRECATED ; replace with git-generated commit
-        self.rev = 5110
+        self.rev = 5121
         self.user_agent_rev = 8192
         self.argv = argv
         self.action = None
@@ -243,7 +71,7 @@ class Config(object):
         elif this_file.endswith("/core/src/Config.py"):
             # Running as exe or source is at Application Support directory, put var files to outside of core dir
             start_dir = this_file.replace("/core/src/Config.py", "")
-        elif this_file.endswith("usr/share/zeronet/src/Config.py"):
+        elif not os.access(this_file.replace('/src/Config.py', ''), os.R_OK | os.W_OK):
+            # Running from non-writeable location, e.g., AppImage
             start_dir = os.path.expanduser("~/ZeroNet")
         else:
@@ -309,6 +137,8 @@ class Config(object):
                             default=15441, nargs='?')
         action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
                             default="content.json", metavar="inner_path")
+        action.add_argument('--recursive', help="Whether to publish all of site's content.json. "
+                            "Overrides --inner_path. (default: false)", action='store_true', dest='recursive')

         # SiteVerify
         action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
@@ -320,6 +150,10 @@ class Config(object):
         action.add_argument('cmd', help='API command name')
         action.add_argument('parameters', help='Parameters of the command', nargs='?')

+        # Import bundled sites
+        action = self.subparsers.add_parser("importBundle", help='Import sites from a .zip bundle')
+        action.add_argument('bundle', help='Path to a data bundle')
+
         # dbRebuild
         action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
         action.add_argument('address', help='Site to rebuild')
@@ -422,12 +256,15 @@ class Config(object):
         self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, type=int, metavar='ip', nargs='*')
         self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*')
         self.parser.add_argument('--offline', help='Disable network communication', action='store_true')
+        self.parser.add_argument('--disable_port_check', help='Disable checking port', action='store_true')

         self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
         self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
         self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
-        self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=trackers, metavar='protocol://address', nargs='*')
-        self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', metavar='path', nargs='*')
+        self.parser.add_argument('--bootstrap_url', help='URL of file with link to bootstrap bundle', default='https://raw.githubusercontent.com/zeronet-conservancy/zeronet-conservancy/master/bootstrap.url', type=str)
+        self.parser.add_argument('--disable_bootstrap', help='Disable downloading bootstrap information from clearnet', action='store_true')
+        self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=[], metavar='protocol://address', nargs='*')
+        self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=['{data_dir}/15CEFKBRHFfAP9rmL6hhLmHoXrrgmw4B5o/cache/1/Syncronite.html'], metavar='path', nargs='*')
         self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable")
         self.parser.add_argument('--use_libsecp256k1', help='Use Libsecp256k1 liblary for speedup', type='bool', choices=[True, False], default=True)
         self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=True)
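Note: the hardcoded tracker list deleted above pairs with the new --bootstrap_url/--disable_bootstrap options: trackers now arrive via a downloaded bootstrap bundle plus the Syncronite tracker-list file cached under data_dir. A minimal sketch of the two-step fetch that the new init_dirs() in src/main.py (further down in this diff) performs, assuming the `requests` package is available:

```python
# Sketch of the bootstrap flow introduced by this merge:
# bootstrap.url is an indirection file whose body is the URL of the actual .zip bundle.
import requests
from io import BytesIO

BOOTSTRAP_URL = 'https://raw.githubusercontent.com/zeronet-conservancy/zeronet-conservancy/master/bootstrap.url'

response = requests.get(BOOTSTRAP_URL)               # step 1: fetch the indirection file
bundle_url = response.text
bundle = BytesIO(requests.get(bundle_url).content)   # step 2: fetch the bundle itself
# importBundle(bundle)  # would unpack site dirs into data_dir and register them in sites.json
```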
@@ -797,10 +634,6 @@ class Config(object):
         except Exception as err:
             print("Can't change permission of %s: %s" % (self.log_dir, err))

-        # Make warning hidden from console
-        logging.WARNING = 15  # Don't display warnings if not in debug mode
-        logging.addLevelName(15, "WARNING")
-
         logging.getLogger('').name = "-"  # Remove root prefix

         self.error_logger = ErrorLogHandler()
@@ -95,17 +95,6 @@ class ContentDbDict(dict):
             back.append((key, val))
         return back

-    def values(self):
-        back = []
-        for key, val in dict.iteritems(self):
-            if not val:
-                try:
-                    val = self.loadItem(key)
-                except Exception:
-                    continue
-            back.append(val)
-        return back
-
     def get(self, key, default=None):
         try:
             return self.__getitem__(key)
@@ -928,10 +928,7 @@ class ContentManager(object):
             new_content = file
         else:
             try:
-                if sys.version_info.major == 3 and sys.version_info.minor < 6:
-                    new_content = json.loads(file.read().decode("utf8"))
-                else:
-                    new_content = json.load(file)
+                new_content = json.load(file)
             except Exception as err:
                 raise VerifyError(f"Invalid json file: {err}")
         if inner_path in self.contents:
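Note: the deleted fallback only mattered on Python 3.5 and older; since Python 3.6, json.load()/json.loads() accept bytes and binary file objects directly, which is why the version check here (and the identical one in Db.py below) collapses to a single json.load(file) call:

```python
import io
import json

# json.load() decodes bytes input itself on Python 3.6+,
# so the manual file.read().decode("utf8") path is redundant.
data = json.load(io.BytesIO(b'{"ok": true}'))
assert data == {"ok": True}
```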
@@ -99,3 +99,16 @@ def verify(data, valid_address, sign, lib_verify=None):  # Verify data using add
         return sign_address in valid_address
     else:  # One possible address
         return sign_address == valid_address
+
+def isValidAddress(addr):
+    '''Check if provided address is valid bitcoin address'''
+    if addr[0] != '1':
+        # no support for new-style addrs
+        return False
+    from base58 import b58decode
+    bs = b58decode(addr)
+    main = bs[:-4]
+    checksum = bs[-4:]
+    h1 = hashlib.sha256(main).digest()
+    h2 = hashlib.sha256(h1).digest()
+    return h2[:4] == checksum
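Note: a self-contained sketch of the base58check validation the new isValidAddress() performs, assuming the third-party `base58` package is installed; the sample address is the Syncronite site address that appears elsewhere in this diff:

```python
import hashlib
from base58 import b58decode  # third-party package, the same one the new code imports

def is_valid_address(addr):
    # Same checks as the new isValidAddress(): legacy '1'-prefixed addresses only,
    # then double-SHA256 over the payload must reproduce the 4-byte checksum.
    if addr[0] != '1':
        return False
    bs = b58decode(addr)
    main, checksum = bs[:-4], bs[-4:]
    return hashlib.sha256(hashlib.sha256(main).digest()).digest()[:4] == checksum

print(is_valid_address('15CEFKBRHFfAP9rmL6hhLmHoXrrgmw4B5o'))  # True
print(is_valid_address('badaddress'))                          # False
```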
@@ -377,10 +377,7 @@ class Db(object):
                     if file_path.endswith("json.gz"):
                         file = helper.limitedGzipFile(fileobj=file)

-                    if sys.version_info.major == 3 and sys.version_info.minor < 6:
-                        data = json.loads(file.read().decode("utf8"))
-                    else:
-                        data = json.load(file)
+                    data = json.load(file)
                 except Exception as err:
                     self.log.debug("Json file %s load error: %s" % (file_path, err))
                     data = {}
@@ -152,9 +152,13 @@ class FileServer(ConnectionServer):
         FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest

     def portCheck(self):
-        if config.offline or config.tor == 'always':
-            msg = "Offline mode" if config.offline else "Tor-only"
-            self.log.info(f'{msg}: port check disabled')
+        if config.offline or config.tor == 'always' or config.disable_port_check:
+            if config.offline:
+                self.log.info(f'Offline mode: port check disabled')
+            elif config.tor == 'always':
+                self.log.info('Tor-only mode: port check disabled')
+            else:
+                self.log.info('Port check disabled')
             res = {"ipv4": None, "ipv6": None}
             self.port_opened = res
             return res
@@ -28,11 +28,8 @@ class PeerPortchecker(object):
         return urllib.request.urlopen(req, timeout=20.0)

     def portOpen(self, port):
-        # self.log.info("Not trying to open port using UpnpPunch until it's proven robust...")
-        # return False
-
         try:
-            UpnpPunch.ask_to_open_port(port, 'ZeroNet', retries=3, protos=["TCP"])
+            UpnpPunch.ask_to_open_port(port, retries=3, protos=["TCP"])
            self.upnp_port_opened = True
         except Exception as err:
             self.log.warning("UpnpPunch run error: %s" % Debug.formatException(err))
@@ -87,7 +87,11 @@ class Site(object):
     # Load site settings from data/sites.json
     def loadSettings(self, settings=None):
         if not settings:
-            settings = json.load(open("%s/sites.json" % config.data_dir)).get(self.address)
+            try:
+                settings = json.load(open(f'{config.data_dir}/sites.json')).get(self.address)
+            except Exception as err:
+                logging.error(f'Error loading {config.data_dir}/sites.json: {err}')
+                settings = {}
         if settings:
             self.settings = settings
             if "cache" not in settings:
@@ -202,7 +202,7 @@ class SiteAnnouncer(object):
             else:
                 raise AnnounceError("Unknown protocol: %s" % address_parts["protocol"])
         except Exception as err:
-            self.site.log.warning("Tracker %s announce failed: %s in mode %s" % (tracker, Debug.formatException(err), mode))
+            self.site.log.debug("Tracker %s announce failed: %s in mode %s" % (tracker, Debug.formatException(err), mode))
             error = err

         if error:
@@ -31,17 +31,18 @@ class SiteManager(object):
     @util.Noparallel()
     def load(self, cleanup=True, startup=False):
         from .Site import Site
-        self.log.info("Loading sites... (cleanup: %s, startup: %s)" % (cleanup, startup))
+        self.log.info(f'Loading sites... ({cleanup=}, {startup=})')
         self.loaded = False
         address_found = []
         added = 0
         load_s = time.time()
         # Load new adresses
         try:
-            json_path = "%s/sites.json" % config.data_dir
+            json_path = f"{config.data_dir}/sites.json"
             data = json.load(open(json_path))
         except Exception as err:
-            raise Exception("Unable to load %s: %s" % (json_path, err))
+            self.log.error(f"Unable to load {json_path}: {err}")
+            data = {}

         sites_need = []

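Note: the f'...({cleanup=}, {startup=})' form uses the `=` debug specifier added in Python 3.8, so this code path requires 3.8+; it expands each name to name=repr(value):

```python
cleanup, startup = True, False
print(f'Loading sites... ({cleanup=}, {startup=})')
# -> Loading sites... (cleanup=True, startup=False)
```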
@@ -452,19 +452,21 @@ class SiteStorage(object):
                         bad_files.append(file_inner_path)
                         continue

+                    error = None
                     if quick_check:
                         ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
                         if not ok:
-                            err = "Invalid size"
+                            error = "Invalid size"
                     else:
                         try:
                             ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
                         except Exception as err:
+                            error = err
                             ok = False

                     if not ok:
                         back["num_file_invalid"] += 1
-                        self.log.debug("[INVALID] %s: %s" % (file_inner_path, err))
+                        self.log.debug("[INVALID] %s: %s" % (file_inner_path, error))
                         if add_changed or content.get("cert_user_id"):  # If updating own site only add changed user files
                             bad_files.append(file_inner_path)
@@ -15,11 +15,9 @@ class TestThreadPool:
         @pool.wrap
         def blocker():
            events.append("S")
-            out = 0
-            for i in range(10000000):
-                if i == 3000000:
-                    events.append("M")
-                out += 1
+            time.sleep(0.001)
+            events.append("M")
+            time.sleep(0.001)
             events.append("D")
             return out
@@ -30,9 +28,6 @@ class TestThreadPool:

         assert events == ["S"] * 3 + ["M"] * 3 + ["D"] * 3

-        res = blocker()
-        assert res == 10000000
-
     def testLockBlockingSameThread(self):
         lock = ThreadPool.Lock()
@@ -63,9 +63,6 @@ config.debug = True

 os.chdir(os.path.abspath(os.path.dirname(__file__) + "/../.."))  # Set working dir

-all_loaded = PluginManager.plugin_manager.loadPlugins()
-assert all_loaded, "Not all plugin loaded successfully"
-
 config.loadPlugins()
 config.parse(parse_config=False)  # Parse again to add plugin configuration options
@@ -1,6 +1,6 @@
 [pytest]
 python_files = Test*.py
-addopts = -rsxX -v --durations=6 --no-print-logs --capture=fd
+addopts = -rsxX -v --durations=6 --capture=fd
 markers =
     slow: mark a tests as slow.
     webtest: mark a test as a webtest.
@@ -231,8 +231,12 @@ class UiRequest(object):
     # Return: <dict> Posted variables
     def getPosted(self):
         if self.env['REQUEST_METHOD'] == "POST":
+            try:
+                content_length = int(self.env.get('CONTENT_LENGTH', 0))
+            except ValueError:
+                content_length = 0
             return dict(urllib.parse.parse_qsl(
-                self.env['wsgi.input'].readline().decode()
+                self.env['wsgi.input'].read(content_length).decode()
             ))
         else:
             return {}
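Note: readline() stops at the first newline, so a POST body containing a newline was silently truncated; reading exactly CONTENT_LENGTH bytes is the WSGI-correct approach. A stdlib-only sketch of the new behaviour:

```python
import io
import urllib.parse

# Minimal WSGI-style environ for a POST request, mirroring the new getPosted():
env = {
    'REQUEST_METHOD': 'POST',
    'CONTENT_LENGTH': '13',
    'wsgi.input': io.BytesIO(b'a=1&b=hello+w'),
}
content_length = int(env.get('CONTENT_LENGTH', 0))
posted = dict(urllib.parse.parse_qsl(env['wsgi.input'].read(content_length).decode()))
print(posted)  # {'a': '1', 'b': 'hello w'}
```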
@@ -282,13 +286,23 @@ class UiRequest(object):

     # Send response headers
     def sendHeader(self, status=200, content_type="text/html", noscript=False, allow_ajax=False, script_nonce=None, extra_headers=[]):
+        url = self.getRequestUrl()
+        referer = self.env.get('HTTP_REFERER')
+        origin = self.env.get('HTTP_ORIGIN')
+        fetch_site = self.env.get('HTTP_SEC_FETCH_SITE')
+        fetch_mode = self.env.get('HTTP_SEC_FETCH_MODE')
+        not_same_ref = referer and not self.isSameHost(referer, url)
+        not_same_origin = origin and not self.isSameHost(origin, url)
+        cross_site_not_navigate = not referer and fetch_site == 'cross-site' and not fetch_mode == 'navigate'
+        if status != 404 and (not_same_ref or not_same_origin or cross_site_not_navigate):
+            # pretend nothing is here for third-party access
+            return self.error404()
+
         headers = {}
         headers["Version"] = "HTTP/1.1"
         headers["Connection"] = "Keep-Alive"
         headers["Keep-Alive"] = "max=25, timeout=30"
         headers["X-Frame-Options"] = "SAMEORIGIN"
         if content_type != "text/html" and self.env.get("HTTP_REFERER") and self.isSameOrigin(self.getReferer(), self.getRequestUrl()):
             headers["Access-Control-Allow-Origin"] = "*"  # Allow load font files from css

         if noscript:
             headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src *; font-src * data:; media-src *; style-src * 'unsafe-inline';"
@@ -398,8 +412,9 @@ class UiRequest(object):
         if self.isWebSocketRequest():
             return self.error403("WebSocket request not allowed to load wrapper")  # No websocket

-        if "text/html" not in self.env.get("HTTP_ACCEPT", ""):
-            return self.error403("Invalid Accept header to load wrapper: %s" % self.env.get("HTTP_ACCEPT", ""))
+        http_accept = self.env.get("HTTP_ACCEPT", "")
+        if "text/html" not in http_accept and "*/*" not in http_accept:
+            return self.error403(f"Invalid Accept header to load wrapper: {http_accept}")
         if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""):
             return self.error403("Prefetch not allowed to load wrapper")
@@ -604,7 +619,23 @@ class UiRequest(object):
             self.server.add_nonces.append(add_nonce)
         return add_nonce

+    def isSameHost(self, url_a, url_b):
+        """Check if urls have the same HOST (to prevent leaking resources to clearnet sites)"""
+        if not url_a or not url_b:
+            return False
+
+        url_a = url_a.replace("/raw/", "/")
+        url_b = url_b.replace("/raw/", "/")
+
+        origin_pattern = "http[s]{0,1}://(.*?/).*"
+
+        origin_a = re.sub(origin_pattern, "\\1", url_a)
+        origin_b = re.sub(origin_pattern, "\\1", url_b)
+
+        return origin_a == origin_b
+
     def isSameOrigin(self, url_a, url_b):
         """Check if 0net origin is the same"""
         if not url_a or not url_b:
             return False
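Note: a condensed, standalone copy of the new isSameHost() showing the intended behaviour: same-host ZeroNet URLs (including /raw/ variants) compare equal, clearnet referers do not:

```python
import re

def is_same_host(url_a, url_b):
    # Condensed copy of the isSameHost() method added above.
    if not url_a or not url_b:
        return False
    url_a = url_a.replace("/raw/", "/")
    url_b = url_b.replace("/raw/", "/")
    origin_pattern = "http[s]{0,1}://(.*?/).*"
    return re.sub(origin_pattern, "\\1", url_a) == re.sub(origin_pattern, "\\1", url_b)

print(is_same_host('http://127.0.0.1:43110/site/a.css',
                   'http://127.0.0.1:43110/raw/site/b.png'))  # True: same host
print(is_same_host('http://127.0.0.1:43110/site/a.css',
                   'https://example.org/page'))               # False: third-party
```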
@@ -7,6 +7,8 @@ from Debug import Debug
 from Config import config
 from Content.ContentManager import VerifyError

+import traceback
+

 class WorkerDownloadError(Exception):
     pass
@@ -119,13 +121,15 @@ class Worker(object):
             self.manager.log.error("%s: Error writing: %s (%s: %s)" % (self.key, task["inner_path"], type(err), err))
             raise WorkerIOError(str(err))

-    def onTaskVerifyFail(self, task, error_message):
+    def onTaskVerifyFail(self, task, error):
         self.num_failed += 1
         if self.manager.started_task_num < 50 or config.verbose:
-            self.manager.log.debug(
+            self.manager.log.info(
                 "%s: Verify failed: %s, error: %s, failed peers: %s, workers: %s" %
-                (self.key, task["inner_path"], error_message, len(task["failed"]), task["workers_num"])
+                (self.key, task["inner_path"], error, len(task["failed"]), task["workers_num"])
             )
+            # traceback.format_
+            self.manager.log.debug(''.join(traceback.format_exception(error)))
         task["failed"].append(self.peer)
         self.peer.hash_failed += 1
         if self.peer.hash_failed >= max(len(self.manager.tasks), 3) or self.peer.connection_error > 10:
src/main.py (154 lines changed)
@@ -3,6 +3,7 @@ import sys
 import stat
 import time
 import logging
+from util.compat import *

 startup_errors = []
 def startupError(msg):
@@ -34,24 +35,79 @@ def load_config():

 load_config()

-def init_dirs():
-    if not os.path.isdir(config.data_dir):
-        os.mkdir(config.data_dir)
-        try:
-            os.chmod(config.data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
-        except Exception as err:
-            startupError("Can't change permission of %s: %s" % (config.data_dir, err))
+def importBundle(bundle):
+    from zipfile import ZipFile
+    from Crypt.CryptBitcoin import isValidAddress
+    import json
+
-    sites_json = f"{config.data_dir}/sites.json"
+    sites_json_path = f"{config.data_dir}/sites.json"
+    try:
+        with open(sites_json_path) as f:
+            sites = json.load(f)
+    except Exception as err:
+        sites = {}
+
+    with ZipFile(bundle) as zf:
+        all_files = zf.namelist()
+        top_files = list(set(map(lambda f: f.split('/')[0], all_files)))
+        if len(top_files) == 1 and not isValidAddress(top_files[0]):
+            prefix = top_files[0]+'/'
+        else:
+            prefix = ''
+        top_2 = list(set(filter(lambda f: len(f)>0,
+                                map(lambda f: removeprefix(f, prefix).split('/')[0], all_files))))
+        for d in top_2:
+            if isValidAddress(d):
+                logging.info(f'unpack {d} into {config.data_dir}')
+                for fname in filter(lambda f: f.startswith(prefix+d) and not f.endswith('/'), all_files):
+                    tgt = config.data_dir + '/' + removeprefix(fname, prefix)
+                    logging.info(f'-- {fname} --> {tgt}')
+                    info = zf.getinfo(fname)
+                    info.filename = tgt
+                    zf.extract(info)
+                logging.info(f'add site {d}')
+                sites[d] = {}
+            else:
+                logging.info(f'Warning: unknown file in a bundle: {prefix+d}')
+    with open(sites_json_path, 'w') as f:
+        json.dump(sites, f)
+
+def init_dirs():
+    data_dir = config.data_dir
+    if not os.path.isdir(data_dir):
+        os.mkdir(data_dir)
+        try:
+            os.chmod(data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
+        except Exception as err:
+            startupError(f"Can't change permission of {data_dir}: {err}")
+
+    # download latest bootstrap bundle
+    if not config.disable_bootstrap and not config.offline:
+        import requests
+        from io import BytesIO
+
+        print(f'fetching {config.bootstrap_url}')
+        response = requests.get(config.bootstrap_url)
+        if response.status_code != 200:
+            startupError(f"Cannot load bootstrap bundle (response status: {response.status_code})")
+        url = response.text
+        print(f'got {url}')
+        response = requests.get(url)
+        if response.status_code < 200 or response.status_code >= 300:
+            startupError(f"Cannot load boostrap bundle (response status: {response.status_code})")
+        importBundle(BytesIO(response.content))
+
+    sites_json = f"{data_dir}/sites.json"
     if not os.path.isfile(sites_json):
         with open(sites_json, "w") as f:
             f.write("{}")
-    users_json = f"{config.data_dir}/users.json"
+    users_json = f"{data_dir}/users.json"
     if not os.path.isfile(users_json):
         with open(users_json, "w") as f:
             f.write("{}")

 # TODO: GET RID OF TOP-LEVEL CODE!!!
 config.initConsoleLogger()

 try:
     init_dirs()
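Note: a sketch of the bundle layout importBundle() accepts. A single non-address top-level directory is treated as a wrapper prefix; each directory named like a valid address is extracted into data_dir and registered in sites.json, and anything else only produces a warning. The bundle contents below are illustrative:

```python
from io import BytesIO
from zipfile import ZipFile

# Build a tiny in-memory bundle of the expected shape.
buf = BytesIO()
with ZipFile(buf, 'w') as zf:
    zf.writestr('wrapper/15CEFKBRHFfAP9rmL6hhLmHoXrrgmw4B5o/content.json', '{}')
    zf.writestr('wrapper/README.txt', 'not an address, logged as unknown and skipped')
buf.seek(0)
# importBundle(buf)  # 'wrapper/' is stripped as prefix, the site dir is extracted,
#                    # and '15CEFKBRHFfAP9rmL6hhLmHoXrrgmw4B5o' is added to sites.json
```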
@@ -73,7 +129,7 @@ if config.action == "main":
         r = proc.wait()
         sys.exit(r)

-    config.initLogging()
+    config.initLogging(console_logging=False)

     # Debug dependent configuration
     from Debug import DebugHook
@@ -272,18 +328,19 @@ class Actions(object):
         for content_inner_path in site.content_manager.contents:
             s = time.time()
             logging.info("Verifing %s signature..." % content_inner_path)
-            err = None
+            error = None
             try:
                 file_correct = site.content_manager.verifyFile(
                     content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
                 )
             except Exception as err:
                 file_correct = False
+                error = err

             if file_correct is True:
                 logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s))
             else:
-                logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, err))
+                logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, error))
                 input("Continue?")
                 bad_files += content_inner_path
@@ -414,6 +471,9 @@ class Actions(object):
         else:
             return res

+    def importBundle(self, bundle):
+        importBundle(bundle)
+
     def getWebsocket(self, site):
         import websocket
@@ -422,47 +482,61 @@ class Actions(object):
         ws = websocket.create_connection(ws_address)
         return ws

-    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
-        global file_server
-        from Site.Site import Site
+    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", recursive=False):
         from Site import SiteManager
         from File import FileServer  # We need fileserver to handle incoming file requests
         from Peer import Peer
         file_server = FileServer()
-        site = SiteManager.site_manager.get(address)
         logging.info("Loading site...")
+        site = SiteManager.site_manager.get(address)
         site.settings["serving"] = True  # Serving the site even if its disabled

+        if not recursive:
+            inner_paths = [inner_path]
+        else:
+            inner_paths = list(site.content_manager.contents.keys())
+
         try:
             ws = self.getWebsocket(site)

+        except Exception as err:
+            self.sitePublishFallback(site, peer_ip, peer_port, inner_paths, err)
+
+        else:
             logging.info("Sending siteReload")
             self.siteCmd(address, "siteReload", inner_path)

-            logging.info("Sending sitePublish")
-            self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
+            for inner_path in inner_paths:
+                logging.info(f"Sending sitePublish for {inner_path}")
+                self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
             logging.info("Done.")
+            ws.close()

-        except Exception as err:
-            logging.info("Can't connect to local websocket client: %s" % err)
-            logging.info("Creating FileServer....")
-            file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Dont check every site integrity
-            time.sleep(0.001)
+    def sitePublishFallback(self, site, peer_ip, peer_port, inner_paths, err):
+        if err is not None:
+            logging.info(f"Can't connect to local websocket client: {err}")
+        logging.info("Publish using fallback mechanism. "
+                     "Note that there might be not enough time for peer discovery, "
+                     "but you can specify target peer on command line.")
+        logging.info("Creating FileServer....")
+        file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Dont check every site integrity
+        time.sleep(0.001)

-            # Started fileserver
-            file_server.portCheck()
-            if peer_ip:  # Announce ip specificed
-                site.addPeer(peer_ip, peer_port)
-            else:  # Just ask the tracker
-                logging.info("Gathering peers from tracker")
-                site.announce()  # Gather peers
+        # Started fileserver
+        file_server.portCheck()
+        if peer_ip:  # Announce ip specificed
+            site.addPeer(peer_ip, peer_port)
+        else:  # Just ask the tracker
+            logging.info("Gathering peers from tracker")
+            site.announce()  # Gather peers

+        for inner_path in inner_paths:
+            published = site.publish(5, inner_path)  # Push to peers

-            if published > 0:
-                time.sleep(3)
-                logging.info("Serving files (max 60s)...")
-                gevent.joinall([file_server_thread], timeout=60)
-                logging.info("Done.")
-            else:
-                logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
+        if published > 0:
+            time.sleep(3)
+            logging.info("Serving files (max 60s)...")
+            gevent.joinall([file_server_thread], timeout=60)
+            logging.info("Done.")
+        else:
+            logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")

     # Crypto commands
     def cryptPrivatekeyToAddress(self, privatekey=None):
@@ -7,7 +7,8 @@ class UnsafePatternError(Exception):
 cached_patterns = {}


-def isSafePattern(pattern):
+def guard(pattern):
+    '''Checks if pattern is safe and raises exception if it isn't'''
     if len(pattern) > 255:
         raise UnsafePatternError("Pattern too long: %s characters in %s" % (len(pattern), pattern))
@@ -15,18 +16,18 @@ def isSafePattern(pattern):
     if unsafe_pattern_match:
         raise UnsafePatternError("Potentially unsafe part of the pattern: %s in %s" % (unsafe_pattern_match.group(0), pattern))

-    repetitions = re.findall(r"\.[\*\{\+]", pattern)
-    if len(repetitions) >= 10:
-        raise UnsafePatternError("More than 10 repetitions of %s in %s" % (repetitions[0], pattern))
-
-    return True
+    repetitions1 = re.findall(r"\.[\*\{\+]", pattern)
+    repetitions2 = re.findall(r"[^(][?]", pattern)
+    if len(repetitions1) + len(repetitions2) >= 10:
+        raise UnsafePatternError("More than 10 repetitions in %s" % pattern)


 def match(pattern, *args, **kwargs):
+    '''Guard for safety, compile, cache and match regexp'''
     cached_pattern = cached_patterns.get(pattern)
     if cached_pattern:
         return cached_pattern.match(*args, **kwargs)
     else:
-        if isSafePattern(pattern):
-            cached_patterns[pattern] = re.compile(pattern)
-            return cached_patterns[pattern].match(*args, **kwargs)
+        guard(pattern)
+        cached_patterns[pattern] = re.compile(pattern)
+        return cached_patterns[pattern].match(*args, **kwargs)
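Note: how the renamed guard() surfaces to callers; match() now compiles and caches unconditionally after guard() returns, and rejection arrives as an exception instead of a False return. A sketch, assuming it runs with the repo's src/ directory on sys.path:

```python
from util import SafeRe  # assumption: executed inside the repo's src/ tree

m = SafeRe.match(r"content\.json$", "content.json")  # guarded, compiled, cached
print(bool(m))  # True

try:
    SafeRe.match(".*" * 11, "x")  # eleven '.*' repetitions trip the >= 10 limit
except SafeRe.UnsafePatternError as err:
    print(err)  # More than 10 repetitions in .*.*.*.*.*.*.*.*.*.*.*
```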
@@ -181,7 +181,6 @@ def _get_local_ips():

 def _create_open_message(local_ip,
                          port,
-                         description="UPnPPunch",
                          protocol="TCP",
                          upnp_schema='WANIPConnection'):
     """
@@ -205,14 +204,13 @@ def _create_open_message(local_ip,
     </s:Envelope>""".format(port=port,
                             protocol=protocol,
                             host_ip=local_ip,
-                            description=description,
+                            description='',
                             upnp_schema=upnp_schema)
     return (REMOVE_WHITESPACE.sub('><', soap_message), 'AddPortMapping')


 def _create_close_message(local_ip,
                           port,
-                          description=None,
                           protocol='TCP',
                           upnp_schema='WANIPConnection'):
     soap_message = """<?xml version="1.0"?>
@@ -294,12 +292,12 @@ def _send_requests(messages, location, upnp_schema, control_path):
         raise UpnpError('Sending requests using UPnP failed.')


-def _orchestrate_soap_request(ip, port, msg_fn, desc=None, protos=("TCP", "UDP")):
+def _orchestrate_soap_request(ip, port, msg_fn, protos=("TCP", "UDP")):
     logger.debug("Trying using local ip: %s" % ip)
     idg_data = _collect_idg_data(ip)

     soap_messages = [
-        msg_fn(ip, port, desc, proto, idg_data['upnp_schema'])
+        msg_fn(ip, port, proto, idg_data['upnp_schema'])
         for proto in protos
     ]
@@ -307,7 +305,6 @@ def _orchestrate_soap_request(ip, port, msg_fn, desc=None, protos=("TCP", "UDP")


 def _communicate_with_igd(port=15441,
-                          desc="UpnpPunch",
                           retries=3,
                           fn=_create_open_message,
                           protos=("TCP", "UDP")):
@@ -321,7 +318,7 @@ def _communicate_with_igd(port=15441,
     def job(local_ip):
         for retry in range(retries):
             try:
-                _orchestrate_soap_request(local_ip, port, fn, desc, protos)
+                _orchestrate_soap_request(local_ip, port, fn, protos)
                 return True
             except Exception as e:
                 logger.debug('Upnp request using "{0}" failed: {1}'.format(local_ip, e))
@@ -357,20 +354,18 @@ def _communicate_with_igd(port=15441,
     return success


-def ask_to_open_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")):
+def ask_to_open_port(port=15441, retries=3, protos=("TCP", "UDP")):
     logger.debug("Trying to open port %d." % port)
     return _communicate_with_igd(port=port,
-                                 desc=desc,
                                  retries=retries,
                                  fn=_create_open_message,
                                  protos=protos)


-def ask_to_close_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")):
+def ask_to_close_port(port=15441, retries=3, protos=("TCP", "UDP")):
     logger.debug("Trying to close port %d." % port)
     # retries=1 because multiple successes cause 500 response and failure
     return _communicate_with_igd(port=port,
-                                 desc=desc,
                                  retries=retries,
                                  fn=_create_close_message,
                                  protos=protos)
src/util/compat.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+import sys
+
+if sys.version_info.major == 3 and sys.version_info.minor < 9:
+    def removeprefix(s, prefix, /):
+        if s.startswith(prefix):
+            return s[len(prefix):]
+        return s
+    def removesuffix(s, suffix, /):
+        if s.endswith(suffix):
+            return s[:-len(suffix)]
+        return s
+else:
+    def removeprefix(s, prefix, /):
+        return s.removeprefix(prefix)
+    def removesuffix(s, suffix, /):
+        return s.removesuffix(suffix)
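Note: the shim mirrors the str methods added in Python 3.9, and main.py imports it with `from util.compat import *`, so call sites stay identical whether the interpreter takes the shim branch (3.8) or the delegating branch (3.9+):

```python
from util.compat import removeprefix, removesuffix

print(removeprefix('wrapper/site/content.json', 'wrapper/'))  # 'site/content.json'
print(removeprefix('content.json', 'wrapper/'))               # unchanged: 'content.json'
print(removesuffix('content.json', '.json'))                  # 'content'
```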
@@ -195,44 +195,6 @@ def mergeDicts(dicts):
     return dict(back)


-# Request https url using gevent SSL error workaround
-def httpRequest(url, as_file=False):
-    if url.startswith("http://"):
-        import urllib.request
-        response = urllib.request.urlopen(url)
-    else:  # Hack to avoid Python gevent ssl errors
-        import socket
-        import http.client
-        import ssl
-
-        host, request = re.match("https://(.*?)(/.*?)$", url).groups()
-
-        conn = http.client.HTTPSConnection(host)
-        sock = socket.create_connection((conn.host, conn.port), conn.timeout, conn.source_address)
-
-        context = ssl.create_default_context()
-        context.minimum_version = ssl.TLSVersion.TLSv1_2
-
-        conn.sock = context.wrap_socket(sock, conn.key_file, conn.cert_file)
-        conn.request("GET", request)
-        response = conn.getresponse()
-        if response.status in [301, 302, 303, 307, 308]:
-            logging.info("Redirect to: %s" % response.getheader('Location'))
-            response = httpRequest(response.getheader('Location'))
-
-    if as_file:
-        import io
-        data = io.BytesIO()
-        while True:
-            buff = response.read(1024 * 16)
-            if not buff:
-                break
-            data.write(buff)
-        return data
-    else:
-        return response
-
-
 def timerCaller(secs, func, *args, **kwargs):
     gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs)
     func(*args, **kwargs)