Merge branch 'py3' into plugins

commit 735061b79d
15 changed files with 212 additions and 153 deletions
@@ -15,7 +15,6 @@ before_install:
 # - docker build -t zeronet .
 # - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet
 install:
-  - if [[ $TRAVIS_PYTHON_VERSION == 3.8-dev ]]; then pip install --upgrade setuptools cffi 'cython>=0.28' git+git://github.com/gevent/gevent.git#egg=gevent; fi
   - pip install --upgrade -r requirements.txt
   - pip list
 before_script:

README.md (17 changed lines)
@@ -1,5 +1,3 @@
-__Warning: Development test version, do not use on live data__
-
 # ZeroNet [](https://travis-ci.org/HelloZeroNet/ZeroNet) [](https://zeronet.io/docs/faq/) [](https://zeronet.io/docs/help_zeronet/donate/)
 
 Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.io
@@ -63,19 +61,18 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/
 
 ## How to join
 
-### Install from package for your distribution
+### Windows
 
-* Arch Linux: [zeronet](https://aur.archlinux.org/zeronet.git), [zeronet-git](https://aur.archlinux.org/zeronet-git.git)
-* Gentoo: [emerge repository](https://github.com/leycec/raiagent)
-* FreeBSD: zeronet
-* Whonix: [instructions](https://www.whonix.org/wiki/ZeroNet)
+- Download [ZeroNet-py3-win64.zip](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist-win64/ZeroNet-py3-win64.zip) (18MB)
+- Unpack anywhere
+- Run `ZeroNet.exe`
 
-### Install from source
+### Other platforms: Install from source
 
 Fetch and extract the source:
 
-    wget https://github.com/HelloZeroNet/ZeroNet/archive/py3.tar.gz
-    tar xvpfz py3.tar.gz
+    wget https://github.com/HelloZeroNet/ZeroNet/archive/py3/ZeroNet-py3.tar.gz
+    tar xvpfz ZeroNet-py3.tar.gz
     cd ZeroNet-py3
 
 Install Python module dependencies either:

plugins/AnnounceBitTorrent/AnnounceBitTorrentPlugin.py (new file, 150 lines)
@@ -0,0 +1,150 @@
import time
import urllib.request
import struct
import socket

import bencode
from lib.subtl.subtl import UdpTrackerClient
import socks
import sockshandler
import gevent

from Plugin import PluginManager
from Config import config
from Debug import Debug
from util import helper


# We can only import plugin host classes after the plugins are loaded
@PluginManager.afterLoad
def importHostClasses():
    global Peer, AnnounceError
    from Peer import Peer
    from Site.SiteAnnouncer import AnnounceError


@PluginManager.registerTo("SiteAnnouncer")
class SiteAnnouncerPlugin(object):
    def getSupportedTrackers(self):
        trackers = super(SiteAnnouncerPlugin, self).getSupportedTrackers()
        if config.disable_udp or config.trackers_proxy != "disable":
            trackers = [tracker for tracker in trackers if not tracker.startswith("udp://")]

        return trackers

    def getTrackerHandler(self, protocol):
        if protocol == "udp":
            handler = self.announceTrackerUdp
        elif protocol == "http":
            handler = self.announceTrackerHttp
        elif protocol == "https":
            handler = self.announceTrackerHttps
        else:
            handler = super(SiteAnnouncerPlugin, self).getTrackerHandler(protocol)
        return handler

    def announceTrackerUdp(self, tracker_address, mode="start", num_want=10):
        s = time.time()
        if config.disable_udp:
            raise AnnounceError("Udp disabled by config")
        if config.trackers_proxy != "disable":
            raise AnnounceError("Udp trackers not available with proxies")

        ip, port = tracker_address.split("/")[0].split(":")
        tracker = UdpTrackerClient(ip, int(port))
        if helper.getIpType(ip) in self.getOpenedServiceTypes():
            tracker.peer_port = self.fileserver_port
        else:
            tracker.peer_port = 0
        tracker.connect()
        if not tracker.poll_once():
            raise AnnounceError("Could not connect")
        tracker.announce(info_hash=self.site.address_sha1, num_want=num_want, left=431102370)
        back = tracker.poll_once()
        if not back:
            raise AnnounceError("No response after %.0fs" % (time.time() - s))
        elif type(back) is dict and "response" in back:
            peers = back["response"]["peers"]
        else:
            raise AnnounceError("Invalid response: %r" % back)

        return peers

    def httpRequest(self, url):
        headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
            'Accept-Encoding': 'none',
            'Accept-Language': 'en-US,en;q=0.8',
            'Connection': 'keep-alive'
        }

        req = urllib.request.Request(url, headers=headers)

        if config.trackers_proxy == "tor":
            tor_manager = self.site.connection_server.tor_manager
            handler = sockshandler.SocksiPyHandler(socks.SOCKS5, tor_manager.proxy_ip, tor_manager.proxy_port)
            opener = urllib.request.build_opener(handler)
            return opener.open(req, timeout=50)
        elif config.trackers_proxy == "disable":
            return urllib.request.urlopen(req, timeout=25)
        else:
            proxy_ip, proxy_port = config.trackers_proxy.split(":")
            handler = sockshandler.SocksiPyHandler(socks.SOCKS5, proxy_ip, int(proxy_port))
            opener = urllib.request.build_opener(handler)
            return opener.open(req, timeout=50)

    def announceTrackerHttps(self, *args, **kwargs):
        kwargs["protocol"] = "https"
        return self.announceTrackerHttp(*args, **kwargs)

    def announceTrackerHttp(self, tracker_address, mode="start", num_want=10, protocol="http"):
        tracker_ip, tracker_port = tracker_address.rsplit(":", 1)
        if helper.getIpType(tracker_ip) in self.getOpenedServiceTypes():
            port = self.fileserver_port
        else:
            port = 1
        params = {
            'info_hash': self.site.address_sha1,
            'peer_id': self.peer_id, 'port': port,
            'uploaded': 0, 'downloaded': 0, 'left': 431102370, 'compact': 1, 'numwant': num_want,
            'event': 'started'
        }

        url = protocol + "://" + tracker_address + "?" + urllib.parse.urlencode(params)

        s = time.time()
        response = None
        # Load url
        if config.tor == "always" or config.trackers_proxy != "disable":
            timeout = 60
        else:
            timeout = 30

        with gevent.Timeout(timeout, False):  # Make sure of timeout
            req = self.httpRequest(url)
            response = req.read()
            req.close()
            req = None

        if not response:
            raise AnnounceError("No response after %.0fs" % (time.time() - s))

        # Decode peers
        try:
            peer_data = bencode.decode(response)["peers"]
            if type(peer_data) is not bytes:
                peer_data = peer_data.encode()
            response = None
            peer_count = int(len(peer_data) / 6)
            peers = []
            for peer_offset in range(peer_count):
                off = 6 * peer_offset
                peer = peer_data[off:off + 6]
                addr, port = struct.unpack('!LH', peer)
                peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port})
        except Exception as err:
            raise AnnounceError("Invalid response: %r (%s)" % (response, Debug.formatException(err)))

        return peers
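The `compact=1` response decoded at the end of `announceTrackerHttp` packs every peer into 6 bytes: a 4-byte IPv4 address followed by a 2-byte big-endian port, which is what the `struct.unpack('!LH', ...)` loop handles. A standalone sketch of that decoding step (the sample peer bytes below are made up for illustration):

    import socket
    import struct

    def decode_compact_peers(peer_data: bytes):
        # Each entry is 6 bytes: 4-byte IPv4 address + 2-byte big-endian port.
        peers = []
        for off in range(0, len(peer_data) - len(peer_data) % 6, 6):
            addr, port = struct.unpack("!LH", peer_data[off:off + 6])
            peers.append({"addr": socket.inet_ntoa(struct.pack("!L", addr)), "port": port})
        return peers

    # Example: two fabricated peers, 1.2.3.4:6881 and 5.6.7.8:15441
    sample = socket.inet_aton("1.2.3.4") + struct.pack("!H", 6881) \
           + socket.inet_aton("5.6.7.8") + struct.pack("!H", 15441)
    print(decode_compact_peers(sample))
    # [{'addr': '1.2.3.4', 'port': 6881}, {'addr': '5.6.7.8', 'port': 15441}]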

plugins/AnnounceBitTorrent/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
from . import AnnounceBitTorrentPlugin
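The one-line `__init__.py` only makes the plugin importable; the wiring happens through `@PluginManager.registerTo("SiteAnnouncer")`, which layers the plugin class on top of the host class so `super()` calls fall through to the original implementation. A rough sketch of that composition idea, using plain Python classes rather than ZeroNet's actual PluginManager:

    # Illustrative sketch of the "registerTo" mixin pattern, not ZeroNet's real PluginManager.
    class SiteAnnouncer:
        def getTrackerHandler(self, protocol):
            return None  # the core no longer knows any tracker protocol

    class AnnounceBitTorrentPlugin:
        def getTrackerHandler(self, protocol):
            if protocol in ("udp", "http", "https"):
                return lambda *a, **kw: "announce via %s" % protocol
            return super().getTrackerHandler(protocol)  # fall through to the host class

    # After plugin loading, the effective class is the plugin layered on top of the host.
    PatchedSiteAnnouncer = type("SiteAnnouncer", (AnnounceBitTorrentPlugin, SiteAnnouncer), {})

    announcer = PatchedSiteAnnouncer()
    print(announcer.getTrackerHandler("udp"))   # handler supplied by the plugin
    print(announcer.getTrackerHandler("zero"))  # None, from the core class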
@@ -105,7 +105,7 @@ class UiWebsocketPlugin(object):
 
     # Optional file functions
 
-    def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10, filter="downloaded"):
+    def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10, filter="downloaded", filter_inner_path=None):
         if not address:
             address = self.site.address
 
@@ -139,6 +139,8 @@ class UiWebsocketPlugin(object):
             wheres_raw.append("(is_downloaded = 1 OR is_pinned = 1)")
         if "pinned" in filter:
             wheres["is_pinned"] = 1
+        if filter_inner_path:
+            wheres["inner_path__like"] = filter_inner_path
 
         if address == "all":
             join = "LEFT JOIN site USING (site_id)"
@@ -4,6 +4,8 @@
 "Open web browser on ZeroNet startup": "ZeroNet启动时,打开浏览器",
 
 "Network": "网络",
+"Offline mode": "离线模式",
+"Disable network communication.": "关闭网络通信.",
 "File server network": "文件服务器网络",
 "Accept incoming peers using IPv4 or IPv6 address. (default: dual)": "使用IPv4或IPv6地址接受传入的节点请求. (默认:双重)",
 "Dual (IPv4 & IPv6)": "双重 (IPv4与IPv6)",
@@ -13,7 +13,7 @@ class Config(object):
 
     def __init__(self, argv):
         self.version = "0.7.0"
-        self.rev = 4112
+        self.rev = 4122
        self.argv = argv
        self.action = None
        self.pending_changes = {}
@@ -573,7 +573,7 @@ class Config(object):
         logging.getLogger('').setLevel(logging.getLevelName(self.log_level))
         logging.getLogger('').addHandler(file_logger)
 
-    def initLogging(self):
+    def initLogging(self, console_logging=True, file_logging=True):
         # Create necessary files and dirs
         if not os.path.isdir(self.log_dir):
             os.mkdir(self.log_dir)
@@ -589,7 +589,9 @@ class Config(object):
         logging.getLogger('').name = "-"  # Remove root prefix
         logging.getLogger("geventwebsocket.handler").setLevel(logging.WARNING)  # Don't log ws debug messages
 
-        self.initConsoleLogger()
-        self.initFileLogger()
+        if console_logging:
+            self.initConsoleLogger()
+        if file_logging:
+            self.initFileLogger()
 
 config = Config(sys.argv)
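With the new `console_logging` and `file_logging` flags each handler is only attached on request, which is what lets the test setup later in this commit call `initLogging(console_logging=False)` to keep console output quiet. A minimal sketch of the same pattern using the standard `logging` module (not the real Config class):

    import logging

    def init_logging(console_logging=True, file_logging=True, log_path="debug.log"):
        # Mirror of the pattern above: each handler is attached only if requested.
        root = logging.getLogger("")
        root.setLevel(logging.DEBUG)
        if console_logging:
            root.addHandler(logging.StreamHandler())
        if file_logging:
            root.addHandler(logging.FileHandler(log_path))

    # e.g. in a test run, skip the console handler so test output stays clean:
    init_logging(console_logging=False)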
@@ -49,6 +49,8 @@ class DbCursor:
             else:
                 if key.startswith("not__"):
                     query_wheres.append(key.replace("not__", "") + " != ?")
+                elif key.endswith("__like"):
+                    query_wheres.append(key.replace("__like", "") + " LIKE ?")
                 elif key.endswith(">"):
                     query_wheres.append(key.replace(">", "") + " > ?")
                 elif key.endswith("<"):
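The `__like` suffix joins the existing `not__`, `>` and `<` key modifiers, and is what turns the `filter_inner_path` option added above into an `inner_path LIKE ?` clause. A condensed sketch of that key-to-SQL translation, simplified from the DbCursor logic shown here:

    def build_wheres(wheres):
        # Simplified version of the key-suffix convention used above.
        clauses, values = [], []
        for key, value in wheres.items():
            if key.startswith("not__"):
                clauses.append(key.replace("not__", "") + " != ?")
            elif key.endswith("__like"):
                clauses.append(key.replace("__like", "") + " LIKE ?")
            elif key.endswith(">"):
                clauses.append(key.replace(">", "") + " > ?")
            elif key.endswith("<"):
                clauses.append(key.replace("<", "") + " < ?")
            else:
                clauses.append(key + " = ?")
            values.append(value)
        return " AND ".join(clauses), values

    print(build_wheres({"is_pinned": 1, "inner_path__like": "data/users/%"}))
    # ('is_pinned = ? AND inner_path LIKE ?', [1, 'data/users/%'])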
@@ -1,5 +1,6 @@
 import logging
 import time
+import os
 
 from Config import config
 
@@ -41,7 +42,8 @@ class DebugReloader:
         if ext not in ["py", "json"] or "Test" in path or time.time() - self.last_chaged < 1.0:
             return False
         self.last_chaged = time.time()
-        self.log.debug("File changed: %s reloading source code" % evt)
+        time_modified = os.path.getmtime(path)
+        self.log.debug("File changed: %s reloading source code (modified %.3fs ago)" % (evt, time.time() - time_modified))
         time.sleep(0.1)  # Wait for lock release
         for callback in self.callbacks:
             try:
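The added `os.path.getmtime(path)` call lets the reloader report how long ago the changed file was written when the watch event fires. A quick illustration of that measurement (the file name is only an example):

    import os
    import time

    path = "example.py"  # any watched file
    with open(path, "w") as f:
        f.write("print('hello')\n")

    time.sleep(0.25)
    time_modified = os.path.getmtime(path)
    print("modified %.3fs ago" % (time.time() - time_modified))  # roughly 0.250s ago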
@@ -1,16 +1,9 @@
 import random
 import time
 import hashlib
-import urllib.request
-import struct
-import socket
 import re
 import collections
 
-import bencode
-from lib.subtl.subtl import UdpTrackerClient
-import socks
-import sockshandler
 import gevent
 
 from Plugin import PluginManager
@@ -41,8 +34,6 @@ class SiteAnnouncer(object):
 
     def getSupportedTrackers(self):
         trackers = self.getTrackers()
-        if config.disable_udp or config.trackers_proxy != "disable":
-            trackers = [tracker for tracker in trackers if not tracker.startswith("udp://")]
 
         if not self.site.connection_server.tor_manager.enabled:
             trackers = [tracker for tracker in trackers if ".onion" not in tracker]
@@ -157,15 +148,7 @@ class SiteAnnouncer(object):
         self.updateWebsocket(pex="announced")
 
     def getTrackerHandler(self, protocol):
-        if protocol == "udp":
-            handler = self.announceTrackerUdp
-        elif protocol == "http":
-            handler = self.announceTrackerHttp
-        elif protocol == "https":
-            handler = self.announceTrackerHttps
-        else:
-            handler = None
-        return handler
+        return None
 
     def getAddressParts(self, tracker):
         if "://" not in tracker or not re.match("^[A-Za-z0-9:/\\.#-]+$", tracker):
@@ -266,112 +249,6 @@ class SiteAnnouncer(object):
         )
         return time.time() - s
 
-    def announceTrackerUdp(self, tracker_address, mode="start", num_want=10):
-        s = time.time()
-        if config.disable_udp:
-            raise AnnounceError("Udp disabled by config")
-        if config.trackers_proxy != "disable":
-            raise AnnounceError("Udp trackers not available with proxies")
-
-        ip, port = tracker_address.split("/")[0].split(":")
-        tracker = UdpTrackerClient(ip, int(port))
-        if helper.getIpType(ip) in self.getOpenedServiceTypes():
-            tracker.peer_port = self.fileserver_port
-        else:
-            tracker.peer_port = 0
-        tracker.connect()
-        if not tracker.poll_once():
-            raise AnnounceError("Could not connect")
-        tracker.announce(info_hash=self.site.address_sha1, num_want=num_want, left=431102370)
-        back = tracker.poll_once()
-        if not back:
-            raise AnnounceError("No response after %.0fs" % (time.time() - s))
-        elif type(back) is dict and "response" in back:
-            peers = back["response"]["peers"]
-        else:
-            raise AnnounceError("Invalid response: %r" % back)
-
-        return peers
-
-    def httpRequest(self, url):
-        headers = {
-            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
-            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
-            'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
-            'Accept-Encoding': 'none',
-            'Accept-Language': 'en-US,en;q=0.8',
-            'Connection': 'keep-alive'
-        }
-
-        req = urllib.request.Request(url, headers=headers)
-
-        if config.trackers_proxy == "tor":
-            tor_manager = self.site.connection_server.tor_manager
-            handler = sockshandler.SocksiPyHandler(socks.SOCKS5, tor_manager.proxy_ip, tor_manager.proxy_port)
-            opener = urllib.request.build_opener(handler)
-            return opener.open(req, timeout=50)
-        elif config.trackers_proxy == "disable":
-            return urllib.request.urlopen(req, timeout=25)
-        else:
-            proxy_ip, proxy_port = config.trackers_proxy.split(":")
-            handler = sockshandler.SocksiPyHandler(socks.SOCKS5, proxy_ip, int(proxy_port))
-            opener = urllib.request.build_opener(handler)
-            return opener.open(req, timeout=50)
-
-    def announceTrackerHttps(self, *args, **kwargs):
-        kwargs["protocol"] = "https"
-        return self.announceTrackerHttp(*args, **kwargs)
-
-    def announceTrackerHttp(self, tracker_address, mode="start", num_want=10, protocol="http"):
-        tracker_ip, tracker_port = tracker_address.rsplit(":", 1)
-        if helper.getIpType(tracker_ip) in self.getOpenedServiceTypes():
-            port = self.fileserver_port
-        else:
-            port = 1
-        params = {
-            'info_hash': self.site.address_sha1,
-            'peer_id': self.peer_id, 'port': port,
-            'uploaded': 0, 'downloaded': 0, 'left': 431102370, 'compact': 1, 'numwant': num_want,
-            'event': 'started'
-        }
-
-        url = protocol + "://" + tracker_address + "?" + urllib.parse.urlencode(params)
-
-        s = time.time()
-        response = None
-        # Load url
-        if config.tor == "always" or config.trackers_proxy != "disable":
-            timeout = 60
-        else:
-            timeout = 30
-
-        with gevent.Timeout(timeout, False):  # Make sure of timeout
-            req = self.httpRequest(url)
-            response = req.read()
-            req.close()
-            req = None
-
-        if not response:
-            raise AnnounceError("No response after %.0fs" % (time.time() - s))
-
-        # Decode peers
-        try:
-            peer_data = bencode.decode(response)["peers"]
-            if type(peer_data) is not bytes:
-                peer_data = peer_data.encode()
-            response = None
-            peer_count = int(len(peer_data) / 6)
-            peers = []
-            for peer_offset in range(peer_count):
-                off = 6 * peer_offset
-                peer = peer_data[off:off + 6]
-                addr, port = struct.unpack('!LH', peer)
-                peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port})
-        except Exception as err:
-            raise AnnounceError("Invalid response: %r (%s)" % (response, Debug.formatException(err)))
-
-        return peers
-
     @util.Noparallel(blocking=False)
     def announcePex(self, query_num=2, need_num=5):
         peers = self.site.getConnectedPeers()
@@ -75,6 +75,11 @@ class TestDb:
             {"not__title": ["Test #%s" % i for i in range(50, 3000)]}
         ).fetchone()["num"] == 50
 
+        assert db.execute(
+            "SELECT COUNT(*) AS num FROM test WHERE ?",
+            {"title__like": "%20%"}
+        ).fetchone()["num"] == 1
+
         # Test named parameter escaping
         assert db.execute(
             "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike",
@@ -48,6 +48,12 @@ class TestFileRequest:
         response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234})
         assert "File size does not match" in response["error"]
 
+        # Invalid path
+        for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]:
+            for sep in ["/", "\\"]:
+                response = connection.request("getFile", {"site": site.address, "inner_path": path.replace("/", sep), "location": 0})
+                assert response["error"] == 'File read exception'
+
         connection.close()
         client.stop()
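The new assertions require that any `inner_path` containing dot segments, with either path separator, fails with 'File read exception' rather than reading outside the site directory. A generic sketch of the kind of normalization check that rejects such paths (an illustration of the idea, not ZeroNet's actual FileRequest code):

    import posixpath

    def is_safe_inner_path(inner_path: str) -> bool:
        # Treat both separators the same way, then refuse anything that is absolute,
        # contains a dots-only component (".", "..", "...", ...), or escapes after normalization.
        unified = inner_path.replace("\\", "/")
        if unified.startswith("/"):
            return False
        if any(part and set(part) == {"."} for part in unified.split("/")):
            return False
        return not posixpath.normpath(unified).startswith("..")

    for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]:
        for sep in ["/", "\\"]:
            assert not is_safe_inner_path(path.replace("/", sep))
    assert is_safe_inner_path("data/users/content.json")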
@ -68,7 +68,7 @@ config.verbose = True # Use test data for unittests
|
||||||
config.tor = "disable" # Don't start Tor client
|
config.tor = "disable" # Don't start Tor client
|
||||||
config.trackers = []
|
config.trackers = []
|
||||||
config.data_dir = TEST_DATA_PATH # Use test data for unittests
|
config.data_dir = TEST_DATA_PATH # Use test data for unittests
|
||||||
config.initLogging()
|
config.initLogging(console_logging=False)
|
||||||
|
|
||||||
# Set custom formatter with realative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log)
|
# Set custom formatter with realative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log)
|
||||||
class TimeFilter(logging.Filter):
|
class TimeFilter(logging.Filter):
|
||||||
|
|
|
@@ -9,10 +9,10 @@ changeColorScheme = (theme) ->
   zeroframe.cmd "userGetGlobalSettings", [], (user_settings) ->
     if user_settings.theme != theme
       user_settings.theme = theme
-      zeroframe.cmd "userSetGlobalSettings", [user_settings]
-      location.reload()
+      zeroframe.cmd "userSetGlobalSettings", [user_settings], (status) ->
+        if status == "ok"
+          location.reload()
+        return
     return
   return
@@ -21,7 +21,12 @@ displayNotification = ({matches, media}) ->
   if !matches
     return
 
-  zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.<br>Please reload site to use it."]
+  zeroframe.cmd "siteInfo", [], (site_info) ->
+    if "ADMIN" in site_info.settings.permissions
+      zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.<br>Please reload site to use it."]
+    else
+      zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.<br>Please open ZeroHello to use it."]
+    return
   return
@@ -1981,7 +1981,6 @@ $.extend( $.easing,
 
 }).call(this);
 
-
 
 /* ---- src/Ui/media/WrapperZeroFrame.coffee ---- */
 
@@ -2037,7 +2036,8 @@ $.extend( $.easing,
 
 
 (function() {
-  var DARK, LIGHT, changeColorScheme, detectColorScheme, displayNotification, mqDark, mqLight;
+  var DARK, LIGHT, changeColorScheme, detectColorScheme, displayNotification, mqDark, mqLight,
+    indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; };
 
   DARK = "(prefers-color-scheme: dark)";
 
@@ -2051,8 +2051,11 @@ $.extend( $.easing,
     zeroframe.cmd("userGetGlobalSettings", [], function(user_settings) {
       if (user_settings.theme !== theme) {
         user_settings.theme = theme;
-        zeroframe.cmd("userSetGlobalSettings", [user_settings]);
-        location.reload();
+        zeroframe.cmd("userSetGlobalSettings", [user_settings], function(status) {
+          if (status === "ok") {
+            location.reload();
+          }
+        });
       }
     });
   };
@@ -2063,7 +2066,13 @@ $.extend( $.easing,
     if (!matches) {
      return;
     }
-    zeroframe.cmd("wrapperNotification", ["info", "Your system's theme has been changed.<br>Please reload site to use it."]);
+    zeroframe.cmd("siteInfo", [], function(site_info) {
+      if (indexOf.call(site_info.settings.permissions, "ADMIN") >= 0) {
+        zeroframe.cmd("wrapperNotification", ["info", "Your system's theme has been changed.<br>Please reload site to use it."]);
+      } else {
+        zeroframe.cmd("wrapperNotification", ["info", "Your system's theme has been changed.<br>Please open ZeroHello to use it."]);
+      }
+    });
   };
 
   detectColorScheme = function() {
@@ -2082,4 +2091,4 @@ $.extend( $.easing,
   }
 });
 
 }).call(this);