Merge pull request #276 from caryoscelus/git-and-newdir

New directory structure, versioning information and "build" types

- data is separated into config-like files, private files (with keys/user data) and normal site data
- use sensible platform-based defaults for data directories (`~/.local` / `~/AppData` etc)
- support various 'build' types (in particular to differentiate between "portable" and normal setup)
- get versioning info from git
- various code improvements (use `pathlib` instead of string formatting)
- separate Actions from main.py
This commit is contained in:
caryoscelus 2024-05-10 11:48:06 +00:00 committed by GitHub
commit 597bd57bd2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
30 changed files with 1018 additions and 692 deletions

3
.gitignore vendored
View file

@ -45,3 +45,6 @@ plugins/Multiuser
plugins/NoNewSites plugins/NoNewSites
plugins/StemPort plugins/StemPort
plugins/UiPassword plugins/UiPassword
# Build files
src/Build.py

View file

@ -1,7 +1,7 @@
### zeronet-conservancy 0.7.10+ ### zeronet-conservancy 0.7.10+
- disable site-plugins installed for security reasons (@caryoscelus) - disable site-plugins installed for security reasons (@caryoscelus)
- fix downloading geoip db (@caryoscelus) - fix downloading geoip db (@caryoscelus)
- python <3.6 is officially unsupported - python <3.8 is officially unsupported
- SafeRe improvements by @geekless - SafeRe improvements by @geekless
- remove and don't update muted files (@caryoscelus) - remove and don't update muted files (@caryoscelus)
- option to disable port checking (@caryoscelus) - option to disable port checking (@caryoscelus)
@ -11,6 +11,9 @@
- better fix of local sites leak (@caryoscelus) - better fix of local sites leak (@caryoscelus)
- ipython-based repl via --repl for debug/interactive development (@caryoscelus) - ipython-based repl via --repl for debug/interactive development (@caryoscelus)
- optional blocking of compromised id certificates for spam protection (@caryoscelus) - optional blocking of compromised id certificates for spam protection (@caryoscelus)
- changes in directory structure (split data and config, use user directories by default)
- use version information from git if available
- different build types (portable vs package)
- various improvements - various improvements
### zeronet-conservancy 0.7.10 (2023-07-26) (18d35d3bed4f0683e99) ### zeronet-conservancy 0.7.10 (2023-07-26) (18d35d3bed4f0683e99)

55
build.py Executable file
View file

@ -0,0 +1,55 @@
#!/usr/bin/env python3
## Copyright (c) 2024 caryoscelus
##
## zeronet-conservancy is free software: you can redistribute it and/or modify it under the
## terms of the GNU General Public License as published by the Free Software
## Foundation, either version 3 of the License, or (at your option) any later version.
##
## zeronet-conservancy is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
## FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
## details.
##
## You should have received a copy of the GNU General Public License along with
## zeronet-conservancy. If not, see <https://www.gnu.org/licenses/>.
##
"""Simple build/bundle script
"""
import argparse
def write_to(args, target):
    """Write build metadata (type, branch, commit, version, platform) as
    Python assignments to the file-like *target*.

    Branch/commit fall back to `git` queries, then to 'unknown'.
    """
    branch, commit = args.branch, args.commit
    if branch is None or commit is None:
        from src.util import Git
        branch = branch or Git.branch() or 'unknown'
        commit = commit or Git.commit() or 'unknown'
    fields = [
        f"build_type = {args.type!r}",
        f"branch = {branch!r}",
        f"commit = {commit!r}",
        f"version = {args.version!r}",
        f"platform = {args.platform!r}",
    ]
    target.write('\n'.join(fields))
def main():
    """Parse command line options and write build info to src/Build.py
    (or to stdout when --stdout is given).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--type', default='source')
    parser.add_argument('--version')
    parser.add_argument('--branch')
    parser.add_argument('--commit')
    parser.add_argument('--platform', default='source')
    parser.add_argument('--stdout', action=argparse.BooleanOptionalAction, default=False)
    args = parser.parse_args()
    if args.stdout:
        import sys
        write_to(args, sys.stdout)
    else:
        # fix: the original opened the file without ever closing it,
        # leaking the handle and risking unflushed output
        with open('src/Build.py', 'w') as target:
            write_to(args, target)

if __name__ == '__main__':
    main()

View file

@ -14,7 +14,7 @@ from util import helper
class TrackerStorage(object): class TrackerStorage(object):
def __init__(self): def __init__(self):
self.log = logging.getLogger("TrackerStorage") self.log = logging.getLogger("TrackerStorage")
self.file_path = "%s/trackers.json" % config.data_dir self.file_path = config.start_dir / 'trackers.json'
self.load() self.load()
self.time_discover = 0.0 self.time_discover = 0.0
atexit.register(self.save) atexit.register(self.save)

View file

@ -9,7 +9,7 @@ from Config import config
@pytest.mark.usefixtures("resetTempSettings") @pytest.mark.usefixtures("resetTempSettings")
class TestAnnounceShare: class TestAnnounceShare:
def testAnnounceList(self, file_server): def testAnnounceList(self, file_server):
open("%s/trackers.json" % config.data_dir, "w").write("{}") (config.start_dir / 'trackers.json').open('w').write('{}')
tracker_storage = AnnounceSharePlugin.tracker_storage tracker_storage = AnnounceSharePlugin.tracker_storage
tracker_storage.load() tracker_storage.load()
peer = Peer(file_server.ip, 1544, connection_server=file_server) peer = Peer(file_server.ip, 1544, connection_server=file_server)

View file

@ -6,7 +6,7 @@ import time
class ChartDb(Db): class ChartDb(Db):
def __init__(self): def __init__(self):
self.version = 2 self.version = 2
super(ChartDb, self).__init__(self.getSchema(), "%s/chart.db" % config.data_dir) super(ChartDb, self).__init__(self.getSchema(), config.start_dir / 'chart.db')
self.foreign_keys = True self.foreign_keys = True
self.checkTables() self.checkTables()
self.sites = self.loadSites() self.sites = self.loadSites()

View file

@ -14,7 +14,7 @@ from util import helper
class ContentFilterStorage(object): class ContentFilterStorage(object):
def __init__(self, site_manager): def __init__(self, site_manager):
self.log = logging.getLogger("ContentFilterStorage") self.log = logging.getLogger("ContentFilterStorage")
self.file_path = "%s/filters.json" % config.data_dir self.file_path = config.config_dir / 'filters.json'
self.site_manager = site_manager self.site_manager = site_manager
self.file_content = self.load() self.file_content = self.load()
@ -36,12 +36,12 @@ class ContentFilterStorage(object):
def load(self): def load(self):
# Rename previously used mutes.json -> filters.json # Rename previously used mutes.json -> filters.json
if os.path.isfile("%s/mutes.json" % config.data_dir): if (config.config_dir / 'mutes.json').is_file():
self.log.info("Renaming mutes.json to filters.json...") self.log.info("Renaming mutes.json to filters.json...")
os.rename("%s/mutes.json" % config.data_dir, self.file_path) os.rename(config.config_dir / 'mutes.json', self.file_path)
if os.path.isfile(self.file_path): if self.file_path.is_file():
try: try:
return json.load(open(self.file_path)) return json.load(self.file_path.open())
except Exception as err: except Exception as err:
self.log.error("Error loading filters.json: %s" % err) self.log.error("Error loading filters.json: %s" % err)
return None return None

View file

@ -44,7 +44,7 @@ class UiRequestPlugin(object):
if ".zip/" in path or ".tar.gz/" in path: if ".zip/" in path or ".tar.gz/" in path:
file_obj = None file_obj = None
path_parts = self.parsePath(path) path_parts = self.parsePath(path)
file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"]) file_path = config.data_dir / path_parts["address"] / path_parts["inner_path"]
match = re.match(r"^(.*\.(?:tar.gz|zip))/(.*)", file_path) match = re.match(r"^(.*\.(?:tar.gz|zip))/(.*)", file_path)
archive_path, path_within = match.groups() archive_path, path_within = match.groups()
if archive_path not in archive_cache: if archive_path not in archive_cache:

View file

@ -686,7 +686,7 @@ class UiWebsocketPlugin(object):
if sys.platform == "linux": if sys.platform == "linux":
sys_db_paths += ['/usr/share/GeoIP/' + db_name] sys_db_paths += ['/usr/share/GeoIP/' + db_name]
data_dir_db_path = os.path.join(config.data_dir, db_name) data_dir_db_path = config.start_dir / db_name
db_paths = sys_db_paths + [data_dir_db_path] db_paths = sys_db_paths + [data_dir_db_path]

View file

@ -12,7 +12,7 @@ class BootstrapperDb(Db.Db):
def __init__(self): def __init__(self):
self.version = 7 self.version = 7
self.hash_ids = {} # hash -> id cache self.hash_ids = {} # hash -> id cache
super(BootstrapperDb, self).__init__({"db_name": "Bootstrapper"}, "%s/bootstrapper.db" % config.data_dir) super(BootstrapperDb, self).__init__({"db_name": "Bootstrapper"}, config.start_dir / 'bootstrapper.db')
self.foreign_keys = True self.foreign_keys = True
self.checkTables() self.checkTables()
self.updateHashCache() self.updateHashCache()

View file

@ -16,7 +16,7 @@ def importPluginnedClasses():
from User import UserManager from User import UserManager
try: try:
local_master_addresses = set(json.load(open("%s/users.json" % config.data_dir)).keys()) # Users in users.json local_master_addresses = set(json.load((config.private_dir / 'users.json').open()).keys()) # Users in users.json
except Exception as err: except Exception as err:
local_master_addresses = set() local_master_addresses = set()

View file

@ -8,7 +8,8 @@ from User import UserManager
class TestMultiuser: class TestMultiuser:
def testMemorySave(self, user): def testMemorySave(self, user):
# It should not write users to disk # It should not write users to disk
users_before = open("%s/users.json" % config.data_dir).read() users_json = config.private_dir / 'users.json'
users_before = users_json.open().read()
user = UserManager.user_manager.create() user = UserManager.user_manager.create()
user.save() user.save()
assert open("%s/users.json" % config.data_dir).read() == users_before assert users_json.open().read() == users_before

521
src/Actions.py Normal file
View file

@ -0,0 +1,521 @@
import logging
import os
import sys
import time

import gevent

from Config import config
from Plugin import PluginManager
@PluginManager.acceptPlugins
class Actions:
def call(self, function_name, kwargs):
    """Dispatch a CLI action to the method named `function_name`,
    printing its return value if truthy.
    """
    logging.info(f'zeronet-conservancy {config.version_full} on Python {sys.version} Gevent {gevent.__version__}')
    func = getattr(self, function_name, None)
    if func is None:
        # fix: an unknown action name crashed with
        # "TypeError: 'NoneType' object is not callable"
        logging.error('Unknown action: %r' % function_name)
        return
    back = func(**kwargs)
    if back:
        print(back)
def ipythonThread(self):
    """Run an interactive IPython shell; on exit, signal the serving loop to stop."""
    import IPython
    IPython.embed()
    self.gevent_quit.set()

# Default action: Start serving UiServer and FileServer
def main(self):
    """Start FileServer and UiServer greenlets and block until they all exit.

    With --repl an IPython shell runs in a side thread; quitting it sets
    `gevent_quit`, after which any still-running greenlets are killed.
    """
    import main
    from File import FileServer
    from Ui import UiServer
    logging.info("Creating FileServer....")
    main.file_server = FileServer()
    logging.info("Creating UiServer....")
    main.ui_server = UiServer()
    main.file_server.ui_server = main.ui_server
    # for startup_error in startup_errors:
    #     logging.error("Startup error: %s" % startup_error)
    logging.info("Removing old SSL certs...")
    from Crypt import CryptConnection
    CryptConnection.manager.removeCerts()
    logging.info("Starting servers....")
    import threading
    self.gevent_quit = threading.Event()
    launched_greenlets = [gevent.spawn(main.ui_server.start), gevent.spawn(main.file_server.start), gevent.spawn(main.ui_server.startSiteServer)]
    # if --repl, start ipython thread
    # FIXME: Unfortunately this leads to exceptions on exit so use with care
    if config.repl:
        threading.Thread(target=self.ipythonThread).start()
    stopped = 0
    # Process all greenlets in main thread; poll every second so the
    # repl's quit event is noticed promptly
    while not self.gevent_quit.is_set() and stopped < len(launched_greenlets):
        stopped += len(gevent.joinall(launched_greenlets, timeout=1))
    # Exited due to repl, so must kill greenlets
    if stopped < len(launched_greenlets):
        gevent.killall(launched_greenlets, exception=KeyboardInterrupt)
    logging.info("All server stopped")
# Site commands
def siteCreate(self, use_master_seed=True):
    """Create a new site: generate a keypair (optionally from the master
    seed), create the site directory and a signed content.json.
    """
    # fix: logged config.use_master_seed, which ignores the actual argument
    logging.info("Generating new privatekey (use_master_seed: %s)..." % use_master_seed)
    from Crypt import CryptBitcoin
    if use_master_seed:
        from User import UserManager
        user = UserManager.user_manager.get()
        if not user:
            user = UserManager.user_manager.create()
        address, address_index, site_data = user.getNewSiteData()
        privatekey = site_data["privatekey"]
        logging.info("Generated using master seed from users.json, site index: %s" % address_index)
    else:
        privatekey = CryptBitcoin.newPrivatekey()
        address = CryptBitcoin.privatekeyToAddress(privatekey)
    logging.info("----------------------------------------------------------------------")
    logging.info("Site private key: %s" % privatekey)
    logging.info(" !!! ^ Save it now, required to modify the site ^ !!!")
    logging.info("Site address: %s" % address)
    logging.info("----------------------------------------------------------------------")
    # confirmation loop only for interactive, non-seed setups
    # (`while True and ...` simplified: the `True and` was redundant)
    while not config.batch and not use_master_seed:
        if input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
            break
        else:
            logging.info("Please, secure it now, you going to need it to modify your site!")
    logging.info("Creating directory structure...")
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()
    (config.data_dir / address).mkdir()
    (config.data_dir / address / 'index.html').open('w').write(f"Hello {address}!")
    logging.info("Creating content.json...")
    site = Site(address)
    extend = {"postmessage_nonce_security": True}
    if use_master_seed:
        extend["address_index"] = address_index
    site.content_manager.sign(privatekey=privatekey, extend=extend)
    site.settings["own"] = True
    site.saveSettings()
    logging.info("Site created!")
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False, remove_missing_optional=False):
    """Sign a site's content file (asking for the key if it is not stored),
    optionally publishing afterwards.

    `inner_path` may be relative to the site directory, or an
    absolute/cwd-relative filesystem path that gets mapped back into the site.
    """
    from Site.Site import Site
    from Site import SiteManager
    from Debug import Debug
    SiteManager.site_manager.load()
    logging.info("Signing site: %s..." % address)
    site = Site(address, allow_create=False)
    if not privatekey:  # If no privatekey defined
        from User import UserManager
        user = UserManager.user_manager.get()
        if user:
            site_data = user.getSiteData(address)
            privatekey = site_data.get("privatekey")
        else:
            privatekey = None
        if not privatekey:
            # Not found in users.json, ask from console
            import getpass
            privatekey = getpass.getpass("Private key (input hidden):")
    # inner_path can be either relative to site directory or absolute/relative path
    if os.path.isabs(inner_path):
        full_path = os.path.abspath(inner_path)
    else:
        # NOTE(review): assumes config.working_dir is set elsewhere — confirm
        full_path = os.path.abspath(config.working_dir + '/' + inner_path)
    print(full_path)
    if os.path.isfile(full_path):
        if address in full_path:
            # assuming site address is unique, keep only path after it
            inner_path = full_path.split(address+'/')[1]
        else:
            # oops, file that we found seems to be rogue, so reverting to old behaviour
            logging.warning(f'using {inner_path} relative to site directory')
    try:
        succ = site.content_manager.sign(
            inner_path=inner_path, privatekey=privatekey,
            update_changed_files=True, remove_missing_optional=remove_missing_optional
        )
    except Exception as err:
        logging.error("Sign error: %s" % Debug.formatException(err))
        succ = False
    if succ and publish:
        self.sitePublish(address, inner_path=inner_path)
def siteVerify(self, address):
    """Verify all content signatures and file hashes of a site, reporting
    any invalid files.
    """
    import time
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()
    s = time.time()
    logging.info("Verifing site: %s..." % address)
    site = Site(address)
    bad_files = []
    for content_inner_path in site.content_manager.contents:
        # NOTE: reuses the outer timer, so the final total is per-file, not overall
        s = time.time()
        logging.info("Verifing %s signature..." % content_inner_path)
        error = None
        try:
            file_correct = site.content_manager.verifyFile(
                content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
            )
        except Exception as err:
            file_correct = False
            error = err
        if file_correct is True:
            logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s))
        else:
            logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, error))
            input("Continue?")
            # fix: `bad_files += content_inner_path` extended the list with the
            # individual *characters* of the path string
            bad_files.append(content_inner_path)
    logging.info("Verifying site files...")
    bad_files += site.storage.verifyFiles()["bad_files"]
    if not bad_files:
        logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
    else:
        logging.error("[ERROR] Error during verifying site files!")
def dbRebuild(self, address):
    """Rebuild the SQL cache (content database) of a site."""
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()
    logging.info("Rebuilding site sql cache: %s..." % address)
    site = SiteManager.site_manager.get(address)
    s = time.time()
    try:
        site.storage.rebuildDb()
        logging.info("Done in %.3fs" % (time.time() - s))
    except Exception as err:
        logging.error(err)

def dbQuery(self, address, query):
    """Run an SQL query against a site's database and print rows as JSON."""
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()
    import json
    site = Site(address)
    result = []
    for row in site.storage.query(query):
        result.append(dict(row))
    print(json.dumps(result, indent=4))
def siteAnnounce(self, address):
    """Announce a site to its trackers from a throwaway local FileServer
    and print the discovered peers.
    """
    import main  # fix: `main` was referenced below without being imported
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()
    logging.info("Opening a simple connection server")
    from File import FileServer
    main.file_server = FileServer("127.0.0.1", 1234)
    main.file_server.start()
    logging.info("Announcing site %s to tracker..." % address)
    site = Site(address)
    s = time.time()
    site.announce()
    print("Response time: %.3fs" % (time.time() - s))
    print(site.peers)
def siteDownload(self, address):
    """Download a site's content from the network using a throwaway FileServer."""
    import main  # fix: `main` was referenced below without being imported
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()
    logging.info("Opening a simple connection server")
    from File import FileServer
    main.file_server = FileServer("127.0.0.1", 1234)
    file_server_thread = gevent.spawn(main.file_server.start, check_sites=False)
    site = Site(address)
    on_completed = gevent.event.AsyncResult()
    def onComplete(evt):
        evt.set(True)
    # NOTE(review): on_completed is set but never waited on — confirm intent
    site.onComplete.once(lambda: onComplete(on_completed))
    print("Announcing...")
    site.announce()
    s = time.time()
    print("Downloading...")
    site.downloadContent("content.json", check_modifications=True)
    print("Downloaded in %.3fs" % (time.time()-s))
def siteNeedFile(self, address, inner_path):
    """Fetch a single file of a site from the network and print the result."""
    import main  # fix: `main` was referenced below without being imported
    from Site.Site import Site
    from Site import SiteManager
    SiteManager.site_manager.load()
    def checker():
        # Debug helper: report how long each 1s sleep actually took
        while 1:
            s = time.time()
            time.sleep(1)
            print("Switch time:", time.time() - s)
    gevent.spawn(checker)
    logging.info("Opening a simple connection server")
    from File import FileServer
    main.file_server = FileServer("127.0.0.1", 1234)
    file_server_thread = gevent.spawn(main.file_server.start, check_sites=False)
    site = Site(address)
    site.announce()
    print(site.needFile(inner_path, update=True))
def siteCmd(self, address, cmd, parameters):
    """Send a websocket command to a loaded site and return the parsed result
    (or an error dict when the response is not valid JSON).
    """
    import json
    from Site import SiteManager
    site = SiteManager.site_manager.get(address)
    if not site:
        logging.error("Site not found: %s" % address)
        return None
    ws = self.getWebsocket(site)
    ws.send(json.dumps({"cmd": cmd, "params": parameters, "id": 1}))
    res_raw = ws.recv()
    try:
        res = json.loads(res_raw)
    except Exception as err:
        return {"error": "Invalid result: %s" % err, "res_raw": res_raw}
    if "result" in res:
        return res["result"]
    else:
        return res

def importBundle(self, bundle):
    """Delegate bundle import to main.importBundle."""
    import main
    main.importBundle(bundle)

def getWebsocket(self, site):
    """Open a websocket connection to the local UI server for `site`."""
    import websocket
    ws_address = "ws://%s:%s/Websocket?wrapper_key=%s" % (config.ui_ip, config.ui_port, site.settings["wrapper_key"])
    logging.info("Connecting to %s" % ws_address)
    ws = websocket.create_connection(ws_address)
    return ws
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", recursive=False):
    """Publish site content via the running UI client's websocket; if no
    client is reachable, fall back to publishing with our own FileServer.
    """
    from Site import SiteManager
    logging.info("Loading site...")
    site = SiteManager.site_manager.get(address)
    site.settings["serving"] = True  # Serving the site even if its disabled
    if not recursive:
        inner_paths = [inner_path]
    else:
        inner_paths = list(site.content_manager.contents.keys())
    try:
        ws = self.getWebsocket(site)
    except Exception as err:
        self.sitePublishFallback(site, peer_ip, peer_port, inner_paths, err)
    else:
        logging.info("Sending siteReload")
        self.siteCmd(address, "siteReload", inner_path)
        for inner_path in inner_paths:
            logging.info(f"Sending sitePublish for {inner_path}")
            self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
        logging.info("Done.")
        ws.close()
def sitePublishFallback(self, site, peer_ip, peer_port, inner_paths, err):
    """Publish by starting our own FileServer when no local UI client is
    reachable; peers come from `peer_ip` or from tracker announce.
    """
    import main  # fix: `main` was referenced below without being imported
    if err is not None:
        logging.info(f"Can't connect to local websocket client: {err}")
    logging.info("Publish using fallback mechanism. "
                 "Note that there might be not enough time for peer discovery, "
                 "but you can specify target peer on command line.")
    logging.info("Creating FileServer....")
    # NOTE(review): assumes main.file_server already exists — confirm caller set it up
    file_server_thread = gevent.spawn(main.file_server.start, check_sites=False)  # Dont check every site integrity
    time.sleep(0.001)
    # Started fileserver
    main.file_server.portCheck()
    if peer_ip:  # Announce ip specificed
        site.addPeer(peer_ip, peer_port)
    else:  # Just ask the tracker
        logging.info("Gathering peers from tracker")
        site.announce()  # Gather peers
    for inner_path in inner_paths:
        published = site.publish(5, inner_path)  # Push to peers
    if published > 0:
        time.sleep(3)
        logging.info("Serving files (max 60s)...")
        gevent.joinall([file_server_thread], timeout=60)
        logging.info("Done.")
    else:
        logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
# Crypto commands
def cryptPrivatekeyToAddress(self, privatekey=None):
    """Print the address derived from a private key (prompted if absent)."""
    from Crypt import CryptBitcoin
    if not privatekey:  # If no privatekey in args then ask it now
        import getpass
        privatekey = getpass.getpass("Private key (input hidden):")
    print(CryptBitcoin.privatekeyToAddress(privatekey))

def cryptSign(self, message, privatekey):
    """Print the signature of `message` made with `privatekey`."""
    from Crypt import CryptBitcoin
    print(CryptBitcoin.sign(message, privatekey))

def cryptVerify(self, message, sign, address):
    """Print whether `sign` is a valid signature of `message` by `address`."""
    from Crypt import CryptBitcoin
    print(CryptBitcoin.verify(message, address, sign))

def cryptGetPrivatekey(self, master_seed, site_address_index=None):
    """Derive and print the HD private key for a site index from a
    64-character master seed.
    """
    from Crypt import CryptBitcoin
    if len(master_seed) != 64:
        logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed))
        return False
    privatekey = CryptBitcoin.hdPrivatekey(master_seed, site_address_index)
    print("Requested private key: %s" % privatekey)
# Peer
def peerPing(self, peer_ip, peer_port=None):
    """Ping a peer 5 times, print TLS/cipher info and response times, then
    run a reconnect test.
    """
    import main  # fix: `main` was referenced below without being imported
    if not peer_port:
        peer_port = 15441
    logging.info("Opening a simple connection server")
    from Connection import ConnectionServer
    main.file_server = ConnectionServer("127.0.0.1", 1234)
    main.file_server.start(check_connections=False)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()
    from Peer import Peer
    logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
    s = time.time()
    peer = Peer(peer_ip, peer_port)
    peer.connect()
    if not peer.connection:
        print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error)
        return False
    if "shared_ciphers" in dir(peer.connection.sock):
        print("Shared ciphers:", peer.connection.sock.shared_ciphers())
    if "cipher" in dir(peer.connection.sock):
        print("Cipher:", peer.connection.sock.cipher()[0])
    if "version" in dir(peer.connection.sock):
        print("TLS version:", peer.connection.sock.version())
    print("Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error))
    for i in range(5):
        ping_delay = peer.ping()
        print("Response time: %.3fs" % ping_delay)
        time.sleep(1)
    peer.remove()
    print("Reconnect test...")
    peer = Peer(peer_ip, peer_port)
    for i in range(5):
        ping_delay = peer.ping()
        print("Response time: %.3fs" % ping_delay)
        time.sleep(1)
def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False):
    """Fetch a file directly from a single peer; with benchmark=True,
    repeat 10 times and report total time.
    """
    import main  # fix: `main` was referenced below without being imported
    logging.info("Opening a simple connection server")
    from Connection import ConnectionServer
    main.file_server = ConnectionServer("127.0.0.1", 1234)
    main.file_server.start(check_connections=False)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()
    from Peer import Peer
    logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
    peer = Peer(peer_ip, peer_port)
    s = time.time()
    if benchmark:
        for i in range(10):
            peer.getFile(site, filename)  # fix: stray trailing comma built a throwaway tuple
        print("Response time: %.3fs" % (time.time() - s))
        input("Check memory")
    else:
        print(peer.getFile(site, filename).read())
def peerCmd(self, peer_ip, peer_port, cmd, parameters):
    """Send an arbitrary protocol command to a peer and print the JSON response."""
    import main  # fix: `main` was referenced below without being imported
    logging.info("Opening a simple connection server")
    from Connection import ConnectionServer
    main.file_server = ConnectionServer()
    main.file_server.start(check_connections=False)
    from Crypt import CryptConnection
    CryptConnection.manager.loadCerts()
    from Peer import Peer
    peer = Peer(peer_ip, peer_port)
    import json
    if parameters:
        parameters = json.loads(parameters.replace("'", '"'))
    else:
        parameters = {}
    res = None  # fix: `res` was unbound in the except-branch print below
    try:
        res = peer.request(cmd, parameters)
        print(json.dumps(res, indent=2, ensure_ascii=False))
    except Exception as err:
        print("Unknown response (%s): %s" % (err, res))
def getConfig(self):
    """Print the server info dict from config as pretty JSON."""
    import json
    print(json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False))

def test(self, test_name, *args, **kwargs):
    """Run the `test<Name>` method for `test_name`, or list available tests
    when no name is given. Generator tests stream their progress output.
    """
    import types
    def funcToName(func_name):
        # "testFooBar" -> "fooBar"
        test_name = func_name.replace("test", "")
        return test_name[0].lower() + test_name[1:]
    test_names = [funcToName(name) for name in dir(self) if name.startswith("test") and name != "test"]
    if not test_name:
        # No test specificed, list tests
        print("\nNo test specified, possible tests:")
        for test_name in test_names:
            func_name = "test" + test_name[0].upper() + test_name[1:]
            func = getattr(self, func_name)
            if func.__doc__:
                print("- %s: %s" % (test_name, func.__doc__.strip()))
            else:
                print("- %s" % test_name)
        return None
    # Run tests
    func_name = "test" + test_name[0].upper() + test_name[1:]
    if hasattr(self, func_name):
        func = getattr(self, func_name)
        print("- Running test: %s" % test_name, end="")
        s = time.time()
        ret = func(*args, **kwargs)
        if type(ret) is types.GeneratorType:
            for progress in ret:
                print(progress, end="")
                sys.stdout.flush()
            print("\n* Test %s done in %.3fs" % (test_name, time.time() - s))
    else:
        print("Unknown test: %r (choose from: %s)" % (
            test_name, test_names
        ))

View file

@ -9,23 +9,35 @@ import logging
import logging.handlers import logging.handlers
import stat import stat
import time import time
from pathlib import Path
VERSION = "0.7.10+"
class StartupError(RuntimeError):
pass
class Config: class Config:
"""Class responsible for storing and loading config.
Used as singleton `config`
"""
def __init__(self, argv): def __init__(self, argv):
try: try:
from . import Build from . import Build
except ImportError: except ImportError:
print('cannot find build')
from .util import Git from .util import Git
self.build_type = 'source' self.build_type = 'source'
self.branch = Git.branch() or 'unknown' self.branch = Git.branch() or 'unknown'
self.commit = Git.commit() or 'unknown' self.commit = Git.commit() or 'unknown'
self.version = VERSION
self.platform = 'source'
else: else:
self.build_type = Build.build_type self.build_type = Build.build_type
self.branch = Build.branch self.branch = Build.branch
self.commit = Build.commit self.commit = Build.commit
self.version = "0.7.10+" self.version = Build.version or VERSION
self.platform = Build.platform
self.version_full = f'{self.version} ({self.build_type} from {self.branch}-{self.commit})' self.version_full = f'{self.version} ({self.build_type} from {self.branch}-{self.commit})'
self.user_agent = "conservancy" self.user_agent = "conservancy"
# for compatibility # for compatibility
@ -43,15 +55,18 @@ class Config:
self.keys_restart_need = set([ self.keys_restart_need = set([
"tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db" "tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
]) ])
self.start_dir = self.getStartDir()
self.config_file = self.start_dir + "/zeronet.conf" self.config_file = None
self.data_dir = self.start_dir + "/data" self.config_dir = None
self.log_dir = self.start_dir + "/log" self.data_dir = None
self.private_dir = None
self.log_dir = None
self.configurePaths(argv)
self.openssl_lib_file = None self.openssl_lib_file = None
self.openssl_bin_file = None self.openssl_bin_file = None
self.trackers_file = False self.trackers_file = None
self.createParser() self.createParser()
self.createArguments() self.createArguments()
@ -68,7 +83,8 @@ class Config:
def strToBool(self, v): def strToBool(self, v):
return v.lower() in ("yes", "true", "t", "1") return v.lower() in ("yes", "true", "t", "1")
def getStartDir(self): def getStartDirOld(self):
"""Get directory that would have been used by older versions (pre v0.7.11)"""
this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd") this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd")
if "--start-dir" in self.argv: if "--start-dir" in self.argv:
@ -89,9 +105,127 @@ class Config:
start_dir = os.path.expanduser("~/ZeroNet") start_dir = os.path.expanduser("~/ZeroNet")
else: else:
start_dir = "." start_dir = "."
return start_dir return start_dir
def migrateOld(self, source):
    """Warn the user that legacy (pre-0.7.11) data was found at `source`.

    NOTE(review): the `[bold red]...` tags look like `rich` console markup;
    with the builtin print they are emitted literally — confirm whether
    `rich.print` was intended here.
    """
    print(f'[bold red]WARNING: found data {source}[/bold red]')
    print( ' It used to be default behaviour to store data there,')
    print( ' but now we default to place data and config in user home directory.')
    print( '')
def configurePaths(self, argv):
    """Resolve config/data directories from argv, migrating from the
    pre-0.7.11 layout when needed, then fill in derived paths.
    """
    if '--config-file' in argv:
        self.config_file = argv[argv.index('--config-file') + 1]
    old_dir = Path(self.getStartDirOld())
    new_dir = Path(self.getStartDir())
    no_migrate = '--no-migrate' in argv
    silent_migrate = '--portable' in argv or '--migrate' in argv
    # fix: the original wrapped this in `try: ... except Exception as ex: raise ex`,
    # a no-op that only truncated tracebacks; let exceptions propagate unchanged
    self.start_dir = self.maybeMigrate(old_dir, new_dir, no_migrate, silent_migrate)
    self.updatePaths()
def updatePaths(self):
    """Fill in any path attribute that is still None with its default
    location under `start_dir`.
    """
    defaults = {
        'config_file': self.start_dir / 'znc.conf',
        'config_dir': self.start_dir,
        'private_dir': self.start_dir / 'private',
        'data_dir': self.start_dir / 'data',
        'log_dir': self.start_dir / 'log',
    }
    for attr, default in defaults.items():
        if getattr(self, attr) is None:
            setattr(self, attr, default)
def createPaths(self):
    """Create every runtime directory; idempotent if they already exist."""
    for directory in (self.start_dir, self.private_dir, self.data_dir, self.log_dir):
        directory.mkdir(parents=True, exist_ok=True)

def checkDir(self, root):
    """A directory is a valid startup dir iff it contains a znc.conf file."""
    return (root / 'znc.conf').is_file()
def doMigrate(self, old_dir, new_dir):
    """Move pre-0.7.11 data into the new layout (not implemented yet)."""
    raise RuntimeError('Migration not implemented yet')

def askMigrate(self, old_dir, new_dir, silent):
    """Confirm and run migration from `old_dir` to `new_dir`.

    `silent` (set by --migrate / --portable) skips the interactive prompt.
    Raises StartupError when migration is refused or impossible.
    """
    if silent:
        # fix: `silent` was accepted but ignored, so --migrate/--portable
        # still prompted (or failed outright on a non-interactive shell)
        return self.doMigrate(old_dir, new_dir)
    if not sys.stdin.isatty():
        raise StartupError('Migration refused: non-interactive shell')
    while True:
        r = input(f'You have old data in `{old_dir}`. Migrate to new format to `{new_dir}`? [Y/n]')
        if r.lower().startswith('n'):
            raise StartupError('Migration refused')
        if r.lower().startswith('y'):
            return self.doMigrate(old_dir, new_dir)
def createNewConfig(self, new_dir):
    """Create `new_dir` (with parents) and write a fresh znc.conf stub."""
    new_dir.mkdir(parents=True, exist_ok=True)
    with (new_dir / 'znc.conf').open('w') as f:
        # fix: typo in emitted text ("conervancy" -> "conservancy");
        # also end the file with a newline
        f.write('# zeronet-conservancy config file\n')
def maybeMigrate(self, old_dir, new_dir, no_migrate, silent_migrate):
    """Decide which startup directory to use, migrating old data or
    creating a fresh config as needed.

    Returns the chosen directory; raises StartupError when the directory
    state is unusable or migration is refused.
    """
    if old_dir.exists() and new_dir.exists():
        if old_dir == new_dir:
            if self.checkDir(new_dir):
                return new_dir
            elif no_migrate:
                # fix: was `return StartError(...)` — undefined name, and the
                # exception was returned instead of raised
                raise StartupError('Migration refused, but new directory should be migrated')
            else:
                # fix: was `askMigrate(...)` without `self.` -> NameError
                return self.askMigrate(old_dir, new_dir, silent_migrate)
        else:
            if self.checkDir(new_dir):
                if not no_migrate:
                    print("There's an old starting directory")
                return new_dir
            else:
                raise StartupError('Bad startup directory')
    elif old_dir.exists():
        if no_migrate:
            self.createNewConfig(new_dir)
            return new_dir
        else:
            return self.askMigrate(old_dir, new_dir, silent_migrate)
    elif new_dir.exists():
        if self.checkDir(new_dir):
            return new_dir
        else:
            # fix: was `return StartupError(...)` — callers expect a path, so raise
            raise StartupError('Bad startup directory')
    else:
        self.createNewConfig(new_dir)
        return new_dir
def getStartDir(self):
    """Return directory with config & data.

    Resolution order: explicit --start-dir argument; the repository root
    for portable builds; otherwise a platform-appropriate per-user
    application directory (expanded with ~).
    """
    if "--start-dir" in self.argv:
        return self.argv[self.argv.index("--start-dir") + 1]
    # Repository root: strip a literal trailing '/src' from this file's dir.
    # Fix: the original used .rstrip('/src'), which strips any trailing run
    # of the characters '/', 's', 'r', 'c' and can eat part of the path
    # (e.g. '/home/user/src' -> '/home/use').
    src_dir = os.path.dirname(os.path.abspath(__file__)).replace("\\", "/")
    here = src_dir[:-len('/src')] if src_dir.endswith('/src') else src_dir
    if '--portable' in self.argv or self.build_type == 'portable':
        return here
    MACOSX_DIR = '~/Library/Application Support/zeronet-conservancy'
    WINDOWS_DIR = '~/AppData/zeronet-conservancy'
    LIBREDESKTOP_DIR = '~/.local/share/zeronet-conservancy'
    if self.platform == 'source':
        # Running from source: pick directory based on the host OS
        if platform.system() == 'Darwin':
            path = MACOSX_DIR
        elif platform.system() == 'Windows':
            path = WINDOWS_DIR
        else:
            path = LIBREDESKTOP_DIR
    elif self.platform == 'macosx':
        path = MACOSX_DIR
    elif self.platform == 'windows':
        path = WINDOWS_DIR
    elif self.platform == 'libredesktop':
        path = LIBREDESKTOP_DIR
    else:
        raise RuntimeError(f'UNKNOWN PLATFORM: {self.platform}. Something must have went terribly wrong!')
    return os.path.expanduser(path)
# Create command line arguments # Create command line arguments
def createArguments(self): def createArguments(self):
try: try:
@ -109,9 +243,9 @@ class Config:
else: else:
fix_float_decimals = False fix_float_decimals = False
config_file = self.start_dir + "/zeronet.conf" config_file = self.config_file
data_dir = self.start_dir + "/data" data_dir = self.data_dir
log_dir = self.start_dir + "/log" log_dir = self.log_dir
ip_local = ["127.0.0.1", "::1"] ip_local = ["127.0.0.1", "::1"]
@ -229,9 +363,11 @@ class Config:
self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true') self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true')
self.parser.add_argument('--start-dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path") self.parser.add_argument('--portable', action=argparse.BooleanOptionalAction)
self.parser.add_argument('--start-dir', help='Path of working dir for variable content (data, log, config)', default=self.start_dir, metavar="path")
self.parser.add_argument('--config-file', help='Path of config file', default=config_file, metavar="path") self.parser.add_argument('--config-file', help='Path of config file', default=config_file, metavar="path")
self.parser.add_argument('--data-dir', help='Path of data directory', default=data_dir, metavar="path") self.parser.add_argument('--data-dir', help='Path of data directory', default=data_dir, metavar="path")
self.parser.add_argument('--no-migrate', help='Ignore data directories from old 0net versions', action=argparse.BooleanOptionalAction, default=False)
self.parser.add_argument('--console-log-level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"]) self.parser.add_argument('--console-log-level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"])
@ -277,7 +413,7 @@ class Config:
self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port') self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip') self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
self.parser.add_argument('--bootstrap-url', help='URL of file with link to bootstrap bundle', default='https://raw.githubusercontent.com/zeronet-conservancy/zeronet-conservancy/master/bootstrap.url', type=str) self.parser.add_argument('--bootstrap-url', help='URL of file with link to bootstrap bundle', default='https://raw.githubusercontent.com/zeronet-conservancy/zeronet-conservancy/master/bootstrap.url', type=str)
self.parser.add_argument('--disable-bootstrap', help='Disable downloading bootstrap information from clearnet', action='store_true') self.parser.add_argument('--bootstrap', help='Enable downloading bootstrap information from clearnet', action=argparse.BooleanOptionalAction, default=True)
self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=[], metavar='protocol://address', nargs='*') self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=[], metavar='protocol://address', nargs='*')
self.parser.add_argument('--trackers-file', help='Load torrent trackers dynamically from a file (using Syncronite by default)', default=['{data_dir}/15CEFKBRHFfAP9rmL6hhLmHoXrrgmw4B5o/cache/1/Syncronite.html'], metavar='path', nargs='*') self.parser.add_argument('--trackers-file', help='Load torrent trackers dynamically from a file (using Syncronite by default)', default=['{data_dir}/15CEFKBRHFfAP9rmL6hhLmHoXrrgmw4B5o/cache/1/Syncronite.html'], metavar='path', nargs='*')
self.parser.add_argument('--trackers-proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable") self.parser.add_argument('--trackers-proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable")
@ -328,7 +464,7 @@ class Config:
return self.parser return self.parser
def loadTrackersFile(self): def loadTrackersFile(self):
if not self.trackers_file: if self.trackers_file is None:
return None return None
self.trackers = self.arguments.trackers[:] self.trackers = self.arguments.trackers[:]
@ -338,16 +474,17 @@ class Config:
if trackers_file.startswith("/"): # Absolute if trackers_file.startswith("/"): # Absolute
trackers_file_path = trackers_file trackers_file_path = trackers_file
elif trackers_file.startswith("{data_dir}"): # Relative to data_dir elif trackers_file.startswith("{data_dir}"): # Relative to data_dir
trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir) trackers_file_path = trackers_file.replace('{data_dir}', str(self.data_dir))
else: # Relative to zeronet.py else:
trackers_file_path = self.start_dir + "/" + trackers_file # Relative to zeronet.py or something else, unsupported
raise RuntimeError(f'trackers_file should be relative to {{data_dir}} or absolute path (not {trackers_file})')
for line in open(trackers_file_path): for line in open(trackers_file_path):
tracker = line.strip() tracker = line.strip()
if "://" in tracker and tracker not in self.trackers: if "://" in tracker and tracker not in self.trackers:
self.trackers.append(tracker) self.trackers.append(tracker)
except Exception as err: except Exception as err:
print("Error loading trackers file: %s" % err) print(f'Error loading trackers file: {err}')
# Find arguments specified for current action # Find arguments specified for current action
def getActionArguments(self): def getActionArguments(self):
@ -412,6 +549,8 @@ class Config:
self.parseCommandline(argv, silent) # Parse argv self.parseCommandline(argv, silent) # Parse argv
self.setAttributes() self.setAttributes()
self.updatePaths()
self.createPaths()
if parse_config: if parse_config:
argv = self.parseConfig(argv) # Add arguments from config file argv = self.parseConfig(argv) # Add arguments from config file
@ -436,7 +575,7 @@ class Config:
for arg in args: for arg in args:
if arg.startswith('--') and '_' in arg: if arg.startswith('--') and '_' in arg:
farg = arg.replace('_', '-') farg = arg.replace('_', '-')
print(f'WARNING: using deprecated flag in command line: {arg} should be {farg}') print(f'[bold red]WARNING: using deprecated flag in command line: {arg} should be {farg}[/bold red]')
print('Support for deprecated flags might be removed in the future') print('Support for deprecated flags might be removed in the future')
else: else:
farg = arg farg = arg
@ -473,9 +612,6 @@ class Config:
def parseConfig(self, argv): def parseConfig(self, argv):
argv = self.fixArgs(argv) argv = self.fixArgs(argv)
# Find config file path from parameters
if "--config-file" in argv:
self.config_file = argv[argv.index("--config-file") + 1]
# Load config file # Load config file
if os.path.isfile(self.config_file): if os.path.isfile(self.config_file):
config = configparser.RawConfigParser(allow_no_value=True, strict=False) config = configparser.RawConfigParser(allow_no_value=True, strict=False)
@ -518,7 +654,7 @@ class Config:
val = val[:] val = val[:]
if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"): if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"):
if val: if val:
val = val.replace("\\", "/") val = Path(val)
setattr(self, key, val) setattr(self, key, val)
def loadPlugins(self): def loadPlugins(self):

View file

@ -153,7 +153,7 @@ content_dbs = {}
def getContentDb(path=None): def getContentDb(path=None):
if not path: if not path:
path = "%s/content.db" % config.data_dir path = config.start_dir / 'content.db'
if path not in content_dbs: if path not in content_dbs:
content_dbs[path] = ContentDb(path) content_dbs[path] = ContentDb(path)
content_dbs[path].init() content_dbs[path].init()

View file

@ -24,20 +24,20 @@ class CryptConnectionManager:
self.context_server = None self.context_server = None
self.openssl_conf_template = "src/lib/openssl/openssl.cnf" self.openssl_conf_template = "src/lib/openssl/openssl.cnf"
self.openssl_conf = config.data_dir + "/openssl.cnf" self.openssl_conf = config.private_dir / "openssl.cnf"
self.openssl_env = { self.openssl_env = {
"OPENSSL_CONF": self.openssl_conf, "OPENSSL_CONF": self.openssl_conf,
"RANDFILE": config.data_dir + "/openssl-rand.tmp" "RANDFILE": config.private_dir / "openssl-rand.tmp"
} }
self.crypt_supported = [] # Supported cryptos self.crypt_supported = [] # Supported cryptos
self.cacert_pem = config.data_dir + "/cacert-rsa.pem" self.cacert_pem = config.private_dir / "cacert-rsa.pem"
self.cakey_pem = config.data_dir + "/cakey-rsa.pem" self.cakey_pem = config.private_dir / "cakey-rsa.pem"
self.cert_pem = config.data_dir + "/cert-rsa.pem" self.cert_pem = config.private_dir / "cert-rsa.pem"
self.cert_csr = config.data_dir + "/cert-rsa.csr" self.cert_csr = config.private_dir / "cert-rsa.csr"
self.key_pem = config.data_dir + "/key-rsa.pem" self.key_pem = config.private_dir / "key-rsa.pem"
self.log = logging.getLogger("CryptConnectionManager") self.log = logging.getLogger("CryptConnectionManager")
self.log.debug("Version: %s" % ssl.OPENSSL_VERSION) self.log.debug("Version: %s" % ssl.OPENSSL_VERSION)
@ -105,8 +105,8 @@ class CryptConnectionManager:
if config.keep_ssl_cert: if config.keep_ssl_cert:
return False return False
for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr", "openssl-rand.tmp"]: for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr", "openssl-rand.tmp"]:
file_path = "%s/%s" % (config.data_dir, file_name) file_path = config.data_dir / file_name
if os.path.isfile(file_path): if file_path.is_file():
os.unlink(file_path) os.unlink(file_path)
# Load and create cert files is necessary # Load and create cert files is necessary

View file

@ -21,7 +21,7 @@ else:
class DebugReloader: class DebugReloader:
def __init__(self, paths=None): def __init__(self, paths=None):
if not paths: if not paths:
paths = ["src", "plugins", config.data_dir + "/__plugins__"] paths = ["src", "plugins"]
self.log = logging.getLogger("DebugReloader") self.log = logging.getLogger("DebugReloader")
self.last_chaged = 0 self.last_chaged = 0
self.callbacks = [] self.callbacks = []

View file

@ -25,7 +25,7 @@ class PluginManager:
self.after_load = [] # Execute functions after loaded plugins self.after_load = [] # Execute functions after loaded plugins
self.function_flags = {} # Flag function for permissions self.function_flags = {} # Flag function for permissions
self.reloading = False self.reloading = False
self.config_path = config.data_dir + "/plugins.json" self.config_path = config.config_dir / 'plugins.json'
self.loadConfig() self.loadConfig()
self.config.setdefault("builtin", {}) self.config.setdefault("builtin", {})

View file

@ -88,9 +88,10 @@ class Site(object):
def loadSettings(self, settings=None): def loadSettings(self, settings=None):
if not settings: if not settings:
try: try:
settings = json.load(open(f'{config.data_dir}/sites.json')).get(self.address) with (config.private_dir / 'sites.json').open() as f:
settings = json.load(f).get(self.address)
except Exception as err: except Exception as err:
logging.error(f'Error loading {config.data_dir}/sites.json: {err}') logging.error(f'Error loading {config.private_dir}/sites.json: {err}')
settings = {} settings = {}
if settings: if settings:
self.settings = settings self.settings = settings

View file

@ -38,7 +38,7 @@ class SiteManager(object):
load_s = time.time() load_s = time.time()
# Load new adresses # Load new adresses
try: try:
json_path = f"{config.data_dir}/sites.json" json_path = config.private_dir / 'sites.json'
data = json.load(open(json_path)) data = json.load(open(json_path))
except Exception as err: except Exception as err:
self.log.error(f"Unable to load {json_path}: {err}") self.log.error(f"Unable to load {json_path}: {err}")
@ -48,7 +48,7 @@ class SiteManager(object):
for address, settings in data.items(): for address, settings in data.items():
if address not in self.sites: if address not in self.sites:
if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): if (config.data_dir / address / 'content.json').is_file():
# Root content.json exists, try load site # Root content.json exists, try load site
s = time.time() s = time.time()
try: try:
@ -121,7 +121,7 @@ class SiteManager(object):
s = time.time() s = time.time()
if data: if data:
helper.atomicWrite("%s/sites.json" % config.data_dir, helper.jsonDumps(data).encode("utf8")) helper.atomicWrite(config.private_dir / 'sites.json', helper.jsonDumps(data).encode("utf8"))
else: else:
self.log.debug("Save error: No data") self.log.debug("Save error: No data")
time_write = time.time() - s time_write = time.time() - s

View file

@ -29,7 +29,7 @@ thread_pool_fs_batch = ThreadPool.ThreadPool(1, name="FS batch")
class SiteStorage(object): class SiteStorage(object):
def __init__(self, site, allow_create=True): def __init__(self, site, allow_create=True):
self.site = site self.site = site
self.directory = f'{config.data_dir}/{self.site.address}' # Site data diretory self.directory = config.data_dir / self.site.address # Site data diretory
self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir
self.log = site.log self.log = site.log
self.db = None # Db class self.db = None # Db class

View file

@ -786,7 +786,7 @@ class UiRequest:
address = path_parts["address"] address = path_parts["address"]
file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"]) file_path = config.data_dir / address / path_parts['inner_path']
if (config.debug or config.merge_media) and file_path.split("/")[-1].startswith("all."): if (config.debug or config.merge_media) and file_path.split("/")[-1].startswith("all."):
# If debugging merge *.css to all.css and *.js to all.js # If debugging merge *.css to all.css and *.js to all.js

View file

@ -35,8 +35,9 @@ class User(object):
# Save to data/users.json # Save to data/users.json
@util.Noparallel(queue=True, ignore_class=True) @util.Noparallel(queue=True, ignore_class=True)
def save(self): def save(self):
users_json = config.private_dir / 'users.json'
s = time.time() s = time.time()
users = json.load(open("%s/users.json" % config.data_dir)) users = json.load(open(users_json))
if self.master_address not in users: if self.master_address not in users:
users[self.master_address] = {} # Create if not exist users[self.master_address] = {} # Create if not exist
user_data = users[self.master_address] user_data = users[self.master_address]
@ -45,7 +46,7 @@ class User(object):
user_data["sites"] = self.sites user_data["sites"] = self.sites
user_data["certs"] = self.certs user_data["certs"] = self.certs
user_data["settings"] = self.settings user_data["settings"] = self.settings
helper.atomicWrite("%s/users.json" % config.data_dir, helper.jsonDumps(users).encode("utf8")) helper.atomicWrite(users_json, helper.jsonDumps(users).encode("utf8"))
self.log.debug("Saved in %.3fs" % (time.time() - s)) self.log.debug("Saved in %.3fs" % (time.time() - s))
self.delayed_save_thread = None self.delayed_save_thread = None

View file

@ -15,7 +15,7 @@ class UserManager(object):
self.users = {} self.users = {}
self.log = logging.getLogger("UserManager") self.log = logging.getLogger("UserManager")
# Load all user from data/users.json # Load all user from users.json
def load(self): def load(self):
if not self.users: if not self.users:
self.users = {} self.users = {}
@ -25,7 +25,7 @@ class UserManager(object):
s = time.time() s = time.time()
# Load new users # Load new users
try: try:
json_path = "%s/users.json" % config.data_dir json_path = config.private_dir / 'users.json'
data = json.load(open(json_path)) data = json.load(open(json_path))
except Exception as err: except Exception as err:
raise Exception("Unable to load %s: %s" % (json_path, err)) raise Exception("Unable to load %s: %s" % (json_path, err))
@ -57,7 +57,7 @@ class UserManager(object):
user.saveDelayed() user.saveDelayed()
return user return user
# List all users from data/users.json # List all users
# Return: {"usermasteraddr": User} # Return: {"usermasteraddr": User}
def list(self): def list(self):
if self.users == {}: # Not loaded yet if self.users == {}: # Not loaded yet

View file

@ -4,6 +4,9 @@ import stat
import time import time
import logging import logging
from util.compat import * from util.compat import *
from pathlib import Path
from rich import print
startup_errors = [] startup_errors = []
def startupError(msg): def startupError(msg):
@ -12,13 +15,6 @@ def startupError(msg):
# Third party modules # Third party modules
import gevent import gevent
if gevent.version_info.major <= 1: # Workaround for random crash when libuv used with threads
try:
if "libev" not in str(gevent.config.loop):
gevent.config.loop = "libev-cext"
except Exception as err:
startupError("Unable to switch gevent loop to libev: %s" % err)
import gevent.monkey import gevent.monkey
gevent.monkey.patch_all(thread=False, subprocess=False) gevent.monkey.patch_all(thread=False, subprocess=False)
@ -33,14 +29,12 @@ def load_config():
# Config parse failed completely, show the help screen and exit # Config parse failed completely, show the help screen and exit
config.parse() config.parse()
load_config()
def importBundle(bundle): def importBundle(bundle):
from zipfile import ZipFile from zipfile import ZipFile
from Crypt.CryptBitcoin import isValidAddress from Crypt.CryptBitcoin import isValidAddress
import json import json
sites_json_path = f"{config.data_dir}/sites.json" sites_json_path = config.private_dir / 'sites.json'
try: try:
with open(sites_json_path) as f: with open(sites_json_path) as f:
sites = json.load(f) sites = json.load(f)
@ -58,31 +52,36 @@ def importBundle(bundle):
map(lambda f: removeprefix(f, prefix).split('/')[0], all_files)))) map(lambda f: removeprefix(f, prefix).split('/')[0], all_files))))
for d in top_2: for d in top_2:
if isValidAddress(d): if isValidAddress(d):
logging.info(f'unpack {d} into {config.data_dir}') print(f'Unpacking {d} into {config.data_dir}')
for fname in filter(lambda f: f.startswith(prefix+d) and not f.endswith('/'), all_files): for fname in filter(lambda f: f.startswith(prefix+d) and not f.endswith('/'), all_files):
tgt = config.data_dir + '/' + removeprefix(fname, prefix) tgt = removeprefix(fname, prefix)
logging.info(f'-- {fname} --> {tgt}') print(f'-- {fname} --> {tgt}')
info = zf.getinfo(fname) info = zf.getinfo(fname)
info.filename = tgt info.filename = tgt
zf.extract(info) zf.extract(info, path=config.data_dir)
logging.info(f'add site {d}') logging.info(f'add site {d}')
sites[d] = {} sites[d] = {}
else: else:
logging.info(f'Warning: unknown file in a bundle: {prefix+d}') print(f'Warning: unknown file in a bundle: {prefix+d}')
with open(sites_json_path, 'w') as f: with open(sites_json_path, 'w') as f:
json.dump(sites, f) json.dump(sites, f)
def init_dirs(): def init_dirs():
data_dir = config.data_dir data_dir = Path(config.data_dir)
has_data_dir = os.path.isdir(data_dir) private_dir = Path(config.private_dir)
need_bootstrap = not config.disable_bootstrap and (not has_data_dir or not os.path.isfile(f'{data_dir}/sites.json')) and not config.offline need_bootstrap = (config.bootstrap
and not config.offline
and (not data_dir.is_dir() or not (private_dir / 'sites.json').is_file()))
if not has_data_dir: # old_users_json = data_dir / 'users.json'
os.mkdir(data_dir) # if old_users_json.is_file():
try: # print('Migrating existing users.json file to private/')
os.chmod(data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) # old_sites_json = data_dir / 'sites.json'
except Exception as err: # if old_sites_json.is_file():
startupError(f"Can't change permission of {data_dir}: {err}") # print('Migrating existing sites.json file to private/')
if not data_dir.is_dir():
data_dir.mkdir(parents=True, exist_ok=True)
if need_bootstrap: if need_bootstrap:
import requests import requests
@ -99,626 +98,105 @@ def init_dirs():
startupError(f"Cannot load boostrap bundle (response status: {response.status_code})") startupError(f"Cannot load boostrap bundle (response status: {response.status_code})")
importBundle(BytesIO(response.content)) importBundle(BytesIO(response.content))
sites_json = f"{data_dir}/sites.json" sites_json = private_dir / 'sites.json'
if not os.path.isfile(sites_json): if not os.path.isfile(sites_json):
with open(sites_json, "w") as f: with open(sites_json, "w") as f:
f.write("{}") f.write("{}")
users_json = f"{data_dir}/users.json" users_json = private_dir / 'users.json'
if not os.path.isfile(users_json): if not os.path.isfile(users_json):
with open(users_json, "w") as f: with open(users_json, "w") as f:
f.write("{}") f.write("{}")
# TODO: GET RID OF TOP-LEVEL CODE!!!
config.initConsoleLogger()
try:
init_dirs()
except:
import traceback as tb
print(tb.format_exc())
# at least make sure to print help if we're otherwise so helpless
config.parser.print_help()
sys.exit(1)
if config.action == "main":
from util import helper
try:
lock = helper.openLocked(f"{config.data_dir}/lock.pid", "w")
lock.write(f"{os.getpid()}")
except BlockingIOError as err:
startupError(f"Can't open lock file, your 0net client is probably already running, exiting... ({err})")
proc = helper.openBrowser(config.open_browser)
r = proc.wait()
sys.exit(r)
config.initLogging(console_logging=False)
# Debug dependent configuration
from Debug import DebugHook
from Plugin import PluginManager
def load_plugins(): def load_plugins():
from Plugin import PluginManager
PluginManager.plugin_manager.loadPlugins() PluginManager.plugin_manager.loadPlugins()
config.loadPlugins() config.loadPlugins()
config.parse() # Parse again to add plugin configuration options config.parse() # Parse again to add plugin configuration options
load_plugins() def init():
load_config()
config.initConsoleLogger()
# Log current config
logging.debug("Config: %s" % config)
# Modify stack size on special hardwares
if config.stack_size:
import threading
threading.stack_size(config.stack_size)
# Use pure-python implementation of msgpack to save CPU
if config.msgpack_purepython:
os.environ["MSGPACK_PUREPYTHON"] = "True"
# Fix console encoding on Windows
# TODO: check if this is still required
if sys.platform.startswith("win"):
import subprocess
try: try:
chcp_res = subprocess.check_output("chcp 65001", shell=True).decode(errors="ignore").strip() init_dirs()
logging.debug("Changed console encoding to utf8: %s" % chcp_res) except:
except Exception as err: import traceback as tb
logging.error("Error changing console encoding to utf8: %s" % err) print(tb.format_exc())
# at least make sure to print help if we're otherwise so helpless
# config.parser.print_help()
sys.exit(1)
# Socket monkey patch if config.action == "main":
if config.proxy: from util import helper
from util import SocksProxy try:
import urllib.request lock = helper.openLocked(config.start_dir / 'lock.pid', "w")
logging.info("Patching sockets to socks proxy: %s" % config.proxy) lock.write(f"{os.getpid()}")
if config.fileserver_ip == "*": except BlockingIOError as err:
config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost startupError(f"Can't open lock file, your 0net client is probably already running, exiting... ({err})")
config.disable_udp = True # UDP not supported currently with proxy proc = helper.openBrowser(config.open_browser)
SocksProxy.monkeyPatch(*config.proxy.split(":")) r = proc.wait()
elif config.tor == "always": sys.exit(r)
from util import SocksProxy
import urllib.request
logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy)
if config.fileserver_ip == "*":
config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost
SocksProxy.monkeyPatch(*config.tor_proxy_split())
config.disable_udp = True
elif config.bind:
bind = config.bind
if ":" not in config.bind:
bind += ":0"
from util import helper
helper.socketBindMonkeyPatch(*bind.split(":"))
# -- Actions -- config.initLogging(console_logging=False)
# Debug dependent configuration
from Debug import DebugHook
@PluginManager.acceptPlugins load_plugins()
class Actions:
def call(self, function_name, kwargs):
logging.info(f'zeronet-conservancy {config.version_full} on Python {sys.version} Gevent {gevent.__version__}')
func = getattr(self, function_name, None) # Log current config
back = func(**kwargs) logging.debug("Config: %s" % config)
if back:
print(back)
def ipythonThread(self):
import IPython
IPython.embed()
self.gevent_quit.set()
# Default action: Start serving UiServer and FileServer
def main(self):
global ui_server, file_server
from File import FileServer
from Ui import UiServer
logging.info("Creating FileServer....")
file_server = FileServer()
logging.info("Creating UiServer....")
ui_server = UiServer()
file_server.ui_server = ui_server
for startup_error in startup_errors:
logging.error("Startup error: %s" % startup_error)
logging.info("Removing old SSL certs...")
from Crypt import CryptConnection
CryptConnection.manager.removeCerts()
logging.info("Starting servers....")
# Modify stack size on special hardwares
if config.stack_size:
import threading import threading
self.gevent_quit = threading.Event() threading.stack_size(config.stack_size)
launched_greenlets = [gevent.spawn(ui_server.start), gevent.spawn(file_server.start), gevent.spawn(ui_server.startSiteServer)]
# if --repl, start ipython thread # Use pure-python implementation of msgpack to save CPU
# FIXME: Unfortunately this leads to exceptions on exit so use with care if config.msgpack_purepython:
if config.repl: os.environ["MSGPACK_PUREPYTHON"] = "True"
threading.Thread(target=self.ipythonThread).start()
stopped = 0 # Fix console encoding on Windows
# Process all greenlets in main thread # TODO: check if this is still required
while not self.gevent_quit.is_set() and stopped < len(launched_greenlets): if sys.platform.startswith("win"):
stopped += len(gevent.joinall(launched_greenlets, timeout=1)) import subprocess
# Exited due to repl, so must kill greenlets
if stopped < len(launched_greenlets):
gevent.killall(launched_greenlets, exception=KeyboardInterrupt)
logging.info("All server stopped")
# Site commands
def siteCreate(self, use_master_seed=True):
logging.info("Generating new privatekey (use_master_seed: %s)..." % config.use_master_seed)
from Crypt import CryptBitcoin
if use_master_seed:
from User import UserManager
user = UserManager.user_manager.get()
if not user:
user = UserManager.user_manager.create()
address, address_index, site_data = user.getNewSiteData()
privatekey = site_data["privatekey"]
logging.info("Generated using master seed from users.json, site index: %s" % address_index)
else:
privatekey = CryptBitcoin.newPrivatekey()
address = CryptBitcoin.privatekeyToAddress(privatekey)
logging.info("----------------------------------------------------------------------")
logging.info("Site private key: %s" % privatekey)
logging.info(" !!! ^ Save it now, required to modify the site ^ !!!")
logging.info("Site address: %s" % address)
logging.info("----------------------------------------------------------------------")
while True and not config.batch and not use_master_seed:
if input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
break
else:
logging.info("Please, secure it now, you going to need it to modify your site!")
logging.info("Creating directory structure...")
from Site.Site import Site
from Site import SiteManager
SiteManager.site_manager.load()
os.mkdir("%s/%s" % (config.data_dir, address))
open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address)
logging.info("Creating content.json...")
site = Site(address)
extend = {"postmessage_nonce_security": True}
if use_master_seed:
extend["address_index"] = address_index
site.content_manager.sign(privatekey=privatekey, extend=extend)
site.settings["own"] = True
site.saveSettings()
logging.info("Site created!")
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False, remove_missing_optional=False):
from Site.Site import Site
from Site import SiteManager
from Debug import Debug
SiteManager.site_manager.load()
logging.info("Signing site: %s..." % address)
site = Site(address, allow_create=False)
if not privatekey: # If no privatekey defined
from User import UserManager
user = UserManager.user_manager.get()
if user:
site_data = user.getSiteData(address)
privatekey = site_data.get("privatekey")
else:
privatekey = None
if not privatekey:
# Not found in users.json, ask from console
import getpass
privatekey = getpass.getpass("Private key (input hidden):")
# inner_path can be either relative to site directory or absolute/relative path
if os.path.isabs(inner_path):
full_path = os.path.abspath(inner_path)
else:
full_path = os.path.abspath(config.working_dir + '/' + inner_path)
print(full_path)
if os.path.isfile(full_path):
if address in full_path:
# assuming site address is unique, keep only path after it
inner_path = full_path.split(address+'/')[1]
else:
# oops, file that we found seems to be rogue, so reverting to old behaviour
logging.warning(f'using {inner_path} relative to site directory')
try: try:
succ = site.content_manager.sign( chcp_res = subprocess.check_output("chcp 65001", shell=True).decode(errors="ignore").strip()
inner_path=inner_path, privatekey=privatekey, logging.debug("Changed console encoding to utf8: %s" % chcp_res)
update_changed_files=True, remove_missing_optional=remove_missing_optional
)
except Exception as err: except Exception as err:
logging.error("Sign error: %s" % Debug.formatException(err)) logging.error("Error changing console encoding to utf8: %s" % err)
succ = False
if succ and publish:
self.sitePublish(address, inner_path=inner_path)
def siteVerify(self, address): # Socket monkey patch
import time if config.proxy:
from Site.Site import Site from util import SocksProxy
from Site import SiteManager import urllib.request
SiteManager.site_manager.load() logging.info("Patching sockets to socks proxy: %s" % config.proxy)
if config.fileserver_ip == "*":
config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost
config.disable_udp = True # UDP not supported currently with proxy
SocksProxy.monkeyPatch(*config.proxy.split(":"))
elif config.tor == "always":
from util import SocksProxy
import urllib.request
logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy)
if config.fileserver_ip == "*":
config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost
SocksProxy.monkeyPatch(*config.tor_proxy_split())
config.disable_udp = True
elif config.bind:
bind = config.bind
if ":" not in config.bind:
bind += ":0"
from util import helper
helper.socketBindMonkeyPatch(*bind.split(":"))
s = time.time() init()
logging.info("Verifing site: %s..." % address)
site = Site(address)
bad_files = []
for content_inner_path in site.content_manager.contents:
s = time.time()
logging.info("Verifing %s signature..." % content_inner_path)
error = None
try:
file_correct = site.content_manager.verifyFile(
content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
)
except Exception as err:
file_correct = False
error = err
if file_correct is True:
logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s))
else:
logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, error))
input("Continue?")
bad_files += content_inner_path
logging.info("Verifying site files...")
bad_files += site.storage.verifyFiles()["bad_files"]
if not bad_files:
logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
else:
logging.error("[ERROR] Error during verifying site files!")
def dbRebuild(self, address):
from Site.Site import Site
from Site import SiteManager
SiteManager.site_manager.load()
logging.info("Rebuilding site sql cache: %s..." % address)
site = SiteManager.site_manager.get(address)
s = time.time()
try:
site.storage.rebuildDb()
logging.info("Done in %.3fs" % (time.time() - s))
except Exception as err:
logging.error(err)
def dbQuery(self, address, query):
from Site.Site import Site
from Site import SiteManager
SiteManager.site_manager.load()
import json
site = Site(address)
result = []
for row in site.storage.query(query):
result.append(dict(row))
print(json.dumps(result, indent=4))
    def siteAnnounce(self, address):
        """Announce a site to its trackers and print response time and peers."""
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()
        logging.info("Opening a simple connection server")
        global file_server
        from File import FileServer
        # Minimal local file server so trackers/peers have something to reach back
        file_server = FileServer("127.0.0.1", 1234)
        file_server.start()
        logging.info("Announcing site %s to tracker..." % address)
        site = Site(address)
        s = time.time()
        site.announce()
        print("Response time: %.3fs" % (time.time() - s))
        print(site.peers)
    def siteDownload(self, address):
        """Download a site's content.json (with modifications) from the network."""
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()
        logging.info("Opening a simple connection server")
        global file_server
        from File import FileServer
        file_server = FileServer("127.0.0.1", 1234)
        # check_sites=False: skip integrity checks of other known sites
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)
        site = Site(address)
        on_completed = gevent.event.AsyncResult()
        def onComplete(evt):
            evt.set(True)
        # NOTE(review): on_completed is set by the callback but never waited on
        # here — confirm whether blocking on it was intended.
        site.onComplete.once(lambda: onComplete(on_completed))
        print("Announcing...")
        site.announce()
        s = time.time()
        print("Downloading...")
        site.downloadContent("content.json", check_modifications=True)
        print("Downloaded in %.3fs" % (time.time()-s))
    def siteNeedFile(self, address, inner_path):
        """Fetch a single file of a site from peers and print the result."""
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()
        def checker():
            # Greenlet-scheduling probe: prints how long a 1s sleep really took,
            # exposing event-loop starvation during the download
            while 1:
                s = time.time()
                time.sleep(1)
                print("Switch time:", time.time() - s)
        gevent.spawn(checker)
        logging.info("Opening a simple connection server")
        global file_server
        from File import FileServer
        file_server = FileServer("127.0.0.1", 1234)
        # check_sites=False: only this site matters for the one-shot fetch
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)
        site = Site(address)
        site.announce()
        print(site.needFile(inner_path, update=True))
def siteCmd(self, address, cmd, parameters):
import json
from Site import SiteManager
site = SiteManager.site_manager.get(address)
if not site:
logging.error("Site not found: %s" % address)
return None
ws = self.getWebsocket(site)
ws.send(json.dumps({"cmd": cmd, "params": parameters, "id": 1}))
res_raw = ws.recv()
try:
res = json.loads(res_raw)
except Exception as err:
return {"error": "Invalid result: %s" % err, "res_raw": res_raw}
if "result" in res:
return res["result"]
else:
return res
    def importBundle(self, bundle):
        """CLI wrapper: delegate to the module-level importBundle() helper."""
        importBundle(bundle)
def getWebsocket(self, site):
import websocket
ws_address = "ws://%s:%s/Websocket?wrapper_key=%s" % (config.ui_ip, config.ui_port, site.settings["wrapper_key"])
logging.info("Connecting to %s" % ws_address)
ws = websocket.create_connection(ws_address)
return ws
    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", recursive=False):
        """Publish updated site content to peers.

        Prefers the already-running local client (via websocket); falls back to
        sitePublishFallback() with its own file server when none is reachable.
        With recursive=True every known content.json of the site is published.
        """
        from Site import SiteManager
        logging.info("Loading site...")
        site = SiteManager.site_manager.get(address)
        site.settings["serving"] = True # Serving the site even if its disabled
        if not recursive:
            inner_paths = [inner_path]
        else:
            # Publish every content.json the site knows about
            inner_paths = list(site.content_manager.contents.keys())
        try:
            ws = self.getWebsocket(site)
        except Exception as err:
            self.sitePublishFallback(site, peer_ip, peer_port, inner_paths, err)
        else:
            logging.info("Sending siteReload")
            self.siteCmd(address, "siteReload", inner_path)
            for inner_path in inner_paths:
                logging.info(f"Sending sitePublish for {inner_path}")
                self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
            logging.info("Done.")
            ws.close()
    def sitePublishFallback(self, site, peer_ip, peer_port, inner_paths, err):
        """Publish without a running local client by starting an own file server.

        Called from sitePublish() when the websocket connection failed (*err*).
        Pushes each path in *inner_paths* to peers, then keeps serving for up
        to 60s so peers can fetch the updated files.
        """
        if err is not None:
            logging.info(f"Can't connect to local websocket client: {err}")
        logging.info("Publish using fallback mechanism. "
                     "Note that there might be not enough time for peer discovery, "
                     "but you can specify target peer on command line.")
        logging.info("Creating FileServer....")
        file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity
        time.sleep(0.001)
        # Started fileserver
        file_server.portCheck()
        if peer_ip: # Announce ip specificed
            site.addPeer(peer_ip, peer_port)
        else: # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce() # Gather peers
        # NOTE(review): `published` keeps only the last path's peer count — the
        # serve-or-give-up decision below ignores earlier paths; confirm intended.
        for inner_path in inner_paths:
            published = site.publish(5, inner_path) # Push to peers
        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")
# Crypto commands
def cryptPrivatekeyToAddress(self, privatekey=None):
from Crypt import CryptBitcoin
if not privatekey: # If no privatekey in args then ask it now
import getpass
privatekey = getpass.getpass("Private key (input hidden):")
print(CryptBitcoin.privatekeyToAddress(privatekey))
def cryptSign(self, message, privatekey):
from Crypt import CryptBitcoin
print(CryptBitcoin.sign(message, privatekey))
def cryptVerify(self, message, sign, address):
from Crypt import CryptBitcoin
print(CryptBitcoin.verify(message, address, sign))
def cryptGetPrivatekey(self, master_seed, site_address_index=None):
from Crypt import CryptBitcoin
if len(master_seed) != 64:
logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed))
return False
privatekey = CryptBitcoin.hdPrivatekey(master_seed, site_address_index)
print("Requested private key: %s" % privatekey)
# Peer
    def peerPing(self, peer_ip, peer_port=None):
        """Connect to a peer, print TLS details, then ping it 5+5 times.

        Returns False when the connection cannot be established.
        """
        if not peer_port:
            peer_port = 15441
        logging.info("Opening a simple connection server")
        global file_server
        from Connection import ConnectionServer
        file_server = ConnectionServer("127.0.0.1", 1234)
        file_server.start(check_connections=False)
        from Crypt import CryptConnection
        CryptConnection.manager.loadCerts()
        from Peer import Peer
        logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
        s = time.time()
        peer = Peer(peer_ip, peer_port)
        peer.connect()
        if not peer.connection:
            print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error)
            return False
        # Print TLS diagnostics when the socket exposes them (varies by backend)
        if "shared_ciphers" in dir(peer.connection.sock):
            print("Shared ciphers:", peer.connection.sock.shared_ciphers())
        if "cipher" in dir(peer.connection.sock):
            print("Cipher:", peer.connection.sock.cipher()[0])
        if "version" in dir(peer.connection.sock):
            print("TLS version:", peer.connection.sock.version())
        print("Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error))
        for i in range(5):
            ping_delay = peer.ping()
            print("Response time: %.3fs" % ping_delay)
            time.sleep(1)
        peer.remove()
        # Fresh Peer object: measures ping latency including reconnection cost
        print("Reconnect test...")
        peer = Peer(peer_ip, peer_port)
        for i in range(5):
            ping_delay = peer.ping()
            print("Response time: %.3fs" % ping_delay)
            time.sleep(1)
def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False):
logging.info("Opening a simple connection server")
global file_server
from Connection import ConnectionServer
file_server = ConnectionServer("127.0.0.1", 1234)
file_server.start(check_connections=False)
from Crypt import CryptConnection
CryptConnection.manager.loadCerts()
from Peer import Peer
logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
peer = Peer(peer_ip, peer_port)
s = time.time()
if benchmark:
for i in range(10):
peer.getFile(site, filename),
print("Response time: %.3fs" % (time.time() - s))
input("Check memory")
else:
print(peer.getFile(site, filename).read())
def peerCmd(self, peer_ip, peer_port, cmd, parameters):
logging.info("Opening a simple connection server")
global file_server
from Connection import ConnectionServer
file_server = ConnectionServer()
file_server.start(check_connections=False)
from Crypt import CryptConnection
CryptConnection.manager.loadCerts()
from Peer import Peer
peer = Peer(peer_ip, peer_port)
import json
if parameters:
parameters = json.loads(parameters.replace("'", '"'))
else:
parameters = {}
try:
res = peer.request(cmd, parameters)
print(json.dumps(res, indent=2, ensure_ascii=False))
except Exception as err:
print("Unknown response (%s): %s" % (err, res))
def getConfig(self):
import json
print(json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False))
def test(self, test_name, *args, **kwargs):
import types
def funcToName(func_name):
test_name = func_name.replace("test", "")
return test_name[0].lower() + test_name[1:]
test_names = [funcToName(name) for name in dir(self) if name.startswith("test") and name != "test"]
if not test_name:
# No test specificed, list tests
print("\nNo test specified, possible tests:")
for test_name in test_names:
func_name = "test" + test_name[0].upper() + test_name[1:]
func = getattr(self, func_name)
if func.__doc__:
print("- %s: %s" % (test_name, func.__doc__.strip()))
else:
print("- %s" % test_name)
return None
# Run tests
func_name = "test" + test_name[0].upper() + test_name[1:]
if hasattr(self, func_name):
func = getattr(self, func_name)
print("- Running test: %s" % test_name, end="")
s = time.time()
ret = func(*args, **kwargs)
if type(ret) is types.GeneratorType:
for progress in ret:
print(progress, end="")
sys.stdout.flush()
print("\n* Test %s done in %.3fs" % (test_name, time.time() - s))
else:
print("Unknown test: %r (choose from: %s)" % (
test_name, test_names
))
from Actions import Actions
actions = Actions() actions = Actions()
# Starts here when running zeronet.py # Starts here when running zeronet.py
def start(): def start():
# Call function # Call function
action_kwargs = config.getActionArguments() action_kwargs = config.getActionArguments()

View file

@ -48,7 +48,7 @@ def _gitted(f):
return lambda *args, **kwargs: None return lambda *args, **kwargs: None
@_gitted @_gitted
def commit() -> str: def commit() -> Optional[str]:
"""Returns git revision, possibly suffixed with -dirty""" """Returns git revision, possibly suffixed with -dirty"""
dirty = '-dirty' if _repo.is_dirty() else '' dirty = '-dirty' if _repo.is_dirty() else ''
return f'{_repo.head.commit}{dirty}' return f'{_repo.head.commit}{dirty}'

114
src/util/argparseCompat.py Normal file
View file

@ -0,0 +1,114 @@
# This code is taken from CPython Lib/argparse.py and contains BooleanOptionalAction
# for use in py<3.9
# Author: Steven J. Bethard <steven.bethard@gmail.com>.
# New maintainer as of 29 August 2019: Raymond Hettinger <raymond.hettinger@gmail.com>
# Copyright © 2001-2024 Python Software Foundation. All rights reserved.
# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
# --------------------------------------------
#
# 1. This LICENSE AGREEMENT is between the Python Software Foundation
# ("PSF"), and the Individual or Organization ("Licensee") accessing and
# otherwise using this software ("Python") in source or binary form and
# its associated documentation.
#
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby
# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
# analyze, test, perform and/or display publicly, prepare derivative works,
# distribute, and otherwise use Python alone or in any derivative version,
# provided, however, that PSF's License Agreement and PSF's notice of copyright,
# i.e., "Copyright (c) 2001-2024 Python Software Foundation; All Rights Reserved"
# are retained in Python alone or in any derivative version prepared by Licensee.
#
# 3. In the event Licensee prepares a derivative work that is based on
# or incorporates Python or any part thereof, and wants to make
# the derivative work available to others as provided herein, then
# Licensee hereby agrees to include in any such work a brief summary of
# the changes made to Python.
#
# 4. PSF is making Python available to Licensee on an "AS IS"
# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
# INFRINGE ANY THIRD PARTY RIGHTS.
#
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
#
# 6. This License Agreement will automatically terminate upon a material
# breach of its terms and conditions.
#
# 7. Nothing in this License Agreement shall be deemed to create any
# relationship of agency, partnership, or joint venture between PSF and
# Licensee. This License Agreement does not grant permission to use PSF
# trademarks or trade name in a trademark sense to endorse or promote
# products or services of Licensee, or any third party.
#
# 8. By copying, installing or otherwise using Python, Licensee
# agrees to be bound by the terms and conditions of this License
# Agreement.
from argparse import Action
class BooleanOptionalAction(Action):
    """Backport of ``argparse.BooleanOptionalAction`` for Python < 3.9.

    For every long ``--flag`` option string a negated ``--no-flag`` twin is
    registered; parsing stores True for the plain form, False for the
    negated form.
    """

    def __init__(self,
                 option_strings,
                 dest,
                 default=None,
                 type=_deprecated_default,
                 choices=_deprecated_default,
                 required=False,
                 help=None,
                 metavar=_deprecated_default,
                 deprecated=False):

        # Register the "--no-..." twin of every long option string
        _option_strings = []
        for option_string in option_strings:
            _option_strings.append(option_string)
            if option_string.startswith('--'):
                option_string = '--no-' + option_string[2:]
                _option_strings.append(option_string)

        # We need `_deprecated` special value to ban explicit arguments that
        # match default value. Like:
        #   parser.add_argument('-f', action=BooleanOptionalAction, type=int)
        # (the _deprecated_default sentinel is defined at module level, above)
        for field_name in ('type', 'choices', 'metavar'):
            if locals()[field_name] is not _deprecated_default:
                import warnings
                # NOTE(review): warnings._deprecated only exists on 3.12+;
                # older interpreters would raise here — confirm acceptable.
                warnings._deprecated(
                    field_name,
                    "{name!r} is deprecated as of Python 3.12 and will be "
                    "removed in Python {remove}.",
                    remove=(3, 14))

        if type is _deprecated_default:
            type = None
        if choices is _deprecated_default:
            choices = None
        if metavar is _deprecated_default:
            metavar = None

        # argparse.Action only accepts the `deprecated` kwarg since Python
        # 3.13; passing it unconditionally breaks the very interpreters this
        # backport targets, so forward it only when explicitly requested.
        extra_kwargs = {'deprecated': deprecated} if deprecated else {}
        super().__init__(
            option_strings=_option_strings,
            dest=dest,
            nargs=0,
            default=default,
            type=type,
            choices=choices,
            required=required,
            help=help,
            metavar=metavar,
            **extra_kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        """Store True for ``--flag``, False for ``--no-flag``."""
        if option_string in self.option_strings:
            setattr(namespace, self.dest, not option_string.startswith('--no-'))

    def format_usage(self):
        return ' | '.join(self.option_strings)

View file

@ -14,3 +14,9 @@ else:
return s.removeprefix(prefix) return s.removeprefix(prefix)
def removesuffix(s, suffix, /): def removesuffix(s, suffix, /):
return s.removesuffix(suffix) return s.removesuffix(suffix)
import argparse
if not hasattr(argparse, 'BooleanOptionalAction'):
from .argparseCompat import BooleanOptionalAction
argparse.BooleanOptionalAction = BooleanOptionalAction

View file

@ -16,17 +16,17 @@ from Config import config
def atomicWrite(dest, content, mode="wb"): def atomicWrite(dest, content, mode="wb"):
try: try:
with open(dest + "-tmpnew", mode) as f: with open(f'{dest}-tmpnew', mode) as f:
f.write(content) f.write(content)
f.flush() f.flush()
os.fsync(f.fileno()) os.fsync(f.fileno())
if os.path.isfile(dest + "-tmpold"): # Previous incomplete write if os.path.isfile(f'{dest}-tmpold'): # Previous incomplete write
os.rename(dest + "-tmpold", dest + "-tmpold-%s" % time.time()) os.rename(f'{dest}-tmpold', f'{dest}-tmpold-{time.time()}')
if os.path.isfile(dest): # Rename old file to -tmpold if os.path.isfile(dest): # Rename old file to -tmpold
os.rename(dest, dest + "-tmpold") os.rename(dest, f'{dest}-tmpold')
os.rename(dest + "-tmpnew", dest) os.rename(f'{dest}-tmpnew', dest)
if os.path.isfile(dest + "-tmpold"): if os.path.isfile(f'{dest}-tmpold'):
os.unlink(dest + "-tmpold") # Remove old file os.unlink(f'{dest}-tmpold') # Remove old file
return True return True
except Exception as err: except Exception as err:
from Debug import Debug from Debug import Debug
@ -34,8 +34,8 @@ def atomicWrite(dest, content, mode="wb"):
"File %s write failed: %s, (%s) reverting..." % "File %s write failed: %s, (%s) reverting..." %
(dest, Debug.formatException(err), Debug.formatStack()) (dest, Debug.formatException(err), Debug.formatStack())
) )
if os.path.isfile(dest + "-tmpold") and not os.path.isfile(dest): if os.path.isfile(f'{dest}-tmpold') and not os.path.isfile(dest):
os.rename(dest + "-tmpold", dest) os.rename(f'{dest}-tmpold', dest)
return False return False
@ -85,7 +85,7 @@ def openLocked(path, mode="wb"):
def getFreeSpace(): def getFreeSpace():
free_space = -1 free_space = -1
if "statvfs" in dir(os): # Unix if "statvfs" in dir(os): # Unix
statvfs = os.statvfs(config.data_dir.encode("utf8")) statvfs = os.statvfs(str(config.data_dir).encode("utf8"))
free_space = statvfs.f_frsize * statvfs.f_bavail free_space = statvfs.f_frsize * statvfs.f_bavail
else: # Windows else: # Windows
try: try:
@ -111,7 +111,7 @@ def shellquote(*args):
if len(args) == 1: if len(args) == 1:
return '"%s"' % args[0].replace('"', "") return '"%s"' % args[0].replace('"', "")
else: else:
return tuple(['"%s"' % arg.replace('"', "") for arg in args]) return tuple(['"%s"' % str(arg).replace('"', "") for arg in args])
def packPeers(peers): def packPeers(peers):

View file

@ -6,11 +6,18 @@ from src.Config import config
# fix further imports from src dir # fix further imports from src dir
sys.modules['Config'] = sys.modules['src.Config'] sys.modules['Config'] = sys.modules['src.Config']
def pyReq():
    """Enforce the minimum supported Python version (3.8+); warn below 3.11."""
    major = sys.version_info.major
    minor = sys.version_info.minor
    if major < 3 or (major == 3 and minor < 8):
        print("Error: Python 3.8+ is required")
        # Exit with a failure status (was 0) so wrappers/scripts detect the abort
        sys.exit(1)
    if major == 3 and minor < 11:
        print(f"Python 3.11+ is recommended (you're running {sys.version})")
def launch(): def launch():
'''renamed from main to avoid clashes with main module''' '''renamed from main to avoid clashes with main module'''
if sys.version_info.major < 3: pyReq()
print("Error: Python 3.x is required")
sys.exit(0)
if '--silent' not in sys.argv: if '--silent' not in sys.argv:
from greet import fancy_greet from greet import fancy_greet
@ -27,7 +34,7 @@ def launch():
except Exception as log_err: except Exception as log_err:
print("Failed to log error:", log_err) print("Failed to log error:", log_err)
traceback.print_exc() traceback.print_exc()
error_log_path = config.log_dir + "/error.log" error_log_path = config.log_dir / "error.log"
traceback.print_exc(file=open(error_log_path, "w")) traceback.print_exc(file=open(error_log_path, "w"))
print("---") print("---")
print("Please report it: https://github.com/zeronet-conservancy/zeronet-conservancy/issues/new?template=bug-report.md") print("Please report it: https://github.com/zeronet-conservancy/zeronet-conservancy/issues/new?template=bug-report.md")