Restructure main, split Actions.py, minor clean-up

caryoscelus 2024-04-04 13:08:21 +00:00
parent bbe577310c
commit b674737f99
GPG key ID: 254EDDB85B66CB1F
3 changed files with 591 additions and 596 deletions

src/Actions.py (new file, 521 additions)

@@ -0,0 +1,521 @@
import logging
import sys
import os
import time

import gevent

from Config import config
from Plugin import PluginManager


@PluginManager.acceptPlugins
class Actions:
    def call(self, function_name, kwargs):
        logging.info(f'zeronet-conservancy {config.version_full} on Python {sys.version} Gevent {gevent.__version__}')

        func = getattr(self, function_name, None)
        back = func(**kwargs)
        if back:
            print(back)

    def ipythonThread(self):
        import IPython
        IPython.embed()
        self.gevent_quit.set()

    # Default action: Start serving UiServer and FileServer
    def main(self):
        import main
        from File import FileServer
        from Ui import UiServer
        logging.info("Creating FileServer....")
        main.file_server = FileServer()
        logging.info("Creating UiServer....")
        main.ui_server = UiServer()
        main.file_server.ui_server = main.ui_server

        # for startup_error in startup_errors:
        #     logging.error("Startup error: %s" % startup_error)

        logging.info("Removing old SSL certs...")
        from Crypt import CryptConnection
        CryptConnection.manager.removeCerts()

        logging.info("Starting servers....")

        import threading
        self.gevent_quit = threading.Event()
        launched_greenlets = [gevent.spawn(main.ui_server.start), gevent.spawn(main.file_server.start), gevent.spawn(main.ui_server.startSiteServer)]

        # if --repl, start ipython thread
        # FIXME: Unfortunately this leads to exceptions on exit so use with care
        if config.repl:
            threading.Thread(target=self.ipythonThread).start()

        stopped = 0
        # Process all greenlets in main thread
        while not self.gevent_quit.is_set() and stopped < len(launched_greenlets):
            stopped += len(gevent.joinall(launched_greenlets, timeout=1))

        # Exited due to repl, so must kill greenlets
        if stopped < len(launched_greenlets):
            gevent.killall(launched_greenlets, exception=KeyboardInterrupt)

        logging.info("All servers stopped")

    # Site commands

    def siteCreate(self, use_master_seed=True):
        logging.info("Generating new privatekey (use_master_seed: %s)..." % config.use_master_seed)
        from Crypt import CryptBitcoin
        if use_master_seed:
            from User import UserManager
            user = UserManager.user_manager.get()
            if not user:
                user = UserManager.user_manager.create()
            address, address_index, site_data = user.getNewSiteData()
            privatekey = site_data["privatekey"]
            logging.info("Generated using master seed from users.json, site index: %s" % address_index)
        else:
            privatekey = CryptBitcoin.newPrivatekey()
            address = CryptBitcoin.privatekeyToAddress(privatekey)

        logging.info("----------------------------------------------------------------------")
        logging.info("Site private key: %s" % privatekey)
        logging.info(" !!! ^ Save it now, required to modify the site ^ !!!")
        logging.info("Site address: %s" % address)
        logging.info("----------------------------------------------------------------------")

        while not config.batch and not use_master_seed:
            if input("? Have you secured your private key? (yes, no) > ").lower() == "yes":
                break
            else:
                logging.info("Please, secure it now, you are going to need it to modify your site!")

        logging.info("Creating directory structure...")
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        os.mkdir("%s/%s" % (config.data_dir, address))
        open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address)

        logging.info("Creating content.json...")
        site = Site(address)
        extend = {"postmessage_nonce_security": True}
        if use_master_seed:
            extend["address_index"] = address_index
        site.content_manager.sign(privatekey=privatekey, extend=extend)
        site.settings["own"] = True
        site.saveSettings()

        logging.info("Site created!")

    def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False, remove_missing_optional=False):
        from Site.Site import Site
        from Site import SiteManager
        from Debug import Debug
        SiteManager.site_manager.load()
        logging.info("Signing site: %s..." % address)
        site = Site(address, allow_create=False)

        if not privatekey:  # If no privatekey defined
            from User import UserManager
            user = UserManager.user_manager.get()
            if user:
                site_data = user.getSiteData(address)
                privatekey = site_data.get("privatekey")
            else:
                privatekey = None
            if not privatekey:
                # Not found in users.json, ask from console
                import getpass
                privatekey = getpass.getpass("Private key (input hidden):")

        # inner_path can be either relative to site directory or absolute/relative path
        if os.path.isabs(inner_path):
            full_path = os.path.abspath(inner_path)
        else:
            full_path = os.path.abspath(config.working_dir + '/' + inner_path)
        print(full_path)
        if os.path.isfile(full_path):
            if address in full_path:
                # assuming site address is unique, keep only path after it
                inner_path = full_path.split(address + '/')[1]
            else:
                # oops, file that we found seems to be rogue, so reverting to old behaviour
                logging.warning(f'using {inner_path} relative to site directory')

        try:
            succ = site.content_manager.sign(
                inner_path=inner_path, privatekey=privatekey,
                update_changed_files=True, remove_missing_optional=remove_missing_optional
            )
        except Exception as err:
            logging.error("Sign error: %s" % Debug.formatException(err))
            succ = False
        if succ and publish:
            self.sitePublish(address, inner_path=inner_path)

    def siteVerify(self, address):
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        s = time.time()
        logging.info("Verifying site: %s..." % address)
        site = Site(address)
        bad_files = []

        for content_inner_path in site.content_manager.contents:
            s = time.time()
            logging.info("Verifying %s signature..." % content_inner_path)
            error = None
            try:
                file_correct = site.content_manager.verifyFile(
                    content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
                )
            except Exception as err:
                file_correct = False
                error = err

            if file_correct is True:
                logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s))
            else:
                logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, error))
                input("Continue?")
                bad_files += content_inner_path

        logging.info("Verifying site files...")
        bad_files += site.storage.verifyFiles()["bad_files"]
        if not bad_files:
            logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
        else:
            logging.error("[ERROR] Error during verifying site files!")

    def dbRebuild(self, address):
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        logging.info("Rebuilding site sql cache: %s..." % address)
        site = SiteManager.site_manager.get(address)
        s = time.time()
        try:
            site.storage.rebuildDb()
            logging.info("Done in %.3fs" % (time.time() - s))
        except Exception as err:
            logging.error(err)

    def dbQuery(self, address, query):
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        import json
        site = Site(address)
        result = []
        for row in site.storage.query(query):
            result.append(dict(row))
        print(json.dumps(result, indent=4))

    def siteAnnounce(self, address):
        import main
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        logging.info("Opening a simple connection server")
        from File import FileServer
        main.file_server = FileServer("127.0.0.1", 1234)
        main.file_server.start()

        logging.info("Announcing site %s to tracker..." % address)
        site = Site(address)

        s = time.time()
        site.announce()
        print("Response time: %.3fs" % (time.time() - s))
        print(site.peers)

    def siteDownload(self, address):
        import main
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        logging.info("Opening a simple connection server")
        from File import FileServer
        main.file_server = FileServer("127.0.0.1", 1234)
        file_server_thread = gevent.spawn(main.file_server.start, check_sites=False)

        site = Site(address)
        on_completed = gevent.event.AsyncResult()

        def onComplete(evt):
            evt.set(True)

        site.onComplete.once(lambda: onComplete(on_completed))
        print("Announcing...")
        site.announce()

        s = time.time()
        print("Downloading...")
        site.downloadContent("content.json", check_modifications=True)
        print("Downloaded in %.3fs" % (time.time() - s))

    def siteNeedFile(self, address, inner_path):
        import main
        from Site.Site import Site
        from Site import SiteManager
        SiteManager.site_manager.load()

        def checker():
            while 1:
                s = time.time()
                time.sleep(1)
                print("Switch time:", time.time() - s)
        gevent.spawn(checker)

        logging.info("Opening a simple connection server")
        from File import FileServer
        main.file_server = FileServer("127.0.0.1", 1234)
        file_server_thread = gevent.spawn(main.file_server.start, check_sites=False)

        site = Site(address)
        site.announce()
        print(site.needFile(inner_path, update=True))

    def siteCmd(self, address, cmd, parameters):
        import json
        from Site import SiteManager
        site = SiteManager.site_manager.get(address)

        if not site:
            logging.error("Site not found: %s" % address)
            return None

        ws = self.getWebsocket(site)
        ws.send(json.dumps({"cmd": cmd, "params": parameters, "id": 1}))
        res_raw = ws.recv()

        try:
            res = json.loads(res_raw)
        except Exception as err:
            return {"error": "Invalid result: %s" % err, "res_raw": res_raw}

        if "result" in res:
            return res["result"]
        else:
            return res

    def importBundle(self, bundle):
        import main
        main.importBundle(bundle)

    def getWebsocket(self, site):
        import websocket

        ws_address = "ws://%s:%s/Websocket?wrapper_key=%s" % (config.ui_ip, config.ui_port, site.settings["wrapper_key"])
        logging.info("Connecting to %s" % ws_address)
        ws = websocket.create_connection(ws_address)
        return ws

    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", recursive=False):
        from Site import SiteManager
        logging.info("Loading site...")
        site = SiteManager.site_manager.get(address)
        site.settings["serving"] = True  # Serving the site even if it's disabled

        if not recursive:
            inner_paths = [inner_path]
        else:
            inner_paths = list(site.content_manager.contents.keys())

        try:
            ws = self.getWebsocket(site)
        except Exception as err:
            self.sitePublishFallback(site, peer_ip, peer_port, inner_paths, err)
        else:
            logging.info("Sending siteReload")
            self.siteCmd(address, "siteReload", inner_path)

            for inner_path in inner_paths:
                logging.info(f"Sending sitePublish for {inner_path}")
                self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
            logging.info("Done.")
            ws.close()

    def sitePublishFallback(self, site, peer_ip, peer_port, inner_paths, err):
        import main
        if err is not None:
            logging.info(f"Can't connect to local websocket client: {err}")
        logging.info("Publish using fallback mechanism. "
                     "Note that there might be not enough time for peer discovery, "
                     "but you can specify target peer on command line.")
        logging.info("Creating FileServer....")
        file_server_thread = gevent.spawn(main.file_server.start, check_sites=False)  # Don't check every site's integrity
        time.sleep(0.001)

        # Started fileserver
        main.file_server.portCheck()
        if peer_ip:  # Announce ip specified
            site.addPeer(peer_ip, peer_port)
        else:  # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce()  # Gather peers

        for inner_path in inner_paths:
            published = site.publish(5, inner_path)  # Push to peers

        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found, sitePublish command only works if you already have visitors serving your site")

    # Crypto commands

    def cryptPrivatekeyToAddress(self, privatekey=None):
        from Crypt import CryptBitcoin
        if not privatekey:  # If no privatekey in args then ask it now
            import getpass
            privatekey = getpass.getpass("Private key (input hidden):")
        print(CryptBitcoin.privatekeyToAddress(privatekey))

    def cryptSign(self, message, privatekey):
        from Crypt import CryptBitcoin
        print(CryptBitcoin.sign(message, privatekey))

    def cryptVerify(self, message, sign, address):
        from Crypt import CryptBitcoin
        print(CryptBitcoin.verify(message, address, sign))

    def cryptGetPrivatekey(self, master_seed, site_address_index=None):
        from Crypt import CryptBitcoin
        if len(master_seed) != 64:
            logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed))
            return False
        privatekey = CryptBitcoin.hdPrivatekey(master_seed, site_address_index)
        print("Requested private key: %s" % privatekey)

    # Peer

    def peerPing(self, peer_ip, peer_port=None):
        import main
        if not peer_port:
            peer_port = 15441
        logging.info("Opening a simple connection server")
        from Connection import ConnectionServer
        main.file_server = ConnectionServer("127.0.0.1", 1234)
        main.file_server.start(check_connections=False)
        from Crypt import CryptConnection
        CryptConnection.manager.loadCerts()

        from Peer import Peer
        logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
        s = time.time()
        peer = Peer(peer_ip, peer_port)
        peer.connect()
        if not peer.connection:
            print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error)
            return False

        if "shared_ciphers" in dir(peer.connection.sock):
            print("Shared ciphers:", peer.connection.sock.shared_ciphers())
        if "cipher" in dir(peer.connection.sock):
            print("Cipher:", peer.connection.sock.cipher()[0])
        if "version" in dir(peer.connection.sock):
            print("TLS version:", peer.connection.sock.version())
        print("Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error))

        for i in range(5):
            ping_delay = peer.ping()
            print("Response time: %.3fs" % ping_delay)
            time.sleep(1)
        peer.remove()

        print("Reconnect test...")
        peer = Peer(peer_ip, peer_port)
        for i in range(5):
            ping_delay = peer.ping()
            print("Response time: %.3fs" % ping_delay)
            time.sleep(1)

    def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False):
        import main
        logging.info("Opening a simple connection server")
        from Connection import ConnectionServer
        main.file_server = ConnectionServer("127.0.0.1", 1234)
        main.file_server.start(check_connections=False)
        from Crypt import CryptConnection
        CryptConnection.manager.loadCerts()

        from Peer import Peer
        logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
        peer = Peer(peer_ip, peer_port)
        s = time.time()
        if benchmark:
            for i in range(10):
                peer.getFile(site, filename)
            print("Response time: %.3fs" % (time.time() - s))
            input("Check memory")
        else:
            print(peer.getFile(site, filename).read())

    def peerCmd(self, peer_ip, peer_port, cmd, parameters):
        import main
        logging.info("Opening a simple connection server")
        from Connection import ConnectionServer
        main.file_server = ConnectionServer()
        main.file_server.start(check_connections=False)
        from Crypt import CryptConnection
        CryptConnection.manager.loadCerts()

        from Peer import Peer
        peer = Peer(peer_ip, peer_port)

        import json
        if parameters:
            parameters = json.loads(parameters.replace("'", '"'))
        else:
            parameters = {}

        try:
            res = peer.request(cmd, parameters)
            print(json.dumps(res, indent=2, ensure_ascii=False))
        except Exception as err:
            print("Unknown response (%s): %s" % (err, res))

    def getConfig(self):
        import json
        print(json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False))

    def test(self, test_name, *args, **kwargs):
        import types

        def funcToName(func_name):
            test_name = func_name.replace("test", "")
            return test_name[0].lower() + test_name[1:]

        test_names = [funcToName(name) for name in dir(self) if name.startswith("test") and name != "test"]
        if not test_name:
            # No test specified, list tests
            print("\nNo test specified, possible tests:")
            for test_name in test_names:
                func_name = "test" + test_name[0].upper() + test_name[1:]
                func = getattr(self, func_name)
                if func.__doc__:
                    print("- %s: %s" % (test_name, func.__doc__.strip()))
                else:
                    print("- %s" % test_name)
            return None

        # Run tests
        func_name = "test" + test_name[0].upper() + test_name[1:]
        if hasattr(self, func_name):
            func = getattr(self, func_name)
            print("- Running test: %s" % test_name, end="")
            s = time.time()
            ret = func(*args, **kwargs)
            if type(ret) is types.GeneratorType:
                for progress in ret:
                    print(progress, end="")
                    sys.stdout.flush()
            print("\n* Test %s done in %.3fs" % (test_name, time.time() - s))
        else:
            print("Unknown test: %r (choose from: %s)" % (
                test_name, test_names
            ))

src/Config.py

@@ -96,7 +96,7 @@ class Config:
             print(f' you have to run it with --start-dir "{home_zn}" option')
         if platform.system() == 'Linux':
-            # XDG!
+            # TODO: XDG!
             return os.path.expanduser('~/.local/zeronet-conservancy')
         if platform.system() == 'Darwin':
@@ -434,14 +434,11 @@ class Config:
         self.parseCommandline(argv, silent)  # Parse argv
         self.setAttributes()
-        print('Parsed command line once')
-        print(self.arguments)

         if parse_config:
             argv = self.parseConfig(argv)  # Add arguments from config file
             self.parseCommandline(argv, silent)  # Parse argv
             self.setAttributes()
-        print('Parsed command line twice')

         if not silent:
             if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local:
@@ -454,7 +451,6 @@ class Config:
         current_parser.exit = original_exit
         self.loadTrackersFile()
-        print('Parse done')

     def fixArgs(self, args):
         "Fix old-style flags and issue a warning"
@@ -490,9 +486,7 @@ class Config:
             else:
                 self.arguments = {}
         else:
-            print('Parsing again')
             self.arguments = self.parser.parse_args(argv[1:])
-            print('Parsed thrice')

         if self.arguments.ui_site_port is None:
             self.arguments.ui_site_port = self.arguments.ui_port + 1

src/main.py

@@ -6,6 +6,8 @@ import logging
 from util.compat import *
 from pathlib import Path

+from rich import print
+
 startup_errors = []
 def startupError(msg):
     startup_errors.append(msg)
@@ -27,8 +29,6 @@ def load_config():
         # Config parse failed completely, show the help screen and exit
         config.parse()

-load_config()
-
 def importBundle(bundle):
     from zipfile import ZipFile
     from Crypt.CryptBitcoin import isValidAddress
@@ -99,617 +99,97 @@ def init_dirs():
         with open(users_json, "w") as f:
             f.write("{}")

-# TODO: GET RID OF TOP-LEVEL CODE!!!
-config.initConsoleLogger()
-
-try:
-    init_dirs()
-except:
-    import traceback as tb
-    print(tb.format_exc())
-    # at least make sure to print help if we're otherwise so helpless
-    config.parser.print_help()
-    sys.exit(1)
-
-if config.action == "main":
-    from util import helper
-    try:
-        lock = helper.openLocked(f"{config.data_dir}/lock.pid", "w")
-        lock.write(f"{os.getpid()}")
-    except BlockingIOError as err:
-        startupError(f"Can't open lock file, your 0net client is probably already running, exiting... ({err})")
-        proc = helper.openBrowser(config.open_browser)
-        r = proc.wait()
-        sys.exit(r)
-
-config.initLogging(console_logging=False)
-
-# Debug dependent configuration
-from Debug import DebugHook
-from Plugin import PluginManager
-
 def load_plugins():
+    from Plugin import PluginManager
     PluginManager.plugin_manager.loadPlugins()
     config.loadPlugins()
     config.parse()  # Parse again to add plugin configuration options

-load_plugins()
-
-# Log current config
-logging.debug("Config: %s" % config)
-
-# Modify stack size on special hardwares
-if config.stack_size:
-    import threading
-    threading.stack_size(config.stack_size)
-
-# Use pure-python implementation of msgpack to save CPU
-if config.msgpack_purepython:
-    os.environ["MSGPACK_PUREPYTHON"] = "True"
-
-# Fix console encoding on Windows
-# TODO: check if this is still required
-if sys.platform.startswith("win"):
-    import subprocess
-    try:
-        chcp_res = subprocess.check_output("chcp 65001", shell=True).decode(errors="ignore").strip()
-        logging.debug("Changed console encoding to utf8: %s" % chcp_res)
-    except Exception as err:
-        logging.error("Error changing console encoding to utf8: %s" % err)
-
-# Socket monkey patch
-if config.proxy:
-    from util import SocksProxy
-    import urllib.request
-    logging.info("Patching sockets to socks proxy: %s" % config.proxy)
-    if config.fileserver_ip == "*":
-        config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
-    config.disable_udp = True  # UDP not supported currently with proxy
-    SocksProxy.monkeyPatch(*config.proxy.split(":"))
-elif config.tor == "always":
-    from util import SocksProxy
-    import urllib.request
-    logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy)
-    if config.fileserver_ip == "*":
-        config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
-    SocksProxy.monkeyPatch(*config.tor_proxy_split())
-    config.disable_udp = True
-elif config.bind:
-    bind = config.bind
-    if ":" not in config.bind:
-        bind += ":0"
-    from util import helper
-    helper.socketBindMonkeyPatch(*bind.split(":"))
-
-# -- Actions --
-
-@PluginManager.acceptPlugins
-class Actions:
-    # ... (~450 lines removed here: the old in-main.py copy of the Actions class, moved
-    # essentially unchanged to src/Actions.py above; the old copy referenced the module-level
-    # file_server / ui_server globals instead of main.file_server / main.ui_server)
-
+def init():
+    load_config()
+    config.initConsoleLogger()
+
+    try:
+        print(config.start_dir)
+        init_dirs()
+    except:
+        import traceback as tb
+        print(tb.format_exc())
+        # at least make sure to print help if we're otherwise so helpless
+        # config.parser.print_help()
+        sys.exit(1)
+
+    if config.action == "main":
+        from util import helper
+        try:
+            lock = helper.openLocked(f"{config.data_dir}/lock.pid", "w")
+            lock.write(f"{os.getpid()}")
+        except BlockingIOError as err:
+            startupError(f"Can't open lock file, your 0net client is probably already running, exiting... ({err})")
+            proc = helper.openBrowser(config.open_browser)
+            r = proc.wait()
+            sys.exit(r)
+
+    config.initLogging(console_logging=False)
+
+    # Debug dependent configuration
+    from Debug import DebugHook
+
+    load_plugins()
+
+    # Log current config
+    logging.debug("Config: %s" % config)
+
+    # Modify stack size on special hardwares
+    if config.stack_size:
+        import threading
+        threading.stack_size(config.stack_size)
+
+    # Use pure-python implementation of msgpack to save CPU
+    if config.msgpack_purepython:
+        os.environ["MSGPACK_PUREPYTHON"] = "True"
+
+    # Fix console encoding on Windows
+    # TODO: check if this is still required
+    if sys.platform.startswith("win"):
+        import subprocess
+        try:
+            chcp_res = subprocess.check_output("chcp 65001", shell=True).decode(errors="ignore").strip()
+            logging.debug("Changed console encoding to utf8: %s" % chcp_res)
+        except Exception as err:
+            logging.error("Error changing console encoding to utf8: %s" % err)
+
+    # Socket monkey patch
+    if config.proxy:
+        from util import SocksProxy
+        import urllib.request
+        logging.info("Patching sockets to socks proxy: %s" % config.proxy)
+        if config.fileserver_ip == "*":
+            config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
+        config.disable_udp = True  # UDP not supported currently with proxy
+        SocksProxy.monkeyPatch(*config.proxy.split(":"))
+    elif config.tor == "always":
+        from util import SocksProxy
+        import urllib.request
+        logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy)
+        if config.fileserver_ip == "*":
+            config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
+        SocksProxy.monkeyPatch(*config.tor_proxy_split())
+        config.disable_udp = True
+    elif config.bind:
+        bind = config.bind
+        if ":" not in config.bind:
+            bind += ":0"
+        from util import helper
+        helper.socketBindMonkeyPatch(*bind.split(":"))
+
+init()
+
+from Actions import Actions
+
 actions = Actions()

 # Starts here when running zeronet.py
 def start():
     # Call function
     action_kwargs = config.getActionArguments()