Fix Tests

canewsin 2022-01-06 15:07:17 +05:30
parent 59f7d3221f
commit 36adb63f61
13 changed files with 58 additions and 58 deletions
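
The change applied throughout this commit is a single pattern: every direct read of a sites dict attribute (file_server.sites, self.server.sites, self.sites, and so on) is replaced by a call to the corresponding getSites() accessor. As a rough illustration only (a minimal sketch assuming getSites() is a simple lazy accessor over the same dict; the class body and addresses below are illustrative, not the real ZeroNet code), the style the call sites now use looks like this:

# Sketch of the accessor pattern adopted in this commit. Only the getSites()
# name comes from the diff below; the rest is an illustrative assumption.
class SiteManager(object):
    def __init__(self):
        self.sites = None  # the container may not exist yet at first access

    def getSites(self):
        # Create the dict lazily and always return the same object, so callers
        # never need to know when or how it was initialized.
        if self.sites is None:
            self.sites = {}
        return self.sites


manager = SiteManager()
manager.getSites()["example_address"] = "example_site"  # register a site
assert "example_address" in manager.getSites()

Registering and looking up sites through the accessor instead of the raw attribute is presumably what keeps the tests working when the underlying dict is created lazily or replaced.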

@ -1 +1 @@
-Subproject commit 7bb1715ce030352ff46ddd43ad58b7ab90ef978b
+Subproject commit 9cadd0c69f5af06739cc34cc79dc650053a9b4bd

@ -82,7 +82,7 @@ class ContentDb(Db):
return schema
def initSite(self, site):
-self.sites[site.address] = site
+self.getSites()[site.address] = site
def needSite(self, site):
if site.address not in self.site_ids:
@ -97,7 +97,7 @@ class ContentDb(Db):
if site_id:
self.execute("DELETE FROM site WHERE site_id = :site_id", {"site_id": site_id})
del self.site_ids[site.address]
-del self.sites[site.address]
+del self.getSites()[site.address]
def setContent(self, site, inner_path, content, size=0):
self.insertOrUpdate("content", {

@ -33,7 +33,7 @@ class FileRequest(object):
self.connection = connection
self.req_id = None
-self.sites = self.server.sites
+self.sites = self.server.getSites()
self.log = server.log
self.responded = False # Responded to the request

@ -290,7 +290,7 @@ class Site(object):
if not SiteManager.site_manager.sites:
SiteManager.site_manager.sites = {}
if not SiteManager.site_manager.sites.get(self.address):
-SiteManager.site_manager.sites[self.address] = self
+SiteManager.site_manager.getSites()[self.address] = self
SiteManager.site_manager.load(False)
SiteManager.site_manager.saveDelayed()

@ -73,7 +73,7 @@ class SiteManager(object):
except Exception as err:
self.log.debug("Error loading site %s: %s" % (address, err))
continue
-self.sites[address] = site
+self.getSites()[address] = site
self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s))
added += 1
elif startup:
@ -88,7 +88,7 @@ class SiteManager(object):
if cleanup:
for address in list(self.sites.keys()):
if address not in address_found:
-del(self.sites[address])
+del(self.getSites()[address])
self.log.debug("Removed site: %s" % address)
# Remove orpan sites from contentdb
@ -106,7 +106,7 @@ class SiteManager(object):
if address in content_db.site_ids:
del content_db.site_ids[address]
if address in content_db.sites:
-del content_db.sites[address]
+del content_db.getSites()[address]
self.loaded = True
for address, settings in sites_need:
@ -203,7 +203,7 @@ class SiteManager(object):
self.log.debug("Added new site: %s" % address)
config.loadTrackersFile()
site = Site(address, settings=settings)
-self.sites[address] = site
+self.getSites()[address] = site
if not site.settings["serving"]: # Maybe it was deleted before
site.settings["serving"] = True
site.saveSettings()
@ -226,7 +226,7 @@ class SiteManager(object):
def delete(self, address):
self.sites_changed = int(time.time())
self.log.debug("Deleted site: %s" % address)
-del(self.sites[address])
+del(self.getSites()[address])
# Delete from sites.json
self.save()

@ -16,7 +16,7 @@ class TestFileRequest:
client = ConnectionServer(file_server.ip, 1545)
connection = client.getConnection(file_server.ip, 1544)
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Normal request
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
@ -61,7 +61,7 @@ class TestFileRequest:
file_server.ip_incoming = {} # Reset flood protection
client = ConnectionServer(file_server.ip, 1545)
connection = client.getConnection(file_server.ip, 1544)
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
buff = io.BytesIO()
response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
@ -89,7 +89,7 @@ class TestFileRequest:
client.stop()
def testPex(self, file_server, site, site_temp):
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client

@ -13,7 +13,7 @@ from . import Spy
@pytest.mark.usefixtures("resetTempSettings")
class TestPeer:
def testPing(self, file_server, site, site_temp):
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
@ -32,7 +32,7 @@ class TestPeer:
client.stop()
def testDownloadFile(self, file_server, site, site_temp):
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
@ -77,11 +77,11 @@ class TestPeer:
def testHashfieldExchange(self, file_server, site, site_temp):
server1 = file_server
-server1.sites[site.address] = site
+server1.getSites()[site.address] = site
site.connection_server = server1
server2 = FileServer(file_server.ip, 1545)
-server2.sites[site_temp.address] = site_temp
+server2.getSites()[site_temp.address] = site_temp
site_temp.connection_server = server2
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
@ -127,7 +127,7 @@ class TestPeer:
server2.stop()
def testFindHash(self, file_server, site, site_temp):
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client

@ -23,7 +23,7 @@ class TestSiteDownload:
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
@ -74,7 +74,7 @@ class TestSiteDownload:
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
@ -130,7 +130,7 @@ class TestSiteDownload:
def testArchivedDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
@ -178,7 +178,7 @@ class TestSiteDownload:
def testArchivedBeforeDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
@ -229,7 +229,7 @@ class TestSiteDownload:
def testOptionalDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init client server
client = ConnectionServer(file_server.ip, 1545)
@ -271,7 +271,7 @@ class TestSiteDownload:
def testFindOptional(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init full source server (has optional files)
site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
@ -284,7 +284,7 @@ class TestSiteDownload:
gevent.spawn(listen)
time.sleep(0.001) # Port opening
-file_server_full.sites[site_full.address] = site_full # Add site
+file_server_full.getSites()[site_full.address] = site_full # Add site
site_full.storage.verifyFiles(quick_check=True) # Check optional files
site_full_peer = site.addPeer(file_server.ip, 1546) # Add it to source server
hashfield = site_full_peer.updateHashfield() # Update hashfield
@ -342,7 +342,7 @@ class TestSiteDownload:
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
@ -423,7 +423,7 @@ class TestSiteDownload:
def testBigUpdate(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
@ -476,7 +476,7 @@ class TestSiteDownload:
def testHugeContentSiteUpdate(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
@ -524,7 +524,7 @@ class TestSiteDownload:
# Init source server
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)

@ -75,7 +75,7 @@ class TestTor:
assert file_server.getConnection(address + ".onion", 1544, site=site) != file_server.getConnection(address + ".onion", 1544, site=site_temp)
# Only allow to query from the locked site
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
connection_locked = file_server.getConnection(address + ".onion", 1544, site=site)
assert "body" in connection_locked.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
assert connection_locked.request("getFile", {"site": "1OTHERSITE", "inner_path": "content.json", "location": 0})["error"] == "Invalid site"
@ -83,11 +83,11 @@ class TestTor:
def testPex(self, file_server, site, site_temp):
# Register site to currently running fileserver
site.connection_server = file_server
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
# Create a new file server to emulate new peer connecting to our peer
file_server_temp = FileServer(file_server.ip, 1545)
site_temp.connection_server = file_server_temp
-file_server_temp.sites[site_temp.address] = site_temp
+file_server_temp.getSites()[site_temp.address] = site_temp
# We will request peers from this
peer_source = site_temp.addPeer(file_server.ip, 1544)
@ -113,7 +113,7 @@ class TestTor:
def testFindHash(self, tor_manager, file_server, site, site_temp):
file_server.ip_incoming = {} # Reset flood protection
-file_server.sites[site.address] = site
+file_server.getSites()[site.address] = site
file_server.tor_manager = tor_manager
client = FileServer(file_server.ip, 1545)

@ -219,7 +219,7 @@ def site(request):
site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") # Create new Site object to load content.json files
if not SiteManager.site_manager.sites:
SiteManager.site_manager.sites = {}
-SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site
+SiteManager.site_manager.getSites()["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site
site.settings["serving"] = True
return site

@ -643,7 +643,7 @@ class UiRequest(object):
if (config.debug or config.merge_media) and file_path.split("/")[-1].startswith("all."):
# If debugging merge *.css to all.css and *.js to all.js
-site = self.server.sites.get(address)
+site = self.server.getSites().get(address)
if site and site.settings["own"]:
from Debug import DebugMedia
DebugMedia.merge(file_path)
@ -734,7 +734,7 @@ class UiRequest(object):
block = block.replace(b"{themeclass}", themeclass.encode("utf8"))
if path_parts:
-site = self.server.sites.get(path_parts.get("address"))
+site = self.server.getSites().get(path_parts.get("address"))
if site.settings["own"]:
modified = int(time.time())
else:
@ -823,7 +823,7 @@ class UiRequest(object):
# Find site by wrapper_key
wrapper_key = self.get["wrapper_key"]
site = None
-for site_check in list(self.server.sites.values()):
+for site_check in list(self.server.getSites().values()):
if site_check.settings["wrapper_key"] == wrapper_key:
site = site_check
@ -841,7 +841,7 @@ class UiRequest(object):
self.server.websockets.append(ui_websocket)
ui_websocket.start()
self.server.websockets.remove(ui_websocket)
-for site_check in list(self.server.sites.values()):
+for site_check in list(self.server.getSites().values()):
# Remove websocket from every site (admin sites allowed to join other sites event channels)
if ui_websocket in site_check.websockets:
site_check.websockets.remove(ui_websocket)
@ -867,11 +867,11 @@ class UiRequest(object):
# Just raise an error to get console
def actionConsole(self):
import sys
-sites = self.server.sites
+sites = self.server.getSites()
main = sys.modules["main"]
def bench(code, times=100, init=None):
-sites = self.server.sites
+sites = self.server.getSites()
main = sys.modules["main"]
s = time.time()
if init:

@ -330,7 +330,7 @@ class UiWebsocket(object):
if not self.hasSitePermission(address, cmd=cmd):
return self.response(to, "No permission for site %s" % address)
req_self = copy.copy(self)
-req_self.site = self.server.sites.get(address)
+req_self.site = self.server.getSites().get(address)
req_self.hasCmdPermission = self.hasCmdPermission # Use the same permissions as current site
req_obj = super(UiWebsocket, req_self)
req = {"id": to, "cmd": cmd, "params": params}
@ -388,7 +388,7 @@ class UiWebsocket(object):
def actionAnnouncerStats(self, to):
back = {}
trackers = self.site.announcer.getTrackers()
-for site in list(self.server.sites.values()):
+for site in list(self.server.getSites().values()):
for tracker, stats in site.announcer.stats.items():
if tracker not in trackers:
continue
@ -894,7 +894,7 @@ class UiWebsocket(object):
@flag.admin
def actionSiteList(self, to, connecting_sites=False):
ret = []
-for site in list(self.server.sites.values()):
+for site in list(self.server.getSites().values()):
if not site.content_manager.contents.get("content.json") and not connecting_sites:
continue # Incomplete site
ret.append(self.formatSiteInfo(site, create_user=False)) # Dont generate the auth_address on listing
@ -906,7 +906,7 @@ class UiWebsocket(object):
if channel not in self.channels: # Add channel to channels
self.channels.append(channel)
-for site in list(self.server.sites.values()): # Add websocket to every channel
+for site in list(self.server.getSites().values()): # Add websocket to every channel
if self not in site.websockets:
site.websockets.append(self)
@ -918,7 +918,7 @@ class UiWebsocket(object):
site.update(announce=announce, check_files=check_files, verify_files=verify_files, since=since)
self.response(to, "Updated")
-site = self.server.sites.get(address)
+site = self.server.getSites().get(address)
if site and (site.address == self.site.address or "ADMIN" in self.site.settings["permissions"]):
if not site.settings["serving"]:
site.settings["serving"] = True
@ -931,7 +931,7 @@ class UiWebsocket(object):
# Pause site serving
@flag.admin
def actionSitePause(self, to, address):
-site = self.server.sites.get(address)
+site = self.server.getSites().get(address)
if site:
site.settings["serving"] = False
site.saveSettings()
@ -944,7 +944,7 @@ class UiWebsocket(object):
# Resume site serving
@flag.admin
def actionSiteResume(self, to, address):
-site = self.server.sites.get(address)
+site = self.server.getSites().get(address)
if site:
site.settings["serving"] = True
site.saveSettings()
@ -958,7 +958,7 @@ class UiWebsocket(object):
@flag.admin
@flag.no_multiuser
def actionSiteDelete(self, to, address):
-site = self.server.sites.get(address)
+site = self.server.getSites().get(address)
if site:
site.delete()
self.user.deleteSiteData(address)
@ -970,10 +970,10 @@ class UiWebsocket(object):
def cbSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True):
self.cmd("notification", ["info", _["Cloning site..."]])
-site = self.server.sites.get(address)
+site = self.server.getSites().get(address)
response = {}
if target_address:
-target_site = self.server.sites.get(target_address)
+target_site = self.server.getSites().get(target_address)
privatekey = self.user.getSiteData(target_site.address).get("privatekey")
site.clone(target_address, privatekey, root_inner_path=root_inner_path)
self.cmd("notification", ["done", _["Site source code upgraded!"]])
@ -999,11 +999,11 @@ class UiWebsocket(object):
self.response(to, {"error": "Not a site: %s" % address})
return
-if not self.server.sites.get(address):
+if not self.server.getSites().get(address):
# Don't expose site existence
return
-site = self.server.sites.get(address)
+site = self.server.getSites().get(address)
if site.bad_files:
for bad_inner_path in list(site.bad_files.keys()):
is_user_file = "cert_signers" in site.content_manager.getRules(bad_inner_path)

@ -61,13 +61,13 @@ class User(object):
s = time.time()
address_id = self.getAddressAuthIndex(address) # Convert site address to int
auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
-self.sites[address] = {
+self.getSites()[address] = {
"auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey),
"auth_privatekey": auth_privatekey
}
self.saveDelayed()
self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s))
-return self.sites[address]
+return self.getSites()[address]
# Get user site data
# Return: {"auth_address": "xxx", "auth_privatekey": "xxx"}
@ -76,11 +76,11 @@ class User(object):
if not create:
return {"auth_address": None, "auth_privatekey": None} # Dont create user yet
self.generateAuthAddress(address)
-return self.sites[address]
+return self.getSites()[address]
def deleteSiteData(self, address):
if address in self.sites:
-del(self.sites[address])
+del(self.getSites()[address])
self.saveDelayed()
self.log.debug("Deleted site: %s" % address)
@ -101,9 +101,9 @@ class User(object):
raise Exception("Random error: site exist!")
# Save to sites
self.getSiteData(site_address)
-self.sites[site_address]["privatekey"] = site_privatekey
+self.getSites()[site_address]["privatekey"] = site_privatekey
self.save()
-return site_address, bip32_index, self.sites[site_address]
+return site_address, bip32_index, self.getSites()[site_address]
# Get BIP32 address from site address
# Return: BIP32 auth address