Make test compatible with new content.db
parent dae5cd3969
commit b8bf773c40
4 changed files with 53 additions and 14 deletions

@@ -102,6 +102,9 @@ class TestContent:
        )

    def testSignOptionalFiles(self, site):
        for hash in list(site.content_manager.hashfield):
            site.content_manager.hashfield.remove(hash)

        assert len(site.content_manager.hashfield) == 0

        site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))"

@@ -57,8 +57,6 @@ class TestPeer:
    def testHashfield(self, site):
        sample_hash = site.content_manager.contents["content.json"]["files_optional"].values()[0]["sha512"]

        assert not site.content_manager.hashfield

        site.storage.verifyFiles(quick_check=True)  # Find what optional files we have

        # Check if hashfield has any files

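For readers skimming the assertions above: the hashfield behaves like a small set of optional-file hashes with len(), remove() and hasHash(). The sketch below is illustrative only; SimpleHashfield and appendHash are hypothetical names, not ZeroNet's actual PeerHashfield implementation.

# Illustrative sketch only (hypothetical class, not the real PeerHashfield):
# a set-like container of truncated optional-file hashes supporting the calls
# the tests rely on: len(), remove() and hasHash().
class SimpleHashfield(set):
    def appendHash(self, sha512):
        self.add(sha512[:8])  # store only a short prefix to keep the field small

    def hasHash(self, sha512):
        return sha512[:8] in self

hashfield = SimpleHashfield()
hashfield.appendHash("0a1b2c3d4e5f6a7b")   # stand-in for a real sha512 digest
assert hashfield.hasHash("0a1b2c3d4e5f6a7b")
for h in list(hashfield):                   # same removal pattern as the tests
    hashfield.remove(h)
assert len(hashfield) == 0
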
@@ -69,14 +69,11 @@ class TestSiteDownload:
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client

        # Don't try to find peers from the net
        site.announce = mock.MagicMock(return_value=True)
        site_temp.announce = mock.MagicMock(return_value=True)

        # Download normally
        site_temp.addPeer("127.0.0.1", 1544)
        site_temp.download(blind_includes=True).join(timeout=5)
        bad_files = site_temp.storage.verifyFiles(quick_check=True)

        assert not bad_files
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
        assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")

@@ -171,6 +168,13 @@ class TestSiteDownload:
        site_full.storage.verifyFiles(quick_check=True)  # Check optional files
        site_full_peer = site.addPeer("127.0.0.1", 1546)  # Add it to source server
        assert site_full_peer.updateHashfield()  # Update hashfield
        assert site_full.storage.isFile("data/optional.txt")
        assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert len(site_full_peer.hashfield) == 8

        # Remove hashes from source server
        for hash in list(site.content_manager.hashfield):
            site.content_manager.hashfield.remove(hash)

        # Init client server
        site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)

@@ -178,15 +182,22 @@ class TestSiteDownload:
        site_temp.addPeer("127.0.0.1", 1544)  # Add source server

        # Download normal files
        site_temp.log.info("Start Downloading site")
        site_temp.download(blind_includes=True).join(timeout=5)

        # Download optional data/optional.txt
        optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
        optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert not site_temp.storage.isFile("data/optional.txt")
        assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source server doesn't know it has the file
        assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Source server doesn't know it has the file
        assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])  # Source full peer on source server has the file
        assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"])  # Source full peer on source server has the file
        assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source full server knows it has the file
        assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Source full server knows it has the file

        site_temp.log.info("Request optional files")
        with Spy.Spy(FileRequest, "route") as requests:
            # Request 2 files at the same time
            threads = []

@@ -238,10 +249,12 @@ class TestSiteDownload:
        assert site.storage.open("data/data.json").read() == data_new
        assert site_temp.storage.open("data/data.json").read() == data_original

        site.log.info("Publish new data.json without patch")
        # Publish without patch
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            site.publish()
            time.sleep(0.1)
            site_temp.download(blind_includes=True).join(timeout=5)
            assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 1

@@ -270,6 +283,7 @@ class TestSiteDownload:
        assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]

        # Publish with patch
        site.log.info("Publish new data.json with patch")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            site.publish(diffs=diffs)

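The diffs value asserted above is a list of edit operations. The sketch below shows roughly how such an op list could be applied to the old file content; apply_ops is a hypothetical helper, not ZeroNet's actual util.Diff code, and it assumes ('=', n) keeps n characters, ('-', n) drops n characters, and ('+', [lines]) inserts new lines.

# Hedged sketch: apply_ops is a hypothetical helper, not the project's real patcher.
def apply_ops(old, ops):
    out = []
    pos = 0
    for action, param in ops:
        if action == "=":    # keep `param` characters of the old content unchanged
            out.append(old[pos:pos + param])
            pos += param
        elif action == "-":  # drop `param` characters of the old content
            pos += param
        elif action == "+":  # insert the given new lines
            out.append("".join(param))
    return "".join(out)

old = "ab" + "x" * 29 + "tail"
new = apply_ops(old, [("=", 2), ("-", 29), ("+", ['\t"title": "PatchedZeroBlog",\n']), ("=", 4)])
assert new == 'ab\t"title": "PatchedZeroBlog",\ntail'
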
@@ -37,24 +37,36 @@ config.data_dir = "src/Test/testdata"  # Use test data for unittests
config.debug_socket = True  # Enable socket debug logging for unittests
config.verbose = True  # Enable verbose logging for unittests
config.tor = "disabled"  # Don't start Tor client
config.trackers = []

os.chdir(os.path.abspath(os.path.dirname(__file__) + "/../.."))  # Set working dir
# Cleanup content.db caches
if os.path.isfile("%s/content.db" % config.data_dir):
    os.unlink("%s/content.db" % config.data_dir)
if os.path.isfile("%s-temp/content.db" % config.data_dir):
    os.unlink("%s-temp/content.db" % config.data_dir)

import gevent
from gevent import monkey
monkey.patch_all(thread=False)

from Site import Site
from Site import SiteManager
from User import UserManager
from File import FileServer
from Connection import ConnectionServer
from Crypt import CryptConnection
from Ui import UiWebsocket
from Tor import TorManager
from Content import ContentDb
from util import RateLimit

# SiteManager.site_manager.load = mock.MagicMock(return_value=True)  # Don't try to load from sites.json
# SiteManager.site_manager.save = mock.MagicMock(return_value=True)  # Don't try to save to sites.json


@pytest.fixture(scope="session")
def resetSettings(request):
    os.chdir(os.path.abspath(os.path.dirname(__file__) + "/../.."))  # Set working dir
    open("%s/sites.json" % config.data_dir, "w").write("{}")
    open("%s/users.json" % config.data_dir, "w").write("""
        {

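The "Cleanup content.db caches" step above removes stale database files before the test session starts; the per-path cache that the fixtures later have to evict (ContentDb.content_dbs) can be pictured roughly as in the sketch below. getContentDb and closeContentDb are hypothetical names for illustration, not the real Content.ContentDb API.

# Rough sketch with hypothetical names: a module-level cache holding one open
# connection per content.db path. Tests must unlink the .db file *and* evict the
# cache entry, otherwise the next test keeps using a handle to a deleted database.
import sqlite3

content_dbs = {}  # db path -> open sqlite3 connection

def getContentDb(path):
    if path not in content_dbs:
        content_dbs[path] = sqlite3.connect(path)
    return content_dbs[path]

def closeContentDb(path):
    db = content_dbs.pop(path, None)
    if db:
        db.close()
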
@@ -66,12 +78,6 @@ def resetSettings(request):
        }
    """)

    def cleanup():
        os.unlink("%s/sites.json" % config.data_dir)
        os.unlink("%s/users.json" % config.data_dir)
    request.addfinalizer(cleanup)


@pytest.fixture(scope="session")
def resetTempSettings(request):
    data_dir_temp = config.data_dir + "-temp"

@@ -96,17 +102,30 @@ def resetTempSettings(request):

@pytest.fixture()
def site(request):
    # Reset ratelimit
    RateLimit.queue_db = {}
    RateLimit.called_db = {}

    site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")

    # Always use original data
    assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in site.storage.getPath("")  # Make sure we don't delete everything
    shutil.rmtree(site.storage.getPath(""), True)
    shutil.copytree(site.storage.getPath("") + "-original", site.storage.getPath(""))
    def cleanup():
        site.storage.deleteFiles()
        site.content_manager.contents.db.deleteSite("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        del SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]
        site.content_manager.contents.db.close()
        db_path = "%s/content.db" % config.data_dir
        os.unlink(db_path)
        del ContentDb.content_dbs[db_path]
    request.addfinalizer(cleanup)

    site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")  # Create new Site object to load content.json files
    if not SiteManager.site_manager.sites:
        SiteManager.site_manager.sites = {}
    SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site
    return site

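The cleanup order in the fixture above matters: the cached connection is closed before the file is unlinked, and the cache entry is dropped so the next test opens a fresh content.db. A minimal self-contained pytest sketch of that same teardown pattern follows; fresh_content_db and _db_cache are hypothetical names, not the project's fixtures.

# Minimal self-contained sketch of the teardown ordering (hypothetical names):
# close the cached handle, remove the file, evict the cache entry.
import os
import sqlite3
import pytest

_db_cache = {}  # db path -> open sqlite3 connection

@pytest.fixture()
def fresh_content_db(request, tmp_path):
    db_path = str(tmp_path / "content.db")
    db = _db_cache.setdefault(db_path, sqlite3.connect(db_path))

    def cleanup():
        _db_cache.pop(db_path).close()  # close and drop the cached connection
        if os.path.isfile(db_path):
            os.unlink(db_path)          # then remove the database file itself
    request.addfinalizer(cleanup)
    return db
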
@@ -117,6 +136,11 @@ def site_temp(request):

    def cleanup():
        site_temp.storage.deleteFiles()
        site_temp.content_manager.contents.db.deleteSite("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        site_temp.content_manager.contents.db.close()
        db_path = "%s-temp/content.db" % config.data_dir
        os.unlink(db_path)
        del ContentDb.content_dbs[db_path]
    request.addfinalizer(cleanup)
    return site_temp