Test user file archiving

ZeroNet 2016-08-10 12:59:42 +02:00
parent 48d59e355c
commit 96f368c7e2

@@ -35,7 +35,6 @@ class TestSiteDownload:
         def boostRequest(inner_path):
             # I really want these file
             if inner_path == "index.html":
-                print "needFile"
                 site_temp.needFile("data/img/multiuser.png", priority=9, blocking=False)
                 site_temp.needFile("data/img/direct_domains.png", priority=10, blocking=False)
         site_temp.onFileDone.append(boostRequest)
@@ -58,6 +57,58 @@ class TestSiteDownload:
         assert site_temp.storage.deleteFiles()
         [connection.close() for connection in file_server.connections]
 
+    def testArchivedDownload(self, file_server, site, site_temp):
+        file_server.ip_incoming = {} # Reset flood protection
+
+        # Init source server
+        site.connection_server = file_server
+        file_server.sites[site.address] = site
+
+        # Init client server
+        client = FileServer("127.0.0.1", 1545)
+        client.sites[site_temp.address] = site_temp
+        site_temp.connection_server = client
+
+        # Don't try to find peers from the net
+        site.announce = mock.MagicMock(return_value=True)
+        site_temp.announce = mock.MagicMock(return_value=True)
+
+        # Download normally
+        site_temp.addPeer("127.0.0.1", 1544)
+        site_temp.download(blind_includes=True).join(timeout=5)
+        bad_files = site_temp.storage.verifyFiles(quick_check=True)
+        assert not bad_files
+        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
+        assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
+        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
+
+        # Add archived data
+        assert not "archived" in site.content_manager.contents["data/users/content.json"]["user_contents"]
+        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
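+
+        # Archiving is recorded in the parent data/users/content.json: user_contents["archived"]
+        # maps the user's directory name (auth address) to the time it was archived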
+        site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
+        site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+
+        date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
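+        # isArchived() is inclusive: content dated at or before the archive time counts as archived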
+        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
+        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
+        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
+
+        # Push archived update
+        assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
+        site.publish()
+        site_temp.download(blind_includes=True).join(timeout=5) # Wait for download
+
+        # The archived content should disappear from remote client
+        assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
+        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
+        assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
+        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
+        assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
+
+        assert site_temp.storage.deleteFiles()
+        [connection.close() for connection in file_server.connections]
+
     # Test when connected peer has the optional file
     def testOptionalDownload(self, file_server, site, site_temp):
         file_server.ip_incoming = {} # Reset flood protection