Save content.json of site even if size limit is reached (#2114)
* Fix #2107: still save the received content.json even if the site size limit is reached, but don't download files; this allows better distribution of the latest version of content.json
* Added test
* Fix test for huge content file (now it fails)
* Don't download huge content.json file and update test
* Remove comments
parent deec2e62ce
commit 0738964e64
3 changed files with 67 additions and 9 deletions
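
In outline, the behaviour the commit message describes can be sketched as follows (function names here are illustrative stand-ins, not ZeroNet's API): a content.json that is itself larger than the site size limit is rejected, any other content.json is saved even when the site exceeds its limit, and file downloads are skipped until the site is back under budget.

import json

def save_content(content):
    pass  # stand-in for writing content.json to the site's storage

def download_files(content):
    pass  # stand-in for queueing the files listed in content.json

def handle_received_content(content, site_size, site_size_limit):
    # A content.json that is by itself bigger than the whole-site limit is refused
    if len(json.dumps(content, indent=1)) > site_size_limit:
        raise ValueError("content.json too large")
    save_content(content)            # always keep the newest content.json
    if site_size <= site_size_limit:
        download_files(content)      # skip file downloads while over the limit
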
@@ -858,15 +858,16 @@ class ContentManager(object):
         if content.get("inner_path") and content["inner_path"] != inner_path:
             raise VerifyError("Wrong inner_path: %s" % content["inner_path"])
 
-        # Check total site size limit
-        if site_size > site_size_limit:
-            if inner_path == "content.json" and self.site.settings["size"] == 0:
-                # First content.json download, save site size to display warning
-                self.site.settings["size"] = site_size
-            task = self.site.worker_manager.findTask(inner_path)
-            if task:  # Dont try to download from other peers
-                self.site.worker_manager.failTask(task)
-            raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit))
+        # If our content.json file bigger than the size limit throw error
+        if inner_path == "content.json":
+            content_size_file = len(json.dumps(content, indent=1))
+            if content_size_file > site_size_limit:
+                # Save site size to display warning
+                self.site.settings["size"] = site_size
+                task = self.site.worker_manager.findTask(inner_path)
+                if task:  # Dont try to download from other peers
+                    self.site.worker_manager.failTask(task)
+                raise VerifyError("Content too large %s B > %s B, aborting task..." % (site_size, site_size_limit))
 
         # Verify valid filenames
         for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
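
The size checked above is the parsed content re-serialized with json.dumps(content, indent=1), not the raw network payload. A minimal, runnable illustration with a made-up content dict (all values here are placeholders):

import json

# Made-up parsed content.json payload, for illustration only
content = {
    "address": "1ExampleAddress",
    "files": {"index.html": {"sha512": "ab" * 32, "size": 1234}},
}

# Same measurement as in the hunk: the dict re-dumped with indent=1
content_size_file = len(json.dumps(content, indent=1))
print(content_size_file)
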
@@ -179,6 +179,15 @@ class Site(object):
         if peer:  # Update last received update from peer to prevent re-sending the same update to it
             peer.last_content_json_update = self.content_manager.contents[inner_path]["modified"]
 
+        # Verify size limit
+        if inner_path == "content.json":
+            site_size_limit = self.getSizeLimit() * 1024 * 1024
+            content_size = len(json.dumps(self.content_manager.contents[inner_path], indent=1)) + sum([file["size"] for file in list(self.content_manager.contents[inner_path].get("files", {}).values()) if file["size"] >= 0])  # Size of new content
+            if site_size_limit < content_size:
+                # Not enought don't download anything
+                self.log.debug("Size limit reached (site too big please increase limit): %.2f MB > %.2f MB" % (content_size / 1024 / 1024, site_size_limit / 1024 / 1024))
+                return False
+
         # Start download files
         file_threads = []
         if download_files:
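
A small worked example of the content_size computation added above, with a made-up contents entry (the file["size"] >= 0 filter from the hunk is kept, so negative sizes are excluded):

import json

# Made-up contents[inner_path] entry, for illustration only
content = {
    "files": {
        "index.html": {"sha512": "aa" * 32, "size": 1024},
        "css/all.css": {"sha512": "bb" * 32, "size": 2048},
    }
}

# Size of the new content = serialized content.json + declared sizes of its files
content_size = len(json.dumps(content, indent=1)) + sum(
    file["size"] for file in content.get("files", {}).values() if file["size"] >= 0
)
print(content_size)  # serialized-json length + 3072
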
@@ -720,6 +729,10 @@ class Site(object):
             return self.needFile(*args, **kwargs)
 
     def isFileDownloadAllowed(self, inner_path, file_info):
+        # Verify space for all site
+        if self.settings["size"] > self.getSizeLimit() * 1024 * 1024:
+            return False
+        # Verify space for file
         if file_info.get("size", 0) > config.file_size_limit * 1024 * 1024:
             self.log.debug(
                 "File size %s too large: %sMB > %sMB, skipping..." %
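
The hunk above is cut off inside the log call; below is a self-contained sketch of the complete two-stage check, with a standalone function and illustrative parameter names standing in for self.settings["size"], self.getSizeLimit() and config.file_size_limit:

def is_file_download_allowed(site_size, site_size_limit, file_info, file_size_limit_mb):
    # Stage 1: the site as a whole is already over its allowance, download nothing
    if site_size > site_size_limit:
        return False
    # Stage 2: this single file exceeds the global per-file cap (given in MB)
    if file_info.get("size", 0) > file_size_limit_mb * 1024 * 1024:
        print("File size too large: %.1fMB > %sMB, skipping..." % (
            file_info.get("size", 0) / 1024 / 1024, file_size_limit_mb
        ))
        return False
    return True

# Example: a 15 MB file is refused under a 10 MB per-file cap
assert not is_file_download_allowed(0, 10 * 1024 * 1024, {"size": 15 * 1024 * 1024}, 10)
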
@@ -422,7 +422,7 @@ class TestSiteDownload:
         client.sites[site_temp.address] = site_temp
         site_temp.connection_server = client
 
-        # Connect peers
+        # Connect peersself, file_server, site, site_temp
         site_temp.addPeer(file_server.ip, 1544)
 
         # Download site from site to site_temp
@@ -460,3 +460,47 @@ class TestSiteDownload:
         assert len(file_requests) == 1
 
         assert site_temp.storage.open("data/data.json").read() == data_new
+        assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
+
+    # Test what happened if the content.json of the site is bigger than the site limit
+    def testHugeContentSiteUpdate(self, file_server, site, site_temp):
+        # Init source server
+        site.connection_server = file_server
+        file_server.sites[site.address] = site
+
+        # Init client server
+        client = FileServer(file_server.ip, 1545)
+        client.sites[site_temp.address] = site_temp
+        site_temp.connection_server = client
+
+        # Connect peersself, file_server, site, site_temp
+        site_temp.addPeer(file_server.ip, 1544)
+
+        # Download site from site to site_temp
+        site_temp.download(blind_includes=True).join(timeout=5)
+
+        # Raise limit size to 20MB on site so it can be signed
+        site.settings["size_limit"] = int(20 * 1024 * 1024)
+        site.saveSettings()
+
+        content_json = site.storage.loadJson("content.json")
+        content_json["description"] = "PartirUnJour" * 1024 * 1024
+        site.storage.writeJson("content.json", content_json)
+        changed, deleted = site.content_manager.loadContent("content.json", force=True)
+
+        # Make sure we have 2 differents content.json
+        assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()
+
+        # Generate diff
+        diffs = site.content_manager.getDiffs("content.json")
+
+        # Publish with patch
+        site.log.info("Publish new content.json bigger than 10MB")
+        with Spy.Spy(FileRequest, "route") as requests:
+            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+            assert site.storage.getSize("content.json") > 10 * 1024 * 1024  # verify it over 10MB
+            site.publish(diffs=diffs)
+            site_temp.download(blind_includes=True).join(timeout=5)
+
+        assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
+        assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()