Save content.json of site even if limit size is reached (#2114)

* fix #2107; Still save the content.json received even if the site size limit is reached, but don't download files; Allow better distribution of the latest version of content.json

* Added test

* Fix test for huge content file (now it fails)

* Don't download huge content.json file and update test

* Remove comments
This commit is contained in:
Lola Dam 2019-09-10 18:18:21 +02:00 committed by ZeroNet
parent deec2e62ce
commit 0738964e64
3 changed files with 67 additions and 9 deletions

View file

@ -858,15 +858,16 @@ class ContentManager(object):
if content.get("inner_path") and content["inner_path"] != inner_path:
raise VerifyError("Wrong inner_path: %s" % content["inner_path"])
# Check total site size limit
if site_size > site_size_limit:
if inner_path == "content.json" and self.site.settings["size"] == 0:
# First content.json download, save site size to display warning
# If our content.json file bigger than the size limit throw error
if inner_path == "content.json":
content_size_file = len(json.dumps(content, indent=1))
if content_size_file > site_size_limit:
# Save site size to display warning
self.site.settings["size"] = site_size
task = self.site.worker_manager.findTask(inner_path)
if task: # Dont try to download from other peers
self.site.worker_manager.failTask(task)
raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit))
task = self.site.worker_manager.findTask(inner_path)
if task: # Dont try to download from other peers
self.site.worker_manager.failTask(task)
raise VerifyError("Content too large %s B > %s B, aborting task..." % (site_size, site_size_limit))
# Verify valid filenames
for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):