diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py
index 7a1a8447..c1ec533c 100644
--- a/src/Content/ContentManager.py
+++ b/src/Content/ContentManager.py
@@ -858,15 +858,16 @@ class ContentManager(object):
         if content.get("inner_path") and content["inner_path"] != inner_path:
             raise VerifyError("Wrong inner_path: %s" % content["inner_path"])
 
-        # Check total site size limit
-        if site_size > site_size_limit:
-            if inner_path == "content.json" and self.site.settings["size"] == 0:
-                # First content.json download, save site size to display warning
+        # If our content.json file is bigger than the size limit, throw an error
+        if inner_path == "content.json":
+            content_size_file = len(json.dumps(content, indent=1))
+            if content_size_file > site_size_limit:
+                # Save site size to display warning
                 self.site.settings["size"] = site_size
-            task = self.site.worker_manager.findTask(inner_path)
-            if task:  # Dont try to download from other peers
-                self.site.worker_manager.failTask(task)
-            raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit))
+                task = self.site.worker_manager.findTask(inner_path)
+                if task:  # Don't try to download from other peers
+                    self.site.worker_manager.failTask(task)
+                raise VerifyError("Content too large %s B > %s B, aborting task..." % (site_size, site_size_limit))
 
         # Verify valid filenames
         for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
diff --git a/src/Site/Site.py b/src/Site/Site.py
index c08e067e..af6563e3 100644
--- a/src/Site/Site.py
+++ b/src/Site/Site.py
@@ -179,6 +179,15 @@ class Site(object):
         if peer:  # Update last received update from peer to prevent re-sending the same update to it
             peer.last_content_json_update = self.content_manager.contents[inner_path]["modified"]
 
+        # Verify size limit
+        if inner_path == "content.json":
+            site_size_limit = self.getSizeLimit() * 1024 * 1024
+            content_size = len(json.dumps(self.content_manager.contents[inner_path], indent=1)) + sum([file["size"] for file in list(self.content_manager.contents[inner_path].get("files", {}).values()) if file["size"] >= 0])  # Size of new content
+            if site_size_limit < content_size:
+                # Not enough space, don't download anything
+                self.log.debug("Size limit reached (site too big, please increase limit): %.2f MB > %.2f MB" % (content_size / 1024 / 1024, site_size_limit / 1024 / 1024))
+                return False
+
         # Start download files
         file_threads = []
         if download_files:
@@ -720,6 +729,10 @@ class Site(object):
         return self.needFile(*args, **kwargs)
 
     def isFileDownloadAllowed(self, inner_path, file_info):
+        # Verify space for the whole site
+        if self.settings["size"] > self.getSizeLimit() * 1024 * 1024:
+            return False
+        # Verify space for the file
         if file_info.get("size", 0) > config.file_size_limit * 1024 * 1024:
             self.log.debug(
                 "File size %s too large: %sMB > %sMB, skipping..." %
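
Taken together, the two changes guard the same limit at different points: ContentManager.verifyContent now rejects a content.json whose own serialized size already exceeds the site size limit, and Site.downloadContent refuses to start a download when the serialized content.json plus the file sizes it declares would exceed the limit. A minimal sketch of that second computation, using an illustrative helper name (exceeds_site_limit) and a plain dict standing in for the parsed content.json:

import json

def exceeds_site_limit(content, size_limit_mb):
    # Serialized size of content.json itself (same json.dumps(..., indent=1) the patch uses)
    content_json_size = len(json.dumps(content, indent=1))
    # Plus every non-negative file size declared in the manifest
    declared_size = sum(
        file_info["size"]
        for file_info in content.get("files", {}).values()
        if file_info["size"] >= 0
    )
    return content_json_size + declared_size > size_limit_mb * 1024 * 1024

# Example: a manifest declaring one 5 MB file against a 10 MB limit is still allowed
manifest = {"files": {"data.json": {"size": 5 * 1024 * 1024}}}
print(exceeds_site_limit(manifest, 10))  # False
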
diff --git a/src/Test/TestSiteDownload.py b/src/Test/TestSiteDownload.py
index 1d4ba4c1..fc7ba23a 100644
--- a/src/Test/TestSiteDownload.py
+++ b/src/Test/TestSiteDownload.py
@@ -460,3 +460,47 @@ class TestSiteDownload:
             assert len(file_requests) == 1
 
         assert site_temp.storage.open("data/data.json").read() == data_new
+        assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
+
+    # Test what happens if the content.json of the site is bigger than the site limit
+    def testHugeContentSiteUpdate(self, file_server, site, site_temp):
+        # Init source server
+        site.connection_server = file_server
+        file_server.sites[site.address] = site
+
+        # Init client server
+        client = FileServer(file_server.ip, 1545)
+        client.sites[site_temp.address] = site_temp
+        site_temp.connection_server = client
+
+        # Connect peers
+        site_temp.addPeer(file_server.ip, 1544)
+
+        # Download site from site to site_temp
+        site_temp.download(blind_includes=True).join(timeout=5)
+
+        # Raise the size limit to 20 MB on the source site so the huge content.json can be signed
+        site.settings["size_limit"] = int(20 * 1024 * 1024)
+        site.saveSettings()
+
+        content_json = site.storage.loadJson("content.json")
+        content_json["description"] = "PartirUnJour" * 1024 * 1024
+        site.storage.writeJson("content.json", content_json)
+        changed, deleted = site.content_manager.loadContent("content.json", force=True)
+
+        # Make sure we have 2 different content.json files
+        assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()
+
+        # Generate diff
+        diffs = site.content_manager.getDiffs("content.json")
+
+        # Publish with patch
+        site.log.info("Publish new content.json bigger than 10 MB")
+        with Spy.Spy(FileRequest, "route") as requests:
+            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+            assert site.storage.getSize("content.json") > 10 * 1024 * 1024  # Verify it is over 10 MB
+            site.publish(diffs=diffs)
+            site_temp.download(blind_includes=True).join(timeout=5)
+
+        assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
+        assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()
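
For reference, the inflated description the new test writes is "PartirUnJour" (12 bytes) repeated 1024 * 1024 times, roughly 12 MiB. That is why the source site needs its limit raised to 20 MB before signing, why the test asserts the signed content.json is over 10 MB, and why the downloading peer, whose limit is not raised, is expected to end up with a content.json smaller than its own limit. A quick sanity check of that arithmetic (plain Python, nothing project-specific):

# 12 bytes * 1024 * 1024 repetitions ~= 12 MiB of description text
payload = "PartirUnJour" * 1024 * 1024
print(len(payload) / 1024 / 1024)        # 12.0 (MiB)
print(len(payload) > 10 * 1024 * 1024)   # True: over the 10 MB limit the test asserts against
print(len(payload) < 20 * 1024 * 1024)   # True: under the raised 20 MB limit, so signing succeeds
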