Fix uploading bigfile to non-user directory

shortcutme 2017-10-16 15:11:19 +02:00
parent 231df44c94
commit c40f0c6919
No known key found for this signature in database
GPG key ID: 5B63BAE6CB9613AE
2 changed files with 14 additions and 6 deletions


@@ -302,7 +302,7 @@ class ContentManager(object):
     # Find the file info line from self.contents
     # Return: { "sha512": "c29d73d...21f518", "size": 41 , "content_inner_path": "content.json"}
-    def getFileInfo(self, inner_path):
+    def getFileInfo(self, inner_path, new_file=False):
         dirs = inner_path.split("/")  # Parent dirs of content.json
         inner_path_parts = [dirs.pop()]  # Filename relative to content.json
         while True:
@@ -338,6 +338,14 @@ class ContentManager(object):
                 else:
                     back["content_inner_path"] = content_inner_path_dir + "content.json"
                 back["optional"] = None
                 back["relative_path"] = "/".join(inner_path_parts)
                 return back

+            if new_file and content:
+                back = {}
+                back["content_inner_path"] = content_inner_path
+                back["relative_path"] = "/".join(inner_path_parts)
+                back["optional"] = None
+                return back
+
             # No inner path in this dir, lets try the parent dir
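The effect of the new new_file flag: when a file is not yet listed in any content.json, getFileInfo can still report which content.json would govern it instead of returning nothing. Below is a minimal standalone sketch of that lookup fallback, not the actual ContentManager code; the function name get_file_info, the shape of the contents dict, and the example paths are illustrative assumptions.

def get_file_info(contents, inner_path, new_file=False):
    # Sketch only: walk from the deepest directory upwards, looking for the
    # content.json that lists (or would list) this file.
    dirs = inner_path.split("/")        # Parent dirs of the candidate content.json
    inner_path_parts = [dirs.pop()]     # Filename relative to content.json
    while True:
        content_inner_path = ("/".join(dirs) + "/content.json").lstrip("/")
        content = contents.get(content_inner_path)
        relative_path = "/".join(inner_path_parts)

        # File already listed: return its manifest entry (carries sha512, size, ...)
        if content and relative_path in content.get("files", {}):
            back = dict(content["files"][relative_path])
            back["content_inner_path"] = content_inner_path
            back["relative_path"] = relative_path
            back["optional"] = False
            return back

        # New behaviour: the file is not listed yet, but a content.json exists
        # at this level, so report where the upload would belong (no sha512 yet)
        if new_file and content:
            return {
                "content_inner_path": content_inner_path,
                "relative_path": relative_path,
                "optional": None,
            }

        if dirs:                        # No match here, try the parent directory
            inner_path_parts.insert(0, dirs.pop())
        else:                           # Reached the site root without a match
            return None

contents = {"content.json": {"files": {"index.html": {"sha512": "c29d73d...21f518", "size": 41}}}}
print(get_file_info(contents, "data/new_video.mp4"))                 # None: file unknown anywhere
print(get_file_info(contents, "data/new_video.mp4", new_file=True))  # falls back to the site root content.json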
@@ -874,7 +882,7 @@ class ContentManager(object):
         else:  # Check using sha512 hash
             file_info = self.getFileInfo(inner_path)
             if file_info:
-                if CryptHash.sha512sum(file) != file_info["sha512"]:
+                if CryptHash.sha512sum(file) != file_info.get("sha512", ""):
                     raise VerifyError("Invalid hash")

                 if file_info.get("size", 0) != file.tell():
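The second change follows from the first: a file_info produced by the new new_file branch carries no "sha512" key, so indexing it directly would raise a KeyError during verification. Using .get("sha512", "") keeps the failure on the intended VerifyError path. A small illustrative contrast, not the actual verifyFile code; the helper name check_hash and the sample dicts are assumptions.

class VerifyError(Exception):
    pass

def check_hash(actual_hash, file_info):
    # .get() yields "" when the file has no manifest hash yet, so an
    # unverifiable file is rejected cleanly instead of raising KeyError
    if actual_hash != file_info.get("sha512", ""):
        raise VerifyError("Invalid hash")

listed = {"sha512": "c29d73d...21f518", "size": 41}
new_upload = {"content_inner_path": "content.json", "relative_path": "data/new_video.mp4", "optional": None}

check_hash("c29d73d...21f518", listed)          # passes: hash matches the manifest entry
try:
    check_hash("c29d73d...21f518", new_upload)  # rejected with VerifyError, not KeyError
except VerifyError as err:
    print("rejected:", err)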