Add file_size to request to avoid unnecessary download

shortcutme 2017-06-19 16:10:18 +02:00
parent 9a9bd71634
commit f773bf3336
GPG key ID: 5B63BAE6CB9613AE (no known key found for this signature in database)
3 changed files with 7 additions and 3 deletions
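
For orientation, here is a minimal sketch of what the "getFile" request parameters look like after this change, based only on the fields visible in the diffs below; the values are placeholders and the surrounding wire protocol is not shown:

    # Hypothetical parameter dict for a "getFile" request (all values are placeholders).
    params = {
        "site": "1SiteAddressPlaceholder",  # address of the site the file belongs to
        "inner_path": "content.json",       # path of the requested file within the site
        "location": 0,                      # byte offset; the file is fetched in 512k parts
        "file_size": 1024,                  # new field: expected total size, so the serving peer can refuse a mismatch early
    }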

@@ -198,6 +198,10 @@ class FileRequest(object):
     file.seek(params["location"])
     file.read_bytes = FILE_BUFF
     file_size = os.fstat(file.fileno()).st_size
+    if params.get("file_size") and params["file_size"] != file_size:
+        self.connection.badAction(5)
+        raise RequestError("File size does not match")
     if params["location"] > file_size:
         self.connection.badAction(5)
         raise RequestError("Bad file location")
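
Below is a standalone sketch of the server-side validation added above, assuming it can be isolated from the rest of the request handler; check_get_file_params and the local RequestError class are stand-ins for the real handler context:

    import os

    class RequestError(Exception):  # stand-in for the handler's RequestError
        pass

    def check_get_file_params(file, params):
        # Compare the size claimed by the requester with the file on disk and
        # refuse before streaming any data if they disagree.
        file_size = os.fstat(file.fileno()).st_size
        if params.get("file_size") and params["file_size"] != file_size:
            raise RequestError("File size does not match")
        if params["location"] > file_size:
            raise RequestError("Bad file location")
        return file_size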

@@ -155,7 +155,7 @@ class Peer(object):
         return None # Failed after 4 retry

     # Get a file content from peer
-    def getFile(self, site, inner_path):
+    def getFile(self, site, inner_path, file_size=None):
         # Use streamFile if client supports it
         if config.stream_downloads and self.connection and self.connection.handshake and self.connection.handshake["rev"] > 310:
             return self.streamFile(site, inner_path)
@@ -168,7 +168,7 @@ class Peer(object):
         s = time.time()
         while True: # Read in 512k parts
-            res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location})
+            res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location, "file_size": file_size})
             if not res or "body" not in res: # Error
                 return False
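
A brief usage sketch of the extended client-side signature; peer, site_address and expected_size are placeholders, and because file_size defaults to None, existing callers that omit it keep the old behaviour:

    # Hypothetical call site: passing the expected size lets the serving peer
    # reject a mismatched copy before any body data is transferred.
    buff = peer.getFile(site_address, "index.html", file_size=expected_size)
    if buff is False:  # getFile returns False on error, per the loop above
        print("Download failed or rejected by peer")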

@@ -46,7 +46,7 @@ class Worker(object):
         site = task["site"]
         task["workers_num"] += 1
         try:
-            buff = self.peer.getFile(site.address, task["inner_path"])
+            buff = self.peer.getFile(site.address, task["inner_path"], task["size"])
         except Exception, err:
             self.manager.log.debug("%s: getFile error: %s" % (self.key, err))
             buff = None
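
For completeness, a hedged sketch of the download task this worker consumes; the keys mirror the ones used above, and the assumption (not shown in this commit) is that "size" carries the expected byte size recorded for the file, for example in the site's content.json:

    # Hypothetical task dict; "size" is what gets forwarded to getFile as file_size above.
    task = {
        "site": site,               # Site object the file belongs to (placeholder)
        "inner_path": "js/all.js",  # placeholder path within the site
        "size": 12345,              # expected byte size of the file (assumed to come from content.json)
        "workers_num": 0,           # incremented while a worker is downloading it
    }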