Change to Python3 coding style

shortcutme 2019-03-15 21:06:59 +01:00
parent fc0fe0557b
commit b0b9a4d33c
No known key found for this signature in database
GPG key ID: 5B63BAE6CB9613AE
137 changed files with 910 additions and 913 deletions

View file

@@ -4,7 +4,7 @@ import array
def packPiecefield(data):
res = []
if not data:
return array.array("H", "")
return array.array("H", b"")
if data[0] == "0":
res.append(0)
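Note: this first fix is forced by the str/bytes split. In Python 2, `array.array("H", "")` accepted an empty str; in Python 3, str is text, so the initializer must be bytes. A minimal standalone sketch of the difference (not part of the plugin):

    import array

    array.array("H", b"")        # OK: empty array of unsigned shorts
    try:
        array.array("H", "")     # Python 3: str no longer works as a byte buffer
    except TypeError as err:
        print(err)               # cannot use a str to initialize an array with typecode 'H'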
@@ -48,7 +48,7 @@ class BigfilePiecefield(object):
__slots__ = ["data"]
def __init__(self):
self.data = ""
self.data = b""
def fromstring(self, s):
self.data = s
@@ -71,7 +71,7 @@ class BigfilePiecefield(object):
def __setitem__(self, key, value):
data = self.data
if len(data) < key:
data = data.ljust(key+1, "0")
data = data.ljust(key + 1, "0")
data = data[:key] + str(int(value)) + data[key + 1:]
self.data = data
@@ -80,7 +80,7 @@ class BigfilePiecefieldPacked(object):
__slots__ = ["data"]
def __init__(self):
self.data = ""
self.data = b""
def fromstring(self, data):
self.data = packPiecefield(data).tostring()
@@ -103,7 +103,7 @@ class BigfilePiecefieldPacked(object):
def __setitem__(self, key, value):
data = self.tostring()
if len(data) < key:
data = data.ljust(key+1, "0")
data = data.ljust(key + 1, "0")
data = data[:key] + str(int(value)) + data[key + 1:]
self.fromstring(data)
@@ -116,7 +116,7 @@ if __name__ == "__main__":
meminfo = psutil.Process(os.getpid()).memory_info
for storage in [BigfilePiecefieldPacked, BigfilePiecefield]:
print "-- Testing storage: %s --" % storage
print("-- Testing storage: %s --" % storage))
m = meminfo()[0]
s = time.time()
piecefields = {}
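The print statement is the most visible Python 3 change in this file: print is now an ordinary function. A quick sketch (hypothetical values, nothing taken from the benchmark above):

    storage = "BigfilePiecefield"
    print("-- Testing storage: %s --" % storage)   # parentheses are now required
    print("a", "b", sep=", ", end="\n")            # and keyword arguments become possible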
@@ -125,34 +125,34 @@ if __name__ == "__main__":
piecefield.fromstring(testdata[:i] + "0" + testdata[i + 1:])
piecefields[i] = piecefield
print "Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
val = piecefield[1000]
print "Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
piecefield[1000] = True
print "Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
packed = piecefield.pack()
print "Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed))
print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
piecefield.unpack(packed)
print "Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
piecefields = {}
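The `list(...)` wrappers around `piecefields.values()` are 2to3's conservative translation: in Python 3, `dict.values()` returns a lazy view rather than a list, so it cannot be indexed and it reflects later mutations. In these read-only loops the wrapper is harmless but not strictly needed. A sketch of the view behaviour, using a toy dict:

    piecefields = {0: "a", 1: "b"}

    vals = piecefields.values()             # Python 3: a live view, not a list
    piecefields[2] = "c"
    print(len(vals))                        # 3 - the view sees the new entry

    first = list(piecefields.values())[0]   # views are not indexable; materialize first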

View file

@@ -5,7 +5,6 @@ import shutil
import collections
import math
import msgpack
import gevent
import gevent.lock
@@ -15,7 +14,7 @@ from Crypt import CryptHash
from lib import merkletools
from util import helper
import util
from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
# We can only import plugin host classes after the plugins are loaded
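The dot in `from .BigfilePiecefield import ...` is required because Python 3 makes imports absolute by default (PEP 328): the bare Python 2 form would now look for a top-level BigfilePiecefield module and fail. A sketch, assuming a hypothetical package layout like the plugin's:

    # Bigfile/
    #     __init__.py
    #     BigfilePlugin.py
    #     BigfilePiecefield.py
    #
    # Inside BigfilePlugin.py:
    # from BigfilePiecefield import BigfilePiecefield   # Python 2: implicit relative import
    from .BigfilePiecefield import BigfilePiecefield    # Python 3: must be explicit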
@@ -61,7 +60,7 @@ class UiRequestPlugin(object):
)
if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split
hash = piecemap_info["sha512_pieces"][0].encode("hex")
hash = piecemap_info["sha512_pieces"][0].hex()
hash_id = site.content_manager.hashfield.getHashId(hash)
site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True)
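`piece.encode("hex")` relied on Python 2's bytes-to-bytes codecs, which Python 3 removed; `bytes.hex()` (available since 3.5) is the direct replacement. Minimal sketch:

    digest = b"\xa7\x3a\xba"
    print(digest.hex())              # 'a73aba'

    import binascii                  # the codec-style route still exists if needed
    print(binascii.hexlify(digest))  # b'a73aba'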
@@ -178,7 +177,7 @@ class UiWebsocketPlugin(object):
self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True)
try:
self.site.storage.delete(piecemap_inner_path)
except Exception, err:
except Exception as err:
self.log.error("File %s delete error: %s" % (piecemap_inner_path, err))
return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path)
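The comma form of except was removed outright in Python 3; `except ... as err` is the only spelling (and is already valid from Python 2.6 on). Sketch:

    try:
        raise OSError("delete failed")
    except Exception as err:   # "except Exception, err" is a SyntaxError in Python 3
        print("File delete error: %s" % err)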
@@ -324,7 +323,7 @@ class ContentManagerPlugin(object):
def verifyPiece(self, inner_path, pos, piece):
piecemap = self.getPiecemap(inner_path)
piece_i = pos / piecemap["piece_size"]
piece_i = int(pos / piecemap["piece_size"])
if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
raise VerifyError("Invalid hash")
return True
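In Python 3, `/` is always true division, so `pos / piecemap["piece_size"]` returns a float even when it divides evenly, and floats cannot be used as list indexes. The commit wraps the result in `int(...)`; floor division with `//` would be the more idiomatic fix and avoids the float round-trip entirely. Sketch:

    pos, piece_size = 3 * 1024 * 1024, 1024 * 1024
    print(pos / piece_size)         # 3.0 - a float in Python 3
    print(int(pos / piece_size))    # 3   - the commit's fix
    print(pos // piece_size)        # 3   - floor division, exact for ints of any size

For astronomically large offsets (beyond 2**53) the float detour could even truncate to the wrong piece index, which `//` sidesteps.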
@@ -345,7 +344,7 @@ class ContentManagerPlugin(object):
file_info = self.getFileInfo(inner_path)
# Mark piece downloaded
piece_i = pos_from / file_info["piece_size"]
piece_i = int(pos_from / file_info["piece_size"])
self.site.storage.piecefields[file_info["sha512"]][piece_i] = True
# Only add to site size on first request
@@ -368,7 +367,7 @@ class ContentManagerPlugin(object):
del self.site.storage.piecefields[sha512]
# Also remove other pieces of the file from download queue
for key in self.site.bad_files.keys():
for key in list(self.site.bad_files.keys()):
if key.startswith(inner_path + "|"):
del self.site.bad_files[key]
self.site.worker_manager.removeSolvedFileTasks()
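Unlike the benchmark loops earlier, this `list(...)` wrapper is genuinely required: the loop deletes keys, and mutating a dict while iterating over its live keys view raises RuntimeError in Python 3. Sketch with toy data:

    bad_files = {"file.iso|0-1048576": 1, "other.txt": 1}

    for key in list(bad_files.keys()):   # snapshot the keys first
        if key.startswith("file.iso|"):
            del bad_files[key]           # safe: we iterate over the copy

    # Looping over bad_files.keys() directly here would raise
    # "RuntimeError: dictionary changed size during iteration".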
@@ -381,9 +380,9 @@ class SiteStoragePlugin(object):
super(SiteStoragePlugin, self).__init__(*args, **kwargs)
self.piecefields = collections.defaultdict(BigfilePiecefield)
if "piecefields" in self.site.settings.get("cache", {}):
for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").iteritems():
for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
if piecefield_packed:
self.piecefields[sha512].unpack(piecefield_packed.decode("base64"))
self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed))
self.site.settings["cache"]["piecefields"] = {}
def createSparseFile(self, inner_path, size, sha512=None):
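`piecefield_packed.decode("base64")` used another removed bytes-to-bytes codec, so both directions move to the base64 module (which assumes `import base64` is added at the top of the file, not visible in this hunk). Round-trip sketch:

    import base64

    packed = b"\x01\x02\x03"
    encoded = base64.b64encode(packed).decode("utf8")   # bytes -> ASCII str for the settings cache
    assert base64.b64decode(encoded) == packed          # str back to the original bytes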
@@ -486,7 +485,7 @@ class BigFile(object):
requests = []
# Request all required blocks
while 1:
piece_i = pos / self.piece_size
piece_i = int(pos / self.piece_size)
if piece_i * self.piece_size >= read_until:
break
pos_from = piece_i * self.piece_size
@@ -503,7 +502,7 @@ class BigFile(object):
prebuffer_until = min(self.size, read_until + self.prebuffer)
priority = 3
while 1:
piece_i = pos / self.piece_size
piece_i = int(pos / self.piece_size)
if piece_i * self.piece_size >= prebuffer_until:
break
pos_from = piece_i * self.piece_size
@@ -565,7 +564,7 @@ class WorkerManagerPlugin(object):
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
task["piece_i"] = pos_from / file_info["piece_size"]
task["piece_i"] = int(pos_from / file_info["piece_size"])
task["sha512"] = file_info["sha512"]
else:
if inner_path in self.site.bad_files:
@@ -601,10 +600,10 @@ class WorkerManagerPlugin(object):
class FileRequestPlugin(object):
def isReadable(self, site, inner_path, file, pos):
# Peek into file
if file.read(10) == "\0" * 10:
if file.read(10) == b"\0" * 10:
# Looks empty, but make sure we don't have that piece
file_info = site.content_manager.getFileInfo(inner_path)
piece_i = pos / file_info["piece_size"]
piece_i = int(pos / file_info["piece_size"])
if not site.storage.piecefields[file_info["sha512"]][piece_i]:
return False
# Seek back to position we want to read
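This fix is easy to miss because the old code would not crash under Python 3: comparing the bytes returned by `file.read(10)` against a str is simply always False (at most a BytesWarning with `python -b`), so the emptiness check would silently never match. Sketch:

    chunk = b"\0" * 10           # what read(10) returns from a file opened in binary mode

    print(chunk == "\0" * 10)    # Python 3: False - bytes never compare equal to str
    print(chunk == b"\0" * 10)   # True - bytes against bytes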
@@ -622,7 +621,7 @@ class FileRequestPlugin(object):
if not peer.connection: # Just added
peer.connect(self.connection) # Assign current connection to peer
piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.iteritems()}
piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()}
self.response({"piecefields_packed": piecefields_packed})
def actionSetPiecefields(self, params):
@@ -638,7 +637,7 @@ class FileRequestPlugin(object):
peer.connect(self.connection)
peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
for sha512, piecefield_packed in params["piecefields_packed"].iteritems():
for sha512, piecefield_packed in params["piecefields_packed"].items():
peer.piecefields[sha512].unpack(piecefield_packed)
site.settings["has_bigfile"] = True
@@ -673,7 +672,7 @@ class PeerPlugin(object):
self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
try:
for sha512, piecefield_packed in res["piecefields_packed"].iteritems():
for sha512, piecefield_packed in res["piecefields_packed"].items():
self.piecefields[sha512].unpack(piecefield_packed)
except Exception as err:
self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err))
@@ -720,7 +719,7 @@ class SitePlugin(object):
def getSettingsCache(self):
back = super(SitePlugin, self).getSettingsCache()
if self.storage.piecefields:
back["piecefields"] = {sha512: piecefield.pack().encode("base64") for sha512, piecefield in self.storage.piecefields.iteritems()}
back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()}
return back
def needFile(self, inner_path, *args, **kwargs):

View file

@@ -1,5 +1,5 @@
import time
from cStringIO import StringIO
import io
import pytest
import msgpack
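cStringIO is gone in Python 3; the io module splits the job into io.StringIO for text and io.BytesIO for raw bytes. Since the test reads pieces from files opened with "rb", BytesIO is the correct substitute. Sketch:

    import io

    piece = io.BytesIO(b"x" * 1024)   # in-memory binary file, replaces cStringIO.StringIO
    piece.seek(0)
    assert piece.read(4) == b"xxxx"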
@@ -40,7 +40,7 @@ class TestBigfile:
piecemap = msgpack.unpack(site.storage.open(file_node["piecemap"], "rb"))["optional.any.iso"]
assert len(piecemap["sha512_pieces"]) == 10
assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1]
assert piecemap["sha512_pieces"][0].encode("hex") == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
assert piecemap["sha512_pieces"][0].hex() == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
def testVerifyPiece(self, site):
inner_path = self.createBigfile(site)
@@ -48,7 +48,7 @@ class TestBigfile:
# Verify all 10 pieces
f = site.storage.open(inner_path, "rb")
for i in range(10):
piece = StringIO(f.read(1024 * 1024))
piece = io.BytesIO(f.read(1024 * 1024))
piece.seek(0)
site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
f.close()
@@ -57,7 +57,7 @@ class TestBigfile:
with pytest.raises(VerifyError) as err:
i = 1
f = site.storage.open(inner_path, "rb")
piece = StringIO(f.read(1024 * 1024))
piece = io.BytesIO(f.read(1024 * 1024))
f.close()
site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
assert "Invalid hash" in str(err)
@@ -70,19 +70,19 @@ class TestBigfile:
# Write to file beginning
s = time.time()
f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), "hellostart" * 1024)
f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024)
time_write_start = time.time() - s
# Write to file end
s = time.time()
f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), "helloend" * 1024)
f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024)
time_write_end = time.time() - s
# Verify writes
f = site.storage.open(inner_path)
assert f.read(10) == "hellostart"
assert f.read(10) == b"hellostart"
f.seek(99 * 1024 * 1024)
assert f.read(8) == "helloend"
assert f.read(8) == b"helloend"
f.close()
site.storage.delete(inner_path)
@@ -105,7 +105,7 @@ class TestBigfile:
buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size
assert buff.getvalue().startswith("Test524") # Correct data
assert buff.getvalue().startswith(b"Test524") # Correct data
buff.seek(0)
assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash
@@ -147,12 +147,12 @@ class TestBigfile:
# Verify 0. block not downloaded
f = site_temp.storage.open(inner_path)
assert f.read(10) == "\0" * 10
assert f.read(10) == b"\0" * 10
# Verify 5. and 10. block downloaded
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
f.seek(9 * 1024 * 1024)
assert f.read(7) == "943---T"
assert f.read(7) == b"943---T"
# Verify hashfield
assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) # 18343: data/optional.any.iso, 30970: data/optional.any.iso.hashmap.msgpack
@@ -178,14 +178,14 @@ class TestBigfile:
with site_temp.storage.openBigfile(inner_path) as f:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
f.seek(9 * 1024 * 1024)
assert f.read(7) == "943---T"
assert f.read(7) == b"943---T"
assert len(requests) == 4 # 1x piecemap + 1x getpiecefield + 2x for pieces
assert set(site_temp.content_manager.hashfield) == set([18343, 30970])
assert set(site_temp.content_manager.hashfield) == set([18343, 43727])
assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
assert f.sha512 in site_temp.getSettingsCache()["piecefields"]
@@ -193,7 +193,7 @@ class TestBigfile:
# Test requesting already downloaded
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
assert len(requests) == 0
@@ -201,9 +201,9 @@ class TestBigfile:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024) # We already have this block
data = f.read(1024 * 1024 * 3) # Our read overflow to 6. and 7. block
assert data.startswith("Test524")
assert data.endswith("Test838-")
assert "\0" not in data # No null bytes allowed
assert data.startswith(b"Test524")
assert data.endswith(b"Test838-")
assert b"\0" not in data # No null bytes allowed
assert len(requests) == 2 # Two block download
@@ -258,11 +258,11 @@ class TestBigfile:
# Download second block
with site_temp.storage.openBigfile(inner_path) as f:
f.seek(1024 * 1024)
assert f.read(1024)[0] != "\0"
assert f.read(1024)[0:1] != b"\0"
# Make sure first block not downloaded
with site_temp.storage.open(inner_path) as f:
assert f.read(1024)[0] == "\0"
assert f.read(1024)[0:1] == b"\0"
peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
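The switch from `[0]` to `[0:1]` is its own Python 3 subtlety: indexing bytes yields an int, not a one-byte string, so `f.read(1024)[0] != "\0"` would compare an int to a str and always be True. Slicing keeps the result as bytes. Sketch:

    data = b"\0abc"

    print(data[0])              # 0 - indexing bytes gives an int in Python 3
    print(data[0] == "\0")      # False, but only because int never equals str
    print(data[0:1])            # b'\x00' - slicing returns bytes
    print(data[0:1] == b"\0")   # True - the comparison the test actually wants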
@@ -284,8 +284,8 @@ class TestBigfile:
s = time.time()
for i in range(25000):
site.addPeer(file_server.ip, i)
print "%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024) # 0.082s MEM: + 6800KB
print site.peers.values()[0].piecefields
print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)) # 0.082s MEM: + 6800KB
print(list(site.peers.values())[0].piecefields)
def testUpdatePiecefield(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
@@ -390,16 +390,16 @@ class TestBigfile:
size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]
with site_temp.storage.openBigfile(inner_path) as f:
assert "\0" not in f.read(1024)
assert b"\0" not in f.read(1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
with site_temp.storage.openBigfile(inner_path) as f:
# Don't count twice
assert "\0" not in f.read(1024)
assert b"\0" not in f.read(1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
# Add second block
assert "\0" not in f.read(1024 * 1024)
assert b"\0" not in f.read(1024 * 1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
def testPrebuffer(self, file_server, site, site_temp):
@@ -423,7 +423,7 @@ class TestBigfile:
with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
# assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces
assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2
@@ -434,7 +434,7 @@ class TestBigfile:
# No prebuffer beyond end of the file
f.seek(9 * 1024 * 1024)
assert "\0" not in f.read(7)
assert b"\0" not in f.read(7)
assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0

View file

@@ -1,2 +1,2 @@
import BigfilePlugin
from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
from . import BigfilePlugin
from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked