diff --git a/plugins/AnnounceLocal/AnnounceLocalPlugin.py b/plugins/AnnounceLocal/AnnounceLocalPlugin.py
index 27b4d38a..0919762a 100644
--- a/plugins/AnnounceLocal/AnnounceLocalPlugin.py
+++ b/plugins/AnnounceLocal/AnnounceLocalPlugin.py
@@ -4,7 +4,7 @@ import gevent
 
 from Plugin import PluginManager
 from Config import config
-import BroadcastServer
+from . import BroadcastServer
 
 
 @PluginManager.registerTo("SiteAnnouncer")
@@ -42,7 +42,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):
         if force:  # Probably new site added, clean cache
             self.known_peers = {}
 
-        for peer_id, known_peer in self.known_peers.items():
+        for peer_id, known_peer in list(self.known_peers.items()):
             if time.time() - known_peer["found"] > 20 * 60:
                 del(self.known_peers[peer_id])
                 self.log.debug("Timeout, removing from known_peers: %s" % peer_id)
@@ -78,7 +78,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):
 
     def actionSiteListRequest(self, sender, params):
         back = []
-        sites = self.server.sites.values()
+        sites = list(self.server.sites.values())
 
         # Split adresses to group of 100 to avoid UDP size limit
         site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)]
diff --git a/plugins/AnnounceLocal/Test/TestAnnounce.py b/plugins/AnnounceLocal/Test/TestAnnounce.py
index 691ecc26..4def02ed 100644
--- a/plugins/AnnounceLocal/Test/TestAnnounce.py
+++ b/plugins/AnnounceLocal/Test/TestAnnounce.py
@@ -85,10 +85,10 @@ class TestAnnounce:
     def testPeerDiscover(self, announcer, announcer_remote, site):
         assert announcer.server.peer_id != announcer_remote.server.peer_id
-        assert len(announcer.server.sites.values()[0].peers) == 0
+        assert len(list(announcer.server.sites.values())[0].peers) == 0
         announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port)
         time.sleep(0.1)
-        assert len(announcer.server.sites.values()[0].peers) == 1
+        assert len(list(announcer.server.sites.values())[0].peers) == 1
 
     def testRecentPeerList(self, announcer, announcer_remote, site):
         assert len(site.peers_recent) == 0
@@ -101,13 +101,13 @@ class TestAnnounce:
         assert len(site.peers) == 1
 
         # It should update peer without siteListResponse
-        last_time_found = site.peers.values()[0].time_found
+        last_time_found = list(site.peers.values())[0].time_found
         site.peers_recent.clear()
         with Spy.Spy(announcer, "handleMessage") as responses:
             announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
             time.sleep(0.1)
         assert [response[1]["cmd"] for response in responses] == ["discoverResponse"]
         assert len(site.peers_recent) == 1
-        assert site.peers.values()[0].time_found > last_time_found
+        assert list(site.peers.values())[0].time_found > last_time_found
diff --git a/plugins/AnnounceLocal/__init__.py b/plugins/AnnounceLocal/__init__.py
index defe2412..5b80abd2 100644
--- a/plugins/AnnounceLocal/__init__.py
+++ b/plugins/AnnounceLocal/__init__.py
@@ -1 +1 @@
-import AnnounceLocalPlugin
\ No newline at end of file
+from . import AnnounceLocalPlugin
\ No newline at end of file
diff --git a/plugins/AnnounceShare/AnnounceSharePlugin.py b/plugins/AnnounceShare/AnnounceSharePlugin.py
index 10e3a3e6..8c0a8ec4 100644
--- a/plugins/AnnounceShare/AnnounceSharePlugin.py
+++ b/plugins/AnnounceShare/AnnounceSharePlugin.py
@@ -75,7 +75,7 @@ class TrackerStorage(object):
     def getWorkingTrackers(self, type="shared"):
         trackers = {
-            key: tracker for key, tracker in self.getTrackers(type).iteritems()
+            key: tracker for key, tracker in self.getTrackers(type).items()
             if tracker["time_success"] > time.time() - 60 * 60
         }
         return trackers
@@ -95,7 +95,7 @@ class TrackerStorage(object):
         trackers = self.getTrackers()
         self.log.debug("Loaded %s shared trackers" % len(trackers))
-        for address, tracker in trackers.items():
+        for address, tracker in list(trackers.items()):
             tracker["num_error"] = 0
             if not address.startswith("zero://"):
                 del trackers[address]
@@ -144,7 +144,7 @@ class SiteAnnouncerPlugin(object):
             tracker_storage.time_discover = time.time()
             gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers())
         trackers = super(SiteAnnouncerPlugin, self).getTrackers()
-        shared_trackers = tracker_storage.getTrackers("shared").keys()
+        shared_trackers = list(tracker_storage.getTrackers("shared").keys())
         if shared_trackers:
             return trackers + shared_trackers
         else:
@@ -164,7 +164,7 @@ class SiteAnnouncerPlugin(object):
 @PluginManager.registerTo("FileRequest")
 class FileRequestPlugin(object):
     def actionGetTrackers(self, params):
-        shared_trackers = tracker_storage.getWorkingTrackers("shared").keys()
+        shared_trackers = list(tracker_storage.getWorkingTrackers("shared").keys())
         self.response({"trackers": shared_trackers})
diff --git a/plugins/AnnounceShare/Test/TestAnnounceShare.py b/plugins/AnnounceShare/Test/TestAnnounceShare.py
index 4608eda7..7178eac8 100644
--- a/plugins/AnnounceShare/Test/TestAnnounceShare.py
+++ b/plugins/AnnounceShare/Test/TestAnnounceShare.py
@@ -12,7 +12,6 @@ class TestAnnounceShare:
         open("%s/trackers.json" % config.data_dir, "w").write("{}")
         tracker_storage = AnnounceSharePlugin.tracker_storage
         tracker_storage.load()
-        print tracker_storage.file_path, config.data_dir
         peer = Peer(file_server.ip, 1544, connection_server=file_server)
         assert peer.request("getTrackers")["trackers"] == []
diff --git a/plugins/AnnounceShare/__init__.py b/plugins/AnnounceShare/__init__.py
index f55cb2c6..dc1e40bd 100644
--- a/plugins/AnnounceShare/__init__.py
+++ b/plugins/AnnounceShare/__init__.py
@@ -1 +1 @@
-import AnnounceSharePlugin
+from . import AnnounceSharePlugin
diff --git a/plugins/AnnounceZero/AnnounceZeroPlugin.py b/plugins/AnnounceZero/AnnounceZeroPlugin.py
index b7f9e823..a3f4197a 100644
--- a/plugins/AnnounceZero/AnnounceZeroPlugin.py
+++ b/plugins/AnnounceZero/AnnounceZeroPlugin.py
@@ -119,7 +119,7 @@ class SiteAnnouncerPlugin(object):
                 onion = self.site.connection_server.tor_manager.getOnion(site.address)
                 publickey = self.site.connection_server.tor_manager.getPublickey(onion)
                 if publickey not in request["onion_signs"]:
-                    sign = CryptRsa.sign(res["onion_sign_this"], self.site.connection_server.tor_manager.getPrivatekey(onion))
+                    sign = CryptRsa.sign(res["onion_sign_this"].encode("utf8"), self.site.connection_server.tor_manager.getPrivatekey(onion))
                     request["onion_signs"][publickey] = sign
         res = tracker_peer.request("announce", request)
         if not res or "onion_sign_this" in res:
diff --git a/plugins/AnnounceZero/__init__.py b/plugins/AnnounceZero/__init__.py
index 4b9cbe10..8aec5ddb 100644
--- a/plugins/AnnounceZero/__init__.py
+++ b/plugins/AnnounceZero/__init__.py
@@ -1 +1 @@
-import AnnounceZeroPlugin
\ No newline at end of file
+from . import AnnounceZeroPlugin
\ No newline at end of file
diff --git a/plugins/Bigfile/BigfilePiecefield.py b/plugins/Bigfile/BigfilePiecefield.py
index c7690279..87170c83 100644
--- a/plugins/Bigfile/BigfilePiecefield.py
+++ b/plugins/Bigfile/BigfilePiecefield.py
@@ -4,7 +4,7 @@ import array
 def packPiecefield(data):
     res = []
     if not data:
-        return array.array("H", "")
+        return array.array("H", b"")
 
     if data[0] == "0":
         res.append(0)
@@ -48,7 +48,7 @@ class BigfilePiecefield(object):
     __slots__ = ["data"]
 
     def __init__(self):
-        self.data = ""
+        self.data = b""
 
     def fromstring(self, s):
         self.data = s
@@ -71,7 +71,7 @@ class BigfilePiecefield(object):
     def __setitem__(self, key, value):
         data = self.data
         if len(data) < key:
-            data = data.ljust(key+1, "0")
+            data = data.ljust(key + 1, "0")
         data = data[:key] + str(int(value)) + data[key + 1:]
         self.data = data
@@ -80,7 +80,7 @@ class BigfilePiecefieldPacked(object):
     __slots__ = ["data"]
 
     def __init__(self):
-        self.data = ""
+        self.data = b""
 
     def fromstring(self, data):
         self.data = packPiecefield(data).tostring()
@@ -103,7 +103,7 @@ class BigfilePiecefieldPacked(object):
     def __setitem__(self, key, value):
         data = self.tostring()
         if len(data) < key:
-            data = data.ljust(key+1, "0")
+            data = data.ljust(key + 1, "0")
         data = data[:key] + str(int(value)) + data[key + 1:]
         self.fromstring(data)
@@ -116,7 +116,7 @@ if __name__ == "__main__":
     meminfo = psutil.Process(os.getpid()).memory_info
 
     for storage in [BigfilePiecefieldPacked, BigfilePiecefield]:
-        print "-- Testing storage: %s --" % storage
+        print("-- Testing storage: %s --" % storage)
         m = meminfo()[0]
         s = time.time()
         piecefields = {}
@@ -125,34 +125,34 @@ if __name__ == "__main__":
             piecefield.fromstring(testdata[:i] + "0" + testdata[i + 1:])
             piecefields[i] = piecefield
 
-        print "Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
+        print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
 
         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             val = piecefield[1000]
 
-        print "Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
+        print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
 
         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             piecefield[1000] = True
 
-        print "Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
+        print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
 
         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             packed = piecefield.pack()
 
-        print "Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed))
+        print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)))
 
         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             piecefield.unpack(packed)
 
-        print "Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
+        print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
 
         piecefields = {}
diff --git a/plugins/Bigfile/BigfilePlugin.py b/plugins/Bigfile/BigfilePlugin.py
index 484d2b6d..2757983e 100644
--- a/plugins/Bigfile/BigfilePlugin.py
+++ b/plugins/Bigfile/BigfilePlugin.py
@@ -5,7 +5,6 @@ import shutil
 import collections
 import math
 
-import msgpack
 import gevent
 import gevent.lock
 
@@ -15,7 +14,7 @@ from Crypt import CryptHash
 from lib import merkletools
 from util import helper
 import util
-from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
+from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
 
 
 # We can only import plugin host clases after the plugins are loaded
@@ -61,7 +60,7 @@ class UiRequestPlugin(object):
             )
 
             if len(piecemap_info["sha512_pieces"]) == 1:  # Small file, don't split
-                hash = piecemap_info["sha512_pieces"][0].encode("hex")
+                hash = piecemap_info["sha512_pieces"][0].hex()
                 hash_id = site.content_manager.hashfield.getHashId(hash)
                 site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True)
@@ -178,7 +177,7 @@ class UiWebsocketPlugin(object):
             self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True)
             try:
                 self.site.storage.delete(piecemap_inner_path)
-            except Exception, err:
+            except Exception as err:
                 self.log.error("File %s delete error: %s" % (piecemap_inner_path, err))
 
         return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path)
@@ -324,7 +323,7 @@ class ContentManagerPlugin(object):
     def verifyPiece(self, inner_path, pos, piece):
         piecemap = self.getPiecemap(inner_path)
-        piece_i = pos / piecemap["piece_size"]
+        piece_i = int(pos / piecemap["piece_size"])
         if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
             raise VerifyError("Invalid hash")
         return True
@@ -345,7 +344,7 @@ class ContentManagerPlugin(object):
         file_info = self.getFileInfo(inner_path)
 
         # Mark piece downloaded
-        piece_i = pos_from / file_info["piece_size"]
+        piece_i = int(pos_from / file_info["piece_size"])
         self.site.storage.piecefields[file_info["sha512"]][piece_i] = True
 
         # Only add to site size on first request
@@ -368,7 +367,7 @@ class ContentManagerPlugin(object):
                 del self.site.storage.piecefields[sha512]
 
         # Also remove other pieces of the file from download queue
-        for key in self.site.bad_files.keys():
+        for key in list(self.site.bad_files.keys()):
             if key.startswith(inner_path + "|"):
                 del self.site.bad_files[key]
         self.site.worker_manager.removeSolvedFileTasks()
@@ -381,9 +380,9 @@ class SiteStoragePlugin(object):
         super(SiteStoragePlugin, self).__init__(*args, **kwargs)
         self.piecefields = collections.defaultdict(BigfilePiecefield)
         if "piecefields" in self.site.settings.get("cache", {}):
-            for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").iteritems():
+            for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
                 if piecefield_packed:
-                    self.piecefields[sha512].unpack(piecefield_packed.decode("base64"))
+                    self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed))
             self.site.settings["cache"]["piecefields"] = {}
 
     def createSparseFile(self, inner_path, size, sha512=None):
@@ -486,7 +485,7 @@ class BigFile(object):
         requests = []
         # Request all required blocks
         while 1:
-            piece_i = pos / self.piece_size
+            piece_i = int(pos / self.piece_size)
             if piece_i * self.piece_size >= read_until:
                 break
             pos_from = piece_i * self.piece_size
@@ -503,7 +502,7 @@ class BigFile(object):
             prebuffer_until = min(self.size, read_until + self.prebuffer)
             priority = 3
             while 1:
-                piece_i = pos / self.piece_size
+                piece_i = int(pos / self.piece_size)
                 if piece_i * self.piece_size >= prebuffer_until:
                     break
                 pos_from = piece_i * self.piece_size
@@ -565,7 +564,7 @@ class WorkerManagerPlugin(object):
             inner_path, file_range = inner_path.split("|")
             pos_from, pos_to = map(int, file_range.split("-"))
-            task["piece_i"] = pos_from / file_info["piece_size"]
+            task["piece_i"] = int(pos_from / file_info["piece_size"])
             task["sha512"] = file_info["sha512"]
         else:
             if inner_path in self.site.bad_files:
@@ -601,10 +600,10 @@ class FileRequestPlugin(object):
     def isReadable(self, site, inner_path, file, pos):
         # Peek into file
-        if file.read(10) == "\0" * 10:
+        if file.read(10) == b"\0" * 10:
             # Looks empty, but makes sures we don't have that piece
             file_info = site.content_manager.getFileInfo(inner_path)
-            piece_i = pos / file_info["piece_size"]
+            piece_i = int(pos / file_info["piece_size"])
             if not site.storage.piecefields[file_info["sha512"]][piece_i]:
                 return False
         # Seek back to position we want to read
@@ -622,7 +621,7 @@ class FileRequestPlugin(object):
         if not peer.connection:  # Just added
             peer.connect(self.connection)  # Assign current connection to peer
 
-        piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.iteritems()}
+        piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()}
         self.response({"piecefields_packed": piecefields_packed})
 
     def actionSetPiecefields(self, params):
@@ -638,7 +637,7 @@ class FileRequestPlugin(object):
             peer.connect(self.connection)
 
         peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
-        for sha512, piecefield_packed in params["piecefields_packed"].iteritems():
+        for sha512, piecefield_packed in params["piecefields_packed"].items():
             peer.piecefields[sha512].unpack(piecefield_packed)
         site.settings["has_bigfile"] = True
@@ -673,7 +672,7 @@ class PeerPlugin(object):
         self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
         try:
-            for sha512, piecefield_packed in res["piecefields_packed"].iteritems():
+            for sha512, piecefield_packed in res["piecefields_packed"].items():
                 self.piecefields[sha512].unpack(piecefield_packed)
         except Exception as err:
             self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err))
@@ -720,7 +719,7 @@ class SitePlugin(object):
     def getSettingsCache(self):
         back = super(SitePlugin, self).getSettingsCache()
         if self.storage.piecefields:
back["piecefields"] = {sha512: piecefield.pack().encode("base64") for sha512, piecefield in self.storage.piecefields.iteritems()} + back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()} return back def needFile(self, inner_path, *args, **kwargs): diff --git a/plugins/Bigfile/Test/TestBigfile.py b/plugins/Bigfile/Test/TestBigfile.py index 2b71ec8e..d6c057bf 100644 --- a/plugins/Bigfile/Test/TestBigfile.py +++ b/plugins/Bigfile/Test/TestBigfile.py @@ -1,5 +1,5 @@ import time -from cStringIO import StringIO +import io import pytest import msgpack @@ -40,7 +40,7 @@ class TestBigfile: piecemap = msgpack.unpack(site.storage.open(file_node["piecemap"], "rb"))["optional.any.iso"] assert len(piecemap["sha512_pieces"]) == 10 assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1] - assert piecemap["sha512_pieces"][0].encode("hex") == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3" + assert piecemap["sha512_pieces"][0].hex() == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3" def testVerifyPiece(self, site): inner_path = self.createBigfile(site) @@ -48,7 +48,7 @@ class TestBigfile: # Verify all 10 piece f = site.storage.open(inner_path, "rb") for i in range(10): - piece = StringIO(f.read(1024 * 1024)) + piece = io.BytesIO(f.read(1024 * 1024)) piece.seek(0) site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) f.close() @@ -57,7 +57,7 @@ class TestBigfile: with pytest.raises(VerifyError) as err: i = 1 f = site.storage.open(inner_path, "rb") - piece = StringIO(f.read(1024 * 1024)) + piece = io.BytesIO(f.read(1024 * 1024)) f.close() site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) assert "Invalid hash" in str(err) @@ -70,19 +70,19 @@ class TestBigfile: # Write to file beginning s = time.time() - f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), "hellostart" * 1024) + f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024) time_write_start = time.time() - s # Write to file end s = time.time() - f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), "helloend" * 1024) + f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024) time_write_end = time.time() - s # Verify writes f = site.storage.open(inner_path) - assert f.read(10) == "hellostart" + assert f.read(10) == b"hellostart" f.seek(99 * 1024 * 1024) - assert f.read(8) == "helloend" + assert f.read(8) == b"helloend" f.close() site.storage.delete(inner_path) @@ -105,7 +105,7 @@ class TestBigfile: buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size - assert buff.getvalue().startswith("Test524") # Correct data + assert buff.getvalue().startswith(b"Test524") # Correct data buff.seek(0) assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash @@ -147,12 +147,12 @@ class TestBigfile: # Verify 0. block not downloaded f = site_temp.storage.open(inner_path) - assert f.read(10) == "\0" * 10 + assert f.read(10) == b"\0" * 10 # Verify 5. and 10. 
         f.seek(5 * 1024 * 1024)
-        assert f.read(7) == "Test524"
+        assert f.read(7) == b"Test524"
         f.seek(9 * 1024 * 1024)
-        assert f.read(7) == "943---T"
+        assert f.read(7) == b"943---T"
 
         # Verify hashfield
         assert set(site_temp.content_manager.hashfield) == set([18343, 30970])  # 18343: data/optional.any.iso, 30970: data/optional.any.iso.hashmap.msgpack
@@ -178,14 +178,14 @@ class TestBigfile:
         with site_temp.storage.openBigfile(inner_path) as f:
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)
-                assert f.read(7) == "Test524"
+                assert f.read(7) == b"Test524"
                 f.seek(9 * 1024 * 1024)
-                assert f.read(7) == "943---T"
+                assert f.read(7) == b"943---T"
 
             assert len(requests) == 4  # 1x peicemap + 1x getpiecefield + 2x for pieces
 
-            assert set(site_temp.content_manager.hashfield) == set([18343, 30970])
+            assert set(site_temp.content_manager.hashfield) == set([18343, 43727])
 
             assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
             assert f.sha512 in site_temp.getSettingsCache()["piecefields"]
@@ -193,7 +193,7 @@ class TestBigfile:
             # Test requesting already downloaded
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)
-                assert f.read(7) == "Test524"
+                assert f.read(7) == b"Test524"
 
                 assert len(requests) == 0
@@ -201,9 +201,9 @@ class TestBigfile:
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)  # We already have this block
                 data = f.read(1024 * 1024 * 3)  # Our read overflow to 6. and 7. block
-                assert data.startswith("Test524")
-                assert data.endswith("Test838-")
-                assert "\0" not in data  # No null bytes allowed
+                assert data.startswith(b"Test524")
+                assert data.endswith(b"Test838-")
+                assert b"\0" not in data  # No null bytes allowed
 
                 assert len(requests) == 2  # Two block download
@@ -258,11 +258,11 @@ class TestBigfile:
         # Download second block
         with site_temp.storage.openBigfile(inner_path) as f:
             f.seek(1024 * 1024)
-            assert f.read(1024)[0] != "\0"
+            assert f.read(1024)[0:1] != b"\0"
 
         # Make sure first block not download
         with site_temp.storage.open(inner_path) as f:
-            assert f.read(1024)[0] == "\0"
+            assert f.read(1024)[0:1] == b"\0"
 
         peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
@@ -284,8 +284,8 @@ class TestBigfile:
         s = time.time()
         for i in range(25000):
             site.addPeer(file_server.ip, i)
-        print "%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)  # 0.082s MEM: + 6800KB
-        print site.peers.values()[0].piecefields
+        print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024))  # 0.082s MEM: + 6800KB
+        print(list(site.peers.values())[0].piecefields)
 
     def testUpdatePiecefield(self, file_server, site, site_temp):
         inner_path = self.createBigfile(site)
@@ -390,16 +390,16 @@ class TestBigfile:
         size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]
 
         with site_temp.storage.openBigfile(inner_path) as f:
-            assert "\0" not in f.read(1024)
+            assert b"\0" not in f.read(1024)
             assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
 
         with site_temp.storage.openBigfile(inner_path) as f:
             # Don't count twice
-            assert "\0" not in f.read(1024)
+            assert b"\0" not in f.read(1024)
             assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
 
             # Add second block
-            assert "\0" not in f.read(1024 * 1024)
+            assert b"\0" not in f.read(1024 * 1024)
             assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
 
     def testPrebuffer(self, file_server, site, site_temp):
@@ -423,7 +423,7 @@ class TestBigfile:
         with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)
-                assert f.read(7) == "Test524"
+                assert f.read(7) == b"Test524"
             # assert len(requests) == 3  # 1x piecemap + 1x getpiecefield + 1x for pieces
             assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2
@@ -434,7 +434,7 @@ class TestBigfile:
             # No prebuffer beyond end of the file
             f.seek(9 * 1024 * 1024)
-            assert "\0" not in f.read(7)
+            assert b"\0" not in f.read(7)
             assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0
diff --git a/plugins/Bigfile/__init__.py b/plugins/Bigfile/__init__.py
index 005d6661..cf2dcb49 100644
--- a/plugins/Bigfile/__init__.py
+++ b/plugins/Bigfile/__init__.py
@@ -1,2 +1,2 @@
-import BigfilePlugin
-from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
\ No newline at end of file
+from . import BigfilePlugin
+from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
\ No newline at end of file
diff --git a/plugins/Chart/ChartCollector.py b/plugins/Chart/ChartCollector.py
index 471c4b91..ad4d11a8 100644
--- a/plugins/Chart/ChartCollector.py
+++ b/plugins/Chart/ChartCollector.py
@@ -29,7 +29,7 @@ class ChartCollector(object):
         sites = file_server.sites
         if not sites:
             return collectors
-        content_db = sites.values()[0].content_manager.contents.db
+        content_db = list(sites.values())[0].content_manager.contents.db
 
         # Connection stats
         collectors["connection"] = lambda: len(file_server.connections)
@@ -67,8 +67,8 @@ class ChartCollector(object):
         collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()])
 
         # Peers
-        collectors["peer"] = lambda (peers): len(peers)
-        collectors["peer_onion"] = lambda (peers): len([True for peer in peers if ".onion" in peer])
+        collectors["peer"] = lambda peers: len(peers)
+        collectors["peer_onion"] = lambda peers: len([True for peer in peers if ".onion" in peer])
 
         # Size
         collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()])
@@ -81,21 +81,21 @@ class ChartCollector(object):
         site_collectors = {}
 
         # Size
-        site_collectors["site_size"] = lambda(site): site.settings.get("size", 0)
-        site_collectors["site_size_optional"] = lambda(site): site.settings.get("size_optional", 0)
-        site_collectors["site_optional_downloaded"] = lambda(site): site.settings.get("optional_downloaded", 0)
-        site_collectors["site_content"] = lambda(site): len(site.content_manager.contents)
+        site_collectors["site_size"] = lambda site: site.settings.get("size", 0)
+        site_collectors["site_size_optional"] = lambda site: site.settings.get("size_optional", 0)
+        site_collectors["site_optional_downloaded"] = lambda site: site.settings.get("optional_downloaded", 0)
+        site_collectors["site_content"] = lambda site: len(site.content_manager.contents)
 
         # Data transfer
-        site_collectors["site_bytes_recv|change"] = lambda(site): site.settings.get("bytes_recv", 0)
-        site_collectors["site_bytes_sent|change"] = lambda(site): site.settings.get("bytes_sent", 0)
+        site_collectors["site_bytes_recv|change"] = lambda site: site.settings.get("bytes_recv", 0)
+        site_collectors["site_bytes_sent|change"] = lambda site: site.settings.get("bytes_sent", 0)
 
         # Peers
-        site_collectors["site_peer"] = lambda(site): len(site.peers)
-        site_collectors["site_peer_onion"] = lambda(site): len(
-            [True for peer in site.peers.itervalues() if peer.ip.endswith(".onion")]
+        site_collectors["site_peer"] = lambda site: len(site.peers)
+        site_collectors["site_peer_onion"] = lambda site: len(
+            [True for peer in site.peers.values() if peer.ip.endswith(".onion")]
         )
-        site_collectors["site_peer_connected"] = lambda(site): len([True for peer in site.peers.itervalues() if peer.connection])
+        site_collectors["site_peer_connected"] = lambda site: len([True for peer in site.peers.values() if peer.connection])
 
         return site_collectors
@@ -109,7 +109,7 @@ class ChartCollector(object):
         if site is None:
             peers = self.getUniquePeers()
         datas = {}
-        for key, collector in collectors.iteritems():
+        for key, collector in collectors.items():
             try:
                 if site:
                     value = collector(site)
@@ -138,7 +138,7 @@ class ChartCollector(object):
         s = time.time()
         datas = self.collectDatas(collectors, last_values["global"])
         values = []
-        for key, value in datas.iteritems():
+        for key, value in datas.items():
             values.append((self.db.getTypeId(key), value, now))
         self.log.debug("Global collectors done in %.3fs" % (time.time() - s))
@@ -154,9 +154,9 @@ class ChartCollector(object):
         now = int(time.time())
         s = time.time()
         values = []
-        for address, site in sites.iteritems():
+        for address, site in sites.items():
             site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site)
-            for key, value in site_datas.iteritems():
+            for key, value in site_datas.items():
                 values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now))
             time.sleep(0.000001)
         self.log.debug("Site collections done in %.3fs" % (time.time() - s))
diff --git a/plugins/Chart/ChartPlugin.py b/plugins/Chart/ChartPlugin.py
index a491618b..ddc1e609 100644
--- a/plugins/Chart/ChartPlugin.py
+++ b/plugins/Chart/ChartPlugin.py
@@ -6,8 +6,8 @@ import gevent
 from Config import config
 from util import helper
 from Plugin import PluginManager
-from ChartDb import ChartDb
-from ChartCollector import ChartCollector
+from .ChartDb import ChartDb
+from .ChartCollector import ChartCollector
 
 if "db" not in locals().keys():  # Share on reloads
     db = ChartDb()
@@ -39,7 +39,7 @@ class UiWebsocketPlugin(object):
             if not query.strip().upper().startswith("SELECT"):
                 raise Exception("Only SELECT query supported")
             res = db.execute(query, params)
-        except Exception, err:  # Response the error to client
+        except Exception as err:  # Response the error to client
             self.log.error("ChartDbQuery error: %s" % err)
             return {"error": str(err)}
         # Convert result to dict
diff --git a/plugins/Chart/__init__.py b/plugins/Chart/__init__.py
index 78981122..2c284609 100644
--- a/plugins/Chart/__init__.py
+++ b/plugins/Chart/__init__.py
@@ -1 +1 @@
-import ChartPlugin
\ No newline at end of file
+from . import ChartPlugin
\ No newline at end of file
diff --git a/plugins/ContentFilter/ContentFilterPlugin.py b/plugins/ContentFilter/ContentFilterPlugin.py
index 4c30a140..f6d74e7a 100644
--- a/plugins/ContentFilter/ContentFilterPlugin.py
+++ b/plugins/ContentFilter/ContentFilterPlugin.py
@@ -1,13 +1,13 @@
 import time
 import re
-import cgi
+import html
 import hashlib
 
 from Plugin import PluginManager
 from Translate import Translate
 from Config import config
 
-from ContentFilterStorage import ContentFilterStorage
+from .ContentFilterStorage import ContentFilterStorage
 
 
 if "_" not in locals():
@@ -39,8 +39,8 @@ class UiWebsocketPlugin(object):
         else:
             self.cmd(
                 "confirm",
-                [_["Hide all content from <b>%s</b>?"] % cgi.escape(cert_user_id), _["Mute"]],
-                lambda (res): self.cbMuteAdd(to, auth_address, cert_user_id, reason)
+                [_["Hide all content from <b>%s</b>?"] % html.escape(cert_user_id), _["Mute"]],
+                lambda res: self.cbMuteAdd(to, auth_address, cert_user_id, reason)
             )
 
     def cbMuteRemove(self, to, auth_address):
@@ -55,8 +55,8 @@ class UiWebsocketPlugin(object):
         else:
             self.cmd(
                 "confirm",
-                [_["Unmute <b>%s</b>?"] % cgi.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
-                lambda (res): self.cbMuteRemove(to, auth_address)
+                [_["Unmute <b>%s</b>?"] % html.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
+                lambda res: self.cbMuteRemove(to, auth_address)
             )
 
     def actionMuteList(self, to):
@@ -101,13 +101,13 @@ class UiWebsocketPlugin(object):
         else:
             content = site.storage.loadJson(inner_path)
             title = _["New shared global content filter: <b>%s</b> (%s sites, %s users)"] % (
-                cgi.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
+                html.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
             )
 
             self.cmd(
                 "confirm",
                 [title, "Add"],
-                lambda (res): self.cbFilterIncludeAdd(to, res, address, inner_path, description)
+                lambda res: self.cbFilterIncludeAdd(to, res, address, inner_path, description)
             )
 
     def cbFilterIncludeAdd(self, to, res, address, inner_path, description):
@@ -189,7 +189,7 @@ class UiRequestPlugin(object):
             address = self.server.site_manager.resolveDomain(address)
 
         if address:
-            address_sha256 = "0x" + hashlib.sha256(address).hexdigest()
+            address_sha256 = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest()
         else:
             address_sha256 = None
diff --git a/plugins/ContentFilter/ContentFilterStorage.py b/plugins/ContentFilter/ContentFilterStorage.py
index 17af298f..84908e09 100644
--- a/plugins/ContentFilter/ContentFilterStorage.py
+++ b/plugins/ContentFilter/ContentFilterStorage.py
@@ -62,7 +62,7 @@ class ContentFilterStorage(object):
                 )
                 continue
 
-            for key, val in content.iteritems():
+            for key, val in content.items():
                 if type(val) is not dict:
                     continue
diff --git a/plugins/ContentFilter/__init__.py b/plugins/ContentFilter/__init__.py
index 4d8c3acc..2cbca8ee 100644
--- a/plugins/ContentFilter/__init__.py
+++ b/plugins/ContentFilter/__init__.py
@@ -1 +1 @@
-import ContentFilterPlugin
+from . import ContentFilterPlugin
diff --git a/plugins/Cors/CorsPlugin.py b/plugins/Cors/CorsPlugin.py
index 8d758988..af501462 100644
--- a/plugins/Cors/CorsPlugin.py
+++ b/plugins/Cors/CorsPlugin.py
@@ -1,5 +1,5 @@
 import re
-import cgi
+import html
 import copy
 
 from Plugin import PluginManager
@@ -78,8 +78,8 @@ class UiWebsocketPlugin(object):
 
         self.cmd(
             "confirm",
-            [_["This site requests <b>read</b> permission to: <b>%s</b>"] % cgi.escape(site_name), button_title],
-            lambda (res): self.cbCorsPermission(to, address)
+            [_["This site requests <b>read</b> permission to: <b>%s</b>"] % html.escape(site_name), button_title],
+            lambda res: self.cbCorsPermission(to, address)
         )
 
     def cbCorsPermission(self, to, address):
diff --git a/plugins/Cors/__init__.py b/plugins/Cors/__init__.py
index bca1ab3e..bcaa502b 100644
--- a/plugins/Cors/__init__.py
+++ b/plugins/Cors/__init__.py
@@ -1 +1 @@
-import CorsPlugin
\ No newline at end of file
+from . import CorsPlugin
\ No newline at end of file
diff --git a/plugins/CryptMessage/CryptMessage.py b/plugins/CryptMessage/CryptMessage.py
index 955dd9b1..88441e44 100644
--- a/plugins/CryptMessage/CryptMessage.py
+++ b/plugins/CryptMessage/CryptMessage.py
@@ -43,11 +43,11 @@ def getEcc(privatekey=None):
 
 def toOpensslPrivatekey(privatekey):
     privatekey_bin = btctools.encode_privkey(privatekey, "bin")
-    return '\x02\xca\x00\x20' + privatekey_bin
+    return b'\x02\xca\x00\x20' + privatekey_bin
 
 
 def toOpensslPublickey(publickey):
     publickey_bin = btctools.encode_pubkey(publickey, "bin")
     publickey_bin = publickey_bin[1:]
-    publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:]
+    publickey_openssl = b'\x02\xca\x00 ' + publickey_bin[:32] + b'\x00 ' + publickey_bin[32:]
     return publickey_openssl
diff --git a/plugins/CryptMessage/CryptMessagePlugin.py b/plugins/CryptMessage/CryptMessagePlugin.py
index 71499eca..e37e4c17 100644
--- a/plugins/CryptMessage/CryptMessagePlugin.py
+++ b/plugins/CryptMessage/CryptMessagePlugin.py
@@ -3,9 +3,9 @@ import os
 
 from Plugin import PluginManager
 from Crypt import CryptBitcoin
-from lib.pybitcointools import bitcoin as btctools
+import lib.pybitcointools as btctools
 
-import CryptMessage
+from . import CryptMessage
 
 
 @PluginManager.registerTo("UiWebsocket")
diff --git a/plugins/CryptMessage/__init__.py b/plugins/CryptMessage/__init__.py
index 3eb41820..6aeb4e52 100644
--- a/plugins/CryptMessage/__init__.py
+++ b/plugins/CryptMessage/__init__.py
@@ -1 +1 @@
-import CryptMessagePlugin
\ No newline at end of file
+from . import CryptMessagePlugin
\ No newline at end of file
diff --git a/plugins/FilePack/FilePackPlugin.py b/plugins/FilePack/FilePackPlugin.py
index 8d662bba..bbaf0d29 100644
--- a/plugins/FilePack/FilePackPlugin.py
+++ b/plugins/FilePack/FilePackPlugin.py
@@ -48,7 +48,7 @@ class UiRequestPlugin(object):
         if ".zip/" in path or ".tar.gz/" in path:
             file_obj = None
             path_parts = self.parsePath(path)
-            file_path = u"%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"].decode("utf8"))
+            file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"])
             match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path)
             archive_path, path_within = match.groups()
             if archive_path not in archive_cache:
diff --git a/plugins/FilePack/__init__.py b/plugins/FilePack/__init__.py
index ab07a1ff..660a0920 100644
--- a/plugins/FilePack/__init__.py
+++ b/plugins/FilePack/__init__.py
@@ -1 +1 @@
-import FilePackPlugin
\ No newline at end of file
+from . import FilePackPlugin
\ No newline at end of file
diff --git a/plugins/MergerSite/MergerSitePlugin.py b/plugins/MergerSite/MergerSitePlugin.py
index 3de92a91..36c1dbae 100644
--- a/plugins/MergerSite/MergerSitePlugin.py
+++ b/plugins/MergerSite/MergerSitePlugin.py
@@ -66,7 +66,7 @@ class UiWebsocketPlugin(object):
         self.cmd(
             "confirm",
             [_["Add <b>%s</b> new site?"] % len(addresses), "Add"],
-            lambda (res): self.cbMergerSiteAdd(to, addresses)
+            lambda res: self.cbMergerSiteAdd(to, addresses)
         )
         self.response(to, "ok")
@@ -102,7 +102,7 @@ class UiWebsocketPlugin(object):
         ret = {}
         if not merger_types:
             return self.response(to, {"error": "Not a merger site"})
-        for address, merged_type in merged_db.iteritems():
+        for address, merged_type in merged_db.items():
             if merged_type not in merger_types:
                 continue  # Site not for us
             if query_site_info:
@@ -215,7 +215,7 @@ class UiWebsocketPlugin(object):
         if not re.match("^[A-Za-z0-9-]+$", merger_type):
             raise Exception("Invalid merger_type: %s" % merger_type)
         merged_sites = []
-        for address, merged_type in merged_db.iteritems():
+        for address, merged_type in merged_db.items():
             if merged_type != merger_type:
                 continue
             site = self.server.sites.get(address)
@@ -253,18 +253,18 @@ class SiteStoragePlugin(object):
 
         # Not a merger site, that's all
        if not merger_types:
-            raise StopIteration
+            return
 
         merged_sites = [
             site_manager.sites[address]
-            for address, merged_type in merged_db.iteritems()
+            for address, merged_type in merged_db.items()
             if merged_type in merger_types
         ]
         found = 0
         for merged_site in merged_sites:
             self.log.debug("Loading merged site: %s" % merged_site)
             merged_type = merged_db[merged_site.address]
-            for content_inner_path, content in merged_site.content_manager.contents.iteritems():
+            for content_inner_path, content in merged_site.content_manager.contents.items():
                 # content.json file itself
                 if merged_site.storage.isFile(content_inner_path):  # Missing content.json file
                     merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)
@@ -273,7 +273,7 @@ class SiteStoragePlugin(object):
                     merged_site.log.error("[MISSING] %s" % content_inner_path)
                 # Data files in content.json
                 content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
-                for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
+                for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
                     if not file_relative_path.endswith(".json"):
                         continue  # We only interesed in json files
                     file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
@@ -285,7 +285,7 @@ class SiteStoragePlugin(object):
                     merged_site.log.error("[MISSING] %s" % file_inner_path)
                 found += 1
                 if found % 100 == 0:
-                    time.sleep(0.000001)  # Context switch to avoid UI block
+                    time.sleep(0.001)  # Context switch to avoid UI block
 
     # Also notice merger sites on a merged site file change
     def onUpdated(self, inner_path, file=None):
@@ -339,11 +339,11 @@ class SiteManagerPlugin(object):
         site_manager = self
         if not self.sites:
             return
-        for site in self.sites.itervalues():
+        for site in self.sites.values():
             # Update merged sites
             try:
                 merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type")
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err)))
                 continue
             if merged_type:
@@ -368,7 +368,7 @@ class SiteManagerPlugin(object):
 
         # Update merged to merger
         if merged_type:
-            for merger_site in self.sites.itervalues():
+            for merger_site in self.sites.values():
                 if "Merger:" + merged_type in merger_site.settings["permissions"]:
                     if site.address not in merged_to_merger:
                         merged_to_merger[site.address] = []
diff --git a/plugins/MergerSite/__init__.py b/plugins/MergerSite/__init__.py
index f1f3412c..2cf54611 100644
--- a/plugins/MergerSite/__init__.py
+++ b/plugins/MergerSite/__init__.py
@@ -1 +1 @@
-import MergerSitePlugin
\ No newline at end of file
+from . import MergerSitePlugin
\ No newline at end of file
diff --git a/plugins/Newsfeed/NewsfeedPlugin.py b/plugins/Newsfeed/NewsfeedPlugin.py
index 802fa50b..8d040127 100644
--- a/plugins/Newsfeed/NewsfeedPlugin.py
+++ b/plugins/Newsfeed/NewsfeedPlugin.py
@@ -37,7 +37,7 @@ class UiWebsocketPlugin(object):
         total_s = time.time()
         num_sites = 0
 
-        for address, site_data in self.user.sites.items():
+        for address, site_data in list(self.user.sites.items()):
             feeds = site_data.get("follow")
             if not feeds:
                 continue
@@ -45,7 +45,7 @@ class UiWebsocketPlugin(object):
                 self.log.debug("Invalid feed for site %s" % address)
                 continue
             num_sites += 1
-            for name, query_set in feeds.iteritems():
+            for name, query_set in feeds.items():
                 site = SiteManager.site_manager.get(address)
                 if not site or not site.storage.has_db:
                     continue
@@ -78,7 +78,7 @@ class UiWebsocketPlugin(object):
 
                 for row in res:
                     row = dict(row)
-                    if not isinstance(row["date_added"], (int, long, float, complex)):
+                    if not isinstance(row["date_added"], (int, float, complex)):
                         self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"]))
                         continue
                     if row["date_added"] > 1000000000000:  # Formatted as millseconds
@@ -116,7 +116,7 @@ class UiWebsocketPlugin(object):
 
         search_text, filters = self.parseSearch(search)
 
-        for address, site in SiteManager.site_manager.list().iteritems():
+        for address, site in SiteManager.site_manager.list().items():
             if not site.storage.has_db:
                 continue
@@ -137,7 +137,7 @@ class UiWebsocketPlugin(object):
 
             num_sites += 1
 
-            for name, query in feeds.iteritems():
+            for name, query in feeds.items():
                 s = time.time()
                 try:
                     db_query = DbQuery(query)
@@ -162,7 +162,7 @@ class UiWebsocketPlugin(object):
                     db_query.parts["LIMIT"] = str(limit)
 
                     res = site.storage.query(str(db_query), params)
-                except Exception, err:
+                except Exception as err:
                     self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
                     stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query})
                     continue
diff --git a/plugins/Newsfeed/__init__.py b/plugins/Newsfeed/__init__.py
index 20cc04a1..6e624df6 100644
--- a/plugins/Newsfeed/__init__.py
+++ b/plugins/Newsfeed/__init__.py
@@ -1 +1 @@
-import NewsfeedPlugin
\ No newline at end of file
+from . import NewsfeedPlugin
\ No newline at end of file
diff --git a/plugins/OptionalManager/ContentDbPlugin.py b/plugins/OptionalManager/ContentDbPlugin.py
index 1a1f10af..f3716b44 100644
--- a/plugins/OptionalManager/ContentDbPlugin.py
+++ b/plugins/OptionalManager/ContentDbPlugin.py
@@ -88,8 +88,8 @@ class ContentDbPlugin(object):
             site_sizes[row["site_id"]]["optional_downloaded"] += row["size"]
 
         # Site site size stats to sites.json settings
-        site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()}
-        for site_id, stats in site_sizes.iteritems():
+        site_ids_reverse = {val: key for key, val in self.site_ids.items()}
+        for site_id, stats in site_sizes.items():
             site_address = site_ids_reverse.get(site_id)
             if not site_address:
                 self.log.error("Not found site_id: %s" % site_id)
@@ -166,7 +166,7 @@ class ContentDbPlugin(object):
         num = 0
         site_id = self.site_ids[site.address]
         content_inner_dir = helper.getDirname(content_inner_path)
-        for relative_inner_path, file in content.get("files_optional", {}).iteritems():
+        for relative_inner_path, file in content.get("files_optional", {}).items():
             file_inner_path = content_inner_dir + relative_inner_path
             hash_id = int(file["sha512"][0:4], 16)
             if hash_id in site.content_manager.hashfield:
@@ -232,14 +232,14 @@ class ContentDbPlugin(object):
         num_file = 0
         num_updated = 0
         num_site = 0
-        for site in self.sites.values():
+        for site in list(self.sites.values()):
             if not site.content_manager.has_optional_files:
                 continue
             if not site.settings["serving"]:
                 continue
             has_updated_hashfield = next((
                 peer
-                for peer in site.peers.itervalues()
+                for peer in site.peers.values()
                 if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated
             ), None)
@@ -248,7 +248,7 @@ class ContentDbPlugin(object):
             hashfield_peers = itertools.chain.from_iterable(
                 peer.hashfield.storage
-                for peer in site.peers.itervalues()
+                for peer in site.peers.values()
                 if peer.has_hashfield
             )
             peer_nums = collections.Counter(
@@ -270,7 +270,7 @@ class ContentDbPlugin(object):
                 updates[row["file_id"]] = peer_num
 
         self.execute("BEGIN")
-        for file_id, peer_num in updates.iteritems():
+        for file_id, peer_num in updates.items():
             self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id))
         self.execute("END")
@@ -394,7 +394,7 @@ class ContentDbPlugin(object):
 
         self.updatePeerNumbers()
 
-        site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()}
+        site_ids_reverse = {val: key for key, val in self.site_ids.items()}
         deleted_file_ids = []
         for row in self.queryDeletableFiles():
             site_address = site_ids_reverse.get(row["site_id"])
diff --git a/plugins/OptionalManager/OptionalManagerPlugin.py b/plugins/OptionalManager/OptionalManagerPlugin.py
index 9da93041..4e1b4336 100644
--- a/plugins/OptionalManager/OptionalManagerPlugin.py
+++ b/plugins/OptionalManager/OptionalManagerPlugin.py
@@ -6,7 +6,7 @@ import gevent
 
 from util import helper
 from Plugin import PluginManager
-import ContentDbPlugin
+from . import ContentDbPlugin
 
 
 # We can only import plugin host clases after the plugins are loaded
@@ -24,7 +24,7 @@ def processAccessLog():
         for site_id in access_log:
             content_db.execute(
                 "UPDATE file_optional SET time_accessed = %s WHERE ?" % now,
-                {"site_id": site_id, "inner_path": access_log[site_id].keys()}
+                {"site_id": site_id, "inner_path": list(access_log[site_id].keys())}
             )
             num += len(access_log[site_id])
         access_log.clear()
@@ -37,7 +37,7 @@ def processRequestLog():
     num = 0
     cur.execute("BEGIN")
     for site_id in request_log:
-        for inner_path, uploaded in request_log[site_id].iteritems():
+        for inner_path, uploaded in request_log[site_id].items():
             content_db.execute(
                 "UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" % uploaded,
                 {"site_id": site_id, "inner_path": inner_path}
@@ -101,7 +101,7 @@ class ContentManagerPlugin(object):
             {"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id}
         )
         row = res.fetchone()
-        if row and row[0]:
+        if row and row["is_downloaded"]:
             return True
         else:
             return False
@@ -191,7 +191,7 @@ class SitePlugin(object):
         if is_downloadable:
             return is_downloadable
 
-        for path in self.settings.get("optional_help", {}).iterkeys():
+        for path in self.settings.get("optional_help", {}).keys():
             if inner_path.startswith(path):
                 return True
diff --git a/plugins/OptionalManager/Test/TestOptionalManager.py b/plugins/OptionalManager/Test/TestOptionalManager.py
index 00a5fcb7..4325cb2c 100644
--- a/plugins/OptionalManager/Test/TestOptionalManager.py
+++ b/plugins/OptionalManager/Test/TestOptionalManager.py
@@ -1,15 +1,7 @@
-import hashlib
-import os
 import copy
-import json
-
-from cStringIO import StringIO
 
 import pytest
 
-from OptionalManager import OptionalManagerPlugin
-from util import helper
-from Crypt import CryptBitcoin
 
 @pytest.mark.usefixtures("resetSettings")
 class TestOptionalManager:
@@ -58,7 +50,7 @@ class TestOptionalManager:
         assert not file_row["is_downloaded"]
 
         # Write file from outside of ZeroNet
-        site.storage.open("testfile", "wb").write("A" * 1234)  # For quick check hash does not matter only file size
+        site.storage.open("testfile", "wb").write(b"A" * 1234)  # For quick check hash does not matter only file size
 
         hashfield_len_before = len(site.content_manager.hashfield)
         site.storage.verifyFiles(quick_check=True)
@@ -92,8 +84,8 @@ class TestOptionalManager:
         assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd")
 
         # Write files from outside of ZeroNet (For quick check hash does not matter only file size)
-        site.storage.open("testfile1", "wb").write("A" * 1234)
-        site.storage.open("testfile2", "wb").write("B" * 2345)
+        site.storage.open("testfile1", "wb").write(b"A" * 1234)
+        site.storage.open("testfile2", "wb").write(b"B" * 2345)
 
         site.storage.verifyFiles(quick_check=True)
@@ -129,7 +121,6 @@ class TestOptionalManager:
         assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1
 
     def testOptionalDelete(self, site):
-        privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
         contents = site.content_manager.contents
 
         site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
diff --git a/plugins/OptionalManager/UiWebsocketPlugin.py b/plugins/OptionalManager/UiWebsocketPlugin.py
index 94d3f501..efdfdf9d 100644
--- a/plugins/OptionalManager/UiWebsocketPlugin.py
+++ b/plugins/OptionalManager/UiWebsocketPlugin.py
@@ -1,6 +1,6 @@
 import re
 import time
-import cgi
+import html
 
 import gevent
@@ -28,7 +28,7 @@ class UiWebsocketPlugin(object):
             content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time()
             if len(content_db.my_optional_files) > 50:  # Keep only last 50
                 oldest_key = min(
-                    content_db.my_optional_files.iterkeys(),
+                    iter(content_db.my_optional_files.keys()),
                     key=(lambda key: content_db.my_optional_files[key])
                 )
                 del content_db.my_optional_files[oldest_key]
@@ -80,7 +80,7 @@ class UiWebsocketPlugin(object):
             # Add leech / seed stats
             row["peer_seed"] = 0
             row["peer_leech"] = 0
-            for peer in site.peers.itervalues():
+            for peer in site.peers.values():
                 if not peer.time_piecefields_updated or sha512 not in peer.piecefields:
                     continue
                 peer_piecefield = peer.piecefields[sha512].tostring()
@@ -212,7 +212,7 @@ class UiWebsocketPlugin(object):
         num_file = len(inner_path)
         if back == "ok":
             if num_file == 1:
-                self.cmd("notification", ["done", _["Pinned %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000])
+                self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
             else:
                 self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000])
         self.response(to, back)
@@ -224,7 +224,7 @@ class UiWebsocketPlugin(object):
         num_file = len(inner_path)
         if back == "ok":
             if num_file == 1:
-                self.cmd("notification", ["done", _["Removed pin from %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000])
+                self.cmd("notification", ["done", _["Removed pin from %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
             else:
                 self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000])
         self.response(to, back)
@@ -325,7 +325,7 @@ class UiWebsocketPlugin(object):
         self.cmd("notification", [
             "done",
             _["You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>"] %
-            (cgi.escape(title), cgi.escape(directory)),
+            (html.escape(title), html.escape(directory)),
             10000
         ])
@@ -369,10 +369,10 @@ class UiWebsocketPlugin(object):
             self.cmd(
                 "confirm",
                 [
-                    _["Help distribute all new optional files on site <b>%s</b>"] % cgi.escape(site_title),
+                    _["Help distribute all new optional files on site <b>%s</b>"] % html.escape(site_title),
                     _["Yes, I want to help!"]
                 ],
-                lambda (res): self.cbOptionalHelpAll(to, site, True)
+                lambda res: self.cbOptionalHelpAll(to, site, True)
             )
         else:
             site.settings["autodownloadoptional"] = False
diff --git a/plugins/OptionalManager/__init__.py b/plugins/OptionalManager/__init__.py
index 02969bba..1f0ad2dd 100644
--- a/plugins/OptionalManager/__init__.py
+++ b/plugins/OptionalManager/__init__.py
@@ -1 +1 @@
-import OptionalManagerPlugin
\ No newline at end of file
+from . import OptionalManagerPlugin
\ No newline at end of file
diff --git a/plugins/PeerDb/PeerDbPlugin.py b/plugins/PeerDb/PeerDbPlugin.py
index 241b5c58..86613fc7 100644
--- a/plugins/PeerDb/PeerDbPlugin.py
+++ b/plugins/PeerDb/PeerDbPlugin.py
@@ -96,8 +96,8 @@ class ContentDbPlugin(object):
             gevent.spawn_later(60*60, self.savePeers, site, spawn=True)
 
     def saveAllPeers(self):
-        for site in self.sites.values():
+        for site in list(self.sites.values()):
             try:
                 self.savePeers(site)
-            except Exception, err:
+            except Exception as err:
                 site.log.error("Save peer error: %s" % err)
diff --git a/plugins/PeerDb/__init__.py b/plugins/PeerDb/__init__.py
index 967561dc..bc8c93b9 100644
--- a/plugins/PeerDb/__init__.py
+++ b/plugins/PeerDb/__init__.py
@@ -1,2 +1,2 @@
-import PeerDbPlugin
+from . import PeerDbPlugin
diff --git a/plugins/Sidebar/SidebarPlugin.py b/plugins/Sidebar/SidebarPlugin.py
index c56a2cb4..039c32b3 100644
--- a/plugins/Sidebar/SidebarPlugin.py
+++ b/plugins/Sidebar/SidebarPlugin.py
@@ -1,14 +1,11 @@
 import re
 import os
-import cgi
+import html
 import sys
 import math
 import time
 import json
-try:
-    import cStringIO as StringIO
-except:
-    import StringIO
+import io
 
 import gevent
@@ -17,7 +14,7 @@ from Plugin import PluginManager
 from Debug import Debug
 from Translate import Translate
 from util import helper
-from ZipStream import ZipStream
+from .ZipStream import ZipStream
 
 plugin_dir = "plugins/Sidebar"
 media_dir = plugin_dir + "/media"
@@ -46,7 +43,7 @@ class UiRequestPlugin(object):
                 from Debug import DebugMedia
                 DebugMedia.merge(plugin_media_file)
             if ext == "js":
-                yield _.translateData(open(plugin_media_file).read())
+                yield _.translateData(open(plugin_media_file).read()).encode("utf8")
             else:
                 for part in self.actionFile(plugin_media_file, send_header=False):
                     yield part
@@ -84,15 +81,13 @@ class UiRequestPlugin(object):
             yield data
 
 
-
-
 @PluginManager.registerTo("UiWebsocket")
 class UiWebsocketPlugin(object):
     def sidebarRenderPeerStats(self, body, site):
-        connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected])
-        connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")])
-        onion = len([peer_id for peer_id in site.peers.keys() if ".onion" in peer_id])
-        local = len([peer for peer in site.peers.values() if helper.isPrivateIp(peer.ip)])
+        connected = len([peer for peer in list(site.peers.values()) if peer.connection and peer.connection.connected])
+        connectable = len([peer_id for peer_id in list(site.peers.keys()) if not peer_id.endswith(":0")])
+        onion = len([peer_id for peer_id in list(site.peers.keys()) if ".onion" in peer_id])
+        local = len([peer for peer in list(site.peers.values()) if helper.isPrivateIp(peer.ip)])
         peers_total = len(site.peers)
 
         # Add myself
@@ -111,7 +106,7 @@ class UiWebsocketPlugin(object):
             percent_connectable = percent_connected = percent_onion = 0
 
         if local:
-            local_html = _(u"<li class='color-yellow'><span>{_[Local]}:</span><b>{local}</b></li>")
+            local_html = _("<li class='color-yellow'><span>{_[Local]}:</span><b>{local}</b></li>")
        else:
             local_html = ""
@@ -122,7 +117,7 @@ class UiWebsocketPlugin(object):
             ",".join(peer_ips)
         )
 
-        body.append(_(u"""
+        body.append(_("""
            <li>
             <label>
              {_[Peers]}
@@ -155,7 +150,7 @@ class UiWebsocketPlugin(object):
             percent_recv = 0.5
             percent_sent = 0.5
 
-        body.append(_(u"""
+        body.append(_("""
            <li>
             <label>{_[Data transfer]}</label>
             <ul class='graph graph-stacked'>
@@ -170,7 +165,7 @@ class UiWebsocketPlugin(object):
        """))
 
     def sidebarRenderFileStats(self, body, site):
-        body.append(_(u"""
+        body.append(_("""
            <li>
             <label>
              {_[Files]}
@@ -198,7 +193,7 @@ class UiWebsocketPlugin(object):
             content = site.content_manager.contents[inner_path]
             if "files" not in content or content["files"] is None:
                 continue
-            for file_name, file_details in content["files"].items():
+            for file_name, file_details in list(content["files"].items()):
                 size_total += file_details["size"]
                 ext = file_name.split(".")[-1]
                 size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"]
@@ -236,7 +231,7 @@ class UiWebsocketPlugin(object):
             percent = 100 * (float(size) / size_total)
             percent = math.floor(percent * 100) / 100  # Floor to 2 digits
             body.append(
-                u"""<li style='width: %.2f%%' class='%s back-%s' title="%s"></li>""" %
+                """<li style='width: %.2f%%' class='%s back-%s' title="%s"></li>""" %
                 (percent, _[extension], color, _[extension])
             )
@@ -262,7 +257,7 @@ class UiWebsocketPlugin(object):
             else:
                 size_formatted = "%.0fkB" % (size / 1024)
 
-            body.append(u"<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, _[title], size_formatted))
+            body.append("<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, _[title], size_formatted))
 
         body.append("</ul></li>")
@@ -272,9 +267,9 @@ class UiWebsocketPlugin(object):
         size_limit = site.getSizeLimit()
         percent_used = size / size_limit
 
-        body.append(_(u"""
+        body.append(_("""
            <li>
-            <label>{_[Size limit]} <small>({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,d}MB)</small></label>
+            <label>{_[Size limit]} <small>({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,.0f}MB)</small></label>
             <input type='text' class='text text-num' value="{size_limit}" id='input-sitelimit'/><span class='text-post'>MB</span>
             <a href='#Set' class='button' id='button-sitelimit'>{_[Set]}</a>
            </li>
@@ -292,7 +287,7 @@ class UiWebsocketPlugin(object):
         size_formatted_total = size_total / 1024 / 1024
         size_formatted_downloaded = size_downloaded / 1024 / 1024
 
-        body.append(_(u"""
+        body.append(_("""
            <li>
             <label>{_[Optional files]}</label>
             <ul class='graph'>
@@ -314,14 +309,14 @@ class UiWebsocketPlugin(object):
         else:
             checked = ""
 
-        body.append(_(u"""
+        body.append(_("""
            <li>
             <label>{_[Download and help distribute all files]}</label>
             <input type="checkbox" class="checkbox" id="checkbox-autodownloadoptional" {checked}/><div class="checkbox-skin"></div>
        """))
 
         autodownload_bigfile_size_limit = int(site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit))
-        body.append(_(u"""
+        body.append(_("""
            <div class='settings-autodownloadoptional'>
             <label>{_[Auto download big file size limit]}</label>
             <input type='text' class='text text-num' value="{autodownload_bigfile_size_limit}" id='input-autodownload_bigfile_size_limit'/><span class='text-post'>MB</span>
@@ -331,16 +326,16 @@ class UiWebsocketPlugin(object):
         body.append("</li>")
 
     def sidebarRenderBadFiles(self, body, site):
-        body.append(_(u"""
+        body.append(_("""
            <li>
             <label>{_[Needs to be updated]}:</label>
             <ul class='filelist'>
        """))
 
         i = 0
-        for bad_file, tries in site.bad_files.iteritems():
+        for bad_file, tries in site.bad_files.items():
             i += 1
-            body.append(_(u"""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""", {
+            body.append(_("""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""", {
                 "bad_file_path": bad_file,
                 "bad_filename": helper.getFilename(bad_file),
                 "tries": _.pluralize(tries, "{} try", "{} tries")
             }))
@@ -350,7 +345,7 @@ class UiWebsocketPlugin(object):
 
         if len(site.bad_files) > 30:
             num_bad_files = len(site.bad_files) - 30
-            body.append(_(u"""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))
+            body.append(_("""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))
 
         body.append("""
            </ul>
@@ -363,11 +358,11 @@ class UiWebsocketPlugin(object):
             size = float(site.storage.getSize(inner_path)) / 1024
             feeds = len(site.storage.db.schema.get("feeds", {}))
         else:
-            inner_path = _[u"No database found"]
+            inner_path = _["No database found"]
             size = 0.0
             feeds = 0
 
-        body.append(_(u"""
+        body.append(_("""
            <li>
             <label>{_[Database]} <small>({size:.2f}kB, {_[search feeds]}: {_[{feeds} query]})</small></label>
             <div class='flex'>
@@ -385,14 +380,14 @@ class UiWebsocketPlugin(object):
             quota = rules["max_size"] / 1024
             try:
                 content = site.content_manager.contents["data/users/%s/content.json" % auth_address]
site.content_manager.contents["data/users/%s/content.json" % auth_address] - used = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()]) + used = len(json.dumps(content)) + sum([file["size"] for file in list(content["files"].values())]) except: used = 0 used = used / 1024 else: quota = used = 0 - body.append(_(u""" + body.append(_(""" <li> <label>{_[Identity address]} <small>({_[limit used]}: {used:.2f}kB / {quota:.2f}kB)</small></label> <div class='flex'> @@ -411,7 +406,7 @@ class UiWebsocketPlugin(object): class_pause = "hidden" class_resume = "" - body.append(_(u""" + body.append(_(""" <li> <label>{_[Site control]}</label> <a href='#Update' class='button noupdate' id='button-update'>{_[Update]}</a> @@ -423,7 +418,7 @@ class UiWebsocketPlugin(object): donate_key = site.content_manager.contents.get("content.json", {}).get("donate", True) site_address = self.site.address - body.append(_(u""" + body.append(_(""" <li> <label>{_[Site address]}</label><br> <div class='flex'> @@ -431,8 +426,8 @@ class UiWebsocketPlugin(object): """)) if donate_key == False or donate_key == "": pass - elif (type(donate_key) == str or type(donate_key) == unicode) and len(donate_key) > 0: - body.append(_(u""" + elif (type(donate_key) == str or type(donate_key) == str) and len(donate_key) > 0: + body.append(_(""" </div> </li> <li> @@ -441,10 +436,10 @@ class UiWebsocketPlugin(object): {donate_key} """)) else: - body.append(_(u""" + body.append(_(""" <a href='bitcoin:{site_address}' class='button' id='button-donate'>{_[Donate]}</a> """)) - body.append(_(u""" + body.append(_(""" </div> </li> """)) @@ -455,7 +450,7 @@ class UiWebsocketPlugin(object): else: checked = "" - body.append(_(u""" + body.append(_(""" <h2 class='owned-title'>{_[This is my site]}</h2> <input type="checkbox" class="checkbox" id="checkbox-owned" {checked}/><div class="checkbox-skin"></div> """)) @@ -464,7 +459,7 @@ class UiWebsocketPlugin(object): title = site.content_manager.contents.get("content.json", {}).get("title", "") description = site.content_manager.contents.get("content.json", {}).get("description", "") - body.append(_(u""" + body.append(_(""" <li> <label for='settings-title'>{_[Site title]}</label> <input type='text' class='text' value="{title}" id='settings-title'/> @@ -483,17 +478,17 @@ class UiWebsocketPlugin(object): def sidebarRenderContents(self, body, site): has_privatekey = bool(self.user.getSiteData(site.address, create=False).get("privatekey")) if has_privatekey: - tag_privatekey = _(u"{_[Private key saved.]} <a href='#Forgot+private+key' id='privatekey-forgot' class='link-right'>{_[Forgot]}</a>") + tag_privatekey = _("{_[Private key saved.]} <a href='#Forgot+private+key' id='privatekey-forgot' class='link-right'>{_[Forgot]}</a>") else: - tag_privatekey = _(u"<a href='#Add+private+key' id='privatekey-add' class='link-right'>{_[Add saved private key]}</a>") + tag_privatekey = _("<a href='#Add+private+key' id='privatekey-add' class='link-right'>{_[Add saved private key]}</a>") - body.append(_(u""" + body.append(_(""" <li> <label>{_[Content publishing]} <small class='label-right'>{tag_privatekey}</small></label> """.replace("{tag_privatekey}", tag_privatekey))) # Choose content you want to sign - body.append(_(u""" + body.append(_(""" <div class='flex'> <input type='text' class='text' value="content.json" id='input-contents'/> <a href='#Sign-and-Publish' id='button-sign-publish' class='button'>{_[Sign and publish]}</a> @@ -502,8 +497,8 @@ class UiWebsocketPlugin(object): """)) contents = 
["content.json"] - contents += site.content_manager.contents.get("content.json", {}).get("includes", {}).keys() - body.append(_(u"<div class='contents'>{_[Choose]}: ")) + contents += list(site.content_manager.contents.get("content.json", {}).get("includes", {}).keys()) + body.append(_("<div class='contents'>{_[Choose]}: ")) for content in contents: body.append(_("<a href='{content}' class='contents-content'>{content}</a> ")) body.append("</div>") @@ -520,7 +515,7 @@ class UiWebsocketPlugin(object): body.append("<div>") body.append("<a href='#Close' class='close'>×</a>") - body.append("<h1>%s</h1>" % cgi.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True)) + body.append("<h1>%s</h1>" % html.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True)) body.append("<div class='globe loading'></div>") @@ -554,7 +549,6 @@ class UiWebsocketPlugin(object): self.response(to, "".join(body)) def downloadGeoLiteDb(self, db_path): - import urllib import gzip import shutil from util import helper @@ -566,12 +560,13 @@ class UiWebsocketPlugin(object): "https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz" ] for db_url in db_urls: + downloadl_err = None try: # Download response = helper.httpRequest(db_url) data_size = response.getheader('content-length') data_recv = 0 - data = StringIO.StringIO() + data = io.BytesIO() while True: buff = response.read(1024 * 512) if not buff: @@ -592,11 +587,12 @@ class UiWebsocketPlugin(object): time.sleep(2) # Wait for notify animation return True except Exception as err: + download_err = err self.log.error("Error downloading %s: %s" % (db_url, err)) pass self.cmd("progress", [ "geolite-info", - _["GeoLite2 City database download error: {}!<br>Please download manually and unpack to data dir:<br>{}"].format(err, db_urls[0]), + _["GeoLite2 City database download error: {}!<br>Please download manually and unpack to data dir:<br>{}"].format(download_err, db_urls[0]), -100 ]) @@ -629,14 +625,14 @@ class UiWebsocketPlugin(object): return loc def getPeerLocations(self, peers): - import maxminddb + from . 
import maxminddb db_path = config.data_dir + '/GeoLite2-City.mmdb' if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: if not self.downloadGeoLiteDb(db_path): return False geodb = maxminddb.open_database(db_path) - peers = peers.values() + peers = list(peers.values()) # Place bars peer_locations = [] placed = {} # Already placed bars here @@ -704,9 +700,9 @@ class UiWebsocketPlugin(object): globe_data += [peer_location["lat"], peer_location["lon"], height] self.response(to, globe_data) - except Exception, err: + except Exception as err: self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err)) - self.response(to, {"error": err}) + self.response(to, {"error": str(err)}) def actionSiteSetOwned(self, to, owned): permissions = self.getPermissions(to) diff --git a/plugins/Sidebar/ZipStream.py b/plugins/Sidebar/ZipStream.py index 9d7de241..f4278eac 100644 --- a/plugins/Sidebar/ZipStream.py +++ b/plugins/Sidebar/ZipStream.py @@ -1,14 +1,14 @@ -import cStringIO as StringIO +import io import os import zipfile -class ZipStream(file): +class ZipStream(object): def __init__(self, dir_path): self.dir_path = dir_path self.pos = 0 - self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64 = True) - self.buff = StringIO.StringIO() + self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) + self.buff = io.BytesIO() self.file_list = self.getFileList() def getFileList(self): diff --git a/plugins/Sidebar/__init__.py b/plugins/Sidebar/__init__.py index 8b61cb4a..f2669d96 100644 --- a/plugins/Sidebar/__init__.py +++ b/plugins/Sidebar/__init__.py @@ -1 +1 @@ -import SidebarPlugin \ No newline at end of file +from . import SidebarPlugin \ No newline at end of file diff --git a/plugins/Stats/__init__.py b/plugins/Stats/__init__.py index 90bd9d6e..791fb6e0 100644 --- a/plugins/Stats/__init__.py +++ b/plugins/Stats/__init__.py @@ -1 +1 @@ -import StatsPlugin \ No newline at end of file +from . 
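
ZipStream survives the move to Python 3 by subclassing object instead of the removed file builtin and by buffering its zip output in io.BytesIO. In isolation, the buffer-backed zipfile trick looks like this (a minimal sketch, not the plugin's full read/seek protocol):

    import io
    import zipfile

    # io.BytesIO replaces cStringIO as the in-memory byte buffer.
    buff = io.BytesIO()
    zf = zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
    zf.writestr("content.json", b'{"title": "example"}')
    zf.close()

    data = buff.getvalue()          # the finished archive as bytes
    print(len(data), data[:2])      # starts with the b'PK' zip magic
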
diff --git a/plugins/TranslateSite/TranslateSitePlugin.py b/plugins/TranslateSite/TranslateSitePlugin.py
index 6eefbb77..67d7ffc3 100644
--- a/plugins/TranslateSite/TranslateSitePlugin.py
+++ b/plugins/TranslateSite/TranslateSitePlugin.py
@@ -15,7 +15,7 @@
         path_parts = self.parsePath(path)
         kwargs["header_length"] = False
         file_generator = super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs)
-        if "next" in dir(file_generator):  # File found and generator returned
+        if "__next__" in dir(file_generator):  # File found and generator returned
             site = self.server.sites.get(path_parts["address"])
             return self.actionPatchFile(site, path_parts["inner_path"], file_generator)
         else:
@@ -28,10 +28,10 @@
         file_generator = super(UiRequestPlugin, self).actionUiMedia(path)
         if translate.lang != "en" and path.endswith(".js"):
             s = time.time()
-            data = "".join(list(file_generator))
-            data = translate.translateData(data)
+            data = b"".join(list(file_generator))
+            data = translate.translateData(data.decode("utf8"))
             self.log.debug("Patched %s (%s bytes) in %.3fs" % (path, len(data), time.time() - s))
-            return iter([data])
+            return iter([data.encode("utf8")])
         else:
             return file_generator
@@ -49,12 +49,12 @@
         if not lang_file_exist or inner_path not in content_json.get("translate", []):
             for part in file_generator:
                 if inner_path.endswith(".html"):
-                    yield part.replace("lang={lang}", "lang=" + str(translate.lang))  # lang get parameter to .js file to avoid cache
+                    yield part.replace(b"lang={lang}", b"lang=%s" % translate.lang.encode("utf8"))  # lang get parameter to .js file to avoid cache
                 else:
                     yield part
         else:
             s = time.time()
-            data = "".join(list(file_generator))
+            data = b"".join(list(file_generator)).decode("utf8")
 
             # if site.content_manager.contents["content.json"]["files"].get(lang_file):
             site.needFile(lang_file, priority=10)
@@ -63,9 +63,9 @@
                     data = translate.translateData(data, site.storage.loadJson(lang_file), "js")
                 else:
                     data = translate.translateData(data, site.storage.loadJson(lang_file), "html")
-                data = data.replace("lang={lang}", "lang=" + str(translate.lang))  # lang get parameter to .js file to avoid cache
+                data = data.replace("lang={lang}", "lang=%s" % translate.lang)  # lang get parameter to .js file to avoid cache
             except Exception as err:
                 site.log.error("Error loading translation file %s: %s" % (lang_file, err))
 
             self.log.debug("Patched %s (%s bytes) in %.3fs" % (inner_path, len(data), time.time() - s))
-            yield data
+            yield data.encode("utf8")
diff --git a/plugins/TranslateSite/__init__.py b/plugins/TranslateSite/__init__.py
index 0b50ddc8..1ebbe31f 100644
--- a/plugins/TranslateSite/__init__.py
+++ b/plugins/TranslateSite/__init__.py
@@ -1 +1 @@
-import TranslateSitePlugin
+from . import TranslateSitePlugin
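
The TranslateSite hunks all enforce the same Python 3 rule: the media generators carry bytes, while Translate works on str, so the plugin now decodes, patches, and re-encodes. A self-contained sketch of that round-trip (translate_data and its behavior are hypothetical stand-ins, not the plugin's API):

    def translate_data(text):
        # stand-in for Translate.translateData()
        return text.replace("Hello", "Hallo")

    def patch_file(chunks, lang="de"):
        data = b"".join(chunks).decode("utf8")   # bytes from disk -> str
        data = translate_data(data)              # translation works on str
        data = data.replace("lang={lang}", "lang=%s" % lang)
        yield data.encode("utf8")                # back to bytes for the wire

    print(b"".join(patch_file([b"Hello lang={lang}"])))
    # b'Hallo lang=de'
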
diff --git a/plugins/Trayicon/TrayiconPlugin.py b/plugins/Trayicon/TrayiconPlugin.py
index e32b5a6a..a87da714 100644
--- a/plugins/Trayicon/TrayiconPlugin.py
+++ b/plugins/Trayicon/TrayiconPlugin.py
@@ -17,7 +17,7 @@ class ActionsPlugin(object):
 
     def main(self):
         global notificationicon, winfolders
-        from lib import notificationicon, winfolders
+        from .lib import notificationicon, winfolders
         import gevent.threadpool
 
         self.main = sys.modules["main"]
@@ -25,7 +25,7 @@
         fs_encoding = sys.getfilesystemencoding()
 
         icon = notificationicon.NotificationIcon(
-            os.path.join(os.path.dirname(os.path.abspath(__file__).decode(fs_encoding)), 'trayicon.ico'),
+            os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'),
             "ZeroNet %s" % config.version
         )
         self.icon = icon
@@ -137,7 +137,7 @@
             cmd += ' --open_browser ""'
         cmd = cmd.decode(sys.getfilesystemencoding())
 
-        return u"""
+        return """
             @echo off
             chcp 65001 > nul
             set PYTHONIOENCODING=utf-8
diff --git a/plugins/Trayicon/__init__.py b/plugins/Trayicon/__init__.py
index 5b584962..918f76dc 100644
--- a/plugins/Trayicon/__init__.py
+++ b/plugins/Trayicon/__init__.py
@@ -1,4 +1,4 @@
 import sys
 
 if sys.platform == 'win32':
-    import TrayiconPlugin
\ No newline at end of file
+    from . import TrayiconPlugin
\ No newline at end of file
diff --git a/plugins/Trayicon/languages/es.json b/plugins/Trayicon/languages/es.json
index 4cdc5d1f..6710c3c5 100644
--- a/plugins/Trayicon/languages/es.json
+++ b/plugins/Trayicon/languages/es.json
@@ -7,7 +7,7 @@
  "Quit": "Sair",
  "(active)": "(activo)",
  "(passive)": "(pasivo)",
- "Connections: %s": "Conecciones: %s",
+ "Connections: %s": "Conexiones: %s",
  "Received: %.2f MB | Sent: %.2f MB": "Recibido: %.2f MB | Enviado: %.2f MB",
  "Show console window": "Mostrar consola",
  "Start ZeroNet when Windows starts": "Iniciar Zeronet cuando inicie Windows"
diff --git a/plugins/Trayicon/lib/notificationicon.py b/plugins/Trayicon/lib/notificationicon.py
index 128c3beb..57a4ddd6 100644
--- a/plugins/Trayicon/lib/notificationicon.py
+++ b/plugins/Trayicon/lib/notificationicon.py
@@ -190,27 +190,27 @@ DefWindowProc = ctypes.windll.user32.DefWindowProcW
 DefWindowProc.restype = ctypes.c_int
 DefWindowProc.argtypes = [ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM]
 
-WS_OVERLAPPED = 0x00000000L
-WS_POPUP = 0x80000000L
-WS_CHILD = 0x40000000L
-WS_MINIMIZE = 0x20000000L
-WS_VISIBLE = 0x10000000L
-WS_DISABLED = 0x08000000L
-WS_CLIPSIBLINGS = 0x04000000L
-WS_CLIPCHILDREN = 0x02000000L
-WS_MAXIMIZE = 0x01000000L
-WS_CAPTION = 0x00C00000L
-WS_BORDER = 0x00800000L
-WS_DLGFRAME = 0x00400000L
-WS_VSCROLL = 0x00200000L
-WS_HSCROLL = 0x00100000L
-WS_SYSMENU = 0x00080000L
-WS_THICKFRAME = 0x00040000L
-WS_GROUP = 0x00020000L
-WS_TABSTOP = 0x00010000L
+WS_OVERLAPPED = 0x00000000
+WS_POPUP = 0x80000000
+WS_CHILD = 0x40000000
+WS_MINIMIZE = 0x20000000
+WS_VISIBLE = 0x10000000
+WS_DISABLED = 0x08000000
+WS_CLIPSIBLINGS = 0x04000000
+WS_CLIPCHILDREN = 0x02000000
+WS_MAXIMIZE = 0x01000000
+WS_CAPTION = 0x00C00000
+WS_BORDER = 0x00800000
+WS_DLGFRAME = 0x00400000
+WS_VSCROLL = 0x00200000
+WS_HSCROLL = 0x00100000
+WS_SYSMENU = 0x00080000
+WS_THICKFRAME = 0x00040000
+WS_GROUP = 0x00020000
+WS_TABSTOP = 0x00010000
 
-WS_MINIMIZEBOX = 0x00020000L
-WS_MAXIMIZEBOX = 0x00010000L
+WS_MINIMIZEBOX = 0x00020000
+WS_MAXIMIZEBOX = 0x00010000
 
 WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED |
                        WS_CAPTION |
@@ -497,7 +497,7 @@ DispatchMessage.argtypes = [ctypes.POINTER(MSG)]
 
 def LoadIcon(iconfilename, small=False):
     return LoadImage(0,
-                     unicode(iconfilename),
+                     str(iconfilename),
                      IMAGE_ICON,
                      16 if small else 0,
                      16 if small else 0,
@@ -506,15 +506,15 @@
 
 class NotificationIcon(object):
     def __init__(self, iconfilename, tooltip=None):
-        assert os.path.isfile(unicode(iconfilename)), "{} doesn't exist".format(iconfilename)
-        self._iconfile = unicode(iconfilename)
+        assert os.path.isfile(str(iconfilename)), "{} doesn't exist".format(iconfilename)
+        self._iconfile = str(iconfilename)
         self._hicon = LoadIcon(self._iconfile, True)
         assert self._hicon, "Failed to load {}".format(iconfilename)
         #self._pumpqueue = Queue.Queue()
         self._die = False
         self._timerid = None
         self._uid = uuid.uuid4()
-        self._tooltip = unicode(tooltip) if tooltip else u''
+        self._tooltip = str(tooltip) if tooltip else ''
         #self._thread = threading.Thread(target=self._run)
         #self._thread.start()
         self._info_bubble = None
@@ -525,7 +525,7 @@
         if self._info_bubble:
             info_bubble = self._info_bubble
             self._info_bubble = None
-            message = unicode(self._info_bubble)
+            message = str(self._info_bubble)
             iconinfo.uFlags |= NIF_INFO
             iconinfo.szInfo = message
             iconinfo.szInfoTitle = message
@@ -535,7 +535,7 @@
 
     def _run(self):
-        self.WM_TASKBARCREATED = ctypes.windll.user32.RegisterWindowMessageW(u'TaskbarCreated')
+        self.WM_TASKBARCREATED = ctypes.windll.user32.RegisterWindowMessageW('TaskbarCreated')
 
         self._windowproc = WNDPROC(self._callback)
         self._hwnd = GenerateDummyWindow(self._windowproc, str(self._uid))
@@ -562,11 +562,11 @@
                 ret = GetMessage(ctypes.pointer(message), 0, 0, 0)
                 TranslateMessage(ctypes.pointer(message))
                 DispatchMessage(ctypes.pointer(message))
-            except Exception, err:
+            except Exception as err:
                 # print "NotificationIcon error", err, message
                 message = MSG()
             time.sleep(0.125)
-        print "Icon thread stopped, removing icon..."
+        print("Icon thread stopped, removing icon...")
 
         Shell_NotifyIcon(NIM_DELETE, ctypes.cast(ctypes.pointer(iconinfo), ctypes.POINTER(NOTIFYICONDATA)))
         ctypes.windll.user32.DestroyWindow(self._hwnd)
@@ -586,7 +586,7 @@
         item_map = {}
         for fs in self.items:
             iidx += 1
-            if isinstance(fs, basestring):
+            if isinstance(fs, str):
                 if fs and not fs.strip('-_='):
                     AppendMenu(menu, MF_SEPARATOR, iidx, fs)
                 else:
@@ -595,7 +595,7 @@
                 if callable(fs[0]):
                     itemstring = fs[0]()
                 else:
-                    itemstring = unicode(fs[0])
+                    itemstring = str(fs[0])
                 flags = MF_STRING
                 if itemstring.startswith("!"):
                     itemstring = itemstring[1:]
@@ -660,8 +660,8 @@
             time.sleep(0.2)
             try:
                 Shell_NotifyIcon(NIM_DELETE, self.iconinfo)
-            except Exception, err:
-                print "Icon remove error", err
+            except Exception as err:
+                print("Icon remove error", err)
             ctypes.windll.user32.DestroyWindow(self._hwnd)
             ctypes.windll.user32.DestroyIcon(self._hicon)
@@ -693,7 +693,7 @@
 
     def greet():
         ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0)
-        print "Hello"
+        print("Hello")
 
     def quit():
         ni._die = True
@@ -724,6 +724,6 @@
 
     @atexit.register
     def goodbye():
-        print "You are now leaving the Python sector."
+        print("You are now leaving the Python sector.")
 
     ni._run()
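
Most of the notificationicon churn above follows from Python 3 unifying its number and string types: long literals lose the L suffix, and unicode()/basestring collapse into str, with bytes as a separate non-text type. A compact illustration (not from the patch):

    for value in ["menu item", b"raw bytes", 0x80000000]:
        if isinstance(value, str):        # Py2 would use basestring here
            print("text:", value)
        elif isinstance(value, bytes):    # bytes is no longer a str alias
            print("binary:", value.decode("ascii"))
        else:
            print("number:", hex(value))  # 0x80000000, no trailing 'L'
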
+ print("You are now leaving the Python sector.") ni._run() diff --git a/plugins/Trayicon/lib/winfolders.py b/plugins/Trayicon/lib/winfolders.py index d28efc1a..75437c19 100644 --- a/plugins/Trayicon/lib/winfolders.py +++ b/plugins/Trayicon/lib/winfolders.py @@ -3,14 +3,15 @@ import specialfolders start_programs = specialfolders.get(specialfolders.PROGRAMS) -Code is public domain, do with it what you will. +Code is public domain, do with it what you will. Luke Pinner - Environment.gov.au, 2010 February 10 ''' #Imports use _syntax to mask them from autocomplete IDE's import ctypes as _ctypes -from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH, create_unicode_buffer as _cub +from ctypes import create_unicode_buffer as _cub +from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH _SHGetFolderPath = _ctypes.windll.shell32.SHGetFolderPathW #public special folder constants @@ -49,5 +50,5 @@ def get(intFolder): if __name__ == "__main__": import os - print get(STARTUP) + print(get(STARTUP)) open(get(STARTUP)+"\\zeronet.cmd", "w").write("cd /D %s\r\nzeronet.py" % os.getcwd()) \ No newline at end of file diff --git a/plugins/UiConfig/UiConfigPlugin.py b/plugins/UiConfig/UiConfigPlugin.py index 3610d414..a30f083f 100644 --- a/plugins/UiConfig/UiConfigPlugin.py +++ b/plugins/UiConfig/UiConfigPlugin.py @@ -1,7 +1,8 @@ +import io + from Plugin import PluginManager from Config import config from Translate import Translate -from cStringIO import StringIO if "_" not in locals(): @@ -47,7 +48,7 @@ class UiRequestPlugin(object): else: data = open(file_path).read() - return self.actionFile(file_path, file_obj=StringIO(data), file_size=len(data)) + return self.actionFile(file_path, file_obj=io.BytesIO(data), file_size=len(data)) else: return super(UiRequestPlugin, self).actionUiMedia(path) @@ -58,7 +59,7 @@ class UiWebsocketPlugin(object): back = {} config_values = vars(config.arguments) config_values.update(config.pending_changes) - for key, val in config_values.iteritems(): + for key, val in config_values.items(): if key not in config.keys_api_change_allowed: continue is_pending = key in config.pending_changes diff --git a/plugins/UiConfig/__init__.py b/plugins/UiConfig/__init__.py index 3c48da61..d4892e03 100644 --- a/plugins/UiConfig/__init__.py +++ b/plugins/UiConfig/__init__.py @@ -1 +1 @@ -import UiConfigPlugin +from . 
diff --git a/plugins/disabled-Bootstrapper/BootstrapperPlugin.py b/plugins/disabled-Bootstrapper/BootstrapperPlugin.py
index ba6d1e23..a7ecbdba 100644
--- a/plugins/disabled-Bootstrapper/BootstrapperPlugin.py
+++ b/plugins/disabled-Bootstrapper/BootstrapperPlugin.py
@@ -3,7 +3,7 @@ import time
 
 from util import helper
 
 from Plugin import PluginManager
-from BootstrapperDb import BootstrapperDb
+from .BootstrapperDb import BootstrapperDb
 from Crypt import CryptRsa
 from Config import config
@@ -70,7 +70,7 @@ class FileRequestPlugin(object):
 
         hashes_changed = 0
         db.execute("BEGIN")
-        for onion, onion_hashes in onion_to_hash.iteritems():
+        for onion, onion_hashes in onion_to_hash.items():
             hashes_changed += db.peerAnnounce(
                 ip_type="onion",
                 address=onion,
@@ -113,7 +113,7 @@ class FileRequestPlugin(object):
 
             hash_peers = db.peerList(
                 hash,
-                address=self.connection.ip, onions=onion_to_hash.keys(), port=params["port"],
+                address=self.connection.ip, onions=list(onion_to_hash.keys()), port=params["port"],
                 limit=min(limit, params["need_num"]), need_types=params["need_types"], order=order
             )
             if "ip4" in params["need_types"]:  # Backward compatibility
diff --git a/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py b/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py
index d99f8ea7..116de193 100644
--- a/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py
+++ b/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py
@@ -78,7 +78,7 @@ class TestBootstrapper:
         assert len(res["peers"][0][ip_type]) == 1
 
         # Test DB cleanup
-        assert map(lambda row: row[0], bootstrapper_db.execute("SELECT address FROM peer").fetchall()) == [file_server.ip_external]  # 127.0.0.1 never get added to db
+        assert [row[0] for row in bootstrapper_db.execute("SELECT address FROM peer").fetchall()] == [file_server.ip_external]  # 127.0.0.1 never get added to db
 
         # Delete peers
         bootstrapper_db.execute("DELETE FROM peer WHERE address = ?", [file_server.ip_external])
diff --git a/plugins/disabled-Bootstrapper/__init__.py b/plugins/disabled-Bootstrapper/__init__.py
index ca533eac..cce30eea 100644
--- a/plugins/disabled-Bootstrapper/__init__.py
+++ b/plugins/disabled-Bootstrapper/__init__.py
@@ -1 +1 @@
-import BootstrapperPlugin
\ No newline at end of file
+from . import BootstrapperPlugin
\ No newline at end of file
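
The TestBootstrapper change is the classic Python 3 trap that map() and filter() now return lazy iterators, so comparing their result to a list is always False. Sketch:

    rows = [("1.2.3.4",), ("5.6.7.8",)]

    mapped = map(lambda row: row[0], rows)
    print(mapped == ["1.2.3.4", "5.6.7.8"])     # False: map object != list

    # Materialize it, or use a list comprehension as the patch does:
    print(list(mapped) == ["1.2.3.4", "5.6.7.8"])               # True
    print([row[0] for row in rows] == ["1.2.3.4", "5.6.7.8"])   # True
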
diff --git a/plugins/disabled-Dnschain/SiteManagerPlugin.py b/plugins/disabled-Dnschain/SiteManagerPlugin.py
index a5122ec1..8b9508f1 100644
--- a/plugins/disabled-Dnschain/SiteManagerPlugin.py
+++ b/plugins/disabled-Dnschain/SiteManagerPlugin.py
@@ -54,7 +54,7 @@ class SiteManagerPlugin(object):
             res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read()
             data = json.loads(res)["data"]["value"]
             if "zeronet" in data:
-                for key, val in data["zeronet"].iteritems():
+                for key, val in data["zeronet"].items():
                     self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5]  # Cache for 5 hours
                 self.saveDnsCache()
                 return data["zeronet"].get(sub_domain)
@@ -76,7 +76,7 @@ class SiteManagerPlugin(object):
         with gevent.Timeout(5, Exception("Timeout: 5s")):
             res = Http.get("https://dnschain.info/bit/d/%s" % re.sub(r"\.bit$", "", top_domain)).read()
             data = json.loads(res)["value"]
-            for key, val in data["zeronet"].iteritems():
+            for key, val in data["zeronet"].items():
                 self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5]  # Cache for 5 hours
             self.saveDnsCache()
             return data["zeronet"].get(sub_domain)
diff --git a/plugins/disabled-DonationMessage/__init__.py b/plugins/disabled-DonationMessage/__init__.py
index f8dcae2f..1d4b47c3 100644
--- a/plugins/disabled-DonationMessage/__init__.py
+++ b/plugins/disabled-DonationMessage/__init__.py
@@ -1 +1 @@
-import DonationMessagePlugin
+from . import DonationMessagePlugin
diff --git a/plugins/disabled-Multiuser/MultiuserPlugin.py b/plugins/disabled-Multiuser/MultiuserPlugin.py
index 8e70d3e3..ee4edd7c 100644
--- a/plugins/disabled-Multiuser/MultiuserPlugin.py
+++ b/plugins/disabled-Multiuser/MultiuserPlugin.py
@@ -5,11 +5,11 @@ import json
 
 from Config import config
 from Plugin import PluginManager
 from Crypt import CryptBitcoin
-import UserPlugin
+from . import UserPlugin
 
 try:
     local_master_addresses = set(json.load(open("%s/users.json" % config.data_dir)).keys())  # Users in users.json
-except Exception, err:
+except Exception as err:
     local_master_addresses = set()
 
@@ -59,7 +59,7 @@ class UiRequestPlugin(object):
                 return False
 
         elif loggedin:
-            back = back_generator.next()
+            back = next(back_generator)
             inject_html = """
                 <!-- Multiser plugin -->
                 <script nonce="{script_nonce}">
diff --git a/plugins/disabled-Multiuser/__init__.py b/plugins/disabled-Multiuser/__init__.py
index 154d6008..c56ddf84 100644
--- a/plugins/disabled-Multiuser/__init__.py
+++ b/plugins/disabled-Multiuser/__init__.py
@@ -1 +1 @@
-import MultiuserPlugin
+from . import MultiuserPlugin
diff --git a/plugins/disabled-StemPort/StemPortPlugin.py b/plugins/disabled-StemPort/StemPortPlugin.py
index 3a3787c7..c53d38e6 100644
--- a/plugins/disabled-StemPort/StemPortPlugin.py
+++ b/plugins/disabled-StemPort/StemPortPlugin.py
@@ -16,9 +16,9 @@ if config.tor != "disable":
     monkey.patch_time()
     monkey.patch_socket(dns=False)
     monkey.patch_thread()
-    print "Stem Port Plugin: modules are patched."
+    print("Stem Port Plugin: modules are patched.")
 else:
-    print "Stem Port Plugin: Tor mode disabled. Module patching skipped."
+    print("Stem Port Plugin: Tor mode disabled. Module patching skipped.")
 
 
 class PatchedControlPort(ControlPort):
@@ -66,14 +66,14 @@ class TorManagerPlugin(object):
             controller = from_port(port=self.port)
             controller.authenticate()
             self.controller = controller
-            self.status = u"Connected (via Stem)"
-        except Exception, err:
+            self.status = "Connected (via Stem)"
+        except Exception as err:
             print("\n")
             traceback.print_exc()
             print("\n")
 
             self.controller = None
-            self.status = u"Error (%s)" % err
+            self.status = "Error (%s)" % err
             self.log.error("Tor stem connect error: %s" % Debug.formatException(err))
 
         return self.controller
@@ -87,8 +87,8 @@ class TorManagerPlugin(object):
     def resetCircuits(self):
         try:
             self.controller.signal(Signal.NEWNYM)
-        except Exception, err:
-            self.status = u"Stem reset circuits error (%s)" % err
+        except Exception as err:
+            self.status = "Stem reset circuits error (%s)" % err
             self.log.error("Stem reset circuits error: %s" % err)
 
@@ -105,8 +105,8 @@ class TorManagerPlugin(object):
 
             return (service.service_id, service.private_key)
 
-        except Exception, err:
-            self.status = u"AddOnion error (Stem: %s)" % err
+        except Exception as err:
+            self.status = "AddOnion error (Stem: %s)" % err
             self.log.error("Failed to create hidden service with Stem: " + err)
             return False
 
@@ -115,8 +115,8 @@ class TorManagerPlugin(object):
         try:
             self.controller.remove_ephemeral_hidden_service(address)
             return True
-        except Exception, err:
-            self.status = u"DelOnion error (Stem: %s)" % err
+        except Exception as err:
+            self.status = "DelOnion error (Stem: %s)" % err
             self.log.error("Stem failed to delete %s.onion: %s" % (address, err))
             self.disconnect()  # Why?
             return False
diff --git a/plugins/disabled-StemPort/__init__.py b/plugins/disabled-StemPort/__init__.py
index 71150ad6..33f8e034 100644
--- a/plugins/disabled-StemPort/__init__.py
+++ b/plugins/disabled-StemPort/__init__.py
@@ -1,10 +1,10 @@
-try:
-    from stem.control import Controller
-    stem_found = True
-except Exception as err:
-    print "STEM NOT FOUND! %s" % err
-    stem_found = False
-
-if stem_found:
-    print "Starting Stem plugin..."
-    import StemPortPlugin
+try:
+    from stem.control import Controller
+    stem_found = True
+except Exception as err:
+    print("STEM NOT FOUND! %s" % err)
+    stem_found = False
+
+if stem_found:
+    print("Starting Stem plugin...")
+    from . import StemPortPlugin
diff --git a/plugins/disabled-UiPassword/__init__.py b/plugins/disabled-UiPassword/__init__.py
index 37350c3d..1779c597 100644
--- a/plugins/disabled-UiPassword/__init__.py
+++ b/plugins/disabled-UiPassword/__init__.py
@@ -1 +1 @@
-import UiPasswordPlugin
\ No newline at end of file
+from . import UiPasswordPlugin
\ No newline at end of file
diff --git a/plugins/disabled-Zeroname-local/SiteManagerPlugin.py b/plugins/disabled-Zeroname-local/SiteManagerPlugin.py
index e8fc8610..c43e792a 100644
--- a/plugins/disabled-Zeroname-local/SiteManagerPlugin.py
+++ b/plugins/disabled-Zeroname-local/SiteManagerPlugin.py
@@ -3,7 +3,7 @@ import gevent
 
 from Plugin import PluginManager
 from Config import config
 from Debug import Debug
-from domainLookup import lookupDomain
+from .domainLookup import lookupDomain
 
 allow_reload = False  # No reload supported
 
diff --git a/plugins/disabled-Zeroname-local/__init__.py b/plugins/disabled-Zeroname-local/__init__.py
index 889802db..cf724069 100644
--- a/plugins/disabled-Zeroname-local/__init__.py
+++ b/plugins/disabled-Zeroname-local/__init__.py
@@ -1,2 +1,2 @@
-import UiRequestPlugin
-import SiteManagerPlugin
\ No newline at end of file
+from . import UiRequestPlugin
+from . import SiteManagerPlugin
\ No newline at end of file
diff --git a/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py b/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py
index 52cdb107..facf2c41 100644
--- a/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py
+++ b/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py
@@ -37,7 +37,7 @@
 try:
     import http.client as httplib
 except ImportError:
-    import httplib
+    import http.client
 import base64
 import decimal
 import json
@@ -45,7 +45,7 @@
 try:
     import urllib.parse as urlparse
 except ImportError:
-    import urlparse
+    import urllib.parse
 
 USER_AGENT = "AuthServiceProxy/0.1"
 
@@ -83,7 +83,7 @@ class AuthServiceProxy(object):
     def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
         self.__service_url = service_url
         self.__service_name = service_name
-        self.__url = urlparse.urlparse(service_url)
+        self.__url = urllib.parse.urlparse(service_url)
         if self.__url.port is None:
             port = 80
         else:
@@ -106,10 +106,10 @@
             # Callables re-use the connection of the original proxy
             self.__conn = connection
         elif self.__url.scheme == 'https':
-            self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
+            self.__conn = http.client.HTTPSConnection(self.__url.hostname, port,
                                                   timeout=timeout)
         else:
-            self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
+            self.__conn = http.client.HTTPConnection(self.__url.hostname, port,
                                                  timeout=timeout)
 
     def __getattr__(self, name):
diff --git a/plugins/disabled-Zeroname-local/domainLookup.py b/plugins/disabled-Zeroname-local/domainLookup.py
index 930168c0..0521d233 100644
--- a/plugins/disabled-Zeroname-local/domainLookup.py
+++ b/plugins/disabled-Zeroname-local/domainLookup.py
@@ -1,4 +1,4 @@
-from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
+from .bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
 
 import time, json, os, sys, re, socket
 
 # Connecting to RPC
diff --git a/src/Config.py b/src/Config.py
index fd7b06bc..9a2cc58c 100644
--- a/src/Config.py
+++ b/src/Config.py
@@ -3,7 +3,7 @@ import sys
 import os
 import locale
 import re
-import ConfigParser
+import configparser
 import logging
 import logging.handlers
 import stat
@@ -304,7 +304,7 @@ class Config(object):
                     if "://" in tracker and tracker not in self.trackers:
                         self.trackers.append(tracker)
         except Exception as err:
-            print "Error loading trackers file: %s" % err
+            print("Error loading trackers file: %s" % err)
 
     # Find arguments specified for current action
     def getActionArguments(self):
@@ -316,7 +316,7 @@ class Config(object):
 
     # Try to find action from argv
     def getAction(self, argv):
-        actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0]  # Valid actions
+        actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0]  # Valid actions
         found_action = False
         for action in actions:  # See if any in argv
             if action in argv:
@@ -404,7 +404,7 @@ class Config(object):
             self.config_file = argv[argv.index("--config_file") + 1]
         # Load config file
         if os.path.isfile(self.config_file):
-            config = ConfigParser.ConfigParser(allow_no_value=True)
+            config = configparser.ConfigParser(allow_no_value=True)
             config.read(self.config_file)
             for section in config.sections():
                 for key, val in config.items(section):
@@ -570,7 +570,7 @@ class Config(object):
             try:
                 os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
             except Exception as err:
-                print "Can't change permission of %s: %s" % (self.log_dir, err)
+                print("Can't change permission of %s: %s" % (self.log_dir, err))
 
         # Make warning hidden from console
         logging.WARNING = 15  # Don't display warnings if not in debug mode
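
The authproxy and Config changes track Python 3's stdlib renames: httplib became http.client, urlparse became urllib.parse, and ConfigParser became configparser. As rewritten above, the except ImportError fallbacks simply re-import the Python 3 module, so they no longer provide a Python 2 path; a dual-version shim would instead keep the old name bound, roughly like this (a sketch, not this file's code):

    try:
        import http.client as httplib    # Python 3
    except ImportError:
        import httplib                   # Python 2 fallback, same name

    conn = httplib.HTTPSConnection("example.com", 443, timeout=10)
    print(type(conn).__name__)           # HTTPSConnection either way
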
diff --git a/src/Connection/Connection.py b/src/Connection/Connection.py
index b5f7ae70..1d105b5f 100644
--- a/src/Connection/Connection.py
+++ b/src/Connection/Connection.py
@@ -314,7 +314,7 @@ class Connection(object):
                     self.incomplete_buff_recv += 1
                     self.bytes_recv += buff_len
                     self.server.bytes_recv += buff_len
-            except Exception, err:
+            except Exception as err:
                 self.log("Stream read error: %s" % Debug.formatException(err))
 
         if config.debug_socket:
@@ -328,7 +328,7 @@
         if unpacker_stream_bytes:
             return buff[buff_stream_start + unpacker_stream_bytes:]
         else:
-            return ""
+            return b""
 
     # My handshake info
     def getHandshakeInfo(self):
@@ -476,7 +476,7 @@
             try:
                 self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
                 self.sock_wrapped = True
-            except Exception, err:
+            except Exception as err:
                 if not config.force_encryption:
                     self.log("Crypt connection error: %s, adding ip %s as broken ssl." % (err, self.ip))
                     self.server.broken_ssl_ips[self.ip] = True
@@ -526,7 +526,7 @@
                 message = None
                 with self.send_lock:
                     self.sock.sendall(data)
-        except Exception, err:
+        except Exception as err:
             self.close("Send error: %s (cmd: %s)" % (err, stat_key))
             return False
         self.last_sent_time = time.time()
@@ -577,9 +577,9 @@
         with gevent.Timeout(10.0, False):
             try:
                 response = self.request("ping")
-            except Exception, err:
+            except Exception as err:
                 self.log("Ping error: %s" % Debug.formatException(err))
-        if response and "body" in response and response["body"] == "Pong!":
+        if response and "body" in response and response["body"] == b"Pong!":
             self.last_ping_delay = time.time() - s
             return True
         else:
@@ -608,7 +608,7 @@
             if self.sock:
                 self.sock.shutdown(gevent.socket.SHUT_WR)
                 self.sock.close()
-        except Exception, err:
+        except Exception as err:
             if config.debug_socket:
                 self.log("Close error: %s" % err)
 
diff --git a/src/Connection/ConnectionServer.py b/src/Connection/ConnectionServer.py
index 15274a54..6b42d8b8 100644
--- a/src/Connection/ConnectionServer.py
+++ b/src/Connection/ConnectionServer.py
@@ -12,7 +12,7 @@ from gevent.pool import Pool
 
 import util
 from util import helper
 from Debug import Debug
-from Connection import Connection
+from .Connection import Connection
 from Config import config
 from Crypt import CryptConnection
 from Crypt import CryptHash
@@ -94,7 +94,7 @@ class ConnectionServer(object):
             self.stream_server = StreamServer(
                 (self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
             )
-        except Exception, err:
+        except Exception as err:
             self.log.info("StreamServer create error: %s" % Debug.formatException(err))
 
     def listen(self):
@@ -102,7 +102,7 @@
             gevent.spawn(self.listenProxy)
         try:
             self.stream_server.serve_forever()
-        except Exception, err:
+        except Exception as err:
             self.log.info("StreamServer listen error: %s" % err)
 
     def stop(self):
@@ -199,7 +199,7 @@
                     connection.close("Connection event return error")
                     raise Exception("Connection event return error")
 
-            except Exception, err:
+            except Exception as err:
                 connection.close("%s Connect error: %s" % (ip, Debug.formatException(err)))
                 raise err
 
@@ -346,6 +346,6 @@
         ])
         if len(corrections) < 6:
             return 0.0
-        mid = len(corrections) / 2 - 1
+        mid = int(len(corrections) / 2 - 1)
         median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
         return median
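
The median fix in ConnectionServer exists because Python 3's / is true division: len(corrections) / 2 yields a float, and floats cannot index a list. Floor division expresses the same intent without the int() wrapper:

    corrections = [0.1, 0.2, 0.3, 0.5, 0.9, 1.4, 2.0]

    mid = len(corrections) // 2 - 1      # int, same value as int(len(...) / 2 - 1)
    median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
    print(mid, round(median, 3))         # 2 0.333

    # corrections[len(corrections) / 2] would raise TypeError on Python 3
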
diff --git a/src/Connection/__init__.py b/src/Connection/__init__.py
index 5bd29c6e..d419a3f0 100644
--- a/src/Connection/__init__.py
+++ b/src/Connection/__init__.py
@@ -1,2 +1,2 @@
-from ConnectionServer import ConnectionServer
-from Connection import Connection
+from .ConnectionServer import ConnectionServer
+from .Connection import Connection
diff --git a/src/Content/ContentDb.py b/src/Content/ContentDb.py
index 307b47bb..07421dfe 100644
--- a/src/Content/ContentDb.py
+++ b/src/Content/ContentDb.py
@@ -19,7 +19,7 @@ class ContentDb(Db):
             foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone()
             if foreign_key_error:
                 raise Exception("Database foreign key error: %s" % foreign_key_error)
-        except Exception, err:
+        except Exception as err:
             self.log.error("Error loading content.db: %s, rebuilding..." % Debug.formatException(err))
             self.close()
             os.unlink(path)  # Remove and try again
@@ -95,8 +95,8 @@ class ContentDb(Db):
     def setContent(self, site, inner_path, content, size=0):
         self.insertOrUpdate("content", {
             "size": size,
-            "size_files": sum([val["size"] for key, val in content.get("files", {}).iteritems()]),
-            "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).iteritems()]),
+            "size_files": sum([val["size"] for key, val in content.get("files", {}).items()]),
+            "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]),
             "modified": int(content.get("modified", 0))
         }, {
             "site_id": self.site_ids.get(site.address, 0),
diff --git a/src/Content/ContentDbDict.py b/src/Content/ContentDbDict.py
index b47a15a3..01df0427 100644
--- a/src/Content/ContentDbDict.py
+++ b/src/Content/ContentDbDict.py
@@ -1,7 +1,7 @@
 import time
 import os
 
-import ContentDb
+from . import ContentDb
 from Debug import Debug
 from Config import config
 
@@ -127,29 +127,29 @@ if __name__ == "__main__":
     s_mem = process.memory_info()[0] / float(2 ** 20)
     root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27"
     contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root)
-    print "Init len", len(contents)
+    print("Init len", len(contents))
 
     s = time.time()
     for dir_name in os.listdir(root + "/data/users/")[0:8000]:
         contents["data/users/%s/content.json" % dir_name]
-    print "Load: %.3fs" % (time.time() - s)
+    print("Load: %.3fs" % (time.time() - s))
 
     s = time.time()
     found = 0
-    for key, val in contents.iteritems():
+    for key, val in contents.items():
         found += 1
         assert key
         assert val
-    print "Found:", found
-    print "Iteritem: %.3fs" % (time.time() - s)
+    print("Found:", found)
+    print("Iteritem: %.3fs" % (time.time() - s))
 
     s = time.time()
     found = 0
-    for key in contents.keys():
+    for key in list(contents.keys()):
         found += 1
         assert key in contents
-    print "In: %.3fs" % (time.time() - s)
+    print("In: %.3fs" % (time.time() - s))
 
-    print "Len:", len(contents.values()), len(contents.keys())
+    print("Len:", len(list(contents.values())), len(list(contents.keys())))
 
-    print "Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem
+    print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem)
diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py
index e2e2860a..d92fa8fa 100644
--- a/src/Content/ContentManager.py
+++ b/src/Content/ContentManager.py
@@ -3,6 +3,7 @@ import time
 import re
 import os
 import copy
+import base64
 
 import gevent
 
@@ -13,7 +14,7 @@ from util import helper
 from util import Diff
 from util import SafeRe
 from Peer import PeerHashfield
-from ContentDbDict import ContentDbDict
+from .ContentDbDict import ContentDbDict
 from Plugin import PluginManager
 
 
@@ -44,7 +45,7 @@ class ContentManager(object):
 
         # Load hashfield cache
        if "hashfield" in self.site.settings.get("cache", {}):
-            self.hashfield.fromstring(self.site.settings["cache"]["hashfield"].decode("base64"))
+            self.hashfield.frombytes(base64.b64decode(self.site.settings["cache"]["hashfield"]))
             del self.site.settings["cache"]["hashfield"]
         elif self.contents.get("content.json") and self.site.settings["size_optional"] > 0:
             self.site.storage.updateBadFiles()  # No hashfield cache created yet
@@ -74,7 +75,7 @@ class ContentManager(object):
                 return [], []
             new_content = json.load(open(content_path))
-        except Exception, err:
+        except Exception as err:
             self.log.warning("%s load error: %s" % (content_path, Debug.formatException(err)))
             return [], []
         else:
@@ -86,7 +87,7 @@ class ContentManager(object):
         changed = []
         deleted = []
         # Check changed
-        for relative_path, info in new_content.get("files", {}).iteritems():
+        for relative_path, info in new_content.get("files", {}).items():
             if "sha512" in info:
                 hash_type = "sha512"
             else:  # Backward compatibility
@@ -101,7 +102,7 @@
                 changed.append(content_inner_dir + relative_path)
 
         # Check changed optional files
-        for relative_path, info in new_content.get("files_optional", {}).iteritems():
+        for relative_path, info in new_content.get("files_optional", {}).items():
             file_inner_path = content_inner_dir + relative_path
             new_hash = info["sha512"]
             if old_content and old_content.get("files_optional", {}).get(relative_path):
@@ -115,7 +116,7 @@
                         self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][relative_path]["size"])
                     self.optionalDelete(file_inner_path)
                     self.log.debug("Deleted changed optional file: %s" % file_inner_path)
-                except Exception, err:
+                except Exception as err:
                     self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))
             else:  # The file is not in the old content
                 if self.site.isDownloadable(file_inner_path):
@@ -151,7 +152,7 @@
                     self.site.storage.delete(file_inner_path)
                     self.log.debug("Deleted file: %s" % file_inner_path)
-                except Exception, err:
+                except Exception as err:
                     self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))
 
             # Cleanup empty dirs
@@ -165,7 +166,7 @@
                         self.site.storage.deleteDir(root_inner_path)
                         # Remove from tree dict to reflect changed state
                         tree[os.path.dirname(root)][0].remove(os.path.basename(root))
-                    except Exception, err:
+                    except Exception as err:
                         self.log.debug("Error deleting empty directory %s: %s" % (root_inner_path, err))
 
         # Check archived
@@ -175,12 +176,12 @@
             self.log.debug("old archived: %s, new archived: %s" % (len(old_archived), len(new_archived)))
             archived_changed = {
                 key: date_archived
-                for key, date_archived in new_archived.iteritems()
+                for key, date_archived in new_archived.items()
                 if old_archived.get(key) != new_archived[key]
             }
             if archived_changed:
                 self.log.debug("Archived changed: %s" % archived_changed)
-                for archived_dirname, date_archived in archived_changed.iteritems():
+                for archived_dirname, date_archived in archived_changed.items():
                     archived_inner_path = content_inner_dir + archived_dirname + "/content.json"
                     if self.contents.get(archived_inner_path, {}).get("modified", 0) < date_archived:
                         self.removeContent(archived_inner_path)
@@ -204,7 +205,7 @@
 
             # Remove archived files from download queue
             num_removed_bad_files = 0
-            for bad_file in self.site.bad_files.keys():
+            for bad_file in list(self.site.bad_files.keys()):
                 if bad_file.endswith("content.json"):
                     del self.site.bad_files[bad_file]
                     num_removed_bad_files += 1
@@ -217,7 +218,7 @@
 
         # Load includes
         if load_includes and "includes" in new_content:
-            for relative_path, info in new_content["includes"].items():
+            for relative_path, info in list(new_content["includes"].items()):
                 include_inner_path = content_inner_dir + relative_path
                 if self.site.storage.isFile(include_inner_path):  # Content.json exists, load it
                     include_changed, include_deleted = self.loadContent(
@@ -255,7 +256,7 @@
                 self.has_optional_files = True
             # Update the content
             self.contents[content_inner_path] = new_content
-        except Exception, err:
+        except Exception as err:
             self.log.warning("%s parse error: %s" % (content_inner_path, Debug.formatException(err)))
             return [], []  # Content.json parse error
 
@@ -282,7 +283,7 @@
                 content.get("files", {}),
                 **content.get("files_optional", {})
             )
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Error loading %s for removeContent: %s" % (inner_path, Debug.formatException(err)))
             files = {}
         files["content.json"] = True
@@ -292,16 +293,16 @@
             try:
                 self.site.storage.delete(file_inner_path)
                 self.log.debug("Deleted file: %s" % file_inner_path)
-            except Exception, err:
+            except Exception as err:
                 self.log.debug("Error deleting file %s: %s" % (file_inner_path, err))
         try:
             self.site.storage.deleteDir(inner_dir)
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Error deleting dir %s: %s" % (inner_dir, err))
 
         try:
             del self.contents[inner_path]
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Error key from contents: %s" % inner_path)
 
     # Get total size of site
@@ -317,7 +318,7 @@
             return []
         back = [inner_path]
         content_inner_dir = helper.getDirname(inner_path)
-        for relative_path in self.contents[inner_path].get("includes", {}).keys():
+        for relative_path in list(self.contents[inner_path].get("includes", {}).keys()):
             include_inner_path = content_inner_dir + relative_path
             back += self.listContents(include_inner_path)
         return back
@@ -333,7 +334,7 @@
         file_info = self.getFileInfo(user_contents_inner_path)
         if file_info:
             time_archived_before = file_info.get("archived_before", 0)
-            time_directory_archived = file_info.get("archived", {}).get(relative_directory)
+            time_directory_archived = file_info.get("archived", {}).get(relative_directory, 0)
             if modified <= time_archived_before or modified <= time_directory_archived:
                 return True
             else:
@@ -493,11 +494,11 @@
         banned = False
         if "signers" in rules:
             rules["signers"] = rules["signers"][:]  # Make copy of the signers
-        for permission_pattern, permission_rules in user_contents["permission_rules"].items():  # Regexp rules
+        for permission_pattern, permission_rules in list(user_contents["permission_rules"].items()):  # Regexp rules
             if not SafeRe.match(permission_pattern, user_urn):
                 continue  # Rule is not valid for user
             # Update rules if its better than current recorded ones
-            for key, val in permission_rules.iteritems():
+            for key, val in permission_rules.items():
                 if key not in rules:
                     if type(val) is list:
                         rules[key] = val[:]  # Make copy
@@ -649,7 +650,7 @@
 
         if extend:
             # Add extend keys if not exists
-            for key, val in extend.items():
+            for key, val in list(extend.items()):
                 if not content.get(key):
                     content[key] = val
                     self.log.info("Extending content.json with: %s" % key)
@@ -664,14 +665,14 @@
         )
 
         if not remove_missing_optional:
-            for file_inner_path, file_details in content.get("files_optional", {}).iteritems():
+            for file_inner_path, file_details in content.get("files_optional", {}).items():
                 if file_inner_path not in files_optional_node:
                     files_optional_node[file_inner_path] = file_details
 
         # Find changed files
         files_merged = files_node.copy()
         files_merged.update(files_optional_node)
-        for file_relative_path, file_details in files_merged.iteritems():
+        for file_relative_path, file_details in files_merged.items():
             old_hash = content.get("files", {}).get(file_relative_path, {}).get("sha512")
             new_hash = files_merged[file_relative_path]["sha512"]
             if old_hash != new_hash:
@@ -795,19 +796,19 @@
         try:
             cert_subject = "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name)
             result = CryptBitcoin.verify(cert_subject, cert_address, content["cert_sign"])
-        except Exception, err:
+        except Exception as err:
             raise VerifyError("Certificate verify error: %s" % err)
         return result
 
     # Checks if the content.json content is valid
     # Return: True or False
     def verifyContent(self, inner_path, content):
-        content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in content["files"].values() if file["size"] >= 0])  # Size of new content
+        content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in list(content["files"].values()) if file["size"] >= 0])  # Size of new content
         # Calculate old content size
         old_content = self.contents.get(inner_path)
         if old_content:
-            old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in old_content.get("files", {}).values()])
-            old_content_size_optional = sum([file["size"] for file in old_content.get("files_optional", {}).values()])
+            old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in list(old_content.get("files", {}).values())])
+            old_content_size_optional = sum([file["size"] for file in list(old_content.get("files_optional", {}).values())])
         else:
             old_content_size = 0
             old_content_size_optional = 0
@@ -816,7 +817,7 @@
         if not old_content and inner_path == "content.json":
             self.site.settings["size"] = 0
 
-        content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values() if file["size"] >= 0])
+        content_size_optional = sum([file["size"] for file in list(content.get("files_optional", {}).values()) if file["size"] >= 0])
         site_size = self.site.settings["size"] - old_content_size + content_size  # Site size without old content plus the new
         site_size_optional = self.site.settings["size_optional"] - old_content_size_optional + content_size_optional  # Site size without old content plus the new
 
@@ -841,7 +842,7 @@
             raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit))
 
         # Verify valid filenames
-        for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
+        for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
             if not self.isValidRelativePath(file_relative_path):
                 raise VerifyError("Invalid relative path: %s" % file_relative_path)
 
@@ -876,12 +877,12 @@
 
         # Filename limit
         if rules.get("files_allowed"):
-            for file_inner_path in content["files"].keys():
+            for file_inner_path in list(content["files"].keys()):
                 if not SafeRe.match("^%s$" % rules["files_allowed"], file_inner_path):
                     raise VerifyError("File not allowed: %s" % file_inner_path)
 
         if rules.get("files_allowed_optional"):
-            for file_inner_path in content.get("files_optional", {}).keys():
+            for file_inner_path in list(content.get("files_optional", {}).keys()):
                 if not SafeRe.match("^%s$" % rules["files_allowed_optional"], file_inner_path):
                     raise VerifyError("Optional file not allowed: %s" % file_inner_path)
 
@@ -964,7 +965,7 @@
                 else:
                     raise VerifyError("Invalid old-style sign")
 
-        except Exception, err:
+        except Exception as err:
             self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err)))
             raise err
 
diff --git a/src/Content/__init__.py b/src/Content/__init__.py
index fab39f93..fbbd39f4 100644
--- a/src/Content/__init__.py
+++ b/src/Content/__init__.py
@@ -1 +1 @@
-from ContentManager import ContentManager
\ No newline at end of file
+from .ContentManager import ContentManager
\ No newline at end of file
diff --git a/src/Crypt/CryptHash.py b/src/Crypt/CryptHash.py
index 118053b6..96ee3e24 100644
--- a/src/Crypt/CryptHash.py
+++ b/src/Crypt/CryptHash.py
@@ -13,10 +13,10 @@ def sha1sum(file, blocksize=65536):
 
 def sha512sum(file, blocksize=65536, format="hexdigest"):
-    if hasattr(file, "endswith"):  # Its a string open it
+    if type(file) is str:  # Filename specified
         file = open(file, "rb")
     hash = hashlib.sha512()
-    for block in iter(lambda: file.read(blocksize), ""):
+    for block in iter(lambda: file.read(blocksize), b""):
         hash.update(block)
 
     # Truncate to 256bits is good enough
@@ -31,7 +31,7 @@ def sha256sum(file, blocksize=65536):
     if hasattr(file, "endswith"):  # Its a string open it
         file = open(file, "rb")
     hash = hashlib.sha256()
-    for block in iter(lambda: file.read(blocksize), ""):
+    for block in iter(lambda: file.read(blocksize), b""):
         hash.update(block)
     return hash.hexdigest()
 
@@ -39,7 +39,7 @@ def sha256sum(file, blocksize=65536):
 def random(length=64, encoding="hex"):
     if encoding == "base64":  # Characters: A-Za-z0-9
         hash = hashlib.sha512(os.urandom(256)).digest()
-        return base64.standard_b64encode(hash).replace("+", "").replace("/", "").replace("=", "")[0:length]
+        return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length]
     else:  # Characters: a-f0-9 (faster)
         return hashlib.sha512(os.urandom(256)).hexdigest()[0:length]
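
The CryptHash loops hash files opened in binary mode, which is why the iter() sentinel becomes b"": a bytes read() never returns "", so with the Py2-era sentinel the loop would never terminate. A standalone sketch (sha512t is a hypothetical name, not this module's API):

    import hashlib
    import io

    def sha512t(file_obj, blocksize=65536):
        # iter(callable, sentinel) stops once read() returns b"" at EOF
        hash = hashlib.sha512()
        for block in iter(lambda: file_obj.read(blocksize), b""):
            hash.update(block)
        return hash.hexdigest()[0:64]   # truncated to 256 bits, as above

    print(sha512t(io.BytesIO(b"hello zeronet")))
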
blocksize=65536): if hasattr(file, "endswith"): # Its a string open it file = open(file, "rb") hash = hashlib.sha256() - for block in iter(lambda: file.read(blocksize), ""): + for block in iter(lambda: file.read(blocksize), b""): hash.update(block) return hash.hexdigest() @@ -39,7 +39,7 @@ def sha256sum(file, blocksize=65536): def random(length=64, encoding="hex"): if encoding == "base64": # Characters: A-Za-z0-9 hash = hashlib.sha512(os.urandom(256)).digest() - return base64.standard_b64encode(hash).replace("+", "").replace("/", "").replace("=", "")[0:length] + return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length] else: # Characters: a-f0-9 (faster) return hashlib.sha512(os.urandom(256)).hexdigest()[0:length] diff --git a/src/Crypt/CryptRsa.py b/src/Crypt/CryptRsa.py index 694ef34f..b2ee3564 100644 --- a/src/Crypt/CryptRsa.py +++ b/src/Crypt/CryptRsa.py @@ -35,4 +35,4 @@ def privatekeyToPublickey(privatekey): return pub.save_pkcs1("DER") def publickeyToOnion(publickey): - return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower() + return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii") diff --git a/src/Db/Db.py b/src/Db/Db.py index 186d45fe..bcc36201 100644 --- a/src/Db/Db.py +++ b/src/Db/Db.py @@ -7,7 +7,7 @@ import os import gevent from Debug import Debug -from DbCursor import DbCursor +from .DbCursor import DbCursor from Config import config from util import SafeRe from util import helper @@ -149,8 +149,8 @@ class Db(object): if not self.db_keyvalues: # Get db keyvalues try: res = self.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues - except sqlite3.OperationalError, err: # Table not exist - self.log.debug("Query error: %s" % err) + except sqlite3.OperationalError as err: # Table not exist + self.log.debug("Query table version error: %s" % err) return False for row in res: @@ -260,7 +260,7 @@ class Db(object): data = json.load(helper.limitedGzipFile(fileobj=file)) else: data = json.load(file) - except Exception, err: + except Exception as err: self.log.debug("Json file %s load error: %s" % (file_path, err)) data = {} @@ -274,7 +274,7 @@ class Db(object): commit_after_done = False # Row for current json file if required - if not data or filter(lambda dbmap: "to_keyvalue" in dbmap or "to_table" in dbmap, matched_maps): + if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]: json_row = cur.getJsonRow(relative_path) # Check matched mappings in schema @@ -311,7 +311,7 @@ class Db(object): changed = True if changed: # Add the custom col values - data_json_row.update({key: val for key, val in data.iteritems() if key in dbmap["to_json_table"]}) + data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]}) cur.execute("INSERT OR REPLACE INTO json ?", data_json_row) # Insert data to tables @@ -333,7 +333,7 @@ class Db(object): # Fill import cols from table cols if not import_cols: - import_cols = set(map(lambda item: item[0], self.schema["tables"][table_name]["cols"])) + import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]]) cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],)) @@ -341,7 +341,7 @@ class Db(object): continue if key_col: # Map as dict - for key, val in data[node].iteritems(): + for key, val in data[node].items(): if val_col: # Single value cur.execute( "INSERT OR REPLACE INTO %s ?" 
% table_name, @@ -355,9 +355,9 @@ class Db(object): row[key_col] = key # Replace in value if necessary if replaces: - for replace_key, replace in replaces.iteritems(): + for replace_key, replace in replaces.items(): if replace_key in row: - for replace_from, replace_to in replace.iteritems(): + for replace_from, replace_to in replace.items(): row[replace_key] = row[replace_key].replace(replace_from, replace_to) row["json_id"] = json_row["json_id"] @@ -402,7 +402,6 @@ if __name__ == "__main__": dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur) # print ".", cur.logging = True - cur.execute("COMMIT") - print "Done in %.3fs" % (time.time() - s) + print("Done in %.3fs" % (time.time() - s)) for query, stats in sorted(dbjson.query_stats.items()): - print "-", query, stats + print("-", query, stats) diff --git a/src/Db/DbQuery.py b/src/Db/DbQuery.py index a7730d5b..3fb5ef73 100644 --- a/src/Db/DbQuery.py +++ b/src/Db/DbQuery.py @@ -9,9 +9,9 @@ class DbQuery: # Split main parts of query def parseParts(self, query): parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query) - parts = filter(None, parts) # Remove empty parts - parts = map(lambda s: s.strip(), parts) # Remove whitespace - return dict(zip(parts[0::2], parts[1::2])) + parts = [_f for _f in parts if _f] # Remove empty parts + parts = [s.strip() for s in parts] # Remove whitespace + return dict(list(zip(parts[0::2], parts[1::2]))) # Parse selected fields SELECT ... FROM def parseFields(self, query_select): diff --git a/src/Db/__init__.py b/src/Db/__init__.py index 5bede9f4..93c5b911 100644 --- a/src/Db/__init__.py +++ b/src/Db/__init__.py @@ -1,3 +1,3 @@ -from Db import Db -from DbQuery import DbQuery -from DbCursor import DbCursor \ No newline at end of file +from .Db import Db +from .DbQuery import DbQuery +from .DbCursor import DbCursor \ No newline at end of file diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py index 960d260c..78573b2a 100644 --- a/src/Debug/Debug.py +++ b/src/Debug/Debug.py @@ -63,10 +63,10 @@ gevent.spawn(testBlock) if __name__ == "__main__": try: - print 1 / 0 - except Exception, err: - print type(err).__name__ - print "1/0 error: %s" % formatException(err) + print(1 / 0) + except Exception as err: + print(type(err).__name__) + print("1/0 error: %s" % formatException(err)) def loadJson(): json.loads("Errr") @@ -74,13 +74,13 @@ if __name__ == "__main__": import json try: loadJson() - except Exception, err: - print err - print "Json load error: %s" % formatException(err) + except Exception as err: + print(err) + print("Json load error: %s" % formatException(err)) try: raise Notify("nothing...") - except Exception, err: - print "Notify: %s" % formatException(err) + except Exception as err: + print("Notify: %s" % formatException(err)) loadJson() diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py index c3956eed..c31ca4df 100644 --- a/src/Debug/DebugHook.py +++ b/src/Debug/DebugHook.py @@ -5,19 +5,20 @@ import gevent import gevent.hub from Config import config +import importlib last_error = None def shutdown(): - print "Shutting down..." 
+ print("Shutting down...") if "file_server" in dir(sys.modules["main"]) and sys.modules["main"].file_server.running: try: if "file_server" in dir(sys.modules["main"]): gevent.spawn(sys.modules["main"].file_server.stop) if "ui_server" in dir(sys.modules["main"]): gevent.spawn(sys.modules["main"].ui_server.stop) - except Exception, err: - print "Proper shutdown error: %s" % err + except Exception as err: + print("Proper shutdown error: %s" % err) sys.exit(0) else: sys.exit(0) @@ -67,7 +68,7 @@ else: sys.excepthook(exc_info[0], exc_info[1], exc_info[2]) gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet - reload(gevent) + importlib.reload(gevent) def handleGreenletError(self, context, type, value, tb): if isinstance(value, str): @@ -83,18 +84,18 @@ if __name__ == "__main__": import time from gevent import monkey monkey.patch_all(thread=False, ssl=False) - import Debug + from . import Debug def sleeper(num): - print "started", num + print("started", num) time.sleep(3) raise Exception("Error") - print "stopped", num + print("stopped", num) thread1 = gevent.spawn(sleeper, 1) thread2 = gevent.spawn(sleeper, 2) time.sleep(1) - print "killing..." + print("killing...") thread1.kill(exception=Debug.Notify("Worker stopped")) #thread2.throw(Debug.Notify("Throw")) - print "killed" + print("killed") gevent.joinall([thread1,thread2]) diff --git a/src/Debug/DebugMedia.py b/src/Debug/DebugMedia.py index 4f576860..3289a41f 100644 --- a/src/Debug/DebugMedia.py +++ b/src/Debug/DebugMedia.py @@ -3,6 +3,7 @@ import subprocess import re import logging import time +import functools from Config import config from util import helper @@ -18,9 +19,9 @@ def findfiles(path, find_ext): elif f2 == "": return -1 else: - return cmp(f1.lower(), f2.lower()) + return helper.cmp(f1.lower(), f2.lower()) - for root, dirs, files in sorted(os.walk(path, topdown=False), cmp=sorter): + for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)): for file in sorted(files): file_path = root + "/" + file file_ext = file.split(".")[-1] @@ -66,16 +67,16 @@ def merge(merged_path): return # Assets not changed, nothing to do if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile - merged_old = open(merged_path, "rb").read().decode("utf8") + merged_old = open(merged_path, "rb").read() old_parts = {} - for match in re.findall(r"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): - old_parts[match[1]] = match[2].strip("\n\r") + for match in re.findall(rb"(/\* ---- (.*?) 
---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): + old_parts[match[1]] = match[2].strip(b"\n\r") # Merge files parts = [] s_total = time.time() for file_path in findfiles(merge_dir, find_ext): - parts.append("\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, "")) + parts.append(b"\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, "").encode("utf8")) if file_path.endswith(".coffee"): # Compile coffee script if file_path in changed or file_path.replace(config.data_dir, "") not in old_parts: # Only recompile if changed or its not compiled before if config.coffeescript_compiler is None: @@ -95,31 +96,31 @@ def merge(merged_path): # Start compiling s = time.time() compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE) - out = compiler.stdout.read().decode("utf8") + out = compiler.stdout.read() compiler.wait() logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s)) # Check errors - if out and out.startswith("("): # No error found + if out and out.startswith(b"("): # No error found parts.append(out) else: # Put error message in place of source code error = out logging.error("%s Compile error: %s" % (file_path, error)) parts.append( - "alert('%s compile error: %s');" % - (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n")) + b"alert('%s compile error: %s');" % + (file_path.encode("utf8"), re.escape(error).replace(b"\n", b"\\n").replace(rb"\\n", rb"\n")) ) else: # Not changed use the old_part parts.append(old_parts[file_path.replace(config.data_dir, "")]) else: # Add to parts - parts.append(open(file_path).read().decode("utf8")) + parts.append(open(file_path, "rb").read()) - merged = u"\n".join(parts) + merged = b"\n".join(parts) if ext == "css": # Vendor prefix css from lib.cssvendor import cssvendor merged = cssvendor.prefix(merged) - merged = merged.replace("\r", "") - open(merged_path, "wb").write(merged.encode("utf8")) + merged = merged.replace(b"\r", b"") + open(merged_path, "wb").write(merged) logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total)) diff --git a/src/Debug/__init__.py b/src/Debug/__init__.py index 8632f92f..e69de29b 100644 --- a/src/Debug/__init__.py +++ b/src/Debug/__init__.py @@ -1 +0,0 @@ -from DebugReloader import DebugReloader \ No newline at end of file diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index b3a13f7f..9025eb8e 100644 --- a/src/File/FileRequest.py +++ b/src/File/FileRequest.py @@ -118,7 +118,7 @@ class FileRequest(object): try: content = json.loads(params["body"]) - except Exception, err: + except Exception as err: self.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err)) self.response({"error": "File invalid JSON"}) self.connection.badAction(5) @@ -131,7 +131,7 @@ class FileRequest(object): else: try: valid = site.content_manager.verifyFile(inner_path, content) - except Exception, err: + except Exception as err: self.log.debug("Update for %s is invalid: %s" % (inner_path, err)) valid = False @@ -251,10 +251,10 @@ class FileRequest(object): return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]} - except RequestError, err: + except RequestError as err: self.log.debug("GetFile %s %s request error: %s" % (self.connection, params["inner_path"], Debug.formatException(err))) self.response({"error": "File read error: %s" % err}) - except Exception, err: + except Exception as err: if config.verbose: self.log.debug("GetFile read error: %s" % Debug.formatException(err))
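
Since merge() now builds its output entirely as bytes, two pitfalls are worth spelling out: printf-style formatting on bytes (available since Python 3.5) requires every %s operand to be bytes, and escape sequences behave differently in raw bytes literals. The latter is why old_parts values are stripped with b"\n\r" rather than a raw literal: rb"\n\r" would denote the four bytes backslash, n, backslash, r, not a newline and carriage return. A few self-contained checks:

    # bytes %-formatting exists since Python 3.5, but str operands raise TypeError,
    # so every argument has to be encoded first
    header = b"\n\n/* ---- %s ---- */\n\n" % "js/all.js".encode("utf8")
    assert header.strip() == b"/* ---- js/all.js ---- */"

    # raw literals: rb"\n" is backslash + n (two bytes), b"\n" is a real newline
    assert rb"\n" == b"\\n"
    assert b"\n" != rb"\n"
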
self.response({"error": "File read error"}) @@ -306,7 +306,7 @@ class FileRequest(object): if config.verbose: self.log.debug( "Added %s peers to %s using pex, sending back %s" % - (added, site, {key: len(val) for key, val in packed_peers.iteritems()}) + (added, site, {key: len(val) for key, val in packed_peers.items()}) ) back = { @@ -353,7 +353,7 @@ class FileRequest(object): back = collections.defaultdict(lambda: collections.defaultdict(list)) found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit) - for hash_id, peers in found.iteritems(): + for hash_id, peers in found.items(): for peer in peers: ip_type = helper.getIpType(peer.ip) if len(back[ip_type][hash_id]) < 20: @@ -385,7 +385,7 @@ class FileRequest(object): if config.verbose: self.log.debug( "Found: %s for %s hashids in %.3fs" % - ({key: len(val) for key, val in back.iteritems()}, len(params["hash_ids"]), time.time() - s) + ({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s) ) self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes}) @@ -405,7 +405,7 @@ class FileRequest(object): # Send a simple Pong! answer def actionPing(self, params): - self.response("Pong!") + self.response(b"Pong!") # Check requested port of the other peer def actionCheckport(self, params): diff --git a/src/File/FileServer.py b/src/File/FileServer.py index 0e167a6f..2c39846e 100644 --- a/src/File/FileServer.py +++ b/src/File/FileServer.py @@ -10,7 +10,7 @@ from gevent.server import StreamServer import util from util import helper from Config import config -from FileRequest import FileRequest +from .FileRequest import FileRequest from Peer import PeerPortchecker from Site import SiteManager from Connection import ConnectionServer @@ -41,7 +41,7 @@ class FileServer(ConnectionServer): port = config.tor_hs_port config.fileserver_port = port elif port == 0: # Use random port - port_range_from, port_range_to = map(int, config.fileserver_port_range.split("-")) + port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-"))) port = self.getRandomPort(ip, port_range_from, port_range_to) config.fileserver_port = port if not port: @@ -59,7 +59,7 @@ class FileServer(ConnectionServer): self.stream_server_proxy = StreamServer( ("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100 ) - except Exception, err: + except Exception as err: self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err)) self.port_opened = {} @@ -117,7 +117,7 @@ class FileServer(ConnectionServer): def listenProxy(self): try: self.stream_server_proxy.serve_forever() - except Exception, err: + except Exception as err: if err.errno == 98: # Address already in use error self.log.debug("StreamServer proxy listen error: %s" % err) else: @@ -231,7 +231,7 @@ class FileServer(ConnectionServer): if not self.port_opened or force_port_check: # Test and open port if not tested yet if len(self.sites) <= 2: # Don't wait port opening on first startup sites_checking = True - for address, site in self.sites.items(): + for address, site in list(self.sites.items()): gevent.spawn(self.checkSite, site, check_files) self.portCheck() @@ -242,7 +242,7 @@ class FileServer(ConnectionServer): if not sites_checking: check_pool = gevent.pool.Pool(5) # Check sites integrity - for site in sorted(self.sites.values(), key=lambda site: site.settings.get("modified", 0), reverse=True): + for site in sorted(list(self.sites.values()), key=lambda site: 
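
The recurring list(...) wrappers around .items(), .keys() and .values() in these FileServer hunks are not cosmetic: Python 3 returns live dictionary views, and deleting or adding entries while iterating a view raises RuntimeError. A toy demonstration (addresses are placeholders):

    sites = {"1abc": "site1", "1def": "site2"}
    # iterating sites.items() directly while deleting would raise
    # "RuntimeError: dictionary changed size during iteration"
    for address, site in list(sites.items()):  # snapshot before mutating
        if address.startswith("1d"):
            del sites[address]
    assert list(sites) == ["1abc"]
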
site.settings.get("modified", 0), reverse=True): if not site.settings["serving"]: continue check_thread = check_pool.spawn(self.checkSite, site, check_files) # Check in new thread @@ -263,7 +263,7 @@ class FileServer(ConnectionServer): (len(self.connections), self.has_internet, len(peers_protected)) ) - for address, site in self.sites.items(): + for address, site in list(self.sites.items()): if not site.settings["serving"]: continue @@ -273,7 +273,7 @@ class FileServer(ConnectionServer): time.sleep(1) # Prevent too quick request peers_protected = set([]) - for address, site in self.sites.items(): + for address, site in list(self.sites.items()): if not site.settings["serving"]: continue @@ -313,7 +313,7 @@ class FileServer(ConnectionServer): while 1: config.loadTrackersFile() s = time.time() - for address, site in self.sites.items(): + for address, site in list(self.sites.items()): if not site.settings["serving"]: continue gevent.spawn(self.announceSite, site).join(timeout=10) diff --git a/src/File/__init__.py b/src/File/__init__.py index 20b28a97..1eb602d6 100644 --- a/src/File/__init__.py +++ b/src/File/__init__.py @@ -1,2 +1,2 @@ -from FileServer import FileServer -from FileRequest import FileRequest \ No newline at end of file +from .FileServer import FileServer +from .FileRequest import FileRequest \ No newline at end of file diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index b999257e..c40db716 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -6,11 +6,11 @@ import collections import gevent -from cStringIO import StringIO +import io from Debug import Debug from Config import config from util import helper -from PeerHashfield import PeerHashfield +from .PeerHashfield import PeerHashfield from Plugin import PluginManager if config.use_tempfiles: @@ -95,7 +95,7 @@ class Peer(object): self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection) self.reputation += 1 self.connection.sites += 1 - except Exception, err: + except Exception as err: self.onConnectionError("Getting connection error") self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed)) @@ -164,7 +164,7 @@ class Peer(object): return res else: raise Exception("Invalid response: %s" % res) - except Exception, err: + except Exception as err: if type(err).__name__ == "Notify": # Greenlet killed by worker self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd)) break @@ -195,7 +195,7 @@ class Peer(object): if config.use_tempfiles: buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b') else: - buff = StringIO() + buff = io.BytesIO() s = time.time() while True: # Read in smaller parts @@ -240,7 +240,7 @@ class Peer(object): with gevent.Timeout(10.0, False): # 10 sec timeout, don't raise exception res = self.request("ping") - if res and "body" in res and res["body"] == "Pong!": + if res and "body" in res and res["body"] == b"Pong!": response_time = time.time() - s break # All fine, exit from for loop # Timeout reached or bad response @@ -267,12 +267,9 @@ class Peer(object): request["peers_onion"] = packed_peers["onion"] if packed_peers["ipv6"]: request["peers_ipv6"] = packed_peers["ipv6"] - res = self.request("pex", request) - if not res or "error" in res: return False - added = 0 # Remove unsupported peer types @@ -331,13 +328,13 @@ class Peer(object): key = "peers" else: key = "peers_%s" % ip_type - for hash, peers in 
res.get(key, {}).items()[0:30]: + for hash, peers in list(res.get(key, {}).items())[0:30]: if ip_type == "onion": unpacker_func = helper.unpackOnionAddress else: unpacker_func = helper.unpackAddress - back[hash] += map(unpacker_func, peers) + back[hash] += list(map(unpacker_func, peers)) for hash in res.get("my", []): back[hash].append((self.connection.ip, self.connection.port)) diff --git a/src/Peer/PeerHashfield.py b/src/Peer/PeerHashfield.py index 050d47f4..b7bca64f 100644 --- a/src/Peer/PeerHashfield.py +++ b/src/Peer/PeerHashfield.py @@ -68,8 +68,8 @@ if __name__ == "__main__": s = time.time() for i in range(10000): field.appendHashId(i) - print time.time()-s + print(time.time()-s) s = time.time() for i in range(10000): field.hasHash("AABB") - print time.time()-s \ No newline at end of file + print(time.time()-s) \ No newline at end of file diff --git a/src/Peer/PeerPortchecker.py b/src/Peer/PeerPortchecker.py index 5bcf91df..f6bca89d 100644 --- a/src/Peer/PeerPortchecker.py +++ b/src/Peer/PeerPortchecker.py @@ -1,6 +1,6 @@ import logging -import urllib -import urllib2 +import urllib.request +import urllib.parse import re import time @@ -16,10 +16,10 @@ class PeerPortchecker(object): def requestUrl(self, url, post_data=None): if type(post_data) is dict: - post_data = urllib.urlencode(post_data) - req = urllib2.Request(url, post_data) + post_data = urllib.parse.urlencode(post_data).encode("utf8") + req = urllib.request.Request(url, post_data) req.add_header('Referer', url) - return urllib2.urlopen(req, timeout=20.0) + return urllib.request.urlopen(req, timeout=20.0) def portOpen(self, port): self.log.info("Trying to open port using UpnpPunch...") @@ -67,7 +67,7 @@ class PeerPortchecker(object): return res def checkCanyouseeme(self, port): - data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read() + data = urllib.request.urlopen("http://www.canyouseeme.org/", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8") message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1) message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags @@ -85,7 +85,7 @@ class PeerPortchecker(object): raise Exception("Invalid response: %s" % message) def checkPortchecker(self, port): - data = urllib2.urlopen("https://portchecker.co/check", "port=%s" % port, timeout=20.0).read() + data = urllib.request.urlopen("https://portchecker.co/check", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8") message = re.match('.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1) message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags @@ -109,7 +109,6 @@ class PeerPortchecker(object): ip = re.match('.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1) token = re.match('.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1) - print ip post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."} data = self.requestUrl(url, post_data).read() @@ -168,4 +167,4 @@ if __name__ == "__main__": peer_portchecker = PeerPortchecker() for func_name in ["checkIpv6scanner", "checkMyaddr", "checkPortchecker", "checkCanyouseeme"]: s = time.time() - print(func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s)) + print((func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s))) diff --git a/src/Peer/__init__.py b/src/Peer/__init__.py
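
The PeerPortchecker changes track the urllib reorganization: urllib2.Request/urlopen moved to urllib.request, urlencode moved to urllib.parse, POST bodies must now be bytes, and responses arrive as bytes (hence the added .decode("utf8") calls before regex matching). A sketch of the Python 3 request flow; the URL is a placeholder and the network call is left commented out so the snippet stays offline:

    import urllib.parse
    import urllib.request

    post_data = urllib.parse.urlencode({"port": 15441}).encode("utf8")  # bytes body
    req = urllib.request.Request("http://example.com/check", post_data)
    req.add_header("Referer", "http://example.com/check")
    # response = urllib.request.urlopen(req, timeout=20.0)
    # response.read() returns bytes -> .decode("utf8") before str regexes apply
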
index 3e92827f..e73c58c5 100644 --- a/src/Peer/__init__.py +++ b/src/Peer/__init__.py @@ -1,2 +1,2 @@ -from Peer import Peer -from PeerHashfield import PeerHashfield +from .Peer import Peer +from .PeerHashfield import PeerHashfield diff --git a/src/Plugin/PluginManager.py b/src/Plugin/PluginManager.py index c5d1f79b..6776f372 100644 --- a/src/Plugin/PluginManager.py +++ b/src/Plugin/PluginManager.py @@ -7,6 +7,7 @@ from collections import defaultdict from Debug import Debug from Config import config +import importlib class PluginManager: @@ -48,7 +49,7 @@ class PluginManager: self.log.debug("Loading plugin: %s" % dir_name) try: __import__(dir_name) - except Exception, err: + except Exception as err: self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err))) if dir_name not in self.plugin_names: self.plugin_names.append(dir_name) @@ -62,19 +63,19 @@ class PluginManager: self.after_load = [] self.plugins_before = self.plugins self.plugins = defaultdict(list) # Reset registered plugins - for module_name, module in sys.modules.items(): - if module and "__file__" in dir(module) and self.plugin_path in module.__file__: # Module file within plugin_path + for module_name, module in list(sys.modules.items()): + if module and getattr(module, "__file__", None) and self.plugin_path in module.__file__: # Module file in plugin_path if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled # Re-add non-reloadable plugins - for class_name, classes in self.plugins_before.iteritems(): + for class_name, classes in self.plugins_before.items(): for c in classes: if c.__module__ != module.__name__: continue self.plugins[class_name].append(c) else: try: - reload(module) - except Exception, err: + importlib.reload(module) + except Exception as err: self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err))) self.loadPlugins() # Load new plugins @@ -82,7 +83,7 @@ class PluginManager: # Change current classes in memory import gc patched = {} - for class_name, classes in self.plugins.iteritems(): + for class_name, classes in self.plugins.items(): classes = classes[:] # Copy the current plugins classes.reverse() base_class = self.pluggable[class_name] # Original class @@ -96,8 +97,8 @@ class PluginManager: # Change classes in modules patched = {} - for class_name, classes in self.plugins.iteritems(): - for module_name, module in sys.modules.iteritems(): + for class_name, classes in self.plugins.items(): + for module_name, module in list(sys.modules.items()): if class_name in dir(module): if "__class__" not in dir(getattr(module, class_name)): # Not a class continue @@ -134,7 +135,7 @@ def acceptPlugins(base_class): if str(key) in plugin_manager.subclass_order[class_name] else 9999 ) - plugin_manager.subclass_order[class_name] = map(str, classes) + plugin_manager.subclass_order[class_name] = list(map(str, classes)) classes.reverse() classes.append(base_class) # Add the class itself to end of inherience line @@ -181,4 +182,4 @@ if __name__ == "__main__": else: return "Can't route to", path - print Request().route("MainPage") + print(Request().route("MainPage")) diff --git a/src/Site/Site.py b/src/Site/Site.py index 1126d065..c2391f7a 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -7,6 +7,7 @@ import random import sys import hashlib import collections +import base64 import gevent import gevent.pool @@ -17,14 +18,14 @@ from Peer import Peer from Worker import WorkerManager from Debug import Debug from Content import ContentManager -from 
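
In the PluginManager hunk, the py2 builtin reload() becomes importlib.reload(), and sys.modules is snapshotted with list() before iteration for the same dictionary-view reason as above: reloading can add entries to sys.modules mid-loop. A condensed sketch of the reload pattern (function name illustrative):

    import importlib
    import sys

    def reload_modules_under(path_fragment):
        # copy first: importlib.reload() may mutate sys.modules, which
        # would invalidate a live .items() view during iteration
        for name, module in list(sys.modules.items()):
            module_file = getattr(module, "__file__", None) or ""
            if module and path_fragment in module_file:
                importlib.reload(module)
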
SiteStorage import SiteStorage +from .SiteStorage import SiteStorage from Crypt import CryptHash from util import helper from util import Diff from Plugin import PluginManager from File import FileServer -from SiteAnnouncer import SiteAnnouncer -import SiteManager +from .SiteAnnouncer import SiteAnnouncer +from . import SiteManager @PluginManager.acceptPlugins @@ -32,7 +33,8 @@ class Site(object): def __init__(self, address, allow_create=True, settings=None): self.address = str(re.sub("[^A-Za-z0-9]", "", address)) # Make sure its correct address - self.address_hash = hashlib.sha256(self.address).digest() + self.address_hash = hashlib.sha256(self.address.encode("ascii")).digest() + self.address_sha1 = hashlib.sha1(self.address.encode("ascii")).digest() self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging self.log = logging.getLogger("Site:%s" % self.address_short) self.addEventListeners() @@ -127,7 +129,7 @@ class Site(object): def getSettingsCache(self): back = {} back["bad_files"] = self.bad_files - back["hashfield"] = self.content_manager.hashfield.tostring().encode("base64") + back["hashfield"] = base64.b64encode(self.content_manager.hashfield.tobytes()).decode("ascii") return back # Max site size in MB @@ -173,7 +175,7 @@ class Site(object): # Start download files file_threads = [] if download_files: - for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys(): + for file_relative_path in list(self.content_manager.contents[inner_path].get("files", {}).keys()): file_inner_path = content_inner_dir + file_relative_path # Try to diff first @@ -204,7 +206,7 @@ class Site(object): "Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" % (file_inner_path, time_diff, time_verify, time_write, time_on_done) ) - except Exception, err: + except Exception as err: self.log.debug("Failed to patch %s: %s" % (file_inner_path, err)) diff_success = False @@ -218,7 +220,7 @@ class Site(object): if inner_path == "content.json": gevent.spawn(self.updateHashfield) - for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys(): + for file_relative_path in list(self.content_manager.contents[inner_path].get("files_optional", {}).keys()): file_inner_path = content_inner_dir + file_relative_path if file_inner_path not in changed and not self.bad_files.get(file_inner_path): continue @@ -233,7 +235,7 @@ class Site(object): # Wait for includes download include_threads = [] - for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys(): + for file_relative_path in list(self.content_manager.contents[inner_path].get("includes", {}).keys()): file_inner_path = content_inner_dir + file_relative_path include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer) include_threads.append(include_thread) @@ -262,7 +264,7 @@ class Site(object): def getReachableBadFiles(self): if not self.bad_files: return False - return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3] + return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3] # Retry download bad files def retryBadFiles(self, force=False): @@ -272,7 +274,7 @@ class Site(object): content_inner_paths = [] file_inner_paths = [] - for bad_file, tries in self.bad_files.items(): + for bad_file, tries in list(self.bad_files.items()): if force or random.randint(0, min(40, tries)) < 4: # Larger number tries = less 
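
Two bytes/str conversions in the Site hunks deserve a closer look: hashlib accepts only bytes, so the address is encoded before hashing, and the removed py2 str.encode("base64") plus array.tostring() pair becomes base64.b64encode() over array.tobytes(). A round-trip sketch; the address value is a placeholder and typecode "H" simply mirrors how the hashfield appears to store 2-byte hash ids:

    import array
    import base64
    import hashlib

    address = "1ExampleAddressXXXXXXXXXXXXXXXXXXX"
    address_hash = hashlib.sha256(address.encode("ascii")).digest()  # bytes in, bytes out

    hashfield = array.array("H", [1, 2, 3])
    packed = base64.b64encode(hashfield.tobytes()).decode("ascii")   # JSON-safe str
    assert array.array("H", base64.b64decode(packed)) == hashfield
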
likely to check every 15min if bad_file.endswith("content.json"): content_inner_paths.append(bad_file) @@ -286,7 +288,7 @@ class Site(object): self.pooledDownloadFile(file_inner_paths, only_if_bad=True) def checkBadFiles(self): - for bad_file in self.bad_files.keys(): + for bad_file in list(self.bad_files.keys()): file_info = self.content_manager.getFileInfo(bad_file) if bad_file.endswith("content.json"): if file_info is False and bad_file != "content.json": @@ -374,7 +376,7 @@ class Site(object): queried.append(peer) modified_contents = [] my_modified = self.content_manager.listModified(since) - for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we + for inner_path, modified in res["modified_files"].items(): # Check if the peer has newer files than we has_newer = int(modified) > my_modified.get(inner_path, 0) has_older = int(modified) < my_modified.get(inner_path, 0) if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified): @@ -480,7 +482,7 @@ class Site(object): def redownloadContents(self): # Download all content.json again content_threads = [] - for inner_path in self.content_manager.contents.keys(): + for inner_path in list(self.content_manager.contents.keys()): content_threads.append(self.needFile(inner_path, update=True, blocking=False)) self.log.debug("Waiting %s content.json to finish..." % len(content_threads)) @@ -523,7 +525,7 @@ class Site(object): }) if result: break - except Exception, err: + except Exception as err: self.log.error("Publish error: %s" % Debug.formatException(err)) result = {"exception": Debug.formatException(err)} @@ -563,7 +565,7 @@ class Site(object): peers = set(peers) self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." 
% ( - inner_path, limit, len(self.peers), num_connected_peers, diffs.keys(), float(len(str(diffs))) / 1024 + inner_path, limit, len(self.peers), num_connected_peers, list(diffs.keys()), float(len(str(diffs))) / 1024 )) if not peers: @@ -631,8 +633,8 @@ class Site(object): ) # Copy files - for content_inner_path, content in self.content_manager.contents.items(): - file_relative_paths = content.get("files", {}).keys() + for content_inner_path, content in list(self.content_manager.contents.items()): + file_relative_paths = list(content.get("files", {}).keys()) # Sign content.json at the end to make sure every file is included file_relative_paths.sort() @@ -812,7 +814,7 @@ class Site(object): self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers))) if connected < need: # Need more than we have - for peer in self.peers.values(): + for peer in list(self.peers.values()): if not peer.connection or not peer.connection.connected: # No peer connection or disconnected peer.pex() # Initiate peer exchange if peer.connection and peer.connection.connected: @@ -831,7 +833,7 @@ class Site(object): # Return: Probably peers verified to be connectable recently def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True): - peers = self.peers.values() + peers = list(self.peers.values()) found = [] for peer in peers: if peer.key.endswith(":0"): @@ -874,7 +876,7 @@ class Site(object): # Add random peers need_more = need_num - len(found) found_more = sorted( - self.peers.values()[0:need_more * 50], + list(self.peers.values())[0:need_more * 50], key=lambda peer: peer.reputation, reverse=True )[0:need_more * 2] @@ -906,7 +908,7 @@ class Site(object): # Cleanup probably dead peers and close connection if too much def cleanupPeers(self, peers_protected=[]): - peers = self.peers.values() + peers = list(self.peers.values()) if len(peers) > 20: # Cleanup old peers removed = 0 @@ -1019,7 +1021,7 @@ class Site(object): # Send site status update to websocket clients def updateWebsocket(self, **kwargs): if kwargs: - param = {"event": kwargs.items()[0]} + param = {"event": list(kwargs.items())[0]} else: param = None for ws in self.websockets: diff --git a/src/Site/SiteAnnouncer.py b/src/Site/SiteAnnouncer.py index 48a05e69..bd2fc2cb 100644 --- a/src/Site/SiteAnnouncer.py +++ b/src/Site/SiteAnnouncer.py @@ -1,17 +1,16 @@ import random import time import hashlib -import urllib -import urllib2 +import urllib.request import struct import socket import re import collections -from lib import bencode +import bencode from lib.subtl.subtl import UdpTrackerClient -from lib.PySocks import socks -from lib.PySocks import sockshandler +import socks +import sockshandler import gevent from Plugin import PluginManager @@ -69,7 +68,7 @@ class SiteAnnouncer(object): back = [] # Type of addresses they can reach me if config.trackers_proxy == "disable": - for ip_type, opened in self.site.connection_server.port_opened.items(): + for ip_type, opened in list(self.site.connection_server.port_opened.items()): if opened: back.append(ip_type) if self.site.connection_server.tor_manager.start_onions: @@ -221,7 +220,7 @@ class SiteAnnouncer(object): if error: self.stats[tracker]["status"] = "error" self.stats[tracker]["time_status"] = time.time() - self.stats[tracker]["last_error"] = str(err).decode("utf8", "ignore") + self.stats[tracker]["last_error"] = str(error) self.stats[tracker]["time_last_error"] = time.time() self.stats[tracker]["num_error"] += 1 self.stats[tracker]["num_request"] += 1 @@ 
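
getConnectablePeers() and updateWebsocket() show the other face of dict views: they cannot be indexed or sliced, so self.peers.values()[0:n] and kwargs.items()[0] both raise TypeError under Python 3 until materialized with list(). A toy demonstration:

    peers = {"ip1": 10, "ip2": 30, "ip3": 20}
    # peers.values()[0:2] -> TypeError: 'dict_values' object is not subscriptable
    top_two = sorted(list(peers.values()), reverse=True)[0:2]
    assert top_two == [30, 20]

    kwargs = {"event": "peers_added"}
    param = {"event": list(kwargs.items())[0]}  # first (key, value) pair
    assert param == {"event": ("event", "peers_added")}
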
-359,9 +358,9 @@ class SiteAnnouncer(object): try: peer_data = bencode.decode(response)["peers"] response = None - peer_count = len(peer_data) / 6 + peer_count = int(len(peer_data) / 6) peers = [] - for peer_offset in xrange(peer_count): + for peer_offset in range(peer_count): off = 6 * peer_offset peer = peer_data[off:off + 6] addr, port = struct.unpack('!LH', peer) @@ -379,7 +378,7 @@ class SiteAnnouncer(object): peers = self.site.getConnectedPeers() if len(peers) == 0: # Small number of connected peers for this site, connect to any - peers = self.site.peers.values() + peers = list(self.site.peers.values()) need_num = 10 random.shuffle(peers) @@ -399,7 +398,7 @@ class SiteAnnouncer(object): def updateWebsocket(self, **kwargs): if kwargs: - param = {"event": kwargs.items()[0]} + param = {"event": list(kwargs.items())[0]} else: param = None diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py index 04461cd7..4b5bab97 100644 --- a/src/Site/SiteManager.py +++ b/src/Site/SiteManager.py @@ -28,11 +28,11 @@ class SiteManager(object): def load(self, cleanup=True, startup=False): self.log.debug("Loading sites...") self.loaded = False - from Site import Site + from .Site import Site address_found = [] added = 0 # Load new adresses - for address, settings in json.load(open("%s/sites.json" % config.data_dir)).iteritems(): + for address, settings in json.load(open("%s/sites.json" % config.data_dir)).items(): if address not in self.sites: if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): # Root content.json exists, try load site @@ -40,7 +40,7 @@ class SiteManager(object): try: site = Site(address, settings=settings) site.content_manager.contents.get("content.json") - except Exception, err: + except Exception as err: self.log.debug("Error loading site %s: %s" % (address, err)) continue self.sites[address] = site @@ -56,7 +56,7 @@ class SiteManager(object): # Remove deleted adresses if cleanup: - for address in self.sites.keys(): + for address in list(self.sites.keys()): if address not in address_found: del(self.sites[address]) self.log.debug("Removed site: %s" % address) @@ -93,7 +93,7 @@ class SiteManager(object): data = {} # Generate data file s = time.time() - for address, site in self.list().iteritems(): + for address, site in self.list().items(): if recalculate_size: site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site size data[address] = site.settings @@ -108,7 +108,7 @@ class SiteManager(object): time_write = time.time() - s # Remove cache from site settings - for address, site in self.list().iteritems(): + for address, site in self.list().items(): site.settings["cache"] = {} self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write)) @@ -134,12 +134,12 @@ class SiteManager(object): # Return or create site and start download site files def need(self, address, all_file=True, settings=None): - from Site import Site + from .Site import Site site = self.get(address) if not site: # Site not exist yet self.sites_changed = int(time.time()) # Try to find site with differect case - for recover_address, recover_site in self.sites.items(): + for recover_address, recover_site in list(self.sites.items()): if recover_address.lower() == address.lower(): return recover_site diff --git a/src/Site/SiteStorage.py b/src/Site/SiteStorage.py index d901d5fa..7cc45541 100644 --- a/src/Site/SiteStorage.py +++ b/src/Site/SiteStorage.py @@ -23,7 +23,7 @@ from Translate import 
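
The tracker-response hunk swaps xrange() for range() and forces the peer count to int, since "/" is true division in Python 3 and len(peer_data) / 6 would yield a float that range() rejects. For context, the compact tracker format packs each peer as 6 bytes, a 4-byte IPv4 address plus a 2-byte big-endian port, which is what struct "!LH" unpacks. A standalone decode of one synthetic peer record:

    import socket
    import struct

    peer_data = socket.inet_aton("1.2.3.4") + struct.pack("!H", 15441)  # one packed peer

    peer_count = len(peer_data) // 6       # floor division keeps it an int
    peers = []
    for peer_offset in range(peer_count):  # range() replaces py2 xrange()
        off = 6 * peer_offset
        addr, port = struct.unpack("!LH", peer_data[off:off + 6])
        peers.append((socket.inet_ntoa(struct.pack("!L", addr)), port))

    assert peers == [("1.2.3.4", 15441)]
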
translate as _ class SiteStorage(object): def __init__(self, site, allow_create=True): self.site = site - self.directory = u"%s/%s" % (config.data_dir, self.site.address) # Site data diretory + self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir self.log = site.log self.db = None # Db class @@ -59,7 +59,7 @@ class SiteStorage(object): def getDbSchema(self): try: schema = self.loadJson("dbschema.json") - except Exception, err: + except Exception as err: raise Exception("dbschema.json is not a valid JSON: %s" % err) return schema @@ -92,7 +92,7 @@ class SiteStorage(object): # Return possible db files for the site def getDbFiles(self): found = 0 - for content_inner_path, content in self.site.content_manager.contents.iteritems(): + for content_inner_path, content in self.site.content_manager.contents.items(): # content.json file itself if self.isFile(content_inner_path): yield content_inner_path, self.getPath(content_inner_path) @@ -100,7 +100,7 @@ class SiteStorage(object): self.log.error("[MISSING] %s" % content_inner_path) # Data files in content.json content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site - for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): + for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"): continue # We only interesed in json files file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir @@ -181,7 +181,7 @@ class SiteStorage(object): self.event_db_busy.get() # Wait for event try: res = self.getDb().execute(query, params) - except sqlite3.DatabaseError, err: + except sqlite3.DatabaseError as err: if err.__class__.__name__ == "DatabaseError": self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query)) self.rebuildDb() @@ -240,7 +240,7 @@ class SiteStorage(object): os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after)) err = None break - except Exception, err: + except Exception as err: self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry)) time.sleep(0.1 + retry) if err: @@ -297,7 +297,7 @@ class SiteStorage(object): self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file)) try: self.updateDbFile(inner_path, file) - except Exception, err: + except Exception as err: self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err))) self.closeDb() @@ -363,9 +363,9 @@ class SiteStorage(object): return self.directory if ".." 
in inner_path: - raise Exception(u"File not allowed: %s" % inner_path) + raise Exception("File not allowed: %s" % inner_path) - return u"%s/%s" % (self.directory, inner_path) + return "%s/%s" % (self.directory, inner_path) # Get site dir relative path def getInnerPath(self, path): @@ -375,7 +375,7 @@ class SiteStorage(object): if path.startswith(self.directory): inner_path = path[len(self.directory) + 1:] else: - raise Exception(u"File not allowed: %s" % path) + raise Exception("File not allowed: %s" % path) return inner_path # Verify all files sha512sum using content.json @@ -390,7 +390,7 @@ class SiteStorage(object): self.log.debug("VerifyFile content.json not exists") self.site.needFile("content.json", update=True) # Force update to fix corrupt file self.site.content_manager.loadContent() # Reload content.json - for content_inner_path, content in self.site.content_manager.contents.items(): + for content_inner_path, content in list(self.site.content_manager.contents.items()): back["num_content"] += 1 i += 1 if i % 50 == 0: @@ -400,7 +400,7 @@ class SiteStorage(object): self.log.debug("[MISSING] %s" % content_inner_path) bad_files.append(content_inner_path) - for file_relative_path in content.get("files", {}).keys(): + for file_relative_path in list(content.get("files", {}).keys()): back["num_file"] += 1 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / @@ -418,7 +418,7 @@ class SiteStorage(object): else: try: ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) - except Exception, err: + except Exception as err: ok = False if not ok: @@ -430,7 +430,7 @@ class SiteStorage(object): # Optional files optional_added = 0 optional_removed = 0 - for file_relative_path in content.get("files_optional", {}).keys(): + for file_relative_path in list(content.get("files_optional", {}).keys()): back["num_optional"] += 1 file_node = content["files_optional"][file_relative_path] file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir @@ -451,7 +451,7 @@ class SiteStorage(object): else: try: ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) - except Exception, err: + except Exception as err: ok = False if ok: @@ -475,7 +475,7 @@ class SiteStorage(object): ) self.site.content_manager.contents.db.processDelayed() - time.sleep(0.0001) # Context switch to avoid gevent hangs + time.sleep(0.001) # Context switch to avoid gevent hangs return back # Check and try to fix site files integrity @@ -497,15 +497,15 @@ class SiteStorage(object): def deleteFiles(self): self.log.debug("Deleting files from content.json...") files = [] # Get filenames - for content_inner_path in self.site.content_manager.contents.keys(): + for content_inner_path in list(self.site.content_manager.contents.keys()): content = self.site.content_manager.contents.get(content_inner_path, {}) files.append(content_inner_path) # Add normal files - for file_relative_path in content.get("files", {}).keys(): + for file_relative_path in list(content.get("files", {}).keys()): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir files.append(file_inner_path) # Add optional files - for file_relative_path in content.get("files_optional", {}).keys(): + for file_relative_path in list(content.get("files_optional", {}).keys()): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path 
# Relative to site dir files.append(file_inner_path) @@ -518,7 +518,7 @@ class SiteStorage(object): db_path = self.getPath(schema["db_file"]) if os.path.isfile(db_path): os.unlink(db_path) - except Exception, err: + except Exception as err: self.log.error("Db file delete error: %s" % err) for inner_path in files: @@ -528,8 +528,8 @@ class SiteStorage(object): try: os.unlink(path) break - except Exception, err: - self.log.error(u"Error removing %s: %s, try #%s" % (inner_path, err, retry)) + except Exception as err: + self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry)) time.sleep(float(retry) / 10) self.onUpdated(inner_path, False) diff --git a/src/Site/__init__.py b/src/Site/__init__.py index 07a21d40..340385b3 100644 --- a/src/Site/__init__.py +++ b/src/Site/__init__.py @@ -1,3 +1,3 @@ -from Site import Site -from SiteStorage import SiteStorage -from SiteAnnouncer import SiteAnnouncer +from .Site import Site +from .SiteStorage import SiteStorage +from .SiteAnnouncer import SiteAnnouncer diff --git a/src/Test/BenchmarkSsl.py b/src/Test/BenchmarkSsl.py index 06b18537..06181b89 100644 --- a/src/Test/BenchmarkSsl.py +++ b/src/Test/BenchmarkSsl.py @@ -8,7 +8,7 @@ import socket import ssl sys.path.append(os.path.abspath("..")) # Imports relative to src dir -import cStringIO as StringIO +import io as StringIO import gevent from gevent.server import StreamServer @@ -46,8 +46,8 @@ def handle(sock_raw, addr): ) else: sock.sendall(data) - except Exception, err: - print err + except Exception as err: + print(err) try: sock.shutdown(gevent.socket.SHUT_WR) sock.close() @@ -102,7 +102,7 @@ def getData(): total_num += 1 total_bytes += buff.tell() if not data: - print "No data" + print("No data") sock.shutdown(gevent.socket.SHUT_WR) sock.close() @@ -119,8 +119,8 @@ def info(): else: memory_info = process.get_memory_info while 1: - print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, - print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20) + print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ') + print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)) time.sleep(1) gevent.spawn(info) @@ -132,7 +132,7 @@ for test in range(1): gevent.joinall(clients) -print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s +print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s) # Separate client/server process: # 10*10*100: diff --git a/src/Test/Spy.py b/src/Test/Spy.py index 8d8f1800..8062d063 100644 --- a/src/Test/Spy.py +++ b/src/Test/Spy.py @@ -1,7 +1,7 @@ class Spy: def __init__(self, obj, func_name): self.obj = obj - self.func_name = func_name + self.__name__ = func_name self.func_original = getattr(self.obj, func_name) self.calls = [] @@ -10,11 +10,11 @@ class Spy: call = dict(enumerate(args, 1)) call[0] = cls call.update(kwargs) - print "Logging", call + print("Logging", call) self.calls.append(call) return self.func_original(cls, *args, **kwargs) - setattr(self.obj, self.func_name, loggedFunc) + setattr(self.obj, self.__name__, loggedFunc) return self.calls def __exit__(self, *args, **kwargs): - setattr(self.obj, self.func_name, self.func_original) \ No newline at end of file + setattr(self.obj, self.__name__, self.func_original) \ No newline at end of file diff --git a/src/Test/TestContent.py b/src/Test/TestContent.py index e4afb91e..2f29a103 100644 --- a/src/Test/TestContent.py +++ b/src/Test/TestContent.py @@ -1,6 +1,6 @@ 
import json import time -from cStringIO import StringIO +import io import pytest @@ -52,7 +52,7 @@ class TestContent: # Normal data data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} - data = StringIO(json.dumps(data_dict)) + data = io.StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) # Reset del data_dict["signs"] @@ -60,7 +60,7 @@ class TestContent: # Too large data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} - data = StringIO(json.dumps(data_dict)) + data = io.StringIO(json.dumps(data_dict)) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) assert "Include too large" in str(err) @@ -72,7 +72,7 @@ class TestContent: # Not allowed file data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"] data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} - data = StringIO(json.dumps(data_dict)) + data = io.StringIO(json.dumps(data_dict)) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) assert "File not allowed" in str(err) @@ -83,7 +83,7 @@ class TestContent: # Should work again data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} - data = StringIO(json.dumps(data_dict)) + data = io.StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"]) @@ -166,7 +166,7 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.StringIO(json.dumps(data_dict)) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(inner_path, data, ignore_same=False) assert "Wrong site address" in str(err) @@ -178,7 +178,7 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.StringIO(json.dumps(data_dict)) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(inner_path, data, ignore_same=False) assert "Wrong inner_path" in str(err) @@ -190,7 +190,7 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) def testVerifyInnerPath(self, site): @@ -206,7 +206,7 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg"]: @@ -218,7 
+218,7 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.StringIO(json.dumps(data_dict)) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(inner_path, data, ignore_same=False) assert "Invalid relative path" in str(err) diff --git a/src/Test/TestContentUser.py b/src/Test/TestContentUser.py index 46d8bdef..ff5ab76c 100644 --- a/src/Test/TestContentUser.py +++ b/src/Test/TestContentUser.py @@ -1,5 +1,5 @@ import json -from cStringIO import StringIO +from io import StringIO import pytest diff --git a/src/Test/TestDb.py b/src/Test/TestDb.py index d821fe5d..10777f2f 100644 --- a/src/Test/TestDb.py +++ b/src/Test/TestDb.py @@ -1,4 +1,4 @@ -import cStringIO as StringIO +import io class TestDb: @@ -63,11 +63,11 @@ class TestDb: # Large ammount of IN values assert db.execute( "SELECT COUNT(*) AS num FROM test WHERE ?", - {"not__test_id": range(2, 3000)} + {"not__test_id": list(range(2, 3000))} ).fetchone()["num"] == 2 assert db.execute( "SELECT COUNT(*) AS num FROM test WHERE ?", - {"test_id": range(50, 3000)} + {"test_id": list(range(50, 3000))} ).fetchone()["num"] == 50 assert db.execute( @@ -103,7 +103,7 @@ class TestDb: def testUpdateJson(self, db): - f = StringIO.StringIO() + f = io.StringIO() f.write(""" { "test": [ @@ -118,7 +118,7 @@ class TestDb: def testUnsafePattern(self, db): db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]} # Only repetition of . supported - f = StringIO.StringIO() + f = io.StringIO() f.write(""" { "test": [ @@ -129,4 +129,4 @@ class TestDb: f.seek(0) assert db.updateJson(db.db_dir + "data.json", f) is False assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 0 - assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0 + assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0 \ No newline at end of file diff --git a/src/Test/TestDiff.py b/src/Test/TestDiff.py index 0e387e2a..2f80060b 100644 --- a/src/Test/TestDiff.py +++ b/src/Test/TestDiff.py @@ -1,4 +1,4 @@ -import cStringIO as StringIO +import io from util import Diff @@ -31,19 +31,19 @@ class TestDiff: ) == [("-", 11)] def testDiffLimit(self): - old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix") - new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix") + old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") + new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix") actions = Diff.diff(list(old_f), list(new_f), limit=1024) assert actions - old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix") - new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix"*1024) + old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") + new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix"*1024) actions = Diff.diff(list(old_f), list(new_f), limit=1024) assert actions is False def testPatch(self): - old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix") - new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix") + old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") + new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix") actions = Diff.diff( list(old_f), list(new_f) diff --git a/src/Test/TestFileRequest.py b/src/Test/TestFileRequest.py index 5210ce82..f2da250b 100644 --- a/src/Test/TestFileRequest.py +++ b/src/Test/TestFileRequest.py @@ -1,4 +1,4 @@ -import cStringIO as StringIO +import io import pytest import time @@ -20,10 +20,10 @@ class TestFileRequest: # 
Normal request response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0}) - assert "sign" in response["body"] + assert b"sign" in response["body"] response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")}) - assert "sign" in response["body"] + assert b"sign" in response["body"] # Invalid file response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}) @@ -57,25 +57,25 @@ class TestFileRequest: connection = client.getConnection(file_server.ip, 1544) file_server.sites[site.address] = site - buff = StringIO.StringIO() + buff = io.BytesIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff) assert "stream_bytes" in response - assert "sign" in buff.getvalue() + assert b"sign" in buff.getvalue() # Invalid file - buff = StringIO.StringIO() + buff = io.BytesIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff) assert "File read error" in response["error"] # Location over size - buff = StringIO.StringIO() + buff = io.BytesIO() response = connection.request( "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff ) assert "File read error" in response["error"] # Stream from parent dir - buff = StringIO.StringIO() + buff = io.BytesIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff) assert "File read error" in response["error"] diff --git a/src/Test/TestPeer.py b/src/Test/TestPeer.py index 32ca1323..f7bdb6da 100644 --- a/src/Test/TestPeer.py +++ b/src/Test/TestPeer.py @@ -1,12 +1,12 @@ import time -from cStringIO import StringIO +import io import pytest from File import FileServer from File import FileRequest from Crypt import CryptHash -import Spy +from . 
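
The test fixes above all follow one rule: cStringIO.StringIO is gone, and the replacement depends on what the consumer reads. Buffers handed to hashing or network code become io.BytesIO, while JSON test payloads can stay text as io.StringIO. A simplified sketch of the kind of helper the hashfield test exercises (the real CryptHash.sha512sum may differ in detail):

    import hashlib
    import io

    def sha512sum(file_obj, blocksize=65536):
        h = hashlib.sha512()
        for block in iter(lambda: file_obj.read(blocksize), b""):
            h.update(block)
        return h.hexdigest()

    digest = sha512sum(io.BytesIO(b"hello"))  # binary buffer works
    assert digest.startswith("9b71d224")      # sha512("hello") prefix
    # io.StringIO("hello") would fail here: update() needs bytes, and the
    # b"" sentinel never matches what a text-mode buffer returns
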
import Spy @pytest.mark.usefixtures("resetSettings") @@ -43,17 +43,17 @@ class TestPeer: # Testing streamFile buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True) - assert "sign" in buff.getvalue() + assert b"sign" in buff.getvalue() # Testing getFile buff = peer_file_server.getFile(site_temp.address, "content.json") - assert "sign" in buff.getvalue() + assert b"sign" in buff.getvalue() connection.close() client.stop() def testHashfield(self, site): - sample_hash = site.content_manager.contents["content.json"]["files_optional"].values()[0]["sha512"] + sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"] site.storage.verifyFiles(quick_check=True) # Find what optional files we have @@ -65,7 +65,7 @@ class TestPeer: assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield # Add new hash - new_hash = CryptHash.sha512sum(StringIO("hello")) + new_hash = CryptHash.sha512sum(io.BytesIO(b"hello")) assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield assert site.content_manager.hashfield.appendHash(new_hash) assert not site.content_manager.hashfield.appendHash(new_hash) # Don't add second time diff --git a/src/Test/TestSite.py b/src/Test/TestSite.py index b9a40064..b869f093 100644 --- a/src/Test/TestSite.py +++ b/src/Test/TestSite.py @@ -36,8 +36,8 @@ class TestSite: # Test re-cloning (updating) # Changes in non-data files should be overwritten - new_site.storage.write("index.html", "this will be overwritten") - assert new_site.storage.read("index.html") == "this will be overwritten" + new_site.storage.write("index.html", b"this will be overwritten") + assert new_site.storage.read("index.html") == b"this will be overwritten" # Changes in data file should be kept after re-cloning changed_contentjson = new_site.storage.loadJson("content.json") diff --git a/src/Test/TestSiteDownload.py b/src/Test/TestSiteDownload.py index 842cfc82..08d1e910 100644 --- a/src/Test/TestSiteDownload.py +++ b/src/Test/TestSiteDownload.py @@ -9,7 +9,7 @@ from Config import config from File import FileRequest from File import FileServer from Site import Site -import Spy +from . 
import Spy @pytest.mark.usefixtures("resetTempSettings") @@ -289,7 +289,7 @@ class TestSiteDownload: # Update file data_original = site.storage.open("data/data.json").read() - data_new = data_original.replace('"ZeroBlog"', '"UpdatedZeroBlog"') + data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"') assert data_original != data_new site.storage.open("data/data.json", "wb").write(data_new) @@ -309,13 +309,13 @@ class TestSiteDownload: assert site_temp.storage.open("data/data.json").read() == data_new # Close connection to avoid update spam limit - site.peers.values()[0].remove() + list(site.peers.values())[0].remove() site.addPeer(file_server.ip, 1545) - site_temp.peers.values()[0].ping() # Connect back + list(site_temp.peers.values())[0].ping() # Connect back time.sleep(0.1) # Update with patch - data_new = data_original.replace('"ZeroBlog"', '"PatchedZeroBlog"') + data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"') assert data_original != data_new site.storage.open("data/data.json-new", "wb").write(data_new) @@ -328,7 +328,7 @@ class TestSiteDownload: assert not site.storage.isFile("data/data.json-new") # New data file removed assert site.storage.open("data/data.json").read() == data_new # -new postfix removed assert "data/data.json" in diffs - assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']), ('=', 31102)] + assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)] # Publish with patch site.log.info("Publish new data.json with patch") diff --git a/src/Test/TestTor.py b/src/Test/TestTor.py index 9479aa2f..d53525a4 100644 --- a/src/Test/TestTor.py +++ b/src/Test/TestTor.py @@ -34,14 +34,14 @@ class TestTor: address = tor_manager.addOnion() # Sign - sign = CryptRsa.sign("hello", tor_manager.getPrivatekey(address)) + sign = CryptRsa.sign(b"hello", tor_manager.getPrivatekey(address)) assert len(sign) == 128 # Verify publickey = CryptRsa.privatekeyToPublickey(tor_manager.getPrivatekey(address)) assert len(publickey) == 140 - assert CryptRsa.verify("hello", publickey, sign) - assert not CryptRsa.verify("not hello", publickey, sign) + assert CryptRsa.verify(b"hello", publickey, sign) + assert not CryptRsa.verify(b"not hello", publickey, sign) # Pub to address assert CryptRsa.publickeyToOnion(publickey) == address @@ -54,7 +54,7 @@ class TestTor: file_server.tor_manager.start_onions = True address = file_server.tor_manager.getOnion(site.address) assert address - print "Connecting to", address + print("Connecting to", address) for retry in range(5): # Wait for hidden service creation time.sleep(10) try: diff --git a/src/Test/TestTranslate.py b/src/Test/TestTranslate.py index 530d1bcf..30eb6de4 100644 --- a/src/Test/TestTranslate.py +++ b/src/Test/TestTranslate.py @@ -1,5 +1,3 @@ -import os - from Translate import Translate class TestTranslate: @@ -13,7 +11,6 @@ class TestTranslate: assert 'translated = _("translated")' in data_translated assert 'not_translated = "original"' in data_translated - def testTranslateStrictNamed(self): translate = Translate() data = """ diff --git a/src/Test/TestUpnpPunch.py b/src/Test/TestUpnpPunch.py index 18338bb9..c7a0707e 100644 --- a/src/Test/TestUpnpPunch.py +++ b/src/Test/TestUpnpPunch.py @@ -1,5 +1,5 @@ import socket -from urlparse import urlparse +from urllib.parse import urlparse import pytest import mock @@ -10,7 +10,7 @@ from util import UpnpPunch as upnp @pytest.fixture def mock_socket(): mock_socket = mock.MagicMock() 
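
The TestUpnpPunch updates illustrate a migration detail that is easy to miss: mock patch targets are strings, so moving from urllib2 to urllib.request silently breaks 'urllib2.urlopen' patches, and socket-level mocks must now return bytes. A sketch using the stdlib's unittest.mock; the test file itself imports the external mock package, which exposes the same API:

    import urllib.request
    from unittest import mock

    with mock.patch("urllib.request.urlopen") as mock_urlopen:
        mock_urlopen.return_value.read.return_value = b"<root/>"  # sockets speak bytes
        data = urllib.request.urlopen("http://example.com", timeout=5).read()
    assert data == b"<root/>"
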
diff --git a/src/Test/TestUpnpPunch.py b/src/Test/TestUpnpPunch.py
index 18338bb9..c7a0707e 100644
--- a/src/Test/TestUpnpPunch.py
+++ b/src/Test/TestUpnpPunch.py
@@ -1,5 +1,5 @@
 import socket
-from urlparse import urlparse
+from urllib.parse import urlparse
 
 import pytest
 import mock
@@ -10,7 +10,7 @@ from util import UpnpPunch as upnp
 @pytest.fixture
 def mock_socket():
     mock_socket = mock.MagicMock()
-    mock_socket.recv = mock.MagicMock(return_value='Hello')
+    mock_socket.recv = mock.MagicMock(return_value=b'Hello')
     mock_socket.bind = mock.MagicMock()
     mock_socket.send_to = mock.MagicMock()
 
@@ -79,12 +79,12 @@ class TestUpnpPunch(object):
             upnp._retrieve_location_from_ssdp(rsp)
 
     def test_retrieve_igd_profile(self, url_obj):
-        with mock.patch('urllib2.urlopen') as mock_urlopen:
+        with mock.patch('urllib.request.urlopen') as mock_urlopen:
             upnp._retrieve_igd_profile(url_obj)
             mock_urlopen.assert_called_with(url_obj.geturl(), timeout=5)
 
     def test_retrieve_igd_profile_timeout(self, url_obj):
-        with mock.patch('urllib2.urlopen') as mock_urlopen:
+        with mock.patch('urllib.request.urlopen') as mock_urlopen:
             mock_urlopen.side_effect = socket.error('Timeout error')
             with pytest.raises(upnp.IGDError):
                 upnp._retrieve_igd_profile(url_obj)
diff --git a/src/Test/TestUser.py b/src/Test/TestUser.py
index 1fcdd1b7..e5ec5c8c 100644
--- a/src/Test/TestUser.py
+++ b/src/Test/TestUser.py
@@ -7,7 +7,7 @@ from Crypt import CryptBitcoin
 class TestUser:
     def testAddress(self, user):
         assert user.master_address == "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc"
-        address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811L
+        address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811
         assert user.getAddressAuthIndex("15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc") == address_index
 
     # Re-generate privatekey based on address_index
diff --git a/src/Test/TestWeb.py b/src/Test/TestWeb.py
index 10e1829d..2ce66c98 100644
--- a/src/Test/TestWeb.py
+++ b/src/Test/TestWeb.py
@@ -1,4 +1,4 @@
-import urllib
+import urllib.request
 
 import pytest
 
@@ -26,7 +26,7 @@ def getContextUrl(browser):
 
 def getUrl(url):
-    content = urllib.urlopen(url).read()
+    content = urllib.request.urlopen(url).read().decode("utf8")
     assert "server error" not in content.lower(), "Got a server error! " + repr(url)
     return content
diff --git a/src/Test/conftest.py b/src/Test/conftest.py
index 01f1e45d..f037cd0e 100644
--- a/src/Test/conftest.py
+++ b/src/Test/conftest.py
@@ -1,6 +1,6 @@
 import os
 import sys
-import urllib
+import urllib.request
 import time
 import logging
 import json
@@ -15,9 +15,11 @@ import gevent
 from gevent import monkey
 monkey.patch_all(thread=False, subprocess=False)
 
+
 def pytest_addoption(parser):
     parser.addoption("--slow", action='store_true', default=False, help="Also run slow tests")
 
+
 def pytest_collection_modifyitems(config, items):
     if config.getoption("--slow"):
         # --runslow given in cli: do not skip slow tests
@@ -44,18 +46,18 @@ config.action = "test"
 
 logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
 
+
 # Set custom formatter with realative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log)
 class TimeFilter(logging.Filter):
 
     def filter(self, record):
         try:
-            last = self.last
+            last = self.last
         except AttributeError:
-            last = record.relativeCreated
+            last = record.relativeCreated
 
-        delta = datetime.datetime.fromtimestamp(record.relativeCreated/1000.0) - datetime.datetime.fromtimestamp(last/1000.0)
-        record.relative = '{0:.3f}'.format(delta.seconds + delta.microseconds/1000000.0)
+        delta = datetime.datetime.fromtimestamp(record.relativeCreated / 1000.0) - datetime.datetime.fromtimestamp(last / 1000.0)
+        record.relative = '{0:.3f}'.format(delta.seconds + delta.microseconds / 1000000.0)
 
         self.last = record.relativeCreated
         return True
@@ -204,7 +206,7 @@ def user():
 def browser(request):
     try:
         from selenium import webdriver
-        print "Starting chromedriver..."
+ print("Starting chromedriver...") options = webdriver.chrome.options.Options() options.add_argument("--headless") options.add_argument("--window-size=1920x1080") @@ -214,7 +216,7 @@ def browser(request): def quit(): browser.quit() request.addfinalizer(quit) - except Exception, err: + except Exception as err: raise pytest.skip("Test requires selenium + chromedriver: %s" % err) return browser @@ -222,8 +224,8 @@ def browser(request): @pytest.fixture(scope="session") def site_url(): try: - urllib.urlopen(SITE_URL).read() - except Exception, err: + urllib.request.urlopen(SITE_URL).read() + except Exception as err: raise pytest.skip("Test requires zeronet client running: %s" % err) return SITE_URL @@ -253,8 +255,8 @@ def file_server4(request): conn = file_server.getConnection("127.0.0.1", 1544) conn.close() break - except Exception, err: - print err + except Exception as err: + print(err) assert file_server.running file_server.ip_incoming = {} # Reset flood protection @@ -263,6 +265,7 @@ def file_server4(request): request.addfinalizer(stop) return file_server + @pytest.fixture def file_server6(request): file_server6 = FileServer("::1", 1544) @@ -280,8 +283,8 @@ def file_server6(request): conn = file_server6.getConnection("::1", 1544) conn.close() break - except Exception, err: - print err + except Exception as err: + print(err) assert file_server6.running file_server6.ip_incoming = {} # Reset flood protection @@ -318,10 +321,11 @@ def tor_manager(): tor_manager.start() assert tor_manager.conn tor_manager.startOnions() - except Exception, err: + except Exception as err: raise pytest.skip("Test requires Tor with ControlPort: %s, %s" % (config.tor_controller, err)) return tor_manager + @pytest.fixture() def db(request): db_path = "%s/zeronet.db" % config.data_dir diff --git a/src/Tor/TorManager.py b/src/Tor/TorManager.py index 70d6c015..026aef3d 100644 --- a/src/Tor/TorManager.py +++ b/src/Tor/TorManager.py @@ -110,8 +110,8 @@ class TorManager(object): break # Terminate on exit atexit.register(self.stopTor) - except Exception, err: - self.log.error(u"Error starting Tor client: %s" % Debug.formatException(str(err).decode("utf8", "ignore"))) + except Exception as err: + self.log.error("Error starting Tor client: %s" % Debug.formatException(str(err))) self.enabled = False self.starting = False self.event_started.set(False) @@ -125,7 +125,7 @@ class TorManager(object): try: if self.isSubprocessRunning(): self.request("SIGNAL SHUTDOWN") - except Exception, err: + except Exception as err: self.log.error("Error stopping Tor: %s" % err) def downloadTor(self): @@ -235,18 +235,18 @@ class TorManager(object): def resetCircuits(self): res = self.request("SIGNAL NEWNYM") if "250 OK" not in res: - self.setStatus(u"Reset circuits error (%s)" % res) + self.setStatus("Reset circuits error (%s)" % res) self.log.error("Tor reset circuits error: %s" % res) def addOnion(self): if len(self.privatekeys) >= config.tor_hs_limit: - return random.choice([key for key in self.privatekeys.keys() if key != self.site_onions.get("global")]) + return random.choice([key for key in list(self.privatekeys.keys()) if key != self.site_onions.get("global")]) result = self.makeOnionAndKey() if result: onion_address, onion_privatekey = result self.privatekeys[onion_address] = onion_privatekey - self.setStatus(u"OK (%s onions running)" % len(self.privatekeys)) + self.setStatus("OK (%s onions running)" % len(self.privatekeys)) SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port)) return onion_address else: @@ 
-259,7 +259,7 @@ class TorManager(object):
             onion_address, onion_privatekey = match.groups()
             return (onion_address, onion_privatekey)
         else:
-            self.setStatus(u"AddOnion error (%s)" % res)
+            self.setStatus("AddOnion error (%s)" % res)
             self.log.error("Tor addOnion error: %s" % res)
             return False
@@ -270,7 +270,7 @@ class TorManager(object):
             self.setStatus("OK (%s onion running)" % len(self.privatekeys))
             return True
         else:
-            self.setStatus(u"DelOnion error (%s)" % res)
+            self.setStatus("DelOnion error (%s)" % res)
             self.log.error("Tor delOnion error: %s" % res)
             self.disconnect()
             return False
@@ -291,11 +291,11 @@ class TorManager(object):
         back = ""
         for retry in range(2):
             try:
-                conn.sendall("%s\r\n" % cmd)
+                conn.sendall(b"%s\r\n" % cmd.encode("utf8"))
                 while not back.endswith("250 OK\r\n"):
                     back += conn.recv(1024 * 64).decode("utf8", "ignore")
                 break
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Tor send error: %s, reconnecting..." % err)
                 self.disconnect()
                 time.sleep(1)
diff --git a/src/Tor/__init__.py b/src/Tor/__init__.py
index 250eac2d..d0fcffaf 100644
--- a/src/Tor/__init__.py
+++ b/src/Tor/__init__.py
@@ -1 +1 @@
-from TorManager import TorManager
\ No newline at end of file
+from .TorManager import TorManager
\ No newline at end of file
diff --git a/src/Translate/Translate.py b/src/Translate/Translate.py
index 90b070b9..6865ad7b 100644
--- a/src/Translate/Translate.py
+++ b/src/Translate/Translate.py
@@ -3,7 +3,7 @@ import json
 import logging
 import inspect
 import re
-import cgi
+import html
 import string
 
 from Config import config
@@ -15,8 +15,8 @@ class EscapeProxy(dict):
     # Automatically escape the accessed string values
     def __getitem__(self, key):
         val = dict.__getitem__(self, key)
-        if type(val) in (str, unicode):
-            return cgi.escape(val, quote=True)
+        if type(val) is str:
+            return html.escape(val)
         elif type(val) is dict:
             return EscapeProxy(val)
         elif type(val) is list:
@@ -105,7 +105,7 @@ class Translate(dict):
             data = data.decode("utf8")
 
         patterns = []
-        for key, val in translate_table.items():
+        for key, val in list(translate_table.items()):
             if key.startswith("_("):  # Problematic string: only match if called between _(" ") function
                 key = key.replace("_(", "").replace(")", "").replace(", ", '", "')
                 translate_table[key] = "|" + val
@@ -128,6 +128,6 @@ class Translate(dict):
         else:
             pattern = '"(' + "|".join(patterns) + ')"'
         data = re.sub(pattern, replacer, data)
-        return data.encode("utf8")
+        return data
 
 translate = Translate()
diff --git a/src/Translate/__init__.py b/src/Translate/__init__.py
index 40f34063..ba0ab6d4 100644
--- a/src/Translate/__init__.py
+++ b/src/Translate/__init__.py
@@ -1 +1 @@
-from Translate import *
\ No newline at end of file
+from .Translate import *
\ No newline at end of file
diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py
index 71c380d0..05ec047f 100644
--- a/src/Ui/UiRequest.py
+++ b/src/Ui/UiRequest.py
@@ -3,7 +3,8 @@ import re
 import os
 import mimetypes
 import json
-import cgi
+import html
+import urllib.parse
 
 import gevent
 
@@ -157,7 +158,8 @@ class UiRequest(object):
         if func:
             return func()
         else:
-            return self.error404(path)
+            ret = self.error404(path)
+            return ret
 
     # The request is proxied by chrome extension or a transparent proxy
     def isProxyRequest(self):
@@ -190,7 +192,7 @@ class UiRequest(object):
     # Return: <dict> Posted variables
     def getPosted(self):
         if self.env['REQUEST_METHOD'] == "POST":
-            return dict(cgi.parse_qsl(
+            return dict(urllib.parse.parse_qsl(
                 self.env['wsgi.input'].readline().decode()
             ))
         else:
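A quick sanity check on the cgi.escape to html.escape swap above (Translate.py here, UiRequest.py below): html.escape() escapes quotes by default, so it covers the old cgi.escape(val, quote=True) calls. The one behavioral difference is that html.escape() also escapes single quotes, which cgi.escape() never did; for values that end up inside HTML attributes this is extra safety. A standalone illustration, not part of the patch:

    import html

    print(html.escape('<b title="x">'))  # &lt;b title=&quot;x&quot;&gt;
    print(html.escape("it's"))           # it&#x27;s (single quotes are escaped too)
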
@@ -200,7 +202,7 @@ class UiRequest(object):
     def getCookies(self):
         raw_cookies = self.env.get('HTTP_COOKIE')
         if raw_cookies:
-            cookies = cgi.parse_qsl(raw_cookies)
+            cookies = urllib.parse.parse_qsl(raw_cookies)
             return {key.strip(): val for key, val in cookies}
         else:
             return {}
@@ -282,12 +284,12 @@ class UiRequest(object):
             headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0"  # No caching at all
         headers["Content-Type"] = content_type
         headers.update(extra_headers)
-        return self.start_response(status_texts[status], headers.items())
+        return self.start_response(status_texts[status], list(headers.items()))
 
     # Renders a template
     def render(self, template_path, *args, **kwargs):
         template = open(template_path).read()
-        for key, val in kwargs.items():
+        for key, val in list(kwargs.items()):
             template = template.replace("{%s}" % key, "%s" % val)
         return template.encode("utf8")
 
@@ -296,7 +298,7 @@ class UiRequest(object):
     # Redirect to an url
     def actionRedirect(self, url):
         self.start_response('301 Redirect', [('Location', str(url))])
-        yield "Location changed: %s" % url
+        yield b"Location changed: %s" % url.encode("utf8")
 
     def actionIndex(self):
         return self.actionRedirect("/" + config.homepage)
@@ -447,11 +449,11 @@ class UiRequest(object):
             content = site.content_manager.contents["content.json"]
             if content.get("background-color"):
                 background_color = content.get("background-color-%s" % theme, content["background-color"])
-                body_style += "background-color: %s;" % cgi.escape(background_color, True)
+                body_style += "background-color: %s;" % html.escape(background_color)
             if content.get("viewport"):
-                meta_tags += '<meta name="viewport" id="viewport" content="%s">' % cgi.escape(content["viewport"], True)
+                meta_tags += '<meta name="viewport" id="viewport" content="%s">' % html.escape(content["viewport"])
             if content.get("favicon"):
-                meta_tags += '<link rel="icon" href="%s%s">' % (root_url, cgi.escape(content["favicon"], True))
+                meta_tags += '<link rel="icon" href="%s%s">' % (root_url, html.escape(content["favicon"]))
 
         if content.get("postmessage_nonce_security"):
             postmessage_nonce_security = "true"
@@ -470,7 +472,7 @@ class UiRequest(object):
             file_url=re.escape(file_url),
             file_inner_path=re.escape(file_inner_path),
             address=site.address,
-            title=cgi.escape(title, True),
+            title=html.escape(title),
             body_style=body_style,
             meta_tags=meta_tags,
             query_string=re.escape(inner_query_string),
@@ -612,7 +614,7 @@ class UiRequest(object):
             return self.error400()
 
     def actionSiteAdd(self):
-        post = dict(cgi.parse_qsl(self.env["wsgi.input"].read()))
+        post = dict(urllib.parse.parse_qsl(self.env["wsgi.input"].read().decode()))
         if post["add_nonce"] not in self.server.add_nonces:
             return self.error403("Add nonce error.")
         self.server.add_nonces.remove(post["add_nonce"])
@@ -626,7 +628,7 @@ class UiRequest(object):
         self.sendHeader(200, "text/html", noscript=True)
         template = open("src/Ui/template/site_add.html").read()
-        template = template.replace("{url}", cgi.escape(self.env["PATH_INFO"], True))
+        template = template.replace("{url}", html.escape(self.env["PATH_INFO"]))
         template = template.replace("{address}", path_parts["address"])
         template = template.replace("{add_nonce}", self.getAddNonce())
         return template
@@ -634,7 +636,7 @@ class UiRequest(object):
     def replaceHtmlVariables(self, block, path_parts):
         user = self.getCurrentUser()
         themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
-        block = block.replace("{themeclass}", themeclass.encode("utf8"))
+        block = block.replace(b"{themeclass}",
themeclass.encode("utf8")) if path_parts: site = self.server.sites.get(path_parts.get("address")) @@ -642,7 +644,7 @@ class UiRequest(object): modified = int(time.time()) else: modified = int(site.content_manager.contents["content.json"]["modified"]) - block = block.replace("{site_modified}", str(modified)) + block = block.replace(b"{site_modified}", str(modified).encode("utf8")) return block @@ -708,14 +710,14 @@ class UiRequest(object): wrapper_key = self.get["wrapper_key"] # Find site by wrapper_key site = None - for site_check in self.server.sites.values(): + for site_check in list(self.server.sites.values()): if site_check.settings["wrapper_key"] == wrapper_key: site = site_check if site: # Correct wrapper key try: user = self.getCurrentUser() - except Exception, err: + except Exception as err: self.log.error("Error in data/user.json: %s" % err) return self.error500() if not user: @@ -726,7 +728,7 @@ class UiRequest(object): self.server.websockets.append(ui_websocket) ui_websocket.start() self.server.websockets.remove(ui_websocket) - for site_check in self.server.sites.values(): + for site_check in list(self.server.sites.values()): # Remove websocket from every site (admin sites allowed to join other sites event channels) if ui_websocket in site_check.websockets: site_check.websockets.remove(ui_websocket) @@ -744,10 +746,10 @@ class UiRequest(object): import sys last_error = sys.modules["main"].DebugHook.last_error if last_error: - raise last_error[0], last_error[1], last_error[2] + raise last_error[0](last_error[1]).with_traceback(last_error[2]) else: self.sendHeader() - return "No error! :)" + return [b"No error! :)"] # Just raise an error to get console def actionConsole(self): @@ -793,19 +795,19 @@ class UiRequest(object): # Send file not found error def error404(self, path=""): self.sendHeader(404) - return self.formatError("Not Found", cgi.escape(path.encode("utf8")), details=False) + return self.formatError("Not Found", html.escape(path), details=False) # Internal server error def error500(self, message=":("): self.sendHeader(500) - return self.formatError("Server error", cgi.escape(message)) + return self.formatError("Server error", html.escape(message)) def formatError(self, title, message, details=True): import sys import gevent if details: - details = {key: val for key, val in self.env.items() if hasattr(val, "endswith") and "COOKIE" not in key} + details = {key: val for key, val in list(self.env.items()) if hasattr(val, "endswith") and "COOKIE" not in key} details["version_zeronet"] = "%s r%s" % (config.version, config.rev) details["version_python"] = sys.version details["version_gevent"] = gevent.__version__ diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py index 90b6a31c..74ae98a8 100644 --- a/src/Ui/UiServer.py +++ b/src/Ui/UiServer.py @@ -7,12 +7,13 @@ import gevent from gevent.pywsgi import WSGIServer from gevent.pywsgi import WSGIHandler -from lib.geventwebsocket.handler import WebSocketHandler +from geventwebsocket.handler import WebSocketHandler -from UiRequest import UiRequest +from .UiRequest import UiRequest from Site import SiteManager from Config import config from Debug import Debug +import importlib # Skip websocket handler if not necessary @@ -30,7 +31,7 @@ class UiWSGIHandler(WSGIHandler): ws_handler = WebSocketHandler(*self.args, **self.kwargs) ws_handler.__dict__ = self.__dict__ # Match class variables ws_handler.run_application() - except Exception, err: + except Exception as err: logging.error("UiWSGIHandler websocket error: %s" % 
Debug.formatException(err)) if config.debug: # Allow websocket errors to appear on /Debug import sys @@ -38,7 +39,7 @@ class UiWSGIHandler(WSGIHandler): else: # Standard HTTP request try: super(UiWSGIHandler, self).run_application() - except Exception, err: + except Exception as err: logging.error("UiWSGIHandler error: %s" % Debug.formatException(err)) if config.debug: # Allow websocket errors to appear on /Debug import sys @@ -101,7 +102,7 @@ class UiServer: else: # Catch and display the error try: return ui_request.route(path) - except Exception, err: + except Exception as err: logging.debug("UiRequest error: %s" % Debug.formatException(err)) return ui_request.error500("Err: %s" % Debug.formatException(err)) @@ -110,8 +111,8 @@ class UiServer: global UiRequest import imp import sys - reload(sys.modules["User.UserManager"]) - reload(sys.modules["Ui.UiWebsocket"]) + importlib.reload(sys.modules["User.UserManager"]) + importlib.reload(sys.modules["Ui.UiWebsocket"]) UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest # UiRequest.reload() @@ -128,7 +129,7 @@ class UiServer: try: from werkzeug.debug import DebuggedApplication handler = DebuggedApplication(self.handleRequest, evalex=True) - except Exception, err: + except Exception as err: self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err) from Debug import DebugReloader self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log @@ -147,14 +148,14 @@ class UiServer: url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage) gevent.spawn_later(0.3, browser.open, url, new=2) except Exception as err: - print "Error starting browser: %s" % err + print("Error starting browser: %s" % err) self.server = WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log) self.server.sockets = {} self.afterStarted() try: self.server.serve_forever() - except Exception, err: + except Exception as err: self.log.error("Web interface bind error, must be running already, exiting.... 
%s" % err) sys.modules["main"].file_server.stop() self.log.debug("Stopped.") @@ -163,18 +164,18 @@ class UiServer: self.log.debug("Stopping...") # Close WS sockets if "clients" in dir(self.server): - for client in self.server.clients.values(): + for client in list(self.server.clients.values()): client.ws.close() # Close http sockets sock_closed = 0 - for sock in self.server.sockets.values(): + for sock in list(self.server.sockets.values()): try: - sock.send("bye") + sock.send(b"bye") sock.shutdown(socket.SHUT_RDWR) # sock._sock.close() # sock.close() sock_closed += 1 - except Exception, err: + except Exception as err: self.log.debug("Http connection close error: %s" % err) self.log.debug("Socket closed: %s" % sock_closed) time.sleep(0.1) diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index b407e597..ba0cc78b 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -55,7 +55,7 @@ class UiWebsocket(object): else: try: self.addHomepageNotifications() - except Exception, err: + except Exception as err: self.log.error("Uncaught Exception: " + Debug.formatException(err)) for notification in self.site.notifications: # Send pending notification messages @@ -73,7 +73,7 @@ class UiWebsocket(object): break else: message = ws.receive() - except Exception, err: + except Exception as err: self.log.error("WebSocket receive error: %s" % Debug.formatException(err)) break @@ -81,7 +81,7 @@ class UiWebsocket(object): try: req = json.loads(message) self.handleRequest(req) - except Exception, err: + except Exception as err: if config.debug: # Allow websocket errors to appear on /Debug sys.modules["main"].DebugHook.handleError() self.log.error("WebSocket handleRequest error: %s \n %s" % (Debug.formatException(err), message)) @@ -99,7 +99,7 @@ class UiWebsocket(object): if ("0.0.0.0" == bind_ip or "*" == bind_ip) and (not whitelist): self.site.notifications.append([ "error", - _(u"You are not going to set up a public gateway. However, <b>your Web UI is<br>" + + _("You are not going to set up a public gateway. However, <b>your Web UI is<br>" + "open to the whole Internet.</b> " + "Please check your configuration.") ]) @@ -114,7 +114,7 @@ class UiWebsocket(object): elif config.tor == "always" and file_server.tor_manager.start_onions: self.site.notifications.append([ "done", - _(u""" + _(""" {_[Tor mode active, every connection using Onion route.]}<br> {_[Successfully started Tor onion hidden services.]} """), @@ -123,7 +123,7 @@ class UiWebsocket(object): elif config.tor == "always" and file_server.tor_manager.start_onions is not False: self.site.notifications.append([ "error", - _(u""" + _(""" {_[Tor mode active, every connection using Onion route.]}<br> {_[Unable to start hidden services, please check your config.]} """), @@ -132,7 +132,7 @@ class UiWebsocket(object): elif file_server.tor_manager.start_onions: self.site.notifications.append([ "done", - _(u""" + _(""" {_[Successfully started Tor onion hidden services.]}<br> {_[For faster connections open <b>{0}</b> port on your router.]} """).format(config.fileserver_port), @@ -141,7 +141,7 @@ class UiWebsocket(object): else: self.site.notifications.append([ "error", - _(u""" + _(""" {_[Your connection is restricted. 
Please, open <b>{0}</b> port on your router]}<br> {_[or configure Tor to become a full member of the ZeroNet network.]} """).format(config.fileserver_port), @@ -213,7 +213,7 @@ class UiWebsocket(object): message = self.send_queue.pop(0) self.ws.send(json.dumps(message)) self.state["sending"] = False - except Exception, err: + except Exception as err: self.log.debug("Websocket send error: %s" % Debug.formatException(err)) self.state["sending"] = False @@ -230,7 +230,7 @@ class UiWebsocket(object): result = func(*args, **kwargs) if result is not None: self.response(args[0], result) - except Exception, err: + except Exception as err: if config.debug: # Allow websocket errors to appear on /Debug sys.modules["main"].DebugHook.handleError() self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err)) @@ -403,14 +403,14 @@ class UiWebsocket(object): def actionAnnouncerStats(self, to): back = {} trackers = self.site.announcer.getTrackers() - for site in self.server.sites.values(): - for tracker, stats in site.announcer.stats.iteritems(): + for site in list(self.server.sites.values()): + for tracker, stats in site.announcer.stats.items(): if tracker not in trackers: continue if tracker not in back: back[tracker] = {} is_latest_data = bool(stats["time_request"] > back[tracker].get("time_request", 0) and stats["status"]) - for key, val in stats.iteritems(): + for key, val in stats.items(): if key.startswith("num_"): back[tracker][key] = back[tracker].get(key, 0) + val elif is_latest_data: @@ -548,7 +548,7 @@ class UiWebsocket(object): if notification: self.cmd("notification", [ "info", - _(u"""{_[Your network connection is restricted. Please, open <b>{0}</b> port]}<br> + _("""{_[Your network connection is restricted. Please, open <b>{0}</b> port]}<br> {_[on your router to make your site accessible for everyone.]}""").format(config.fileserver_port) ]) if callback: @@ -580,7 +580,7 @@ class UiWebsocket(object): self.cmd( "confirm", [_["This file still in sync, if you write it now, then the previous content may be lost."], _["Write content anyway"]], - lambda (res): self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True) + lambda res: self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True) ) return False @@ -601,7 +601,7 @@ class UiWebsocket(object): shutil.copyfileobj(f_old, f_new) self.site.storage.write(inner_path, content) - except Exception, err: + except Exception as err: self.log.error("File write error: %s" % Debug.formatException(err)) return self.response(to, {"error": "Write error: %s" % Debug.formatException(err)}) @@ -636,7 +636,7 @@ class UiWebsocket(object): if need_delete: try: self.site.storage.delete(inner_path) - except Exception, err: + except Exception as err: self.log.error("File delete error: %s" % err) return self.response(to, {"error": "Delete error: %s" % err}) @@ -676,7 +676,7 @@ class UiWebsocket(object): rows = [] try: res = self.site.storage.query(query, params) - except Exception, err: # Response the error to client + except Exception as err: # Response the error to client self.log.error("DbQuery error: %s" % err) return self.response(to, {"error": str(err)}) # Convert result to dict @@ -693,7 +693,7 @@ class UiWebsocket(object): with gevent.Timeout(timeout): self.site.needFile(inner_path, priority=6) body = self.site.storage.read(inner_path, "rb") - except Exception, err: + except Exception as err: self.log.error("%s fileGet error: %s" % (inner_path, err)) body = None if body and format == "base64": @@ -705,7 
+705,7 @@ class UiWebsocket(object):
         try:
             with gevent.Timeout(timeout):
                 self.site.needFile(inner_path, priority=6)
-        except Exception, err:
+        except Exception as err:
             return self.response(to, {"error": str(err)})
 
         return self.response(to, "ok")
@@ -725,7 +725,7 @@ class UiWebsocket(object):
         rules = self.site.content_manager.getRules(inner_path, content)
         if inner_path.endswith("content.json") and rules:
             if content:
-                rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content.get("files", {}).values()])
+                rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in list(content.get("files", {}).values())])
             else:
                 rules["current_size"] = 0
         return self.response(to, rules)
@@ -749,11 +749,11 @@ class UiWebsocket(object):
                 self.cmd(
                     "confirm",
                     [body, _("Change it to {auth_type}/{auth_user_name}@{domain}")],
-                    lambda (res): self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert)
+                    lambda res: self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert)
                 )
             else:
                 self.response(to, "Not changed")
-        except Exception, err:
-            self.log.error("CertAdd error: Exception - %s (%s)" % (err.message, Debug.formatException(err)))
-            self.response(to, {"error": err.message})
+        except Exception as err:
+            self.log.error("CertAdd error: Exception - %s (%s)" % (str(err), Debug.formatException(err)))
+            self.response(to, {"error": str(err)})
@@ -781,7 +781,7 @@ class UiWebsocket(object):
         if not accepted_domains and not accepted_pattern:  # Accept any if no filter defined
             accept_any = True
 
-        for domain, cert in self.user.certs.items():
+        for domain, cert in list(self.user.certs.items()):
             if auth_address == cert["auth_address"] and domain == site_data.get("cert"):
                 active = domain
             title = cert["auth_user_name"] + "@" + domain
@@ -797,7 +797,7 @@ class UiWebsocket(object):
         for domain, account, css_class in accounts:
             if domain == active:
                 css_class += " active"  # Currently selected option
-                title = _(u"<b>%s</b> <small>({_[currently selected]})</small>") % account
+                title = _("<b>%s</b> <small>({_[currently selected]})</small>") % account
             else:
                 title = "<b>%s</b>" % account
             body += "<a href='#Select+account' class='select select-close cert %s' title='%s'>%s</a>" % (css_class, domain, title)
@@ -807,7 +807,7 @@ class UiWebsocket(object):
             # body+= "<small style='margin-top: 10px; display: block'>Accepted authorization providers by the site:</small>"
             body += "<div style='background-color: #F7F7F7; margin-right: -30px'>"
             for domain in more_domains:
-                body += _(u"""
+                body += _("""
                  <a href='/{domain}' target='_top' class='select'>
                   <small style='float: right; margin-right: 40px; margin-top: -1px'>{_[Register]} »</small>{domain}
                  </a>
@@ -858,7 +858,7 @@ class UiWebsocket(object):
     def actionCertList(self, to):
         back = []
         auth_address = self.user.getAuthAddress(self.site.address)
-        for domain, cert in self.user.certs.items():
+        for domain, cert in list(self.user.certs.items()):
             back.append({
                 "auth_address": cert["auth_address"],
                 "auth_type": cert["auth_type"],
@@ -872,7 +872,7 @@ class UiWebsocket(object):
     def actionSiteList(self, to, connecting_sites=False):
         ret = []
         SiteManager.site_manager.load()  # Reload sites
-        for site in self.server.sites.values():
+        for site in list(self.server.sites.values()):
             if not site.content_manager.contents.get("content.json") and not connecting_sites:
                 continue  # Incomplete site
             ret.append(self.formatSiteInfo(site, create_user=False))  # Dont generate the auth_address on listing
@@ -883,7 +883,7 @@ class UiWebsocket(object):
         if channel not in self.channels:  # Add channel to channels
             self.channels.append(channel)
 
-        for site in self.server.sites.values():  # Add websocket to every
channel + for site in list(self.server.sites.values()): # Add websocket to every channel if self not in site.websockets: site.websockets.append(self) @@ -970,7 +970,7 @@ class UiWebsocket(object): site = self.server.sites.get(address) if site.bad_files: - for bad_inner_path in site.bad_files.keys(): + for bad_inner_path in list(site.bad_files.keys()): is_user_file = "cert_signers" in site.content_manager.getRules(bad_inner_path) if not is_user_file: self.cmd("notification", ["error", _["Clone error: Site still in sync"]]) @@ -982,7 +982,7 @@ class UiWebsocket(object): self.cmd( "confirm", [_["Clone site <b>%s</b>?"] % address, _["Clone"]], - lambda (res): self.cbSiteClone(to, address, root_inner_path, target_address) + lambda res: self.cbSiteClone(to, address, root_inner_path, target_address) ) def actionSiteSetLimit(self, to, size_limit): @@ -1013,7 +1013,7 @@ class UiWebsocket(object): min_mtime = self.site.settings["cache"].get("time_modified_files_check") modified_files = self.site.settings["cache"].get("modified_files", []) - inner_paths = [content_inner_path] + content.get("includes", {}).keys() + content.get("files", {}).keys() + inner_paths = [content_inner_path] + list(content.get("includes", {}).keys()) + list(content.get("files", {}).keys()) for relative_inner_path in inner_paths: inner_path = helper.getDirname(content_inner_path) + relative_inner_path diff --git a/src/Ui/__init__.py b/src/Ui/__init__.py index 9982dc4f..dcb8896d 100644 --- a/src/Ui/__init__.py +++ b/src/Ui/__init__.py @@ -1,3 +1,3 @@ -from UiServer import UiServer -from UiRequest import UiRequest -from UiWebsocket import UiWebsocket \ No newline at end of file +from .UiServer import UiServer +from .UiRequest import UiRequest +from .UiWebsocket import UiWebsocket \ No newline at end of file diff --git a/src/User/User.py b/src/User/User.py index 5ee2d81a..3653f9eb 100644 --- a/src/User/User.py +++ b/src/User/User.py @@ -9,6 +9,7 @@ from Crypt import CryptBitcoin from Plugin import PluginManager from Config import config from util import helper +from Debug import Debug @PluginManager.acceptPlugins @@ -52,7 +53,7 @@ class User(object): self.delayed_save_thread = gevent.spawn_later(5, self.save) def getAddressAuthIndex(self, address): - return int(address.encode("hex"), 16) + return int(address.encode("ascii").hex(), 16) @util.Noparallel() def generateAuthAddress(self, address): @@ -122,7 +123,7 @@ class User(object): # Add cert for the user def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign): # Find privatekey by auth address - auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0] + auth_privatekey = [site["auth_privatekey"] for site in list(self.sites.values()) if site["auth_address"] == auth_address][0] cert_node = { "auth_address": auth_address, "auth_privatekey": auth_privatekey, diff --git a/src/User/UserManager.py b/src/User/UserManager.py index 66876dc1..e1f069c0 100644 --- a/src/User/UserManager.py +++ b/src/User/UserManager.py @@ -4,7 +4,7 @@ import logging import time # ZeroNet Modules -from User import User +from .User import User from Plugin import PluginManager from Config import config @@ -24,7 +24,7 @@ class UserManager(object): added = 0 s = time.time() # Load new users - for master_address, data in json.load(open("%s/users.json" % config.data_dir)).items(): + for master_address, data in list(json.load(open("%s/users.json" % config.data_dir)).items()): if master_address not in self.users: user = 
User(master_address, data=data) self.users[master_address] = user @@ -32,7 +32,7 @@ class UserManager(object): user_found.append(master_address) # Remove deleted adresses - for master_address in self.users.keys(): + for master_address in list(self.users.keys()): if master_address not in user_found: del(self.users[master_address]) self.log.debug("Removed user: %s" % master_address) @@ -63,7 +63,7 @@ class UserManager(object): def get(self, master_address=None): users = self.list() if users: - return users.values()[0] # Single user mode, always return the first + return list(users.values())[0] # Single user mode, always return the first else: return None diff --git a/src/User/__init__.py b/src/User/__init__.py index 8d569979..4db9149e 100644 --- a/src/User/__init__.py +++ b/src/User/__init__.py @@ -1 +1 @@ -from User import User +from .User import User diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index 399e1f32..fca4d35a 100644 --- a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -78,10 +78,12 @@ class Worker(object): self.task = task site = task["site"] task["workers_num"] += 1 + error_message = "Unknown error" try: buff = self.peer.getFile(site.address, task["inner_path"], task["size"]) - except Exception, err: + except Exception as err: self.manager.log.debug("%s: getFile error: %s" % (self.key, err)) + error_message = str(err) buff = None if self.running is False: # Worker no longer needed or got killed self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) @@ -91,10 +93,11 @@ class Worker(object): if buff: # Download ok try: correct = site.content_manager.verifyFile(task["inner_path"], buff) - except Exception, err: + except Exception as err: + error_message = str(err) correct = False else: # Download error - err = "Download failed" + error_message = "Download failed" correct = False if correct is True or correct is None: # Verify ok or same file self.manager.log.debug("%s: Verify correct: %s" % (self.key, task["inner_path"])) diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index e3cbfde7..21f0402e 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -4,7 +4,7 @@ import collections import gevent -from Worker import Worker +from .Worker import Worker from Config import config from util import helper from Plugin import PluginManager @@ -41,7 +41,7 @@ class WorkerManager(object): time.sleep(15) # Check every 15 sec # Clean up workers - for worker in self.workers.values(): + for worker in list(self.workers.values()): if worker.task and worker.task["done"]: worker.skip() # Stop workers with task done @@ -205,7 +205,7 @@ class WorkerManager(object): def findOptionalTasks(self, optional_tasks, reset_task=False): found = collections.defaultdict(list) # { found_hash: [peer1, peer2...], ...} - for peer in self.site.peers.values(): + for peer in list(self.site.peers.values()): if not peer.has_hashfield: continue @@ -226,7 +226,7 @@ class WorkerManager(object): def findOptionalHashIds(self, optional_hash_ids, limit=0): found = collections.defaultdict(list) # { found_hash_id: [peer1, peer2...], ...} - for peer in self.site.peers.values(): + for peer in list(self.site.peers.values()): if not peer.has_hashfield: continue @@ -242,7 +242,7 @@ class WorkerManager(object): # Add peers to tasks from found result def addOptionalPeers(self, found_ips): found = collections.defaultdict(list) - for hash_id, peer_ips in found_ips.iteritems(): + for hash_id, peer_ips in found_ips.items(): task = [task for task 
in self.tasks if task["optional_hash_id"] == hash_id] if task: # Found task, lets take the first task = task[0] @@ -283,10 +283,10 @@ class WorkerManager(object): found = self.findOptionalTasks(optional_tasks, reset_task=reset_task) if found: - found_peers = set([peer for peers in found.values() for peer in peers]) + found_peers = set([peer for peers in list(found.values()) for peer in peers]) self.startWorkers(found_peers, force_num=3) - if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.itervalues())): + if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.values())): self.log.debug("No local result for optional files: %s" % (optional_hash_ids - set(found))) # Query hashfield from connected peers @@ -308,7 +308,7 @@ class WorkerManager(object): )) if found: - found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) + found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) self.startWorkers(found_peers, force_num=3) if len(found) < len(optional_hash_ids) or find_more: @@ -344,7 +344,7 @@ class WorkerManager(object): )) if found: - found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) + found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) self.startWorkers(found_peers, force_num=3) if len(thread_values) == len(threads): @@ -376,7 +376,7 @@ class WorkerManager(object): self.log.debug("Found optional files after findhash random peers: %s/%s" % (len(found), len(optional_hash_ids))) if found: - found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) + found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) self.startWorkers(found_peers, force_num=3) if len(found) < len(optional_hash_ids): @@ -390,7 +390,7 @@ class WorkerManager(object): # Stop all worker def stopWorkers(self): - for worker in self.workers.values(): + for worker in list(self.workers.values()): worker.stop() tasks = self.tasks[:] # Copy for task in tasks: # Mark all current task as failed @@ -399,7 +399,7 @@ class WorkerManager(object): # Find workers by task def findWorkers(self, task): workers = [] - for worker in self.workers.values(): + for worker in list(self.workers.values()): if worker.task == task: workers.append(worker) return workers diff --git a/src/Worker/__init__.py b/src/Worker/__init__.py index 26649852..f4d20a96 100644 --- a/src/Worker/__init__.py +++ b/src/Worker/__init__.py @@ -1,2 +1,2 @@ -from Worker import Worker -from WorkerManager import WorkerManager \ No newline at end of file +from .Worker import Worker +from .WorkerManager import WorkerManager diff --git a/src/lib/cssvendor/cssvendor.py b/src/lib/cssvendor/cssvendor.py index e2ca6788..b04d7cc3 100644 --- a/src/lib/cssvendor/cssvendor.py +++ b/src/lib/cssvendor/cssvendor.py @@ -3,26 +3,26 @@ import re def prefix(content): content = re.sub( - "@keyframes (.*? {.*?}\s*})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n", + b"@keyframes (.*? 
{.*?}\s*})", b"@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n", content, flags=re.DOTALL ) content = re.sub( - '([^-\*])(border-radius|box-shadow|appearance|transition|animation|box-sizing|' + - 'backface-visibility|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])', - '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content + b'([^-\*])(border-radius|box-shadow|appearance|transition|animation|box-sizing|' + + b'backface-visibility|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])', + b'\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content ) content = re.sub( - '(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])', - '\\1: -webkit-\\2(\\3);' + - '\\1: -moz-\\2(\\3);' + - '\\1: -o-\\2(\\3);' + - '\\1: -ms-\\2(\\3);' + - '\\1: \\2(\\3);', content + b'(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])', + b'\\1: -webkit-\\2(\\3);' + + b'\\1: -moz-\\2(\\3);' + + b'\\1: -o-\\2(\\3);' + + b'\\1: -ms-\\2(\\3);' + + b'\\1: \\2(\\3);', content ) return content if __name__ == "__main__": - print prefix(""" + print(prefix(b""" .test { border-radius: 5px; background: linear-gradient(red, blue); @@ -36,4 +36,4 @@ if __name__ == "__main__": } - """) + """).decode("utf8")) diff --git a/src/main.py b/src/main.py index 0a05159f..9bce4f3d 100644 --- a/src/main.py +++ b/src/main.py @@ -35,7 +35,7 @@ if not os.path.isdir(config.data_dir): try: os.chmod(config.data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) except Exception as err: - print "Can't change permission of %s: %s" % (config.data_dir, err) + print("Can't change permission of %s: %s" % (config.data_dir, err)) if not os.path.isfile("%s/sites.json" % config.data_dir): open("%s/sites.json" % config.data_dir, "w").write("{}") @@ -48,9 +48,9 @@ if config.action == "main": lock = helper.openLocked("%s/lock.pid" % config.data_dir, "w") lock.write("%s" % os.getpid()) except IOError as err: - print "Can't open lock file, your ZeroNet client is probably already running, exiting... (%s)" % err + print("Can't open lock file, your ZeroNet client is probably already running, exiting... 
(%s)" % err) if config.open_browser and config.open_browser != "False": - print "Opening browser: %s...", config.open_browser + print("Opening browser: %s...", config.open_browser) import webbrowser try: if config.open_browser == "default_browser": @@ -59,7 +59,7 @@ if config.action == "main": browser = webbrowser.get(config.open_browser) browser.open("http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage), new=2) except Exception as err: - print "Error starting browser: %s" % err + print("Error starting browser: %s" % err) sys.exit() @@ -87,14 +87,14 @@ if config.msgpack_purepython: # Socket monkey patch if config.proxy: from util import SocksProxy - import urllib2 + import urllib.request logging.info("Patching sockets to socks proxy: %s" % config.proxy) if config.fileserver_ip == "*": config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost SocksProxy.monkeyPatch(*config.proxy.split(":")) elif config.tor == "always": from util import SocksProxy - import urllib2 + import urllib.request logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy) if config.fileserver_ip == "*": config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost @@ -118,7 +118,7 @@ class Actions(object): func = getattr(self, function_name, None) back = func(**kwargs) if back: - print back + print(back) # Default action: Start serving UiServer and FileServer def main(self): @@ -153,7 +153,7 @@ class Actions(object): logging.info("----------------------------------------------------------------------") while True and not config.batch: - if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": + if input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break else: logging.info("Please, secure it now, you going to need it to modify your site!") @@ -196,7 +196,7 @@ class Actions(object): privatekey = getpass.getpass("Private key (input hidden):") try: succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True, remove_missing_optional=remove_missing_optional) - except Exception, err: + except Exception as err: logging.error("Sign error: %s" % Debug.formatException(err)) succ = False if succ and publish: @@ -220,14 +220,14 @@ class Actions(object): file_correct = site.content_manager.verifyFile( content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False ) - except Exception, err: + except Exception as err: file_correct = False if file_correct is True: logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s)) else: logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, err)) - raw_input("Continue?") + input("Continue?") bad_files += content_inner_path logging.info("Verifying site files...") @@ -258,7 +258,7 @@ class Actions(object): result = [] for row in site.storage.query(query): result.append(dict(row)) - print json.dumps(result, indent=4) + print(json.dumps(result, indent=4)) def siteAnnounce(self, address): from Site.Site import Site @@ -276,8 +276,8 @@ class Actions(object): s = time.time() site.announce() - print "Response time: %.3fs" % (time.time() - s) - print site.peers + print("Response time: %.3fs" % (time.time() - s)) + print(site.peers) def siteDownload(self, address): from Site import Site @@ -298,14 +298,14 @@ class Actions(object): evt.set(True) site.onComplete.once(lambda: onComplete(on_completed)) - print "Announcing..." 
+ print("Announcing...") site.announce() s = time.time() - print "Downloading..." + print("Downloading...") site.downloadContent("content.json", check_modifications=True) - print "Downloaded in %.3fs" % (time.time()-s) + print("Downloaded in %.3fs" % (time.time()-s)) def siteNeedFile(self, address, inner_path): @@ -317,7 +317,7 @@ class Actions(object): while 1: s = time.time() time.sleep(1) - print "Switch time:", time.time() - s + print("Switch time:", time.time() - s) gevent.spawn(checker) logging.info("Opening a simple connection server") @@ -328,7 +328,7 @@ class Actions(object): site = Site(address) site.announce() - print site.needFile(inner_path, update=True) + print(site.needFile(inner_path, update=True)) def siteCmd(self, address, cmd, parameters): import json @@ -398,15 +398,15 @@ class Actions(object): import getpass privatekey = getpass.getpass("Private key (input hidden):") - print CryptBitcoin.privatekeyToAddress(privatekey) + print(CryptBitcoin.privatekeyToAddress(privatekey)) def cryptSign(self, message, privatekey): from Crypt import CryptBitcoin - print CryptBitcoin.sign(message, privatekey) + print(CryptBitcoin.sign(message, privatekey)) def cryptVerify(self, message, sign, address): from Crypt import CryptBitcoin - print CryptBitcoin.verify(message, address, sign) + print(CryptBitcoin.verify(message, address, sign)) def cryptGetPrivatekey(self, master_seed, site_address_index=None): from Crypt import CryptBitcoin @@ -414,7 +414,7 @@ class Actions(object): logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed)) return False privatekey = CryptBitcoin.hdPrivatekey(master_seed, site_address_index) - print "Requested private key: %s" % privatekey + print("Requested private key: %s" % privatekey) # Peer def peerPing(self, peer_ip, peer_port=None): @@ -435,18 +435,18 @@ class Actions(object): peer.connect() if not peer.connection: - print "Error: Can't connect to peer (connection error: %s)" % peer.connection_error + print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error) return False - print "Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error) + print("Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error)) for i in range(5): - print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt) + print("Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt)) time.sleep(1) peer.remove() - print "Reconnect test..." 
+ print("Reconnect test...") peer = Peer(peer_ip, peer_port) for i in range(5): - print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt) + print("Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt)) time.sleep(1) def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False): @@ -465,10 +465,10 @@ class Actions(object): if benchmark: for i in range(10): peer.getFile(site, filename), - print "Response time: %.3fs" % (time.time() - s) - raw_input("Check memory") + print("Response time: %.3fs" % (time.time() - s)) + input("Check memory") else: - print peer.getFile(site, filename).read() + print(peer.getFile(site, filename).read()) def peerCmd(self, peer_ip, peer_port, cmd, parameters): logging.info("Opening a simple connection server") @@ -489,13 +489,14 @@ class Actions(object): parameters = {} try: res = peer.request(cmd, parameters) - print json.dumps(res, indent=2, ensure_ascii=False) - except Exception, err: - print "Unknown response (%s): %s" % (err, res) + print(json.dumps(res, indent=2, ensure_ascii=False)) + except Exception as err: + print("Unknown response (%s): %s" % (err, res)) def getConfig(self): import json - print json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False) + print(json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False)) + actions = Actions() diff --git a/src/util/Diff.py b/src/util/Diff.py index 3c325da8..535445ae 100644 --- a/src/util/Diff.py +++ b/src/util/Diff.py @@ -1,4 +1,4 @@ -import cStringIO as StringIO +import io import difflib @@ -31,7 +31,7 @@ def diff(old, new, limit=False): def patch(old_f, actions): - new_f = StringIO.StringIO() + new_f = io.BytesIO() for action, param in actions: if action == "=": # Same lines new_f.write(old_f.read(param)) diff --git a/src/util/Event.py b/src/util/Event.py index b9614795..9d642736 100644 --- a/src/util/Event.py +++ b/src/util/Event.py @@ -28,19 +28,19 @@ class Event(list): if __name__ == "__main__": def testBenchmark(): def say(pre, text): - print "%s Say: %s" % (pre, text) + print("%s Say: %s" % (pre, text)) import time s = time.time() on_changed = Event() for i in range(1000): on_changed.once(lambda pre: say(pre, "once"), "once") - print "Created 1000 once in %.3fs" % (time.time() - s) + print("Created 1000 once in %.3fs" % (time.time() - s)) on_changed("#1") def testUsage(): def say(pre, text): - print "%s Say: %s" % (pre, text) + print("%s Say: %s" % (pre, text)) on_changed = Event() on_changed.once(lambda pre: say(pre, "once")) diff --git a/src/util/Noparallel.py b/src/util/Noparallel.py index 9f9d3132..2a5f974a 100644 --- a/src/util/Noparallel.py +++ b/src/util/Noparallel.py @@ -49,7 +49,7 @@ class Noparallel(object): # Only allow function running once in same time return ret else: # No blocking just return the thread return thread - wrapper.func_name = func.func_name + wrapper.__name__ = func.__name__ return wrapper @@ -65,7 +65,7 @@ if __name__ == "__main__": @Noparallel() def count(self, num=5): for i in range(num): - print self, i + print(self, i) time.sleep(1) return "%s return:%s" % (self, i) @@ -74,59 +74,59 @@ if __name__ == "__main__": @Noparallel(blocking=False) def count(self, num=5): for i in range(num): - print self, i + print(self, i) time.sleep(1) return "%s return:%s" % (self, i) def testBlocking(): test = Test() test2 = Test() - print "Counting..." 
- print "Creating class1/thread1" + print("Counting...") + print("Creating class1/thread1") thread1 = gevent.spawn(test.count) - print "Creating class1/thread2 (ignored)" + print("Creating class1/thread2 (ignored)") thread2 = gevent.spawn(test.count) - print "Creating class2/thread3" + print("Creating class2/thread3") thread3 = gevent.spawn(test2.count) - print "Joining class1/thread1" + print("Joining class1/thread1") thread1.join() - print "Joining class1/thread2" + print("Joining class1/thread2") thread2.join() - print "Joining class2/thread3" + print("Joining class2/thread3") thread3.join() - print "Creating class1/thread4 (its finished, allowed again)" + print("Creating class1/thread4 (its finished, allowed again)") thread4 = gevent.spawn(test.count) - print "Joining thread4" + print("Joining thread4") thread4.join() - print thread1.value, thread2.value, thread3.value, thread4.value - print "Done." + print(thread1.value, thread2.value, thread3.value, thread4.value) + print("Done.") def testNoblocking(): test = TestNoblock() test2 = TestNoblock() - print "Creating class1/thread1" + print("Creating class1/thread1") thread1 = test.count() - print "Creating class1/thread2 (ignored)" + print("Creating class1/thread2 (ignored)") thread2 = test.count() - print "Creating class2/thread3" + print("Creating class2/thread3") thread3 = test2.count() - print "Joining class1/thread1" + print("Joining class1/thread1") thread1.join() - print "Joining class1/thread2" + print("Joining class1/thread2") thread2.join() - print "Joining class2/thread3" + print("Joining class2/thread3") thread3.join() - print "Creating class1/thread4 (its finished, allowed again)" + print("Creating class1/thread4 (its finished, allowed again)") thread4 = test.count() - print "Joining thread4" + print("Joining thread4") thread4.join() - print thread1.value, thread2.value, thread3.value, thread4.value - print "Done." + print(thread1.value, thread2.value, thread3.value, thread4.value) + print("Done.") def testBenchmark(): import time @@ -135,21 +135,21 @@ if __name__ == "__main__": import gc from greenlet import greenlet objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)] - print "Greenlets: %s" % len(objs) + print("Greenlets: %s" % len(objs)) printThreadNum() test = TestNoblock() s = time.time() for i in range(3): gevent.spawn(test.count, i + 1) - print "Created in %.3fs" % (time.time() - s) + print("Created in %.3fs" % (time.time() - s)) printThreadNum() time.sleep(5) from gevent import monkey monkey.patch_all() testBenchmark() - print "Testing blocking mode..." + print("Testing blocking mode...") testBlocking() - print "Testing noblocking mode..." 
+ print("Testing noblocking mode...") testNoblocking() diff --git a/src/util/Platform.py b/src/util/Platform.py index 19477649..788e5949 100644 --- a/src/util/Platform.py +++ b/src/util/Platform.py @@ -19,6 +19,6 @@ def setMaxfilesopened(limit): resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard)) return True - except Exception, err: + except Exception as err: logging.error("Failed to modify max files open limit: %s" % err) return False diff --git a/src/util/Pooled.py b/src/util/Pooled.py index b7751995..9a4a7b63 100644 --- a/src/util/Pooled.py +++ b/src/util/Pooled.py @@ -29,7 +29,7 @@ class Pooled(object): self.pooler_running = True gevent.spawn(self.pooler) return evt - wrapper.func_name = func.func_name + wrapper.__name__ = func.__name__ self.func = func return wrapper @@ -62,4 +62,4 @@ if __name__ == "__main__": s = time.time() gevent.joinall(threads) # Should take 10 second - print time.time() - s + print(time.time() - s) diff --git a/src/util/QueryJson.py b/src/util/QueryJson.py index 6b1f9372..d9921ff0 100644 --- a/src/util/QueryJson.py +++ b/src/util/QueryJson.py @@ -64,4 +64,4 @@ def query(path_pattern, filter): if __name__ == "__main__": for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")): - print row + print(row) diff --git a/src/util/RateLimit.py b/src/util/RateLimit.py index 35b592ca..94d2648c 100644 --- a/src/util/RateLimit.py +++ b/src/util/RateLimit.py @@ -86,7 +86,7 @@ def call(event, allowed_again=10, func=None, *args, **kwargs): def rateLimitCleanup(): while 1: expired = time.time() - 60 * 2 # Cleanup if older than 2 minutes - for event in called_db.keys(): + for event in list(called_db.keys()): if called_db[event] < expired: del called_db[event] time.sleep(60 * 3) # Every 3 minutes @@ -99,30 +99,30 @@ if __name__ == "__main__": import random def publish(inner_path): - print "Publishing %s..." % inner_path + print("Publishing %s..." % inner_path) return 1 def cb(thread): - print "Value:", thread.value + print("Value:", thread.value) print "Testing async spam requests rate limit to 1/sec..." for i in range(3000): thread = callAsync("publish content.json", 1, publish, "content.json %s" % i) time.sleep(float(random.randint(1, 20)) / 100000) - print thread.link(cb) - print "Done" + print(thread.link(cb)) + print("Done") time.sleep(2) - print "Testing sync spam requests rate limit to 1/sec..." + print("Testing sync spam requests rate limit to 1/sec...") for i in range(5): call("publish data.json", 1, publish, "data.json %s" % i) time.sleep(float(random.randint(1, 100)) / 100) - print "Done" + print("Done") - print "Testing cleanup" + print("Testing cleanup") thread = callAsync("publish content.json single", 1, publish, "content.json single") - print "Needs to cleanup:", called_db, queue_db - print "Waiting 3min for cleanup process..." 
+ print("Needs to cleanup:", called_db, queue_db) + print("Waiting 3min for cleanup process...") time.sleep(60 * 3) - print "Cleaned up:", called_db, queue_db + print("Cleaned up:", called_db, queue_db) diff --git a/src/util/SslPatch.py b/src/util/SslPatch.py index a1e5f33f..67b50b91 100644 --- a/src/util/SslPatch.py +++ b/src/util/SslPatch.py @@ -57,7 +57,7 @@ def disableSSLCompression(): try: openssl = openLibrary() openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p - except Exception, err: + except Exception as err: logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err) return False @@ -69,7 +69,7 @@ def disableSSLCompression(): if config.disable_sslcompression: try: disableSSLCompression() - except Exception, err: + except Exception as err: logging.debug("Error disabling SSL compression: %s" % err) diff --git a/src/util/UpnpPunch.py b/src/util/UpnpPunch.py index 7f14198d..d700a24f 100644 --- a/src/util/UpnpPunch.py +++ b/src/util/UpnpPunch.py @@ -1,8 +1,8 @@ import re -import urllib2 -import httplib +import urllib.request +import http.client import logging -from urlparse import urlparse +from urllib.parse import urlparse from xml.dom.minidom import parseString from xml.parsers.expat import ExpatError @@ -84,7 +84,7 @@ def _retrieve_igd_profile(url): Retrieve the device's UPnP profile. """ try: - return urllib2.urlopen(url.geturl(), timeout=5).read().decode('utf-8') + return urllib.request.urlopen(url.geturl(), timeout=5).read().decode('utf-8') except socket.error: raise IGDError('IGD profile query timed out') @@ -251,7 +251,7 @@ def _send_soap_request(location, upnp_schema, control_path, soap_fn, } logging.debug("Sending UPnP request to {0}:{1}...".format( location.hostname, location.port)) - conn = httplib.HTTPConnection(location.hostname, location.port) + conn = http.client.HTTPConnection(location.hostname, location.port) conn.request('POST', control_path, soap_message, headers) response = conn.getresponse() @@ -366,10 +366,12 @@ if __name__ == "__main__": import time s = time.time() - print "Opening port..." - print ask_to_open_port(15443, "ZeroNet", protos=["TCP"]) - print "Done in", time.time() - s + print("Opening port...") + print(ask_to_open_port(15443, "ZeroNet", protos=["TCP"])) + print("Done in", time.time() - s) - print "Closing port..." 
diff --git a/src/util/__init__.py b/src/util/__init__.py
index 1c873327..7cf8ecf7 100644
--- a/src/util/__init__.py
+++ b/src/util/__init__.py
@@ -1,3 +1,3 @@
-from Event import Event
-from Noparallel import Noparallel
-from Pooled import Pooled
+from .Event import Event
+from .Noparallel import Noparallel
+from .Pooled import Pooled
diff --git a/src/util/helper.py b/src/util/helper.py
index 9b8cb12a..2e2c1697 100644
--- a/src/util/helper.py
+++ b/src/util/helper.py
@@ -67,7 +67,7 @@ def getFreeSpace():
             ctypes.c_wchar_p(config.data_dir), None, None, ctypes.pointer(free_space_pointer)
         )
         free_space = free_space_pointer.value
-    except Exception, err:
+    except Exception as err:
         logging.error("GetFreeSpace error: %s" % err)
     return free_space
 
@@ -153,7 +153,7 @@ def toHashId(hash):
 def mergeDicts(dicts):
     back = collections.defaultdict(set)
     for d in dicts:
-        for key, val in d.iteritems():
+        for key, val in d.items():
             back[key].update(val)
     return dict(back)
 
@@ -161,16 +161,16 @@ def mergeDicts(dicts):
 
 # Request https url using gevent SSL error workaround
 def httpRequest(url, as_file=False):
     if url.startswith("http://"):
-        import urllib
-        response = urllib.urlopen(url)
+        import urllib.request
+        response = urllib.request.urlopen(url)
     else:  # Hack to avoid Python gevent ssl errors
         import socket
-        import httplib
+        import http.client
         import ssl
 
         host, request = re.match("https://(.*?)(/.*?)$", url).groups()
-        conn = httplib.HTTPSConnection(host)
+        conn = http.client.HTTPSConnection(host)
         sock = socket.create_connection((conn.host, conn.port), conn.timeout, conn.source_address)
         conn.sock = ssl.wrap_socket(sock, conn.key_file, conn.cert_file)
         conn.request("GET", request)
@@ -180,8 +180,8 @@ def httpRequest(url, as_file=False):
         response = httpRequest(response.getheader('Location'))
 
     if as_file:
-        import cStringIO as StringIO
-        data = StringIO.StringIO()
+        import io
+        data = io.BytesIO()
         while True:
             buff = response.read(1024 * 16)
             if not buff:
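[Editor's note] In httpRequest(), io.BytesIO (not io.StringIO) is the right replacement for cStringIO.StringIO: response.read() returns bytes in Python 3, and io.StringIO would raise TypeError on bytes input. A standalone sketch of the buffering behavior:

    import io

    data = io.BytesIO()            # py2 equivalent: StringIO.StringIO()
    data.write(b"first chunk ")    # bytes in...
    data.write(b"second chunk")
    data.seek(0)                   # rewind before handing the buffer to a reader
    assert data.read() == b"first chunk second chunk"  # ...bytes out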
+ print("- Starting ZeroNet...") main = None try: @@ -30,13 +30,13 @@ def main(): try: if "lib.opensslVerify" in sys.modules: sys.modules["lib.opensslVerify"].opensslVerify.closeLibrary() - except Exception, err: - print "Error closing opensslVerify lib", err + except Exception as err: + print("Error closing opensslVerify lib", err) try: if "lib.pyelliptic" in sys.modules: sys.modules["lib.pyelliptic"].openssl.closeLibrary() - except Exception, err: - print "Error closing pyelliptic lib", err + except Exception as err: + print("Error closing pyelliptic lib", err) # Close lock file sys.modules["main"].lock.close() @@ -44,23 +44,23 @@ def main(): # Update try: update.update() - except Exception, err: - print "Update error: %s" % err + except Exception as err: + print("Update error: %s" % err) - except Exception, err: # Prevent closing + except Exception as err: # Prevent closing import traceback try: import logging logging.exception("Unhandled exception: %s" % err) - except Exception, log_err: - print "Failed to log error:", log_err + except Exception as log_err: + print("Failed to log error:", log_err) traceback.print_exc() from Config import config traceback.print_exc(file=open(config.log_dir + "/error.log", "a")) if main and (main.update_after_shutdown or main.restart_after_shutdown): # Updater import atexit - print "Restarting..." + print("Restarting...") # Close log files logger = sys.modules["main"].logging.getLogger() @@ -83,11 +83,11 @@ def main(): args = ['"%s"' % arg for arg in args] try: - print "Executing %s %s" % (sys.executable, args) + print("Executing %s %s" % (sys.executable, args)) os.execv(sys.executable, args) - except Exception, err: - print "Execv error: %s" % err - print "Bye." + except Exception as err: + print("Execv error: %s" % err) + print("Bye.") if __name__ == '__main__':