Change to Python3 coding style

parent fc0fe0557b
commit b0b9a4d33c
137 changed files with 910 additions and 913 deletions
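In the hunks below, each changed line is shown in its Python 2 form immediately followed by its Python 3 replacement. The same handful of conversions recur across all plugins: print statements become the print() function, iteritems/itervalues/iterkeys become items/values/keys (wrapped in list() where the result is indexed or mutated), "except Exception, err" becomes "except Exception as err", implicit relative imports become "from . import ...", text/bytes are separated with b"" literals, cgi.escape is replaced by html.escape, parenthesized lambda parameters lose their parentheses, and the removed long type is dropped from isinstance checks. A condensed, runnable illustration of these patterns (assumed example values, not lines taken from any single ZeroNet file):

    import html

    peers = {"1.2.3.4:15441": {"found": 0}}
    for peer_id in list(peers.keys()):      # list() because the dict is mutated while iterating
        del peers[peer_id]

    try:
        raise ValueError("boom")
    except Exception as err:                # "except Exception, err:" is a syntax error in Python 3
        print("error: %s" % err)            # print is a function, not a statement

    data = b"\0" * 10                       # binary data is written as bytes literals
    print(len(data), html.escape("<b>title</b>"))   # cgi.escape() is replaced by html.escape()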
@@ -4,7 +4,7 @@ import gevent
from Plugin import PluginManager
from Config import config
import BroadcastServer
from . import BroadcastServer
@PluginManager.registerTo("SiteAnnouncer")

@@ -42,7 +42,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):
if force: # Probably new site added, clean cache
self.known_peers = {}
for peer_id, known_peer in self.known_peers.items():
for peer_id, known_peer in list(self.known_peers.items()):
if time.time() - known_peer["found"] > 20 * 60:
del(self.known_peers[peer_id])
self.log.debug("Timeout, removing from known_peers: %s" % peer_id)

@@ -78,7 +78,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):
def actionSiteListRequest(self, sender, params):
back = []
sites = self.server.sites.values()
sites = list(self.server.sites.values())
# Split adresses to group of 100 to avoid UDP size limit
site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)]
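Note on the list() wrappers above and in the test hunks below: in Python 3, dict.keys(), dict.values() and dict.items() return live views, so the dictionary cannot be modified while iterating over the view directly, and a view cannot be indexed. A minimal sketch with made-up data:

    known_peers = {"peer1": {"found": 0}, "peer2": {"found": 1}}
    for peer_id, known_peer in list(known_peers.items()):  # snapshot the view before mutating
        if known_peer["found"] < 1:
            del known_peers[peer_id]                       # safe: we iterate over a copy
    first_site = list({"addr": "site"}.values())[0]        # views cannot be indexed; a list can
    print(known_peers, first_site)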
@@ -85,10 +85,10 @@ class TestAnnounce:
def testPeerDiscover(self, announcer, announcer_remote, site):
assert announcer.server.peer_id != announcer_remote.server.peer_id
assert len(announcer.server.sites.values()[0].peers) == 0
assert len(list(announcer.server.sites.values())[0].peers) == 0
announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port)
time.sleep(0.1)
assert len(announcer.server.sites.values()[0].peers) == 1
assert len(list(announcer.server.sites.values())[0].peers) == 1
def testRecentPeerList(self, announcer, announcer_remote, site):
assert len(site.peers_recent) == 0

@@ -101,13 +101,13 @@ class TestAnnounce:
assert len(site.peers) == 1
# It should update peer without siteListResponse
last_time_found = site.peers.values()[0].time_found
last_time_found = list(site.peers.values())[0].time_found
site.peers_recent.clear()
with Spy.Spy(announcer, "handleMessage") as responses:
announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
time.sleep(0.1)
assert [response[1]["cmd"] for response in responses] == ["discoverResponse"]
assert len(site.peers_recent) == 1
assert site.peers.values()[0].time_found > last_time_found
assert list(site.peers.values())[0].time_found > last_time_found
@@ -1 +1 @@
import AnnounceLocalPlugin
from . import AnnounceLocalPlugin
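The bare "import AnnounceLocalPlugin" relied on Python 2's implicit relative imports, which Python 3 removed; sibling modules inside a plugin package must now be imported explicitly. A sketch of the idea, using a hypothetical package layout rather than the real plugin:

    # plugins/Example/__init__.py -- hypothetical package layout
    from . import ExamplePlugin            # explicit relative import of a sibling module
    from .ExamplePlugin import SomeClass   # or import a name from it directly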
@@ -75,7 +75,7 @@ class TrackerStorage(object):
def getWorkingTrackers(self, type="shared"):
trackers = {
key: tracker for key, tracker in self.getTrackers(type).iteritems()
key: tracker for key, tracker in self.getTrackers(type).items()
if tracker["time_success"] > time.time() - 60 * 60
}
return trackers

@@ -95,7 +95,7 @@ class TrackerStorage(object):
trackers = self.getTrackers()
self.log.debug("Loaded %s shared trackers" % len(trackers))
for address, tracker in trackers.items():
for address, tracker in list(trackers.items()):
tracker["num_error"] = 0
if not address.startswith("zero://"):
del trackers[address]

@@ -144,7 +144,7 @@ class SiteAnnouncerPlugin(object):
tracker_storage.time_discover = time.time()
gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers())
trackers = super(SiteAnnouncerPlugin, self).getTrackers()
shared_trackers = tracker_storage.getTrackers("shared").keys()
shared_trackers = list(tracker_storage.getTrackers("shared").keys())
if shared_trackers:
return trackers + shared_trackers
else:

@@ -164,7 +164,7 @@ class SiteAnnouncerPlugin(object):
@PluginManager.registerTo("FileRequest")
class FileRequestPlugin(object):
def actionGetTrackers(self, params):
shared_trackers = tracker_storage.getWorkingTrackers("shared").keys()
shared_trackers = list(tracker_storage.getWorkingTrackers("shared").keys())
self.response({"trackers": shared_trackers})
@@ -12,7 +12,6 @@ class TestAnnounceShare:
open("%s/trackers.json" % config.data_dir, "w").write("{}")
tracker_storage = AnnounceSharePlugin.tracker_storage
tracker_storage.load()
print tracker_storage.file_path, config.data_dir
peer = Peer(file_server.ip, 1544, connection_server=file_server)
assert peer.request("getTrackers")["trackers"] == []

@@ -1 +1 @@
import AnnounceSharePlugin
from . import AnnounceSharePlugin
@@ -119,7 +119,7 @@ class SiteAnnouncerPlugin(object):
onion = self.site.connection_server.tor_manager.getOnion(site.address)
publickey = self.site.connection_server.tor_manager.getPublickey(onion)
if publickey not in request["onion_signs"]:
sign = CryptRsa.sign(res["onion_sign_this"], self.site.connection_server.tor_manager.getPrivatekey(onion))
sign = CryptRsa.sign(res["onion_sign_this"].encode("utf8"), self.site.connection_server.tor_manager.getPrivatekey(onion))
request["onion_signs"][publickey] = sign
res = tracker_peer.request("announce", request)
if not res or "onion_sign_this" in res:

@@ -1 +1 @@
import AnnounceZeroPlugin
from . import AnnounceZeroPlugin
@@ -4,7 +4,7 @@ import array
def packPiecefield(data):
res = []
if not data:
return array.array("H", "")
return array.array("H", b"")
if data[0] == "0":
res.append(0)

@@ -48,7 +48,7 @@ class BigfilePiecefield(object):
__slots__ = ["data"]
def __init__(self):
self.data = ""
self.data = b""
def fromstring(self, s):
self.data = s

@@ -80,7 +80,7 @@ class BigfilePiecefieldPacked(object):
__slots__ = ["data"]
def __init__(self):
self.data = ""
self.data = b""
def fromstring(self, data):
self.data = packPiecefield(data).tostring()
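The empty-string defaults above become b"" because Python 3 separates text (str) from binary data (bytes); the piecefield buffers and array.array("H", ...) operate on bytes. A small, self-contained illustration (not the plugin's own code):

    import array

    buf = array.array("H", b"")   # array.array("H", "") raises TypeError on Python 3
    buf.frombytes(b"\x01\x00")
    data = b"" + buf.tobytes()    # binary buffers stay bytes end to end
    print(len(data))              # -> 2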
@@ -116,7 +116,7 @@ if __name__ == "__main__":
meminfo = psutil.Process(os.getpid()).memory_info
for storage in [BigfilePiecefieldPacked, BigfilePiecefield]:
print "-- Testing storage: %s --" % storage
print("-- Testing storage: %s --" % storage))
m = meminfo()[0]
s = time.time()
piecefields = {}

@@ -125,34 +125,34 @@ if __name__ == "__main__":
piecefield.fromstring(testdata[:i] + "0" + testdata[i + 1:])
piecefields[i] = piecefield
print "Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
val = piecefield[1000]
print "Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
piecefield[1000] = True
print "Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
packed = piecefield.pack()
print "Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed))
print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
piecefield.unpack(packed)
print "Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
piecefields = {}
@@ -5,7 +5,6 @@ import shutil
import collections
import math
import msgpack
import gevent
import gevent.lock

@@ -15,7 +14,7 @@ from Crypt import CryptHash
from lib import merkletools
from util import helper
import util
from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
# We can only import plugin host clases after the plugins are loaded

@@ -61,7 +60,7 @@ class UiRequestPlugin(object):
)
if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split
hash = piecemap_info["sha512_pieces"][0].encode("hex")
hash = piecemap_info["sha512_pieces"][0].hex()
hash_id = site.content_manager.hashfield.getHashId(hash)
site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True)

@@ -178,7 +177,7 @@ class UiWebsocketPlugin(object):
self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True)
try:
self.site.storage.delete(piecemap_inner_path)
except Exception, err:
except Exception as err:
self.log.error("File %s delete error: %s" % (piecemap_inner_path, err))
return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path)
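Two Python 3 changes appear in the hunks above: bytes objects no longer support .encode("hex"), so a binary digest is rendered with .hex(), and the old "except Exception, err" syntax is gone. A brief sketch with assumed data:

    import hashlib

    digest = hashlib.sha512(b"piece data").digest()
    print(digest.hex())                  # Python 2 spelled this digest.encode("hex")

    try:
        raise OSError("delete failed")
    except Exception as err:             # "except Exception, err:" is invalid syntax in Python 3
        print("File delete error: %s" % err)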
@@ -324,7 +323,7 @@ class ContentManagerPlugin(object):
def verifyPiece(self, inner_path, pos, piece):
piecemap = self.getPiecemap(inner_path)
piece_i = pos / piecemap["piece_size"]
piece_i = int(pos / piecemap["piece_size"])
if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
raise VerifyError("Invalid hash")
return True
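Wrapping the division in int() is needed because "/" is true division in Python 3 and would make the piece index a float. Floor division gives the same result for these positive offsets; a sketch with assumed sizes:

    piece_size = 1024 * 1024
    pos = 5 * 1024 * 1024 + 123

    piece_i = int(pos / piece_size)       # 5: cast back to an int usable as an index
    assert piece_i == pos // piece_size   # floor division gives the same index here
    print(piece_i)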
@@ -345,7 +344,7 @@ class ContentManagerPlugin(object):
file_info = self.getFileInfo(inner_path)
# Mark piece downloaded
piece_i = pos_from / file_info["piece_size"]
piece_i = int(pos_from / file_info["piece_size"])
self.site.storage.piecefields[file_info["sha512"]][piece_i] = True
# Only add to site size on first request

@@ -368,7 +367,7 @@ class ContentManagerPlugin(object):
del self.site.storage.piecefields[sha512]
# Also remove other pieces of the file from download queue
for key in self.site.bad_files.keys():
for key in list(self.site.bad_files.keys()):
if key.startswith(inner_path + "|"):
del self.site.bad_files[key]
self.site.worker_manager.removeSolvedFileTasks()

@@ -381,9 +380,9 @@ class SiteStoragePlugin(object):
super(SiteStoragePlugin, self).__init__(*args, **kwargs)
self.piecefields = collections.defaultdict(BigfilePiecefield)
if "piecefields" in self.site.settings.get("cache", {}):
for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").iteritems():
for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
if piecefield_packed:
self.piecefields[sha512].unpack(piecefield_packed.decode("base64"))
self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed))
self.site.settings["cache"]["piecefields"] = {}
def createSparseFile(self, inner_path, size, sha512=None):
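str.decode("base64") relied on Python 2 codec aliases that were removed; the packed piecefields are now decoded with the base64 module (and encoded with base64.b64encode further down in getSettingsCache). Minimal sketch, assuming the module imports base64:

    import base64

    packed = b"\x01\x02\x03"
    cached = base64.b64encode(packed).decode("utf8")   # text-safe form for the settings JSON
    assert base64.b64decode(cached) == packed          # restores the original bytes
    print(cached)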
@@ -486,7 +485,7 @@ class BigFile(object):
requests = []
# Request all required blocks
while 1:
piece_i = pos / self.piece_size
piece_i = int(pos / self.piece_size)
if piece_i * self.piece_size >= read_until:
break
pos_from = piece_i * self.piece_size

@@ -503,7 +502,7 @@ class BigFile(object):
prebuffer_until = min(self.size, read_until + self.prebuffer)
priority = 3
while 1:
piece_i = pos / self.piece_size
piece_i = int(pos / self.piece_size)
if piece_i * self.piece_size >= prebuffer_until:
break
pos_from = piece_i * self.piece_size

@@ -565,7 +564,7 @@ class WorkerManagerPlugin(object):
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
task["piece_i"] = pos_from / file_info["piece_size"]
task["piece_i"] = int(pos_from / file_info["piece_size"])
task["sha512"] = file_info["sha512"]
else:
if inner_path in self.site.bad_files:

@@ -601,10 +600,10 @@ class WorkerManagerPlugin(object):
class FileRequestPlugin(object):
def isReadable(self, site, inner_path, file, pos):
# Peek into file
if file.read(10) == "\0" * 10:
if file.read(10) == b"\0" * 10:
# Looks empty, but makes sures we don't have that piece
file_info = site.content_manager.getFileInfo(inner_path)
piece_i = pos / file_info["piece_size"]
piece_i = int(pos / file_info["piece_size"])
if not site.storage.piecefields[file_info["sha512"]][piece_i]:
return False
# Seek back to position we want to read

@@ -622,7 +621,7 @@ class FileRequestPlugin(object):
if not peer.connection: # Just added
peer.connect(self.connection) # Assign current connection to peer
piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.iteritems()}
piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()}
self.response({"piecefields_packed": piecefields_packed})
def actionSetPiecefields(self, params):

@@ -638,7 +637,7 @@ class FileRequestPlugin(object):
peer.connect(self.connection)
peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
for sha512, piecefield_packed in params["piecefields_packed"].iteritems():
for sha512, piecefield_packed in params["piecefields_packed"].items():
peer.piecefields[sha512].unpack(piecefield_packed)
site.settings["has_bigfile"] = True

@@ -673,7 +672,7 @@ class PeerPlugin(object):
self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
try:
for sha512, piecefield_packed in res["piecefields_packed"].iteritems():
for sha512, piecefield_packed in res["piecefields_packed"].items():
self.piecefields[sha512].unpack(piecefield_packed)
except Exception as err:
self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err))

@@ -720,7 +719,7 @@ class SitePlugin(object):
def getSettingsCache(self):
back = super(SitePlugin, self).getSettingsCache()
if self.storage.piecefields:
back["piecefields"] = {sha512: piecefield.pack().encode("base64") for sha512, piecefield in self.storage.piecefields.iteritems()}
back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()}
return back
def needFile(self, inner_path, *args, **kwargs):
@@ -1,5 +1,5 @@
import time
from cStringIO import StringIO
import io
import pytest
import msgpack

@@ -40,7 +40,7 @@ class TestBigfile:
piecemap = msgpack.unpack(site.storage.open(file_node["piecemap"], "rb"))["optional.any.iso"]
assert len(piecemap["sha512_pieces"]) == 10
assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1]
assert piecemap["sha512_pieces"][0].encode("hex") == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
assert piecemap["sha512_pieces"][0].hex() == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
def testVerifyPiece(self, site):
inner_path = self.createBigfile(site)

@@ -48,7 +48,7 @@ class TestBigfile:
# Verify all 10 piece
f = site.storage.open(inner_path, "rb")
for i in range(10):
piece = StringIO(f.read(1024 * 1024))
piece = io.BytesIO(f.read(1024 * 1024))
piece.seek(0)
site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
f.close()
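cStringIO.StringIO no longer exists in Python 3; since the pieces are binary, the tests now wrap them in io.BytesIO. Sketch with placeholder data:

    import io

    piece = io.BytesIO(b"piece payload")   # in-memory binary file, replaces cStringIO.StringIO
    piece.seek(0)
    assert piece.read(5) == b"piece"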
@@ -57,7 +57,7 @@ class TestBigfile:
with pytest.raises(VerifyError) as err:
i = 1
f = site.storage.open(inner_path, "rb")
piece = StringIO(f.read(1024 * 1024))
piece = io.BytesIO(f.read(1024 * 1024))
f.close()
site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
assert "Invalid hash" in str(err)

@@ -70,19 +70,19 @@ class TestBigfile:
# Write to file beginning
s = time.time()
f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), "hellostart" * 1024)
f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024)
time_write_start = time.time() - s
# Write to file end
s = time.time()
f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), "helloend" * 1024)
f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024)
time_write_end = time.time() - s
# Verify writes
f = site.storage.open(inner_path)
assert f.read(10) == "hellostart"
assert f.read(10) == b"hellostart"
f.seek(99 * 1024 * 1024)
assert f.read(8) == "helloend"
assert f.read(8) == b"helloend"
f.close()
site.storage.delete(inner_path)

@@ -105,7 +105,7 @@ class TestBigfile:
buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size
assert buff.getvalue().startswith("Test524") # Correct data
assert buff.getvalue().startswith(b"Test524") # Correct data
buff.seek(0)
assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash

@@ -147,12 +147,12 @@ class TestBigfile:
# Verify 0. block not downloaded
f = site_temp.storage.open(inner_path)
assert f.read(10) == "\0" * 10
assert f.read(10) == b"\0" * 10
# Verify 5. and 10. block downloaded
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
f.seek(9 * 1024 * 1024)
assert f.read(7) == "943---T"
assert f.read(7) == b"943---T"
# Verify hashfield
assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) # 18343: data/optional.any.iso, 30970: data/optional.any.iso.hashmap.msgpack

@@ -178,14 +178,14 @@ class TestBigfile:
with site_temp.storage.openBigfile(inner_path) as f:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
f.seek(9 * 1024 * 1024)
assert f.read(7) == "943---T"
assert f.read(7) == b"943---T"
assert len(requests) == 4 # 1x peicemap + 1x getpiecefield + 2x for pieces
assert set(site_temp.content_manager.hashfield) == set([18343, 30970])
assert set(site_temp.content_manager.hashfield) == set([18343, 43727])
assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
assert f.sha512 in site_temp.getSettingsCache()["piecefields"]

@@ -193,7 +193,7 @@ class TestBigfile:
# Test requesting already downloaded
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
assert len(requests) == 0

@@ -201,9 +201,9 @@ class TestBigfile:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024) # We already have this block
data = f.read(1024 * 1024 * 3) # Our read overflow to 6. and 7. block
assert data.startswith("Test524")
assert data.endswith("Test838-")
assert "\0" not in data # No null bytes allowed
assert data.startswith(b"Test524")
assert data.endswith(b"Test838-")
assert b"\0" not in data # No null bytes allowed
assert len(requests) == 2 # Two block download

@@ -258,11 +258,11 @@ class TestBigfile:
# Download second block
with site_temp.storage.openBigfile(inner_path) as f:
f.seek(1024 * 1024)
assert f.read(1024)[0] != "\0"
assert f.read(1024)[0:1] != b"\0"
# Make sure first block not download
with site_temp.storage.open(inner_path) as f:
assert f.read(1024)[0] == "\0"
assert f.read(1024)[0:1] == b"\0"
peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)

@@ -284,8 +284,8 @@ class TestBigfile:
s = time.time()
for i in range(25000):
site.addPeer(file_server.ip, i)
print "%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024) # 0.082s MEM: + 6800KB
print site.peers.values()[0].piecefields
print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)) # 0.082s MEM: + 6800KB
print(list(site.peers.values())[0].piecefields)
def testUpdatePiecefield(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)

@@ -390,16 +390,16 @@ class TestBigfile:
size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]
with site_temp.storage.openBigfile(inner_path) as f:
assert "\0" not in f.read(1024)
assert b"\0" not in f.read(1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
with site_temp.storage.openBigfile(inner_path) as f:
# Don't count twice
assert "\0" not in f.read(1024)
assert b"\0" not in f.read(1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
# Add second block
assert "\0" not in f.read(1024 * 1024)
assert b"\0" not in f.read(1024 * 1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
def testPrebuffer(self, file_server, site, site_temp):

@@ -423,7 +423,7 @@ class TestBigfile:
with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
# assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces
assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2

@@ -434,7 +434,7 @@ class TestBigfile:
# No prebuffer beyond end of the file
f.seek(9 * 1024 * 1024)
assert "\0" not in f.read(7)
assert b"\0" not in f.read(7)
assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0

@@ -1,2 +1,2 @@
import BigfilePlugin
from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
from . import BigfilePlugin
from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
@@ -29,7 +29,7 @@ class ChartCollector(object):
sites = file_server.sites
if not sites:
return collectors
content_db = sites.values()[0].content_manager.contents.db
content_db = list(sites.values())[0].content_manager.contents.db
# Connection stats
collectors["connection"] = lambda: len(file_server.connections)

@@ -67,8 +67,8 @@ class ChartCollector(object):
collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()])
# Peers
collectors["peer"] = lambda (peers): len(peers)
collectors["peer_onion"] = lambda (peers): len([True for peer in peers if ".onion" in peer])
collectors["peer"] = lambda peers: len(peers)
collectors["peer_onion"] = lambda peers: len([True for peer in peers if ".onion" in peer])
# Size
collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()])

@@ -81,21 +81,21 @@ class ChartCollector(object):
site_collectors = {}
# Size
site_collectors["site_size"] = lambda(site): site.settings.get("size", 0)
site_collectors["site_size_optional"] = lambda(site): site.settings.get("size_optional", 0)
site_collectors["site_optional_downloaded"] = lambda(site): site.settings.get("optional_downloaded", 0)
site_collectors["site_content"] = lambda(site): len(site.content_manager.contents)
site_collectors["site_size"] = lambda site: site.settings.get("size", 0)
site_collectors["site_size_optional"] = lambda site: site.settings.get("size_optional", 0)
site_collectors["site_optional_downloaded"] = lambda site: site.settings.get("optional_downloaded", 0)
site_collectors["site_content"] = lambda site: len(site.content_manager.contents)
# Data transfer
site_collectors["site_bytes_recv|change"] = lambda(site): site.settings.get("bytes_recv", 0)
site_collectors["site_bytes_sent|change"] = lambda(site): site.settings.get("bytes_sent", 0)
site_collectors["site_bytes_recv|change"] = lambda site: site.settings.get("bytes_recv", 0)
site_collectors["site_bytes_sent|change"] = lambda site: site.settings.get("bytes_sent", 0)
# Peers
site_collectors["site_peer"] = lambda(site): len(site.peers)
site_collectors["site_peer_onion"] = lambda(site): len(
[True for peer in site.peers.itervalues() if peer.ip.endswith(".onion")]
site_collectors["site_peer"] = lambda site: len(site.peers)
site_collectors["site_peer_onion"] = lambda site: len(
[True for peer in site.peers.values() if peer.ip.endswith(".onion")]
)
site_collectors["site_peer_connected"] = lambda(site): len([True for peer in site.peers.itervalues() if peer.connection])
site_collectors["site_peer_connected"] = lambda site: len([True for peer in site.peers.values() if peer.connection])
return site_collectors
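The "lambda (site): ..." spelling relied on Python 2 tuple parameter unpacking, which PEP 3113 removed; in Python 3 the parenthesized parameter is a syntax error, so the argument is written bare. A tiny runnable sketch using a made-up stand-in object:

    site_collectors = {}
    site_collectors["site_peer"] = lambda site: len(site.peers)   # plain single argument

    class FakeSite:                 # minimal stand-in just for this example
        peers = {"peer1": object()}

    assert site_collectors["site_peer"](FakeSite()) == 1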
@@ -109,7 +109,7 @@ class ChartCollector(object):
if site is None:
peers = self.getUniquePeers()
datas = {}
for key, collector in collectors.iteritems():
for key, collector in collectors.items():
try:
if site:
value = collector(site)

@@ -138,7 +138,7 @@ class ChartCollector(object):
s = time.time()
datas = self.collectDatas(collectors, last_values["global"])
values = []
for key, value in datas.iteritems():
for key, value in datas.items():
values.append((self.db.getTypeId(key), value, now))
self.log.debug("Global collectors done in %.3fs" % (time.time() - s))

@@ -154,9 +154,9 @@ class ChartCollector(object):
now = int(time.time())
s = time.time()
values = []
for address, site in sites.iteritems():
for address, site in sites.items():
site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site)
for key, value in site_datas.iteritems():
for key, value in site_datas.items():
values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now))
time.sleep(0.000001)
self.log.debug("Site collections done in %.3fs" % (time.time() - s))

@@ -6,8 +6,8 @@ import gevent
from Config import config
from util import helper
from Plugin import PluginManager
from ChartDb import ChartDb
from ChartCollector import ChartCollector
from .ChartDb import ChartDb
from .ChartCollector import ChartCollector
if "db" not in locals().keys(): # Share on reloads
db = ChartDb()

@@ -39,7 +39,7 @@ class UiWebsocketPlugin(object):
if not query.strip().upper().startswith("SELECT"):
raise Exception("Only SELECT query supported")
res = db.execute(query, params)
except Exception, err: # Response the error to client
except Exception as err: # Response the error to client
self.log.error("ChartDbQuery error: %s" % err)
return {"error": str(err)}
# Convert result to dict

@@ -1 +1 @@
import ChartPlugin
from . import ChartPlugin
@@ -1,13 +1,13 @@
import time
import re
import cgi
import html
import hashlib
from Plugin import PluginManager
from Translate import Translate
from Config import config
from ContentFilterStorage import ContentFilterStorage
from .ContentFilterStorage import ContentFilterStorage
if "_" not in locals():

@@ -39,8 +39,8 @@ class UiWebsocketPlugin(object):
else:
self.cmd(
"confirm",
[_["Hide all content from <b>%s</b>?"] % cgi.escape(cert_user_id), _["Mute"]],
lambda (res): self.cbMuteAdd(to, auth_address, cert_user_id, reason)
[_["Hide all content from <b>%s</b>?"] % html.escape(cert_user_id), _["Mute"]],
lambda res: self.cbMuteAdd(to, auth_address, cert_user_id, reason)
)
def cbMuteRemove(self, to, auth_address):

@@ -55,8 +55,8 @@ class UiWebsocketPlugin(object):
else:
self.cmd(
"confirm",
[_["Unmute <b>%s</b>?"] % cgi.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
lambda (res): self.cbMuteRemove(to, auth_address)
[_["Unmute <b>%s</b>?"] % html.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
lambda res: self.cbMuteRemove(to, auth_address)
)
def actionMuteList(self, to):
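cgi.escape() was deprecated in Python 3 and later removed; html.escape() is the replacement, and it also escapes quotes by default. Sketch with an assumed value:

    import html

    cert_user_id = 'user@zeroid.bit <b>"quoted"</b>'
    print(html.escape(cert_user_id))
    # -> user@zeroid.bit &lt;b&gt;&quot;quoted&quot;&lt;/b&gt;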
@@ -101,13 +101,13 @@ class UiWebsocketPlugin(object):
else:
content = site.storage.loadJson(inner_path)
title = _["New shared global content filter: <b>%s</b> (%s sites, %s users)"] % (
cgi.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
html.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
)
self.cmd(
"confirm",
[title, "Add"],
lambda (res): self.cbFilterIncludeAdd(to, res, address, inner_path, description)
lambda res: self.cbFilterIncludeAdd(to, res, address, inner_path, description)
)
def cbFilterIncludeAdd(self, to, res, address, inner_path, description):

@@ -189,7 +189,7 @@ class UiRequestPlugin(object):
address = self.server.site_manager.resolveDomain(address)
if address:
address_sha256 = "0x" + hashlib.sha256(address).hexdigest()
address_sha256 = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest()
else:
address_sha256 = None

@@ -62,7 +62,7 @@ class ContentFilterStorage(object):
)
continue
for key, val in content.iteritems():
for key, val in content.items():
if type(val) is not dict:
continue

@@ -1 +1 @@
import ContentFilterPlugin
from . import ContentFilterPlugin

@@ -1,5 +1,5 @@
import re
import cgi
import html
import copy
from Plugin import PluginManager

@@ -78,8 +78,8 @@ class UiWebsocketPlugin(object):
self.cmd(
"confirm",
[_["This site requests <b>read</b> permission to: <b>%s</b>"] % cgi.escape(site_name), button_title],
lambda (res): self.cbCorsPermission(to, address)
[_["This site requests <b>read</b> permission to: <b>%s</b>"] % html.escape(site_name), button_title],
lambda res: self.cbCorsPermission(to, address)
)
def cbCorsPermission(self, to, address):

@@ -1 +1 @@
import CorsPlugin
from . import CorsPlugin
@@ -43,11 +43,11 @@ def getEcc(privatekey=None):
def toOpensslPrivatekey(privatekey):
privatekey_bin = btctools.encode_privkey(privatekey, "bin")
return '\x02\xca\x00\x20' + privatekey_bin
return b'\x02\xca\x00\x20' + privatekey_bin
def toOpensslPublickey(publickey):
publickey_bin = btctools.encode_pubkey(publickey, "bin")
publickey_bin = publickey_bin[1:]
publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:]
publickey_openssl = b'\x02\xca\x00 ' + publickey_bin[:32] + b'\x00 ' + publickey_bin[32:]
return publickey_openssl

@@ -3,9 +3,9 @@ import os
from Plugin import PluginManager
from Crypt import CryptBitcoin
from lib.pybitcointools import bitcoin as btctools
import lib.pybitcointools as btctools
import CryptMessage
from . import CryptMessage
@PluginManager.registerTo("UiWebsocket")

@@ -1 +1 @@
import CryptMessagePlugin
from . import CryptMessagePlugin
@@ -48,7 +48,7 @@ class UiRequestPlugin(object):
if ".zip/" in path or ".tar.gz/" in path:
file_obj = None
path_parts = self.parsePath(path)
file_path = u"%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"].decode("utf8"))
file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"])
match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path)
archive_path, path_within = match.groups()
if archive_path not in archive_cache:

@@ -1 +1 @@
import FilePackPlugin
from . import FilePackPlugin

@@ -66,7 +66,7 @@ class UiWebsocketPlugin(object):
self.cmd(
"confirm",
[_["Add <b>%s</b> new site?"] % len(addresses), "Add"],
lambda (res): self.cbMergerSiteAdd(to, addresses)
lambda res: self.cbMergerSiteAdd(to, addresses)
)
self.response(to, "ok")

@@ -102,7 +102,7 @@ class UiWebsocketPlugin(object):
ret = {}
if not merger_types:
return self.response(to, {"error": "Not a merger site"})
for address, merged_type in merged_db.iteritems():
for address, merged_type in merged_db.items():
if merged_type not in merger_types:
continue # Site not for us
if query_site_info:

@@ -215,7 +215,7 @@ class UiWebsocketPlugin(object):
if not re.match("^[A-Za-z0-9-]+$", merger_type):
raise Exception("Invalid merger_type: %s" % merger_type)
merged_sites = []
for address, merged_type in merged_db.iteritems():
for address, merged_type in merged_db.items():
if merged_type != merger_type:
continue
site = self.server.sites.get(address)

@@ -253,18 +253,18 @@ class SiteStoragePlugin(object):
# Not a merger site, that's all
if not merger_types:
raise StopIteration
return
merged_sites = [
site_manager.sites[address]
for address, merged_type in merged_db.iteritems()
for address, merged_type in merged_db.items()
if merged_type in merger_types
]
found = 0
for merged_site in merged_sites:
self.log.debug("Loading merged site: %s" % merged_site)
merged_type = merged_db[merged_site.address]
for content_inner_path, content in merged_site.content_manager.contents.iteritems():
for content_inner_path, content in merged_site.content_manager.contents.items():
# content.json file itself
if merged_site.storage.isFile(content_inner_path): # Missing content.json file
merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)
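Swapping "raise StopIteration" for a bare return matters because the surrounding method is a generator: under PEP 479 (the default behaviour since Python 3.7), a StopIteration raised inside a generator body is converted into a RuntimeError instead of silently ending the iteration. A small sketch of the same idea with hypothetical names:

    def mergedContents(merger_types):
        if not merger_types:
            return                    # ends the generator cleanly; raising StopIteration
                                      # here becomes RuntimeError under PEP 479
        for merged_type in merger_types:
            yield "merged-%s" % merged_type

    assert list(mergedContents([])) == []
    assert list(mergedContents(["example"])) == ["merged-example"]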
@@ -273,7 +273,7 @@ class SiteStoragePlugin(object):
merged_site.log.error("[MISSING] %s" % content_inner_path)
# Data files in content.json
content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
if not file_relative_path.endswith(".json"):
continue # We only interesed in json files
file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir

@@ -285,7 +285,7 @@ class SiteStoragePlugin(object):
merged_site.log.error("[MISSING] %s" % file_inner_path)
found += 1
if found % 100 == 0:
time.sleep(0.000001) # Context switch to avoid UI block
time.sleep(0.001) # Context switch to avoid UI block
# Also notice merger sites on a merged site file change
def onUpdated(self, inner_path, file=None):

@@ -339,11 +339,11 @@ class SiteManagerPlugin(object):
site_manager = self
if not self.sites:
return
for site in self.sites.itervalues():
for site in self.sites.values():
# Update merged sites
try:
merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type")
except Exception, err:
except Exception as err:
self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err)))
continue
if merged_type:

@@ -368,7 +368,7 @@ class SiteManagerPlugin(object):
# Update merged to merger
if merged_type:
for merger_site in self.sites.itervalues():
for merger_site in self.sites.values():
if "Merger:" + merged_type in merger_site.settings["permissions"]:
if site.address not in merged_to_merger:
merged_to_merger[site.address] = []

@@ -1 +1 @@
import MergerSitePlugin
from . import MergerSitePlugin

@@ -37,7 +37,7 @@ class UiWebsocketPlugin(object):
total_s = time.time()
num_sites = 0
for address, site_data in self.user.sites.items():
for address, site_data in list(self.user.sites.items()):
feeds = site_data.get("follow")
if not feeds:
continue

@@ -45,7 +45,7 @@ class UiWebsocketPlugin(object):
self.log.debug("Invalid feed for site %s" % address)
continue
num_sites += 1
for name, query_set in feeds.iteritems():
for name, query_set in feeds.items():
site = SiteManager.site_manager.get(address)
if not site or not site.storage.has_db:
continue

@@ -78,7 +78,7 @@ class UiWebsocketPlugin(object):
for row in res:
row = dict(row)
if not isinstance(row["date_added"], (int, long, float, complex)):
if not isinstance(row["date_added"], (int, float, complex)):
self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"]))
continue
if row["date_added"] > 1000000000000: # Formatted as millseconds

@@ -116,7 +116,7 @@ class UiWebsocketPlugin(object):
search_text, filters = self.parseSearch(search)
for address, site in SiteManager.site_manager.list().iteritems():
for address, site in SiteManager.site_manager.list().items():
if not site.storage.has_db:
continue

@@ -137,7 +137,7 @@ class UiWebsocketPlugin(object):
num_sites += 1
for name, query in feeds.iteritems():
for name, query in feeds.items():
s = time.time()
try:
db_query = DbQuery(query)

@@ -162,7 +162,7 @@ class UiWebsocketPlugin(object):
db_query.parts["LIMIT"] = str(limit)
res = site.storage.query(str(db_query), params)
except Exception, err:
except Exception as err:
self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query})
continue

@@ -1 +1 @@
import NewsfeedPlugin
from . import NewsfeedPlugin
@@ -88,8 +88,8 @@ class ContentDbPlugin(object):
site_sizes[row["site_id"]]["optional_downloaded"] += row["size"]
# Site site size stats to sites.json settings
site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()}
for site_id, stats in site_sizes.iteritems():
site_ids_reverse = {val: key for key, val in self.site_ids.items()}
for site_id, stats in site_sizes.items():
site_address = site_ids_reverse.get(site_id)
if not site_address:
self.log.error("Not found site_id: %s" % site_id)

@@ -166,7 +166,7 @@ class ContentDbPlugin(object):
num = 0
site_id = self.site_ids[site.address]
content_inner_dir = helper.getDirname(content_inner_path)
for relative_inner_path, file in content.get("files_optional", {}).iteritems():
for relative_inner_path, file in content.get("files_optional", {}).items():
file_inner_path = content_inner_dir + relative_inner_path
hash_id = int(file["sha512"][0:4], 16)
if hash_id in site.content_manager.hashfield:

@@ -232,14 +232,14 @@ class ContentDbPlugin(object):
num_file = 0
num_updated = 0
num_site = 0
for site in self.sites.values():
for site in list(self.sites.values()):
if not site.content_manager.has_optional_files:
continue
if not site.settings["serving"]:
continue
has_updated_hashfield = next((
peer
for peer in site.peers.itervalues()
for peer in site.peers.values()
if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated
), None)

@@ -248,7 +248,7 @@ class ContentDbPlugin(object):
hashfield_peers = itertools.chain.from_iterable(
peer.hashfield.storage
for peer in site.peers.itervalues()
for peer in site.peers.values()
if peer.has_hashfield
)
peer_nums = collections.Counter(

@@ -270,7 +270,7 @@ class ContentDbPlugin(object):
updates[row["file_id"]] = peer_num
self.execute("BEGIN")
for file_id, peer_num in updates.iteritems():
for file_id, peer_num in updates.items():
self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id))
self.execute("END")

@@ -394,7 +394,7 @@ class ContentDbPlugin(object):
self.updatePeerNumbers()
site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()}
site_ids_reverse = {val: key for key, val in self.site_ids.items()}
deleted_file_ids = []
for row in self.queryDeletableFiles():
site_address = site_ids_reverse.get(row["site_id"])

@@ -6,7 +6,7 @@ import gevent
from util import helper
from Plugin import PluginManager
import ContentDbPlugin
from . import ContentDbPlugin
# We can only import plugin host clases after the plugins are loaded

@@ -24,7 +24,7 @@ def processAccessLog():
for site_id in access_log:
content_db.execute(
"UPDATE file_optional SET time_accessed = %s WHERE ?" % now,
{"site_id": site_id, "inner_path": access_log[site_id].keys()}
{"site_id": site_id, "inner_path": list(access_log[site_id].keys())}
)
num += len(access_log[site_id])
access_log.clear()

@@ -37,7 +37,7 @@ def processRequestLog():
num = 0
cur.execute("BEGIN")
for site_id in request_log:
for inner_path, uploaded in request_log[site_id].iteritems():
for inner_path, uploaded in request_log[site_id].items():
content_db.execute(
"UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" % uploaded,
{"site_id": site_id, "inner_path": inner_path}

@@ -101,7 +101,7 @@ class ContentManagerPlugin(object):
{"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id}
)
row = res.fetchone()
if row and row[0]:
if row and row["is_downloaded"]:
return True
else:
return False

@@ -191,7 +191,7 @@ class SitePlugin(object):
if is_downloadable:
return is_downloadable
for path in self.settings.get("optional_help", {}).iterkeys():
for path in self.settings.get("optional_help", {}).keys():
if inner_path.startswith(path):
return True
@@ -1,15 +1,7 @@
import hashlib
import os
import copy
import json
from cStringIO import StringIO
import pytest
from OptionalManager import OptionalManagerPlugin
from util import helper
from Crypt import CryptBitcoin
@pytest.mark.usefixtures("resetSettings")
class TestOptionalManager:

@@ -58,7 +50,7 @@ class TestOptionalManager:
assert not file_row["is_downloaded"]
# Write file from outside of ZeroNet
site.storage.open("testfile", "wb").write("A" * 1234) # For quick check hash does not matter only file size
site.storage.open("testfile", "wb").write(b"A" * 1234) # For quick check hash does not matter only file size
hashfield_len_before = len(site.content_manager.hashfield)
site.storage.verifyFiles(quick_check=True)

@@ -92,8 +84,8 @@ class TestOptionalManager:
assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd")
# Write files from outside of ZeroNet (For quick check hash does not matter only file size)
site.storage.open("testfile1", "wb").write("A" * 1234)
site.storage.open("testfile2", "wb").write("B" * 2345)
site.storage.open("testfile1", "wb").write(b"A" * 1234)
site.storage.open("testfile2", "wb").write(b"B" * 2345)
site.storage.verifyFiles(quick_check=True)

@@ -129,7 +121,6 @@ class TestOptionalManager:
assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1
def testOptionalDelete(self, site):
privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
contents = site.content_manager.contents
site.content_manager.setPin("data/img/zerotalk-upvote.png", True)

@@ -1,6 +1,6 @@
import re
import time
import cgi
import html
import gevent

@@ -28,7 +28,7 @@ class UiWebsocketPlugin(object):
content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time()
if len(content_db.my_optional_files) > 50: # Keep only last 50
oldest_key = min(
content_db.my_optional_files.iterkeys(),
iter(content_db.my_optional_files.keys()),
key=(lambda key: content_db.my_optional_files[key])
)
del content_db.my_optional_files[oldest_key]

@@ -80,7 +80,7 @@ class UiWebsocketPlugin(object):
# Add leech / seed stats
row["peer_seed"] = 0
row["peer_leech"] = 0
for peer in site.peers.itervalues():
for peer in site.peers.values():
if not peer.time_piecefields_updated or sha512 not in peer.piecefields:
continue
peer_piecefield = peer.piecefields[sha512].tostring()

@@ -212,7 +212,7 @@ class UiWebsocketPlugin(object):
num_file = len(inner_path)
if back == "ok":
if num_file == 1:
self.cmd("notification", ["done", _["Pinned %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000])
self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
else:
self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000])
self.response(to, back)

@@ -224,7 +224,7 @@ class UiWebsocketPlugin(object):
num_file = len(inner_path)
if back == "ok":
if num_file == 1:
self.cmd("notification", ["done", _["Removed pin from %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000])
self.cmd("notification", ["done", _["Removed pin from %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
else:
self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000])
self.response(to, back)

@@ -325,7 +325,7 @@ class UiWebsocketPlugin(object):
self.cmd("notification", [
"done",
_["You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>"] %
(cgi.escape(title), cgi.escape(directory)),
(html.escape(title), html.escape(directory)),
10000
])

@@ -369,10 +369,10 @@ class UiWebsocketPlugin(object):
self.cmd(
"confirm",
[
_["Help distribute all new optional files on site <b>%s</b>"] % cgi.escape(site_title),
_["Help distribute all new optional files on site <b>%s</b>"] % html.escape(site_title),
_["Yes, I want to help!"]
],
lambda (res): self.cbOptionalHelpAll(to, site, True)
lambda res: self.cbOptionalHelpAll(to, site, True)
)
else:
site.settings["autodownloadoptional"] = False

@@ -1 +1 @@
import OptionalManagerPlugin
from . import OptionalManagerPlugin

@@ -96,8 +96,8 @@ class ContentDbPlugin(object):
gevent.spawn_later(60*60, self.savePeers, site, spawn=True)
def saveAllPeers(self):
for site in self.sites.values():
for site in list(self.sites.values()):
try:
self.savePeers(site)
except Exception, err:
except Exception as err:
site.log.error("Save peer error: %s" % err)

@@ -1,2 +1,2 @@
import PeerDbPlugin
from . import PeerDbPlugin
@@ -1,14 +1,11 @@
import re
import os
import cgi
import html
import sys
import math
import time
import json
try:
import cStringIO as StringIO
except:
import StringIO
import io
import gevent

@@ -17,7 +14,7 @@ from Plugin import PluginManager
from Debug import Debug
from Translate import Translate
from util import helper
from ZipStream import ZipStream
from .ZipStream import ZipStream
plugin_dir = "plugins/Sidebar"
media_dir = plugin_dir + "/media"

@@ -46,7 +43,7 @@ class UiRequestPlugin(object):
from Debug import DebugMedia
DebugMedia.merge(plugin_media_file)
if ext == "js":
yield _.translateData(open(plugin_media_file).read())
yield _.translateData(open(plugin_media_file).read()).encode("utf8")
else:
for part in self.actionFile(plugin_media_file, send_header=False):
yield part

@@ -84,15 +81,13 @@ class UiRequestPlugin(object):
yield data
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
def sidebarRenderPeerStats(self, body, site):
connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected])
connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")])
onion = len([peer_id for peer_id in site.peers.keys() if ".onion" in peer_id])
local = len([peer for peer in site.peers.values() if helper.isPrivateIp(peer.ip)])
connected = len([peer for peer in list(site.peers.values()) if peer.connection and peer.connection.connected])
connectable = len([peer_id for peer_id in list(site.peers.keys()) if not peer_id.endswith(":0")])
onion = len([peer_id for peer_id in list(site.peers.keys()) if ".onion" in peer_id])
local = len([peer for peer in list(site.peers.values()) if helper.isPrivateIp(peer.ip)])
peers_total = len(site.peers)
# Add myself

@@ -111,7 +106,7 @@ class UiWebsocketPlugin(object):
percent_connectable = percent_connected = percent_onion = 0
if local:
local_html = _(u"<li class='color-yellow'><span>{_[Local]}:</span><b>{local}</b></li>")
local_html = _("<li class='color-yellow'><span>{_[Local]}:</span><b>{local}</b></li>")
else:
local_html = ""
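The u"..." prefixes dropped throughout the sidebar templates below are redundant in Python 3, where every plain string literal is already unicode. A trivial illustration:

    template = "<li class='color-yellow'><span>Local:</span><b>%s</b></li>"  # already unicode
    assert template == u"" + template    # the u prefix is still accepted but changes nothing
    print(template % 3)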
@@ -122,7 +117,7 @@ class UiWebsocketPlugin(object):
",".join(peer_ips)
)
body.append(_(u"""
body.append(_("""
<li>
<label>
{_[Peers]}

@@ -155,7 +150,7 @@ class UiWebsocketPlugin(object):
percent_recv = 0.5
percent_sent = 0.5
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Data transfer]}</label>
<ul class='graph graph-stacked'>

@@ -170,7 +165,7 @@ class UiWebsocketPlugin(object):
"""))
def sidebarRenderFileStats(self, body, site):
body.append(_(u"""
body.append(_("""
<li>
<label>
{_[Files]}

@@ -198,7 +193,7 @@ class UiWebsocketPlugin(object):
content = site.content_manager.contents[inner_path]
if "files" not in content or content["files"] is None:
continue
for file_name, file_details in content["files"].items():
for file_name, file_details in list(content["files"].items()):
size_total += file_details["size"]
ext = file_name.split(".")[-1]
size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"]

@@ -236,7 +231,7 @@ class UiWebsocketPlugin(object):
percent = 100 * (float(size) / size_total)
percent = math.floor(percent * 100) / 100 # Floor to 2 digits
body.append(
u"""<li style='width: %.2f%%' class='%s back-%s' title="%s"></li>""" %
"""<li style='width: %.2f%%' class='%s back-%s' title="%s"></li>""" %
(percent, _[extension], color, _[extension])
)

@@ -262,7 +257,7 @@ class UiWebsocketPlugin(object):
else:
size_formatted = "%.0fkB" % (size / 1024)
body.append(u"<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, _[title], size_formatted))
body.append("<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, _[title], size_formatted))
body.append("</ul></li>")

@@ -272,9 +267,9 @@ class UiWebsocketPlugin(object):
size_limit = site.getSizeLimit()
percent_used = size / size_limit
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Size limit]} <small>({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,d}MB)</small></label>
<label>{_[Size limit]} <small>({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,.0f}MB)</small></label>
<input type='text' class='text text-num' value="{size_limit}" id='input-sitelimit'/><span class='text-post'>MB</span>
<a href='#Set' class='button' id='button-sitelimit'>{_[Set]}</a>
</li>

@@ -292,7 +287,7 @@ class UiWebsocketPlugin(object):
size_formatted_total = size_total / 1024 / 1024
size_formatted_downloaded = size_downloaded / 1024 / 1024
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Optional files]}</label>
<ul class='graph'>

@@ -314,14 +309,14 @@ class UiWebsocketPlugin(object):
else:
checked = ""
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Download and help distribute all files]}</label>
<input type="checkbox" class="checkbox" id="checkbox-autodownloadoptional" {checked}/><div class="checkbox-skin"></div>
"""))
autodownload_bigfile_size_limit = int(site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit))
body.append(_(u"""
body.append(_("""
<div class='settings-autodownloadoptional'>
<label>{_[Auto download big file size limit]}</label>
<input type='text' class='text text-num' value="{autodownload_bigfile_size_limit}" id='input-autodownload_bigfile_size_limit'/><span class='text-post'>MB</span>

@@ -331,16 +326,16 @@ class UiWebsocketPlugin(object):
body.append("</li>")
def sidebarRenderBadFiles(self, body, site):
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Needs to be updated]}:</label>
<ul class='filelist'>
"""))
i = 0
for bad_file, tries in site.bad_files.iteritems():
for bad_file, tries in site.bad_files.items():
i += 1
body.append(_(u"""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""", {
body.append(_("""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""", {
"bad_file_path": bad_file,
"bad_filename": helper.getFilename(bad_file),
"tries": _.pluralize(tries, "{} try", "{} tries")

@@ -350,7 +345,7 @@ class UiWebsocketPlugin(object):
if len(site.bad_files) > 30:
num_bad_files = len(site.bad_files) - 30
body.append(_(u"""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))
body.append(_("""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))
body.append("""
</ul>

@@ -363,11 +358,11 @@ class UiWebsocketPlugin(object):
size = float(site.storage.getSize(inner_path)) / 1024
feeds = len(site.storage.db.schema.get("feeds", {}))
else:
inner_path = _[u"No database found"]
inner_path = _["No database found"]
size = 0.0
feeds = 0
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Database]} <small>({size:.2f}kB, {_[search feeds]}: {_[{feeds} query]})</small></label>
<div class='flex'>

@@ -385,14 +380,14 @@ class UiWebsocketPlugin(object):
quota = rules["max_size"] / 1024
try:
content = site.content_manager.contents["data/users/%s/content.json" % auth_address]
used = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])
used = len(json.dumps(content)) + sum([file["size"] for file in list(content["files"].values())])
except:
used = 0
used = used / 1024
else:
quota = used = 0
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Identity address]} <small>({_[limit used]}: {used:.2f}kB / {quota:.2f}kB)</small></label>
<div class='flex'>

@@ -411,7 +406,7 @@ class UiWebsocketPlugin(object):
class_pause = "hidden"
class_resume = ""
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Site control]}</label>
<a href='#Update' class='button noupdate' id='button-update'>{_[Update]}</a>

@@ -423,7 +418,7 @@ class UiWebsocketPlugin(object):
|
|||
|
||||
donate_key = site.content_manager.contents.get("content.json", {}).get("donate", True)
|
||||
site_address = self.site.address
|
||||
body.append(_(u"""
|
||||
body.append(_("""
|
||||
<li>
|
||||
<label>{_[Site address]}</label><br>
|
||||
<div class='flex'>
|
||||
|
@ -431,8 +426,8 @@ class UiWebsocketPlugin(object):
|
|||
"""))
|
||||
if donate_key == False or donate_key == "":
|
||||
pass
|
||||
elif (type(donate_key) == str or type(donate_key) == unicode) and len(donate_key) > 0:
|
||||
body.append(_(u"""
|
||||
elif (type(donate_key) == str or type(donate_key) == str) and len(donate_key) > 0:
|
||||
body.append(_("""
|
||||
</div>
|
||||
</li>
|
||||
<li>
|
||||
|
@ -441,10 +436,10 @@ class UiWebsocketPlugin(object):
|
|||
{donate_key}
|
||||
"""))
|
||||
else:
|
||||
body.append(_(u"""
|
||||
body.append(_("""
|
||||
<a href='bitcoin:{site_address}' class='button' id='button-donate'>{_[Donate]}</a>
|
||||
"""))
|
||||
body.append(_(u"""
|
||||
body.append(_("""
|
||||
</div>
|
||||
</li>
|
||||
"""))
|
||||
|
@ -455,7 +450,7 @@ class UiWebsocketPlugin(object):
|
|||
else:
|
||||
checked = ""
|
||||
|
||||
body.append(_(u"""
|
||||
body.append(_("""
|
||||
<h2 class='owned-title'>{_[This is my site]}</h2>
|
||||
<input type="checkbox" class="checkbox" id="checkbox-owned" {checked}/><div class="checkbox-skin"></div>
|
||||
"""))
|
||||
|
@ -464,7 +459,7 @@ class UiWebsocketPlugin(object):
|
|||
title = site.content_manager.contents.get("content.json", {}).get("title", "")
|
||||
description = site.content_manager.contents.get("content.json", {}).get("description", "")
|
||||
|
||||
body.append(_(u"""
|
||||
body.append(_("""
|
||||
<li>
|
||||
<label for='settings-title'>{_[Site title]}</label>
|
||||
<input type='text' class='text' value="{title}" id='settings-title'/>
|
||||
|
@ -483,17 +478,17 @@ class UiWebsocketPlugin(object):
|
|||
def sidebarRenderContents(self, body, site):
|
||||
has_privatekey = bool(self.user.getSiteData(site.address, create=False).get("privatekey"))
|
||||
if has_privatekey:
|
||||
tag_privatekey = _(u"{_[Private key saved.]} <a href='#Forgot+private+key' id='privatekey-forgot' class='link-right'>{_[Forgot]}</a>")
|
||||
tag_privatekey = _("{_[Private key saved.]} <a href='#Forgot+private+key' id='privatekey-forgot' class='link-right'>{_[Forgot]}</a>")
|
||||
else:
|
||||
tag_privatekey = _(u"<a href='#Add+private+key' id='privatekey-add' class='link-right'>{_[Add saved private key]}</a>")
|
||||
tag_privatekey = _("<a href='#Add+private+key' id='privatekey-add' class='link-right'>{_[Add saved private key]}</a>")
|
||||
|
||||
body.append(_(u"""
|
||||
body.append(_("""
|
||||
<li>
|
||||
<label>{_[Content publishing]} <small class='label-right'>{tag_privatekey}</small></label>
|
||||
""".replace("{tag_privatekey}", tag_privatekey)))
|
||||
|
||||
# Choose content you want to sign
|
||||
body.append(_(u"""
|
||||
body.append(_("""
|
||||
<div class='flex'>
|
||||
<input type='text' class='text' value="content.json" id='input-contents'/>
|
||||
<a href='#Sign-and-Publish' id='button-sign-publish' class='button'>{_[Sign and publish]}</a>
|
||||
|
@ -502,8 +497,8 @@ class UiWebsocketPlugin(object):
|
|||
"""))
|
||||
|
||||
contents = ["content.json"]
|
||||
contents += site.content_manager.contents.get("content.json", {}).get("includes", {}).keys()
|
||||
body.append(_(u"<div class='contents'>{_[Choose]}: "))
|
||||
contents += list(site.content_manager.contents.get("content.json", {}).get("includes", {}).keys())
|
||||
body.append(_("<div class='contents'>{_[Choose]}: "))
|
||||
for content in contents:
|
||||
body.append(_("<a href='{content}' class='contents-content'>{content}</a> "))
|
||||
body.append("</div>")
|
||||
|
@ -520,7 +515,7 @@ class UiWebsocketPlugin(object):
|
|||
|
||||
body.append("<div>")
|
||||
body.append("<a href='#Close' class='close'>×</a>")
|
||||
body.append("<h1>%s</h1>" % cgi.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True))
|
||||
body.append("<h1>%s</h1>" % html.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True))
|
||||
|
||||
body.append("<div class='globe loading'></div>")
|
||||
|
||||
|
@ -554,7 +549,6 @@ class UiWebsocketPlugin(object):
|
|||
self.response(to, "".join(body))
|
||||
|
||||
def downloadGeoLiteDb(self, db_path):
import urllib
import gzip
import shutil
from util import helper

@ -566,12 +560,13 @@ class UiWebsocketPlugin(object):
"https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz"
]
for db_url in db_urls:
download_err = None
try:
# Download
response = helper.httpRequest(db_url)
data_size = response.getheader('content-length')
data_recv = 0
data = StringIO.StringIO()
data = io.BytesIO()
while True:
buff = response.read(1024 * 512)
if not buff:
@ -592,11 +587,12 @@ class UiWebsocketPlugin(object):
|
|||
time.sleep(2) # Wait for notify animation
|
||||
return True
|
||||
except Exception as err:
|
||||
download_err = err
|
||||
self.log.error("Error downloading %s: %s" % (db_url, err))
|
||||
pass
|
||||
self.cmd("progress", [
|
||||
"geolite-info",
|
||||
_["GeoLite2 City database download error: {}!<br>Please download manually and unpack to data dir:<br>{}"].format(err, db_urls[0]),
|
||||
_["GeoLite2 City database download error: {}!<br>Please download manually and unpack to data dir:<br>{}"].format(download_err, db_urls[0]),
|
||||
-100
|
||||
])
|
||||
|
||||
|
@ -629,14 +625,14 @@ class UiWebsocketPlugin(object):
|
|||
return loc
|
||||
|
||||
def getPeerLocations(self, peers):
|
||||
import maxminddb
|
||||
from . import maxminddb
|
||||
db_path = config.data_dir + '/GeoLite2-City.mmdb'
|
||||
if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0:
|
||||
if not self.downloadGeoLiteDb(db_path):
|
||||
return False
|
||||
geodb = maxminddb.open_database(db_path)
|
||||
|
||||
peers = peers.values()
|
||||
peers = list(peers.values())
|
||||
# Place bars
|
||||
peer_locations = []
|
||||
placed = {} # Already placed bars here
|
||||
|
@ -704,9 +700,9 @@ class UiWebsocketPlugin(object):
|
|||
globe_data += [peer_location["lat"], peer_location["lon"], height]
|
||||
|
||||
self.response(to, globe_data)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err))
|
||||
self.response(to, {"error": err})
|
||||
self.response(to, {"error": str(err)})
|
||||
|
||||
def actionSiteSetOwned(self, to, owned):
|
||||
permissions = self.getPermissions(to)
@ -1,14 +1,14 @@
import cStringIO as StringIO
import io
import os
import zipfile


class ZipStream(file):
class ZipStream(object):
def __init__(self, dir_path):
self.dir_path = dir_path
self.pos = 0
self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
self.buff = StringIO.StringIO()
self.buff = io.BytesIO()
self.file_list = self.getFileList()

def getFileList(self):
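A minimal, standalone sketch (not part of the commit) of the buffer swap made in the ZipStream hunk above: Python 3's zipfile writes bytes, so the in-memory buffer has to be io.BytesIO instead of cStringIO. The file name and payload below are invented for illustration.

import io
import zipfile

buff = io.BytesIO()  # binary in-memory buffer; Python 3 replacement for cStringIO.StringIO
with zipfile.ZipFile(buff, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as zf:
    zf.writestr("example.txt", b"hello")  # zipfile hands bytes to the underlying buffer
data = buff.getvalue()  # the finished zip archive as a bytes object
print(type(data), len(data))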
@ -1 +1 @@
import SidebarPlugin
from . import SidebarPlugin

@ -1 +1 @@
import StatsPlugin
from . import StatsPlugin
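An illustrative two-file sketch (not from the commit) of the pattern behind these one-line plugin __init__ changes: Python 3 removed implicit relative imports, so a package has to name its own modules explicitly with `from . import`. The package and module names below are invented.

# mypackage/__init__.py  (hypothetical package)
# Python 2 accepted the implicit form:
#     import sibling
# In Python 3 that would look for a top-level module called "sibling" and fail,
# so the package-relative form is required:
from . import sibling

# mypackage/sibling.py  (hypothetical module)
def hello():
    return "hello from sibling"

# Usage from outside the package:
#     import mypackage
#     print(mypackage.sibling.hello())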
@ -15,7 +15,7 @@ class UiRequestPlugin(object):
path_parts = self.parsePath(path)
kwargs["header_length"] = False
file_generator = super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs)
if "next" in dir(file_generator): # File found and generator returned
if "__next__" in dir(file_generator): # File found and generator returned
site = self.server.sites.get(path_parts["address"])
return self.actionPatchFile(site, path_parts["inner_path"], file_generator)
else:

@ -28,10 +28,10 @@ class UiRequestPlugin(object):
file_generator = super(UiRequestPlugin, self).actionUiMedia(path)
if translate.lang != "en" and path.endswith(".js"):
s = time.time()
data = "".join(list(file_generator))
data = translate.translateData(data)
data = b"".join(list(file_generator))
data = translate.translateData(data.decode("utf8"))
self.log.debug("Patched %s (%s bytes) in %.3fs" % (path, len(data), time.time() - s))
return iter([data])
return iter([data.encode("utf8")])
else:
return file_generator
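A small self-contained sketch (not part of the commit) of the bytes/str discipline the hunk above adopts: file iterators yield bytes in Python 3, so chunks are joined with b"", decoded to str for text-level patching, then re-encoded before being handed back. The translate step is faked here with a plain replace.

def file_generator():
    yield b"var lang={lang};"  # binary chunks, as a static-file iterator would produce
    yield b" console.log('hi');"

data = b"".join(file_generator())  # join as bytes
text = data.decode("utf8")  # decode once for text processing
text = text.replace("{lang}", "en")  # stand-in for translate.translateData()
patched = text.encode("utf8")  # re-encode before returning to the HTTP layer
print(patched)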
@ -49,12 +49,12 @@ class UiRequestPlugin(object):
|
|||
if not lang_file_exist or inner_path not in content_json.get("translate", []):
|
||||
for part in file_generator:
|
||||
if inner_path.endswith(".html"):
|
||||
yield part.replace("lang={lang}", "lang=" + str(translate.lang)) # lang get parameter to .js file to avoid cache
|
||||
yield part.replace(b"lang={lang}", b"lang=%s" % translate.lang.encode("utf8")) # lang get parameter to .js file to avoid cache
|
||||
else:
|
||||
yield part
|
||||
else:
|
||||
s = time.time()
|
||||
data = "".join(list(file_generator))
|
||||
data = b"".join(list(file_generator)).decode("utf8")
|
||||
|
||||
# if site.content_manager.contents["content.json"]["files"].get(lang_file):
|
||||
site.needFile(lang_file, priority=10)
|
||||
|
@ -63,9 +63,9 @@ class UiRequestPlugin(object):
|
|||
data = translate.translateData(data, site.storage.loadJson(lang_file), "js")
|
||||
else:
|
||||
data = translate.translateData(data, site.storage.loadJson(lang_file), "html")
|
||||
data = data.replace("lang={lang}", "lang=" + str(translate.lang)) # lang get parameter to .js file to avoid cache
|
||||
data = data.replace(b"lang={lang}", b"lang=%s" % translate.lang.encode("utf8")) # lang get parameter to .js file to avoid cache
|
||||
except Exception as err:
|
||||
site.log.error("Error loading translation file %s: %s" % (lang_file, err))
|
||||
|
||||
self.log.debug("Patched %s (%s bytes) in %.3fs" % (inner_path, len(data), time.time() - s))
|
||||
yield data
|
||||
yield data.encode("utf8")
|
||||
|
|
|
@ -1 +1 @@
|
|||
import TranslateSitePlugin
|
||||
from . import TranslateSitePlugin
|
||||
|
|
|
@ -17,7 +17,7 @@ class ActionsPlugin(object):
|
|||
|
||||
def main(self):
|
||||
global notificationicon, winfolders
|
||||
from lib import notificationicon, winfolders
|
||||
from .lib import notificationicon, winfolders
|
||||
import gevent.threadpool
|
||||
|
||||
self.main = sys.modules["main"]
|
||||
|
@ -25,7 +25,7 @@ class ActionsPlugin(object):
|
|||
fs_encoding = sys.getfilesystemencoding()
|
||||
|
||||
icon = notificationicon.NotificationIcon(
|
||||
os.path.join(os.path.dirname(os.path.abspath(__file__).decode(fs_encoding)), 'trayicon.ico'),
|
||||
os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'),
|
||||
"ZeroNet %s" % config.version
|
||||
)
|
||||
self.icon = icon
|
||||
|
@ -137,7 +137,7 @@ class ActionsPlugin(object):
|
|||
cmd += ' --open_browser ""'
|
||||
cmd = cmd.decode(sys.getfilesystemencoding())
|
||||
|
||||
return u"""
|
||||
return """
|
||||
@echo off
|
||||
chcp 65001 > nul
|
||||
set PYTHONIOENCODING=utf-8
|
||||
|
|
|
@ -1,4 +1,4 @@
import sys

if sys.platform == 'win32':
import TrayiconPlugin
from . import TrayiconPlugin

@ -190,27 +190,27 @@ DefWindowProc = ctypes.windll.user32.DefWindowProcW
DefWindowProc.restype = ctypes.c_int
DefWindowProc.argtypes = [ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM]

WS_OVERLAPPED = 0x00000000L
WS_POPUP = 0x80000000L
WS_CHILD = 0x40000000L
WS_MINIMIZE = 0x20000000L
WS_VISIBLE = 0x10000000L
WS_DISABLED = 0x08000000L
WS_CLIPSIBLINGS = 0x04000000L
WS_CLIPCHILDREN = 0x02000000L
WS_MAXIMIZE = 0x01000000L
WS_CAPTION = 0x00C00000L
WS_BORDER = 0x00800000L
WS_DLGFRAME = 0x00400000L
WS_VSCROLL = 0x00200000L
WS_HSCROLL = 0x00100000L
WS_SYSMENU = 0x00080000L
WS_THICKFRAME = 0x00040000L
WS_GROUP = 0x00020000L
WS_TABSTOP = 0x00010000L
WS_OVERLAPPED = 0x00000000
WS_POPUP = 0x80000000
WS_CHILD = 0x40000000
WS_MINIMIZE = 0x20000000
WS_VISIBLE = 0x10000000
WS_DISABLED = 0x08000000
WS_CLIPSIBLINGS = 0x04000000
WS_CLIPCHILDREN = 0x02000000
WS_MAXIMIZE = 0x01000000
WS_CAPTION = 0x00C00000
WS_BORDER = 0x00800000
WS_DLGFRAME = 0x00400000
WS_VSCROLL = 0x00200000
WS_HSCROLL = 0x00100000
WS_SYSMENU = 0x00080000
WS_THICKFRAME = 0x00040000
WS_GROUP = 0x00020000
WS_TABSTOP = 0x00010000

WS_MINIMIZEBOX = 0x00020000L
WS_MAXIMIZEBOX = 0x00010000L
WS_MINIMIZEBOX = 0x00020000
WS_MAXIMIZEBOX = 0x00010000

WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED |
WS_CAPTION |
|
@ -497,7 +497,7 @@ DispatchMessage.argtypes = [ctypes.POINTER(MSG)]
|
|||
|
||||
def LoadIcon(iconfilename, small=False):
|
||||
return LoadImage(0,
|
||||
unicode(iconfilename),
|
||||
str(iconfilename),
|
||||
IMAGE_ICON,
|
||||
16 if small else 0,
|
||||
16 if small else 0,
|
||||
|
@ -506,15 +506,15 @@ def LoadIcon(iconfilename, small=False):
|
|||
|
||||
class NotificationIcon(object):
|
||||
def __init__(self, iconfilename, tooltip=None):
|
||||
assert os.path.isfile(unicode(iconfilename)), "{} doesn't exist".format(iconfilename)
|
||||
self._iconfile = unicode(iconfilename)
|
||||
assert os.path.isfile(str(iconfilename)), "{} doesn't exist".format(iconfilename)
|
||||
self._iconfile = str(iconfilename)
|
||||
self._hicon = LoadIcon(self._iconfile, True)
|
||||
assert self._hicon, "Failed to load {}".format(iconfilename)
|
||||
#self._pumpqueue = Queue.Queue()
|
||||
self._die = False
|
||||
self._timerid = None
|
||||
self._uid = uuid.uuid4()
|
||||
self._tooltip = unicode(tooltip) if tooltip else u''
|
||||
self._tooltip = str(tooltip) if tooltip else ''
|
||||
#self._thread = threading.Thread(target=self._run)
|
||||
#self._thread.start()
|
||||
self._info_bubble = None
|
||||
|
@ -525,7 +525,7 @@ class NotificationIcon(object):
|
|||
if self._info_bubble:
|
||||
info_bubble = self._info_bubble
|
||||
self._info_bubble = None
|
||||
message = unicode(self._info_bubble)
|
||||
message = str(self._info_bubble)
|
||||
iconinfo.uFlags |= NIF_INFO
|
||||
iconinfo.szInfo = message
|
||||
iconinfo.szInfoTitle = message
|
||||
|
@ -535,7 +535,7 @@ class NotificationIcon(object):
|
|||
|
||||
|
||||
def _run(self):
|
||||
self.WM_TASKBARCREATED = ctypes.windll.user32.RegisterWindowMessageW(u'TaskbarCreated')
|
||||
self.WM_TASKBARCREATED = ctypes.windll.user32.RegisterWindowMessageW('TaskbarCreated')
|
||||
|
||||
self._windowproc = WNDPROC(self._callback)
|
||||
self._hwnd = GenerateDummyWindow(self._windowproc, str(self._uid))
|
||||
|
@ -562,11 +562,11 @@ class NotificationIcon(object):
|
|||
ret = GetMessage(ctypes.pointer(message), 0, 0, 0)
|
||||
TranslateMessage(ctypes.pointer(message))
|
||||
DispatchMessage(ctypes.pointer(message))
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
# print "NotificationIcon error", err, message
|
||||
message = MSG()
|
||||
time.sleep(0.125)
|
||||
print "Icon thread stopped, removing icon..."
|
||||
print("Icon thread stopped, removing icon...")
|
||||
|
||||
Shell_NotifyIcon(NIM_DELETE, ctypes.cast(ctypes.pointer(iconinfo), ctypes.POINTER(NOTIFYICONDATA)))
|
||||
ctypes.windll.user32.DestroyWindow(self._hwnd)
|
||||
|
@ -586,7 +586,7 @@ class NotificationIcon(object):
|
|||
item_map = {}
|
||||
for fs in self.items:
|
||||
iidx += 1
|
||||
if isinstance(fs, basestring):
|
||||
if isinstance(fs, str):
|
||||
if fs and not fs.strip('-_='):
|
||||
AppendMenu(menu, MF_SEPARATOR, iidx, fs)
|
||||
else:
|
||||
|
@ -595,7 +595,7 @@ class NotificationIcon(object):
|
|||
if callable(fs[0]):
|
||||
itemstring = fs[0]()
|
||||
else:
|
||||
itemstring = unicode(fs[0])
|
||||
itemstring = str(fs[0])
|
||||
flags = MF_STRING
|
||||
if itemstring.startswith("!"):
|
||||
itemstring = itemstring[1:]
|
||||
|
@ -660,8 +660,8 @@ class NotificationIcon(object):
|
|||
time.sleep(0.2)
|
||||
try:
|
||||
Shell_NotifyIcon(NIM_DELETE, self.iconinfo)
|
||||
except Exception, err:
|
||||
print "Icon remove error", err
|
||||
except Exception as err:
|
||||
print("Icon remove error", err)
|
||||
ctypes.windll.user32.DestroyWindow(self._hwnd)
|
||||
ctypes.windll.user32.DestroyIcon(self._hicon)
|
||||
|
||||
|
@ -693,7 +693,7 @@ if __name__ == "__main__":
|
|||
|
||||
def greet():
|
||||
ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0)
|
||||
print "Hello"
|
||||
print("Hello")
|
||||
|
||||
def quit():
|
||||
ni._die = True
|
||||
|
@ -724,6 +724,6 @@ if __name__ == "__main__":
|
|||
|
||||
@atexit.register
|
||||
def goodbye():
|
||||
print "You are now leaving the Python sector."
|
||||
print("You are now leaving the Python sector.")
|
||||
|
||||
ni._run()
|
||||
|
|
|
@ -10,7 +10,8 @@ Luke Pinner - Environment.gov.au, 2010 February 10
|
|||
|
||||
#Imports use _syntax to mask them from autocomplete IDE's
|
||||
import ctypes as _ctypes
|
||||
from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH, create_unicode_buffer as _cub
|
||||
from ctypes import create_unicode_buffer as _cub
|
||||
from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH
|
||||
_SHGetFolderPath = _ctypes.windll.shell32.SHGetFolderPathW
|
||||
|
||||
#public special folder constants
|
||||
|
@ -49,5 +50,5 @@ def get(intFolder):
|
|||
|
||||
if __name__ == "__main__":
|
||||
import os
|
||||
print get(STARTUP)
|
||||
print(get(STARTUP))
|
||||
open(get(STARTUP)+"\\zeronet.cmd", "w").write("cd /D %s\r\nzeronet.py" % os.getcwd())
|
|
@ -1,7 +1,8 @@
|
|||
import io
|
||||
|
||||
from Plugin import PluginManager
|
||||
from Config import config
|
||||
from Translate import Translate
|
||||
from cStringIO import StringIO
|
||||
|
||||
|
||||
if "_" not in locals():
|
||||
|
@ -47,7 +48,7 @@ class UiRequestPlugin(object):
|
|||
else:
|
||||
data = open(file_path).read()
|
||||
|
||||
return self.actionFile(file_path, file_obj=StringIO(data), file_size=len(data))
|
||||
return self.actionFile(file_path, file_obj=io.BytesIO(data), file_size=len(data))
|
||||
else:
|
||||
return super(UiRequestPlugin, self).actionUiMedia(path)
|
||||
|
||||
|
@ -58,7 +59,7 @@ class UiWebsocketPlugin(object):
|
|||
back = {}
|
||||
config_values = vars(config.arguments)
|
||||
config_values.update(config.pending_changes)
|
||||
for key, val in config_values.iteritems():
|
||||
for key, val in config_values.items():
|
||||
if key not in config.keys_api_change_allowed:
|
||||
continue
|
||||
is_pending = key in config.pending_changes
|
||||
|
|
|
@ -1 +1 @@
|
|||
import UiConfigPlugin
|
||||
from . import UiConfigPlugin
|
||||
|
|
|
@ -3,7 +3,7 @@ import time
|
|||
from util import helper
|
||||
|
||||
from Plugin import PluginManager
|
||||
from BootstrapperDb import BootstrapperDb
|
||||
from .BootstrapperDb import BootstrapperDb
|
||||
from Crypt import CryptRsa
|
||||
from Config import config
|
||||
|
||||
|
@ -70,7 +70,7 @@ class FileRequestPlugin(object):
|
|||
|
||||
hashes_changed = 0
|
||||
db.execute("BEGIN")
|
||||
for onion, onion_hashes in onion_to_hash.iteritems():
|
||||
for onion, onion_hashes in onion_to_hash.items():
|
||||
hashes_changed += db.peerAnnounce(
|
||||
ip_type="onion",
|
||||
address=onion,
|
||||
|
@ -113,7 +113,7 @@ class FileRequestPlugin(object):
|
|||
|
||||
hash_peers = db.peerList(
|
||||
hash,
|
||||
address=self.connection.ip, onions=onion_to_hash.keys(), port=params["port"],
|
||||
address=self.connection.ip, onions=list(onion_to_hash.keys()), port=params["port"],
|
||||
limit=min(limit, params["need_num"]), need_types=params["need_types"], order=order
|
||||
)
|
||||
if "ip4" in params["need_types"]: # Backward compatibility
|
||||
|
|
|
@ -78,7 +78,7 @@ class TestBootstrapper:
|
|||
assert len(res["peers"][0][ip_type]) == 1
|
||||
|
||||
# Test DB cleanup
|
||||
assert map(lambda row: row[0], bootstrapper_db.execute("SELECT address FROM peer").fetchall()) == [file_server.ip_external] # 127.0.0.1 never get added to db
|
||||
assert [row[0] for row in bootstrapper_db.execute("SELECT address FROM peer").fetchall()] == [file_server.ip_external] # 127.0.0.1 never get added to db
|
||||
|
||||
# Delete peers
|
||||
bootstrapper_db.execute("DELETE FROM peer WHERE address = ?", [file_server.ip_external])
|
||||
|
|
|
@ -1 +1 @@
|
|||
import BootstrapperPlugin
|
||||
from . import BootstrapperPlugin
|
|
@ -54,7 +54,7 @@ class SiteManagerPlugin(object):
|
|||
res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read()
|
||||
data = json.loads(res)["data"]["value"]
|
||||
if "zeronet" in data:
|
||||
for key, val in data["zeronet"].iteritems():
|
||||
for key, val in data["zeronet"].items():
|
||||
self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours
|
||||
self.saveDnsCache()
|
||||
return data["zeronet"].get(sub_domain)
|
||||
|
@ -76,7 +76,7 @@ class SiteManagerPlugin(object):
|
|||
with gevent.Timeout(5, Exception("Timeout: 5s")):
|
||||
res = Http.get("https://dnschain.info/bit/d/%s" % re.sub(r"\.bit$", "", top_domain)).read()
|
||||
data = json.loads(res)["value"]
|
||||
for key, val in data["zeronet"].iteritems():
|
||||
for key, val in data["zeronet"].items():
|
||||
self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours
|
||||
self.saveDnsCache()
|
||||
return data["zeronet"].get(sub_domain)
|
||||
|
|
|
@ -1 +1 @@
|
|||
import DonationMessagePlugin
|
||||
from . import DonationMessagePlugin
|
||||
|
|
|
@ -5,11 +5,11 @@ import json
|
|||
from Config import config
|
||||
from Plugin import PluginManager
|
||||
from Crypt import CryptBitcoin
|
||||
import UserPlugin
|
||||
from . import UserPlugin
|
||||
|
||||
try:
|
||||
local_master_addresses = set(json.load(open("%s/users.json" % config.data_dir)).keys()) # Users in users.json
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
local_master_addresses = set()
|
||||
|
||||
|
||||
|
@ -59,7 +59,7 @@ class UiRequestPlugin(object):
|
|||
return False
|
||||
|
||||
elif loggedin:
|
||||
back = back_generator.next()
|
||||
back = next(back_generator)
|
||||
inject_html = """
|
||||
<!-- Multiuser plugin -->
|
||||
<script nonce="{script_nonce}">
|
||||
|
|
|
@ -1 +1 @@
|
|||
import MultiuserPlugin
|
||||
from . import MultiuserPlugin
|
||||
|
|
|
@ -16,9 +16,9 @@ if config.tor != "disable":
|
|||
monkey.patch_time()
|
||||
monkey.patch_socket(dns=False)
|
||||
monkey.patch_thread()
|
||||
print "Stem Port Plugin: modules are patched."
|
||||
print("Stem Port Plugin: modules are patched.")
|
||||
else:
|
||||
print "Stem Port Plugin: Tor mode disabled. Module patching skipped."
|
||||
print("Stem Port Plugin: Tor mode disabled. Module patching skipped.")
|
||||
|
||||
|
||||
class PatchedControlPort(ControlPort):
|
||||
|
@ -66,14 +66,14 @@ class TorManagerPlugin(object):
|
|||
controller = from_port(port=self.port)
|
||||
controller.authenticate()
|
||||
self.controller = controller
|
||||
self.status = u"Connected (via Stem)"
|
||||
except Exception, err:
|
||||
self.status = "Connected (via Stem)"
|
||||
except Exception as err:
|
||||
print("\n")
|
||||
traceback.print_exc()
|
||||
print("\n")
|
||||
|
||||
self.controller = None
|
||||
self.status = u"Error (%s)" % err
|
||||
self.status = "Error (%s)" % err
|
||||
self.log.error("Tor stem connect error: %s" % Debug.formatException(err))
|
||||
|
||||
return self.controller
|
||||
|
@ -87,8 +87,8 @@ class TorManagerPlugin(object):
|
|||
def resetCircuits(self):
|
||||
try:
|
||||
self.controller.signal(Signal.NEWNYM)
|
||||
except Exception, err:
|
||||
self.status = u"Stem reset circuits error (%s)" % err
|
||||
except Exception as err:
|
||||
self.status = "Stem reset circuits error (%s)" % err
|
||||
self.log.error("Stem reset circuits error: %s" % err)
|
||||
|
||||
|
||||
|
@ -105,8 +105,8 @@ class TorManagerPlugin(object):
|
|||
|
||||
return (service.service_id, service.private_key)
|
||||
|
||||
except Exception, err:
|
||||
self.status = u"AddOnion error (Stem: %s)" % err
|
||||
except Exception as err:
|
||||
self.status = "AddOnion error (Stem: %s)" % err
|
||||
self.log.error("Failed to create hidden service with Stem: " + err)
|
||||
return False
|
||||
|
||||
|
@ -115,8 +115,8 @@ class TorManagerPlugin(object):
|
|||
try:
|
||||
self.controller.remove_ephemeral_hidden_service(address)
|
||||
return True
|
||||
except Exception, err:
|
||||
self.status = u"DelOnion error (Stem: %s)" % err
|
||||
except Exception as err:
|
||||
self.status = "DelOnion error (Stem: %s)" % err
|
||||
self.log.error("Stem failed to delete %s.onion: %s" % (address, err))
|
||||
self.disconnect() # Why?
|
||||
return False
|
||||
|
|
|
@ -2,9 +2,9 @@ try:
from stem.control import Controller
stem_found = True
except Exception as err:
print "STEM NOT FOUND! %s" % err
print(("STEM NOT FOUND! %s" % err))
stem_found = False

if stem_found:
print "Starting Stem plugin..."
import StemPortPlugin
print("Starting Stem plugin...")
from . import StemPortPlugin

@ -1 +1 @@
import UiPasswordPlugin
from . import UiPasswordPlugin
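A compact sketch (not from the commit) of the two syntax changes visible in the hunks above: print is a function in Python 3, and exception objects are bound with `as` rather than a comma.

try:
    from stem.control import Controller  # may legitimately be missing; that is the case being handled
    stem_found = True
except Exception as err:  # Python 2's "except Exception, err:" is a syntax error in Python 3
    print("STEM NOT FOUND! %s" % err)  # print() call instead of the print statement
    stem_found = False

if stem_found:
    print("Starting Stem plugin...")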
@ -3,7 +3,7 @@ import gevent
|
|||
from Plugin import PluginManager
|
||||
from Config import config
|
||||
from Debug import Debug
|
||||
from domainLookup import lookupDomain
|
||||
from .domainLookup import lookupDomain
|
||||
|
||||
allow_reload = False # No reload supported
|
||||
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
import UiRequestPlugin
|
||||
import SiteManagerPlugin
|
||||
from . import UiRequestPlugin
|
||||
from . import SiteManagerPlugin
|
|
@ -37,7 +37,7 @@
try:
import http.client as httplib
except ImportError:
import httplib
import http.client
import base64
import decimal
import json

@ -45,7 +45,7 @@ import logging
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
import urllib.parse

USER_AGENT = "AuthServiceProxy/0.1"
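A minimal sketch (not part of the commit) of the renamed standard-library modules the import hunks above switch to: Python 2's httplib and urlparse became http.client and urllib.parse. The URL is an invented RPC-style example; no request is actually sent.

import http.client
import urllib.parse

url = urllib.parse.urlparse("http://user:pass@127.0.0.1:8332/")  # hypothetical service URL
conn = http.client.HTTPConnection(url.hostname, url.port or 80, timeout=10)
print(url.hostname, url.port)  # parsed pieces used to build the connection
conn.close()  # constructing HTTPConnection does not open a socket, so nothing was sent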
@ -83,7 +83,7 @@ class AuthServiceProxy(object):
|
|||
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
|
||||
self.__service_url = service_url
|
||||
self.__service_name = service_name
|
||||
self.__url = urlparse.urlparse(service_url)
|
||||
self.__url = urllib.parse.urlparse(service_url)
|
||||
if self.__url.port is None:
|
||||
port = 80
|
||||
else:
|
||||
|
@ -106,10 +106,10 @@ class AuthServiceProxy(object):
|
|||
# Callables re-use the connection of the original proxy
|
||||
self.__conn = connection
|
||||
elif self.__url.scheme == 'https':
|
||||
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
|
||||
self.__conn = http.client.HTTPSConnection(self.__url.hostname, port,
|
||||
timeout=timeout)
|
||||
else:
|
||||
self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
|
||||
self.__conn = http.client.HTTPConnection(self.__url.hostname, port,
|
||||
timeout=timeout)
|
||||
|
||||
def __getattr__(self, name):
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
|
||||
from .bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
|
||||
import time, json, os, sys, re, socket
|
||||
|
||||
# Connecting to RPC
|
||||
|
|
|
@ -3,7 +3,7 @@ import sys
|
|||
import os
|
||||
import locale
|
||||
import re
|
||||
import ConfigParser
|
||||
import configparser
|
||||
import logging
|
||||
import logging.handlers
|
||||
import stat
|
||||
|
@ -304,7 +304,7 @@ class Config(object):
|
|||
if "://" in tracker and tracker not in self.trackers:
|
||||
self.trackers.append(tracker)
|
||||
except Exception as err:
|
||||
print "Error loading trackers file: %s" % err
|
||||
print("Error loading trackers file: %s" % err)
|
||||
|
||||
# Find arguments specified for current action
|
||||
def getActionArguments(self):
|
||||
|
@ -316,7 +316,7 @@ class Config(object):
|
|||
|
||||
# Try to find action from argv
|
||||
def getAction(self, argv):
|
||||
actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
|
||||
actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0] # Valid actions
|
||||
found_action = False
|
||||
for action in actions: # See if any in argv
|
||||
if action in argv:
|
||||
|
@ -404,7 +404,7 @@ class Config(object):
|
|||
self.config_file = argv[argv.index("--config_file") + 1]
|
||||
# Load config file
|
||||
if os.path.isfile(self.config_file):
|
||||
config = ConfigParser.ConfigParser(allow_no_value=True)
|
||||
config = configparser.ConfigParser(allow_no_value=True)
|
||||
config.read(self.config_file)
|
||||
for section in config.sections():
|
||||
for key, val in config.items(section):
|
||||
|
@ -570,7 +570,7 @@ class Config(object):
|
|||
try:
|
||||
os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
|
||||
except Exception as err:
|
||||
print "Can't change permission of %s: %s" % (self.log_dir, err)
|
||||
print("Can't change permission of %s: %s" % (self.log_dir, err))
|
||||
|
||||
# Make warning hidden from console
|
||||
logging.WARNING = 15 # Don't display warnings if not in debug mode
|
||||
|
|
|
@ -314,7 +314,7 @@ class Connection(object):
|
|||
self.incomplete_buff_recv += 1
|
||||
self.bytes_recv += buff_len
|
||||
self.server.bytes_recv += buff_len
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log("Stream read error: %s" % Debug.formatException(err))
|
||||
|
||||
if config.debug_socket:
|
||||
|
@ -328,7 +328,7 @@ class Connection(object):
|
|||
if unpacker_stream_bytes:
|
||||
return buff[buff_stream_start + unpacker_stream_bytes:]
|
||||
else:
|
||||
return ""
|
||||
return b""
|
||||
|
||||
# My handshake info
|
||||
def getHandshakeInfo(self):
|
||||
|
@ -476,7 +476,7 @@ class Connection(object):
|
|||
try:
|
||||
self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
|
||||
self.sock_wrapped = True
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
if not config.force_encryption:
|
||||
self.log("Crypt connection error: %s, adding ip %s as broken ssl." % (err, self.ip))
|
||||
self.server.broken_ssl_ips[self.ip] = True
|
||||
|
@ -526,7 +526,7 @@ class Connection(object):
|
|||
message = None
|
||||
with self.send_lock:
|
||||
self.sock.sendall(data)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.close("Send error: %s (cmd: %s)" % (err, stat_key))
|
||||
return False
|
||||
self.last_sent_time = time.time()
|
||||
|
@ -577,9 +577,9 @@ class Connection(object):
|
|||
with gevent.Timeout(10.0, False):
|
||||
try:
|
||||
response = self.request("ping")
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log("Ping error: %s" % Debug.formatException(err))
|
||||
if response and "body" in response and response["body"] == "Pong!":
|
||||
if response and "body" in response and response["body"] == b"Pong!":
|
||||
self.last_ping_delay = time.time() - s
|
||||
return True
|
||||
else:
|
||||
|
@ -608,7 +608,7 @@ class Connection(object):
|
|||
if self.sock:
|
||||
self.sock.shutdown(gevent.socket.SHUT_WR)
|
||||
self.sock.close()
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
if config.debug_socket:
|
||||
self.log("Close error: %s" % err)
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ from gevent.pool import Pool
|
|||
import util
|
||||
from util import helper
|
||||
from Debug import Debug
|
||||
from Connection import Connection
|
||||
from .Connection import Connection
|
||||
from Config import config
|
||||
from Crypt import CryptConnection
|
||||
from Crypt import CryptHash
|
||||
|
@ -94,7 +94,7 @@ class ConnectionServer(object):
|
|||
self.stream_server = StreamServer(
|
||||
(self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
|
||||
)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.info("StreamServer create error: %s" % Debug.formatException(err))
|
||||
|
||||
def listen(self):
|
||||
|
@ -102,7 +102,7 @@ class ConnectionServer(object):
|
|||
gevent.spawn(self.listenProxy)
|
||||
try:
|
||||
self.stream_server.serve_forever()
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.info("StreamServer listen error: %s" % err)
|
||||
|
||||
def stop(self):
|
||||
|
@ -199,7 +199,7 @@ class ConnectionServer(object):
|
|||
connection.close("Connection event return error")
|
||||
raise Exception("Connection event return error")
|
||||
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
connection.close("%s Connect error: %s" % (ip, Debug.formatException(err)))
|
||||
raise err
|
||||
|
||||
|
@ -346,6 +346,6 @@ class ConnectionServer(object):
|
|||
])
|
||||
if len(corrections) < 6:
|
||||
return 0.0
|
||||
mid = len(corrections) / 2 - 1
|
||||
mid = int(len(corrections) / 2 - 1)
|
||||
median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
|
||||
return median
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
from ConnectionServer import ConnectionServer
|
||||
from Connection import Connection
|
||||
from .ConnectionServer import ConnectionServer
|
||||
from .Connection import Connection
|
||||
|
|
|
@ -19,7 +19,7 @@ class ContentDb(Db):
|
|||
foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone()
|
||||
if foreign_key_error:
|
||||
raise Exception("Database foreign key error: %s" % foreign_key_error)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.error("Error loading content.db: %s, rebuilding..." % Debug.formatException(err))
|
||||
self.close()
|
||||
os.unlink(path) # Remove and try again
|
||||
|
@ -95,8 +95,8 @@ class ContentDb(Db):
|
|||
def setContent(self, site, inner_path, content, size=0):
|
||||
self.insertOrUpdate("content", {
|
||||
"size": size,
|
||||
"size_files": sum([val["size"] for key, val in content.get("files", {}).iteritems()]),
|
||||
"size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).iteritems()]),
|
||||
"size_files": sum([val["size"] for key, val in content.get("files", {}).items()]),
|
||||
"size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]),
|
||||
"modified": int(content.get("modified", 0))
|
||||
}, {
|
||||
"site_id": self.site_ids.get(site.address, 0),
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import time
|
||||
import os
|
||||
|
||||
import ContentDb
|
||||
from . import ContentDb
|
||||
from Debug import Debug
|
||||
from Config import config
|
||||
|
||||
|
@ -127,29 +127,29 @@ if __name__ == "__main__":
|
|||
s_mem = process.memory_info()[0] / float(2 ** 20)
|
||||
root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27"
|
||||
contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root)
|
||||
print "Init len", len(contents)
|
||||
print("Init len", len(contents))
|
||||
|
||||
s = time.time()
|
||||
for dir_name in os.listdir(root + "/data/users/")[0:8000]:
|
||||
contents["data/users/%s/content.json" % dir_name]
|
||||
print "Load: %.3fs" % (time.time() - s)
|
||||
print("Load: %.3fs" % (time.time() - s))
|
||||
|
||||
s = time.time()
|
||||
found = 0
|
||||
for key, val in contents.iteritems():
|
||||
for key, val in contents.items():
|
||||
found += 1
|
||||
assert key
|
||||
assert val
|
||||
print "Found:", found
|
||||
print "Iteritem: %.3fs" % (time.time() - s)
|
||||
print("Found:", found)
|
||||
print("Iteritem: %.3fs" % (time.time() - s))
|
||||
|
||||
s = time.time()
|
||||
found = 0
|
||||
for key in contents.keys():
|
||||
for key in list(contents.keys()):
|
||||
found += 1
|
||||
assert key in contents
|
||||
print "In: %.3fs" % (time.time() - s)
|
||||
print("In: %.3fs" % (time.time() - s))
|
||||
|
||||
print "Len:", len(contents.values()), len(contents.keys())
|
||||
print("Len:", len(list(contents.values())), len(list(contents.keys())))
|
||||
|
||||
print "Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem
|
||||
print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem)
|
||||
|
|
|
@ -3,6 +3,7 @@ import time
|
|||
import re
|
||||
import os
|
||||
import copy
|
||||
import base64
|
||||
|
||||
import gevent
|
||||
|
||||
|
@ -13,7 +14,7 @@ from util import helper
|
|||
from util import Diff
|
||||
from util import SafeRe
|
||||
from Peer import PeerHashfield
|
||||
from ContentDbDict import ContentDbDict
|
||||
from .ContentDbDict import ContentDbDict
|
||||
from Plugin import PluginManager
|
||||
|
||||
|
||||
|
@ -44,7 +45,7 @@ class ContentManager(object):
|
|||
|
||||
# Load hashfield cache
|
||||
if "hashfield" in self.site.settings.get("cache", {}):
|
||||
self.hashfield.fromstring(self.site.settings["cache"]["hashfield"].decode("base64"))
|
||||
self.hashfield.frombytes(base64.b64decode(self.site.settings["cache"]["hashfield"]))
|
||||
del self.site.settings["cache"]["hashfield"]
|
||||
elif self.contents.get("content.json") and self.site.settings["size_optional"] > 0:
|
||||
self.site.storage.updateBadFiles() # No hashfield cache created yet
|
||||
|
@ -74,7 +75,7 @@ class ContentManager(object):
|
|||
return [], []
|
||||
|
||||
new_content = json.load(open(content_path))
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.warning("%s load error: %s" % (content_path, Debug.formatException(err)))
|
||||
return [], []
|
||||
else:
|
||||
|
@ -86,7 +87,7 @@ class ContentManager(object):
|
|||
changed = []
|
||||
deleted = []
|
||||
# Check changed
|
||||
for relative_path, info in new_content.get("files", {}).iteritems():
|
||||
for relative_path, info in new_content.get("files", {}).items():
|
||||
if "sha512" in info:
|
||||
hash_type = "sha512"
|
||||
else: # Backward compatibility
|
||||
|
@ -101,7 +102,7 @@ class ContentManager(object):
|
|||
changed.append(content_inner_dir + relative_path)
|
||||
|
||||
# Check changed optional files
|
||||
for relative_path, info in new_content.get("files_optional", {}).iteritems():
|
||||
for relative_path, info in new_content.get("files_optional", {}).items():
|
||||
file_inner_path = content_inner_dir + relative_path
|
||||
new_hash = info["sha512"]
|
||||
if old_content and old_content.get("files_optional", {}).get(relative_path):
|
||||
|
@ -115,7 +116,7 @@ class ContentManager(object):
|
|||
self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][relative_path]["size"])
|
||||
self.optionalDelete(file_inner_path)
|
||||
self.log.debug("Deleted changed optional file: %s" % file_inner_path)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))
|
||||
else: # The file is not in the old content
|
||||
if self.site.isDownloadable(file_inner_path):
|
||||
|
@ -151,7 +152,7 @@ class ContentManager(object):
|
|||
self.site.storage.delete(file_inner_path)
|
||||
|
||||
self.log.debug("Deleted file: %s" % file_inner_path)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))
|
||||
|
||||
# Cleanup empty dirs
|
||||
|
@ -165,7 +166,7 @@ class ContentManager(object):
|
|||
self.site.storage.deleteDir(root_inner_path)
|
||||
# Remove from tree dict to reflect changed state
|
||||
tree[os.path.dirname(root)][0].remove(os.path.basename(root))
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("Error deleting empty directory %s: %s" % (root_inner_path, err))
|
||||
|
||||
# Check archived
|
||||
|
@ -175,12 +176,12 @@ class ContentManager(object):
|
|||
self.log.debug("old archived: %s, new archived: %s" % (len(old_archived), len(new_archived)))
|
||||
archived_changed = {
|
||||
key: date_archived
|
||||
for key, date_archived in new_archived.iteritems()
|
||||
for key, date_archived in new_archived.items()
|
||||
if old_archived.get(key) != new_archived[key]
|
||||
}
|
||||
if archived_changed:
|
||||
self.log.debug("Archived changed: %s" % archived_changed)
|
||||
for archived_dirname, date_archived in archived_changed.iteritems():
|
||||
for archived_dirname, date_archived in archived_changed.items():
|
||||
archived_inner_path = content_inner_dir + archived_dirname + "/content.json"
|
||||
if self.contents.get(archived_inner_path, {}).get("modified", 0) < date_archived:
|
||||
self.removeContent(archived_inner_path)
|
||||
|
@ -204,7 +205,7 @@ class ContentManager(object):
|
|||
|
||||
# Remove archived files from download queue
|
||||
num_removed_bad_files = 0
|
||||
for bad_file in self.site.bad_files.keys():
|
||||
for bad_file in list(self.site.bad_files.keys()):
|
||||
if bad_file.endswith("content.json"):
|
||||
del self.site.bad_files[bad_file]
|
||||
num_removed_bad_files += 1
|
||||
|
@ -217,7 +218,7 @@ class ContentManager(object):
|
|||
|
||||
# Load includes
|
||||
if load_includes and "includes" in new_content:
|
||||
for relative_path, info in new_content["includes"].items():
|
||||
for relative_path, info in list(new_content["includes"].items()):
|
||||
include_inner_path = content_inner_dir + relative_path
|
||||
if self.site.storage.isFile(include_inner_path): # Content.json exists, load it
|
||||
include_changed, include_deleted = self.loadContent(
|
||||
|
@ -255,7 +256,7 @@ class ContentManager(object):
|
|||
self.has_optional_files = True
|
||||
# Update the content
|
||||
self.contents[content_inner_path] = new_content
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.warning("%s parse error: %s" % (content_inner_path, Debug.formatException(err)))
|
||||
return [], [] # Content.json parse error
|
||||
|
||||
|
@ -282,7 +283,7 @@ class ContentManager(object):
|
|||
content.get("files", {}),
|
||||
**content.get("files_optional", {})
|
||||
)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("Error loading %s for removeContent: %s" % (inner_path, Debug.formatException(err)))
|
||||
files = {}
|
||||
files["content.json"] = True
|
||||
|
@ -292,16 +293,16 @@ class ContentManager(object):
|
|||
try:
|
||||
self.site.storage.delete(file_inner_path)
|
||||
self.log.debug("Deleted file: %s" % file_inner_path)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("Error deleting file %s: %s" % (file_inner_path, err))
|
||||
try:
|
||||
self.site.storage.deleteDir(inner_dir)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("Error deleting dir %s: %s" % (inner_dir, err))
|
||||
|
||||
try:
|
||||
del self.contents[inner_path]
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("Error key from contents: %s" % inner_path)
|
||||
|
||||
# Get total size of site
|
||||
|
@ -317,7 +318,7 @@ class ContentManager(object):
|
|||
return []
|
||||
back = [inner_path]
|
||||
content_inner_dir = helper.getDirname(inner_path)
|
||||
for relative_path in self.contents[inner_path].get("includes", {}).keys():
|
||||
for relative_path in list(self.contents[inner_path].get("includes", {}).keys()):
|
||||
include_inner_path = content_inner_dir + relative_path
|
||||
back += self.listContents(include_inner_path)
|
||||
return back
|
||||
|
@ -333,7 +334,7 @@ class ContentManager(object):
|
|||
file_info = self.getFileInfo(user_contents_inner_path)
|
||||
if file_info:
|
||||
time_archived_before = file_info.get("archived_before", 0)
|
||||
time_directory_archived = file_info.get("archived", {}).get(relative_directory)
|
||||
time_directory_archived = file_info.get("archived", {}).get(relative_directory, 0)
|
||||
if modified <= time_archived_before or modified <= time_directory_archived:
|
||||
return True
|
||||
else:
|
||||
|
@ -493,11 +494,11 @@ class ContentManager(object):
|
|||
banned = False
|
||||
if "signers" in rules:
|
||||
rules["signers"] = rules["signers"][:] # Make copy of the signers
|
||||
for permission_pattern, permission_rules in user_contents["permission_rules"].items(): # Regexp rules
|
||||
for permission_pattern, permission_rules in list(user_contents["permission_rules"].items()): # Regexp rules
|
||||
if not SafeRe.match(permission_pattern, user_urn):
|
||||
continue # Rule is not valid for user
|
||||
# Update rules if its better than current recorded ones
|
||||
for key, val in permission_rules.iteritems():
|
||||
for key, val in permission_rules.items():
|
||||
if key not in rules:
|
||||
if type(val) is list:
|
||||
rules[key] = val[:] # Make copy
|
||||
|
@ -649,7 +650,7 @@ class ContentManager(object):
|
|||
|
||||
if extend:
|
||||
# Add extend keys if not exists
|
||||
for key, val in extend.items():
|
||||
for key, val in list(extend.items()):
|
||||
if not content.get(key):
|
||||
content[key] = val
|
||||
self.log.info("Extending content.json with: %s" % key)
|
||||
|
@ -664,14 +665,14 @@ class ContentManager(object):
|
|||
)
|
||||
|
||||
if not remove_missing_optional:
|
||||
for file_inner_path, file_details in content.get("files_optional", {}).iteritems():
|
||||
for file_inner_path, file_details in content.get("files_optional", {}).items():
|
||||
if file_inner_path not in files_optional_node:
|
||||
files_optional_node[file_inner_path] = file_details
|
||||
|
||||
# Find changed files
|
||||
files_merged = files_node.copy()
|
||||
files_merged.update(files_optional_node)
|
||||
for file_relative_path, file_details in files_merged.iteritems():
|
||||
for file_relative_path, file_details in files_merged.items():
|
||||
old_hash = content.get("files", {}).get(file_relative_path, {}).get("sha512")
|
||||
new_hash = files_merged[file_relative_path]["sha512"]
|
||||
if old_hash != new_hash:
|
||||
|
@ -795,19 +796,19 @@ class ContentManager(object):
|
|||
try:
|
||||
cert_subject = "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name)
|
||||
result = CryptBitcoin.verify(cert_subject, cert_address, content["cert_sign"])
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
raise VerifyError("Certificate verify error: %s" % err)
|
||||
return result
|
||||
|
||||
# Checks if the content.json content is valid
|
||||
# Return: True or False
|
||||
def verifyContent(self, inner_path, content):
|
||||
content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in content["files"].values() if file["size"] >= 0]) # Size of new content
|
||||
content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in list(content["files"].values()) if file["size"] >= 0]) # Size of new content
|
||||
# Calculate old content size
|
||||
old_content = self.contents.get(inner_path)
|
||||
if old_content:
|
||||
old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in old_content.get("files", {}).values()])
|
||||
old_content_size_optional = sum([file["size"] for file in old_content.get("files_optional", {}).values()])
|
||||
old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in list(old_content.get("files", {}).values())])
|
||||
old_content_size_optional = sum([file["size"] for file in list(old_content.get("files_optional", {}).values())])
|
||||
else:
|
||||
old_content_size = 0
|
||||
old_content_size_optional = 0
|
||||
|
@ -816,7 +817,7 @@ class ContentManager(object):
|
|||
if not old_content and inner_path == "content.json":
|
||||
self.site.settings["size"] = 0
|
||||
|
||||
content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values() if file["size"] >= 0])
|
||||
content_size_optional = sum([file["size"] for file in list(content.get("files_optional", {}).values()) if file["size"] >= 0])
|
||||
site_size = self.site.settings["size"] - old_content_size + content_size # Site size without old content plus the new
|
||||
site_size_optional = self.site.settings["size_optional"] - old_content_size_optional + content_size_optional # Site size without old content plus the new
|
||||
|
||||
|
@ -841,7 +842,7 @@ class ContentManager(object):
|
|||
raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit))
|
||||
|
||||
# Verify valid filenames
|
||||
for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
|
||||
for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
|
||||
if not self.isValidRelativePath(file_relative_path):
|
||||
raise VerifyError("Invalid relative path: %s" % file_relative_path)
|
||||
|
||||
|
@ -876,12 +877,12 @@ class ContentManager(object):
|
|||
|
||||
# Filename limit
|
||||
if rules.get("files_allowed"):
|
||||
for file_inner_path in content["files"].keys():
|
||||
for file_inner_path in list(content["files"].keys()):
|
||||
if not SafeRe.match("^%s$" % rules["files_allowed"], file_inner_path):
|
||||
raise VerifyError("File not allowed: %s" % file_inner_path)
|
||||
|
||||
if rules.get("files_allowed_optional"):
|
||||
for file_inner_path in content.get("files_optional", {}).keys():
|
||||
for file_inner_path in list(content.get("files_optional", {}).keys()):
|
||||
if not SafeRe.match("^%s$" % rules["files_allowed_optional"], file_inner_path):
|
||||
raise VerifyError("Optional file not allowed: %s" % file_inner_path)
|
||||
|
||||
|
@ -964,7 +965,7 @@ class ContentManager(object):
|
|||
else:
|
||||
raise VerifyError("Invalid old-style sign")
|
||||
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err)))
|
||||
raise err
|
||||
|
||||
|
|
|
@ -1 +1 @@
|
|||
from ContentManager import ContentManager
|
||||
from .ContentManager import ContentManager
|
|
@ -13,10 +13,10 @@ def sha1sum(file, blocksize=65536):
|
|||
|
||||
|
||||
def sha512sum(file, blocksize=65536, format="hexdigest"):
|
||||
if hasattr(file, "endswith"): # Its a string open it
|
||||
if type(file) is str: # Filename specified
|
||||
file = open(file, "rb")
|
||||
hash = hashlib.sha512()
|
||||
for block in iter(lambda: file.read(blocksize), ""):
|
||||
for block in iter(lambda: file.read(blocksize), b""):
|
||||
hash.update(block)
|
||||
|
||||
# Truncate to 256bits is good enough
|
||||
|
@ -31,7 +31,7 @@ def sha256sum(file, blocksize=65536):
|
|||
if hasattr(file, "endswith"): # Its a string open it
|
||||
file = open(file, "rb")
|
||||
hash = hashlib.sha256()
|
||||
for block in iter(lambda: file.read(blocksize), ""):
|
||||
for block in iter(lambda: file.read(blocksize), b""):
|
||||
hash.update(block)
|
||||
return hash.hexdigest()
|
||||
|
||||
|
@ -39,7 +39,7 @@ def sha256sum(file, blocksize=65536):
|
|||
def random(length=64, encoding="hex"):
|
||||
if encoding == "base64": # Characters: A-Za-z0-9
|
||||
hash = hashlib.sha512(os.urandom(256)).digest()
|
||||
return base64.standard_b64encode(hash).replace("+", "").replace("/", "").replace("=", "")[0:length]
|
||||
return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length]
|
||||
else: # Characters: a-f0-9 (faster)
|
||||
return hashlib.sha512(os.urandom(256)).hexdigest()[0:length]
|
||||
|
||||
|
|
|
@ -35,4 +35,4 @@ def privatekeyToPublickey(privatekey):
return pub.save_pkcs1("DER")

def publickeyToOnion(publickey):
return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower()
return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii")
25 src/Db/Db.py
@ -7,7 +7,7 @@ import os
import gevent
from Debug import Debug
from DbCursor import DbCursor
from .DbCursor import DbCursor
from Config import config
from util import SafeRe
from util import helper

@ -149,8 +149,8 @@ class Db(object):
if not self.db_keyvalues: # Get db keyvalues
try:
res = self.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
except sqlite3.OperationalError, err: # Table not exist
self.log.debug("Query error: %s" % err)
except sqlite3.OperationalError as err: # Table not exist
self.log.debug("Query table version error: %s" % err)
return False
for row in res:

@ -260,7 +260,7 @@ class Db(object):
data = json.load(helper.limitedGzipFile(fileobj=file))
else:
data = json.load(file)
except Exception, err:
except Exception as err:
self.log.debug("Json file %s load error: %s" % (file_path, err))
data = {}

@ -274,7 +274,7 @@ class Db(object):
commit_after_done = False
# Row for current json file if required
if not data or filter(lambda dbmap: "to_keyvalue" in dbmap or "to_table" in dbmap, matched_maps):
if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]:
json_row = cur.getJsonRow(relative_path)
# Check matched mappings in schema

@ -311,7 +311,7 @@ class Db(object):
changed = True
if changed:
# Add the custom col values
data_json_row.update({key: val for key, val in data.iteritems() if key in dbmap["to_json_table"]})
data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]})
cur.execute("INSERT OR REPLACE INTO json ?", data_json_row)
# Insert data to tables

@ -333,7 +333,7 @@ class Db(object):
# Fill import cols from table cols
if not import_cols:
import_cols = set(map(lambda item: item[0], self.schema["tables"][table_name]["cols"]))
import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]])
cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))

@ -341,7 +341,7 @@ class Db(object):
continue
if key_col: # Map as dict
for key, val in data[node].iteritems():
for key, val in data[node].items():
if val_col: # Single value
cur.execute(
"INSERT OR REPLACE INTO %s ?" % table_name,

@ -355,9 +355,9 @@ class Db(object):
row[key_col] = key
# Replace in value if necessary
if replaces:
for replace_key, replace in replaces.iteritems():
for replace_key, replace in replaces.items():
if replace_key in row:
for replace_from, replace_to in replace.iteritems():
for replace_from, replace_to in replace.items():
row[replace_key] = row[replace_key].replace(replace_from, replace_to)
row["json_id"] = json_row["json_id"]

@ -402,7 +402,6 @@ if __name__ == "__main__":
dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur)
# print ".",
cur.logging = True
cur.execute("COMMIT")
print "Done in %.3fs" % (time.time() - s)
print("Done in %.3fs" % (time.time() - s))
for query, stats in sorted(dbjson.query_stats.items()):
print "-", query, stats
print("-", query, stats)
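For reference (not part of the diff; values are made up): dict.iteritems() is gone in Python 3, and .items() returns a view object, which is all a read-only comprehension like the one in the Db.py hunk above needs.

data = {"cert_user_id": "user@example.bit", "title": "hello"}   # made-up values
to_json_table = ["cert_user_id"]

# Python 2: {key: val for key, val in data.iteritems() if key in to_json_table}
row = {key: val for key, val in data.items() if key in to_json_table}
print(row)   # {'cert_user_id': 'user@example.bit'}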
@ -9,9 +9,9 @@ class DbQuery:
# Split main parts of query
def parseParts(self, query):
parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query)
parts = filter(None, parts) # Remove empty parts
parts = map(lambda s: s.strip(), parts) # Remove whitespace
return dict(zip(parts[0::2], parts[1::2]))
parts = [_f for _f in parts if _f] # Remove empty parts
parts = [s.strip() for s in parts] # Remove whitespace
return dict(list(zip(parts[0::2], parts[1::2])))
# Parse selected fields SELECT ... FROM
def parseFields(self, query_select):

@ -1,3 +1,3 @@
from Db import Db
from DbQuery import DbQuery
from DbCursor import DbCursor
from .Db import Db
from .DbQuery import DbQuery
from .DbCursor import DbCursor
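A minimal sketch of why the query parser switches to list comprehensions (the example query is assumed, not from the repo): in Python 3, filter() and map() return lazy iterators, which cannot be sliced the way parts[0::2] requires.

import re

query = "SELECT * FROM test WHERE id = 1"
parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query)
parts = [p for p in parts if p]        # filter(None, parts) would be a lazy iterator here
parts = [p.strip() for p in parts]     # map(str.strip, parts) likewise
print(dict(zip(parts[0::2], parts[1::2])))   # slicing needs a real list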
@ -63,10 +63,10 @@ gevent.spawn(testBlock)
if __name__ == "__main__":
try:
print 1 / 0
except Exception, err:
print type(err).__name__
print "1/0 error: %s" % formatException(err)
print(1 / 0)
except Exception as err:
print(type(err).__name__)
print("1/0 error: %s" % formatException(err))
def loadJson():
json.loads("Errr")

@ -74,13 +74,13 @@ if __name__ == "__main__":
import json
try:
loadJson()
except Exception, err:
print err
print "Json load error: %s" % formatException(err)
except Exception as err:
print(err)
print("Json load error: %s" % formatException(err))
try:
raise Notify("nothing...")
except Exception, err:
print "Notify: %s" % formatException(err)
except Exception as err:
print("Notify: %s" % formatException(err))
loadJson()
@ -5,19 +5,20 @@ import gevent
import gevent.hub
from Config import config
import importlib
last_error = None
def shutdown():
print "Shutting down..."
print("Shutting down...")
if "file_server" in dir(sys.modules["main"]) and sys.modules["main"].file_server.running:
try:
if "file_server" in dir(sys.modules["main"]):
gevent.spawn(sys.modules["main"].file_server.stop)
if "ui_server" in dir(sys.modules["main"]):
gevent.spawn(sys.modules["main"].ui_server.stop)
except Exception, err:
print "Proper shutdown error: %s" % err
except Exception as err:
print("Proper shutdown error: %s" % err)
sys.exit(0)
else:
sys.exit(0)

@ -67,7 +68,7 @@ else:
sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
reload(gevent)
importlib.reload(gevent)
def handleGreenletError(self, context, type, value, tb):
if isinstance(value, str):

@ -83,18 +84,18 @@ if __name__ == "__main__":
import time
from gevent import monkey
monkey.patch_all(thread=False, ssl=False)
import Debug
from . import Debug
def sleeper(num):
print "started", num
print("started", num)
time.sleep(3)
raise Exception("Error")
print "stopped", num
print("stopped", num)
thread1 = gevent.spawn(sleeper, 1)
thread2 = gevent.spawn(sleeper, 2)
time.sleep(1)
print "killing..."
print("killing...")
thread1.kill(exception=Debug.Notify("Worker stopped"))
#thread2.throw(Debug.Notify("Throw"))
print "killed"
print("killed")
gevent.joinall([thread1,thread2])
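Not from the commit: the builtin reload() was removed in Python 3, so module reloads go through importlib, as in this tiny example.

import importlib
import json

json = importlib.reload(json)   # returns the re-initialised module object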
@ -3,6 +3,7 @@ import subprocess
import re
import logging
import time
import functools
from Config import config
from util import helper

@ -18,9 +19,9 @@ def findfiles(path, find_ext):
elif f2 == "":
return -1
else:
return cmp(f1.lower(), f2.lower())
return helper.cmp(f1.lower(), f2.lower())
for root, dirs, files in sorted(os.walk(path, topdown=False), cmp=sorter):
for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)):
for file in sorted(files):
file_path = root + "/" + file
file_ext = file.split(".")[-1]

@ -66,16 +67,16 @@ def merge(merged_path):
return # Assets not changed, nothing to do
if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile
merged_old = open(merged_path, "rb").read().decode("utf8")
merged_old = open(merged_path, "rb").read()
old_parts = {}
for match in re.findall(r"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
old_parts[match[1]] = match[2].strip("\n\r")
for match in re.findall(rb"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
old_parts[match[1]] = match[2].strip(rb"\n\r")
# Merge files
parts = []
s_total = time.time()
for file_path in findfiles(merge_dir, find_ext):
parts.append("\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, ""))
parts.append(b"\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, "").encode("utf8"))
if file_path.endswith(".coffee"): # Compile coffee script
if file_path in changed or file_path.replace(config.data_dir, "") not in old_parts: # Only recompile if changed or its not compiled before
if config.coffeescript_compiler is None:

@ -95,31 +96,31 @@ def merge(merged_path):
# Start compiling
s = time.time()
compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
out = compiler.stdout.read().decode("utf8")
out = compiler.stdout.read()
compiler.wait()
logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
# Check errors
if out and out.startswith("("): # No error found
if out and out.startswith(b"("): # No error found
parts.append(out)
else: # Put error message in place of source code
error = out
logging.error("%s Compile error: %s" % (file_path, error))
parts.append(
"alert('%s compile error: %s');" %
(file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n"))
b"alert('%s compile error: %s');" %
(file_path, re.escape(error).replace(b"\n", b"\\n").replace(r"\\n", r"\n"))
)
else: # Not changed use the old_part
parts.append(old_parts[file_path.replace(config.data_dir, "")])
else: # Add to parts
parts.append(open(file_path).read().decode("utf8"))
parts.append(open(file_path, "rb").read())
merged = u"\n".join(parts)
merged = b"\n".join(parts)
if ext == "css": # Vendor prefix css
from lib.cssvendor import cssvendor
merged = cssvendor.prefix(merged)
merged = merged.replace("\r", "")
open(merged_path, "wb").write(merged.encode("utf8"))
merged = merged.replace(b"\r", b"")
open(merged_path, "wb").write(merged)
logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
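An illustrative sketch with made-up file names (not repo code): Python 3's sorted() has no cmp= argument and no builtin cmp(), so an old comparison function gets wrapped with functools.cmp_to_key(), as the findfiles() hunk above does.

import functools

def cmp(a, b):                      # stand-in for the removed builtin cmp()
    return (a > b) - (a < b)

def sorter(f1, f2):
    return cmp(f1.lower(), f2.lower())

names = ["All.coffee", "zebra.coffee", "Button.coffee"]    # made-up file names
print(sorted(names, key=functools.cmp_to_key(sorter)))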
@ -1 +0,0 @@
from DebugReloader import DebugReloader
@ -118,7 +118,7 @@ class FileRequest(object):
try:
content = json.loads(params["body"])
except Exception, err:
except Exception as err:
self.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err))
self.response({"error": "File invalid JSON"})
self.connection.badAction(5)

@ -131,7 +131,7 @@ class FileRequest(object):
else:
try:
valid = site.content_manager.verifyFile(inner_path, content)
except Exception, err:
except Exception as err:
self.log.debug("Update for %s is invalid: %s" % (inner_path, err))
valid = False

@ -251,10 +251,10 @@ class FileRequest(object):
return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]}
except RequestError, err:
except RequestError as err:
self.log.debug("GetFile %s %s request error: %s" % (self.connection, params["inner_path"], Debug.formatException(err)))
self.response({"error": "File read error: %s" % err})
except Exception, err:
except Exception as err:
if config.verbose:
self.log.debug("GetFile read error: %s" % Debug.formatException(err))
self.response({"error": "File read error"})

@ -306,7 +306,7 @@ class FileRequest(object):
if config.verbose:
self.log.debug(
"Added %s peers to %s using pex, sending back %s" %
(added, site, {key: len(val) for key, val in packed_peers.iteritems()})
(added, site, {key: len(val) for key, val in packed_peers.items()})
)
back = {

@ -353,7 +353,7 @@ class FileRequest(object):
back = collections.defaultdict(lambda: collections.defaultdict(list))
found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit)
for hash_id, peers in found.iteritems():
for hash_id, peers in found.items():
for peer in peers:
ip_type = helper.getIpType(peer.ip)
if len(back[ip_type][hash_id]) < 20:

@ -385,7 +385,7 @@ class FileRequest(object):
if config.verbose:
self.log.debug(
"Found: %s for %s hashids in %.3fs" %
({key: len(val) for key, val in back.iteritems()}, len(params["hash_ids"]), time.time() - s)
({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s)
)
self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes})

@ -405,7 +405,7 @@ class FileRequest(object):
# Send a simple Pong! answer
def actionPing(self, params):
self.response("Pong!")
self.response(b"Pong!")
# Check requested port of the other peer
def actionCheckport(self, params):
@ -10,7 +10,7 @@ from gevent.server import StreamServer
import util
from util import helper
from Config import config
from FileRequest import FileRequest
from .FileRequest import FileRequest
from Peer import PeerPortchecker
from Site import SiteManager
from Connection import ConnectionServer

@ -41,7 +41,7 @@ class FileServer(ConnectionServer):
port = config.tor_hs_port
config.fileserver_port = port
elif port == 0: # Use random port
port_range_from, port_range_to = map(int, config.fileserver_port_range.split("-"))
port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-")))
port = self.getRandomPort(ip, port_range_from, port_range_to)
config.fileserver_port = port
if not port:

@ -59,7 +59,7 @@ class FileServer(ConnectionServer):
self.stream_server_proxy = StreamServer(
("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
)
except Exception, err:
except Exception as err:
self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err))
self.port_opened = {}

@ -117,7 +117,7 @@ class FileServer(ConnectionServer):
def listenProxy(self):
try:
self.stream_server_proxy.serve_forever()
except Exception, err:
except Exception as err:
if err.errno == 98: # Address already in use error
self.log.debug("StreamServer proxy listen error: %s" % err)
else:

@ -231,7 +231,7 @@ class FileServer(ConnectionServer):
if not self.port_opened or force_port_check: # Test and open port if not tested yet
if len(self.sites) <= 2: # Don't wait port opening on first startup
sites_checking = True
for address, site in self.sites.items():
for address, site in list(self.sites.items()):
gevent.spawn(self.checkSite, site, check_files)
self.portCheck()

@ -242,7 +242,7 @@ class FileServer(ConnectionServer):
if not sites_checking:
check_pool = gevent.pool.Pool(5)
# Check sites integrity
for site in sorted(self.sites.values(), key=lambda site: site.settings.get("modified", 0), reverse=True):
for site in sorted(list(self.sites.values()), key=lambda site: site.settings.get("modified", 0), reverse=True):
if not site.settings["serving"]:
continue
check_thread = check_pool.spawn(self.checkSite, site, check_files) # Check in new thread

@ -263,7 +263,7 @@ class FileServer(ConnectionServer):
(len(self.connections), self.has_internet, len(peers_protected))
)
for address, site in self.sites.items():
for address, site in list(self.sites.items()):
if not site.settings["serving"]:
continue

@ -273,7 +273,7 @@ class FileServer(ConnectionServer):
time.sleep(1) # Prevent too quick request
peers_protected = set([])
for address, site in self.sites.items():
for address, site in list(self.sites.items()):
if not site.settings["serving"]:
continue

@ -313,7 +313,7 @@ class FileServer(ConnectionServer):
while 1:
config.loadTrackersFile()
s = time.time()
for address, site in self.sites.items():
for address, site in list(self.sites.items()):
if not site.settings["serving"]:
continue
gevent.spawn(self.announceSite, site).join(timeout=10)

@ -1,2 +1,2 @@
from FileServer import FileServer
from FileRequest import FileRequest
from .FileServer import FileServer
from .FileRequest import FileRequest
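A minimal sketch (hypothetical dict, not repo data) of why the loops above wrap the dict views in list(): Python 3 raises RuntimeError if a dict changes size while one of its live views is being iterated, so the snapshot makes mutation during the loop safe.

sites = {"1SiteAaaa": "ok", "1SiteBbbb": "stale"}   # hypothetical addresses

for address, state in list(sites.items()):   # snapshot the view before mutating
    if state == "stale":
        del sites[address]

print(sites)   # {'1SiteAaaa': 'ok'}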
@ -6,11 +6,11 @@ import collections
import gevent
from cStringIO import StringIO
import io
from Debug import Debug
from Config import config
from util import helper
from PeerHashfield import PeerHashfield
from .PeerHashfield import PeerHashfield
from Plugin import PluginManager
if config.use_tempfiles:

@ -95,7 +95,7 @@ class Peer(object):
self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection)
self.reputation += 1
self.connection.sites += 1
except Exception, err:
except Exception as err:
self.onConnectionError("Getting connection error")
self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
(Debug.formatException(err), self.connection_error, self.hash_failed))

@ -164,7 +164,7 @@ class Peer(object):
return res
else:
raise Exception("Invalid response: %s" % res)
except Exception, err:
except Exception as err:
if type(err).__name__ == "Notify": # Greenlet killed by worker
self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd))
break

@ -195,7 +195,7 @@ class Peer(object):
if config.use_tempfiles:
buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b')
else:
buff = StringIO()
buff = io.BytesIO()
s = time.time()
while True: # Read in smaller parts

@ -240,7 +240,7 @@ class Peer(object):
with gevent.Timeout(10.0, False): # 10 sec timeout, don't raise exception
res = self.request("ping")
if res and "body" in res and res["body"] == "Pong!":
if res and "body" in res and res["body"] == b"Pong!":
response_time = time.time() - s
break # All fine, exit from for loop
# Timeout reached or bad response

@ -267,12 +267,9 @@ class Peer(object):
request["peers_onion"] = packed_peers["onion"]
if packed_peers["ipv6"]:
request["peers_ipv6"] = packed_peers["ipv6"]
res = self.request("pex", request)
if not res or "error" in res:
return False
added = 0
# Remove unsupported peer types

@ -331,13 +328,13 @@ class Peer(object):
key = "peers"
else:
key = "peers_%s" % ip_type
for hash, peers in res.get(key, {}).items()[0:30]:
for hash, peers in list(res.get(key, {}).items())[0:30]:
if ip_type == "onion":
unpacker_func = helper.unpackOnionAddress
else:
unpacker_func = helper.unpackAddress
back[hash] += map(unpacker_func, peers)
back[hash] += list(map(unpacker_func, peers))
for hash in res.get("my", []):
back[hash].append((self.connection.ip, self.connection.port))
@ -68,8 +68,8 @@ if __name__ == "__main__":
s = time.time()
for i in range(10000):
field.appendHashId(i)
print time.time()-s
print(time.time()-s)
s = time.time()
for i in range(10000):
field.hasHash("AABB")
print time.time()-s
print(time.time()-s)
@ -1,6 +1,6 @@
import logging
import urllib
import urllib2
import urllib.request
import urllib.parse
import re
import time

@ -16,10 +16,10 @@ class PeerPortchecker(object):
def requestUrl(self, url, post_data=None):
if type(post_data) is dict:
post_data = urllib.urlencode(post_data)
req = urllib2.Request(url, post_data)
post_data = urllib.parse.urlencode(post_data).encode("utf8")
req = urllib.request.Request(url, post_data)
req.add_header('Referer', url)
return urllib2.urlopen(req, timeout=20.0)
return urllib.request.urlopen(req, timeout=20.0)
def portOpen(self, port):
self.log.info("Trying to open port using UpnpPunch...")

@ -67,7 +67,7 @@ class PeerPortchecker(object):
return res
def checkCanyouseeme(self, port):
data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
data = urllib.request.urlopen("http://www.canyouseeme.org/", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8")
message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags

@ -85,7 +85,7 @@ class PeerPortchecker(object):
raise Exception("Invalid response: %s" % message)
def checkPortchecker(self, port):
data = urllib2.urlopen("https://portchecker.co/check", "port=%s" % port, timeout=20.0).read()
data = urllib.request.urlopen("https://portchecker.co/check", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8")
message = re.match('.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1)
message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags

@ -109,7 +109,6 @@ class PeerPortchecker(object):
ip = re.match('.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1)
token = re.match('.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1)
print ip
post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."}
data = self.requestUrl(url, post_data).read()

@ -168,4 +167,4 @@ if __name__ == "__main__":
peer_portchecker = PeerPortchecker()
for func_name in ["checkIpv6scanner", "checkMyaddr", "checkPortchecker", "checkCanyouseeme"]:
s = time.time()
print(func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s))
print((func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s)))
@ -1,2 +1,2 @@
from Peer import Peer
from PeerHashfield import PeerHashfield
from .Peer import Peer
from .PeerHashfield import PeerHashfield
@ -7,6 +7,7 @@ from collections import defaultdict
from Debug import Debug
from Config import config
import importlib
class PluginManager:

@ -48,7 +49,7 @@ class PluginManager:
self.log.debug("Loading plugin: %s" % dir_name)
try:
__import__(dir_name)
except Exception, err:
except Exception as err:
self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err)))
if dir_name not in self.plugin_names:
self.plugin_names.append(dir_name)

@ -62,19 +63,19 @@ class PluginManager:
self.after_load = []
self.plugins_before = self.plugins
self.plugins = defaultdict(list) # Reset registered plugins
for module_name, module in sys.modules.items():
if module and "__file__" in dir(module) and self.plugin_path in module.__file__: # Module file within plugin_path
for module_name, module in list(sys.modules.items()):
if module and getattr(module, "__file__", None) and self.plugin_path in module.__file__: # Module file in plugin_path
if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled
# Re-add non-reloadable plugins
for class_name, classes in self.plugins_before.iteritems():
for class_name, classes in self.plugins_before.items():
for c in classes:
if c.__module__ != module.__name__:
continue
self.plugins[class_name].append(c)
else:
try:
reload(module)
except Exception, err:
importlib.reload(module)
except Exception as err:
self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err)))
self.loadPlugins() # Load new plugins

@ -82,7 +83,7 @@ class PluginManager:
# Change current classes in memory
import gc
patched = {}
for class_name, classes in self.plugins.iteritems():
for class_name, classes in self.plugins.items():
classes = classes[:] # Copy the current plugins
classes.reverse()
base_class = self.pluggable[class_name] # Original class

@ -96,8 +97,8 @@ class PluginManager:
# Change classes in modules
patched = {}
for class_name, classes in self.plugins.iteritems():
for module_name, module in sys.modules.iteritems():
for class_name, classes in self.plugins.items():
for module_name, module in list(sys.modules.items()):
if class_name in dir(module):
if "__class__" not in dir(getattr(module, class_name)): # Not a class
continue

@ -134,7 +135,7 @@ def acceptPlugins(base_class):
if str(key) in plugin_manager.subclass_order[class_name]
else 9999
)
plugin_manager.subclass_order[class_name] = map(str, classes)
plugin_manager.subclass_order[class_name] = list(map(str, classes))
classes.reverse()
classes.append(base_class) # Add the class itself to end of inherience line

@ -181,4 +182,4 @@ if __name__ == "__main__":
else:
return "Can't route to", path
print Request().route("MainPage")
print(Request().route("MainPage"))
@ -7,6 +7,7 @@ import random
|
|||
import sys
|
||||
import hashlib
|
||||
import collections
|
||||
import base64
|
||||
|
||||
import gevent
|
||||
import gevent.pool
|
||||
|
@ -17,14 +18,14 @@ from Peer import Peer
|
|||
from Worker import WorkerManager
|
||||
from Debug import Debug
|
||||
from Content import ContentManager
|
||||
from SiteStorage import SiteStorage
|
||||
from .SiteStorage import SiteStorage
|
||||
from Crypt import CryptHash
|
||||
from util import helper
|
||||
from util import Diff
|
||||
from Plugin import PluginManager
|
||||
from File import FileServer
|
||||
from SiteAnnouncer import SiteAnnouncer
|
||||
import SiteManager
|
||||
from .SiteAnnouncer import SiteAnnouncer
|
||||
from . import SiteManager
|
||||
|
||||
|
||||
@PluginManager.acceptPlugins
|
||||
|
@ -32,7 +33,8 @@ class Site(object):
|
|||
|
||||
def __init__(self, address, allow_create=True, settings=None):
|
||||
self.address = str(re.sub("[^A-Za-z0-9]", "", address)) # Make sure its correct address
|
||||
self.address_hash = hashlib.sha256(self.address).digest()
|
||||
self.address_hash = hashlib.sha256(self.address.encode("ascii")).digest()
|
||||
self.address_sha1 = hashlib.sha1(self.address.encode("ascii")).digest()
|
||||
self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging
|
||||
self.log = logging.getLogger("Site:%s" % self.address_short)
|
||||
self.addEventListeners()
|
||||
|
@ -127,7 +129,7 @@ class Site(object):
|
|||
def getSettingsCache(self):
|
||||
back = {}
|
||||
back["bad_files"] = self.bad_files
|
||||
back["hashfield"] = self.content_manager.hashfield.tostring().encode("base64")
|
||||
back["hashfield"] = base64.b64encode(self.content_manager.hashfield.tobytes()).decode("ascii")
|
||||
return back
|
||||
|
||||
# Max site size in MB
|
||||
|
@ -173,7 +175,7 @@ class Site(object):
|
|||
# Start download files
|
||||
file_threads = []
|
||||
if download_files:
|
||||
for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
|
||||
for file_relative_path in list(self.content_manager.contents[inner_path].get("files", {}).keys()):
|
||||
file_inner_path = content_inner_dir + file_relative_path
|
||||
|
||||
# Try to diff first
|
||||
|
@ -204,7 +206,7 @@ class Site(object):
|
|||
"Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" %
|
||||
(file_inner_path, time_diff, time_verify, time_write, time_on_done)
|
||||
)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("Failed to patch %s: %s" % (file_inner_path, err))
|
||||
diff_success = False
|
||||
|
||||
|
@ -218,7 +220,7 @@ class Site(object):
|
|||
if inner_path == "content.json":
|
||||
gevent.spawn(self.updateHashfield)
|
||||
|
||||
for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys():
|
||||
for file_relative_path in list(self.content_manager.contents[inner_path].get("files_optional", {}).keys()):
|
||||
file_inner_path = content_inner_dir + file_relative_path
|
||||
if file_inner_path not in changed and not self.bad_files.get(file_inner_path):
|
||||
continue
|
||||
|
@ -233,7 +235,7 @@ class Site(object):
|
|||
|
||||
# Wait for includes download
|
||||
include_threads = []
|
||||
for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
|
||||
for file_relative_path in list(self.content_manager.contents[inner_path].get("includes", {}).keys()):
|
||||
file_inner_path = content_inner_dir + file_relative_path
|
||||
include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
|
||||
include_threads.append(include_thread)
|
||||
|
@ -262,7 +264,7 @@ class Site(object):
|
|||
def getReachableBadFiles(self):
|
||||
if not self.bad_files:
|
||||
return False
|
||||
return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3]
|
||||
return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3]
|
||||
|
||||
# Retry download bad files
|
||||
def retryBadFiles(self, force=False):
|
||||
|
@ -272,7 +274,7 @@ class Site(object):
|
|||
content_inner_paths = []
|
||||
file_inner_paths = []
|
||||
|
||||
for bad_file, tries in self.bad_files.items():
|
||||
for bad_file, tries in list(self.bad_files.items()):
|
||||
if force or random.randint(0, min(40, tries)) < 4: # Larger number tries = less likely to check every 15min
|
||||
if bad_file.endswith("content.json"):
|
||||
content_inner_paths.append(bad_file)
|
||||
|
@ -286,7 +288,7 @@ class Site(object):
|
|||
self.pooledDownloadFile(file_inner_paths, only_if_bad=True)
|
||||
|
||||
def checkBadFiles(self):
|
||||
for bad_file in self.bad_files.keys():
|
||||
for bad_file in list(self.bad_files.keys()):
|
||||
file_info = self.content_manager.getFileInfo(bad_file)
|
||||
if bad_file.endswith("content.json"):
|
||||
if file_info is False and bad_file != "content.json":
|
||||
|
@ -374,7 +376,7 @@ class Site(object):
|
|||
queried.append(peer)
|
||||
modified_contents = []
|
||||
my_modified = self.content_manager.listModified(since)
|
||||
for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we
|
||||
for inner_path, modified in res["modified_files"].items(): # Check if the peer has newer files than we
|
||||
has_newer = int(modified) > my_modified.get(inner_path, 0)
|
||||
has_older = int(modified) < my_modified.get(inner_path, 0)
|
||||
if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified):
|
||||
|
@ -480,7 +482,7 @@ class Site(object):
|
|||
def redownloadContents(self):
|
||||
# Download all content.json again
|
||||
content_threads = []
|
||||
for inner_path in self.content_manager.contents.keys():
|
||||
for inner_path in list(self.content_manager.contents.keys()):
|
||||
content_threads.append(self.needFile(inner_path, update=True, blocking=False))
|
||||
|
||||
self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
|
||||
|
@ -523,7 +525,7 @@ class Site(object):
|
|||
})
|
||||
if result:
|
||||
break
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.error("Publish error: %s" % Debug.formatException(err))
|
||||
result = {"exception": Debug.formatException(err)}
|
||||
|
||||
|
@ -563,7 +565,7 @@ class Site(object):
|
|||
peers = set(peers)
|
||||
|
||||
self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." % (
|
||||
inner_path, limit, len(self.peers), num_connected_peers, diffs.keys(), float(len(str(diffs))) / 1024
|
||||
inner_path, limit, len(self.peers), num_connected_peers, list(diffs.keys()), float(len(str(diffs))) / 1024
|
||||
))
|
||||
|
||||
if not peers:
|
||||
|
@ -631,8 +633,8 @@ class Site(object):
|
|||
)
|
||||
|
||||
# Copy files
|
||||
for content_inner_path, content in self.content_manager.contents.items():
|
||||
file_relative_paths = content.get("files", {}).keys()
|
||||
for content_inner_path, content in list(self.content_manager.contents.items()):
|
||||
file_relative_paths = list(content.get("files", {}).keys())
|
||||
|
||||
# Sign content.json at the end to make sure every file is included
|
||||
file_relative_paths.sort()
|
||||
|
@ -812,7 +814,7 @@ class Site(object):
|
|||
self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers)))
|
||||
|
||||
if connected < need: # Need more than we have
|
||||
for peer in self.peers.values():
|
||||
for peer in list(self.peers.values()):
|
||||
if not peer.connection or not peer.connection.connected: # No peer connection or disconnected
|
||||
peer.pex() # Initiate peer exchange
|
||||
if peer.connection and peer.connection.connected:
|
||||
|
@ -831,7 +833,7 @@ class Site(object):
|
|||
|
||||
# Return: Probably peers verified to be connectable recently
|
||||
def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True):
|
||||
peers = self.peers.values()
|
||||
peers = list(self.peers.values())
|
||||
found = []
|
||||
for peer in peers:
|
||||
if peer.key.endswith(":0"):
|
||||
|
@ -874,7 +876,7 @@ class Site(object):
|
|||
# Add random peers
|
||||
need_more = need_num - len(found)
|
||||
found_more = sorted(
|
||||
self.peers.values()[0:need_more * 50],
|
||||
list(self.peers.values())[0:need_more * 50],
|
||||
key=lambda peer: peer.reputation,
|
||||
reverse=True
|
||||
)[0:need_more * 2]
|
||||
|
@ -906,7 +908,7 @@ class Site(object):
|
|||
|
||||
# Cleanup probably dead peers and close connection if too much
|
||||
def cleanupPeers(self, peers_protected=[]):
|
||||
peers = self.peers.values()
|
||||
peers = list(self.peers.values())
|
||||
if len(peers) > 20:
|
||||
# Cleanup old peers
|
||||
removed = 0
|
||||
|
@ -1019,7 +1021,7 @@ class Site(object):
|
|||
# Send site status update to websocket clients
|
||||
def updateWebsocket(self, **kwargs):
|
||||
if kwargs:
|
||||
param = {"event": kwargs.items()[0]}
|
||||
param = {"event": list(kwargs.items())[0]}
|
||||
else:
|
||||
param = None
|
||||
for ws in self.websockets:
|
||||
|
|
|
@ -1,17 +1,16 @@
|
|||
import random
|
||||
import time
|
||||
import hashlib
|
||||
import urllib
|
||||
import urllib2
|
||||
import urllib.request
|
||||
import struct
|
||||
import socket
|
||||
import re
|
||||
import collections
|
||||
|
||||
from lib import bencode
|
||||
import bencode
|
||||
from lib.subtl.subtl import UdpTrackerClient
|
||||
from lib.PySocks import socks
|
||||
from lib.PySocks import sockshandler
|
||||
import socks
|
||||
import sockshandler
|
||||
import gevent
|
||||
|
||||
from Plugin import PluginManager
|
||||
|
@ -69,7 +68,7 @@ class SiteAnnouncer(object):
|
|||
back = []
|
||||
# Type of addresses they can reach me
|
||||
if config.trackers_proxy == "disable":
|
||||
for ip_type, opened in self.site.connection_server.port_opened.items():
|
||||
for ip_type, opened in list(self.site.connection_server.port_opened.items()):
|
||||
if opened:
|
||||
back.append(ip_type)
|
||||
if self.site.connection_server.tor_manager.start_onions:
|
||||
|
@ -221,7 +220,7 @@ class SiteAnnouncer(object):
|
|||
if error:
|
||||
self.stats[tracker]["status"] = "error"
|
||||
self.stats[tracker]["time_status"] = time.time()
|
||||
self.stats[tracker]["last_error"] = str(err).decode("utf8", "ignore")
|
||||
self.stats[tracker]["last_error"] = str(error)
|
||||
self.stats[tracker]["time_last_error"] = time.time()
|
||||
self.stats[tracker]["num_error"] += 1
|
||||
self.stats[tracker]["num_request"] += 1
|
||||
|
@ -359,9 +358,9 @@ class SiteAnnouncer(object):
|
|||
try:
|
||||
peer_data = bencode.decode(response)["peers"]
|
||||
response = None
|
||||
peer_count = len(peer_data) / 6
|
||||
peer_count = int(len(peer_data) / 6)
|
||||
peers = []
|
||||
for peer_offset in xrange(peer_count):
|
||||
for peer_offset in range(peer_count):
|
||||
off = 6 * peer_offset
|
||||
peer = peer_data[off:off + 6]
|
||||
addr, port = struct.unpack('!LH', peer)
|
||||
|
@ -379,7 +378,7 @@ class SiteAnnouncer(object):
|
|||
peers = self.site.getConnectedPeers()
|
||||
|
||||
if len(peers) == 0: # Small number of connected peers for this site, connect to any
|
||||
peers = self.site.peers.values()
|
||||
peers = list(self.site.peers.values())
|
||||
need_num = 10
|
||||
|
||||
random.shuffle(peers)
|
||||
|
@ -399,7 +398,7 @@ class SiteAnnouncer(object):
|
|||
|
||||
def updateWebsocket(self, **kwargs):
|
||||
if kwargs:
|
||||
param = {"event": kwargs.items()[0]}
|
||||
param = {"event": list(kwargs.items())[0]}
|
||||
else:
|
||||
param = None
|
||||
|
||||
|
|
|
@ -28,11 +28,11 @@ class SiteManager(object):
|
|||
def load(self, cleanup=True, startup=False):
|
||||
self.log.debug("Loading sites...")
|
||||
self.loaded = False
|
||||
from Site import Site
|
||||
from .Site import Site
|
||||
address_found = []
|
||||
added = 0
|
||||
# Load new adresses
|
||||
for address, settings in json.load(open("%s/sites.json" % config.data_dir)).iteritems():
|
||||
for address, settings in json.load(open("%s/sites.json" % config.data_dir)).items():
|
||||
if address not in self.sites:
|
||||
if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
|
||||
# Root content.json exists, try load site
|
||||
|
@ -40,7 +40,7 @@ class SiteManager(object):
|
|||
try:
|
||||
site = Site(address, settings=settings)
|
||||
site.content_manager.contents.get("content.json")
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.debug("Error loading site %s: %s" % (address, err))
|
||||
continue
|
||||
self.sites[address] = site
|
||||
|
@ -56,7 +56,7 @@ class SiteManager(object):
|
|||
|
||||
# Remove deleted adresses
|
||||
if cleanup:
|
||||
for address in self.sites.keys():
|
||||
for address in list(self.sites.keys()):
|
||||
if address not in address_found:
|
||||
del(self.sites[address])
|
||||
self.log.debug("Removed site: %s" % address)
|
||||
|
@ -93,7 +93,7 @@ class SiteManager(object):
|
|||
data = {}
|
||||
# Generate data file
|
||||
s = time.time()
|
||||
for address, site in self.list().iteritems():
|
||||
for address, site in self.list().items():
|
||||
if recalculate_size:
|
||||
site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site size
|
||||
data[address] = site.settings
|
||||
|
@ -108,7 +108,7 @@ class SiteManager(object):
|
|||
time_write = time.time() - s
|
||||
|
||||
# Remove cache from site settings
|
||||
for address, site in self.list().iteritems():
|
||||
for address, site in self.list().items():
|
||||
site.settings["cache"] = {}
|
||||
|
||||
self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write))
|
||||
|
@ -134,12 +134,12 @@ class SiteManager(object):
|
|||
|
||||
# Return or create site and start download site files
|
||||
def need(self, address, all_file=True, settings=None):
|
||||
from Site import Site
|
||||
from .Site import Site
|
||||
site = self.get(address)
|
||||
if not site: # Site not exist yet
|
||||
self.sites_changed = int(time.time())
|
||||
# Try to find site with differect case
|
||||
for recover_address, recover_site in self.sites.items():
|
||||
for recover_address, recover_site in list(self.sites.items()):
|
||||
if recover_address.lower() == address.lower():
|
||||
return recover_site
|
||||
|
||||
|
|
|
@ -23,7 +23,7 @@ from Translate import translate as _
|
|||
class SiteStorage(object):
|
||||
def __init__(self, site, allow_create=True):
|
||||
self.site = site
|
||||
self.directory = u"%s/%s" % (config.data_dir, self.site.address) # Site data diretory
|
||||
self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
|
||||
self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir
|
||||
self.log = site.log
|
||||
self.db = None # Db class
|
||||
|
@ -59,7 +59,7 @@ class SiteStorage(object):
|
|||
def getDbSchema(self):
|
||||
try:
|
||||
schema = self.loadJson("dbschema.json")
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
raise Exception("dbschema.json is not a valid JSON: %s" % err)
|
||||
return schema
|
||||
|
||||
|
@ -92,7 +92,7 @@ class SiteStorage(object):
|
|||
# Return possible db files for the site
|
||||
def getDbFiles(self):
|
||||
found = 0
|
||||
for content_inner_path, content in self.site.content_manager.contents.iteritems():
|
||||
for content_inner_path, content in self.site.content_manager.contents.items():
|
||||
# content.json file itself
|
||||
if self.isFile(content_inner_path):
|
||||
yield content_inner_path, self.getPath(content_inner_path)
|
||||
|
@ -100,7 +100,7 @@ class SiteStorage(object):
|
|||
self.log.error("[MISSING] %s" % content_inner_path)
|
||||
# Data files in content.json
|
||||
content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
|
||||
for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
|
||||
for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
|
||||
if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"):
|
||||
continue # We only interesed in json files
|
||||
file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
|
||||
|
@ -181,7 +181,7 @@ class SiteStorage(object):
|
|||
self.event_db_busy.get() # Wait for event
|
||||
try:
|
||||
res = self.getDb().execute(query, params)
|
||||
except sqlite3.DatabaseError, err:
|
||||
except sqlite3.DatabaseError as err:
|
||||
if err.__class__.__name__ == "DatabaseError":
|
||||
self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
|
||||
self.rebuildDb()
|
||||
|
@ -240,7 +240,7 @@ class SiteStorage(object):
|
|||
os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after))
|
||||
err = None
|
||||
break
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry))
|
||||
time.sleep(0.1 + retry)
|
||||
if err:
|
||||
|
@ -297,7 +297,7 @@ class SiteStorage(object):
|
|||
self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file))
|
||||
try:
|
||||
self.updateDbFile(inner_path, file)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
|
||||
self.closeDb()
|
||||
|
||||
|
@ -363,9 +363,9 @@ class SiteStorage(object):
|
|||
return self.directory
|
||||
|
||||
if ".." in inner_path:
|
||||
raise Exception(u"File not allowed: %s" % inner_path)
|
||||
raise Exception("File not allowed: %s" % inner_path)
|
||||
|
||||
return u"%s/%s" % (self.directory, inner_path)
|
||||
return "%s/%s" % (self.directory, inner_path)
|
||||
|
||||
# Get site dir relative path
|
||||
def getInnerPath(self, path):
|
||||
|
@ -375,7 +375,7 @@ class SiteStorage(object):
|
|||
if path.startswith(self.directory):
|
||||
inner_path = path[len(self.directory) + 1:]
|
||||
else:
|
||||
raise Exception(u"File not allowed: %s" % path)
|
||||
raise Exception("File not allowed: %s" % path)
|
||||
return inner_path
|
||||
|
||||
# Verify all files sha512sum using content.json
|
||||
|
@ -390,7 +390,7 @@ class SiteStorage(object):
|
|||
self.log.debug("VerifyFile content.json not exists")
|
||||
self.site.needFile("content.json", update=True) # Force update to fix corrupt file
|
||||
self.site.content_manager.loadContent() # Reload content.json
|
||||
for content_inner_path, content in self.site.content_manager.contents.items():
|
||||
for content_inner_path, content in list(self.site.content_manager.contents.items()):
|
||||
back["num_content"] += 1
|
||||
i += 1
|
||||
if i % 50 == 0:
|
||||
|
@ -400,7 +400,7 @@ class SiteStorage(object):
|
|||
self.log.debug("[MISSING] %s" % content_inner_path)
|
||||
bad_files.append(content_inner_path)
|
||||
|
||||
for file_relative_path in content.get("files", {}).keys():
|
||||
for file_relative_path in list(content.get("files", {}).keys()):
|
||||
back["num_file"] += 1
|
||||
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
||||
file_inner_path = file_inner_path.strip("/") # Strip leading /
|
||||
|
@ -418,7 +418,7 @@ class SiteStorage(object):
|
|||
else:
|
||||
try:
|
||||
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
ok = False
|
||||
|
||||
if not ok:
|
||||
|
@ -430,7 +430,7 @@ class SiteStorage(object):
|
|||
# Optional files
|
||||
optional_added = 0
|
||||
optional_removed = 0
|
||||
for file_relative_path in content.get("files_optional", {}).keys():
|
||||
for file_relative_path in list(content.get("files_optional", {}).keys()):
|
||||
back["num_optional"] += 1
|
||||
file_node = content["files_optional"][file_relative_path]
|
||||
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
||||
|
@ -451,7 +451,7 @@ class SiteStorage(object):
|
|||
else:
|
||||
try:
|
||||
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
ok = False
|
||||
|
||||
if ok:
|
||||
|
@ -475,7 +475,7 @@ class SiteStorage(object):
|
|||
)
|
||||
|
||||
self.site.content_manager.contents.db.processDelayed()
|
||||
time.sleep(0.0001) # Context switch to avoid gevent hangs
|
||||
time.sleep(0.001) # Context switch to avoid gevent hangs
|
||||
return back
|
||||
|
||||
# Check and try to fix site files integrity
|
||||
|
@ -497,15 +497,15 @@ class SiteStorage(object):
|
|||
def deleteFiles(self):
|
||||
self.log.debug("Deleting files from content.json...")
|
||||
files = [] # Get filenames
|
||||
for content_inner_path in self.site.content_manager.contents.keys():
|
||||
for content_inner_path in list(self.site.content_manager.contents.keys()):
|
||||
content = self.site.content_manager.contents.get(content_inner_path, {})
|
||||
files.append(content_inner_path)
|
||||
# Add normal files
|
||||
for file_relative_path in content.get("files", {}).keys():
|
||||
for file_relative_path in list(content.get("files", {}).keys()):
|
||||
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
||||
files.append(file_inner_path)
|
||||
# Add optional files
|
||||
for file_relative_path in content.get("files_optional", {}).keys():
|
||||
for file_relative_path in list(content.get("files_optional", {}).keys()):
|
||||
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
||||
files.append(file_inner_path)
|
||||
|
||||
|
@ -518,7 +518,7 @@ class SiteStorage(object):
|
|||
db_path = self.getPath(schema["db_file"])
|
||||
if os.path.isfile(db_path):
|
||||
os.unlink(db_path)
|
||||
except Exception, err:
|
||||
except Exception as err:
|
||||
self.log.error("Db file delete error: %s" % err)
|
||||
|
||||
for inner_path in files:
|
||||
|
@ -528,8 +528,8 @@ class SiteStorage(object):
|
|||
try:
|
||||
os.unlink(path)
|
||||
break
|
||||
except Exception, err:
|
||||
self.log.error(u"Error removing %s: %s, try #%s" % (inner_path, err, retry))
|
||||
except Exception as err:
|
||||
self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry))
|
||||
time.sleep(float(retry) / 10)
|
||||
self.onUpdated(inner_path, False)
|
||||
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
from Site import Site
|
||||
from SiteStorage import SiteStorage
|
||||
from SiteAnnouncer import SiteAnnouncer
|
||||
from .Site import Site
|
||||
from .SiteStorage import SiteStorage
|
||||
from .SiteAnnouncer import SiteAnnouncer
|
||||
|
|
|
@ -8,7 +8,7 @@ import socket
|
|||
import ssl
|
||||
sys.path.append(os.path.abspath("..")) # Imports relative to src dir
|
||||
|
||||
import cStringIO as StringIO
|
||||
import io as StringIO
|
||||
import gevent
|
||||
|
||||
from gevent.server import StreamServer
|
||||
|
@ -46,8 +46,8 @@ def handle(sock_raw, addr):
|
|||
)
|
||||
else:
|
||||
sock.sendall(data)
|
||||
except Exception, err:
|
||||
print err
|
||||
except Exception as err:
|
||||
print(err)
|
||||
try:
|
||||
sock.shutdown(gevent.socket.SHUT_WR)
|
||||
sock.close()
|
||||
|
@ -102,7 +102,7 @@ def getData():
|
|||
total_num += 1
|
||||
total_bytes += buff.tell()
|
||||
if not data:
|
||||
print "No data"
|
||||
print("No data")
|
||||
|
||||
sock.shutdown(gevent.socket.SHUT_WR)
|
||||
sock.close()
|
||||
|
@ -119,8 +119,8 @@ def info():
|
|||
else:
|
||||
memory_info = process.get_memory_info
|
||||
while 1:
|
||||
print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s,
|
||||
print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)
|
||||
print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ')
|
||||
print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20))
|
||||
time.sleep(1)
|
||||
|
||||
gevent.spawn(info)
|
||||
|
@ -132,7 +132,7 @@ for test in range(1):
|
|||
gevent.joinall(clients)
|
||||
|
||||
|
||||
print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s
|
||||
print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s)
|
||||
|
||||
# Separate client/server process:
|
||||
# 10*10*100:
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
class Spy:
|
||||
def __init__(self, obj, func_name):
|
||||
self.obj = obj
|
||||
self.func_name = func_name
|
||||
self.__name__ = func_name
|
||||
self.func_original = getattr(self.obj, func_name)
|
||||
self.calls = []
|
||||
|
||||
|
@ -10,11 +10,11 @@ class Spy:
|
|||
call = dict(enumerate(args, 1))
|
||||
call[0] = cls
|
||||
call.update(kwargs)
|
||||
print "Logging", call
|
||||
print("Logging", call)
|
||||
self.calls.append(call)
|
||||
return self.func_original(cls, *args, **kwargs)
|
||||
setattr(self.obj, self.func_name, loggedFunc)
|
||||
setattr(self.obj, self.__name__, loggedFunc)
|
||||
return self.calls
|
||||
|
||||
def __exit__(self, *args, **kwargs):
|
||||
setattr(self.obj, self.func_name, self.func_original)
|
||||
setattr(self.obj, self.__name__, self.func_original)
|
|
@ -1,6 +1,6 @@
|
|||
import json
|
||||
import time
|
||||
from cStringIO import StringIO
|
||||
import io
|
||||
|
||||
import pytest
|
||||
|
||||
|
@ -52,7 +52,7 @@ class TestContent:
|
|||
|
||||
# Normal data
|
||||
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
|
||||
data = StringIO(json.dumps(data_dict))
|
||||
data = io.StringIO(json.dumps(data_dict))
|
||||
assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
|
||||
# Reset
|
||||
del data_dict["signs"]
|
||||
|
@ -60,7 +60,7 @@ class TestContent:
|
|||
# Too large
|
||||
data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json
|
||||
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
|
||||
data = StringIO(json.dumps(data_dict))
|
||||
data = io.StringIO(json.dumps(data_dict))
|
||||
with pytest.raises(VerifyError) as err:
|
||||
site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
|
||||
assert "Include too large" in str(err)
|
||||
|
@ -72,7 +72,7 @@ class TestContent:
|
|||
# Not allowed file
|
||||
data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"]
|
||||
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
|
||||
data = StringIO(json.dumps(data_dict))
|
||||
data = io.StringIO(json.dumps(data_dict))
|
||||
with pytest.raises(VerifyError) as err:
|
||||
site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
|
||||
assert "File not allowed" in str(err)
|
||||
|
@ -83,7 +83,7 @@ class TestContent:
|
|||
|
||||
# Should work again
|
||||
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
|
||||
data = StringIO(json.dumps(data_dict))
|
||||
data = io.StringIO(json.dumps(data_dict))
|
||||
assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
|
||||
|
||||
@pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])
|
||||
|
@ -166,7 +166,7 @@ class TestContent:
|
|||
data_dict["signs"] = {
|
||||
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
|
||||
}
|
||||
data = StringIO(json.dumps(data_dict))
|
||||
data = io.StringIO(json.dumps(data_dict))
|
||||
with pytest.raises(VerifyError) as err:
|
||||
site.content_manager.verifyFile(inner_path, data, ignore_same=False)
|
||||
assert "Wrong site address" in str(err)
|
||||
|
@ -178,7 +178,7 @@ class TestContent:
|
|||
data_dict["signs"] = {
|
||||
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
|
||||
}
|
||||
data = StringIO(json.dumps(data_dict))
|
||||
data = io.StringIO(json.dumps(data_dict))
|
||||
with pytest.raises(VerifyError) as err:
|
||||
site.content_manager.verifyFile(inner_path, data, ignore_same=False)
|
||||
assert "Wrong inner_path" in str(err)
|
||||
|
@ -190,7 +190,7 @@ class TestContent:
|
|||
data_dict["signs"] = {
|
||||
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
|
||||
}
|
||||
data = StringIO(json.dumps(data_dict))
|
||||
data = io.StringIO(json.dumps(data_dict))
|
||||
assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
|
||||
|
||||
def testVerifyInnerPath(self, site):
|
||||
|
@ -206,7 +206,7 @@ class TestContent:
|
|||
data_dict["signs"] = {
|
||||
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
|
||||
}
|
||||
data = StringIO(json.dumps(data_dict))
|
||||
data = io.StringIO(json.dumps(data_dict))
|
||||
assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
|
||||
|
||||
for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg"]:
|
||||
|
@ -218,7 +218,7 @@ class TestContent:
|
|||
data_dict["signs"] = {
|
||||
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
|
||||
}
|
||||
data = StringIO(json.dumps(data_dict))
|
||||
data = io.StringIO(json.dumps(data_dict))
|
||||
with pytest.raises(VerifyError) as err:
|
||||
site.content_manager.verifyFile(inner_path, data, ignore_same=False)
|
||||
assert "Invalid relative path" in str(err)
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import json
|
||||
from cStringIO import StringIO
|
||||
from io import StringIO
|
||||
|
||||
import pytest
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import cStringIO as StringIO
|
||||
import io
|
||||
|
||||
|
||||
class TestDb:
|
||||
|
@ -63,11 +63,11 @@ class TestDb:
|
|||
# Large ammount of IN values
|
||||
assert db.execute(
|
||||
"SELECT COUNT(*) AS num FROM test WHERE ?",
|
||||
{"not__test_id": range(2, 3000)}
|
||||
{"not__test_id": list(range(2, 3000))}
|
||||
).fetchone()["num"] == 2
|
||||
assert db.execute(
|
||||
"SELECT COUNT(*) AS num FROM test WHERE ?",
|
||||
{"test_id": range(50, 3000)}
|
||||
{"test_id": list(range(50, 3000))}
|
||||
).fetchone()["num"] == 50
|
||||
|
||||
assert db.execute(
|
||||
|
@ -103,7 +103,7 @@ class TestDb:
|
|||
|
||||
|
||||
def testUpdateJson(self, db):
|
||||
f = StringIO.StringIO()
|
||||
f = io.StringIO()
|
||||
f.write("""
|
||||
{
|
||||
"test": [
|
||||
|
@@ -118,7 +118,7 @@ class TestDb:

     def testUnsafePattern(self, db):
         db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]} # Only repetition of . supported
-        f = StringIO.StringIO()
+        f = io.StringIO()
         f.write("""
         {
             "test": [
@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io

 from util import Diff
@@ -31,19 +31,19 @@ class TestDiff:
         ) == [("-", 11)]

     def testDiffLimit(self):
-        old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix")
-        new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix")
+        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
+        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
         actions = Diff.diff(list(old_f), list(new_f), limit=1024)
         assert actions

-        old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix")
-        new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix"*1024)
+        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
+        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix"*1024)
         actions = Diff.diff(list(old_f), list(new_f), limit=1024)
         assert actions is False

     def testPatch(self):
-        old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix")
-        new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix")
+        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
+        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
         actions = Diff.diff(
             list(old_f),
             list(new_f)
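The Diff tests now feed byte strings: io.BytesIO(b"...") replaces the Python 2 StringIO, and iterating the buffer with list() yields bytes lines rather than str lines. A small standard-library-only sketch of that iteration behaviour, not part of the commit:

    import io

    old_f = io.BytesIO(b"one\ntwo\nthree\n")
    # Iterating a BytesIO splits on newlines and keeps them, producing bytes objects
    assert list(old_f) == [b"one\n", b"two\n", b"three\n"]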
@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io

 import pytest
 import time
@@ -20,10 +20,10 @@ class TestFileRequest:

         # Normal request
         response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
-        assert "sign" in response["body"]
+        assert b"sign" in response["body"]

         response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")})
-        assert "sign" in response["body"]
+        assert b"sign" in response["body"]

         # Invalid file
         response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
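The getFile response body is raw file data, which is bytes in Python 3, so the assertions switch from "sign" to b"sign"; checking a str needle inside a bytes haystack is a TypeError rather than a quiet mismatch. Illustrative only, the body below is a stand-in, not real protocol data:

    body = b'{"sign": "..."}'      # stand-in for a response body
    assert b"sign" in body          # bytes pattern against a bytes payload

    raised = False
    try:
        "sign" in body              # str pattern against bytes raises TypeError in Python 3
    except TypeError:
        raised = True
    assert raised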
@@ -57,25 +57,25 @@ class TestFileRequest:
         connection = client.getConnection(file_server.ip, 1544)
         file_server.sites[site.address] = site

-        buff = StringIO.StringIO()
+        buff = io.BytesIO()
         response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
         assert "stream_bytes" in response
-        assert "sign" in buff.getvalue()
+        assert b"sign" in buff.getvalue()

         # Invalid file
-        buff = StringIO.StringIO()
+        buff = io.BytesIO()
         response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
         assert "File read error" in response["error"]

         # Location over size
-        buff = StringIO.StringIO()
+        buff = io.BytesIO()
         response = connection.request(
             "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
         )
         assert "File read error" in response["error"]

         # Stream from parent dir
-        buff = StringIO.StringIO()
+        buff = io.BytesIO()
         response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
         assert "File read error" in response["error"]
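Here the buffer is on the receiving end: streamFile writes the downloaded chunks into the supplied file object, so the sink becomes io.BytesIO() and its getvalue() returns bytes, matching the b"sign" check above. A stand-alone sketch of the same pattern; write_chunks is a made-up helper, not ZeroNet API:

    import io

    def write_chunks(out, chunks):
        # hypothetical stand-in for a streaming download loop
        for chunk in chunks:
            out.write(chunk)

    buff = io.BytesIO()
    write_chunks(buff, [b'{"sign"', b': "..."}'])
    assert b"sign" in buff.getvalue()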
@@ -1,12 +1,12 @@
 import time
-from cStringIO import StringIO
+import io

 import pytest

 from File import FileServer
 from File import FileRequest
 from Crypt import CryptHash
-import Spy
+from . import Spy


 @pytest.mark.usefixtures("resetSettings")
@@ -43,17 +43,17 @@ class TestPeer:

         # Testing streamFile
         buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True)
-        assert "sign" in buff.getvalue()
+        assert b"sign" in buff.getvalue()

         # Testing getFile
         buff = peer_file_server.getFile(site_temp.address, "content.json")
-        assert "sign" in buff.getvalue()
+        assert b"sign" in buff.getvalue()

         connection.close()
         client.stop()

     def testHashfield(self, site):
-        sample_hash = site.content_manager.contents["content.json"]["files_optional"].values()[0]["sha512"]
+        sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"]

         site.storage.verifyFiles(quick_check=True) # Find what optional files we have
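testHashfield picks the first optional file's hash, and in Python 3 dict.values() returns a view that cannot be indexed, hence the list() wrapper. For example, with a plain dict and made-up content, nothing ZeroNet-specific:

    files_optional = {"img/logo.png": {"sha512": "abc123"}}   # sample entry, not from the repo
    values = files_optional.values()
    # values[0] would raise TypeError: 'dict_values' object is not subscriptable
    first = list(values)[0]
    assert first["sha512"] == "abc123"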
@@ -65,7 +65,7 @@ class TestPeer:
         assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield

         # Add new hash
-        new_hash = CryptHash.sha512sum(StringIO("hello"))
+        new_hash = CryptHash.sha512sum(io.BytesIO(b"hello"))
         assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
         assert site.content_manager.hashfield.appendHash(new_hash)
         assert not site.content_manager.hashfield.appendHash(new_hash) # Don't add second time
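The new hash in the last hunk is computed over an in-memory byte stream, since hashlib digests only accept bytes in Python 3. A minimal file-hashing sketch with the same shape; sha512sum below is a local stand-in, not the CryptHash implementation:

    import hashlib
    import io

    def sha512sum(f, blocksize=65536):
        # Read the stream in chunks and feed bytes into the digest
        h = hashlib.sha512()
        for block in iter(lambda: f.read(blocksize), b""):
            h.update(block)
        return h.hexdigest()

    assert sha512sum(io.BytesIO(b"hello")) == hashlib.sha512(b"hello").hexdigest()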
Some files were not shown because too many files have changed in this diff.