Change to Python3 coding style
parent fc0fe0557b
commit b0b9a4d33c
137 changed files with 910 additions and 913 deletions
@@ -4,7 +4,7 @@ import gevent
 from Plugin import PluginManager
 from Config import config
-import BroadcastServer
+from . import BroadcastServer


 @PluginManager.registerTo("SiteAnnouncer")

@@ -42,7 +42,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):
         if force:  # Probably new site added, clean cache
             self.known_peers = {}

-        for peer_id, known_peer in self.known_peers.items():
+        for peer_id, known_peer in list(self.known_peers.items()):
             if time.time() - known_peer["found"] > 20 * 60:
                 del(self.known_peers[peer_id])
                 self.log.debug("Timeout, removing from known_peers: %s" % peer_id)
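
The `list(...)` wrapper above is the most common mechanical change in this commit: Python 3 dictionary methods return lazy views, and deleting keys while iterating over a live view raises a RuntimeError. A minimal sketch of the failure mode (illustrative values, not from the commit):

    import time

    known_peers = {
        "peer1": {"found": time.time() - 30 * 60},  # stale entry
        "peer2": {"found": time.time()},            # fresh entry
    }

    # Iterating the live view while deleting raises
    # "RuntimeError: dictionary changed size during iteration" on Python 3.
    for peer_id, known_peer in list(known_peers.items()):  # snapshot first
        if time.time() - known_peer["found"] > 20 * 60:
            del known_peers[peer_id]

    assert list(known_peers) == ["peer2"]
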
@@ -78,7 +78,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):

     def actionSiteListRequest(self, sender, params):
         back = []
-        sites = self.server.sites.values()
+        sites = list(self.server.sites.values())

         # Split adresses to group of 100 to avoid UDP size limit
         site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)]

@@ -85,10 +85,10 @@ class TestAnnounce:

     def testPeerDiscover(self, announcer, announcer_remote, site):
         assert announcer.server.peer_id != announcer_remote.server.peer_id
-        assert len(announcer.server.sites.values()[0].peers) == 0
+        assert len(list(announcer.server.sites.values())[0].peers) == 0
         announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port)
         time.sleep(0.1)
-        assert len(announcer.server.sites.values()[0].peers) == 1
+        assert len(list(announcer.server.sites.values())[0].peers) == 1

     def testRecentPeerList(self, announcer, announcer_remote, site):
         assert len(site.peers_recent) == 0
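
Indexing and slicing are the other reason for the wrapper: `dict.values()` returned a list on Python 2 but returns a view on Python 3, and views support neither `[0]` nor `[i:j]`. A quick sketch (hypothetical site addresses):

    sites = {"1SiteAddr": "site_a", "2SiteAddr": "site_b"}

    values = sites.values()          # dict_values view, not a list
    # values[0]                      # TypeError: 'dict_values' object is not subscriptable
    # values[0:100]                  # TypeError: views cannot be sliced either

    assert list(values)[0] == "site_a"            # materialize, then index
    assert list(values)[0:100] == ["site_a", "site_b"]
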
@@ -101,13 +101,13 @@ class TestAnnounce:
         assert len(site.peers) == 1

         # It should update peer without siteListResponse
-        last_time_found = site.peers.values()[0].time_found
+        last_time_found = list(site.peers.values())[0].time_found
         site.peers_recent.clear()
         with Spy.Spy(announcer, "handleMessage") as responses:
             announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
             time.sleep(0.1)
         assert [response[1]["cmd"] for response in responses] == ["discoverResponse"]
         assert len(site.peers_recent) == 1
-        assert site.peers.values()[0].time_found > last_time_found
+        assert list(site.peers.values())[0].time_found > last_time_found

@@ -1 +1 @@
-import AnnounceLocalPlugin
+from . import AnnounceLocalPlugin
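
The one-line `__init__.py` changes that recur throughout the commit follow PEP 328: Python 3 dropped implicit relative imports, so a package module must name its sibling explicitly. A runnable sketch with throwaway, hypothetical names:

    import os
    import sys
    import tempfile

    root = tempfile.mkdtemp()
    os.mkdir(os.path.join(root, "demo_pkg"))
    with open(os.path.join(root, "demo_pkg", "Sibling.py"), "w") as f:
        f.write("VALUE = 42\n")
    with open(os.path.join(root, "demo_pkg", "__init__.py"), "w") as f:
        # "import Sibling" here worked on Python 2 but raises
        # ModuleNotFoundError on Python 3; PEP 328 requires the dot.
        f.write("from . import Sibling\n")

    sys.path.insert(0, root)
    import demo_pkg
    assert demo_pkg.Sibling.VALUE == 42
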
@@ -75,7 +75,7 @@ class TrackerStorage(object):

     def getWorkingTrackers(self, type="shared"):
         trackers = {
-            key: tracker for key, tracker in self.getTrackers(type).iteritems()
+            key: tracker for key, tracker in self.getTrackers(type).items()
             if tracker["time_success"] > time.time() - 60 * 60
         }
         return trackers
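
`iteritems()`, `itervalues()`, and `iterkeys()` were removed in Python 3; plain `items()`, `values()`, and `keys()` now return lazy views, so they are the drop-in replacement everywhere in this commit. Minimal check (illustrative data):

    trackers = {"zero://boot": {"time_success": 0}}

    # trackers.iteritems()               # AttributeError on Python 3
    for key, tracker in trackers.items():  # lazy view, no intermediate list copy
        print(key, tracker)

    view = trackers.items()              # views are live:
    trackers["zero://new"] = {"time_success": 1}
    assert len(view) == 2
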
@@ -95,7 +95,7 @@ class TrackerStorage(object):

         trackers = self.getTrackers()
         self.log.debug("Loaded %s shared trackers" % len(trackers))
-        for address, tracker in trackers.items():
+        for address, tracker in list(trackers.items()):
             tracker["num_error"] = 0
             if not address.startswith("zero://"):
                 del trackers[address]

@@ -144,7 +144,7 @@ class SiteAnnouncerPlugin(object):
             tracker_storage.time_discover = time.time()
             gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers())
         trackers = super(SiteAnnouncerPlugin, self).getTrackers()
-        shared_trackers = tracker_storage.getTrackers("shared").keys()
+        shared_trackers = list(tracker_storage.getTrackers("shared").keys())
         if shared_trackers:
             return trackers + shared_trackers
         else:

@@ -164,7 +164,7 @@ class SiteAnnouncerPlugin(object):

 @PluginManager.registerTo("FileRequest")
 class FileRequestPlugin(object):
     def actionGetTrackers(self, params):
-        shared_trackers = tracker_storage.getWorkingTrackers("shared").keys()
+        shared_trackers = list(tracker_storage.getWorkingTrackers("shared").keys())
         self.response({"trackers": shared_trackers})
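
Wrapping `keys()` before putting it into a response matters for serialization too: a `dict_keys` view is not a list, and serializers reject it. A quick check with the standard json module standing in for the wire format (illustrative data):

    import json

    trackers = {"zero://tracker1": {}, "zero://tracker2": {}}

    # json.dumps(trackers.keys())   # TypeError: dict_keys is not JSON serializable
    payload = json.dumps(list(trackers.keys()))
    assert payload == '["zero://tracker1", "zero://tracker2"]'
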
@@ -12,7 +12,6 @@ class TestAnnounceShare:
         open("%s/trackers.json" % config.data_dir, "w").write("{}")
         tracker_storage = AnnounceSharePlugin.tracker_storage
         tracker_storage.load()
-        print tracker_storage.file_path, config.data_dir
         peer = Peer(file_server.ip, 1544, connection_server=file_server)
         assert peer.request("getTrackers")["trackers"] == []

@@ -1 +1 @@
-import AnnounceSharePlugin
+from . import AnnounceSharePlugin

@@ -119,7 +119,7 @@ class SiteAnnouncerPlugin(object):
                 onion = self.site.connection_server.tor_manager.getOnion(site.address)
                 publickey = self.site.connection_server.tor_manager.getPublickey(onion)
                 if publickey not in request["onion_signs"]:
-                    sign = CryptRsa.sign(res["onion_sign_this"], self.site.connection_server.tor_manager.getPrivatekey(onion))
+                    sign = CryptRsa.sign(res["onion_sign_this"].encode("utf8"), self.site.connection_server.tor_manager.getPrivatekey(onion))
                     request["onion_signs"][publickey] = sign
             res = tracker_peer.request("announce", request)
             if not res or "onion_sign_this" in res:
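
The `.encode("utf8")` added before signing reflects Python 3's strict str/bytes split: cryptographic primitives operate on bytes only, whereas Python 2 silently accepted str. Illustration with the standard library (hashlib stands in for the RSA signer):

    import hashlib

    challenge = "sign-me"   # str (Unicode) on Python 3

    # hashlib.sha256(challenge)     # TypeError: Unicode-objects must be encoded
    digest = hashlib.sha256(challenge.encode("utf8")).hexdigest()
    assert len(digest) == 64
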
@@ -1 +1 @@
-import AnnounceZeroPlugin
+from . import AnnounceZeroPlugin

@@ -4,7 +4,7 @@ import array

 def packPiecefield(data):
     res = []
     if not data:
-        return array.array("H", "")
+        return array.array("H", b"")

     if data[0] == "0":
         res.append(0)

@@ -48,7 +48,7 @@ class BigfilePiecefield(object):
     __slots__ = ["data"]

     def __init__(self):
-        self.data = ""
+        self.data = b""

     def fromstring(self, s):
         self.data = s
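
Empty-string defaults become `b""` because the piecefield data is raw binary: on Python 3 `""` is text and cannot be mixed with bytes. A small sketch:

    data = b""               # binary accumulator, as in the piecefield classes

    data += b"\x01\x00"      # bytes + bytes is fine
    # data += ""             # TypeError: can't concat str to bytes

    assert isinstance(data, bytes) and data[0] == 1
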
@@ -71,7 +71,7 @@ class BigfilePiecefield(object):
     def __setitem__(self, key, value):
         data = self.data
         if len(data) < key:
-            data = data.ljust(key+1, "0")
+            data = data.ljust(key + 1, "0")
         data = data[:key] + str(int(value)) + data[key + 1:]
         self.data = data

@@ -80,7 +80,7 @@ class BigfilePiecefieldPacked(object):
     __slots__ = ["data"]

     def __init__(self):
-        self.data = ""
+        self.data = b""

     def fromstring(self, data):
         self.data = packPiecefield(data).tostring()

@@ -103,7 +103,7 @@ class BigfilePiecefieldPacked(object):
     def __setitem__(self, key, value):
         data = self.tostring()
         if len(data) < key:
-            data = data.ljust(key+1, "0")
+            data = data.ljust(key + 1, "0")
         data = data[:key] + str(int(value)) + data[key + 1:]
         self.fromstring(data)

@@ -116,7 +116,7 @@ if __name__ == "__main__":
     meminfo = psutil.Process(os.getpid()).memory_info

     for storage in [BigfilePiecefieldPacked, BigfilePiecefield]:
-        print "-- Testing storage: %s --" % storage
+        print("-- Testing storage: %s --" % storage)
         m = meminfo()[0]
         s = time.time()
         piecefields = {}
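
print changes from a statement to a builtin function on Python 3, which is why every diagnostic line in this benchmark gains parentheses. Sketch:

    # Python 2: print "value: %s" % 42     (a statement; SyntaxError on Python 3)
    print("value: %s" % 42)                # Python 3: a plain function call

    log = print                            # being a function, it is a first-class value
    log("benchmark done")
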
@@ -125,34 +125,34 @@ if __name__ == "__main__":
             piecefield.fromstring(testdata[:i] + "0" + testdata[i + 1:])
             piecefields[i] = piecefield

-        print "Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
+        print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))

         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             val = piecefield[1000]

-        print "Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
+        print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))

         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             piecefield[1000] = True

-        print "Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
+        print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))

         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             packed = piecefield.pack()

-        print "Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed))
+        print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)))

         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             piecefield.unpack(packed)

-        print "Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
+        print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))

         piecefields = {}

@@ -5,7 +5,6 @@ import shutil
 import collections
 import math

 import msgpack
 import gevent
 import gevent.lock

@@ -15,7 +14,7 @@ from Crypt import CryptHash
 from lib import merkletools
 from util import helper
 import util
-from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
+from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked


 # We can only import plugin host clases after the plugins are loaded

@@ -61,7 +60,7 @@ class UiRequestPlugin(object):
             )

             if len(piecemap_info["sha512_pieces"]) == 1:  # Small file, don't split
-                hash = piecemap_info["sha512_pieces"][0].encode("hex")
+                hash = piecemap_info["sha512_pieces"][0].hex()
                 hash_id = site.content_manager.hashfield.getHashId(hash)
                 site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True)
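
`str.encode("hex")` relied on Python 2's codec-based str-to-str transforms, which are gone; bytes grew a dedicated `.hex()` method instead (with `bytes.fromhex()` for the reverse). Sketch:

    digest = b"\xa7\x3a\xba\xd9"

    # digest.encode("hex")        # Python 2 codec trick; fails on Python 3
    assert digest.hex() == "a73abad9"
    assert bytes.fromhex("a73abad9") == digest   # the reverse direction
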
@@ -178,7 +177,7 @@ class UiWebsocketPlugin(object):
         self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True)
         try:
             self.site.storage.delete(piecemap_inner_path)
-        except Exception, err:
+        except Exception as err:
             self.log.error("File %s delete error: %s" % (piecemap_inner_path, err))

         return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path)
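
The comma form of except was removed by PEP 3110 because it was ambiguous with catching a tuple of exception types; `as` is the only Python 3 spelling:

    try:
        raise OSError("disk full")
    # except Exception, err:      # old syntax: SyntaxError on Python 3
    except Exception as err:      # PEP 3110 spelling (also valid on Python 2.6+)
        print("delete error: %s" % err)
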
@@ -324,7 +323,7 @@ class ContentManagerPlugin(object):

     def verifyPiece(self, inner_path, pos, piece):
         piecemap = self.getPiecemap(inner_path)
-        piece_i = pos / piecemap["piece_size"]
+        piece_i = int(pos / piecemap["piece_size"])
         if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
             raise VerifyError("Invalid hash")
         return True
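
On Python 3, `/` is always true division and returns a float, so an index computed this way must be forced back to int; `pos // piece_size` (floor division) would be the equivalent idiom. Sketch:

    pos, piece_size = 3 * 1024 * 1024, 1024 * 1024

    assert pos / piece_size == 3.0        # true division: always a float
    assert int(pos / piece_size) == 3     # the commit's fix
    assert pos // piece_size == 3         # equivalent floor-division idiom
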
@@ -345,7 +344,7 @@ class ContentManagerPlugin(object):
         file_info = self.getFileInfo(inner_path)

         # Mark piece downloaded
-        piece_i = pos_from / file_info["piece_size"]
+        piece_i = int(pos_from / file_info["piece_size"])
         self.site.storage.piecefields[file_info["sha512"]][piece_i] = True

         # Only add to site size on first request

@@ -368,7 +367,7 @@ class ContentManagerPlugin(object):
             del self.site.storage.piecefields[sha512]

         # Also remove other pieces of the file from download queue
-        for key in self.site.bad_files.keys():
+        for key in list(self.site.bad_files.keys()):
             if key.startswith(inner_path + "|"):
                 del self.site.bad_files[key]
         self.site.worker_manager.removeSolvedFileTasks()

@@ -381,9 +380,9 @@ class SiteStoragePlugin(object):
         super(SiteStoragePlugin, self).__init__(*args, **kwargs)
         self.piecefields = collections.defaultdict(BigfilePiecefield)
         if "piecefields" in self.site.settings.get("cache", {}):
-            for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").iteritems():
+            for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
                 if piecefield_packed:
-                    self.piecefields[sha512].unpack(piecefield_packed.decode("base64"))
+                    self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed))
             self.site.settings["cache"]["piecefields"] = {}

     def createSparseFile(self, inner_path, size, sha512=None):
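
Like the hex codec, `"...".decode("base64")` was a Python 2 str-to-str transform; Python 3 routes this through the base64 module (the commit pairs it with `base64.b64encode` when saving, shown further down). Sketch:

    import base64

    packed = b"\x00\x01\x02"

    encoded = base64.b64encode(packed).decode("utf8")  # bytes -> ASCII str for JSON settings
    assert base64.b64decode(encoded) == packed         # replaces "...".decode("base64")
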
@@ -486,7 +485,7 @@ class BigFile(object):
         requests = []
         # Request all required blocks
         while 1:
-            piece_i = pos / self.piece_size
+            piece_i = int(pos / self.piece_size)
             if piece_i * self.piece_size >= read_until:
                 break
             pos_from = piece_i * self.piece_size

@@ -503,7 +502,7 @@ class BigFile(object):
         prebuffer_until = min(self.size, read_until + self.prebuffer)
         priority = 3
         while 1:
-            piece_i = pos / self.piece_size
+            piece_i = int(pos / self.piece_size)
             if piece_i * self.piece_size >= prebuffer_until:
                 break
             pos_from = piece_i * self.piece_size

@@ -565,7 +564,7 @@ class WorkerManagerPlugin(object):

             inner_path, file_range = inner_path.split("|")
             pos_from, pos_to = map(int, file_range.split("-"))
-            task["piece_i"] = pos_from / file_info["piece_size"]
+            task["piece_i"] = int(pos_from / file_info["piece_size"])
             task["sha512"] = file_info["sha512"]
         else:
             if inner_path in self.site.bad_files:

@@ -601,10 +600,10 @@ class WorkerManagerPlugin(object):
 class FileRequestPlugin(object):
     def isReadable(self, site, inner_path, file, pos):
         # Peek into file
-        if file.read(10) == "\0" * 10:
+        if file.read(10) == b"\0" * 10:
             # Looks empty, but makes sures we don't have that piece
             file_info = site.content_manager.getFileInfo(inner_path)
-            piece_i = pos / file_info["piece_size"]
+            piece_i = int(pos / file_info["piece_size"])
             if not site.storage.piecefields[file_info["sha512"]][piece_i]:
                 return False
         # Seek back to position we want to read

@@ -622,7 +621,7 @@ class FileRequestPlugin(object):
         if not peer.connection:  # Just added
             peer.connect(self.connection)  # Assign current connection to peer

-        piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.iteritems()}
+        piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()}
         self.response({"piecefields_packed": piecefields_packed})

     def actionSetPiecefields(self, params):

@@ -638,7 +637,7 @@ class FileRequestPlugin(object):
             peer.connect(self.connection)

         peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
-        for sha512, piecefield_packed in params["piecefields_packed"].iteritems():
+        for sha512, piecefield_packed in params["piecefields_packed"].items():
             peer.piecefields[sha512].unpack(piecefield_packed)
         site.settings["has_bigfile"] = True

@@ -673,7 +672,7 @@ class PeerPlugin(object):

         self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
         try:
-            for sha512, piecefield_packed in res["piecefields_packed"].iteritems():
+            for sha512, piecefield_packed in res["piecefields_packed"].items():
                 self.piecefields[sha512].unpack(piecefield_packed)
         except Exception as err:
             self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err))

@@ -720,7 +719,7 @@ class SitePlugin(object):
     def getSettingsCache(self):
         back = super(SitePlugin, self).getSettingsCache()
         if self.storage.piecefields:
-            back["piecefields"] = {sha512: piecefield.pack().encode("base64") for sha512, piecefield in self.storage.piecefields.iteritems()}
+            back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()}
         return back

     def needFile(self, inner_path, *args, **kwargs):

@@ -1,5 +1,5 @@
 import time
-from cStringIO import StringIO
+import io

 import pytest
 import msgpack
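
`cStringIO.StringIO` disappeared in Python 3; the io module separates text buffers (`io.StringIO`) from byte buffers (`io.BytesIO`), and since these tests feed binary pieces, they switch to `io.BytesIO`. Sketch:

    import io

    piece = io.BytesIO(b"\x00" * 16)   # byte buffer, replaces cStringIO.StringIO
    assert piece.read(4) == b"\x00" * 4

    text = io.StringIO("hello")        # the text-mode counterpart
    assert text.read() == "hello"
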
@@ -40,7 +40,7 @@ class TestBigfile:
         piecemap = msgpack.unpack(site.storage.open(file_node["piecemap"], "rb"))["optional.any.iso"]
         assert len(piecemap["sha512_pieces"]) == 10
         assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1]
-        assert piecemap["sha512_pieces"][0].encode("hex") == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
+        assert piecemap["sha512_pieces"][0].hex() == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"

     def testVerifyPiece(self, site):
         inner_path = self.createBigfile(site)

@@ -48,7 +48,7 @@ class TestBigfile:
         # Verify all 10 piece
         f = site.storage.open(inner_path, "rb")
         for i in range(10):
-            piece = StringIO(f.read(1024 * 1024))
+            piece = io.BytesIO(f.read(1024 * 1024))
             piece.seek(0)
             site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
         f.close()

@@ -57,7 +57,7 @@ class TestBigfile:
         with pytest.raises(VerifyError) as err:
             i = 1
             f = site.storage.open(inner_path, "rb")
-            piece = StringIO(f.read(1024 * 1024))
+            piece = io.BytesIO(f.read(1024 * 1024))
             f.close()
             site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
         assert "Invalid hash" in str(err)

@@ -70,19 +70,19 @@ class TestBigfile:

         # Write to file beginning
         s = time.time()
-        f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), "hellostart" * 1024)
+        f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024)
         time_write_start = time.time() - s

         # Write to file end
         s = time.time()
-        f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), "helloend" * 1024)
+        f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024)
         time_write_end = time.time() - s

         # Verify writes
         f = site.storage.open(inner_path)
-        assert f.read(10) == "hellostart"
+        assert f.read(10) == b"hellostart"
         f.seek(99 * 1024 * 1024)
-        assert f.read(8) == "helloend"
+        assert f.read(8) == b"helloend"
         f.close()

         site.storage.delete(inner_path)
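
Files opened in binary mode return bytes on Python 3, so every literal they are compared against gains a `b` prefix; `"hellostart" == b"hellostart"` is simply False, so the old asserts would fail even on correct data. Sketch:

    import tempfile

    with tempfile.TemporaryFile() as f:   # opened in binary mode by default
        f.write(b"hellostart" * 1024)
        f.seek(0)
        chunk = f.read(10)

    assert chunk == b"hellostart"   # bytes match bytes
    assert chunk != "hellostart"    # str and bytes never compare equal on Python 3
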
@@ -105,7 +105,7 @@ class TestBigfile:
         buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))

         assert len(buff.getvalue()) == 1 * 1024 * 1024  # Correct block size
-        assert buff.getvalue().startswith("Test524")  # Correct data
+        assert buff.getvalue().startswith(b"Test524")  # Correct data
         buff.seek(0)
         assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff)  # Correct hash

@@ -147,12 +147,12 @@ class TestBigfile:

         # Verify 0. block not downloaded
         f = site_temp.storage.open(inner_path)
-        assert f.read(10) == "\0" * 10
+        assert f.read(10) == b"\0" * 10
         # Verify 5. and 10. block downloaded
         f.seek(5 * 1024 * 1024)
-        assert f.read(7) == "Test524"
+        assert f.read(7) == b"Test524"
         f.seek(9 * 1024 * 1024)
-        assert f.read(7) == "943---T"
+        assert f.read(7) == b"943---T"

         # Verify hashfield
         assert set(site_temp.content_manager.hashfield) == set([18343, 30970])  # 18343: data/optional.any.iso, 30970: data/optional.any.iso.hashmap.msgpack

@@ -178,14 +178,14 @@ class TestBigfile:
         with site_temp.storage.openBigfile(inner_path) as f:
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)
-                assert f.read(7) == "Test524"
+                assert f.read(7) == b"Test524"

                 f.seek(9 * 1024 * 1024)
-                assert f.read(7) == "943---T"
+                assert f.read(7) == b"943---T"

             assert len(requests) == 4  # 1x peicemap + 1x getpiecefield + 2x for pieces

-            assert set(site_temp.content_manager.hashfield) == set([18343, 30970])
+            assert set(site_temp.content_manager.hashfield) == set([18343, 43727])

             assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
             assert f.sha512 in site_temp.getSettingsCache()["piecefields"]

@@ -193,7 +193,7 @@ class TestBigfile:
             # Test requesting already downloaded
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)
-                assert f.read(7) == "Test524"
+                assert f.read(7) == b"Test524"

                 assert len(requests) == 0

@@ -201,9 +201,9 @@ class TestBigfile:
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)  # We already have this block
                 data = f.read(1024 * 1024 * 3)  # Our read overflow to 6. and 7. block
-                assert data.startswith("Test524")
-                assert data.endswith("Test838-")
-                assert "\0" not in data  # No null bytes allowed
+                assert data.startswith(b"Test524")
+                assert data.endswith(b"Test838-")
+                assert b"\0" not in data  # No null bytes allowed

                 assert len(requests) == 2  # Two block download

@@ -258,11 +258,11 @@ class TestBigfile:
         # Download second block
         with site_temp.storage.openBigfile(inner_path) as f:
             f.seek(1024 * 1024)
-            assert f.read(1024)[0] != "\0"
+            assert f.read(1024)[0:1] != b"\0"

         # Make sure first block not download
         with site_temp.storage.open(inner_path) as f:
-            assert f.read(1024)[0] == "\0"
+            assert f.read(1024)[0:1] == b"\0"

         peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
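
These two asserts change shape, not just prefix: indexing bytes with `[0]` yields an int on Python 3, which can never equal `b"\0"`, while slicing with `[0:1]` keeps the bytes type. Sketch:

    data = b"\0abc"

    assert data[0] == 0         # indexing bytes yields an int on Python 3
    assert data[0] != b"\0"     # int vs bytes: the old assert could never match
    assert data[0:1] == b"\0"   # slicing keeps the bytes type
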
@@ -284,8 +284,8 @@ class TestBigfile:
         s = time.time()
         for i in range(25000):
             site.addPeer(file_server.ip, i)
-        print "%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)  # 0.082s MEM: + 6800KB
-        print site.peers.values()[0].piecefields
+        print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024))  # 0.082s MEM: + 6800KB
+        print(list(site.peers.values())[0].piecefields)

     def testUpdatePiecefield(self, file_server, site, site_temp):
         inner_path = self.createBigfile(site)

@@ -390,16 +390,16 @@ class TestBigfile:
         size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]

         with site_temp.storage.openBigfile(inner_path) as f:
-            assert "\0" not in f.read(1024)
+            assert b"\0" not in f.read(1024)
             assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

         with site_temp.storage.openBigfile(inner_path) as f:
             # Don't count twice
-            assert "\0" not in f.read(1024)
+            assert b"\0" not in f.read(1024)
             assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

             # Add second block
-            assert "\0" not in f.read(1024 * 1024)
+            assert b"\0" not in f.read(1024 * 1024)
             assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

     def testPrebuffer(self, file_server, site, site_temp):

@@ -423,7 +423,7 @@ class TestBigfile:
         with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)
-                assert f.read(7) == "Test524"
+                assert f.read(7) == b"Test524"
             # assert len(requests) == 3  # 1x piecemap + 1x getpiecefield + 1x for pieces
             assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2

@@ -434,7 +434,7 @@ class TestBigfile:

             # No prebuffer beyond end of the file
             f.seek(9 * 1024 * 1024)
-            assert "\0" not in f.read(7)
+            assert b"\0" not in f.read(7)

             assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0

@@ -1,2 +1,2 @@
-import BigfilePlugin
-from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
+from . import BigfilePlugin
+from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked

@@ -29,7 +29,7 @@ class ChartCollector(object):
         sites = file_server.sites
         if not sites:
             return collectors
-        content_db = sites.values()[0].content_manager.contents.db
+        content_db = list(sites.values())[0].content_manager.contents.db

         # Connection stats
         collectors["connection"] = lambda: len(file_server.connections)

@@ -67,8 +67,8 @@ class ChartCollector(object):
         collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()])

         # Peers
-        collectors["peer"] = lambda (peers): len(peers)
-        collectors["peer_onion"] = lambda (peers): len([True for peer in peers if ".onion" in peer])
+        collectors["peer"] = lambda peers: len(peers)
+        collectors["peer_onion"] = lambda peers: len([True for peer in peers if ".onion" in peer])

         # Size
         collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()])
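
`lambda (peers): ...` was Python 2 tuple-parameter unpacking (in its one-element form); PEP 3113 removed the syntax entirely, so Python 3 rejects the parenthesized form as a SyntaxError and the parentheses simply go away. Sketch (hypothetical peer list):

    # Python 2 allowed "lambda (peers): len(peers)" (tuple parameter unpacking);
    # PEP 3113 removed it, so Python 3 rejects the parenthesized form outright.
    peer_count = lambda peers: len(peers)

    assert peer_count(["1.2.3.4:15441", "example.onion:15441"]) == 2
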
@@ -81,21 +81,21 @@ class ChartCollector(object):
         site_collectors = {}

         # Size
-        site_collectors["site_size"] = lambda(site): site.settings.get("size", 0)
-        site_collectors["site_size_optional"] = lambda(site): site.settings.get("size_optional", 0)
-        site_collectors["site_optional_downloaded"] = lambda(site): site.settings.get("optional_downloaded", 0)
-        site_collectors["site_content"] = lambda(site): len(site.content_manager.contents)
+        site_collectors["site_size"] = lambda site: site.settings.get("size", 0)
+        site_collectors["site_size_optional"] = lambda site: site.settings.get("size_optional", 0)
+        site_collectors["site_optional_downloaded"] = lambda site: site.settings.get("optional_downloaded", 0)
+        site_collectors["site_content"] = lambda site: len(site.content_manager.contents)

         # Data transfer
-        site_collectors["site_bytes_recv|change"] = lambda(site): site.settings.get("bytes_recv", 0)
-        site_collectors["site_bytes_sent|change"] = lambda(site): site.settings.get("bytes_sent", 0)
+        site_collectors["site_bytes_recv|change"] = lambda site: site.settings.get("bytes_recv", 0)
+        site_collectors["site_bytes_sent|change"] = lambda site: site.settings.get("bytes_sent", 0)

         # Peers
-        site_collectors["site_peer"] = lambda(site): len(site.peers)
-        site_collectors["site_peer_onion"] = lambda(site): len(
-            [True for peer in site.peers.itervalues() if peer.ip.endswith(".onion")]
+        site_collectors["site_peer"] = lambda site: len(site.peers)
+        site_collectors["site_peer_onion"] = lambda site: len(
+            [True for peer in site.peers.values() if peer.ip.endswith(".onion")]
         )
-        site_collectors["site_peer_connected"] = lambda(site): len([True for peer in site.peers.itervalues() if peer.connection])
+        site_collectors["site_peer_connected"] = lambda site: len([True for peer in site.peers.values() if peer.connection])

         return site_collectors

@@ -109,7 +109,7 @@ class ChartCollector(object):
         if site is None:
             peers = self.getUniquePeers()
         datas = {}
-        for key, collector in collectors.iteritems():
+        for key, collector in collectors.items():
             try:
                 if site:
                     value = collector(site)

@@ -138,7 +138,7 @@ class ChartCollector(object):
         s = time.time()
         datas = self.collectDatas(collectors, last_values["global"])
         values = []
-        for key, value in datas.iteritems():
+        for key, value in datas.items():
             values.append((self.db.getTypeId(key), value, now))
         self.log.debug("Global collectors done in %.3fs" % (time.time() - s))

@@ -154,9 +154,9 @@ class ChartCollector(object):
         now = int(time.time())
         s = time.time()
         values = []
-        for address, site in sites.iteritems():
+        for address, site in sites.items():
             site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site)
-            for key, value in site_datas.iteritems():
+            for key, value in site_datas.items():
                 values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now))
             time.sleep(0.000001)
         self.log.debug("Site collections done in %.3fs" % (time.time() - s))
@@ -6,8 +6,8 @@ import gevent
 from Config import config
 from util import helper
 from Plugin import PluginManager
-from ChartDb import ChartDb
-from ChartCollector import ChartCollector
+from .ChartDb import ChartDb
+from .ChartCollector import ChartCollector

 if "db" not in locals().keys():  # Share on reloads
     db = ChartDb()

@@ -39,7 +39,7 @@ class UiWebsocketPlugin(object):
             if not query.strip().upper().startswith("SELECT"):
                 raise Exception("Only SELECT query supported")
             res = db.execute(query, params)
-        except Exception, err:  # Response the error to client
+        except Exception as err:  # Response the error to client
             self.log.error("ChartDbQuery error: %s" % err)
             return {"error": str(err)}
         # Convert result to dict

@@ -1 +1 @@
-import ChartPlugin
+from . import ChartPlugin

@@ -1,13 +1,13 @@
 import time
 import re
-import cgi
+import html
 import hashlib

 from Plugin import PluginManager
 from Translate import Translate
 from Config import config

-from ContentFilterStorage import ContentFilterStorage
+from .ContentFilterStorage import ContentFilterStorage


 if "_" not in locals():

@@ -39,8 +39,8 @@ class UiWebsocketPlugin(object):
         else:
             self.cmd(
                 "confirm",
-                [_["Hide all content from <b>%s</b>?"] % cgi.escape(cert_user_id), _["Mute"]],
-                lambda (res): self.cbMuteAdd(to, auth_address, cert_user_id, reason)
+                [_["Hide all content from <b>%s</b>?"] % html.escape(cert_user_id), _["Mute"]],
+                lambda res: self.cbMuteAdd(to, auth_address, cert_user_id, reason)
             )

     def cbMuteRemove(self, to, auth_address):
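
`cgi.escape` was deprecated in Python 3 (and removed in 3.8) in favor of `html.escape`; note the replacement escapes quotes by default, which `cgi.escape` only did on request. Sketch (hypothetical user id):

    import html

    cert_user_id = '<b>"bob"</b>@zeroid.bit'

    # cgi.escape escaped quotes only on request and is gone since Python 3.8.
    assert html.escape(cert_user_id) == "&lt;b&gt;&quot;bob&quot;&lt;/b&gt;@zeroid.bit"
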
@@ -55,8 +55,8 @@ class UiWebsocketPlugin(object):
         else:
             self.cmd(
                 "confirm",
-                [_["Unmute <b>%s</b>?"] % cgi.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
-                lambda (res): self.cbMuteRemove(to, auth_address)
+                [_["Unmute <b>%s</b>?"] % html.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
+                lambda res: self.cbMuteRemove(to, auth_address)
             )

     def actionMuteList(self, to):

@@ -101,13 +101,13 @@ class UiWebsocketPlugin(object):
         else:
             content = site.storage.loadJson(inner_path)
             title = _["New shared global content filter: <b>%s</b> (%s sites, %s users)"] % (
-                cgi.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
+                html.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
             )

             self.cmd(
                 "confirm",
                 [title, "Add"],
-                lambda (res): self.cbFilterIncludeAdd(to, res, address, inner_path, description)
+                lambda res: self.cbFilterIncludeAdd(to, res, address, inner_path, description)
             )

     def cbFilterIncludeAdd(self, to, res, address, inner_path, description):

@@ -189,7 +189,7 @@ class UiRequestPlugin(object):
         address = self.server.site_manager.resolveDomain(address)

         if address:
-            address_sha256 = "0x" + hashlib.sha256(address).hexdigest()
+            address_sha256 = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest()
         else:
             address_sha256 = None

@@ -62,7 +62,7 @@ class ContentFilterStorage(object):
                 )
                 continue

-            for key, val in content.iteritems():
+            for key, val in content.items():
                 if type(val) is not dict:
                     continue

@@ -1 +1 @@
-import ContentFilterPlugin
+from . import ContentFilterPlugin

@@ -1,5 +1,5 @@
 import re
-import cgi
+import html
 import copy

 from Plugin import PluginManager

@@ -78,8 +78,8 @@ class UiWebsocketPlugin(object):

         self.cmd(
             "confirm",
-            [_["This site requests <b>read</b> permission to: <b>%s</b>"] % cgi.escape(site_name), button_title],
-            lambda (res): self.cbCorsPermission(to, address)
+            [_["This site requests <b>read</b> permission to: <b>%s</b>"] % html.escape(site_name), button_title],
+            lambda res: self.cbCorsPermission(to, address)
         )

     def cbCorsPermission(self, to, address):

@@ -1 +1 @@
-import CorsPlugin
+from . import CorsPlugin

@@ -43,11 +43,11 @@ def getEcc(privatekey=None):

 def toOpensslPrivatekey(privatekey):
     privatekey_bin = btctools.encode_privkey(privatekey, "bin")
-    return '\x02\xca\x00\x20' + privatekey_bin
+    return b'\x02\xca\x00\x20' + privatekey_bin


 def toOpensslPublickey(publickey):
     publickey_bin = btctools.encode_pubkey(publickey, "bin")
     publickey_bin = publickey_bin[1:]
-    publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:]
+    publickey_openssl = b'\x02\xca\x00 ' + publickey_bin[:32] + b'\x00 ' + publickey_bin[32:]
     return publickey_openssl

@@ -3,9 +3,9 @@ import os

 from Plugin import PluginManager
 from Crypt import CryptBitcoin
-from lib.pybitcointools import bitcoin as btctools
+import lib.pybitcointools as btctools

-import CryptMessage
+from . import CryptMessage


 @PluginManager.registerTo("UiWebsocket")

@@ -1 +1 @@
-import CryptMessagePlugin
+from . import CryptMessagePlugin

@@ -48,7 +48,7 @@ class UiRequestPlugin(object):
         if ".zip/" in path or ".tar.gz/" in path:
             file_obj = None
             path_parts = self.parsePath(path)
-            file_path = u"%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"].decode("utf8"))
+            file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"])
             match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path)
             archive_path, path_within = match.groups()
             if archive_path not in archive_cache:
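
The `u"..."` prefixes and `.decode("utf8")` calls disappear throughout the commit because every Python 3 str is already Unicode; decoding only applies at the bytes boundary. Sketch (hypothetical path):

    inner_path = "data/users/content.json"   # every Python 3 str is already Unicode

    assert inner_path == u"data/users/content.json"   # the u prefix is legal but redundant
    # inner_path.decode("utf8")    # AttributeError: str has no decode()
    assert inner_path.encode("utf8").decode("utf8") == inner_path   # decode belongs to bytes
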
@@ -1 +1 @@
-import FilePackPlugin
+from . import FilePackPlugin

@@ -66,7 +66,7 @@ class UiWebsocketPlugin(object):
         self.cmd(
             "confirm",
             [_["Add <b>%s</b> new site?"] % len(addresses), "Add"],
-            lambda (res): self.cbMergerSiteAdd(to, addresses)
+            lambda res: self.cbMergerSiteAdd(to, addresses)
         )
         self.response(to, "ok")

@@ -102,7 +102,7 @@ class UiWebsocketPlugin(object):
         ret = {}
         if not merger_types:
             return self.response(to, {"error": "Not a merger site"})
-        for address, merged_type in merged_db.iteritems():
+        for address, merged_type in merged_db.items():
             if merged_type not in merger_types:
                 continue  # Site not for us
             if query_site_info:

@@ -215,7 +215,7 @@ class UiWebsocketPlugin(object):
         if not re.match("^[A-Za-z0-9-]+$", merger_type):
             raise Exception("Invalid merger_type: %s" % merger_type)
         merged_sites = []
-        for address, merged_type in merged_db.iteritems():
+        for address, merged_type in merged_db.items():
             if merged_type != merger_type:
                 continue
             site = self.server.sites.get(address)

@@ -253,18 +253,18 @@ class SiteStoragePlugin(object):

         # Not a merger site, that's all
         if not merger_types:
-            raise StopIteration
+            return

         merged_sites = [
             site_manager.sites[address]
-            for address, merged_type in merged_db.iteritems()
+            for address, merged_type in merged_db.items()
             if merged_type in merger_types
         ]
         found = 0
         for merged_site in merged_sites:
             self.log.debug("Loading merged site: %s" % merged_site)
             merged_type = merged_db[merged_site.address]
-            for content_inner_path, content in merged_site.content_manager.contents.iteritems():
+            for content_inner_path, content in merged_site.content_manager.contents.items():
                 # content.json file itself
                 if merged_site.storage.isFile(content_inner_path):  # Missing content.json file
                     merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)
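
Replacing `raise StopIteration` with a bare `return` inside this generator is PEP 479 compliance: from Python 3.7 a StopIteration that escapes a generator body is re-raised as RuntimeError instead of silently ending the iteration. Sketch (simplified, hypothetical stand-in for the plugin's generator):

    def list_items(merger_types):
        if not merger_types:
            return                   # the only correct way to end a generator early
            # raise StopIteration    # becomes RuntimeError under PEP 479 (Python 3.7+)
        yield from ["item1", "item2"]

    assert list(list_items([])) == []
    assert list(list_items(["type1"])) == ["item1", "item2"]
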
@@ -273,7 +273,7 @@ class SiteStoragePlugin(object):
                     merged_site.log.error("[MISSING] %s" % content_inner_path)
                 # Data files in content.json
                 content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
-                for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
+                for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
                     if not file_relative_path.endswith(".json"):
                         continue  # We only interesed in json files
                     file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
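
Concatenation is another operation views lost: Python 2's `keys()` returned lists that could be joined with `+`, but `dict_keys + dict_keys` is a TypeError on Python 3, hence the double `list(...)`. Sketch:

    files = {"a.json": {}}
    files_optional = {"b.json": {}}

    # files.keys() + files_optional.keys()   # TypeError: views do not support +
    merged = list(files.keys()) + list(files_optional.keys())
    assert merged == ["a.json", "b.json"]
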
@@ -285,7 +285,7 @@ class SiteStoragePlugin(object):
                         merged_site.log.error("[MISSING] %s" % file_inner_path)
                     found += 1
                     if found % 100 == 0:
-                        time.sleep(0.000001)  # Context switch to avoid UI block
+                        time.sleep(0.001)  # Context switch to avoid UI block

     # Also notice merger sites on a merged site file change
     def onUpdated(self, inner_path, file=None):

@@ -339,11 +339,11 @@ class SiteManagerPlugin(object):
         site_manager = self
         if not self.sites:
             return
-        for site in self.sites.itervalues():
+        for site in self.sites.values():
             # Update merged sites
             try:
                 merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type")
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err)))
                 continue
             if merged_type:

@@ -368,7 +368,7 @@ class SiteManagerPlugin(object):

         # Update merged to merger
         if merged_type:
-            for merger_site in self.sites.itervalues():
+            for merger_site in self.sites.values():
                 if "Merger:" + merged_type in merger_site.settings["permissions"]:
                     if site.address not in merged_to_merger:
                         merged_to_merger[site.address] = []

@@ -1 +1 @@
-import MergerSitePlugin
+from . import MergerSitePlugin

@@ -37,7 +37,7 @@ class UiWebsocketPlugin(object):
         total_s = time.time()
         num_sites = 0

-        for address, site_data in self.user.sites.items():
+        for address, site_data in list(self.user.sites.items()):
             feeds = site_data.get("follow")
             if not feeds:
                 continue

@@ -45,7 +45,7 @@ class UiWebsocketPlugin(object):
                 self.log.debug("Invalid feed for site %s" % address)
                 continue
             num_sites += 1
-            for name, query_set in feeds.iteritems():
+            for name, query_set in feeds.items():
                 site = SiteManager.site_manager.get(address)
                 if not site or not site.storage.has_db:
                     continue

@@ -78,7 +78,7 @@ class UiWebsocketPlugin(object):

             for row in res:
                 row = dict(row)
-                if not isinstance(row["date_added"], (int, long, float, complex)):
+                if not isinstance(row["date_added"], (int, float, complex)):
                     self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"]))
                     continue
                 if row["date_added"] > 1000000000000:  # Formatted as millseconds
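
`long` drops out of the isinstance tuple because PEP 237 merged it into int, which is arbitrary-precision on Python 3. Sketch:

    date_added = 10 ** 18          # would have been a long on Python 2

    assert isinstance(date_added, int)   # PEP 237: one arbitrary-precision int type
    # isinstance(date_added, long)       # NameError: name 'long' is not defined
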
@@ -116,7 +116,7 @@ class UiWebsocketPlugin(object):

         search_text, filters = self.parseSearch(search)

-        for address, site in SiteManager.site_manager.list().iteritems():
+        for address, site in SiteManager.site_manager.list().items():
             if not site.storage.has_db:
                 continue

@@ -137,7 +137,7 @@ class UiWebsocketPlugin(object):

             num_sites += 1

-            for name, query in feeds.iteritems():
+            for name, query in feeds.items():
                 s = time.time()
                 try:
                     db_query = DbQuery(query)

@@ -162,7 +162,7 @@ class UiWebsocketPlugin(object):
                     db_query.parts["LIMIT"] = str(limit)

                 res = site.storage.query(str(db_query), params)
-            except Exception, err:
+            except Exception as err:
                 self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
                 stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query})
                 continue

@@ -1 +1 @@
-import NewsfeedPlugin
+from . import NewsfeedPlugin
@@ -88,8 +88,8 @@ class ContentDbPlugin(object):
             site_sizes[row["site_id"]]["optional_downloaded"] += row["size"]

         # Site site size stats to sites.json settings
-        site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()}
-        for site_id, stats in site_sizes.iteritems():
+        site_ids_reverse = {val: key for key, val in self.site_ids.items()}
+        for site_id, stats in site_sizes.items():
             site_address = site_ids_reverse.get(site_id)
             if not site_address:
                 self.log.error("Not found site_id: %s" % site_id)

@@ -166,7 +166,7 @@ class ContentDbPlugin(object):
         num = 0
         site_id = self.site_ids[site.address]
         content_inner_dir = helper.getDirname(content_inner_path)
-        for relative_inner_path, file in content.get("files_optional", {}).iteritems():
+        for relative_inner_path, file in content.get("files_optional", {}).items():
             file_inner_path = content_inner_dir + relative_inner_path
             hash_id = int(file["sha512"][0:4], 16)
             if hash_id in site.content_manager.hashfield:

@@ -232,14 +232,14 @@ class ContentDbPlugin(object):
         num_file = 0
         num_updated = 0
         num_site = 0
-        for site in self.sites.values():
+        for site in list(self.sites.values()):
             if not site.content_manager.has_optional_files:
                 continue
             if not site.settings["serving"]:
                 continue
             has_updated_hashfield = next((
                 peer
-                for peer in site.peers.itervalues()
+                for peer in site.peers.values()
                 if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated
             ), None)

@@ -248,7 +248,7 @@ class ContentDbPlugin(object):

             hashfield_peers = itertools.chain.from_iterable(
                 peer.hashfield.storage
-                for peer in site.peers.itervalues()
+                for peer in site.peers.values()
                 if peer.has_hashfield
             )
             peer_nums = collections.Counter(

@@ -270,7 +270,7 @@ class ContentDbPlugin(object):
                 updates[row["file_id"]] = peer_num

         self.execute("BEGIN")
-        for file_id, peer_num in updates.iteritems():
+        for file_id, peer_num in updates.items():
             self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id))
         self.execute("END")

@@ -394,7 +394,7 @@ class ContentDbPlugin(object):

         self.updatePeerNumbers()

-        site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()}
+        site_ids_reverse = {val: key for key, val in self.site_ids.items()}
         deleted_file_ids = []
         for row in self.queryDeletableFiles():
             site_address = site_ids_reverse.get(row["site_id"])
@@ -6,7 +6,7 @@ import gevent

 from util import helper
 from Plugin import PluginManager
-import ContentDbPlugin
+from . import ContentDbPlugin


 # We can only import plugin host clases after the plugins are loaded

@@ -24,7 +24,7 @@ def processAccessLog():
         for site_id in access_log:
             content_db.execute(
                 "UPDATE file_optional SET time_accessed = %s WHERE ?" % now,
-                {"site_id": site_id, "inner_path": access_log[site_id].keys()}
+                {"site_id": site_id, "inner_path": list(access_log[site_id].keys())}
             )
             num += len(access_log[site_id])
         access_log.clear()

@@ -37,7 +37,7 @@ def processRequestLog():
     num = 0
     cur.execute("BEGIN")
     for site_id in request_log:
-        for inner_path, uploaded in request_log[site_id].iteritems():
+        for inner_path, uploaded in request_log[site_id].items():
             content_db.execute(
                 "UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" % uploaded,
                 {"site_id": site_id, "inner_path": inner_path}

@@ -101,7 +101,7 @@ class ContentManagerPlugin(object):
             {"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id}
         )
         row = res.fetchone()
-        if row and row[0]:
+        if row and row["is_downloaded"]:
             return True
         else:
             return False

@@ -191,7 +191,7 @@ class SitePlugin(object):
         if is_downloadable:
             return is_downloadable

-        for path in self.settings.get("optional_help", {}).iterkeys():
+        for path in self.settings.get("optional_help", {}).keys():
             if inner_path.startswith(path):
                 return True
@@ -1,15 +1,7 @@
 import hashlib
 import os
 import copy
 import json
-from cStringIO import StringIO

 import pytest

 from OptionalManager import OptionalManagerPlugin
 from util import helper
 from Crypt import CryptBitcoin


 @pytest.mark.usefixtures("resetSettings")
 class TestOptionalManager:

@@ -58,7 +50,7 @@ class TestOptionalManager:
         assert not file_row["is_downloaded"]

         # Write file from outside of ZeroNet
-        site.storage.open("testfile", "wb").write("A" * 1234)  # For quick check hash does not matter only file size
+        site.storage.open("testfile", "wb").write(b"A" * 1234)  # For quick check hash does not matter only file size

         hashfield_len_before = len(site.content_manager.hashfield)
         site.storage.verifyFiles(quick_check=True)

@@ -92,8 +84,8 @@ class TestOptionalManager:
         assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd")

         # Write files from outside of ZeroNet (For quick check hash does not matter only file size)
-        site.storage.open("testfile1", "wb").write("A" * 1234)
-        site.storage.open("testfile2", "wb").write("B" * 2345)
+        site.storage.open("testfile1", "wb").write(b"A" * 1234)
+        site.storage.open("testfile2", "wb").write(b"B" * 2345)

         site.storage.verifyFiles(quick_check=True)

@@ -129,7 +121,6 @@ class TestOptionalManager:
         assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1

     def testOptionalDelete(self, site):
         privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
         contents = site.content_manager.contents

         site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
@@ -1,6 +1,6 @@
 import re
 import time
-import cgi
+import html

 import gevent

@@ -28,7 +28,7 @@ class UiWebsocketPlugin(object):
         content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time()
         if len(content_db.my_optional_files) > 50:  # Keep only last 50
             oldest_key = min(
-                content_db.my_optional_files.iterkeys(),
+                iter(content_db.my_optional_files.keys()),
                 key=(lambda key: content_db.my_optional_files[key])
             )
             del content_db.my_optional_files[oldest_key]

@@ -80,7 +80,7 @@ class UiWebsocketPlugin(object):
         # Add leech / seed stats
         row["peer_seed"] = 0
         row["peer_leech"] = 0
-        for peer in site.peers.itervalues():
+        for peer in site.peers.values():
             if not peer.time_piecefields_updated or sha512 not in peer.piecefields:
                 continue
             peer_piecefield = peer.piecefields[sha512].tostring()

@@ -212,7 +212,7 @@ class UiWebsocketPlugin(object):
         num_file = len(inner_path)
         if back == "ok":
             if num_file == 1:
-                self.cmd("notification", ["done", _["Pinned %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000])
+                self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
             else:
                 self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000])
         self.response(to, back)

@@ -224,7 +224,7 @@ class UiWebsocketPlugin(object):
         num_file = len(inner_path)
         if back == "ok":
             if num_file == 1:
-                self.cmd("notification", ["done", _["Removed pin from %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000])
+                self.cmd("notification", ["done", _["Removed pin from %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
             else:
                 self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000])
         self.response(to, back)

@@ -325,7 +325,7 @@ class UiWebsocketPlugin(object):
         self.cmd("notification", [
             "done",
             _["You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>"] %
-            (cgi.escape(title), cgi.escape(directory)),
+            (html.escape(title), html.escape(directory)),
             10000
         ])

@@ -369,10 +369,10 @@ class UiWebsocketPlugin(object):
             self.cmd(
                 "confirm",
                 [
-                    _["Help distribute all new optional files on site <b>%s</b>"] % cgi.escape(site_title),
+                    _["Help distribute all new optional files on site <b>%s</b>"] % html.escape(site_title),
                     _["Yes, I want to help!"]
                 ],
-                lambda (res): self.cbOptionalHelpAll(to, site, True)
+                lambda res: self.cbOptionalHelpAll(to, site, True)
             )
         else:
            site.settings["autodownloadoptional"] = False
@@ -1 +1 @@
-import OptionalManagerPlugin
+from . import OptionalManagerPlugin

@@ -96,8 +96,8 @@ class ContentDbPlugin(object):
         gevent.spawn_later(60*60, self.savePeers, site, spawn=True)

     def saveAllPeers(self):
-        for site in self.sites.values():
+        for site in list(self.sites.values()):
             try:
                 self.savePeers(site)
-            except Exception, err:
+            except Exception as err:
                 site.log.error("Save peer error: %s" % err)

@@ -1,2 +1,2 @@
-import PeerDbPlugin
+from . import PeerDbPlugin

@@ -1,14 +1,11 @@
 import re
 import os
-import cgi
+import html
 import sys
 import math
 import time
 import json
-try:
-    import cStringIO as StringIO
-except:
-    import StringIO
+import io

 import gevent
@@ -17,7 +14,7 @@ from Plugin import PluginManager
 from Debug import Debug
 from Translate import Translate
 from util import helper
-from ZipStream import ZipStream
+from .ZipStream import ZipStream

 plugin_dir = "plugins/Sidebar"
 media_dir = plugin_dir + "/media"

@@ -46,7 +43,7 @@ class UiRequestPlugin(object):
             from Debug import DebugMedia
             DebugMedia.merge(plugin_media_file)
         if ext == "js":
-            yield _.translateData(open(plugin_media_file).read())
+            yield _.translateData(open(plugin_media_file).read()).encode("utf8")
         else:
             for part in self.actionFile(plugin_media_file, send_header=False):
                 yield part

@@ -84,15 +81,13 @@ class UiRequestPlugin(object):
             yield data


 @PluginManager.registerTo("UiWebsocket")
 class UiWebsocketPlugin(object):
     def sidebarRenderPeerStats(self, body, site):
-        connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected])
-        connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")])
-        onion = len([peer_id for peer_id in site.peers.keys() if ".onion" in peer_id])
-        local = len([peer for peer in site.peers.values() if helper.isPrivateIp(peer.ip)])
+        connected = len([peer for peer in list(site.peers.values()) if peer.connection and peer.connection.connected])
+        connectable = len([peer_id for peer_id in list(site.peers.keys()) if not peer_id.endswith(":0")])
+        onion = len([peer_id for peer_id in list(site.peers.keys()) if ".onion" in peer_id])
+        local = len([peer for peer in list(site.peers.values()) if helper.isPrivateIp(peer.ip)])
         peers_total = len(site.peers)

         # Add myself
@@ -111,7 +106,7 @@ class UiWebsocketPlugin(object):
             percent_connectable = percent_connected = percent_onion = 0

         if local:
-            local_html = _(u"<li class='color-yellow'><span>{_[Local]}:</span><b>{local}</b></li>")
+            local_html = _("<li class='color-yellow'><span>{_[Local]}:</span><b>{local}</b></li>")
         else:
             local_html = ""

@@ -122,7 +117,7 @@ class UiWebsocketPlugin(object):
             ",".join(peer_ips)
         )

-        body.append(_(u"""
+        body.append(_("""
         <li>
          <label>
           {_[Peers]}

@@ -155,7 +150,7 @@ class UiWebsocketPlugin(object):
             percent_recv = 0.5
             percent_sent = 0.5

-        body.append(_(u"""
+        body.append(_("""
         <li>
          <label>{_[Data transfer]}</label>
          <ul class='graph graph-stacked'>

@@ -170,7 +165,7 @@ class UiWebsocketPlugin(object):
         """))

     def sidebarRenderFileStats(self, body, site):
-        body.append(_(u"""
+        body.append(_("""
         <li>
          <label>
           {_[Files]}

@@ -198,7 +193,7 @@ class UiWebsocketPlugin(object):
             content = site.content_manager.contents[inner_path]
             if "files" not in content or content["files"] is None:
                 continue
-            for file_name, file_details in content["files"].items():
+            for file_name, file_details in list(content["files"].items()):
                 size_total += file_details["size"]
                 ext = file_name.split(".")[-1]
                 size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"]

@@ -236,7 +231,7 @@ class UiWebsocketPlugin(object):
             percent = 100 * (float(size) / size_total)
             percent = math.floor(percent * 100) / 100  # Floor to 2 digits
             body.append(
-                u"""<li style='width: %.2f%%' class='%s back-%s' title="%s"></li>""" %
+                """<li style='width: %.2f%%' class='%s back-%s' title="%s"></li>""" %
                 (percent, _[extension], color, _[extension])
             )

@@ -262,7 +257,7 @@ class UiWebsocketPlugin(object):
             else:
                 size_formatted = "%.0fkB" % (size / 1024)

-            body.append(u"<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, _[title], size_formatted))
+            body.append("<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, _[title], size_formatted))

         body.append("</ul></li>")
@ -272,9 +267,9 @@ class UiWebsocketPlugin(object):
|
|||
size_limit = site.getSizeLimit()
|
||||
percent_used = size / size_limit
|
||||
|
||||
body.append(_(u"""
|
||||
body.append(_("""
|
||||
<li>
|
||||
<label>{_[Size limit]} <small>({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,d}MB)</small></label>
|
||||
<label>{_[Size limit]} <small>({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,.0f}MB)</small></label>
|
||||
<input type='text' class='text text-num' value="{size_limit}" id='input-sitelimit'/><span class='text-post'>MB</span>
|
||||
<a href='#Set' class='button' id='button-sitelimit'>{_[Set]}</a>
|
||||
</li>
|
||||
|
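The {free_space:,d} → {free_space:,.0f} change in the size-limit hunk follows from PEP 238: / is true division in Python 3 and yields a float even for int operands, which the ,d format code rejects. A short illustration with a made-up value:

    free_space = 72057 / 1024              # true division: float, even for two ints

    # "{:,d}".format(free_space) raises ValueError on a float
    print("{:,.0f}MB".format(free_space))  # 70MB

    print("{:,d}MB".format(72057 // 1024))  # floor division when an int is really wanted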
@ -292,7 +287,7 @@ class UiWebsocketPlugin(object):
size_formatted_total = size_total / 1024 / 1024
size_formatted_downloaded = size_downloaded / 1024 / 1024

body.append(_(u"""
body.append(_("""
<li>
<label>{_[Optional files]}</label>
<ul class='graph'>

@ -314,14 +309,14 @@ class UiWebsocketPlugin(object):
else:
checked = ""

body.append(_(u"""
body.append(_("""
<li>
<label>{_[Download and help distribute all files]}</label>
<input type="checkbox" class="checkbox" id="checkbox-autodownloadoptional" {checked}/><div class="checkbox-skin"></div>
"""))

autodownload_bigfile_size_limit = int(site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit))
body.append(_(u"""
body.append(_("""
<div class='settings-autodownloadoptional'>
<label>{_[Auto download big file size limit]}</label>
<input type='text' class='text text-num' value="{autodownload_bigfile_size_limit}" id='input-autodownload_bigfile_size_limit'/><span class='text-post'>MB</span>

@ -331,16 +326,16 @@ class UiWebsocketPlugin(object):
body.append("</li>")

def sidebarRenderBadFiles(self, body, site):
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Needs to be updated]}:</label>
<ul class='filelist'>
"""))

i = 0
for bad_file, tries in site.bad_files.iteritems():
for bad_file, tries in site.bad_files.items():
i += 1
body.append(_(u"""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""", {
body.append(_("""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""", {
"bad_file_path": bad_file,
"bad_filename": helper.getFilename(bad_file),
"tries": _.pluralize(tries, "{} try", "{} tries")
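iteritems() is gone in Python 3; items() now returns a lazy view, so the plain rename above is the idiomatic port. Sketch (dict contents invented):

    bad_files = {"data/users/content.json": 3, "index.html": 1}  # invented

    # Python 2: bad_files.iteritems()  ->  Python 3: bad_files.items()
    for bad_file, tries in bad_files.items():  # lazy view, no extra copy
        print(bad_file, tries)
    # iterkeys()/itervalues() map to keys()/values() the same way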
@ -350,7 +345,7 @@ class UiWebsocketPlugin(object):

if len(site.bad_files) > 30:
num_bad_files = len(site.bad_files) - 30
body.append(_(u"""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))
body.append(_("""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))

body.append("""
</ul>

@ -363,11 +358,11 @@ class UiWebsocketPlugin(object):
size = float(site.storage.getSize(inner_path)) / 1024
feeds = len(site.storage.db.schema.get("feeds", {}))
else:
inner_path = _[u"No database found"]
inner_path = _["No database found"]
size = 0.0
feeds = 0

body.append(_(u"""
body.append(_("""
<li>
<label>{_[Database]} <small>({size:.2f}kB, {_[search feeds]}: {_[{feeds} query]})</small></label>
<div class='flex'>

@ -385,14 +380,14 @@ class UiWebsocketPlugin(object):
quota = rules["max_size"] / 1024
try:
content = site.content_manager.contents["data/users/%s/content.json" % auth_address]
used = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])
used = len(json.dumps(content)) + sum([file["size"] for file in list(content["files"].values())])
except:
used = 0
used = used / 1024
else:
quota = used = 0

body.append(_(u"""
body.append(_("""
<li>
<label>{_[Identity address]} <small>({_[limit used]}: {used:.2f}kB / {quota:.2f}kB)</small></label>
<div class='flex'>

@ -411,7 +406,7 @@ class UiWebsocketPlugin(object):
class_pause = "hidden"
class_resume = ""

body.append(_(u"""
body.append(_("""
<li>
<label>{_[Site control]}</label>
<a href='#Update' class='button noupdate' id='button-update'>{_[Update]}</a>

@ -423,7 +418,7 @@ class UiWebsocketPlugin(object):

donate_key = site.content_manager.contents.get("content.json", {}).get("donate", True)
site_address = self.site.address
body.append(_(u"""
body.append(_("""
<li>
<label>{_[Site address]}</label><br>
<div class='flex'>

@ -431,8 +426,8 @@ class UiWebsocketPlugin(object):
"""))
if donate_key == False or donate_key == "":
pass
elif (type(donate_key) == str or type(donate_key) == unicode) and len(donate_key) > 0:
body.append(_(u"""
elif (type(donate_key) == str or type(donate_key) == str) and len(donate_key) > 0:
body.append(_("""
</div>
</li>
<li>
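The donate_key hunk shows a rough edge of mechanical conversion: unicode no longer exists, so it was rewritten to str, leaving the redundant test type(donate_key) == str or type(donate_key) == str. A hand-written port would collapse it to one isinstance check, roughly (the key value below is made up):

    donate_key = "1ExampleDonateAddress"  # hypothetical value

    # Python 2: isinstance(donate_key, (str, unicode))
    # Python 3: str is already unicode, so one check suffices
    if isinstance(donate_key, str) and len(donate_key) > 0:
        print("custom donate key:", donate_key)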
@ -441,10 +436,10 @@ class UiWebsocketPlugin(object):
{donate_key}
"""))
else:
body.append(_(u"""
body.append(_("""
<a href='bitcoin:{site_address}' class='button' id='button-donate'>{_[Donate]}</a>
"""))
body.append(_(u"""
body.append(_("""
</div>
</li>
"""))

@ -455,7 +450,7 @@ class UiWebsocketPlugin(object):
else:
checked = ""

body.append(_(u"""
body.append(_("""
<h2 class='owned-title'>{_[This is my site]}</h2>
<input type="checkbox" class="checkbox" id="checkbox-owned" {checked}/><div class="checkbox-skin"></div>
"""))

@ -464,7 +459,7 @@ class UiWebsocketPlugin(object):
title = site.content_manager.contents.get("content.json", {}).get("title", "")
description = site.content_manager.contents.get("content.json", {}).get("description", "")

body.append(_(u"""
body.append(_("""
<li>
<label for='settings-title'>{_[Site title]}</label>
<input type='text' class='text' value="{title}" id='settings-title'/>

@ -483,17 +478,17 @@ class UiWebsocketPlugin(object):
def sidebarRenderContents(self, body, site):
has_privatekey = bool(self.user.getSiteData(site.address, create=False).get("privatekey"))
if has_privatekey:
tag_privatekey = _(u"{_[Private key saved.]} <a href='#Forgot+private+key' id='privatekey-forgot' class='link-right'>{_[Forgot]}</a>")
tag_privatekey = _("{_[Private key saved.]} <a href='#Forgot+private+key' id='privatekey-forgot' class='link-right'>{_[Forgot]}</a>")
else:
tag_privatekey = _(u"<a href='#Add+private+key' id='privatekey-add' class='link-right'>{_[Add saved private key]}</a>")
tag_privatekey = _("<a href='#Add+private+key' id='privatekey-add' class='link-right'>{_[Add saved private key]}</a>")

body.append(_(u"""
body.append(_("""
<li>
<label>{_[Content publishing]} <small class='label-right'>{tag_privatekey}</small></label>
""".replace("{tag_privatekey}", tag_privatekey)))

# Choose content you want to sign
body.append(_(u"""
body.append(_("""
<div class='flex'>
<input type='text' class='text' value="content.json" id='input-contents'/>
<a href='#Sign-and-Publish' id='button-sign-publish' class='button'>{_[Sign and publish]}</a>

@ -502,8 +497,8 @@ class UiWebsocketPlugin(object):
"""))

contents = ["content.json"]
contents += site.content_manager.contents.get("content.json", {}).get("includes", {}).keys()
body.append(_(u"<div class='contents'>{_[Choose]}: "))
contents += list(site.content_manager.contents.get("content.json", {}).get("includes", {}).keys())
body.append(_("<div class='contents'>{_[Choose]}: "))
for content in contents:
body.append(_("<a href='{content}' class='contents-content'>{content}</a> "))
body.append("</div>")

@ -520,7 +515,7 @@ class UiWebsocketPlugin(object):

body.append("<div>")
body.append("<a href='#Close' class='close'>×</a>")
body.append("<h1>%s</h1>" % cgi.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True))
body.append("<h1>%s</h1>" % html.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True))

body.append("<div class='globe loading'></div>")
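cgi.escape() is deprecated in Python 3 (and removed in 3.8), hence the swap to html.escape(). The second argument still controls quote escaping, though the defaults differ: cgi.escape left quotes alone unless asked, html.escape escapes them unless told not to. Sketch:

    import html

    title = 'My "quoted" <site>'
    # passing True explicitly, as the commit does, keeps cgi.escape(s, True) behaviour
    print(html.escape(title, True))  # My &quot;quoted&quot; &lt;site&gt;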
@ -554,7 +549,6 @@ class UiWebsocketPlugin(object):
self.response(to, "".join(body))

def downloadGeoLiteDb(self, db_path):
import urllib
import gzip
import shutil
from util import helper

@ -566,12 +560,13 @@ class UiWebsocketPlugin(object):
"https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz"
]
for db_url in db_urls:
download_err = None
try:
# Download
response = helper.httpRequest(db_url)
data_size = response.getheader('content-length')
data_recv = 0
data = StringIO.StringIO()
data = io.BytesIO()
while True:
buff = response.read(1024 * 512)
if not buff:

@ -592,11 +587,12 @@ class UiWebsocketPlugin(object):
time.sleep(2) # Wait for notify animation
return True
except Exception as err:
download_err = err
self.log.error("Error downloading %s: %s" % (db_url, err))
pass
self.cmd("progress", [
"geolite-info",
_["GeoLite2 City database download error: {}!<br>Please download manually and unpack to data dir:<br>{}"].format(err, db_urls[0]),
_["GeoLite2 City database download error: {}!<br>Please download manually and unpack to data dir:<br>{}"].format(download_err, db_urls[0]),
-100
])
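cStringIO.StringIO held byte strings, so buffers that accumulate downloaded data become io.BytesIO in Python 3 (io.StringIO is the text-mode sibling). A minimal sketch of the buffer usage, with the network response faked:

    import io

    chunks = [b"\x1f\x8b", b"rest-of-gzip-stream"]  # stand-in for response.read() chunks

    data = io.BytesIO()
    for buff in chunks:  # mirrors the download loop above
        data.write(buff)

    data.seek(0)         # rewind before handing the buffer to gzip/shutil
    print(data.read(2))  # b'\x1f\x8b'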
@ -629,14 +625,14 @@ class UiWebsocketPlugin(object):
return loc

def getPeerLocations(self, peers):
import maxminddb
from . import maxminddb
db_path = config.data_dir + '/GeoLite2-City.mmdb'
if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0:
if not self.downloadGeoLiteDb(db_path):
return False
geodb = maxminddb.open_database(db_path)

peers = peers.values()
peers = list(peers.values())
# Place bars
peer_locations = []
placed = {} # Already placed bars here

@ -704,9 +700,9 @@ class UiWebsocketPlugin(object):
globe_data += [peer_location["lat"], peer_location["lon"], height]

self.response(to, globe_data)
except Exception, err:
except Exception as err:
self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err))
self.response(to, {"error": err})
self.response(to, {"error": str(err)})

def actionSiteSetOwned(self, to, owned):
permissions = self.getPermissions(to)
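except Exception, err: is a syntax error in Python 3; only the as form parses. The hunk also wraps the exception in str() before it goes into the response, since an exception object is not JSON-serializable. Both points sketched:

    import json

    try:
        raise ValueError("GeoLite db missing")
    except Exception as err:  # "except ValueError, err" no longer parses
        # json.dumps({"error": err}) would raise TypeError (not JSON serializable)
        print(json.dumps({"error": str(err)}))  # {"error": "GeoLite db missing"}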
@ -1,14 +1,14 @@
import cStringIO as StringIO
import io
import os
import zipfile


class ZipStream(file):
class ZipStream(object):
def __init__(self, dir_path):
self.dir_path = dir_path
self.pos = 0
self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64 = True)
self.buff = StringIO.StringIO()
self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
self.buff = io.BytesIO()
self.file_list = self.getFileList()

def getFileList(self):
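Python 3 removed the file builtin, so ZipStream can no longer inherit from it; it becomes a plain object that merely duck-types the stream methods zipfile.ZipFile actually calls. A simplified sketch of that idea (the real class streams a directory rather than buffering everything):

    import io
    import zipfile

    class BytesSink(object):  # no `file` builtin to subclass in Python 3
        """Minimal file-like object: ZipFile only needs write/tell/seek here."""
        def __init__(self):
            self.buff = io.BytesIO()

        def write(self, data):
            return self.buff.write(data)

        def tell(self):
            return self.buff.tell()

        def seek(self, pos, whence=0):
            return self.buff.seek(pos, whence)

    sink = BytesSink()
    with zipfile.ZipFile(sink, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as zf:
        zf.writestr("hello.txt", b"hello zip")
    print(len(sink.buff.getvalue()) > 0)  # True: compressed bytes passed through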
@ -1 +1 @@
import SidebarPlugin
from . import SidebarPlugin

@ -1 +1 @@
import StatsPlugin
from . import StatsPlugin
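These one-line __init__.py hunks repeat for every plugin: Python 3 dropped implicit relative imports (PEP 328), so a sibling module must be named with an explicit leading dot. Sketched on a hypothetical package layout:

    # myplugin/__init__.py, with myplugin/SidebarPlugin.py beside it (hypothetical layout)

    # Python 2 resolved the bare name against the package directory:
    #     import SidebarPlugin
    # Python 3 resolves bare names against sys.path only, so the
    # package-relative form is required:
    from . import SidebarPlugin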
@ -15,7 +15,7 @@ class UiRequestPlugin(object):
path_parts = self.parsePath(path)
kwargs["header_length"] = False
file_generator = super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs)
if "next" in dir(file_generator): # File found and generator returned
if "__next__" in dir(file_generator): # File found and generator returned
site = self.server.sites.get(path_parts["address"])
return self.actionPatchFile(site, path_parts["inner_path"], file_generator)
else:

@ -28,10 +28,10 @@ class UiRequestPlugin(object):
file_generator = super(UiRequestPlugin, self).actionUiMedia(path)
if translate.lang != "en" and path.endswith(".js"):
s = time.time()
data = "".join(list(file_generator))
data = translate.translateData(data)
data = b"".join(list(file_generator))
data = translate.translateData(data.decode("utf8"))
self.log.debug("Patched %s (%s bytes) in %.3fs" % (path, len(data), time.time() - s))
return iter([data])
return iter([data.encode("utf8")])
else:
return file_generator
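Two Python 3 changes meet in this hunk: the iterator protocol renamed next() to __next__(), and the media generator now yields bytes, so text-level patching has to decode to str and encode back at the boundary. Sketch:

    def file_generator():
        yield b"alert('lang={lang}');"  # response chunks are bytes in Python 3

    chunks = file_generator()
    print("__next__" in dir(chunks))  # True: py3 generators expose __next__, not next

    data = b"".join(chunks).decode("utf8")  # bytes -> str for text processing
    data = data.replace("{lang}", "en")     # patch as text
    response = iter([data.encode("utf8")])  # str -> bytes back toward the client
    print(next(response))                   # b"alert('lang=en');"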
@ -49,12 +49,12 @@ class UiRequestPlugin(object):
if not lang_file_exist or inner_path not in content_json.get("translate", []):
for part in file_generator:
if inner_path.endswith(".html"):
yield part.replace("lang={lang}", "lang=" + str(translate.lang)) # lang get parameter to .js file to avoid cache
yield part.replace(b"lang={lang}", b"lang=%s" % translate.lang.encode("utf8")) # lang get parameter to .js file to avoid cache
else:
yield part
else:
s = time.time()
data = "".join(list(file_generator))
data = b"".join(list(file_generator)).decode("utf8")

# if site.content_manager.contents["content.json"]["files"].get(lang_file):
site.needFile(lang_file, priority=10)

@ -63,9 +63,9 @@ class UiRequestPlugin(object):
data = translate.translateData(data, site.storage.loadJson(lang_file), "js")
else:
data = translate.translateData(data, site.storage.loadJson(lang_file), "html")
data = data.replace("lang={lang}", "lang=" + str(translate.lang)) # lang get parameter to .js file to avoid cache
data = data.replace(b"lang={lang}", b"lang=%s" % translate.lang.encode("utf8")) # lang get parameter to .js file to avoid cache
except Exception as err:
site.log.error("Error loading translation file %s: %s" % (lang_file, err))

self.log.debug("Patched %s (%s bytes) in %.3fs" % (inner_path, len(data), time.time() - s))
yield data
yield data.encode("utf8")

@ -1 +1 @@
import TranslateSitePlugin
from . import TranslateSitePlugin

@ -17,7 +17,7 @@ class ActionsPlugin(object):

def main(self):
global notificationicon, winfolders
from lib import notificationicon, winfolders
from .lib import notificationicon, winfolders
import gevent.threadpool

self.main = sys.modules["main"]

@ -25,7 +25,7 @@ class ActionsPlugin(object):
fs_encoding = sys.getfilesystemencoding()

icon = notificationicon.NotificationIcon(
os.path.join(os.path.dirname(os.path.abspath(__file__).decode(fs_encoding)), 'trayicon.ico'),
os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'),
"ZeroNet %s" % config.version
)
self.icon = icon

@ -137,7 +137,7 @@ class ActionsPlugin(object):
cmd += ' --open_browser ""'
cmd = cmd.decode(sys.getfilesystemencoding())

return u"""
return """
@echo off
chcp 65001 > nul
set PYTHONIOENCODING=utf-8

@ -1,4 +1,4 @@
import sys

if sys.platform == 'win32':
import TrayiconPlugin
from . import TrayiconPlugin

@ -7,7 +7,7 @@
"Quit": "Sair",
"(active)": "(activo)",
"(passive)": "(pasivo)",
"Connections: %s": "Conecciones: %s",
"Connections: %s": "Conecciones: %s",
"Received: %.2f MB | Sent: %.2f MB": "Recibido: %.2f MB | Enviado: %.2f MB",
"Show console window": "Mostrar consola",
"Start ZeroNet when Windows starts": "Iniciar Zeronet cuando inicie Windows"

@ -190,27 +190,27 @@ DefWindowProc = ctypes.windll.user32.DefWindowProcW
DefWindowProc.restype = ctypes.c_int
DefWindowProc.argtypes = [ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM]

WS_OVERLAPPED = 0x00000000L
WS_POPUP = 0x80000000L
WS_CHILD = 0x40000000L
WS_MINIMIZE = 0x20000000L
WS_VISIBLE = 0x10000000L
WS_DISABLED = 0x08000000L
WS_CLIPSIBLINGS = 0x04000000L
WS_CLIPCHILDREN = 0x02000000L
WS_MAXIMIZE = 0x01000000L
WS_CAPTION = 0x00C00000L
WS_BORDER = 0x00800000L
WS_DLGFRAME = 0x00400000L
WS_VSCROLL = 0x00200000L
WS_HSCROLL = 0x00100000L
WS_SYSMENU = 0x00080000L
WS_THICKFRAME = 0x00040000L
WS_GROUP = 0x00020000L
WS_TABSTOP = 0x00010000L
WS_OVERLAPPED = 0x00000000
WS_POPUP = 0x80000000
WS_CHILD = 0x40000000
WS_MINIMIZE = 0x20000000
WS_VISIBLE = 0x10000000
WS_DISABLED = 0x08000000
WS_CLIPSIBLINGS = 0x04000000
WS_CLIPCHILDREN = 0x02000000
WS_MAXIMIZE = 0x01000000
WS_CAPTION = 0x00C00000
WS_BORDER = 0x00800000
WS_DLGFRAME = 0x00400000
WS_VSCROLL = 0x00200000
WS_HSCROLL = 0x00100000
WS_SYSMENU = 0x00080000
WS_THICKFRAME = 0x00040000
WS_GROUP = 0x00020000
WS_TABSTOP = 0x00010000

WS_MINIMIZEBOX = 0x00020000L
WS_MAXIMIZEBOX = 0x00010000L
WS_MINIMIZEBOX = 0x00020000
WS_MAXIMIZEBOX = 0x00010000

WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED |
WS_CAPTION |
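The dropped L suffixes follow from PEP 237, which unified int and long: every Python 3 integer is arbitrary-precision and the 0x...L spelling is a syntax error. The values themselves are unchanged:

    # Python 2: WS_POPUP = 0x80000000L  (long literal)
    # Python 3: a single arbitrary-precision int type; no suffix needed
    WS_POPUP = 0x80000000
    print(WS_POPUP, WS_POPUP.bit_length())  # 2147483648 32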
@ -497,7 +497,7 @@ DispatchMessage.argtypes = [ctypes.POINTER(MSG)]

def LoadIcon(iconfilename, small=False):
return LoadImage(0,
unicode(iconfilename),
str(iconfilename),
IMAGE_ICON,
16 if small else 0,
16 if small else 0,

@ -506,15 +506,15 @@ def LoadIcon(iconfilename, small=False):

class NotificationIcon(object):
def __init__(self, iconfilename, tooltip=None):
assert os.path.isfile(unicode(iconfilename)), "{} doesn't exist".format(iconfilename)
self._iconfile = unicode(iconfilename)
assert os.path.isfile(str(iconfilename)), "{} doesn't exist".format(iconfilename)
self._iconfile = str(iconfilename)
self._hicon = LoadIcon(self._iconfile, True)
assert self._hicon, "Failed to load {}".format(iconfilename)
#self._pumpqueue = Queue.Queue()
self._die = False
self._timerid = None
self._uid = uuid.uuid4()
self._tooltip = unicode(tooltip) if tooltip else u''
self._tooltip = str(tooltip) if tooltip else ''
#self._thread = threading.Thread(target=self._run)
#self._thread.start()
self._info_bubble = None

@ -525,7 +525,7 @@ class NotificationIcon(object):
if self._info_bubble:
info_bubble = self._info_bubble
self._info_bubble = None
message = unicode(self._info_bubble)
message = str(self._info_bubble)
iconinfo.uFlags |= NIF_INFO
iconinfo.szInfo = message
iconinfo.szInfoTitle = message

@ -535,7 +535,7 @@ class NotificationIcon(object):


def _run(self):
self.WM_TASKBARCREATED = ctypes.windll.user32.RegisterWindowMessageW(u'TaskbarCreated')
self.WM_TASKBARCREATED = ctypes.windll.user32.RegisterWindowMessageW('TaskbarCreated')

self._windowproc = WNDPROC(self._callback)
self._hwnd = GenerateDummyWindow(self._windowproc, str(self._uid))

@ -562,11 +562,11 @@ class NotificationIcon(object):
ret = GetMessage(ctypes.pointer(message), 0, 0, 0)
TranslateMessage(ctypes.pointer(message))
DispatchMessage(ctypes.pointer(message))
except Exception, err:
except Exception as err:
# print "NotificationIcon error", err, message
message = MSG()
time.sleep(0.125)
print "Icon thread stopped, removing icon..."
print("Icon thread stopped, removing icon...")

Shell_NotifyIcon(NIM_DELETE, ctypes.cast(ctypes.pointer(iconinfo), ctypes.POINTER(NOTIFYICONDATA)))
ctypes.windll.user32.DestroyWindow(self._hwnd)

@ -586,7 +586,7 @@ class NotificationIcon(object):
item_map = {}
for fs in self.items:
iidx += 1
if isinstance(fs, basestring):
if isinstance(fs, str):
if fs and not fs.strip('-_='):
AppendMenu(menu, MF_SEPARATOR, iidx, fs)
else:

@ -595,7 +595,7 @@ class NotificationIcon(object):
if callable(fs[0]):
itemstring = fs[0]()
else:
itemstring = unicode(fs[0])
itemstring = str(fs[0])
flags = MF_STRING
if itemstring.startswith("!"):
itemstring = itemstring[1:]

@ -660,8 +660,8 @@ class NotificationIcon(object):
time.sleep(0.2)
try:
Shell_NotifyIcon(NIM_DELETE, self.iconinfo)
except Exception, err:
print "Icon remove error", err
except Exception as err:
print("Icon remove error", err)
ctypes.windll.user32.DestroyWindow(self._hwnd)
ctypes.windll.user32.DestroyIcon(self._hicon)

@ -693,7 +693,7 @@ if __name__ == "__main__":

def greet():
ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0)
print "Hello"
print("Hello")

def quit():
ni._die = True

@ -724,6 +724,6 @@ if __name__ == "__main__":

@atexit.register
def goodbye():
print "You are now leaving the Python sector."
print("You are now leaving the Python sector.")

ni._run()
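The print statement becomes the print() function (PEP 3105) throughout the tray-icon code. With several arguments the function joins them with spaces, which preserves the old statement's output; over-eager conversion that wraps an extra pair of parentheses prints a tuple instead:

    err = OSError("no icon")

    # Python 2 statement: print "Icon remove error", err
    print("Icon remove error", err)   # arguments joined with spaces: Icon remove error no icon

    # pitfall of wrapping blindly: an extra pair of parentheses prints a tuple,
    # as in the print(("STEM NOT FOUND! %s" % err)) artifact later in this commit
    print(("Icon remove error", err))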
@ -3,14 +3,15 @@
import specialfolders
start_programs = specialfolders.get(specialfolders.PROGRAMS)

Code is public domain, do with it what you will.
Code is public domain, do with it what you will.

Luke Pinner - Environment.gov.au, 2010 February 10
'''

#Imports use _syntax to mask them from autocomplete IDE's
import ctypes as _ctypes
from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH, create_unicode_buffer as _cub
from ctypes import create_unicode_buffer as _cub
from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH
_SHGetFolderPath = _ctypes.windll.shell32.SHGetFolderPathW

#public special folder constants

@ -49,5 +50,5 @@ def get(intFolder):

if __name__ == "__main__":
import os
print get(STARTUP)
print(get(STARTUP))
open(get(STARTUP)+"\\zeronet.cmd", "w").write("cd /D %s\r\nzeronet.py" % os.getcwd())

@ -1,7 +1,8 @@
import io

from Plugin import PluginManager
from Config import config
from Translate import Translate
from cStringIO import StringIO


if "_" not in locals():

@ -47,7 +48,7 @@ class UiRequestPlugin(object):
else:
data = open(file_path).read()

return self.actionFile(file_path, file_obj=StringIO(data), file_size=len(data))
return self.actionFile(file_path, file_obj=io.BytesIO(data), file_size=len(data))
else:
return super(UiRequestPlugin, self).actionUiMedia(path)

@ -58,7 +59,7 @@ class UiWebsocketPlugin(object):
back = {}
config_values = vars(config.arguments)
config_values.update(config.pending_changes)
for key, val in config_values.iteritems():
for key, val in config_values.items():
if key not in config.keys_api_change_allowed:
continue
is_pending = key in config.pending_changes

@ -1 +1 @@
import UiConfigPlugin
from . import UiConfigPlugin

@ -3,7 +3,7 @@ import time
from util import helper

from Plugin import PluginManager
from BootstrapperDb import BootstrapperDb
from .BootstrapperDb import BootstrapperDb
from Crypt import CryptRsa
from Config import config

@ -70,7 +70,7 @@ class FileRequestPlugin(object):

hashes_changed = 0
db.execute("BEGIN")
for onion, onion_hashes in onion_to_hash.iteritems():
for onion, onion_hashes in onion_to_hash.items():
hashes_changed += db.peerAnnounce(
ip_type="onion",
address=onion,

@ -113,7 +113,7 @@ class FileRequestPlugin(object):

hash_peers = db.peerList(
hash,
address=self.connection.ip, onions=onion_to_hash.keys(), port=params["port"],
address=self.connection.ip, onions=list(onion_to_hash.keys()), port=params["port"],
limit=min(limit, params["need_num"]), need_types=params["need_types"], order=order
)
if "ip4" in params["need_types"]: # Backward compatibility

@ -78,7 +78,7 @@ class TestBootstrapper:
assert len(res["peers"][0][ip_type]) == 1

# Test DB cleanup
assert map(lambda row: row[0], bootstrapper_db.execute("SELECT address FROM peer").fetchall()) == [file_server.ip_external] # 127.0.0.1 never get added to db
assert [row[0] for row in bootstrapper_db.execute("SELECT address FROM peer").fetchall()] == [file_server.ip_external] # 127.0.0.1 never get added to db

# Delete peers
bootstrapper_db.execute("DELETE FROM peer WHERE address = ?", [file_server.ip_external])
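map() returns a lazy iterator in Python 3, so comparing its result against a list is always False; the test is rewritten as a list comprehension, the usual idiom. Sketch with stand-in rows:

    rows = [("1.2.3.4",), ("5.6.7.8",)]  # stand-in for fetchall() results

    print(map(lambda row: row[0], rows) == ["1.2.3.4", "5.6.7.8"])  # False: map object
    print([row[0] for row in rows] == ["1.2.3.4", "5.6.7.8"])       # True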
@ -1 +1 @@
import BootstrapperPlugin
from . import BootstrapperPlugin

@ -54,7 +54,7 @@ class SiteManagerPlugin(object):
res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read()
data = json.loads(res)["data"]["value"]
if "zeronet" in data:
for key, val in data["zeronet"].iteritems():
for key, val in data["zeronet"].items():
self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours
self.saveDnsCache()
return data["zeronet"].get(sub_domain)

@ -76,7 +76,7 @@ class SiteManagerPlugin(object):
with gevent.Timeout(5, Exception("Timeout: 5s")):
res = Http.get("https://dnschain.info/bit/d/%s" % re.sub(r"\.bit$", "", top_domain)).read()
data = json.loads(res)["value"]
for key, val in data["zeronet"].iteritems():
for key, val in data["zeronet"].items():
self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours
self.saveDnsCache()
return data["zeronet"].get(sub_domain)

@ -1 +1 @@
import DonationMessagePlugin
from . import DonationMessagePlugin

@ -5,11 +5,11 @@ import json
from Config import config
from Plugin import PluginManager
from Crypt import CryptBitcoin
import UserPlugin
from . import UserPlugin

try:
local_master_addresses = set(json.load(open("%s/users.json" % config.data_dir)).keys()) # Users in users.json
except Exception, err:
except Exception as err:
local_master_addresses = set()


@ -59,7 +59,7 @@ class UiRequestPlugin(object):
return False

elif loggedin:
back = back_generator.next()
back = next(back_generator)
inject_html = """
<!-- Multiser plugin -->
<script nonce="{script_nonce}">
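Generator objects lost their .next() method in Python 3; the next() builtin (available since 2.6) is the portable spelling and also takes an optional default instead of raising StopIteration:

    def back_generator():
        yield "<html>...injected page...</html>"

    gen = back_generator()
    print(next(gen))             # py3 spelling of gen.next()
    print(next(gen, "<empty>"))  # optional default avoids StopIteration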
@ -1 +1 @@
import MultiuserPlugin
from . import MultiuserPlugin

@ -16,9 +16,9 @@ if config.tor != "disable":
monkey.patch_time()
monkey.patch_socket(dns=False)
monkey.patch_thread()
print "Stem Port Plugin: modules are patched."
print("Stem Port Plugin: modules are patched.")
else:
print "Stem Port Plugin: Tor mode disabled. Module patching skipped."
print("Stem Port Plugin: Tor mode disabled. Module patching skipped.")


class PatchedControlPort(ControlPort):

@ -66,14 +66,14 @@ class TorManagerPlugin(object):
controller = from_port(port=self.port)
controller.authenticate()
self.controller = controller
self.status = u"Connected (via Stem)"
except Exception, err:
self.status = "Connected (via Stem)"
except Exception as err:
print("\n")
traceback.print_exc()
print("\n")

self.controller = None
self.status = u"Error (%s)" % err
self.status = "Error (%s)" % err
self.log.error("Tor stem connect error: %s" % Debug.formatException(err))

return self.controller

@ -87,8 +87,8 @@ class TorManagerPlugin(object):
def resetCircuits(self):
try:
self.controller.signal(Signal.NEWNYM)
except Exception, err:
self.status = u"Stem reset circuits error (%s)" % err
except Exception as err:
self.status = "Stem reset circuits error (%s)" % err
self.log.error("Stem reset circuits error: %s" % err)


@ -105,8 +105,8 @@ class TorManagerPlugin(object):

return (service.service_id, service.private_key)

except Exception, err:
self.status = u"AddOnion error (Stem: %s)" % err
except Exception as err:
self.status = "AddOnion error (Stem: %s)" % err
self.log.error("Failed to create hidden service with Stem: " + err)
return False


@ -115,8 +115,8 @@ class TorManagerPlugin(object):
try:
self.controller.remove_ephemeral_hidden_service(address)
return True
except Exception, err:
self.status = u"DelOnion error (Stem: %s)" % err
except Exception as err:
self.status = "DelOnion error (Stem: %s)" % err
self.log.error("Stem failed to delete %s.onion: %s" % (address, err))
self.disconnect() # Why?
return False

@ -1,10 +1,10 @@
try:
from stem.control import Controller
stem_found = True
except Exception as err:
print "STEM NOT FOUND! %s" % err
stem_found = False

if stem_found:
print "Starting Stem plugin..."
import StemPortPlugin
try:
from stem.control import Controller
stem_found = True
except Exception as err:
print(("STEM NOT FOUND! %s" % err))
stem_found = False

if stem_found:
print("Starting Stem plugin...")
from . import StemPortPlugin

@ -1 +1 @@
import UiPasswordPlugin
from . import UiPasswordPlugin

@ -3,7 +3,7 @@ import gevent
from Plugin import PluginManager
from Config import config
from Debug import Debug
from domainLookup import lookupDomain
from .domainLookup import lookupDomain

allow_reload = False # No reload supported


@ -1,2 +1,2 @@
import UiRequestPlugin
import SiteManagerPlugin
from . import UiRequestPlugin
from . import SiteManagerPlugin

@ -37,7 +37,7 @@
try:
import http.client as httplib
except ImportError:
import httplib
import http.client
import base64
import decimal
import json

@ -45,7 +45,7 @@ import logging
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
import urllib.parse

USER_AGENT = "AuthServiceProxy/0.1"


@ -83,7 +83,7 @@ class AuthServiceProxy(object):
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
self.__service_url = service_url
self.__service_name = service_name
self.__url = urlparse.urlparse(service_url)
self.__url = urllib.parse.urlparse(service_url)
if self.__url.port is None:
port = 80
else:

@ -106,10 +106,10 @@ class AuthServiceProxy(object):
# Callables re-use the connection of the original proxy
self.__conn = connection
elif self.__url.scheme == 'https':
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
self.__conn = http.client.HTTPSConnection(self.__url.hostname, port,
timeout=timeout)
else:
self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
self.__conn = http.client.HTTPConnection(self.__url.hostname, port,
timeout=timeout)

def __getattr__(self, name):
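With Python 2 support dropped, the try/except import fallbacks collapse to the Python 3 names: httplib becomes http.client and urlparse becomes urllib.parse, with call sites fully qualified. Sketch (the RPC endpoint is a placeholder):

    import http.client
    import urllib.parse

    url = urllib.parse.urlparse("http://user:pass@127.0.0.1:8332")  # placeholder endpoint
    print(url.hostname, url.port)  # 127.0.0.1 8332

    conn = http.client.HTTPConnection(url.hostname, url.port, timeout=30)
    # conn.request("POST", "/", payload, headers) would issue the JSON-RPC call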
@ -1,4 +1,4 @@
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from .bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
import time, json, os, sys, re, socket

# Connecting to RPC