Change to Python3 coding style
parent fc0fe0557b
commit b0b9a4d33c
137 changed files with 910 additions and 913 deletions
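Most of the diff below is mechanical 2-to-3 conversion: `print` statements become `print()` calls, `except Exception, err` becomes `except Exception as err`, intra-package imports gain an explicit `from .`, string literals compared against raw file contents gain a `b` prefix, and `/` divisions used as indexes are wrapped in `int()`. The one recurring pattern worth spelling out is the `list(...)` wrapping around `items()`, `keys()`, and `values()`: in Python 3 these return live views, which can neither be indexed nor safely iterated while the dict is mutated. A minimal illustrative sketch (not taken from the commit itself; the names only mirror the diff):

    known_peers = {"peer1": {"found": 0}, "peer2": {"found": 0}}

    # Python 2's items() returned a list copy, so deleting entries mid-loop
    # was safe. Iterating the Python 3 view while deleting would raise
    # "RuntimeError: dictionary changed size during iteration".
    for peer_id, known_peer in list(known_peers.items()):
        del known_peers[peer_id]

    # Views are not subscriptable either, so values()[0] must become:
    sites = {"address1": "site_object1"}  # hypothetical address -> site map
    first_site = list(sites.values())[0]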
@@ -4,7 +4,7 @@ import gevent

 from Plugin import PluginManager
 from Config import config
-import BroadcastServer
+from . import BroadcastServer


 @PluginManager.registerTo("SiteAnnouncer")
@@ -42,7 +42,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):
         if force: # Probably new site added, clean cache
             self.known_peers = {}

-        for peer_id, known_peer in self.known_peers.items():
+        for peer_id, known_peer in list(self.known_peers.items()):
             if time.time() - known_peer["found"] > 20 * 60:
                 del(self.known_peers[peer_id])
                 self.log.debug("Timeout, removing from known_peers: %s" % peer_id)
@@ -78,7 +78,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):

     def actionSiteListRequest(self, sender, params):
         back = []
-        sites = self.server.sites.values()
+        sites = list(self.server.sites.values())

         # Split adresses to group of 100 to avoid UDP size limit
         site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)]
@@ -85,10 +85,10 @@ class TestAnnounce:

     def testPeerDiscover(self, announcer, announcer_remote, site):
         assert announcer.server.peer_id != announcer_remote.server.peer_id
-        assert len(announcer.server.sites.values()[0].peers) == 0
+        assert len(list(announcer.server.sites.values())[0].peers) == 0
         announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port)
         time.sleep(0.1)
-        assert len(announcer.server.sites.values()[0].peers) == 1
+        assert len(list(announcer.server.sites.values())[0].peers) == 1

     def testRecentPeerList(self, announcer, announcer_remote, site):
         assert len(site.peers_recent) == 0
@@ -101,13 +101,13 @@ class TestAnnounce:
         assert len(site.peers) == 1

         # It should update peer without siteListResponse
-        last_time_found = site.peers.values()[0].time_found
+        last_time_found = list(site.peers.values())[0].time_found
         site.peers_recent.clear()
         with Spy.Spy(announcer, "handleMessage") as responses:
             announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
             time.sleep(0.1)
         assert [response[1]["cmd"] for response in responses] == ["discoverResponse"]
         assert len(site.peers_recent) == 1
-        assert site.peers.values()[0].time_found > last_time_found
+        assert list(site.peers.values())[0].time_found > last_time_found

@@ -1 +1 @@
-import AnnounceLocalPlugin
+from . import AnnounceLocalPlugin
@@ -75,7 +75,7 @@ class TrackerStorage(object):

     def getWorkingTrackers(self, type="shared"):
         trackers = {
-            key: tracker for key, tracker in self.getTrackers(type).iteritems()
+            key: tracker for key, tracker in self.getTrackers(type).items()
             if tracker["time_success"] > time.time() - 60 * 60
         }
         return trackers
@@ -95,7 +95,7 @@ class TrackerStorage(object):

         trackers = self.getTrackers()
         self.log.debug("Loaded %s shared trackers" % len(trackers))
-        for address, tracker in trackers.items():
+        for address, tracker in list(trackers.items()):
             tracker["num_error"] = 0
             if not address.startswith("zero://"):
                 del trackers[address]
@@ -144,7 +144,7 @@ class SiteAnnouncerPlugin(object):
             tracker_storage.time_discover = time.time()
             gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers())
         trackers = super(SiteAnnouncerPlugin, self).getTrackers()
-        shared_trackers = tracker_storage.getTrackers("shared").keys()
+        shared_trackers = list(tracker_storage.getTrackers("shared").keys())
         if shared_trackers:
             return trackers + shared_trackers
         else:
@@ -164,7 +164,7 @@ class SiteAnnouncerPlugin(object):
 @PluginManager.registerTo("FileRequest")
 class FileRequestPlugin(object):
     def actionGetTrackers(self, params):
-        shared_trackers = tracker_storage.getWorkingTrackers("shared").keys()
+        shared_trackers = list(tracker_storage.getWorkingTrackers("shared").keys())
         self.response({"trackers": shared_trackers})


@@ -12,7 +12,6 @@ class TestAnnounceShare:
         open("%s/trackers.json" % config.data_dir, "w").write("{}")
         tracker_storage = AnnounceSharePlugin.tracker_storage
         tracker_storage.load()
-        print tracker_storage.file_path, config.data_dir
         peer = Peer(file_server.ip, 1544, connection_server=file_server)
         assert peer.request("getTrackers")["trackers"] == []

@@ -1 +1 @@
-import AnnounceSharePlugin
+from . import AnnounceSharePlugin
@@ -119,7 +119,7 @@ class SiteAnnouncerPlugin(object):
                 onion = self.site.connection_server.tor_manager.getOnion(site.address)
                 publickey = self.site.connection_server.tor_manager.getPublickey(onion)
                 if publickey not in request["onion_signs"]:
-                    sign = CryptRsa.sign(res["onion_sign_this"], self.site.connection_server.tor_manager.getPrivatekey(onion))
+                    sign = CryptRsa.sign(res["onion_sign_this"].encode("utf8"), self.site.connection_server.tor_manager.getPrivatekey(onion))
                     request["onion_signs"][publickey] = sign
             res = tracker_peer.request("announce", request)
             if not res or "onion_sign_this" in res:
@@ -1 +1 @@
-import AnnounceZeroPlugin
+from . import AnnounceZeroPlugin
@@ -4,7 +4,7 @@ import array
 def packPiecefield(data):
     res = []
     if not data:
-        return array.array("H", "")
+        return array.array("H", b"")

     if data[0] == "0":
         res.append(0)
@@ -48,7 +48,7 @@ class BigfilePiecefield(object):
     __slots__ = ["data"]

     def __init__(self):
-        self.data = ""
+        self.data = b""

     def fromstring(self, s):
         self.data = s
@@ -80,7 +80,7 @@ class BigfilePiecefieldPacked(object):
     __slots__ = ["data"]

     def __init__(self):
-        self.data = ""
+        self.data = b""

     def fromstring(self, data):
         self.data = packPiecefield(data).tostring()
@@ -116,7 +116,7 @@ if __name__ == "__main__":
     meminfo = psutil.Process(os.getpid()).memory_info

     for storage in [BigfilePiecefieldPacked, BigfilePiecefield]:
-        print "-- Testing storage: %s --" % storage
+        print("-- Testing storage: %s --" % storage)
         m = meminfo()[0]
         s = time.time()
         piecefields = {}
@@ -125,34 +125,34 @@ if __name__ == "__main__":
             piecefield.fromstring(testdata[:i] + "0" + testdata[i + 1:])
             piecefields[i] = piecefield

-        print "Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
+        print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))

         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             val = piecefield[1000]

-        print "Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
+        print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))

         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             piecefield[1000] = True

-        print "Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
+        print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))

         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             packed = piecefield.pack()

-        print "Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed))
+        print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)))

         m = meminfo()[0]
         s = time.time()
-        for piecefield in piecefields.values():
+        for piecefield in list(piecefields.values()):
             piecefield.unpack(packed)

-        print "Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
+        print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))

         piecefields = {}
@@ -5,7 +5,6 @@ import shutil
 import collections
 import math

-import msgpack
 import gevent
 import gevent.lock

@@ -15,7 +14,7 @@ from Crypt import CryptHash
 from lib import merkletools
 from util import helper
 import util
-from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
+from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked


 # We can only import plugin host clases after the plugins are loaded
@@ -61,7 +60,7 @@ class UiRequestPlugin(object):
         )

         if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split
-            hash = piecemap_info["sha512_pieces"][0].encode("hex")
+            hash = piecemap_info["sha512_pieces"][0].hex()
             hash_id = site.content_manager.hashfield.getHashId(hash)
             site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True)

@@ -178,7 +177,7 @@ class UiWebsocketPlugin(object):
             self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True)
             try:
                 self.site.storage.delete(piecemap_inner_path)
-            except Exception, err:
+            except Exception as err:
                 self.log.error("File %s delete error: %s" % (piecemap_inner_path, err))

         return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path)
@@ -324,7 +323,7 @@ class ContentManagerPlugin(object):

     def verifyPiece(self, inner_path, pos, piece):
         piecemap = self.getPiecemap(inner_path)
-        piece_i = pos / piecemap["piece_size"]
+        piece_i = int(pos / piecemap["piece_size"])
         if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
             raise VerifyError("Invalid hash")
         return True
@@ -345,7 +344,7 @@ class ContentManagerPlugin(object):
         file_info = self.getFileInfo(inner_path)

         # Mark piece downloaded
-        piece_i = pos_from / file_info["piece_size"]
+        piece_i = int(pos_from / file_info["piece_size"])
         self.site.storage.piecefields[file_info["sha512"]][piece_i] = True

         # Only add to site size on first request
@@ -368,7 +367,7 @@ class ContentManagerPlugin(object):
             del self.site.storage.piecefields[sha512]

         # Also remove other pieces of the file from download queue
-        for key in self.site.bad_files.keys():
+        for key in list(self.site.bad_files.keys()):
             if key.startswith(inner_path + "|"):
                 del self.site.bad_files[key]
         self.site.worker_manager.removeSolvedFileTasks()
@@ -381,9 +380,9 @@ class SiteStoragePlugin(object):
         super(SiteStoragePlugin, self).__init__(*args, **kwargs)
         self.piecefields = collections.defaultdict(BigfilePiecefield)
         if "piecefields" in self.site.settings.get("cache", {}):
-            for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").iteritems():
+            for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
                 if piecefield_packed:
-                    self.piecefields[sha512].unpack(piecefield_packed.decode("base64"))
+                    self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed))
             self.site.settings["cache"]["piecefields"] = {}

     def createSparseFile(self, inner_path, size, sha512=None):
@@ -486,7 +485,7 @@ class BigFile(object):
         requests = []
         # Request all required blocks
         while 1:
-            piece_i = pos / self.piece_size
+            piece_i = int(pos / self.piece_size)
             if piece_i * self.piece_size >= read_until:
                 break
             pos_from = piece_i * self.piece_size
@@ -503,7 +502,7 @@ class BigFile(object):
             prebuffer_until = min(self.size, read_until + self.prebuffer)
             priority = 3
             while 1:
-                piece_i = pos / self.piece_size
+                piece_i = int(pos / self.piece_size)
                 if piece_i * self.piece_size >= prebuffer_until:
                     break
                 pos_from = piece_i * self.piece_size
@@ -565,7 +564,7 @@ class WorkerManagerPlugin(object):

             inner_path, file_range = inner_path.split("|")
             pos_from, pos_to = map(int, file_range.split("-"))
-            task["piece_i"] = pos_from / file_info["piece_size"]
+            task["piece_i"] = int(pos_from / file_info["piece_size"])
             task["sha512"] = file_info["sha512"]
         else:
             if inner_path in self.site.bad_files:
@@ -601,10 +600,10 @@ class WorkerManagerPlugin(object):
 class FileRequestPlugin(object):
     def isReadable(self, site, inner_path, file, pos):
         # Peek into file
-        if file.read(10) == "\0" * 10:
+        if file.read(10) == b"\0" * 10:
             # Looks empty, but makes sures we don't have that piece
             file_info = site.content_manager.getFileInfo(inner_path)
-            piece_i = pos / file_info["piece_size"]
+            piece_i = int(pos / file_info["piece_size"])
             if not site.storage.piecefields[file_info["sha512"]][piece_i]:
                 return False
         # Seek back to position we want to read
@@ -622,7 +621,7 @@ class FileRequestPlugin(object):
         if not peer.connection: # Just added
             peer.connect(self.connection) # Assign current connection to peer

-        piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.iteritems()}
+        piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()}
         self.response({"piecefields_packed": piecefields_packed})

     def actionSetPiecefields(self, params):
@@ -638,7 +637,7 @@ class FileRequestPlugin(object):
             peer.connect(self.connection)

         peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
-        for sha512, piecefield_packed in params["piecefields_packed"].iteritems():
+        for sha512, piecefield_packed in params["piecefields_packed"].items():
             peer.piecefields[sha512].unpack(piecefield_packed)
         site.settings["has_bigfile"] = True

@@ -673,7 +672,7 @@ class PeerPlugin(object):

         self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
         try:
-            for sha512, piecefield_packed in res["piecefields_packed"].iteritems():
+            for sha512, piecefield_packed in res["piecefields_packed"].items():
                 self.piecefields[sha512].unpack(piecefield_packed)
         except Exception as err:
             self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err))
@@ -720,7 +719,7 @@ class SitePlugin(object):
     def getSettingsCache(self):
         back = super(SitePlugin, self).getSettingsCache()
         if self.storage.piecefields:
-            back["piecefields"] = {sha512: piecefield.pack().encode("base64") for sha512, piecefield in self.storage.piecefields.iteritems()}
+            back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()}
         return back

     def needFile(self, inner_path, *args, **kwargs):
@@ -1,5 +1,5 @@
 import time
-from cStringIO import StringIO
+import io

 import pytest
 import msgpack
@@ -40,7 +40,7 @@ class TestBigfile:
         piecemap = msgpack.unpack(site.storage.open(file_node["piecemap"], "rb"))["optional.any.iso"]
         assert len(piecemap["sha512_pieces"]) == 10
         assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1]
-        assert piecemap["sha512_pieces"][0].encode("hex") == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
+        assert piecemap["sha512_pieces"][0].hex() == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"

     def testVerifyPiece(self, site):
         inner_path = self.createBigfile(site)
@@ -48,7 +48,7 @@ class TestBigfile:
         # Verify all 10 piece
         f = site.storage.open(inner_path, "rb")
         for i in range(10):
-            piece = StringIO(f.read(1024 * 1024))
+            piece = io.BytesIO(f.read(1024 * 1024))
             piece.seek(0)
             site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
         f.close()
@@ -57,7 +57,7 @@ class TestBigfile:
         with pytest.raises(VerifyError) as err:
             i = 1
             f = site.storage.open(inner_path, "rb")
-            piece = StringIO(f.read(1024 * 1024))
+            piece = io.BytesIO(f.read(1024 * 1024))
             f.close()
             site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
         assert "Invalid hash" in str(err)
@@ -70,19 +70,19 @@ class TestBigfile:

         # Write to file beginning
         s = time.time()
-        f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), "hellostart" * 1024)
+        f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024)
         time_write_start = time.time() - s

         # Write to file end
         s = time.time()
-        f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), "helloend" * 1024)
+        f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024)
         time_write_end = time.time() - s

         # Verify writes
         f = site.storage.open(inner_path)
-        assert f.read(10) == "hellostart"
+        assert f.read(10) == b"hellostart"
         f.seek(99 * 1024 * 1024)
-        assert f.read(8) == "helloend"
+        assert f.read(8) == b"helloend"
         f.close()

         site.storage.delete(inner_path)
@@ -105,7 +105,7 @@ class TestBigfile:
         buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))

         assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size
-        assert buff.getvalue().startswith("Test524") # Correct data
+        assert buff.getvalue().startswith(b"Test524") # Correct data
         buff.seek(0)
         assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash

@@ -147,12 +147,12 @@ class TestBigfile:

         # Verify 0. block not downloaded
         f = site_temp.storage.open(inner_path)
-        assert f.read(10) == "\0" * 10
+        assert f.read(10) == b"\0" * 10
         # Verify 5. and 10. block downloaded
         f.seek(5 * 1024 * 1024)
-        assert f.read(7) == "Test524"
+        assert f.read(7) == b"Test524"
         f.seek(9 * 1024 * 1024)
-        assert f.read(7) == "943---T"
+        assert f.read(7) == b"943---T"

         # Verify hashfield
         assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) # 18343: data/optional.any.iso, 30970: data/optional.any.iso.hashmap.msgpack
@@ -178,14 +178,14 @@ class TestBigfile:
         with site_temp.storage.openBigfile(inner_path) as f:
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)
-                assert f.read(7) == "Test524"
+                assert f.read(7) == b"Test524"

                 f.seek(9 * 1024 * 1024)
-                assert f.read(7) == "943---T"
+                assert f.read(7) == b"943---T"

             assert len(requests) == 4 # 1x peicemap + 1x getpiecefield + 2x for pieces

-            assert set(site_temp.content_manager.hashfield) == set([18343, 30970])
+            assert set(site_temp.content_manager.hashfield) == set([18343, 43727])

             assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
             assert f.sha512 in site_temp.getSettingsCache()["piecefields"]
@@ -193,7 +193,7 @@ class TestBigfile:
             # Test requesting already downloaded
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)
-                assert f.read(7) == "Test524"
+                assert f.read(7) == b"Test524"

             assert len(requests) == 0

@@ -201,9 +201,9 @@ class TestBigfile:
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024) # We already have this block
                 data = f.read(1024 * 1024 * 3) # Our read overflow to 6. and 7. block
-                assert data.startswith("Test524")
-                assert data.endswith("Test838-")
-                assert "\0" not in data # No null bytes allowed
+                assert data.startswith(b"Test524")
+                assert data.endswith(b"Test838-")
+                assert b"\0" not in data # No null bytes allowed

             assert len(requests) == 2 # Two block download

@@ -258,11 +258,11 @@ class TestBigfile:
         # Download second block
         with site_temp.storage.openBigfile(inner_path) as f:
             f.seek(1024 * 1024)
-            assert f.read(1024)[0] != "\0"
+            assert f.read(1024)[0:1] != b"\0"

         # Make sure first block not download
         with site_temp.storage.open(inner_path) as f:
-            assert f.read(1024)[0] == "\0"
+            assert f.read(1024)[0:1] == b"\0"

         peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)

@@ -284,8 +284,8 @@ class TestBigfile:
         s = time.time()
         for i in range(25000):
             site.addPeer(file_server.ip, i)
-        print "%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024) # 0.082s MEM: + 6800KB
-        print site.peers.values()[0].piecefields
+        print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)) # 0.082s MEM: + 6800KB
+        print(list(site.peers.values())[0].piecefields)

     def testUpdatePiecefield(self, file_server, site, site_temp):
         inner_path = self.createBigfile(site)
@@ -390,16 +390,16 @@ class TestBigfile:
         size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]

         with site_temp.storage.openBigfile(inner_path) as f:
-            assert "\0" not in f.read(1024)
+            assert b"\0" not in f.read(1024)
             assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

         with site_temp.storage.openBigfile(inner_path) as f:
             # Don't count twice
-            assert "\0" not in f.read(1024)
+            assert b"\0" not in f.read(1024)
             assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

             # Add second block
-            assert "\0" not in f.read(1024 * 1024)
+            assert b"\0" not in f.read(1024 * 1024)
             assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile

     def testPrebuffer(self, file_server, site, site_temp):
@@ -423,7 +423,7 @@ class TestBigfile:
         with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
             with Spy.Spy(FileRequest, "route") as requests:
                 f.seek(5 * 1024 * 1024)
-                assert f.read(7) == "Test524"
+                assert f.read(7) == b"Test524"
             # assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces
             assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2

@@ -434,7 +434,7 @@ class TestBigfile:

             # No prebuffer beyond end of the file
             f.seek(9 * 1024 * 1024)
-            assert "\0" not in f.read(7)
+            assert b"\0" not in f.read(7)

             assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0

@@ -1,2 +1,2 @@
-import BigfilePlugin
-from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
+from . import BigfilePlugin
+from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
@@ -29,7 +29,7 @@ class ChartCollector(object):
         sites = file_server.sites
         if not sites:
             return collectors
-        content_db = sites.values()[0].content_manager.contents.db
+        content_db = list(sites.values())[0].content_manager.contents.db

         # Connection stats
         collectors["connection"] = lambda: len(file_server.connections)
@@ -67,8 +67,8 @@ class ChartCollector(object):
         collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()])

         # Peers
-        collectors["peer"] = lambda (peers): len(peers)
-        collectors["peer_onion"] = lambda (peers): len([True for peer in peers if ".onion" in peer])
+        collectors["peer"] = lambda peers: len(peers)
+        collectors["peer_onion"] = lambda peers: len([True for peer in peers if ".onion" in peer])

         # Size
         collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()])
@@ -81,21 +81,21 @@ class ChartCollector(object):
         site_collectors = {}

         # Size
-        site_collectors["site_size"] = lambda(site): site.settings.get("size", 0)
-        site_collectors["site_size_optional"] = lambda(site): site.settings.get("size_optional", 0)
-        site_collectors["site_optional_downloaded"] = lambda(site): site.settings.get("optional_downloaded", 0)
-        site_collectors["site_content"] = lambda(site): len(site.content_manager.contents)
+        site_collectors["site_size"] = lambda site: site.settings.get("size", 0)
+        site_collectors["site_size_optional"] = lambda site: site.settings.get("size_optional", 0)
+        site_collectors["site_optional_downloaded"] = lambda site: site.settings.get("optional_downloaded", 0)
+        site_collectors["site_content"] = lambda site: len(site.content_manager.contents)

         # Data transfer
-        site_collectors["site_bytes_recv|change"] = lambda(site): site.settings.get("bytes_recv", 0)
-        site_collectors["site_bytes_sent|change"] = lambda(site): site.settings.get("bytes_sent", 0)
+        site_collectors["site_bytes_recv|change"] = lambda site: site.settings.get("bytes_recv", 0)
+        site_collectors["site_bytes_sent|change"] = lambda site: site.settings.get("bytes_sent", 0)

         # Peers
-        site_collectors["site_peer"] = lambda(site): len(site.peers)
-        site_collectors["site_peer_onion"] = lambda(site): len(
-            [True for peer in site.peers.itervalues() if peer.ip.endswith(".onion")]
+        site_collectors["site_peer"] = lambda site: len(site.peers)
+        site_collectors["site_peer_onion"] = lambda site: len(
+            [True for peer in site.peers.values() if peer.ip.endswith(".onion")]
         )
-        site_collectors["site_peer_connected"] = lambda(site): len([True for peer in site.peers.itervalues() if peer.connection])
+        site_collectors["site_peer_connected"] = lambda site: len([True for peer in site.peers.values() if peer.connection])

         return site_collectors

@@ -109,7 +109,7 @@ class ChartCollector(object):
         if site is None:
             peers = self.getUniquePeers()
         datas = {}
-        for key, collector in collectors.iteritems():
+        for key, collector in collectors.items():
             try:
                 if site:
                     value = collector(site)
@@ -138,7 +138,7 @@ class ChartCollector(object):
         s = time.time()
         datas = self.collectDatas(collectors, last_values["global"])
         values = []
-        for key, value in datas.iteritems():
+        for key, value in datas.items():
             values.append((self.db.getTypeId(key), value, now))
         self.log.debug("Global collectors done in %.3fs" % (time.time() - s))

@@ -154,9 +154,9 @@ class ChartCollector(object):
         now = int(time.time())
         s = time.time()
         values = []
-        for address, site in sites.iteritems():
+        for address, site in sites.items():
             site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site)
-            for key, value in site_datas.iteritems():
+            for key, value in site_datas.items():
                 values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now))
             time.sleep(0.000001)
         self.log.debug("Site collections done in %.3fs" % (time.time() - s))
@@ -6,8 +6,8 @@ import gevent
 from Config import config
 from util import helper
 from Plugin import PluginManager
-from ChartDb import ChartDb
-from ChartCollector import ChartCollector
+from .ChartDb import ChartDb
+from .ChartCollector import ChartCollector

 if "db" not in locals().keys(): # Share on reloads
     db = ChartDb()
@@ -39,7 +39,7 @@ class UiWebsocketPlugin(object):
             if not query.strip().upper().startswith("SELECT"):
                 raise Exception("Only SELECT query supported")
             res = db.execute(query, params)
-        except Exception, err: # Response the error to client
+        except Exception as err: # Response the error to client
             self.log.error("ChartDbQuery error: %s" % err)
             return {"error": str(err)}
         # Convert result to dict
@@ -1 +1 @@
-import ChartPlugin
+from . import ChartPlugin
@@ -1,13 +1,13 @@
 import time
 import re
-import cgi
+import html
 import hashlib

 from Plugin import PluginManager
 from Translate import Translate
 from Config import config

-from ContentFilterStorage import ContentFilterStorage
+from .ContentFilterStorage import ContentFilterStorage


 if "_" not in locals():
@@ -39,8 +39,8 @@ class UiWebsocketPlugin(object):
         else:
             self.cmd(
                 "confirm",
-                [_["Hide all content from <b>%s</b>?"] % cgi.escape(cert_user_id), _["Mute"]],
-                lambda (res): self.cbMuteAdd(to, auth_address, cert_user_id, reason)
+                [_["Hide all content from <b>%s</b>?"] % html.escape(cert_user_id), _["Mute"]],
+                lambda res: self.cbMuteAdd(to, auth_address, cert_user_id, reason)
             )

     def cbMuteRemove(self, to, auth_address):
@@ -55,8 +55,8 @@ class UiWebsocketPlugin(object):
         else:
             self.cmd(
                 "confirm",
-                [_["Unmute <b>%s</b>?"] % cgi.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
-                lambda (res): self.cbMuteRemove(to, auth_address)
+                [_["Unmute <b>%s</b>?"] % html.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
+                lambda res: self.cbMuteRemove(to, auth_address)
             )

     def actionMuteList(self, to):
@@ -101,13 +101,13 @@ class UiWebsocketPlugin(object):
         else:
             content = site.storage.loadJson(inner_path)
             title = _["New shared global content filter: <b>%s</b> (%s sites, %s users)"] % (
-                cgi.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
+                html.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
             )

             self.cmd(
                 "confirm",
                 [title, "Add"],
-                lambda (res): self.cbFilterIncludeAdd(to, res, address, inner_path, description)
+                lambda res: self.cbFilterIncludeAdd(to, res, address, inner_path, description)
             )

     def cbFilterIncludeAdd(self, to, res, address, inner_path, description):
@@ -189,7 +189,7 @@ class UiRequestPlugin(object):
         address = self.server.site_manager.resolveDomain(address)

         if address:
-            address_sha256 = "0x" + hashlib.sha256(address).hexdigest()
+            address_sha256 = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest()
         else:
             address_sha256 = None

@@ -62,7 +62,7 @@ class ContentFilterStorage(object):
                 )
                 continue

-            for key, val in content.iteritems():
+            for key, val in content.items():
                 if type(val) is not dict:
                     continue

@@ -1 +1 @@
-import ContentFilterPlugin
+from . import ContentFilterPlugin
@@ -1,5 +1,5 @@
 import re
-import cgi
+import html
 import copy

 from Plugin import PluginManager
@@ -78,8 +78,8 @@ class UiWebsocketPlugin(object):

         self.cmd(
             "confirm",
-            [_["This site requests <b>read</b> permission to: <b>%s</b>"] % cgi.escape(site_name), button_title],
-            lambda (res): self.cbCorsPermission(to, address)
+            [_["This site requests <b>read</b> permission to: <b>%s</b>"] % html.escape(site_name), button_title],
+            lambda res: self.cbCorsPermission(to, address)
         )

     def cbCorsPermission(self, to, address):
@@ -1 +1 @@
-import CorsPlugin
+from . import CorsPlugin
@@ -43,11 +43,11 @@ def getEcc(privatekey=None):

 def toOpensslPrivatekey(privatekey):
     privatekey_bin = btctools.encode_privkey(privatekey, "bin")
-    return '\x02\xca\x00\x20' + privatekey_bin
+    return b'\x02\xca\x00\x20' + privatekey_bin


 def toOpensslPublickey(publickey):
     publickey_bin = btctools.encode_pubkey(publickey, "bin")
     publickey_bin = publickey_bin[1:]
-    publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:]
+    publickey_openssl = b'\x02\xca\x00 ' + publickey_bin[:32] + b'\x00 ' + publickey_bin[32:]
     return publickey_openssl
@@ -3,9 +3,9 @@ import os

 from Plugin import PluginManager
 from Crypt import CryptBitcoin
-from lib.pybitcointools import bitcoin as btctools
+import lib.pybitcointools as btctools

-import CryptMessage
+from . import CryptMessage


 @PluginManager.registerTo("UiWebsocket")
@@ -1 +1 @@
-import CryptMessagePlugin
+from . import CryptMessagePlugin
@@ -48,7 +48,7 @@ class UiRequestPlugin(object):
         if ".zip/" in path or ".tar.gz/" in path:
             file_obj = None
             path_parts = self.parsePath(path)
-            file_path = u"%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"].decode("utf8"))
+            file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"])
             match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path)
             archive_path, path_within = match.groups()
             if archive_path not in archive_cache:
@@ -1 +1 @@
-import FilePackPlugin
+from . import FilePackPlugin
@@ -66,7 +66,7 @@ class UiWebsocketPlugin(object):
             self.cmd(
                 "confirm",
                 [_["Add <b>%s</b> new site?"] % len(addresses), "Add"],
-                lambda (res): self.cbMergerSiteAdd(to, addresses)
+                lambda res: self.cbMergerSiteAdd(to, addresses)
             )
         self.response(to, "ok")

@@ -102,7 +102,7 @@ class UiWebsocketPlugin(object):
         ret = {}
         if not merger_types:
             return self.response(to, {"error": "Not a merger site"})
-        for address, merged_type in merged_db.iteritems():
+        for address, merged_type in merged_db.items():
             if merged_type not in merger_types:
                 continue # Site not for us
             if query_site_info:
@@ -215,7 +215,7 @@ class UiWebsocketPlugin(object):
         if not re.match("^[A-Za-z0-9-]+$", merger_type):
             raise Exception("Invalid merger_type: %s" % merger_type)
         merged_sites = []
-        for address, merged_type in merged_db.iteritems():
+        for address, merged_type in merged_db.items():
             if merged_type != merger_type:
                 continue
             site = self.server.sites.get(address)
@@ -253,18 +253,18 @@ class SiteStoragePlugin(object):

         # Not a merger site, that's all
         if not merger_types:
-            raise StopIteration
+            return

         merged_sites = [
             site_manager.sites[address]
-            for address, merged_type in merged_db.iteritems()
+            for address, merged_type in merged_db.items()
             if merged_type in merger_types
         ]
         found = 0
         for merged_site in merged_sites:
             self.log.debug("Loading merged site: %s" % merged_site)
             merged_type = merged_db[merged_site.address]
-            for content_inner_path, content in merged_site.content_manager.contents.iteritems():
+            for content_inner_path, content in merged_site.content_manager.contents.items():
                 # content.json file itself
                 if merged_site.storage.isFile(content_inner_path): # Missing content.json file
                     merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)
@@ -273,7 +273,7 @@ class SiteStoragePlugin(object):
                     merged_site.log.error("[MISSING] %s" % content_inner_path)
                 # Data files in content.json
                 content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
-                for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
+                for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
                     if not file_relative_path.endswith(".json"):
                         continue # We only interesed in json files
                     file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
@@ -285,7 +285,7 @@ class SiteStoragePlugin(object):
                         merged_site.log.error("[MISSING] %s" % file_inner_path)
                     found += 1
                     if found % 100 == 0:
-                        time.sleep(0.000001) # Context switch to avoid UI block
+                        time.sleep(0.001) # Context switch to avoid UI block

     # Also notice merger sites on a merged site file change
     def onUpdated(self, inner_path, file=None):
@@ -339,11 +339,11 @@ class SiteManagerPlugin(object):
         site_manager = self
         if not self.sites:
             return
-        for site in self.sites.itervalues():
+        for site in self.sites.values():
             # Update merged sites
             try:
                 merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type")
-            except Exception, err:
+            except Exception as err:
                 self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err)))
                 continue
             if merged_type:
@@ -368,7 +368,7 @@ class SiteManagerPlugin(object):

             # Update merged to merger
             if merged_type:
-                for merger_site in self.sites.itervalues():
+                for merger_site in self.sites.values():
                     if "Merger:" + merged_type in merger_site.settings["permissions"]:
                         if site.address not in merged_to_merger:
                             merged_to_merger[site.address] = []
@@ -1 +1 @@
-import MergerSitePlugin
+from . import MergerSitePlugin
@@ -37,7 +37,7 @@ class UiWebsocketPlugin(object):
         total_s = time.time()
         num_sites = 0

-        for address, site_data in self.user.sites.items():
+        for address, site_data in list(self.user.sites.items()):
             feeds = site_data.get("follow")
             if not feeds:
                 continue
@@ -45,7 +45,7 @@ class UiWebsocketPlugin(object):
                 self.log.debug("Invalid feed for site %s" % address)
                 continue
             num_sites += 1
-            for name, query_set in feeds.iteritems():
+            for name, query_set in feeds.items():
                 site = SiteManager.site_manager.get(address)
                 if not site or not site.storage.has_db:
                     continue
@ -78,7 +78,7 @@ class UiWebsocketPlugin(object):
|
||||||
|
|
||||||
for row in res:
|
for row in res:
|
||||||
row = dict(row)
|
row = dict(row)
|
||||||
if not isinstance(row["date_added"], (int, long, float, complex)):
|
if not isinstance(row["date_added"], (int, float, complex)):
|
||||||
self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"]))
|
self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"]))
|
||||||
continue
|
continue
|
||||||
if row["date_added"] > 1000000000000: # Formatted as millseconds
|
if row["date_added"] > 1000000000000: # Formatted as millseconds
|
||||||
|
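Note: `long` no longer exists because PEP 237 folded it into int, so it is simply dropped from the isinstance() tuple; plain int is arbitrary precision in Python 3:

    isinstance(10 ** 100, int)   # True: no separate long type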
@@ -116,7 +116,7 @@ class UiWebsocketPlugin(object):

search_text, filters = self.parseSearch(search)

-for address, site in SiteManager.site_manager.list().iteritems():
+for address, site in SiteManager.site_manager.list().items():
if not site.storage.has_db:
continue

@@ -137,7 +137,7 @@ class UiWebsocketPlugin(object):

num_sites += 1

-for name, query in feeds.iteritems():
+for name, query in feeds.items():
s = time.time()
try:
db_query = DbQuery(query)
@@ -162,7 +162,7 @@ class UiWebsocketPlugin(object):
db_query.parts["LIMIT"] = str(limit)

res = site.storage.query(str(db_query), params)
-except Exception, err:
+except Exception as err:
self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query})
continue
@@ -1 +1 @@
-import NewsfeedPlugin
+from . import NewsfeedPlugin
@@ -88,8 +88,8 @@ class ContentDbPlugin(object):
site_sizes[row["site_id"]]["optional_downloaded"] += row["size"]

# Site site size stats to sites.json settings
-site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()}
+site_ids_reverse = {val: key for key, val in self.site_ids.items()}
-for site_id, stats in site_sizes.iteritems():
+for site_id, stats in site_sizes.items():
site_address = site_ids_reverse.get(site_id)
if not site_address:
self.log.error("Not found site_id: %s" % site_id)
@@ -166,7 +166,7 @@ class ContentDbPlugin(object):
num = 0
site_id = self.site_ids[site.address]
content_inner_dir = helper.getDirname(content_inner_path)
-for relative_inner_path, file in content.get("files_optional", {}).iteritems():
+for relative_inner_path, file in content.get("files_optional", {}).items():
file_inner_path = content_inner_dir + relative_inner_path
hash_id = int(file["sha512"][0:4], 16)
if hash_id in site.content_manager.hashfield:
@@ -232,14 +232,14 @@ class ContentDbPlugin(object):
num_file = 0
num_updated = 0
num_site = 0
-for site in self.sites.values():
+for site in list(self.sites.values()):
if not site.content_manager.has_optional_files:
continue
if not site.settings["serving"]:
continue
has_updated_hashfield = next((
peer
-for peer in site.peers.itervalues()
+for peer in site.peers.values()
if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated
), None)

@@ -248,7 +248,7 @@ class ContentDbPlugin(object):

hashfield_peers = itertools.chain.from_iterable(
peer.hashfield.storage
-for peer in site.peers.itervalues()
+for peer in site.peers.values()
if peer.has_hashfield
)
peer_nums = collections.Counter(
@@ -270,7 +270,7 @@ class ContentDbPlugin(object):
updates[row["file_id"]] = peer_num

self.execute("BEGIN")
-for file_id, peer_num in updates.iteritems():
+for file_id, peer_num in updates.items():
self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id))
self.execute("END")

@@ -394,7 +394,7 @@ class ContentDbPlugin(object):

self.updatePeerNumbers()

-site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()}
+site_ids_reverse = {val: key for key, val in self.site_ids.items()}
deleted_file_ids = []
for row in self.queryDeletableFiles():
site_address = site_ids_reverse.get(row["site_id"])
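Note: the `list(self.sites.values())` wrappers in this file do more than silence the view-object change: they snapshot the dict, which matters because sites can be added or removed from another greenlet, and iterating a live view while the dict changes size raises RuntimeError. Sketch with a stand-in dict:

    sites = {"addr1": object(), "addr2": object()}
    for site in list(sites.values()):   # snapshot; safe to mutate inside
        sites.pop("addr2", None)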
@@ -6,7 +6,7 @@ import gevent

from util import helper
from Plugin import PluginManager
-import ContentDbPlugin
+from . import ContentDbPlugin


# We can only import plugin host clases after the plugins are loaded
@@ -24,7 +24,7 @@ def processAccessLog():
for site_id in access_log:
content_db.execute(
"UPDATE file_optional SET time_accessed = %s WHERE ?" % now,
-{"site_id": site_id, "inner_path": access_log[site_id].keys()}
+{"site_id": site_id, "inner_path": list(access_log[site_id].keys())}
)
num += len(access_log[site_id])
access_log.clear()
@@ -37,7 +37,7 @@ def processRequestLog():
num = 0
cur.execute("BEGIN")
for site_id in request_log:
-for inner_path, uploaded in request_log[site_id].iteritems():
+for inner_path, uploaded in request_log[site_id].items():
content_db.execute(
"UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" % uploaded,
{"site_id": site_id, "inner_path": inner_path}
@@ -101,7 +101,7 @@ class ContentManagerPlugin(object):
{"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id}
)
row = res.fetchone()
-if row and row[0]:
+if row and row["is_downloaded"]:
return True
else:
return False
@@ -191,7 +191,7 @@ class SitePlugin(object):
if is_downloadable:
return is_downloadable

-for path in self.settings.get("optional_help", {}).iterkeys():
+for path in self.settings.get("optional_help", {}).keys():
if inner_path.startswith(path):
return True
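Note: `row[0]` becoming `row["is_downloaded"]` is a robustness cleanup rather than a py2/py3 requirement: assuming the connection sets sqlite3.Row as its row_factory (as ZeroNet's DB layer does elsewhere), columns can be read by name, so the check survives changes to the SELECT column order. Standalone sketch:

    import sqlite3
    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    conn.execute("CREATE TABLE file_optional (is_downloaded INTEGER)")
    conn.execute("INSERT INTO file_optional VALUES (1)")
    row = conn.execute("SELECT is_downloaded FROM file_optional").fetchone()
    assert row["is_downloaded"] == row[0] == 1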
@@ -1,15 +1,7 @@
-import hashlib
-import os
import copy
-import json
-from cStringIO import StringIO

import pytest

-from OptionalManager import OptionalManagerPlugin
-from util import helper
-from Crypt import CryptBitcoin


@pytest.mark.usefixtures("resetSettings")
class TestOptionalManager:
@@ -58,7 +50,7 @@ class TestOptionalManager:
assert not file_row["is_downloaded"]

# Write file from outside of ZeroNet
-site.storage.open("testfile", "wb").write("A" * 1234) # For quick check hash does not matter only file size
+site.storage.open("testfile", "wb").write(b"A" * 1234) # For quick check hash does not matter only file size

hashfield_len_before = len(site.content_manager.hashfield)
site.storage.verifyFiles(quick_check=True)
@@ -92,8 +84,8 @@ class TestOptionalManager:
assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd")

# Write files from outside of ZeroNet (For quick check hash does not matter only file size)
-site.storage.open("testfile1", "wb").write("A" * 1234)
+site.storage.open("testfile1", "wb").write(b"A" * 1234)
-site.storage.open("testfile2", "wb").write("B" * 2345)
+site.storage.open("testfile2", "wb").write(b"B" * 2345)

site.storage.verifyFiles(quick_check=True)

@@ -129,7 +121,6 @@ class TestOptionalManager:
assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1

def testOptionalDelete(self, site):
-privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
contents = site.content_manager.contents

site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
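Note: files opened in binary mode accept only bytes in Python 3, which is why the test payloads gain the b prefix:

    with open("testfile", "wb") as f:
        f.write(b"A" * 1234)   # a plain str here raises TypeError on py3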
@@ -1,6 +1,6 @@
import re
import time
-import cgi
+import html

import gevent

@@ -28,7 +28,7 @@ class UiWebsocketPlugin(object):
content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time()
if len(content_db.my_optional_files) > 50: # Keep only last 50
oldest_key = min(
-content_db.my_optional_files.iterkeys(),
+iter(content_db.my_optional_files.keys()),
key=(lambda key: content_db.my_optional_files[key])
)
del content_db.my_optional_files[oldest_key]
@@ -80,7 +80,7 @@ class UiWebsocketPlugin(object):
# Add leech / seed stats
row["peer_seed"] = 0
row["peer_leech"] = 0
-for peer in site.peers.itervalues():
+for peer in site.peers.values():
if not peer.time_piecefields_updated or sha512 not in peer.piecefields:
continue
peer_piecefield = peer.piecefields[sha512].tostring()
@@ -212,7 +212,7 @@ class UiWebsocketPlugin(object):
num_file = len(inner_path)
if back == "ok":
if num_file == 1:
-self.cmd("notification", ["done", _["Pinned %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000])
+self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
else:
self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000])
self.response(to, back)
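Note: cgi.escape() was deprecated and later removed (Python 3.8); html.escape() is its replacement. One behavioral difference worth knowing: html.escape() escapes quotes by default (quote=True), while cgi.escape() did not, so these notifications may now also encode " and ' in file names. Sketch:

    import html
    html.escape('<b>"x"</b>')         # '&lt;b&gt;&quot;x&quot;&lt;/b&gt;'
    html.escape('<b>"x"</b>', False)  # quotes kept, like cgi.escape's default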
@@ -224,7 +224,7 @@ class UiWebsocketPlugin(object):
num_file = len(inner_path)
if back == "ok":
if num_file == 1:
-self.cmd("notification", ["done", _["Removed pin from %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000])
+self.cmd("notification", ["done", _["Removed pin from %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
else:
self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000])
self.response(to, back)
@@ -325,7 +325,7 @@ class UiWebsocketPlugin(object):
self.cmd("notification", [
"done",
_["You started to help distribute <b>%s</b>.<br><small>Directory: %s</small>"] %
-(cgi.escape(title), cgi.escape(directory)),
+(html.escape(title), html.escape(directory)),
10000
])

@@ -369,10 +369,10 @@ class UiWebsocketPlugin(object):
self.cmd(
"confirm",
[
-_["Help distribute all new optional files on site <b>%s</b>"] % cgi.escape(site_title),
+_["Help distribute all new optional files on site <b>%s</b>"] % html.escape(site_title),
_["Yes, I want to help!"]
],
-lambda (res): self.cbOptionalHelpAll(to, site, True)
+lambda res: self.cbOptionalHelpAll(to, site, True)
)
else:
site.settings["autodownloadoptional"] = False
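Note: `lambda (res): ...` relied on Python 2 tuple-parameter unpacking, which PEP 3113 removed; in Python 3 the parenthesized parameter is a SyntaxError, so the argument list becomes plain `res`:

    cb = lambda res: print("confirmed:", res)   # py3 form, no parens
    cb(True)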
@@ -1 +1 @@
-import OptionalManagerPlugin
+from . import OptionalManagerPlugin

@@ -96,8 +96,8 @@ class ContentDbPlugin(object):
gevent.spawn_later(60*60, self.savePeers, site, spawn=True)

def saveAllPeers(self):
-for site in self.sites.values():
+for site in list(self.sites.values()):
try:
self.savePeers(site)
-except Exception, err:
+except Exception as err:
site.log.error("Save peer error: %s" % err)

@@ -1,2 +1,2 @@
-import PeerDbPlugin
+from . import PeerDbPlugin
@@ -1,14 +1,11 @@
import re
import os
-import cgi
+import html
import sys
import math
import time
import json
-try:
-import cStringIO as StringIO
-except:
-import StringIO
+import io

import gevent

@@ -17,7 +14,7 @@ from Plugin import PluginManager
from Debug import Debug
from Translate import Translate
from util import helper
-from ZipStream import ZipStream
+from .ZipStream import ZipStream

plugin_dir = "plugins/Sidebar"
media_dir = plugin_dir + "/media"
@@ -46,7 +43,7 @@ class UiRequestPlugin(object):
from Debug import DebugMedia
DebugMedia.merge(plugin_media_file)
if ext == "js":
-yield _.translateData(open(plugin_media_file).read())
+yield _.translateData(open(plugin_media_file).read()).encode("utf8")
else:
for part in self.actionFile(plugin_media_file, send_header=False):
yield part
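Note: the cStringIO/StringIO modules are gone in Python 3; the io module covers both cases, with io.BytesIO for binary buffers and io.StringIO for text. This plugin moves raw file data, so BytesIO is the one that matters here:

    import io
    buf = io.BytesIO()
    buf.write(b"binary payload")
    buf.getvalue()                 # b'binary payload'
    io.StringIO().write("text")    # the str-based counterpart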
@@ -84,15 +81,13 @@ class UiRequestPlugin(object):
yield data


@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
def sidebarRenderPeerStats(self, body, site):
-connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected])
+connected = len([peer for peer in list(site.peers.values()) if peer.connection and peer.connection.connected])
-connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")])
+connectable = len([peer_id for peer_id in list(site.peers.keys()) if not peer_id.endswith(":0")])
-onion = len([peer_id for peer_id in site.peers.keys() if ".onion" in peer_id])
+onion = len([peer_id for peer_id in list(site.peers.keys()) if ".onion" in peer_id])
-local = len([peer for peer in site.peers.values() if helper.isPrivateIp(peer.ip)])
+local = len([peer for peer in list(site.peers.values()) if helper.isPrivateIp(peer.ip)])
peers_total = len(site.peers)

# Add myself
@@ -111,7 +106,7 @@ class UiWebsocketPlugin(object):
percent_connectable = percent_connected = percent_onion = 0

if local:
-local_html = _(u"<li class='color-yellow'><span>{_[Local]}:</span><b>{local}</b></li>")
+local_html = _("<li class='color-yellow'><span>{_[Local]}:</span><b>{local}</b></li>")
else:
local_html = ""

@@ -122,7 +117,7 @@ class UiWebsocketPlugin(object):
",".join(peer_ips)
)

-body.append(_(u"""
+body.append(_("""
<li>
<label>
{_[Peers]}
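Note: the dropped u"..." prefixes are pure cleanup: every str literal is unicode in Python 3, and the prefix (still legal per PEP 414) changes nothing:

    assert type(u"peers") is type("peers") is str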
@@ -155,7 +150,7 @@ class UiWebsocketPlugin(object):
percent_recv = 0.5
percent_sent = 0.5

-body.append(_(u"""
+body.append(_("""
<li>
<label>{_[Data transfer]}</label>
<ul class='graph graph-stacked'>
@@ -170,7 +165,7 @@ class UiWebsocketPlugin(object):
"""))

def sidebarRenderFileStats(self, body, site):
-body.append(_(u"""
+body.append(_("""
<li>
<label>
{_[Files]}
@@ -198,7 +193,7 @@ class UiWebsocketPlugin(object):
content = site.content_manager.contents[inner_path]
if "files" not in content or content["files"] is None:
continue
-for file_name, file_details in content["files"].items():
+for file_name, file_details in list(content["files"].items()):
size_total += file_details["size"]
ext = file_name.split(".")[-1]
size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"]
@@ -236,7 +231,7 @@ class UiWebsocketPlugin(object):
percent = 100 * (float(size) / size_total)
percent = math.floor(percent * 100) / 100 # Floor to 2 digits
body.append(
-u"""<li style='width: %.2f%%' class='%s back-%s' title="%s"></li>""" %
+"""<li style='width: %.2f%%' class='%s back-%s' title="%s"></li>""" %
(percent, _[extension], color, _[extension])
)

@@ -262,7 +257,7 @@ class UiWebsocketPlugin(object):
else:
size_formatted = "%.0fkB" % (size / 1024)

-body.append(u"<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, _[title], size_formatted))
+body.append("<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, _[title], size_formatted))

body.append("</ul></li>")

@@ -272,9 +267,9 @@ class UiWebsocketPlugin(object):
size_limit = site.getSizeLimit()
percent_used = size / size_limit

-body.append(_(u"""
+body.append(_("""
<li>
-<label>{_[Size limit]} <small>({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,d}MB)</small></label>
+<label>{_[Size limit]} <small>({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,.0f}MB)</small></label>
<input type='text' class='text text-num' value="{size_limit}" id='input-sitelimit'/><span class='text-post'>MB</span>
<a href='#Set' class='button' id='button-sitelimit'>{_[Set]}</a>
</li>
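Note: the `{free_space:,d}` to `{free_space:,.0f}` change goes hand in hand with Python 3's true division: `/` between ints now produces a float, and the d format code rejects floats, while ,.0f keeps the thousands separator and drops the decimals. Sketch:

    free_space = 52342 / 2              # float under py3 true division
    "{:,.0f}MB".format(free_space)      # '26,171MB'
    # "{:,d}".format(free_space)        # ValueError: 'd' rejects floats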
@@ -292,7 +287,7 @@ class UiWebsocketPlugin(object):
size_formatted_total = size_total / 1024 / 1024
size_formatted_downloaded = size_downloaded / 1024 / 1024

-body.append(_(u"""
+body.append(_("""
<li>
<label>{_[Optional files]}</label>
<ul class='graph'>
@@ -314,14 +309,14 @@ class UiWebsocketPlugin(object):
else:
checked = ""

-body.append(_(u"""
+body.append(_("""
<li>
<label>{_[Download and help distribute all files]}</label>
<input type="checkbox" class="checkbox" id="checkbox-autodownloadoptional" {checked}/><div class="checkbox-skin"></div>
"""))

autodownload_bigfile_size_limit = int(site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit))
-body.append(_(u"""
+body.append(_("""
<div class='settings-autodownloadoptional'>
<label>{_[Auto download big file size limit]}</label>
<input type='text' class='text text-num' value="{autodownload_bigfile_size_limit}" id='input-autodownload_bigfile_size_limit'/><span class='text-post'>MB</span>
@@ -331,16 +326,16 @@ class UiWebsocketPlugin(object):
body.append("</li>")

def sidebarRenderBadFiles(self, body, site):
-body.append(_(u"""
+body.append(_("""
<li>
<label>{_[Needs to be updated]}:</label>
<ul class='filelist'>
"""))

i = 0
-for bad_file, tries in site.bad_files.iteritems():
+for bad_file, tries in site.bad_files.items():
i += 1
-body.append(_(u"""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""", {
+body.append(_("""<li class='color-red' title="{bad_file_path} ({tries})">{bad_filename}</li>""", {
"bad_file_path": bad_file,
"bad_filename": helper.getFilename(bad_file),
"tries": _.pluralize(tries, "{} try", "{} tries")
@@ -350,7 +345,7 @@ class UiWebsocketPlugin(object):

if len(site.bad_files) > 30:
num_bad_files = len(site.bad_files) - 30
-body.append(_(u"""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))
+body.append(_("""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))

body.append("""
</ul>
@@ -363,11 +358,11 @@ class UiWebsocketPlugin(object):
size = float(site.storage.getSize(inner_path)) / 1024
feeds = len(site.storage.db.schema.get("feeds", {}))
else:
-inner_path = _[u"No database found"]
+inner_path = _["No database found"]
size = 0.0
feeds = 0

-body.append(_(u"""
+body.append(_("""
<li>
<label>{_[Database]} <small>({size:.2f}kB, {_[search feeds]}: {_[{feeds} query]})</small></label>
<div class='flex'>
@@ -385,14 +380,14 @@ class UiWebsocketPlugin(object):
quota = rules["max_size"] / 1024
try:
content = site.content_manager.contents["data/users/%s/content.json" % auth_address]
-used = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])
+used = len(json.dumps(content)) + sum([file["size"] for file in list(content["files"].values())])
except:
used = 0
used = used / 1024
else:
quota = used = 0

-body.append(_(u"""
+body.append(_("""
<li>
<label>{_[Identity address]} <small>({_[limit used]}: {used:.2f}kB / {quota:.2f}kB)</small></label>
<div class='flex'>
@@ -411,7 +406,7 @@ class UiWebsocketPlugin(object):
class_pause = "hidden"
class_resume = ""

-body.append(_(u"""
+body.append(_("""
<li>
<label>{_[Site control]}</label>
<a href='#Update' class='button noupdate' id='button-update'>{_[Update]}</a>
@@ -423,7 +418,7 @@ class UiWebsocketPlugin(object):

donate_key = site.content_manager.contents.get("content.json", {}).get("donate", True)
site_address = self.site.address
-body.append(_(u"""
+body.append(_("""
<li>
<label>{_[Site address]}</label><br>
<div class='flex'>
@@ -431,8 +426,8 @@ class UiWebsocketPlugin(object):
"""))
if donate_key == False or donate_key == "":
pass
-elif (type(donate_key) == str or type(donate_key) == unicode) and len(donate_key) > 0:
+elif (type(donate_key) == str or type(donate_key) == str) and len(donate_key) > 0:
-body.append(_(u"""
+body.append(_("""
</div>
</li>
<li>
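Note: with the unicode type gone, the mechanically converted check above degenerates into testing str twice; the second comparison is redundant. A hedged, more idiomatic py3 form (not what this commit ships) would be:

    if isinstance(donate_key, str) and len(donate_key) > 0:
        ...  # show the custom donate key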
@@ -441,10 +436,10 @@ class UiWebsocketPlugin(object):
{donate_key}
"""))
else:
-body.append(_(u"""
+body.append(_("""
<a href='bitcoin:{site_address}' class='button' id='button-donate'>{_[Donate]}</a>
"""))
-body.append(_(u"""
+body.append(_("""
</div>
</li>
"""))
@@ -455,7 +450,7 @@ class UiWebsocketPlugin(object):
else:
checked = ""

-body.append(_(u"""
+body.append(_("""
<h2 class='owned-title'>{_[This is my site]}</h2>
<input type="checkbox" class="checkbox" id="checkbox-owned" {checked}/><div class="checkbox-skin"></div>
"""))
@@ -464,7 +459,7 @@ class UiWebsocketPlugin(object):
title = site.content_manager.contents.get("content.json", {}).get("title", "")
description = site.content_manager.contents.get("content.json", {}).get("description", "")

-body.append(_(u"""
+body.append(_("""
<li>
<label for='settings-title'>{_[Site title]}</label>
<input type='text' class='text' value="{title}" id='settings-title'/>
@@ -483,17 +478,17 @@ class UiWebsocketPlugin(object):
def sidebarRenderContents(self, body, site):
has_privatekey = bool(self.user.getSiteData(site.address, create=False).get("privatekey"))
if has_privatekey:
-tag_privatekey = _(u"{_[Private key saved.]} <a href='#Forgot+private+key' id='privatekey-forgot' class='link-right'>{_[Forgot]}</a>")
+tag_privatekey = _("{_[Private key saved.]} <a href='#Forgot+private+key' id='privatekey-forgot' class='link-right'>{_[Forgot]}</a>")
else:
-tag_privatekey = _(u"<a href='#Add+private+key' id='privatekey-add' class='link-right'>{_[Add saved private key]}</a>")
+tag_privatekey = _("<a href='#Add+private+key' id='privatekey-add' class='link-right'>{_[Add saved private key]}</a>")

-body.append(_(u"""
+body.append(_("""
<li>
<label>{_[Content publishing]} <small class='label-right'>{tag_privatekey}</small></label>
""".replace("{tag_privatekey}", tag_privatekey)))

# Choose content you want to sign
-body.append(_(u"""
+body.append(_("""
<div class='flex'>
<input type='text' class='text' value="content.json" id='input-contents'/>
<a href='#Sign-and-Publish' id='button-sign-publish' class='button'>{_[Sign and publish]}</a>
@@ -502,8 +497,8 @@ class UiWebsocketPlugin(object):
"""))

contents = ["content.json"]
-contents += site.content_manager.contents.get("content.json", {}).get("includes", {}).keys()
+contents += list(site.content_manager.contents.get("content.json", {}).get("includes", {}).keys())
-body.append(_(u"<div class='contents'>{_[Choose]}: "))
+body.append(_("<div class='contents'>{_[Choose]}: "))
for content in contents:
body.append(_("<a href='{content}' class='contents-content'>{content}</a> "))
body.append("</div>")
@@ -520,7 +515,7 @@ class UiWebsocketPlugin(object):

body.append("<div>")
body.append("<a href='#Close' class='close'>×</a>")
-body.append("<h1>%s</h1>" % cgi.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True))
+body.append("<h1>%s</h1>" % html.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True))

body.append("<div class='globe loading'></div>")

|
||||||
self.response(to, "".join(body))
|
self.response(to, "".join(body))
|
||||||
|
|
||||||
def downloadGeoLiteDb(self, db_path):
|
def downloadGeoLiteDb(self, db_path):
|
||||||
import urllib
|
|
||||||
import gzip
|
import gzip
|
||||||
import shutil
|
import shutil
|
||||||
from util import helper
|
from util import helper
|
||||||
|
@ -566,12 +560,13 @@ class UiWebsocketPlugin(object):
|
||||||
"https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz"
|
"https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz"
|
||||||
]
|
]
|
||||||
for db_url in db_urls:
|
for db_url in db_urls:
|
||||||
|
downloadl_err = None
|
||||||
try:
|
try:
|
||||||
# Download
|
# Download
|
||||||
response = helper.httpRequest(db_url)
|
response = helper.httpRequest(db_url)
|
||||||
data_size = response.getheader('content-length')
|
data_size = response.getheader('content-length')
|
||||||
data_recv = 0
|
data_recv = 0
|
||||||
data = StringIO.StringIO()
|
data = io.BytesIO()
|
||||||
while True:
|
while True:
|
||||||
buff = response.read(1024 * 512)
|
buff = response.read(1024 * 512)
|
||||||
if not buff:
|
if not buff:
|
||||||
|
@ -592,11 +587,12 @@ class UiWebsocketPlugin(object):
|
||||||
time.sleep(2) # Wait for notify animation
|
time.sleep(2) # Wait for notify animation
|
||||||
return True
|
return True
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
|
download_err = err
|
||||||
self.log.error("Error downloading %s: %s" % (db_url, err))
|
self.log.error("Error downloading %s: %s" % (db_url, err))
|
||||||
pass
|
pass
|
||||||
self.cmd("progress", [
|
self.cmd("progress", [
|
||||||
"geolite-info",
|
"geolite-info",
|
||||||
_["GeoLite2 City database download error: {}!<br>Please download manually and unpack to data dir:<br>{}"].format(err, db_urls[0]),
|
_["GeoLite2 City database download error: {}!<br>Please download manually and unpack to data dir:<br>{}"].format(download_err, db_urls[0]),
|
||||||
-100
|
-100
|
||||||
])
|
])
|
||||||
|
|
||||||
|
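Note: two things in this hunk beyond the io.BytesIO switch. First, the .gz payload is binary, so it is accumulated in a bytes buffer; a sketch of the read loop, assuming an http.client-style response object:

    import io
    data = io.BytesIO()
    while True:
        buff = response.read(512 * 1024)
        if not buff:
            break
        data.write(buff)   # bytes in, bytes out

Second, the new download_err variable exists because Python 3 unbinds the `except ... as err` target when the except block ends, so `err` can no longer be referenced in the progress message after the loop.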
@@ -629,14 +625,14 @@ class UiWebsocketPlugin(object):
return loc

def getPeerLocations(self, peers):
-import maxminddb
+from . import maxminddb
db_path = config.data_dir + '/GeoLite2-City.mmdb'
if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0:
if not self.downloadGeoLiteDb(db_path):
return False
geodb = maxminddb.open_database(db_path)

-peers = peers.values()
+peers = list(peers.values())
# Place bars
peer_locations = []
placed = {} # Already placed bars here
@@ -704,9 +700,9 @@ class UiWebsocketPlugin(object):
globe_data += [peer_location["lat"], peer_location["lon"], height]

self.response(to, globe_data)
-except Exception, err:
+except Exception as err:
self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err))
-self.response(to, {"error": err})
+self.response(to, {"error": str(err)})

def actionSiteSetOwned(self, to, owned):
permissions = self.getPermissions(to)
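Note: `{"error": err}` becoming `{"error": str(err)}` matters because the websocket response gets JSON-encoded and exception objects are not serializable; their message string is:

    import json
    err = ValueError("bad peer")
    json.dumps({"error": str(err)})   # '{"error": "bad peer"}'
    # json.dumps({"error": err})      # TypeError: not JSON serializable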
@@ -1,14 +1,14 @@
-import cStringIO as StringIO
+import io
import os
import zipfile


-class ZipStream(file):
+class ZipStream(object):
def __init__(self, dir_path):
self.dir_path = dir_path
self.pos = 0
self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
-self.buff = StringIO.StringIO()
+self.buff = io.BytesIO()
self.file_list = self.getFileList()

def getFileList(self):
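Note: the py2 `file` builtin no longer exists, so ZipStream cannot subclass it; inheriting from object works because zipfile.ZipFile duck-types its output stream, needing only the methods it actually calls (write() and tell() to construct in "w" mode; streaming members out needs a bit more of the file protocol, which the real class provides). A minimal sketch with an assumed SinkStream name:

    import io
    import zipfile

    class SinkStream(object):          # no file base class on py3
        def __init__(self):
            self.pos = 0
            self.buff = io.BytesIO()
        def write(self, data):         # ZipFile pushes compressed bytes here
            self.pos += len(data)
            return self.buff.write(data)
        def tell(self):                # ZipFile reads the stream position
            return self.pos
        def flush(self):
            pass

    zf = zipfile.ZipFile(SinkStream(), "w", zipfile.ZIP_DEFLATED, allowZip64=True)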
@@ -1 +1 @@
-import SidebarPlugin
+from . import SidebarPlugin

@@ -1 +1 @@
-import StatsPlugin
+from . import StatsPlugin

@@ -15,7 +15,7 @@ class UiRequestPlugin(object):
path_parts = self.parsePath(path)
kwargs["header_length"] = False
file_generator = super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs)
-if "next" in dir(file_generator): # File found and generator returned
+if "__next__" in dir(file_generator): # File found and generator returned
site = self.server.sites.get(path_parts["address"])
return self.actionPatchFile(site, path_parts["inner_path"], file_generator)
else:
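Note: PEP 3114 renamed the iterator method from next() to __next__(), so sniffing for a generator changes accordingly:

    def gen():
        yield b"chunk"
    "__next__" in dir(gen())   # True on py3; py2 generators exposed "next"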
@@ -28,10 +28,10 @@ class UiRequestPlugin(object):
file_generator = super(UiRequestPlugin, self).actionUiMedia(path)
if translate.lang != "en" and path.endswith(".js"):
s = time.time()
-data = "".join(list(file_generator))
+data = b"".join(list(file_generator))
-data = translate.translateData(data)
+data = translate.translateData(data.decode("utf8"))
self.log.debug("Patched %s (%s bytes) in %.3fs" % (path, len(data), time.time() - s))
-return iter([data])
+return iter([data.encode("utf8")])
else:
return file_generator

@@ -49,12 +49,12 @@ class UiRequestPlugin(object):
if not lang_file_exist or inner_path not in content_json.get("translate", []):
for part in file_generator:
if inner_path.endswith(".html"):
-yield part.replace("lang={lang}", "lang=" + str(translate.lang)) # lang get parameter to .js file to avoid cache
+yield part.replace(b"lang={lang}", b"lang=%s" % translate.lang.encode("utf8")) # lang get parameter to .js file to avoid cache
else:
yield part
else:
s = time.time()
-data = "".join(list(file_generator))
+data = b"".join(list(file_generator)).decode("utf8")

# if site.content_manager.contents["content.json"]["files"].get(lang_file):
site.needFile(lang_file, priority=10)
@@ -63,9 +63,9 @@ class UiRequestPlugin(object):
data = translate.translateData(data, site.storage.loadJson(lang_file), "js")
else:
data = translate.translateData(data, site.storage.loadJson(lang_file), "html")
-data = data.replace("lang={lang}", "lang=" + str(translate.lang)) # lang get parameter to .js file to avoid cache
+data = data.replace(b"lang={lang}", b"lang=%s" % translate.lang.encode("utf8")) # lang get parameter to .js file to avoid cache
except Exception as err:
site.log.error("Error loading translation file %s: %s" % (lang_file, err))

self.log.debug("Patched %s (%s bytes) in %.3fs" % (inner_path, len(data), time.time() - s))
-yield data
+yield data.encode("utf8")
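Note: the pattern running through this file is the py3 bytes/str split: served chunks are bytes, so they are joined with b"", decoded to str for translateData()'s text work, and encoded back to utf8 before being yielded to the client; replacements inside raw chunks use bytes patterns (b"lang={lang}"). A compact round trip:

    chunks = [b"hello ", b"lang={lang}"]
    data = b"".join(chunks).decode("utf8")        # bytes -> str for text work
    data = data.replace("lang={lang}", "lang=hu")
    out = data.encode("utf8")                     # str -> bytes on the wire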
@@ -1 +1 @@
-import TranslateSitePlugin
+from . import TranslateSitePlugin

@@ -17,7 +17,7 @@ class ActionsPlugin(object):

def main(self):
global notificationicon, winfolders
-from lib import notificationicon, winfolders
+from .lib import notificationicon, winfolders
import gevent.threadpool

self.main = sys.modules["main"]
@@ -25,7 +25,7 @@ class ActionsPlugin(object):
fs_encoding = sys.getfilesystemencoding()

icon = notificationicon.NotificationIcon(
-os.path.join(os.path.dirname(os.path.abspath(__file__).decode(fs_encoding)), 'trayicon.ico'),
+os.path.join(os.path.dirname(os.path.abspath(__file__)), 'trayicon.ico'),
"ZeroNet %s" % config.version
)
self.icon = icon
@@ -137,7 +137,7 @@ class ActionsPlugin(object):
cmd += ' --open_browser ""'
cmd = cmd.decode(sys.getfilesystemencoding())

-return u"""
+return """
@echo off
chcp 65001 > nul
set PYTHONIOENCODING=utf-8

@@ -1,4 +1,4 @@
import sys

if sys.platform == 'win32':
-import TrayiconPlugin
+from . import TrayiconPlugin
@@ -190,27 +190,27 @@ DefWindowProc = ctypes.windll.user32.DefWindowProcW
DefWindowProc.restype = ctypes.c_int
DefWindowProc.argtypes = [ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM]

-WS_OVERLAPPED = 0x00000000L
+WS_OVERLAPPED = 0x00000000
-WS_POPUP = 0x80000000L
+WS_POPUP = 0x80000000
-WS_CHILD = 0x40000000L
+WS_CHILD = 0x40000000
-WS_MINIMIZE = 0x20000000L
+WS_MINIMIZE = 0x20000000
-WS_VISIBLE = 0x10000000L
+WS_VISIBLE = 0x10000000
-WS_DISABLED = 0x08000000L
+WS_DISABLED = 0x08000000
-WS_CLIPSIBLINGS = 0x04000000L
+WS_CLIPSIBLINGS = 0x04000000
-WS_CLIPCHILDREN = 0x02000000L
+WS_CLIPCHILDREN = 0x02000000
-WS_MAXIMIZE = 0x01000000L
+WS_MAXIMIZE = 0x01000000
-WS_CAPTION = 0x00C00000L
+WS_CAPTION = 0x00C00000
-WS_BORDER = 0x00800000L
+WS_BORDER = 0x00800000
-WS_DLGFRAME = 0x00400000L
+WS_DLGFRAME = 0x00400000
-WS_VSCROLL = 0x00200000L
+WS_VSCROLL = 0x00200000
-WS_HSCROLL = 0x00100000L
+WS_HSCROLL = 0x00100000
-WS_SYSMENU = 0x00080000L
+WS_SYSMENU = 0x00080000
-WS_THICKFRAME = 0x00040000L
+WS_THICKFRAME = 0x00040000
-WS_GROUP = 0x00020000L
+WS_GROUP = 0x00020000
-WS_TABSTOP = 0x00010000L
+WS_TABSTOP = 0x00010000

-WS_MINIMIZEBOX = 0x00020000L
+WS_MINIMIZEBOX = 0x00020000
-WS_MAXIMIZEBOX = 0x00010000L
+WS_MAXIMIZEBOX = 0x00010000

WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED |
WS_CAPTION |
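Note: the trailing L marked py2 long literals; PEP 237 removed the int/long split, so the suffix goes away with identical values, and py3's arbitrary-precision int holds 0x80000000 without overflow:

    WS_POPUP = 0x80000000
    assert WS_POPUP == 2147483648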
@@ -497,7 +497,7 @@ DispatchMessage.argtypes = [ctypes.POINTER(MSG)]

def LoadIcon(iconfilename, small=False):
return LoadImage(0,
-unicode(iconfilename),
+str(iconfilename),
IMAGE_ICON,
16 if small else 0,
16 if small else 0,
@@ -506,15 +506,15 @@ def LoadIcon(iconfilename, small=False):

class NotificationIcon(object):
def __init__(self, iconfilename, tooltip=None):
-assert os.path.isfile(unicode(iconfilename)), "{} doesn't exist".format(iconfilename)
+assert os.path.isfile(str(iconfilename)), "{} doesn't exist".format(iconfilename)
-self._iconfile = unicode(iconfilename)
+self._iconfile = str(iconfilename)
self._hicon = LoadIcon(self._iconfile, True)
assert self._hicon, "Failed to load {}".format(iconfilename)
#self._pumpqueue = Queue.Queue()
self._die = False
self._timerid = None
self._uid = uuid.uuid4()
-self._tooltip = unicode(tooltip) if tooltip else u''
+self._tooltip = str(tooltip) if tooltip else ''
#self._thread = threading.Thread(target=self._run)
#self._thread.start()
self._info_bubble = None
@@ -525,7 +525,7 @@ class NotificationIcon(object):
if self._info_bubble:
info_bubble = self._info_bubble
self._info_bubble = None
-message = unicode(self._info_bubble)
+message = str(self._info_bubble)
iconinfo.uFlags |= NIF_INFO
iconinfo.szInfo = message
iconinfo.szInfoTitle = message
@@ -535,7 +535,7 @@ class NotificationIcon(object):


def _run(self):
-self.WM_TASKBARCREATED = ctypes.windll.user32.RegisterWindowMessageW(u'TaskbarCreated')
+self.WM_TASKBARCREATED = ctypes.windll.user32.RegisterWindowMessageW('TaskbarCreated')

self._windowproc = WNDPROC(self._callback)
self._hwnd = GenerateDummyWindow(self._windowproc, str(self._uid))
@@ -562,11 +562,11 @@ class NotificationIcon(object):
ret = GetMessage(ctypes.pointer(message), 0, 0, 0)
TranslateMessage(ctypes.pointer(message))
DispatchMessage(ctypes.pointer(message))
-except Exception, err:
+except Exception as err:
# print "NotificationIcon error", err, message
message = MSG()
time.sleep(0.125)
-print "Icon thread stopped, removing icon..."
+print("Icon thread stopped, removing icon...")

Shell_NotifyIcon(NIM_DELETE, ctypes.cast(ctypes.pointer(iconinfo), ctypes.POINTER(NOTIFYICONDATA)))
ctypes.windll.user32.DestroyWindow(self._hwnd)
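Note: print is a function in Python 3 (PEP 3105), hence the parenthesized calls in these hunks; the multi-argument form behaves like the old comma form, space-separating its arguments:

    print("Icon remove error", err)   # -> Icon remove error <message>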
@@ -586,7 +586,7 @@ class NotificationIcon(object):
item_map = {}
for fs in self.items:
iidx += 1
-if isinstance(fs, basestring):
+if isinstance(fs, str):
if fs and not fs.strip('-_='):
AppendMenu(menu, MF_SEPARATOR, iidx, fs)
else:
@@ -595,7 +595,7 @@ class NotificationIcon(object):
if callable(fs[0]):
itemstring = fs[0]()
else:
-itemstring = unicode(fs[0])
+itemstring = str(fs[0])
flags = MF_STRING
if itemstring.startswith("!"):
itemstring = itemstring[1:]
@@ -660,8 +660,8 @@ class NotificationIcon(object):
time.sleep(0.2)
try:
Shell_NotifyIcon(NIM_DELETE, self.iconinfo)
-except Exception, err:
+except Exception as err:
-print "Icon remove error", err
+print("Icon remove error", err)
ctypes.windll.user32.DestroyWindow(self._hwnd)
ctypes.windll.user32.DestroyIcon(self._hicon)

@@ -693,7 +693,7 @@ if __name__ == "__main__":

def greet():
ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0)
-print "Hello"
+print("Hello")

def quit():
ni._die = True
@@ -724,6 +724,6 @@ if __name__ == "__main__":

@atexit.register
def goodbye():
-print "You are now leaving the Python sector."
+print("You are now leaving the Python sector.")

ni._run()
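Note: basestring vanished with the str/unicode merge, so the menu-item check collapses to isinstance(fs, str). Code that must also accept raw bytes would need (str, bytes); this menu only ever sees text:

    isinstance("separator", str)   # True
    isinstance(b"raw", str)        # False; widen the tuple only if needed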
@@ -10,7 +10,8 @@ Luke Pinner - Environment.gov.au, 2010 February 10

#Imports use _syntax to mask them from autocomplete IDE's
import ctypes as _ctypes
-from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH, create_unicode_buffer as _cub
+from ctypes import create_unicode_buffer as _cub
+from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH
_SHGetFolderPath = _ctypes.windll.shell32.SHGetFolderPathW

#public special folder constants
@@ -49,5 +50,5 @@ def get(intFolder):

if __name__ == "__main__":
import os
-print get(STARTUP)
+print(get(STARTUP))
open(get(STARTUP)+"\\zeronet.cmd", "w").write("cd /D %s\r\nzeronet.py" % os.getcwd())
@@ -1,7 +1,8 @@
+import io

from Plugin import PluginManager
from Config import config
from Translate import Translate
-from cStringIO import StringIO


if "_" not in locals():
@@ -47,7 +48,7 @@ class UiRequestPlugin(object):
else:
data = open(file_path).read()

-return self.actionFile(file_path, file_obj=StringIO(data), file_size=len(data))
+return self.actionFile(file_path, file_obj=io.BytesIO(data), file_size=len(data))
else:
return super(UiRequestPlugin, self).actionUiMedia(path)

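Note (possible follow-up): io.BytesIO(data) requires bytes, but `open(file_path).read()` returns str in Python 3's default text mode, so this path likely needs the file opened in binary mode. Hedged sketch of the fix:

    with open(file_path, "rb") as f:   # binary mode keeps data as bytes
        data = f.read()
    file_obj = io.BytesIO(data)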
@ -58,7 +59,7 @@ class UiWebsocketPlugin(object):
|
||||||
back = {}
|
back = {}
|
||||||
config_values = vars(config.arguments)
|
config_values = vars(config.arguments)
|
||||||
config_values.update(config.pending_changes)
|
config_values.update(config.pending_changes)
|
||||||
for key, val in config_values.iteritems():
|
for key, val in config_values.items():
|
||||||
if key not in config.keys_api_change_allowed:
|
if key not in config.keys_api_change_allowed:
|
||||||
continue
|
continue
|
||||||
is_pending = key in config.pending_changes
|
is_pending = key in config.pending_changes
|
||||||
|
|
|
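`cStringIO` is gone in Python 3; the `io` module provides `BytesIO` for binary payloads (as used for `actionFile` above) and `StringIO` for text. A minimal sketch:

```python
# io.BytesIO replaces cStringIO.StringIO for binary data in Python 3;
# io.StringIO is the text counterpart.
import io

binary_file = io.BytesIO(b"raw bytes")
text_file = io.StringIO("unicode text")
print(binary_file.read(), text_file.read())
```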
@@ -1 +1 @@
-import UiConfigPlugin
+from . import UiConfigPlugin
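This one-line pattern repeats in every plugin `__init__.py` below: Python 3 removed implicit relative imports, so a sibling module inside a package must be named explicitly. A hypothetical package layout for illustration (a sketch, not runnable as a single file):

```python
# Hypothetical layout:
#   mypkg/__init__.py   <- this file
#   mypkg/sibling.py
# On py2, "import sibling" here found the sibling module implicitly;
# on py3 the relative relationship must be spelled out.
from . import sibling            # explicit relative import
from .sibling import some_name  # or import a name directly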
@@ -3,7 +3,7 @@ import time
 from util import helper

 from Plugin import PluginManager
-from BootstrapperDb import BootstrapperDb
+from .BootstrapperDb import BootstrapperDb
 from Crypt import CryptRsa
 from Config import config

@@ -70,7 +70,7 @@ class FileRequestPlugin(object):

         hashes_changed = 0
         db.execute("BEGIN")
-        for onion, onion_hashes in onion_to_hash.iteritems():
+        for onion, onion_hashes in onion_to_hash.items():
             hashes_changed += db.peerAnnounce(
                 ip_type="onion",
                 address=onion,

@@ -113,7 +113,7 @@ class FileRequestPlugin(object):

         hash_peers = db.peerList(
             hash,
-            address=self.connection.ip, onions=onion_to_hash.keys(), port=params["port"],
+            address=self.connection.ip, onions=list(onion_to_hash.keys()), port=params["port"],
             limit=min(limit, params["need_num"]), need_types=params["need_types"], order=order
         )
         if "ip4" in params["need_types"]: # Backward compatibility

@@ -78,7 +78,7 @@ class TestBootstrapper:
         assert len(res["peers"][0][ip_type]) == 1

         # Test DB cleanup
-        assert map(lambda row: row[0], bootstrapper_db.execute("SELECT address FROM peer").fetchall()) == [file_server.ip_external] # 127.0.0.1 never get added to db
+        assert [row[0] for row in bootstrapper_db.execute("SELECT address FROM peer").fetchall()] == [file_server.ip_external] # 127.0.0.1 never get added to db

         # Delete peers
         bootstrapper_db.execute("DELETE FROM peer WHERE address = ?", [file_server.ip_external])
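`dict.keys()` returns a view object on Python 3, which is why `onion_to_hash.keys()` is wrapped in `list()` before being handed to `db.peerList()` above. A short sketch with hypothetical data:

```python
# dict.keys() is a view in Python 3, not a list; call list() when a real
# list is needed (e.g. for slicing, or for APIs that expect one).
onion_to_hash = {"exampleonion1": ["hash1"], "exampleonion2": ["hash2"]}  # hypothetical data
onions = list(onion_to_hash.keys())
print(onions[0:1])  # slicing the raw view would raise TypeError
```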
@@ -1 +1 @@
-import BootstrapperPlugin
+from . import BootstrapperPlugin
@@ -54,7 +54,7 @@ class SiteManagerPlugin(object):
             res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read()
             data = json.loads(res)["data"]["value"]
             if "zeronet" in data:
-                for key, val in data["zeronet"].iteritems():
+                for key, val in data["zeronet"].items():
                     self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours
                 self.saveDnsCache()
                 return data["zeronet"].get(sub_domain)

@@ -76,7 +76,7 @@ class SiteManagerPlugin(object):
             with gevent.Timeout(5, Exception("Timeout: 5s")):
                 res = Http.get("https://dnschain.info/bit/d/%s" % re.sub(r"\.bit$", "", top_domain)).read()
                 data = json.loads(res)["value"]
-                for key, val in data["zeronet"].iteritems():
+                for key, val in data["zeronet"].items():
                     self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours
                 self.saveDnsCache()
                 return data["zeronet"].get(sub_domain)
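`iteritems()` (with `itervalues()` and `iterkeys()`) was dropped in Python 3 because `items()` itself now returns a lazy view; the rename is mechanical. Sketch with a hypothetical cache entry:

```python
# items() already iterates lazily on Python 3; no separate iteritems() needed.
dns_cache = {"example.bit": ["1ExampleAddr", 1234567890]}  # hypothetical entry
for key, val in dns_cache.items():  # was dns_cache.iteritems() on py2
    print(key, val)
```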
@@ -1 +1 @@
-import DonationMessagePlugin
+from . import DonationMessagePlugin
@@ -5,11 +5,11 @@ import json
 from Config import config
 from Plugin import PluginManager
 from Crypt import CryptBitcoin
-import UserPlugin
+from . import UserPlugin

 try:
     local_master_addresses = set(json.load(open("%s/users.json" % config.data_dir)).keys()) # Users in users.json
-except Exception, err:
+except Exception as err:
     local_master_addresses = set()


@@ -59,7 +59,7 @@ class UiRequestPlugin(object):
             return False

         elif loggedin:
-            back = back_generator.next()
+            back = next(back_generator)
             inject_html = """
                 <!-- Multiser plugin -->
                 <script nonce="{script_nonce}">
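Generators lost their `.next()` method in Python 3 (it became `__next__()`); the portable spelling is the `next()` builtin used above. Sketch with a hypothetical generator:

```python
# next(gen) replaces gen.next() on Python 3.
def gen():
    yield "injected <body>"  # hypothetical payload

g = gen()
print(next(g))  # was g.next() on py2
```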
@@ -1 +1 @@
-import MultiuserPlugin
+from . import MultiuserPlugin
@@ -16,9 +16,9 @@ if config.tor != "disable":
     monkey.patch_time()
     monkey.patch_socket(dns=False)
     monkey.patch_thread()
-    print "Stem Port Plugin: modules are patched."
+    print("Stem Port Plugin: modules are patched.")
 else:
-    print "Stem Port Plugin: Tor mode disabled. Module patching skipped."
+    print("Stem Port Plugin: Tor mode disabled. Module patching skipped.")


 class PatchedControlPort(ControlPort):

@@ -66,14 +66,14 @@ class TorManagerPlugin(object):
             controller = from_port(port=self.port)
             controller.authenticate()
             self.controller = controller
-            self.status = u"Connected (via Stem)"
-        except Exception, err:
+            self.status = "Connected (via Stem)"
+        except Exception as err:
             print("\n")
             traceback.print_exc()
             print("\n")

             self.controller = None
-            self.status = u"Error (%s)" % err
+            self.status = "Error (%s)" % err
             self.log.error("Tor stem connect error: %s" % Debug.formatException(err))

         return self.controller

@@ -87,8 +87,8 @@ class TorManagerPlugin(object):
     def resetCircuits(self):
         try:
             self.controller.signal(Signal.NEWNYM)
-        except Exception, err:
-            self.status = u"Stem reset circuits error (%s)" % err
+        except Exception as err:
+            self.status = "Stem reset circuits error (%s)" % err
             self.log.error("Stem reset circuits error: %s" % err)


@@ -105,8 +105,8 @@ class TorManagerPlugin(object):

             return (service.service_id, service.private_key)

-        except Exception, err:
-            self.status = u"AddOnion error (Stem: %s)" % err
+        except Exception as err:
+            self.status = "AddOnion error (Stem: %s)" % err
             self.log.error("Failed to create hidden service with Stem: " + err)
             return False

@@ -115,8 +115,8 @@ class TorManagerPlugin(object):
         try:
             self.controller.remove_ephemeral_hidden_service(address)
             return True
-        except Exception, err:
-            self.status = u"DelOnion error (Stem: %s)" % err
+        except Exception as err:
+            self.status = "DelOnion error (Stem: %s)" % err
             self.log.error("Stem failed to delete %s.onion: %s" % (address, err))
             self.disconnect() # Why?
             return False

@@ -2,9 +2,9 @@ try:
     from stem.control import Controller
     stem_found = True
 except Exception as err:
-    print "STEM NOT FOUND! %s" % err
+    print(("STEM NOT FOUND! %s" % err))
     stem_found = False

 if stem_found:
-    print "Starting Stem plugin..."
-    import StemPortPlugin
+    print("Starting Stem plugin...")
+    from . import StemPortPlugin
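The dropped `u"..."` prefixes above are purely cosmetic on Python 3, where every `str` literal is already Unicode (the prefix is still accepted since 3.3). A two-line check:

```python
# The u-prefix is redundant on Python 3: str literals are Unicode by default.
status = "Connected (via Stem)"
assert status == u"Connected (via Stem)"
print(status)
```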
@@ -1 +1 @@
-import UiPasswordPlugin
+from . import UiPasswordPlugin
@@ -3,7 +3,7 @@ import gevent
 from Plugin import PluginManager
 from Config import config
 from Debug import Debug
-from domainLookup import lookupDomain
+from .domainLookup import lookupDomain

 allow_reload = False # No reload supported


@@ -1,2 +1,2 @@
-import UiRequestPlugin
-import SiteManagerPlugin
+from . import UiRequestPlugin
+from . import SiteManagerPlugin
@@ -37,7 +37,7 @@
 try:
     import http.client as httplib
 except ImportError:
-    import httplib
+    import http.client
 import base64
 import decimal
 import json

@@ -45,7 +45,7 @@ import logging
 try:
     import urllib.parse as urlparse
 except ImportError:
-    import urlparse
+    import urllib.parse

 USER_AGENT = "AuthServiceProxy/0.1"


@@ -83,7 +83,7 @@ class AuthServiceProxy(object):
     def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
         self.__service_url = service_url
         self.__service_name = service_name
-        self.__url = urlparse.urlparse(service_url)
+        self.__url = urllib.parse.urlparse(service_url)
         if self.__url.port is None:
             port = 80
         else:

@@ -106,10 +106,10 @@ class AuthServiceProxy(object):
             # Callables re-use the connection of the original proxy
             self.__conn = connection
         elif self.__url.scheme == 'https':
-            self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
+            self.__conn = http.client.HTTPSConnection(self.__url.hostname, port,
                                                   timeout=timeout)
         else:
-            self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
+            self.__conn = http.client.HTTPConnection(self.__url.hostname, port,
                                                  timeout=timeout)

     def __getattr__(self, name):

@@ -1,4 +1,4 @@
-from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
+from .bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
 import time, json, os, sys, re, socket

 # Connecting to RPC
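The try/except ImportError shim above is the standard way to keep a single import site working across the `httplib`/`http.client` and `urlparse`/`urllib.parse` renames. A condensed sketch (the URL is a placeholder):

```python
# Stdlib renames: httplib -> http.client, urlparse -> urllib.parse.
try:
    import http.client as httplib   # Python 3
except ImportError:
    import httplib                  # Python 2 fallback

import urllib.parse

url = urllib.parse.urlparse("http://user:pass@127.0.0.1:8332")
print(url.hostname, url.port)
```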
@@ -3,7 +3,7 @@ import sys
 import os
 import locale
 import re
-import ConfigParser
+import configparser
 import logging
 import logging.handlers
 import stat

@@ -304,7 +304,7 @@ class Config(object):
                 if "://" in tracker and tracker not in self.trackers:
                     self.trackers.append(tracker)
         except Exception as err:
-            print "Error loading trackers file: %s" % err
+            print("Error loading trackers file: %s" % err)

     # Find arguments specified for current action
     def getActionArguments(self):

@@ -316,7 +316,7 @@ class Config(object):

     # Try to find action from argv
     def getAction(self, argv):
-        actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
+        actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0] # Valid actions
         found_action = False
         for action in actions: # See if any in argv
             if action in argv:

@@ -404,7 +404,7 @@ class Config(object):
             self.config_file = argv[argv.index("--config_file") + 1]
         # Load config file
         if os.path.isfile(self.config_file):
-            config = ConfigParser.ConfigParser(allow_no_value=True)
+            config = configparser.ConfigParser(allow_no_value=True)
             config.read(self.config_file)
             for section in config.sections():
                 for key, val in config.items(section):

@@ -570,7 +570,7 @@ class Config(object):
         try:
             os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
         except Exception as err:
-            print "Can't change permission of %s: %s" % (self.log_dir, err)
+            print("Can't change permission of %s: %s" % (self.log_dir, err))

         # Make warning hidden from console
         logging.WARNING = 15 # Don't display warnings if not in debug mode
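`ConfigParser` became `configparser` in Python 3 with an otherwise compatible API; `allow_no_value=True` still permits bare keys, matching the flag-style options used here. A runnable sketch with hypothetical option names:

```python
# The module was renamed configparser in Python 3; the class name is unchanged.
import configparser

config = configparser.ConfigParser(allow_no_value=True)
config.read_string("[global]\ndebug\ndata_dir = ./data\n")  # hypothetical options
for key, val in config.items("global"):
    print(key, val)
```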
@@ -314,7 +314,7 @@ class Connection(object):
                     self.incomplete_buff_recv += 1
                     self.bytes_recv += buff_len
                     self.server.bytes_recv += buff_len
-            except Exception, err:
+            except Exception as err:
                 self.log("Stream read error: %s" % Debug.formatException(err))

         if config.debug_socket:

@@ -328,7 +328,7 @@ class Connection(object):
         if unpacker_stream_bytes:
             return buff[buff_stream_start + unpacker_stream_bytes:]
         else:
-            return ""
+            return b""

     # My handshake info
     def getHandshakeInfo(self):

@@ -476,7 +476,7 @@ class Connection(object):
         try:
             self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
             self.sock_wrapped = True
-        except Exception, err:
+        except Exception as err:
             if not config.force_encryption:
                 self.log("Crypt connection error: %s, adding ip %s as broken ssl." % (err, self.ip))
                 self.server.broken_ssl_ips[self.ip] = True

@@ -526,7 +526,7 @@ class Connection(object):
             message = None
             with self.send_lock:
                 self.sock.sendall(data)
-        except Exception, err:
+        except Exception as err:
             self.close("Send error: %s (cmd: %s)" % (err, stat_key))
             return False
         self.last_sent_time = time.time()

@@ -577,9 +577,9 @@ class Connection(object):
         with gevent.Timeout(10.0, False):
             try:
                 response = self.request("ping")
-            except Exception, err:
+            except Exception as err:
                 self.log("Ping error: %s" % Debug.formatException(err))
-        if response and "body" in response and response["body"] == "Pong!":
+        if response and "body" in response and response["body"] == b"Pong!":
             self.last_ping_delay = time.time() - s
             return True
         else:

@@ -608,7 +608,7 @@ class Connection(object):
             if self.sock:
                 self.sock.shutdown(gevent.socket.SHUT_WR)
                 self.sock.close()
-        except Exception, err:
+        except Exception as err:
             if config.debug_socket:
                 self.log("Close error: %s" % err)
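The `b""`/`b"Pong!"` changes above are the subtle part of this file: socket reads yield bytes on Python 3, and bytes never compare equal to str, so an unconverted comparison fails silently instead of raising. Sketch:

```python
# Protocol constants compared against socket payloads must be byte literals.
response_body = b"Pong!"          # what the wire actually carries
print(response_body == "Pong!")   # False on py3: silent protocol breakage
print(response_body == b"Pong!")  # True
```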
@@ -12,7 +12,7 @@ from gevent.pool import Pool
 import util
 from util import helper
 from Debug import Debug
-from Connection import Connection
+from .Connection import Connection
 from Config import config
 from Crypt import CryptConnection
 from Crypt import CryptHash

@@ -94,7 +94,7 @@ class ConnectionServer(object):
             self.stream_server = StreamServer(
                 (self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
             )
-        except Exception, err:
+        except Exception as err:
             self.log.info("StreamServer create error: %s" % Debug.formatException(err))

     def listen(self):

@@ -102,7 +102,7 @@ class ConnectionServer(object):
             gevent.spawn(self.listenProxy)
         try:
             self.stream_server.serve_forever()
-        except Exception, err:
+        except Exception as err:
             self.log.info("StreamServer listen error: %s" % err)

     def stop(self):

@@ -199,7 +199,7 @@ class ConnectionServer(object):
                     connection.close("Connection event return error")
                     raise Exception("Connection event return error")

-            except Exception, err:
+            except Exception as err:
                 connection.close("%s Connect error: %s" % (ip, Debug.formatException(err)))
                 raise err

@@ -346,6 +346,6 @@ class ConnectionServer(object):
         ])
         if len(corrections) < 6:
             return 0.0
-        mid = len(corrections) / 2 - 1
+        mid = int(len(corrections) / 2 - 1)
         median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
         return median
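The `int(...)` wrapper around the median index above exists because `/` is true division on Python 3 and returns a float, which cannot index a list. Sketch with hypothetical timing data:

```python
# True division returns float on py3; index arithmetic needs int() or "//".
corrections = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]  # hypothetical clock offsets
mid = int(len(corrections) / 2 - 1)            # equivalent: len(corrections) // 2 - 1
median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
print(mid, median)
```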
@@ -1,2 +1,2 @@
-from ConnectionServer import ConnectionServer
-from Connection import Connection
+from .ConnectionServer import ConnectionServer
+from .Connection import Connection
@@ -19,7 +19,7 @@ class ContentDb(Db):
             foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone()
             if foreign_key_error:
                 raise Exception("Database foreign key error: %s" % foreign_key_error)
-        except Exception, err:
+        except Exception as err:
             self.log.error("Error loading content.db: %s, rebuilding..." % Debug.formatException(err))
             self.close()
             os.unlink(path) # Remove and try again

@@ -95,8 +95,8 @@ class ContentDb(Db):
     def setContent(self, site, inner_path, content, size=0):
         self.insertOrUpdate("content", {
             "size": size,
-            "size_files": sum([val["size"] for key, val in content.get("files", {}).iteritems()]),
-            "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).iteritems()]),
+            "size_files": sum([val["size"] for key, val in content.get("files", {}).items()]),
+            "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]),
             "modified": int(content.get("modified", 0))
         }, {
             "site_id": self.site_ids.get(site.address, 0),
@@ -1,7 +1,7 @@
 import time
 import os

-import ContentDb
+from . import ContentDb
 from Debug import Debug
 from Config import config


@@ -127,29 +127,29 @@ if __name__ == "__main__":
     s_mem = process.memory_info()[0] / float(2 ** 20)
     root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27"
     contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root)
-    print "Init len", len(contents)
+    print("Init len", len(contents))

     s = time.time()
     for dir_name in os.listdir(root + "/data/users/")[0:8000]:
         contents["data/users/%s/content.json" % dir_name]
-    print "Load: %.3fs" % (time.time() - s)
+    print("Load: %.3fs" % (time.time() - s))

     s = time.time()
     found = 0
-    for key, val in contents.iteritems():
+    for key, val in contents.items():
         found += 1
         assert key
         assert val
-    print "Found:", found
-    print "Iteritem: %.3fs" % (time.time() - s)
+    print("Found:", found)
+    print("Iteritem: %.3fs" % (time.time() - s))

     s = time.time()
     found = 0
-    for key in contents.keys():
+    for key in list(contents.keys()):
         found += 1
         assert key in contents
-    print "In: %.3fs" % (time.time() - s)
+    print("In: %.3fs" % (time.time() - s))

-    print "Len:", len(contents.values()), len(contents.keys())
+    print("Len:", len(list(contents.values())), len(list(contents.keys())))

-    print "Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem
+    print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem)
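`list(contents.keys())` in the benchmark above snapshots the keys; the same idiom matters anywhere the dict is mutated while looping, since a live Python 3 view raises RuntimeError mid-iteration. Sketch:

```python
# list() snapshots the keys so the dict can be mutated inside the loop.
cache = {"a": 1, "b": 2, "c": 3}
for key in list(cache.keys()):  # iterating cache.keys() directly would raise RuntimeError
    if key != "a":
        del cache[key]
print(cache)
```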
@@ -3,6 +3,7 @@ import time
 import re
 import os
 import copy
+import base64

 import gevent

@@ -13,7 +14,7 @@ from util import helper
 from util import Diff
 from util import SafeRe
 from Peer import PeerHashfield
-from ContentDbDict import ContentDbDict
+from .ContentDbDict import ContentDbDict
 from Plugin import PluginManager


@@ -44,7 +45,7 @@ class ContentManager(object):

         # Load hashfield cache
         if "hashfield" in self.site.settings.get("cache", {}):
-            self.hashfield.fromstring(self.site.settings["cache"]["hashfield"].decode("base64"))
+            self.hashfield.frombytes(base64.b64decode(self.site.settings["cache"]["hashfield"]))
             del self.site.settings["cache"]["hashfield"]
         elif self.contents.get("content.json") and self.site.settings["size_optional"] > 0:
             self.site.storage.updateBadFiles() # No hashfield cache created yet
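The hashfield hunk above swaps two removed py2 idioms at once: `str.decode("base64")` is replaced by `base64.b64decode()`, and the array method `fromstring()` by `frombytes()`. A sketch assuming the hashfield is an `array.array("H")` as in ZeroNet's PeerHashfield (the cache value is hypothetical):

```python
import array
import base64

cached = base64.b64encode(bytes([1, 2, 3, 4])).decode("ascii")  # hypothetical cache value
hashfield = array.array("H")                 # array of 16-bit hash ids
hashfield.frombytes(base64.b64decode(cached))  # fromstring() was removed in py3
print(list(hashfield))
```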
@@ -74,7 +75,7 @@ class ContentManager(object):
                 return [], []

             new_content = json.load(open(content_path))
-        except Exception, err:
+        except Exception as err:
             self.log.warning("%s load error: %s" % (content_path, Debug.formatException(err)))
             return [], []
         else:

@@ -86,7 +87,7 @@ class ContentManager(object):
         changed = []
         deleted = []
         # Check changed
-        for relative_path, info in new_content.get("files", {}).iteritems():
+        for relative_path, info in new_content.get("files", {}).items():
             if "sha512" in info:
                 hash_type = "sha512"
             else: # Backward compatibility

@@ -101,7 +102,7 @@ class ContentManager(object):
                 changed.append(content_inner_dir + relative_path)

         # Check changed optional files
-        for relative_path, info in new_content.get("files_optional", {}).iteritems():
+        for relative_path, info in new_content.get("files_optional", {}).items():
             file_inner_path = content_inner_dir + relative_path
             new_hash = info["sha512"]
             if old_content and old_content.get("files_optional", {}).get(relative_path):

@@ -115,7 +116,7 @@ class ContentManager(object):
                         self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][relative_path]["size"])
                         self.optionalDelete(file_inner_path)
                         self.log.debug("Deleted changed optional file: %s" % file_inner_path)
-                    except Exception, err:
+                    except Exception as err:
                         self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))
             else: # The file is not in the old content
                 if self.site.isDownloadable(file_inner_path):

@@ -151,7 +152,7 @@ class ContentManager(object):
                         self.site.storage.delete(file_inner_path)

                         self.log.debug("Deleted file: %s" % file_inner_path)
-                    except Exception, err:
+                    except Exception as err:
                         self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))

             # Cleanup empty dirs

@@ -165,7 +166,7 @@ class ContentManager(object):
                         self.site.storage.deleteDir(root_inner_path)
                         # Remove from tree dict to reflect changed state
                         tree[os.path.dirname(root)][0].remove(os.path.basename(root))
-                    except Exception, err:
+                    except Exception as err:
                         self.log.debug("Error deleting empty directory %s: %s" % (root_inner_path, err))

         # Check archived

@@ -175,12 +176,12 @@ class ContentManager(object):
             self.log.debug("old archived: %s, new archived: %s" % (len(old_archived), len(new_archived)))
             archived_changed = {
                 key: date_archived
-                for key, date_archived in new_archived.iteritems()
+                for key, date_archived in new_archived.items()
                 if old_archived.get(key) != new_archived[key]
             }
             if archived_changed:
                 self.log.debug("Archived changed: %s" % archived_changed)
-                for archived_dirname, date_archived in archived_changed.iteritems():
+                for archived_dirname, date_archived in archived_changed.items():
                     archived_inner_path = content_inner_dir + archived_dirname + "/content.json"
                     if self.contents.get(archived_inner_path, {}).get("modified", 0) < date_archived:
                         self.removeContent(archived_inner_path)
@@ -204,7 +205,7 @@ class ContentManager(object):

             # Remove archived files from download queue
             num_removed_bad_files = 0
-            for bad_file in self.site.bad_files.keys():
+            for bad_file in list(self.site.bad_files.keys()):
                 if bad_file.endswith("content.json"):
                     del self.site.bad_files[bad_file]
                     num_removed_bad_files += 1

@@ -217,7 +218,7 @@ class ContentManager(object):

         # Load includes
         if load_includes and "includes" in new_content:
-            for relative_path, info in new_content["includes"].items():
+            for relative_path, info in list(new_content["includes"].items()):
                 include_inner_path = content_inner_dir + relative_path
                 if self.site.storage.isFile(include_inner_path): # Content.json exists, load it
                     include_changed, include_deleted = self.loadContent(

@@ -255,7 +256,7 @@ class ContentManager(object):
                 self.has_optional_files = True
             # Update the content
             self.contents[content_inner_path] = new_content
-        except Exception, err:
+        except Exception as err:
             self.log.warning("%s parse error: %s" % (content_inner_path, Debug.formatException(err)))
             return [], [] # Content.json parse error

@@ -282,7 +283,7 @@ class ContentManager(object):
                 content.get("files", {}),
                 **content.get("files_optional", {})
             )
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Error loading %s for removeContent: %s" % (inner_path, Debug.formatException(err)))
             files = {}
         files["content.json"] = True

@@ -292,16 +293,16 @@ class ContentManager(object):
             try:
                 self.site.storage.delete(file_inner_path)
                 self.log.debug("Deleted file: %s" % file_inner_path)
-            except Exception, err:
+            except Exception as err:
                 self.log.debug("Error deleting file %s: %s" % (file_inner_path, err))
         try:
             self.site.storage.deleteDir(inner_dir)
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Error deleting dir %s: %s" % (inner_dir, err))

         try:
             del self.contents[inner_path]
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Error key from contents: %s" % inner_path)

         # Get total size of site

@@ -317,7 +318,7 @@ class ContentManager(object):
             return []
         back = [inner_path]
         content_inner_dir = helper.getDirname(inner_path)
-        for relative_path in self.contents[inner_path].get("includes", {}).keys():
+        for relative_path in list(self.contents[inner_path].get("includes", {}).keys()):
             include_inner_path = content_inner_dir + relative_path
             back += self.listContents(include_inner_path)
         return back

@@ -333,7 +334,7 @@ class ContentManager(object):
         file_info = self.getFileInfo(user_contents_inner_path)
         if file_info:
             time_archived_before = file_info.get("archived_before", 0)
-            time_directory_archived = file_info.get("archived", {}).get(relative_directory)
+            time_directory_archived = file_info.get("archived", {}).get(relative_directory, 0)
             if modified <= time_archived_before or modified <= time_directory_archived:
                 return True
             else:
@@ -493,11 +494,11 @@ class ContentManager(object):
         banned = False
         if "signers" in rules:
             rules["signers"] = rules["signers"][:] # Make copy of the signers
-        for permission_pattern, permission_rules in user_contents["permission_rules"].items(): # Regexp rules
+        for permission_pattern, permission_rules in list(user_contents["permission_rules"].items()): # Regexp rules
             if not SafeRe.match(permission_pattern, user_urn):
                 continue # Rule is not valid for user
             # Update rules if its better than current recorded ones
-            for key, val in permission_rules.iteritems():
+            for key, val in permission_rules.items():
                 if key not in rules:
                     if type(val) is list:
                         rules[key] = val[:] # Make copy

@@ -649,7 +650,7 @@ class ContentManager(object):

         if extend:
             # Add extend keys if not exists
-            for key, val in extend.items():
+            for key, val in list(extend.items()):
                 if not content.get(key):
                     content[key] = val
                     self.log.info("Extending content.json with: %s" % key)

@@ -664,14 +665,14 @@ class ContentManager(object):
         )

         if not remove_missing_optional:
-            for file_inner_path, file_details in content.get("files_optional", {}).iteritems():
+            for file_inner_path, file_details in content.get("files_optional", {}).items():
                 if file_inner_path not in files_optional_node:
                     files_optional_node[file_inner_path] = file_details

         # Find changed files
         files_merged = files_node.copy()
         files_merged.update(files_optional_node)
-        for file_relative_path, file_details in files_merged.iteritems():
+        for file_relative_path, file_details in files_merged.items():
             old_hash = content.get("files", {}).get(file_relative_path, {}).get("sha512")
             new_hash = files_merged[file_relative_path]["sha512"]
             if old_hash != new_hash:

@@ -795,19 +796,19 @@ class ContentManager(object):
         try:
             cert_subject = "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name)
             result = CryptBitcoin.verify(cert_subject, cert_address, content["cert_sign"])
-        except Exception, err:
+        except Exception as err:
             raise VerifyError("Certificate verify error: %s" % err)
         return result

     # Checks if the content.json content is valid
     # Return: True or False
     def verifyContent(self, inner_path, content):
-        content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in content["files"].values() if file["size"] >= 0]) # Size of new content
+        content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in list(content["files"].values()) if file["size"] >= 0]) # Size of new content
         # Calculate old content size
         old_content = self.contents.get(inner_path)
         if old_content:
-            old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in old_content.get("files", {}).values()])
-            old_content_size_optional = sum([file["size"] for file in old_content.get("files_optional", {}).values()])
+            old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in list(old_content.get("files", {}).values())])
+            old_content_size_optional = sum([file["size"] for file in list(old_content.get("files_optional", {}).values())])
         else:
             old_content_size = 0
             old_content_size_optional = 0

@@ -816,7 +817,7 @@ class ContentManager(object):
         if not old_content and inner_path == "content.json":
             self.site.settings["size"] = 0

-        content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values() if file["size"] >= 0])
+        content_size_optional = sum([file["size"] for file in list(content.get("files_optional", {}).values()) if file["size"] >= 0])
         site_size = self.site.settings["size"] - old_content_size + content_size # Site size without old content plus the new
         site_size_optional = self.site.settings["size_optional"] - old_content_size_optional + content_size_optional # Site size without old content plus the new

@@ -841,7 +842,7 @@ class ContentManager(object):
             raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit))

         # Verify valid filenames
-        for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
+        for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
             if not self.isValidRelativePath(file_relative_path):
                 raise VerifyError("Invalid relative path: %s" % file_relative_path)

@@ -876,12 +877,12 @@ class ContentManager(object):

         # Filename limit
         if rules.get("files_allowed"):
-            for file_inner_path in content["files"].keys():
+            for file_inner_path in list(content["files"].keys()):
                 if not SafeRe.match("^%s$" % rules["files_allowed"], file_inner_path):
                     raise VerifyError("File not allowed: %s" % file_inner_path)

         if rules.get("files_allowed_optional"):
-            for file_inner_path in content.get("files_optional", {}).keys():
+            for file_inner_path in list(content.get("files_optional", {}).keys()):
                 if not SafeRe.match("^%s$" % rules["files_allowed_optional"], file_inner_path):
                     raise VerifyError("Optional file not allowed: %s" % file_inner_path)

@@ -964,7 +965,7 @@ class ContentManager(object):
                 else:
                     raise VerifyError("Invalid old-style sign")

-        except Exception, err:
+        except Exception as err:
             self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err)))
             raise err


@@ -1 +1 @@
-from ContentManager import ContentManager
+from .ContentManager import ContentManager
@@ -13,10 +13,10 @@ def sha1sum(file, blocksize=65536):


 def sha512sum(file, blocksize=65536, format="hexdigest"):
-    if hasattr(file, "endswith"): # Its a string open it
+    if type(file) is str: # Filename specified
         file = open(file, "rb")
     hash = hashlib.sha512()
-    for block in iter(lambda: file.read(blocksize), ""):
+    for block in iter(lambda: file.read(blocksize), b""):
         hash.update(block)

     # Truncate to 256bits is good enough

@@ -31,7 +31,7 @@ def sha256sum(file, blocksize=65536):
     if hasattr(file, "endswith"): # Its a string open it
         file = open(file, "rb")
     hash = hashlib.sha256()
-    for block in iter(lambda: file.read(blocksize), ""):
+    for block in iter(lambda: file.read(blocksize), b""):
         hash.update(block)
     return hash.hexdigest()

@@ -39,7 +39,7 @@ def sha256sum(file, blocksize=65536):
 def random(length=64, encoding="hex"):
     if encoding == "base64": # Characters: A-Za-z0-9
         hash = hashlib.sha512(os.urandom(256)).digest()
-        return base64.standard_b64encode(hash).replace("+", "").replace("/", "").replace("=", "")[0:length]
+        return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length]
     else: # Characters: a-f0-9 (faster)
         return hashlib.sha512(os.urandom(256)).hexdigest()[0:length]
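The sentinel change above is easy to miss: a file opened with `"rb"` returns bytes, so `iter(read, "")` would never hit its str sentinel and would loop forever; it must be `b""`. Runnable sketch with an in-memory stand-in for the file:

```python
# The read-loop sentinel must match the type the stream yields (bytes here).
import hashlib
import io

f = io.BytesIO(b"some payload")  # stands in for open(path, "rb")
h = hashlib.sha512()
for block in iter(lambda: f.read(4), b""):
    h.update(block)
print(h.hexdigest()[:16])
```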
@@ -35,4 +35,4 @@ def privatekeyToPublickey(privatekey):
     return pub.save_pkcs1("DER")

 def publickeyToOnion(publickey):
-    return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower()
+    return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii")
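`base64.b32encode()` returns bytes on Python 3, hence the added `.decode("ascii")` so callers receive the onion name as str. Sketch with a placeholder key:

```python
# b32encode yields bytes on py3; decode to get a printable onion name.
import base64
import hashlib

publickey = b"hypothetical DER key bytes"
onion = base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii")
print(onion)  # 16-char v2 onion name, without the ".onion" suffix
```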
src/Db/Db.py (25 changed lines)
@@ -7,7 +7,7 @@ import os
 import gevent

 from Debug import Debug
-from DbCursor import DbCursor
+from .DbCursor import DbCursor
 from Config import config
 from util import SafeRe
 from util import helper

@@ -149,8 +149,8 @@ class Db(object):
         if not self.db_keyvalues: # Get db keyvalues
             try:
                 res = self.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
-            except sqlite3.OperationalError, err: # Table not exist
-                self.log.debug("Query error: %s" % err)
+            except sqlite3.OperationalError as err: # Table not exist
+                self.log.debug("Query table version error: %s" % err)
                 return False

             for row in res:

@@ -260,7 +260,7 @@ class Db(object):
                 data = json.load(helper.limitedGzipFile(fileobj=file))
             else:
                 data = json.load(file)
-        except Exception, err:
+        except Exception as err:
             self.log.debug("Json file %s load error: %s" % (file_path, err))
             data = {}

@@ -274,7 +274,7 @@ class Db(object):
             commit_after_done = False

         # Row for current json file if required
-        if not data or filter(lambda dbmap: "to_keyvalue" in dbmap or "to_table" in dbmap, matched_maps):
+        if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]:
             json_row = cur.getJsonRow(relative_path)

         # Check matched mappings in schema

@@ -311,7 +311,7 @@ class Db(object):
                     changed = True
             if changed:
                 # Add the custom col values
-                data_json_row.update({key: val for key, val in data.iteritems() if key in dbmap["to_json_table"]})
+                data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]})
                 cur.execute("INSERT OR REPLACE INTO json ?", data_json_row)

         # Insert data to tables

@@ -333,7 +333,7 @@ class Db(object):

             # Fill import cols from table cols
             if not import_cols:
-                import_cols = set(map(lambda item: item[0], self.schema["tables"][table_name]["cols"]))
+                import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]])

             cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))

@@ -341,7 +341,7 @@ class Db(object):
                 continue

             if key_col: # Map as dict
-                for key, val in data[node].iteritems():
+                for key, val in data[node].items():
                     if val_col: # Single value
                         cur.execute(
                             "INSERT OR REPLACE INTO %s ?" % table_name,

@@ -355,9 +355,9 @@ class Db(object):
                         row[key_col] = key
                         # Replace in value if necessary
                         if replaces:
-                            for replace_key, replace in replaces.iteritems():
+                            for replace_key, replace in replaces.items():
                                 if replace_key in row:
-                                    for replace_from, replace_to in replace.iteritems():
+                                    for replace_from, replace_to in replace.items():
                                         row[replace_key] = row[replace_key].replace(replace_from, replace_to)

                         row["json_id"] = json_row["json_id"]

@@ -402,7 +402,6 @@ if __name__ == "__main__":
         dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur)
         # print ".",
     cur.logging = True
-    cur.execute("COMMIT")
-    print "Done in %.3fs" % (time.time() - s)
+    print("Done in %.3fs" % (time.time() - s))
     for query, stats in sorted(dbjson.query_stats.items()):
-        print "-", query, stats
+        print("-", query, stats)
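The `filter(...)` to list-comprehension change above is a behavioral fix, not just style: Python 3's lazy filter object is always truthy, so the old `if not data or filter(...)` test would have silently changed meaning. Demonstration:

```python
# filter()/map() return lazy iterators on py3; an iterator is truthy even when
# it would yield nothing, so boolean tests need a materialized list.
matched_maps = [{"to_table": "peer"}, {"other": 1}]  # hypothetical schema maps
hits = [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]
print(bool(hits), bool(filter(lambda x: False, [])))  # True, True(!) - the trap
```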
@@ -9,9 +9,9 @@ class DbQuery:
     # Split main parts of query
     def parseParts(self, query):
         parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query)
-        parts = filter(None, parts) # Remove empty parts
-        parts = map(lambda s: s.strip(), parts) # Remove whitespace
-        return dict(zip(parts[0::2], parts[1::2]))
+        parts = [_f for _f in parts if _f] # Remove empty parts
+        parts = [s.strip() for s in parts] # Remove whitespace
+        return dict(list(zip(parts[0::2], parts[1::2])))

     # Parse selected fields SELECT ... FROM
     def parseFields(self, query_select):
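`zip()` is likewise lazy on Python 3; feeding it straight into `dict()` (as `parseParts` does) consumes it exactly once, while the old code also needed `parts` to be a real list so it could be sliced twice. Sketch:

```python
# Lazy zip() is fine when consumed once; the inputs must be sliceable lists.
parts = ["SELECT", "*", "FROM", "peer"]
print(dict(zip(parts[0::2], parts[1::2])))  # {'SELECT': '*', 'FROM': 'peer'}
```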
@@ -1,3 +1,3 @@
-from Db import Db
-from DbQuery import DbQuery
-from DbCursor import DbCursor
+from .Db import Db
+from .DbQuery import DbQuery
+from .DbCursor import DbCursor
@@ -63,10 +63,10 @@ gevent.spawn(testBlock)

 if __name__ == "__main__":
     try:
-        print 1 / 0
-    except Exception, err:
-        print type(err).__name__
-        print "1/0 error: %s" % formatException(err)
+        print(1 / 0)
+    except Exception as err:
+        print(type(err).__name__)
+        print("1/0 error: %s" % formatException(err))

     def loadJson():
         json.loads("Errr")

@@ -74,13 +74,13 @@ if __name__ == "__main__":
     import json
     try:
         loadJson()
-    except Exception, err:
-        print err
-        print "Json load error: %s" % formatException(err)
+    except Exception as err:
+        print(err)
+        print("Json load error: %s" % formatException(err))

     try:
         raise Notify("nothing...")
-    except Exception, err:
-        print "Notify: %s" % formatException(err)
+    except Exception as err:
+        print("Notify: %s" % formatException(err))

     loadJson()
@@ -5,19 +5,20 @@ import gevent
 import gevent.hub

 from Config import config
+import importlib

 last_error = None

 def shutdown():
-print "Shutting down..."
+print("Shutting down...")
 if "file_server" in dir(sys.modules["main"]) and sys.modules["main"].file_server.running:
 try:
 if "file_server" in dir(sys.modules["main"]):
 gevent.spawn(sys.modules["main"].file_server.stop)
 if "ui_server" in dir(sys.modules["main"]):
 gevent.spawn(sys.modules["main"].ui_server.stop)
-except Exception, err:
+except Exception as err:
-print "Proper shutdown error: %s" % err
+print("Proper shutdown error: %s" % err)
 sys.exit(0)
 else:
 sys.exit(0)
@@ -67,7 +68,7 @@ else:
 sys.excepthook(exc_info[0], exc_info[1], exc_info[2])

 gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
-reload(gevent)
+importlib.reload(gevent)

 def handleGreenletError(self, context, type, value, tb):
 if isinstance(value, str):
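The builtin reload() was removed in Python 3; importlib.reload() is the direct replacement, re-executing the module's code in place and returning the module object. For example:

    import importlib
    import json

    json = importlib.reload(json)   # re-runs json's module code, same module object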
@@ -83,18 +84,18 @@ if __name__ == "__main__":
 import time
 from gevent import monkey
 monkey.patch_all(thread=False, ssl=False)
-import Debug
+from . import Debug

 def sleeper(num):
-print "started", num
+print("started", num)
 time.sleep(3)
 raise Exception("Error")
-print "stopped", num
+print("stopped", num)
 thread1 = gevent.spawn(sleeper, 1)
 thread2 = gevent.spawn(sleeper, 2)
 time.sleep(1)
-print "killing..."
+print("killing...")
 thread1.kill(exception=Debug.Notify("Worker stopped"))
 #thread2.throw(Debug.Notify("Throw"))
-print "killed"
+print("killed")
 gevent.joinall([thread1,thread2])
@@ -3,6 +3,7 @@ import subprocess
 import re
 import logging
 import time
+import functools

 from Config import config
 from util import helper
@@ -18,9 +19,9 @@ def findfiles(path, find_ext):
 elif f2 == "":
 return -1
 else:
-return cmp(f1.lower(), f2.lower())
+return helper.cmp(f1.lower(), f2.lower())

-for root, dirs, files in sorted(os.walk(path, topdown=False), cmp=sorter):
+for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)):
 for file in sorted(files):
 file_path = root + "/" + file
 file_ext = file.split(".")[-1]
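Python 3 removed both the cmp() builtin and the cmp= argument of sorted(), so the comparator survives as helper.cmp (presumably the classic three-way compare; that definition is an assumption) and is adapted via functools.cmp_to_key. A minimal sketch:

    import functools

    def cmp(a, b):                  # assumed shape of helper.cmp
        return (a > b) - (a < b)

    def sorter(f1, f2):
        return cmp(f1.lower(), f2.lower())

    print(sorted(["B.css", "a.js"], key=functools.cmp_to_key(sorter)))   # ['a.js', 'B.css']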
@@ -66,16 +67,16 @@ def merge(merged_path):
 return  # Assets not changed, nothing to do

 if os.path.isfile(merged_path):  # Find old parts to avoid unncessary recompile
-merged_old = open(merged_path, "rb").read().decode("utf8")
+merged_old = open(merged_path, "rb").read()
 old_parts = {}
-for match in re.findall(r"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
+for match in re.findall(rb"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
-old_parts[match[1]] = match[2].strip("\n\r")
+old_parts[match[1]] = match[2].strip(rb"\n\r")

 # Merge files
 parts = []
 s_total = time.time()
 for file_path in findfiles(merge_dir, find_ext):
-parts.append("\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, ""))
+parts.append(b"\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, "").encode("utf8"))
 if file_path.endswith(".coffee"):  # Compile coffee script
 if file_path in changed or file_path.replace(config.data_dir, "") not in old_parts:  # Only recompile if changed or its not compiled before
 if config.coffeescript_compiler is None:
@@ -95,31 +96,31 @@ def merge(merged_path):
 # Start compiling
 s = time.time()
 compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
-out = compiler.stdout.read().decode("utf8")
+out = compiler.stdout.read()
 compiler.wait()
 logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))

 # Check errors
-if out and out.startswith("("):  # No error found
+if out and out.startswith(b"("):  # No error found
 parts.append(out)
 else:  # Put error message in place of source code
 error = out
 logging.error("%s Compile error: %s" % (file_path, error))
 parts.append(
-"alert('%s compile error: %s');" %
+b"alert('%s compile error: %s');" %
-(file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n"))
+(file_path, re.escape(error).replace(b"\n", b"\\n").replace(r"\\n", r"\n"))
 )
 else:  # Not changed use the old_part
 parts.append(old_parts[file_path.replace(config.data_dir, "")])
 else:  # Add to parts
-parts.append(open(file_path).read().decode("utf8"))
+parts.append(open(file_path, "rb").read())

-merged = u"\n".join(parts)
+merged = b"\n".join(parts)
 if ext == "css":  # Vendor prefix css
 from lib.cssvendor import cssvendor
 merged = cssvendor.prefix(merged)
-merged = merged.replace("\r", "")
+merged = merged.replace(b"\r", b"")
-open(merged_path, "wb").write(merged.encode("utf8"))
+open(merged_path, "wb").write(merged)
 logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
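The merge() changes all follow one rule: keep the whole pipeline in bytes. Files opened with "rb" return bytes, %-formatting on bytes works from Python 3.5 (PEP 461), and b"\n".join() only accepts bytes. A small illustration with hypothetical data, not the real asset files:

    css = b"body { color: red }"                    # binary read keeps bytes, no implicit decode
    header = b"/* ---- %s ---- */" % b"style.css"   # bytes %-formatting, Python 3.5+
    merged = b"\n".join([header, css])              # bytes may only be joined with bytes
    print(merged.decode("utf8"))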
@@ -1 +0,0 @@
-from DebugReloader import DebugReloader
@@ -118,7 +118,7 @@ class FileRequest(object):

 try:
 content = json.loads(params["body"])
-except Exception, err:
+except Exception as err:
 self.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err))
 self.response({"error": "File invalid JSON"})
 self.connection.badAction(5)
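This is the most mechanical part of the commit: the Python 2 form "except Exception, err:" is a SyntaxError in Python 3 (PEP 3110), and the bound name is deleted when the handler exits. For example:

    import json

    try:
        json.loads("not json")
    except ValueError as err:       # "except ValueError, err:" no longer parses
        print("Parse error: %s" % err)
    # err is unbound after the handler in Python 3; copy it inside if needed later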
@@ -131,7 +131,7 @@ class FileRequest(object):
 else:
 try:
 valid = site.content_manager.verifyFile(inner_path, content)
-except Exception, err:
+except Exception as err:
 self.log.debug("Update for %s is invalid: %s" % (inner_path, err))
 valid = False
@@ -251,10 +251,10 @@ class FileRequest(object):

 return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]}

-except RequestError, err:
+except RequestError as err:
 self.log.debug("GetFile %s %s request error: %s" % (self.connection, params["inner_path"], Debug.formatException(err)))
 self.response({"error": "File read error: %s" % err})
-except Exception, err:
+except Exception as err:
 if config.verbose:
 self.log.debug("GetFile read error: %s" % Debug.formatException(err))
 self.response({"error": "File read error"})
@@ -306,7 +306,7 @@ class FileRequest(object):
 if config.verbose:
 self.log.debug(
 "Added %s peers to %s using pex, sending back %s" %
-(added, site, {key: len(val) for key, val in packed_peers.iteritems()})
+(added, site, {key: len(val) for key, val in packed_peers.items()})
 )

 back = {
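dict.iteritems()/iterkeys()/itervalues() no longer exist; in Python 3 items()/keys()/values() already return lazy views, so plain iteration is a drop-in replacement, and list() is only needed when the dict is mutated mid-loop or indexed. For example:

    d = {"a": 1, "b": 2}
    for k, v in d.items():          # lazy view, fine for read-only iteration
        print(k, v)

    for k in list(d.keys()):        # materialize first when deleting while iterating
        if d[k] > 1:
            del d[k]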
@@ -353,7 +353,7 @@ class FileRequest(object):
 back = collections.defaultdict(lambda: collections.defaultdict(list))
 found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit)

-for hash_id, peers in found.iteritems():
+for hash_id, peers in found.items():
 for peer in peers:
 ip_type = helper.getIpType(peer.ip)
 if len(back[ip_type][hash_id]) < 20:
@@ -385,7 +385,7 @@ class FileRequest(object):
 if config.verbose:
 self.log.debug(
 "Found: %s for %s hashids in %.3fs" %
-({key: len(val) for key, val in back.iteritems()}, len(params["hash_ids"]), time.time() - s)
+({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s)
 )
 self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes})
@@ -405,7 +405,7 @@ class FileRequest(object):

 # Send a simple Pong! answer
 def actionPing(self, params):
-self.response("Pong!")
+self.response(b"Pong!")

 # Check requested port of the other peer
 def actionCheckport(self, params):
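The pong body becomes bytes so it survives the binary-safe wire encoding unchanged; the matching check in Peer.py further down compares against b"Pong!". A sketch of the round trip, assuming the msgpack transport ZeroNet uses:

    import msgpack

    packed = msgpack.packb({"body": b"Pong!"}, use_bin_type=True)
    res = msgpack.unpackb(packed, raw=False)
    print(res["body"] == b"Pong!")   # True: bytes come back as bytes, not str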
@@ -10,7 +10,7 @@ from gevent.server import StreamServer
 import util
 from util import helper
 from Config import config
-from FileRequest import FileRequest
+from .FileRequest import FileRequest
 from Peer import PeerPortchecker
 from Site import SiteManager
 from Connection import ConnectionServer
@@ -41,7 +41,7 @@ class FileServer(ConnectionServer):
 port = config.tor_hs_port
 config.fileserver_port = port
 elif port == 0:  # Use random port
-port_range_from, port_range_to = map(int, config.fileserver_port_range.split("-"))
+port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-")))
 port = self.getRandomPort(ip, port_range_from, port_range_to)
 config.fileserver_port = port
 if not port:
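map() returns an iterator in Python 3, and 2to3 wraps it in list() defensively. Strictly speaking, tuple unpacking already drains the iterator here, so the wrapper is harmless rather than required:

    port_range = "10000-20000"
    port_from, port_to = map(int, port_range.split("-"))   # unpacking consumes the iterator
    print(port_from, port_to)                              # 10000 20000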
@@ -59,7 +59,7 @@ class FileServer(ConnectionServer):
 self.stream_server_proxy = StreamServer(
 ("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
 )
-except Exception, err:
+except Exception as err:
 self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err))

 self.port_opened = {}
@@ -117,7 +117,7 @@ class FileServer(ConnectionServer):
 def listenProxy(self):
 try:
 self.stream_server_proxy.serve_forever()
-except Exception, err:
+except Exception as err:
 if err.errno == 98:  # Address already in use error
 self.log.debug("StreamServer proxy listen error: %s" % err)
 else:
@@ -231,7 +231,7 @@ class FileServer(ConnectionServer):
 if not self.port_opened or force_port_check:  # Test and open port if not tested yet
 if len(self.sites) <= 2:  # Don't wait port opening on first startup
 sites_checking = True
-for address, site in self.sites.items():
+for address, site in list(self.sites.items()):
 gevent.spawn(self.checkSite, site, check_files)

 self.portCheck()
@@ -242,7 +242,7 @@ class FileServer(ConnectionServer):
 if not sites_checking:
 check_pool = gevent.pool.Pool(5)
 # Check sites integrity
-for site in sorted(self.sites.values(), key=lambda site: site.settings.get("modified", 0), reverse=True):
+for site in sorted(list(self.sites.values()), key=lambda site: site.settings.get("modified", 0), reverse=True):
 if not site.settings["serving"]:
 continue
 check_thread = check_pool.spawn(self.checkSite, site, check_files)  # Check in new thread
@@ -263,7 +263,7 @@ class FileServer(ConnectionServer):
 (len(self.connections), self.has_internet, len(peers_protected))
 )

-for address, site in self.sites.items():
+for address, site in list(self.sites.items()):
 if not site.settings["serving"]:
 continue
@@ -273,7 +273,7 @@ class FileServer(ConnectionServer):
 time.sleep(1)  # Prevent too quick request

 peers_protected = set([])
-for address, site in self.sites.items():
+for address, site in list(self.sites.items()):
 if not site.settings["serving"]:
 continue
@@ -313,7 +313,7 @@ class FileServer(ConnectionServer):
 while 1:
 config.loadTrackersFile()
 s = time.time()
-for address, site in self.sites.items():
+for address, site in list(self.sites.items()):
 if not site.settings["serving"]:
 continue
 gevent.spawn(self.announceSite, site).join(timeout=10)
@@ -1,2 +1,2 @@
-from FileServer import FileServer
+from .FileServer import FileServer
-from FileRequest import FileRequest
+from .FileRequest import FileRequest
@@ -6,11 +6,11 @@ import collections

 import gevent

-from cStringIO import StringIO
+import io
 from Debug import Debug
 from Config import config
 from util import helper
-from PeerHashfield import PeerHashfield
+from .PeerHashfield import PeerHashfield
 from Plugin import PluginManager

 if config.use_tempfiles:
@@ -95,7 +95,7 @@ class Peer(object):
 self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection)
 self.reputation += 1
 self.connection.sites += 1
-except Exception, err:
+except Exception as err:
 self.onConnectionError("Getting connection error")
 self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
 (Debug.formatException(err), self.connection_error, self.hash_failed))
@@ -164,7 +164,7 @@ class Peer(object):
 return res
 else:
 raise Exception("Invalid response: %s" % res)
-except Exception, err:
+except Exception as err:
 if type(err).__name__ == "Notify":  # Greenlet killed by worker
 self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd))
 break
@@ -195,7 +195,7 @@ class Peer(object):
 if config.use_tempfiles:
 buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b')
 else:
-buff = StringIO()
+buff = io.BytesIO()

 s = time.time()
 while True:  # Read in smaller parts
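cStringIO is gone in Python 3; the io module splits it into io.BytesIO for binary buffers and io.StringIO for text. Downloaded file chunks arrive as bytes, hence BytesIO:

    import io

    buff = io.BytesIO()
    buff.write(b"chunk1")
    buff.write(b"chunk2")
    buff.seek(0)
    print(buff.read())   # b'chunk1chunk2'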
@@ -240,7 +240,7 @@ class Peer(object):
 with gevent.Timeout(10.0, False):  # 10 sec timeout, don't raise exception
 res = self.request("ping")

-if res and "body" in res and res["body"] == "Pong!":
+if res and "body" in res and res["body"] == b"Pong!":
 response_time = time.time() - s
 break  # All fine, exit from for loop
 # Timeout reached or bad response
@@ -267,12 +267,9 @@ class Peer(object):
 request["peers_onion"] = packed_peers["onion"]
 if packed_peers["ipv6"]:
 request["peers_ipv6"] = packed_peers["ipv6"]

 res = self.request("pex", request)

 if not res or "error" in res:
 return False

 added = 0

 # Remove unsupported peer types
@@ -331,13 +328,13 @@ class Peer(object):
 key = "peers"
 else:
 key = "peers_%s" % ip_type
-for hash, peers in res.get(key, {}).items()[0:30]:
+for hash, peers in list(res.get(key, {}).items())[0:30]:
 if ip_type == "onion":
 unpacker_func = helper.unpackOnionAddress
 else:
 unpacker_func = helper.unpackAddress

-back[hash] += map(unpacker_func, peers)
+back[hash] += list(map(unpacker_func, peers))

 for hash in res.get("my", []):
 back[hash].append((self.connection.ip, self.connection.port))
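Dict views cannot be sliced, so the [0:30] cap needs a list() first; the list() around map() on the next change is stricter than necessary, since list += accepts any iterable:

    d = {"a": 1, "b": 2, "c": 3}
    # d.items()[0:2]              # TypeError in Python 3: views do not support slicing
    print(list(d.items())[0:2])

    nums = [0]
    nums += map(str, [1, 2])      # works without list(); += extends from any iterable
    print(nums)                   # [0, '1', '2']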
@@ -68,8 +68,8 @@ if __name__ == "__main__":
 s = time.time()
 for i in range(10000):
 field.appendHashId(i)
-print time.time()-s
+print(time.time()-s)
 s = time.time()
 for i in range(10000):
 field.hasHash("AABB")
-print time.time()-s
+print(time.time()-s)
@@ -1,6 +1,6 @@
 import logging
-import urllib
+import urllib.request
-import urllib2
+import urllib.parse
 import re
 import time
|
@ -16,10 +16,10 @@ class PeerPortchecker(object):
|
||||||
|
|
||||||
def requestUrl(self, url, post_data=None):
|
def requestUrl(self, url, post_data=None):
|
||||||
if type(post_data) is dict:
|
if type(post_data) is dict:
|
||||||
post_data = urllib.urlencode(post_data)
|
post_data = urllib.parse.urlencode(post_data).encode("utf8")
|
||||||
req = urllib2.Request(url, post_data)
|
req = urllib.request.Request(url, post_data)
|
||||||
req.add_header('Referer', url)
|
req.add_header('Referer', url)
|
||||||
return urllib2.urlopen(req, timeout=20.0)
|
return urllib.request.urlopen(req, timeout=20.0)
|
||||||
|
|
||||||
def portOpen(self, port):
|
def portOpen(self, port):
|
||||||
self.log.info("Trying to open port using UpnpPunch...")
|
self.log.info("Trying to open port using UpnpPunch...")
|
||||||
|
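Python 2's urllib and urllib2 merged into urllib.request plus urllib.parse, and POST bodies must now be bytes, hence the .encode("utf8") after urlencode(). The request shape, with the network call left commented out and a hypothetical URL:

    import urllib.request
    import urllib.parse

    post_data = urllib.parse.urlencode({"port": 15441}).encode("utf8")   # bytes, or urlopen raises TypeError
    req = urllib.request.Request("http://example.com/check", post_data)
    req.add_header("Referer", "http://example.com/")
    # urllib.request.urlopen(req, timeout=20.0)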
@@ -67,7 +67,7 @@ class PeerPortchecker(object):
 return res

 def checkCanyouseeme(self, port):
-data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read()
+data = urllib.request.urlopen("http://www.canyouseeme.org/", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8")
 message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
 message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " "))  # Strip http tags
@@ -85,7 +85,7 @@ class PeerPortchecker(object):
 raise Exception("Invalid response: %s" % message)

 def checkPortchecker(self, port):
-data = urllib2.urlopen("https://portchecker.co/check", "port=%s" % port, timeout=20.0).read()
+data = urllib.request.urlopen("https://portchecker.co/check", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8")
 message = re.match('.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1)
 message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip())  # Strip http tags
|
@ -109,7 +109,6 @@ class PeerPortchecker(object):
|
||||||
|
|
||||||
ip = re.match('.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1)
|
ip = re.match('.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1)
|
||||||
token = re.match('.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1)
|
token = re.match('.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1)
|
||||||
print ip
|
|
||||||
|
|
||||||
post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."}
|
post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."}
|
||||||
data = self.requestUrl(url, post_data).read()
|
data = self.requestUrl(url, post_data).read()
|
||||||
|
@@ -168,4 +167,4 @@ if __name__ == "__main__":
 peer_portchecker = PeerPortchecker()
 for func_name in ["checkIpv6scanner", "checkMyaddr", "checkPortchecker", "checkCanyouseeme"]:
 s = time.time()
-print(func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s))
+print((func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s)))
@@ -1,2 +1,2 @@
-from Peer import Peer
+from .Peer import Peer
-from PeerHashfield import PeerHashfield
+from .PeerHashfield import PeerHashfield
@@ -7,6 +7,7 @@ from collections import defaultdict

 from Debug import Debug
 from Config import config
+import importlib


 class PluginManager:
@@ -48,7 +49,7 @@ class PluginManager:
 self.log.debug("Loading plugin: %s" % dir_name)
 try:
 __import__(dir_name)
-except Exception, err:
+except Exception as err:
 self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err)))
 if dir_name not in self.plugin_names:
 self.plugin_names.append(dir_name)
@@ -62,19 +63,19 @@ class PluginManager:
 self.after_load = []
 self.plugins_before = self.plugins
 self.plugins = defaultdict(list)  # Reset registered plugins
-for module_name, module in sys.modules.items():
+for module_name, module in list(sys.modules.items()):
-if module and "__file__" in dir(module) and self.plugin_path in module.__file__:  # Module file within plugin_path
+if module and getattr(module, "__file__", None) and self.plugin_path in module.__file__:  # Module file in plugin_path
 if "allow_reload" in dir(module) and not module.allow_reload:  # Reload disabled
 # Re-add non-reloadable plugins
-for class_name, classes in self.plugins_before.iteritems():
+for class_name, classes in self.plugins_before.items():
 for c in classes:
 if c.__module__ != module.__name__:
 continue
 self.plugins[class_name].append(c)
 else:
 try:
-reload(module)
+importlib.reload(module)
-except Exception, err:
+except Exception as err:
 self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err)))

 self.loadPlugins()  # Load new plugins
@@ -82,7 +83,7 @@ class PluginManager:
 # Change current classes in memory
 import gc
 patched = {}
-for class_name, classes in self.plugins.iteritems():
+for class_name, classes in self.plugins.items():
 classes = classes[:]  # Copy the current plugins
 classes.reverse()
 base_class = self.pluggable[class_name]  # Original class
@@ -96,8 +97,8 @@ class PluginManager:

 # Change classes in modules
 patched = {}
-for class_name, classes in self.plugins.iteritems():
+for class_name, classes in self.plugins.items():
-for module_name, module in sys.modules.iteritems():
+for module_name, module in list(sys.modules.items()):
 if class_name in dir(module):
 if "__class__" not in dir(getattr(module, class_name)):  # Not a class
 continue
@@ -134,7 +135,7 @@ def acceptPlugins(base_class):
 if str(key) in plugin_manager.subclass_order[class_name]
 else 9999
 )
-plugin_manager.subclass_order[class_name] = map(str, classes)
+plugin_manager.subclass_order[class_name] = list(map(str, classes))

 classes.reverse()
 classes.append(base_class)  # Add the class itself to end of inherience line
@@ -181,4 +182,4 @@ if __name__ == "__main__":
 else:
 return "Can't route to", path

-print Request().route("MainPage")
+print(Request().route("MainPage"))
@@ -7,6 +7,7 @@ import random
 import sys
 import hashlib
 import collections
+import base64

 import gevent
 import gevent.pool
@@ -17,14 +18,14 @@ from Peer import Peer
 from Worker import WorkerManager
 from Debug import Debug
 from Content import ContentManager
-from SiteStorage import SiteStorage
+from .SiteStorage import SiteStorage
 from Crypt import CryptHash
 from util import helper
 from util import Diff
 from Plugin import PluginManager
 from File import FileServer
-from SiteAnnouncer import SiteAnnouncer
+from .SiteAnnouncer import SiteAnnouncer
-import SiteManager
+from . import SiteManager


 @PluginManager.acceptPlugins
@@ -32,7 +33,8 @@ class Site(object):

 def __init__(self, address, allow_create=True, settings=None):
 self.address = str(re.sub("[^A-Za-z0-9]", "", address))  # Make sure its correct address
-self.address_hash = hashlib.sha256(self.address).digest()
+self.address_hash = hashlib.sha256(self.address.encode("ascii")).digest()
+self.address_sha1 = hashlib.sha1(self.address.encode("ascii")).digest()
 self.address_short = "%s..%s" % (self.address[:6], self.address[-4:])  # Short address for logging
 self.log = logging.getLogger("Site:%s" % self.address_short)
 self.addEventListeners()
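hashlib refuses str input in Python 3, so the address is encoded before hashing (site addresses are base58, so ascii is safe). For example:

    import hashlib

    address = "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D"   # example address
    digest = hashlib.sha256(address.encode("ascii")).digest()
    print(digest.hex()[:16])
    # hashlib.sha256(address) would raise a TypeError on Python 3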
@@ -127,7 +129,7 @@ class Site(object):
 def getSettingsCache(self):
 back = {}
 back["bad_files"] = self.bad_files
-back["hashfield"] = self.content_manager.hashfield.tostring().encode("base64")
+back["hashfield"] = base64.b64encode(self.content_manager.hashfield.tobytes()).decode("ascii")
 return back

 # Max site size in MB
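Two things change in that one line: bytes-to-bytes codecs like .encode("base64") were removed in Python 3, so the base64 module is called directly, and array's tostring() alias gives way to tobytes(). A round trip, assuming the hashfield is an array of unsigned shorts as in PeerHashfield:

    import array
    import base64

    hashfield = array.array("H", [1, 2, 3])
    encoded = base64.b64encode(hashfield.tobytes()).decode("ascii")   # JSON-safe str
    print(encoded)
    print(array.array("H", base64.b64decode(encoded)))               # array('H', [1, 2, 3])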
@@ -173,7 +175,7 @@ class Site(object):
 # Start download files
 file_threads = []
 if download_files:
-for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
+for file_relative_path in list(self.content_manager.contents[inner_path].get("files", {}).keys()):
 file_inner_path = content_inner_dir + file_relative_path

 # Try to diff first
@@ -204,7 +206,7 @@ class Site(object):
 "Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" %
 (file_inner_path, time_diff, time_verify, time_write, time_on_done)
 )
-except Exception, err:
+except Exception as err:
 self.log.debug("Failed to patch %s: %s" % (file_inner_path, err))
 diff_success = False
@@ -218,7 +220,7 @@ class Site(object):
 if inner_path == "content.json":
 gevent.spawn(self.updateHashfield)

-for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys():
+for file_relative_path in list(self.content_manager.contents[inner_path].get("files_optional", {}).keys()):
 file_inner_path = content_inner_dir + file_relative_path
 if file_inner_path not in changed and not self.bad_files.get(file_inner_path):
 continue
@@ -233,7 +235,7 @@ class Site(object):

 # Wait for includes download
 include_threads = []
-for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
+for file_relative_path in list(self.content_manager.contents[inner_path].get("includes", {}).keys()):
 file_inner_path = content_inner_dir + file_relative_path
 include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
 include_threads.append(include_thread)
@@ -262,7 +264,7 @@ class Site(object):
 def getReachableBadFiles(self):
 if not self.bad_files:
 return False
-return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3]
+return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3]

 # Retry download bad files
 def retryBadFiles(self, force=False):
@@ -272,7 +274,7 @@ class Site(object):
 content_inner_paths = []
 file_inner_paths = []

-for bad_file, tries in self.bad_files.items():
+for bad_file, tries in list(self.bad_files.items()):
 if force or random.randint(0, min(40, tries)) < 4:  # Larger number tries = less likely to check every 15min
 if bad_file.endswith("content.json"):
 content_inner_paths.append(bad_file)
@@ -286,7 +288,7 @@ class Site(object):
 self.pooledDownloadFile(file_inner_paths, only_if_bad=True)

 def checkBadFiles(self):
-for bad_file in self.bad_files.keys():
+for bad_file in list(self.bad_files.keys()):
 file_info = self.content_manager.getFileInfo(bad_file)
 if bad_file.endswith("content.json"):
 if file_info is False and bad_file != "content.json":
@@ -374,7 +376,7 @@ class Site(object):
 queried.append(peer)
 modified_contents = []
 my_modified = self.content_manager.listModified(since)
-for inner_path, modified in res["modified_files"].iteritems():  # Check if the peer has newer files than we
+for inner_path, modified in res["modified_files"].items():  # Check if the peer has newer files than we
 has_newer = int(modified) > my_modified.get(inner_path, 0)
 has_older = int(modified) < my_modified.get(inner_path, 0)
 if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified):
@@ -480,7 +482,7 @@ class Site(object):
 def redownloadContents(self):
 # Download all content.json again
 content_threads = []
-for inner_path in self.content_manager.contents.keys():
+for inner_path in list(self.content_manager.contents.keys()):
 content_threads.append(self.needFile(inner_path, update=True, blocking=False))

 self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
@@ -523,7 +525,7 @@ class Site(object):
 })
 if result:
 break
-except Exception, err:
+except Exception as err:
 self.log.error("Publish error: %s" % Debug.formatException(err))
 result = {"exception": Debug.formatException(err)}
@@ -563,7 +565,7 @@ class Site(object):
 peers = set(peers)

 self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." % (
-inner_path, limit, len(self.peers), num_connected_peers, diffs.keys(), float(len(str(diffs))) / 1024
+inner_path, limit, len(self.peers), num_connected_peers, list(diffs.keys()), float(len(str(diffs))) / 1024
 ))

 if not peers:
@@ -631,8 +633,8 @@ class Site(object):
 )

 # Copy files
-for content_inner_path, content in self.content_manager.contents.items():
+for content_inner_path, content in list(self.content_manager.contents.items()):
-file_relative_paths = content.get("files", {}).keys()
+file_relative_paths = list(content.get("files", {}).keys())

 # Sign content.json at the end to make sure every file is included
 file_relative_paths.sort()
@@ -812,7 +814,7 @@ class Site(object):
 self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers)))

 if connected < need:  # Need more than we have
-for peer in self.peers.values():
+for peer in list(self.peers.values()):
 if not peer.connection or not peer.connection.connected:  # No peer connection or disconnected
 peer.pex()  # Initiate peer exchange
 if peer.connection and peer.connection.connected:
@@ -831,7 +833,7 @@ class Site(object):

 # Return: Probably peers verified to be connectable recently
 def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True):
-peers = self.peers.values()
+peers = list(self.peers.values())
 found = []
 for peer in peers:
 if peer.key.endswith(":0"):
@@ -874,7 +876,7 @@ class Site(object):
 # Add random peers
 need_more = need_num - len(found)
 found_more = sorted(
-self.peers.values()[0:need_more * 50],
+list(self.peers.values())[0:need_more * 50],
 key=lambda peer: peer.reputation,
 reverse=True
 )[0:need_more * 2]
|
||||||
|
|
||||||
# Cleanup probably dead peers and close connection if too much
|
# Cleanup probably dead peers and close connection if too much
|
||||||
def cleanupPeers(self, peers_protected=[]):
|
def cleanupPeers(self, peers_protected=[]):
|
||||||
peers = self.peers.values()
|
peers = list(self.peers.values())
|
||||||
if len(peers) > 20:
|
if len(peers) > 20:
|
||||||
# Cleanup old peers
|
# Cleanup old peers
|
||||||
removed = 0
|
removed = 0
|
||||||
|
@@ -1019,7 +1021,7 @@ class Site(object):
 # Send site status update to websocket clients
 def updateWebsocket(self, **kwargs):
 if kwargs:
-param = {"event": kwargs.items()[0]}
+param = {"event": list(kwargs.items())[0]}
 else:
 param = None
 for ws in self.websockets:
@@ -1,17 +1,16 @@
 import random
 import time
 import hashlib
-import urllib
+import urllib.request
-import urllib2
 import struct
 import socket
 import re
 import collections

-from lib import bencode
+import bencode
 from lib.subtl.subtl import UdpTrackerClient
-from lib.PySocks import socks
+import socks
-from lib.PySocks import sockshandler
+import sockshandler
 import gevent

 from Plugin import PluginManager
@@ -69,7 +68,7 @@ class SiteAnnouncer(object):
 back = []
 # Type of addresses they can reach me
 if config.trackers_proxy == "disable":
-for ip_type, opened in self.site.connection_server.port_opened.items():
+for ip_type, opened in list(self.site.connection_server.port_opened.items()):
 if opened:
 back.append(ip_type)
 if self.site.connection_server.tor_manager.start_onions:
@@ -221,7 +220,7 @@ class SiteAnnouncer(object):
 if error:
 self.stats[tracker]["status"] = "error"
 self.stats[tracker]["time_status"] = time.time()
-self.stats[tracker]["last_error"] = str(err).decode("utf8", "ignore")
+self.stats[tracker]["last_error"] = str(error)
 self.stats[tracker]["time_last_error"] = time.time()
 self.stats[tracker]["num_error"] += 1
 self.stats[tracker]["num_request"] += 1
@@ -359,9 +358,9 @@ class SiteAnnouncer(object):
 try:
 peer_data = bencode.decode(response)["peers"]
 response = None
-peer_count = len(peer_data) / 6
+peer_count = int(len(peer_data) / 6)
 peers = []
-for peer_offset in xrange(peer_count):
+for peer_offset in range(peer_count):
 off = 6 * peer_offset
 peer = peer_data[off:off + 6]
 addr, port = struct.unpack('!LH', peer)
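The / operator is true division in Python 3 and returns a float even for two ints, so the byte-count math needs an explicit int(); xrange() is gone, and range() is already lazy. For example:

    peer_data = b"\x00" * 18            # three packed 6-byte (ip, port) records
    peer_count = len(peer_data) // 6    # floor division; int(len(...) / 6) is equivalent here
    for peer_offset in range(peer_count):
        off = 6 * peer_offset
        print(peer_data[off:off + 6])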
@@ -379,7 +378,7 @@ class SiteAnnouncer(object):
 peers = self.site.getConnectedPeers()

 if len(peers) == 0:  # Small number of connected peers for this site, connect to any
-peers = self.site.peers.values()
+peers = list(self.site.peers.values())
 need_num = 10

 random.shuffle(peers)
@@ -399,7 +398,7 @@ class SiteAnnouncer(object):

 def updateWebsocket(self, **kwargs):
 if kwargs:
-param = {"event": kwargs.items()[0]}
+param = {"event": list(kwargs.items())[0]}
 else:
 param = None
@@ -28,11 +28,11 @@ class SiteManager(object):
 def load(self, cleanup=True, startup=False):
 self.log.debug("Loading sites...")
 self.loaded = False
-from Site import Site
+from .Site import Site
 address_found = []
 added = 0
 # Load new adresses
-for address, settings in json.load(open("%s/sites.json" % config.data_dir)).iteritems():
+for address, settings in json.load(open("%s/sites.json" % config.data_dir)).items():
 if address not in self.sites:
 if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
 # Root content.json exists, try load site
|
@ -40,7 +40,7 @@ class SiteManager(object):
|
||||||
try:
|
try:
|
||||||
site = Site(address, settings=settings)
|
site = Site(address, settings=settings)
|
||||||
site.content_manager.contents.get("content.json")
|
site.content_manager.contents.get("content.json")
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
self.log.debug("Error loading site %s: %s" % (address, err))
|
self.log.debug("Error loading site %s: %s" % (address, err))
|
||||||
continue
|
continue
|
||||||
self.sites[address] = site
|
self.sites[address] = site
|
||||||
|
@@ -56,7 +56,7 @@ class SiteManager(object):

 # Remove deleted adresses
 if cleanup:
-for address in self.sites.keys():
+for address in list(self.sites.keys()):
 if address not in address_found:
 del(self.sites[address])
 self.log.debug("Removed site: %s" % address)
@@ -93,7 +93,7 @@ class SiteManager(object):
 data = {}
 # Generate data file
 s = time.time()
-for address, site in self.list().iteritems():
+for address, site in self.list().items():
 if recalculate_size:
 site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize()  # Update site size
 data[address] = site.settings
@@ -108,7 +108,7 @@ class SiteManager(object):
 time_write = time.time() - s

 # Remove cache from site settings
-for address, site in self.list().iteritems():
+for address, site in self.list().items():
 site.settings["cache"] = {}

 self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write))
@@ -134,12 +134,12 @@ class SiteManager(object):

 # Return or create site and start download site files
 def need(self, address, all_file=True, settings=None):
-from Site import Site
+from .Site import Site
 site = self.get(address)
 if not site:  # Site not exist yet
 self.sites_changed = int(time.time())
 # Try to find site with differect case
-for recover_address, recover_site in self.sites.items():
+for recover_address, recover_site in list(self.sites.items()):
 if recover_address.lower() == address.lower():
 return recover_site
@@ -23,7 +23,7 @@ from Translate import translate as _
 class SiteStorage(object):
 def __init__(self, site, allow_create=True):
 self.site = site
-self.directory = u"%s/%s" % (config.data_dir, self.site.address)  # Site data diretory
+self.directory = "%s/%s" % (config.data_dir, self.site.address)  # Site data diretory
 self.allowed_dir = os.path.abspath(self.directory)  # Only serve file within this dir
 self.log = site.log
 self.db = None  # Db class
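The u"..." prefixes disappear throughout SiteStorage because every str literal is unicode in Python 3; the prefix is still accepted (PEP 414) but is a no-op:

    s1 = u"%s/%s" % ("data", "site")
    s2 = "%s/%s" % ("data", "site")
    print(s1 == s2)   # True: the u prefix changes nothing in Python 3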
@@ -59,7 +59,7 @@ class SiteStorage(object):
 def getDbSchema(self):
 try:
 schema = self.loadJson("dbschema.json")
-except Exception, err:
+except Exception as err:
 raise Exception("dbschema.json is not a valid JSON: %s" % err)
 return schema
@@ -92,7 +92,7 @@ class SiteStorage(object):
 # Return possible db files for the site
 def getDbFiles(self):
 found = 0
-for content_inner_path, content in self.site.content_manager.contents.iteritems():
+for content_inner_path, content in self.site.content_manager.contents.items():
 # content.json file itself
 if self.isFile(content_inner_path):
 yield content_inner_path, self.getPath(content_inner_path)
@@ -100,7 +100,7 @@ class SiteStorage(object):
 self.log.error("[MISSING] %s" % content_inner_path)
 # Data files in content.json
 content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
-for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
+for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
 if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"):
 continue  # We only interesed in json files
 file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
@ -181,7 +181,7 @@ class SiteStorage(object):
|
||||||
self.event_db_busy.get() # Wait for event
|
self.event_db_busy.get() # Wait for event
|
||||||
try:
|
try:
|
||||||
res = self.getDb().execute(query, params)
|
res = self.getDb().execute(query, params)
|
||||||
except sqlite3.DatabaseError, err:
|
except sqlite3.DatabaseError as err:
|
||||||
if err.__class__.__name__ == "DatabaseError":
|
if err.__class__.__name__ == "DatabaseError":
|
||||||
self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
|
self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
|
||||||
self.rebuildDb()
|
self.rebuildDb()
|
||||||
|
@ -240,7 +240,7 @@ class SiteStorage(object):
|
||||||
os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after))
|
os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after))
|
||||||
err = None
|
err = None
|
||||||
break
|
break
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry))
|
self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry))
|
||||||
time.sleep(0.1 + retry)
|
time.sleep(0.1 + retry)
|
||||||
if err:
|
if err:
|
||||||
|
@ -297,7 +297,7 @@ class SiteStorage(object):
|
||||||
self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file))
|
self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file))
|
||||||
try:
|
try:
|
||||||
self.updateDbFile(inner_path, file)
|
self.updateDbFile(inner_path, file)
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
|
self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
|
||||||
self.closeDb()
|
self.closeDb()
|
||||||
|
|
||||||
|
@ -363,9 +363,9 @@ class SiteStorage(object):
|
||||||
return self.directory
|
return self.directory
|
||||||
|
|
||||||
if ".." in inner_path:
|
if ".." in inner_path:
|
||||||
raise Exception(u"File not allowed: %s" % inner_path)
|
raise Exception("File not allowed: %s" % inner_path)
|
||||||
|
|
||||||
return u"%s/%s" % (self.directory, inner_path)
|
return "%s/%s" % (self.directory, inner_path)
|
||||||
|
|
||||||
# Get site dir relative path
|
# Get site dir relative path
|
||||||
def getInnerPath(self, path):
|
def getInnerPath(self, path):
|
||||||
|
@ -375,7 +375,7 @@ class SiteStorage(object):
|
||||||
if path.startswith(self.directory):
|
if path.startswith(self.directory):
|
||||||
inner_path = path[len(self.directory) + 1:]
|
inner_path = path[len(self.directory) + 1:]
|
||||||
else:
|
else:
|
||||||
raise Exception(u"File not allowed: %s" % path)
|
raise Exception("File not allowed: %s" % path)
|
||||||
return inner_path
|
return inner_path
|
||||||
|
|
||||||
# Verify all files sha512sum using content.json
|
# Verify all files sha512sum using content.json
|
||||||
|
@ -390,7 +390,7 @@ class SiteStorage(object):
|
||||||
self.log.debug("VerifyFile content.json not exists")
|
self.log.debug("VerifyFile content.json not exists")
|
||||||
self.site.needFile("content.json", update=True) # Force update to fix corrupt file
|
self.site.needFile("content.json", update=True) # Force update to fix corrupt file
|
||||||
self.site.content_manager.loadContent() # Reload content.json
|
self.site.content_manager.loadContent() # Reload content.json
|
||||||
for content_inner_path, content in self.site.content_manager.contents.items():
|
for content_inner_path, content in list(self.site.content_manager.contents.items()):
|
||||||
back["num_content"] += 1
|
back["num_content"] += 1
|
||||||
i += 1
|
i += 1
|
||||||
if i % 50 == 0:
|
if i % 50 == 0:
|
||||||
|
@ -400,7 +400,7 @@ class SiteStorage(object):
|
||||||
self.log.debug("[MISSING] %s" % content_inner_path)
|
self.log.debug("[MISSING] %s" % content_inner_path)
|
||||||
bad_files.append(content_inner_path)
|
bad_files.append(content_inner_path)
|
||||||
|
|
||||||
for file_relative_path in content.get("files", {}).keys():
|
for file_relative_path in list(content.get("files", {}).keys()):
|
||||||
back["num_file"] += 1
|
back["num_file"] += 1
|
||||||
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
||||||
file_inner_path = file_inner_path.strip("/") # Strip leading /
|
file_inner_path = file_inner_path.strip("/") # Strip leading /
|
||||||
|
@ -418,7 +418,7 @@ class SiteStorage(object):
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
|
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
ok = False
|
ok = False
|
||||||
|
|
||||||
if not ok:
|
if not ok:
|
||||||
|
@ -430,7 +430,7 @@ class SiteStorage(object):
|
||||||
# Optional files
|
# Optional files
|
||||||
optional_added = 0
|
optional_added = 0
|
||||||
optional_removed = 0
|
optional_removed = 0
|
||||||
for file_relative_path in content.get("files_optional", {}).keys():
|
for file_relative_path in list(content.get("files_optional", {}).keys()):
|
||||||
back["num_optional"] += 1
|
back["num_optional"] += 1
|
||||||
file_node = content["files_optional"][file_relative_path]
|
file_node = content["files_optional"][file_relative_path]
|
||||||
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
||||||
|
@ -451,7 +451,7 @@ class SiteStorage(object):
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
|
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
ok = False
|
ok = False
|
||||||
|
|
||||||
if ok:
|
if ok:
|
||||||
|
@ -475,7 +475,7 @@ class SiteStorage(object):
|
||||||
)
|
)
|
||||||
|
|
||||||
self.site.content_manager.contents.db.processDelayed()
|
self.site.content_manager.contents.db.processDelayed()
|
||||||
time.sleep(0.0001) # Context switch to avoid gevent hangs
|
time.sleep(0.001) # Context switch to avoid gevent hangs
|
||||||
return back
|
return back
|
||||||
|
|
||||||
# Check and try to fix site files integrity
|
# Check and try to fix site files integrity
|
||||||
|
@ -497,15 +497,15 @@ class SiteStorage(object):
|
||||||
def deleteFiles(self):
|
def deleteFiles(self):
|
||||||
self.log.debug("Deleting files from content.json...")
|
self.log.debug("Deleting files from content.json...")
|
||||||
files = [] # Get filenames
|
files = [] # Get filenames
|
||||||
for content_inner_path in self.site.content_manager.contents.keys():
|
for content_inner_path in list(self.site.content_manager.contents.keys()):
|
||||||
content = self.site.content_manager.contents.get(content_inner_path, {})
|
content = self.site.content_manager.contents.get(content_inner_path, {})
|
||||||
files.append(content_inner_path)
|
files.append(content_inner_path)
|
||||||
# Add normal files
|
# Add normal files
|
||||||
for file_relative_path in content.get("files", {}).keys():
|
for file_relative_path in list(content.get("files", {}).keys()):
|
||||||
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
||||||
files.append(file_inner_path)
|
files.append(file_inner_path)
|
||||||
# Add optional files
|
# Add optional files
|
||||||
for file_relative_path in content.get("files_optional", {}).keys():
|
for file_relative_path in list(content.get("files_optional", {}).keys()):
|
||||||
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
|
||||||
files.append(file_inner_path)
|
files.append(file_inner_path)
|
||||||
|
|
||||||
|
@ -518,7 +518,7 @@ class SiteStorage(object):
|
||||||
db_path = self.getPath(schema["db_file"])
|
db_path = self.getPath(schema["db_file"])
|
||||||
if os.path.isfile(db_path):
|
if os.path.isfile(db_path):
|
||||||
os.unlink(db_path)
|
os.unlink(db_path)
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
self.log.error("Db file delete error: %s" % err)
|
self.log.error("Db file delete error: %s" % err)
|
||||||
|
|
||||||
for inner_path in files:
|
for inner_path in files:
|
||||||
|
@ -528,8 +528,8 @@ class SiteStorage(object):
|
||||||
try:
|
try:
|
||||||
os.unlink(path)
|
os.unlink(path)
|
||||||
break
|
break
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
self.log.error(u"Error removing %s: %s, try #%s" % (inner_path, err, retry))
|
self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry))
|
||||||
time.sleep(float(retry) / 10)
|
time.sleep(float(retry) / 10)
|
||||||
self.onUpdated(inner_path, False)
|
self.onUpdated(inner_path, False)
|
||||||
|
|
||||||
|
|
|
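The two recurring changes in this file are the `except ... as err` syntax (mandatory in Python 3) and wrapping dictionary views in `list()` before iterating, which snapshots the view so entries can be deleted mid-loop. A minimal sketch of both Python 3 patterns (the `sites` dict below is illustrative, not from the codebase):

import time

sites = {"1abc": {"found": 0}, "1def": {"found": time.time()}}

# list() snapshots the items view, so deleting entries mid-loop is safe in Python 3
for address, site in list(sites.items()):
    if time.time() - site["found"] > 20 * 60:
        del sites[address]

try:
    raise ValueError("boom")
except ValueError as err:  # Python 2's "except ValueError, err:" is a SyntaxError in 3
    print("caught: %s" % err)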
@@ -1,3 +1,3 @@
-from Site import Site
-from SiteStorage import SiteStorage
-from SiteAnnouncer import SiteAnnouncer
+from .Site import Site
+from .SiteStorage import SiteStorage
+from .SiteAnnouncer import SiteAnnouncer
@@ -8,7 +8,7 @@ import socket
 import ssl
 sys.path.append(os.path.abspath(".."))  # Imports relative to src dir
 
-import cStringIO as StringIO
+import io as StringIO
 import gevent
 
 from gevent.server import StreamServer

@@ -46,8 +46,8 @@ def handle(sock_raw, addr):
             )
         else:
             sock.sendall(data)
-    except Exception, err:
-        print err
+    except Exception as err:
+        print(err)
     try:
         sock.shutdown(gevent.socket.SHUT_WR)
         sock.close()

@@ -102,7 +102,7 @@ def getData():
         total_num += 1
         total_bytes += buff.tell()
         if not data:
-            print "No data"
+            print("No data")
 
     sock.shutdown(gevent.socket.SHUT_WR)
     sock.close()

@@ -119,8 +119,8 @@ def info():
     else:
         memory_info = process.get_memory_info
     while 1:
-        print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s,
-        print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)
+        print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ')
+        print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20))
         time.sleep(1)
 
 gevent.spawn(info)

@@ -132,7 +132,7 @@ for test in range(1):
     gevent.joinall(clients)
 
 
-print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s
+print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s)
 
 # Separate client/server process:
 # 10*10*100:
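For context, Python 2's trailing-comma print (which suppressed the newline) maps onto the `end` keyword of Python 3's `print()` function, as the `info()` loop above now uses. A standalone sketch:

# Python 2: print "a",  -> stays on the same line
# Python 3 equivalent via the end keyword:
print("a", end=' ')
print("b")  # output: a b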
@@ -1,7 +1,7 @@
 class Spy:
     def __init__(self, obj, func_name):
         self.obj = obj
-        self.func_name = func_name
+        self.__name__ = func_name
         self.func_original = getattr(self.obj, func_name)
         self.calls = []
 
@@ -10,11 +10,11 @@ class Spy:
             call = dict(enumerate(args, 1))
             call[0] = cls
             call.update(kwargs)
-            print "Logging", call
+            print("Logging", call)
             self.calls.append(call)
             return self.func_original(cls, *args, **kwargs)
-        setattr(self.obj, self.func_name, loggedFunc)
+        setattr(self.obj, self.__name__, loggedFunc)
         return self.calls
 
     def __exit__(self, *args, **kwargs):
-        setattr(self.obj, self.func_name, self.func_original)
+        setattr(self.obj, self.__name__, self.func_original)
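Spy works as a context manager: the block above (the body of its enter method, only partly visible in this hunk) patches the named method with `loggedFunc` and yields the `calls` list, and `__exit__` restores the original. A hedged usage sketch, assuming the `Spy` class is in scope; the `Greeter` class is made up for illustration:

class Greeter:
    def hello(self, name):
        return "hi %s" % name

# Patch Greeter.hello for the duration of the block and record every call
with Spy(Greeter, "hello") as calls:
    Greeter().hello("world")
print(calls)  # one recorded call: instance at key 0, positional args keyed by index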
@@ -1,6 +1,6 @@
 import json
 import time
-from cStringIO import StringIO
+import io
 
 import pytest
 
@@ -52,7 +52,7 @@ class TestContent:
 
         # Normal data
         data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
-        data = StringIO(json.dumps(data_dict))
+        data = io.StringIO(json.dumps(data_dict))
         assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
         # Reset
         del data_dict["signs"]

@@ -60,7 +60,7 @@ class TestContent:
         # Too large
         data_dict["files"]["data.json"]["size"] = 200000  # Emulate 2MB sized data.json
         data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
-        data = StringIO(json.dumps(data_dict))
+        data = io.StringIO(json.dumps(data_dict))
         with pytest.raises(VerifyError) as err:
             site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
         assert "Include too large" in str(err)

@@ -72,7 +72,7 @@ class TestContent:
         # Not allowed file
         data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"]
         data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
-        data = StringIO(json.dumps(data_dict))
+        data = io.StringIO(json.dumps(data_dict))
         with pytest.raises(VerifyError) as err:
             site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
         assert "File not allowed" in str(err)

@@ -83,7 +83,7 @@ class TestContent:
 
         # Should work again
         data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)}
-        data = StringIO(json.dumps(data_dict))
+        data = io.StringIO(json.dumps(data_dict))
         assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
 
     @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])

@@ -166,7 +166,7 @@ class TestContent:
         data_dict["signs"] = {
             "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
         }
-        data = StringIO(json.dumps(data_dict))
+        data = io.StringIO(json.dumps(data_dict))
         with pytest.raises(VerifyError) as err:
             site.content_manager.verifyFile(inner_path, data, ignore_same=False)
         assert "Wrong site address" in str(err)

@@ -178,7 +178,7 @@ class TestContent:
         data_dict["signs"] = {
             "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
         }
-        data = StringIO(json.dumps(data_dict))
+        data = io.StringIO(json.dumps(data_dict))
         with pytest.raises(VerifyError) as err:
             site.content_manager.verifyFile(inner_path, data, ignore_same=False)
         assert "Wrong inner_path" in str(err)

@@ -190,7 +190,7 @@ class TestContent:
         data_dict["signs"] = {
             "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
         }
-        data = StringIO(json.dumps(data_dict))
+        data = io.StringIO(json.dumps(data_dict))
         assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
 
     def testVerifyInnerPath(self, site):

@@ -206,7 +206,7 @@ class TestContent:
             data_dict["signs"] = {
                 "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
             }
-            data = StringIO(json.dumps(data_dict))
+            data = io.StringIO(json.dumps(data_dict))
             assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
 
         for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg"]:

@@ -218,7 +218,7 @@ class TestContent:
             data_dict["signs"] = {
                 "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
             }
-            data = StringIO(json.dumps(data_dict))
+            data = io.StringIO(json.dumps(data_dict))
             with pytest.raises(VerifyError) as err:
                 site.content_manager.verifyFile(inner_path, data, ignore_same=False)
             assert "Invalid relative path" in str(err)
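The repeated `StringIO` → `io.StringIO` change in these tests works because `json.dumps` returns `str` in Python 3, so a text buffer is the right file-like wrapper for the serialized content.json. A standalone sketch:

import io
import json

data = io.StringIO(json.dumps({"files": {}}))  # text buffer: json.dumps returns str in Python 3
print(json.load(data))  # {'files': {}}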
@@ -1,5 +1,5 @@
 import json
-from cStringIO import StringIO
+from io import StringIO
 
 import pytest
 
@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io
 
 
 class TestDb:

@@ -63,11 +63,11 @@ class TestDb:
         # Large ammount of IN values
         assert db.execute(
             "SELECT COUNT(*) AS num FROM test WHERE ?",
-            {"not__test_id": range(2, 3000)}
+            {"not__test_id": list(range(2, 3000))}
         ).fetchone()["num"] == 2
         assert db.execute(
             "SELECT COUNT(*) AS num FROM test WHERE ?",
-            {"test_id": range(50, 3000)}
+            {"test_id": list(range(50, 3000))}
         ).fetchone()["num"] == 50
 
         assert db.execute(

@@ -103,7 +103,7 @@ class TestDb:
 
 
     def testUpdateJson(self, db):
-        f = StringIO.StringIO()
+        f = io.StringIO()
         f.write("""
         {
             "test": [

@@ -118,7 +118,7 @@ class TestDb:
 
     def testUnsafePattern(self, db):
         db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]}  # Only repetition of . supported
-        f = StringIO.StringIO()
+        f = io.StringIO()
         f.write("""
         {
             "test": [
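The `list(range(...))` wrapping matters because Python 3's `range` is a lazy sequence object rather than a list, and code that checks for a concrete list (as this query builder presumably does when expanding IN clauses) needs the explicit conversion:

r = range(2, 5)
print(isinstance(r, list))  # False: range is a lazy sequence object in Python 3
print(list(r))              # [2, 3, 4]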
@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io
 
 from util import Diff
 
@@ -31,19 +31,19 @@ class TestDiff:
         ) == [("-", 11)]
 
     def testDiffLimit(self):
-        old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix")
-        new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix")
+        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
+        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
         actions = Diff.diff(list(old_f), list(new_f), limit=1024)
         assert actions
 
-        old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix")
-        new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix"*1024)
+        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
+        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix"*1024)
         actions = Diff.diff(list(old_f), list(new_f), limit=1024)
         assert actions is False
 
     def testPatch(self):
-        old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix")
-        new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix")
+        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
+        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
         actions = Diff.diff(
             list(old_f),
             list(new_f)
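The switch from `StringIO.StringIO("...")` to `io.BytesIO(b"...")` here reflects Python 3's strict bytes/text split: these diff tests operate on raw file lines, which are bytes. A minimal illustration:

import io

buf = io.BytesIO(b"one\ntwo\nthree")
print(list(buf))  # [b'one\n', b'two\n', b'three'] -- iterating yields bytes lines
# io.StringIO("one\n") yields str lines instead; bytes and str never compare equal in Python 3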
@@ -1,4 +1,4 @@
-import cStringIO as StringIO
+import io
 
 import pytest
 import time

@@ -20,10 +20,10 @@ class TestFileRequest:
 
         # Normal request
         response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
-        assert "sign" in response["body"]
+        assert b"sign" in response["body"]
 
         response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")})
-        assert "sign" in response["body"]
+        assert b"sign" in response["body"]
 
         # Invalid file
         response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})

@@ -57,25 +57,25 @@ class TestFileRequest:
         connection = client.getConnection(file_server.ip, 1544)
         file_server.sites[site.address] = site
 
-        buff = StringIO.StringIO()
+        buff = io.BytesIO()
         response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
         assert "stream_bytes" in response
-        assert "sign" in buff.getvalue()
+        assert b"sign" in buff.getvalue()
 
         # Invalid file
-        buff = StringIO.StringIO()
+        buff = io.BytesIO()
         response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
         assert "File read error" in response["error"]
 
         # Location over size
-        buff = StringIO.StringIO()
+        buff = io.BytesIO()
         response = connection.request(
             "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
         )
         assert "File read error" in response["error"]
 
         # Stream from parent dir
-        buff = StringIO.StringIO()
+        buff = io.BytesIO()
         response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
         assert "File read error" in response["error"]
 
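Similarly, the response bodies arrive as `bytes`, so the substring assertions above need bytes literals; in Python 3 a `str` needle against a `bytes` haystack raises `TypeError` rather than quietly returning `False`:

body = b'{"sign": "..."}'
print(b"sign" in body)  # True
# "sign" in body would raise TypeError: a bytes-like object is required, not 'str'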
@@ -1,12 +1,12 @@
 import time
-from cStringIO import StringIO
+import io
 
 import pytest
 
 from File import FileServer
 from File import FileRequest
 from Crypt import CryptHash
-import Spy
+from . import Spy
 
 
 @pytest.mark.usefixtures("resetSettings")

@@ -43,17 +43,17 @@ class TestPeer:
 
         # Testing streamFile
         buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True)
-        assert "sign" in buff.getvalue()
+        assert b"sign" in buff.getvalue()
 
         # Testing getFile
         buff = peer_file_server.getFile(site_temp.address, "content.json")
-        assert "sign" in buff.getvalue()
+        assert b"sign" in buff.getvalue()
 
         connection.close()
         client.stop()
 
     def testHashfield(self, site):
-        sample_hash = site.content_manager.contents["content.json"]["files_optional"].values()[0]["sha512"]
+        sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"]
 
         site.storage.verifyFiles(quick_check=True)  # Find what optional files we have
 
@@ -65,7 +65,7 @@ class TestPeer:
         assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield
 
         # Add new hash
-        new_hash = CryptHash.sha512sum(StringIO("hello"))
+        new_hash = CryptHash.sha512sum(io.BytesIO(b"hello"))
         assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
         assert site.content_manager.hashfield.appendHash(new_hash)
         assert not site.content_manager.hashfield.appendHash(new_hash)  # Don't add second time
Some files were not shown because too many files have changed in this diff.