Merge branch 'master' of github.com:HelloZeroNet/ZeroNet

Barrabin Fc. ⚑ 2016-03-03 20:56:43 -03:00
commit 58ce98e0fc
13 changed files with 93 additions and 40 deletions

@@ -15,6 +15,7 @@ RUN pip install msgpack-python --upgrade
#Add Zeronet source
ADD . /root
VOLUME /root/data
#Slimming down Docker containers
RUN apt-get clean -y

@@ -113,7 +113,7 @@ It downloads the latest version of ZeroNet then starts it automatically.
* Open http://127.0.0.1:43110/ in your browser
### Docker
* `docker run -p 15441:15441 -p 43110:43110 nofish/zeronet`
* `docker run -d -v <local_data_folder>:/root/data -p 15441:15441 -p 43110:43110 nofish/zeronet`
* Open http://127.0.0.1:43110/ in your browser
## Current limitations

@@ -1,5 +1,5 @@
from Plugin import PluginManager
import re
import re, time
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
@@ -35,6 +35,8 @@ class UiWebsocketPlugin(object):
continue
for row in res:
row = dict(row)
if row["date_added"] > time.time() + 60:
continue # Feed item is in the future, skip it
row["site"] = address
row["feed_name"] = name
rows.append(row)
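The guard added above skips any feed row whose `date_added` lies more than a minute in the future, for example a post with a bogus or clock-skewed timestamp. A minimal standalone sketch of that filter; the row dicts below are made up to stand in for the plugin's SQL results:

```python
import time

# Hypothetical rows shaped like the dicts the plugin builds from its SQL results
rows = [
    {"title": "ok post", "date_added": time.time() - 3600},
    {"title": "future post", "date_added": time.time() + 7200},
]

# Keep only rows not dated more than 60 seconds into the future,
# mirroring the check added in the hunk above
visible = [row for row in rows if row["date_added"] <= time.time() + 60]
print([row["title"] for row in visible])  # ['ok post']
```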

@@ -492,12 +492,22 @@ class UiWebsocketPlugin(object):
def actionSiteSetOwned(self, to, owned):
permissions = self.getPermissions(to)
if "Multiuser" in PluginManager.plugin_manager.plugin_names:
self.cmd("notification", ["info", "This function is disabled on this proxy"])
return False
if "ADMIN" not in permissions:
return self.response(to, "You don't have permission to run this command")
self.site.settings["own"] = bool(owned)
def actionSiteSetAutodownloadoptional(self, to, owned):
permissions = self.getPermissions(to)
if "Multiuser" in PluginManager.plugin_manager.plugin_names:
self.cmd("notification", ["info", "This function is disabled on this proxy"])
return False
if "ADMIN" not in permissions:
return self.response(to, "You don't have permission to run this command")
self.site.settings["autodownloadoptional"] = bool(owned)

@@ -225,31 +225,31 @@ class Sidebar extends Class
), 300
# Site limit button
@tag.find("#button-sitelimit").on "click", =>
@tag.find("#button-sitelimit").off("click").on "click", =>
wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), =>
wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000
@updateHtmlTag()
return false
# Owned checkbox
@tag.find("#checkbox-owned").on "click", =>
@tag.find("#checkbox-owned").off("click").on "click", =>
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
# Owned checkbox
@tag.find("#checkbox-autodownloadoptional").on "click", =>
@tag.find("#checkbox-autodownloadoptional").off("click").on "click", =>
wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")]
# Change identity button
@tag.find("#button-identity").on "click", =>
@tag.find("#button-identity").off("click").on "click", =>
wrapper.ws.cmd "certSelect"
return false
# Owned checkbox
@tag.find("#checkbox-owned").on "click", =>
@tag.find("#checkbox-owned").off("click").on "click", =>
wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")]
# Save settings
@tag.find("#button-settings").on "click", =>
@tag.find("#button-settings").off("click").on "click", =>
wrapper.ws.cmd "fileGet", "content.json", (res) =>
data = JSON.parse(res)
data["title"] = $("#settings-title").val()
@@ -264,7 +264,7 @@ class Sidebar extends Class
return false
# Sign content.json
@tag.find("#button-sign").on "click", =>
@tag.find("#button-sign").off("click").on "click", =>
inner_path = @tag.find("#select-contents").val()
if wrapper.site_info.privatekey
@@ -282,7 +282,7 @@ class Sidebar extends Class
return false
# Publish content.json
@tag.find("#button-publish").on "click", =>
@tag.find("#button-publish").off("click").on "click", =>
inner_path = @tag.find("#select-contents").val()
@tag.find("#button-publish").addClass "loading"
wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}, =>

@@ -404,7 +404,7 @@ window.initScrollable = function () {
}), 300);
};
})(this));
this.tag.find("#button-sitelimit").on("click", (function(_this) {
this.tag.find("#button-sitelimit").off("click").on("click", (function(_this) {
return function() {
wrapper.ws.cmd("siteSetLimit", $("#input-sitelimit").val(), function() {
wrapper.notifications.add("done-sitelimit", "done", "Site storage limit modified!", 5000);
@@ -413,28 +413,28 @@ window.initScrollable = function () {
return false;
};
})(this));
this.tag.find("#checkbox-owned").on("click", (function(_this) {
this.tag.find("#checkbox-owned").off("click").on("click", (function(_this) {
return function() {
return wrapper.ws.cmd("siteSetOwned", [_this.tag.find("#checkbox-owned").is(":checked")]);
};
})(this));
this.tag.find("#checkbox-autodownloadoptional").on("click", (function(_this) {
this.tag.find("#checkbox-autodownloadoptional").off("click").on("click", (function(_this) {
return function() {
return wrapper.ws.cmd("siteSetAutodownloadoptional", [_this.tag.find("#checkbox-autodownloadoptional").is(":checked")]);
};
})(this));
this.tag.find("#button-identity").on("click", (function(_this) {
this.tag.find("#button-identity").off("click").on("click", (function(_this) {
return function() {
wrapper.ws.cmd("certSelect");
return false;
};
})(this));
this.tag.find("#checkbox-owned").on("click", (function(_this) {
this.tag.find("#checkbox-owned").off("click").on("click", (function(_this) {
return function() {
return wrapper.ws.cmd("siteSetOwned", [_this.tag.find("#checkbox-owned").is(":checked")]);
};
})(this));
this.tag.find("#button-settings").on("click", (function(_this) {
this.tag.find("#button-settings").off("click").on("click", (function(_this) {
return function() {
wrapper.ws.cmd("fileGet", "content.json", function(res) {
var data, json_raw;
@@ -454,7 +454,7 @@ window.initScrollable = function () {
return false;
};
})(this));
this.tag.find("#button-sign").on("click", (function(_this) {
this.tag.find("#button-sign").off("click").on("click", (function(_this) {
return function() {
var inner_path;
inner_path = _this.tag.find("#select-contents").val();
@@ -474,7 +474,7 @@ window.initScrollable = function () {
return false;
};
})(this));
this.tag.find("#button-publish").on("click", (function(_this) {
this.tag.find("#button-publish").off("click").on("click", (function(_this) {
return function() {
var inner_path;
inner_path = _this.tag.find("#select-contents").val();

@@ -171,6 +171,9 @@ class UiWebsocketPlugin(object):
self.actionUserLoginForm(0)
# Disable not Multiuser safe functions
def actionSiteDelete(self, to, *args, **kwargs):
self.cmd("notification", ["info", "This function is disabled on this proxy"])
def actionConfigSet(self, to, *args, **kwargs):
self.cmd("notification", ["info", "This function is disabled on this proxy"])

@@ -8,7 +8,7 @@ class Config(object):
def __init__(self, argv):
self.version = "0.3.6"
self.rev = 909
self.rev = 932
self.argv = argv
self.action = None
self.config_file = "zeronet.conf"
@@ -133,6 +133,7 @@ class Config(object):
self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D',
metavar='address')
self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size')
self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=15, metavar='connected_limit')
self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
self.parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port')
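The new `--connected_limit` option is declared like the other flags above; as written it only sets `default=15`, so a value supplied on the command line would arrive as a string. A small stand-alone sketch of declaring and reading such a flag (the `type=int` here is added for illustration and is not part of the commit):

```python
import argparse

# Stand-alone stand-in for the Config class's argument parser
parser = argparse.ArgumentParser()
parser.add_argument('--connected_limit', help='Max connected peers per site',
                    default=15, type=int, metavar='connected_limit')

args = parser.parse_args(['--connected_limit', '20'])
print(args.connected_limit)  # 20
```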

@@ -107,6 +107,9 @@ class Peer(object):
self.onConnectionError()
return None # Connection failed
if config.debug:
self.log("Send request: %s %s" % (params.get("site", ""), cmd))
for retry in range(1, 4): # Retry 3 times
try:
res = self.connection.request(cmd, params, stream_to)
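The Peer hunk adds a debug log line just ahead of the existing three-attempt retry loop. A generic illustration of that log-then-retry pattern; `fake_request` below is a stand-in that fails twice and then succeeds, not ZeroNet's `connection.request`:

```python
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("peer-sketch")

attempts = {"n": 0}


def fake_request(cmd, params):
    # Fake transport: fail twice, then succeed
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise IOError("connection reset")
    return {"ok": True}


def request(cmd, params):
    log.debug("Send request: %s %s", params.get("site", ""), cmd)
    for retry in range(1, 4):  # Retry 3 times, like the loop above
        try:
            return fake_request(cmd, params)
        except IOError as err:
            log.debug("Retry %s failed: %s", retry, err)
    return None  # All retries failed


print(request("getFile", {"site": "example-site", "inner_path": "content.json"}))
```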

@@ -80,7 +80,7 @@ class Site(object):
if self.address in sites_settings:
self.settings = sites_settings[self.address]
else:
self.settings = {"own": False, "serving": True, "permissions": []} # Default
self.settings = {"own": False, "serving": True, "permissions": [], "added": int(time.time())} # Default
# Add admin permissions to homepage
if self.address == config.homepage and "ADMIN" not in self.settings["permissions"]:
@@ -162,6 +162,9 @@ class Site(object):
gevent.joinall(file_threads)
self.log.debug("%s: DownloadContent ended in %.2fs" % (inner_path, time.time() - s))
if not self.worker_manager.tasks:
self.onComplete() # No more task trigger site complete
return True
# Return bad files with less than 3 retry
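The added lines above call `onComplete()` only when the worker manager has no tasks left, so a finished content.json download with file tasks still pending no longer signals site completion early. A tiny sketch of that guard with a placeholder task list and callback:

```python
class DownloadSketch:
    def __init__(self):
        self.tasks = []          # stand-in for worker_manager.tasks
        self.completed = False

    def onComplete(self):
        self.completed = True
        print("site complete")

    def downloadContentFinished(self):
        # Only signal completion when nothing is left to download,
        # mirroring the check added in the hunk above
        if not self.tasks:
            self.onComplete()
        return True


site = DownloadSketch()
site.tasks = ["optional-file.bin"]
site.downloadContentFinished()   # still pending, nothing printed
site.tasks = []
site.downloadContentFinished()   # prints "site complete"
```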
@@ -342,7 +345,7 @@ class Site(object):
if result and "ok" in result:
published.append(peer)
self.log.info("[OK] %s: %s" % (peer.key, result["ok"]))
self.log.info("[OK] %s: %s %s/%s" % (peer.key, result["ok"], len(published), limit))
else:
if result == {"exception": "Timeout"}:
peer.onConnectionError()
@@ -350,21 +353,29 @@ class Site(object):
# Update content.json on peers
@util.Noparallel()
def publish(self, limit=5, inner_path="content.json"):
def publish(self, limit="default", inner_path="content.json"):
published = [] # Successfully published (Peer)
publishers = [] # Publisher threads
if not self.peers:
self.announce()
threads = 5
if limit == "default":
if len(self.peers) > 50:
limit = 3
threads = 3
else:
limit = 5
connected_peers = self.getConnectedPeers()
if len(connected_peers) > limit * 2: # Publish to already connected peers if possible
peers = connected_peers
else:
peers = self.peers.values()
self.log.info("Publishing to %s/%s peers (connected: %s)..." % (
min(len(self.peers), limit), len(self.peers), len(connected_peers)
self.log.info("Publishing %s to %s/%s peers (connected: %s)..." % (
inner_path, limit, len(self.peers), len(connected_peers)
))
if not peers:
@@ -372,7 +383,7 @@ class Site(object):
random.shuffle(peers)
event_done = gevent.event.AsyncResult()
for i in range(min(len(self.peers), limit, 5)): # Max 5 thread
for i in range(min(len(self.peers), limit, threads)):
publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, event_done)
publishers.append(publisher)
@@ -389,12 +400,12 @@ class Site(object):
] # Every connected passive peer that we not published to
self.log.info(
"Successfuly published to %s peers, publishing to %s more passive peers" %
(len(published), len(passive_peers))
)
"Successfuly %s published to %s peers, publishing to %s more passive peers" % (
inner_path, len(published), len(passive_peers)
))
for peer in passive_peers:
gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=10)
gevent.spawn(self.publisher, inner_path, passive_peers, published, limit=limit+3)
# Send my hashfield to every connected peer if changed
gevent.spawn(self.sendMyHashfield, 100)
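publish() now takes `limit="default"` instead of a hard-coded 5: sites with more than 50 known peers publish to only 3 peers using 3 worker threads, everything else keeps the old limit of 5, and an explicit numeric limit passes through unchanged. A small sketch of just that limit selection; the peer counts are made up:

```python
def pick_publish_limits(peer_count, limit="default"):
    # Mirrors the branching added to publish() above
    threads = 5
    if limit == "default":
        if peer_count > 50:
            limit = 3
            threads = 3
        else:
            limit = 5
    return limit, threads


print(pick_publish_limits(10))             # (5, 5)
print(pick_publish_limits(120))            # (3, 3)
print(pick_publish_limits(120, limit=10))  # (10, 5): explicit limit wins
```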
@@ -762,11 +773,13 @@ class Site(object):
def getConnectedPeers(self):
return [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]
# Cleanup probably dead peers
# Cleanup probably dead peers and close connection if too much
def cleanupPeers(self):
peers = self.peers.values()
if len(peers) < 20:
return False
# Cleanup old peers
removed = 0
for peer in peers:
@@ -783,6 +796,30 @@ class Site(object):
if removed:
self.log.debug("Cleanup peers result: Removed %s, left: %s" % (removed, len(self.peers)))
# Close peers if too much
closed = 0
connected_peers = self.getConnectedPeers()
need_to_close = len(connected_peers) - config.connected_limit
# First try to remove active peers
if need_to_close > 0:
for peer in connected_peers:
if not peer.key.endswith(":0"): # Connectable peer
peer.remove()
closed += 1
if closed >= need_to_close:
break
# Also remove passive peers if still more than we need
if closed < need_to_close:
for peer in connected_peers:
peer.remove()
closed += 1
if closed >= need_to_close:
break
if need_to_close > 0:
self.log.debug("Connected: %s, Need to close: %s, Closed: %s" % (len(connected_peers), need_to_close, closed))
# Send hashfield to peers
def sendMyHashfield(self, limit=3):
if not self.content_manager.hashfield: # No optional files
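The cleanupPeers() additions cap open connections at `config.connected_limit`, dropping connectable peers first and falling back to the remaining passive ones only if that is still not enough. A self-contained sketch of that selection order; the PeerSketch class and the peer keys are invented for illustration:

```python
CONNECTED_LIMIT = 15  # stand-in for config.connected_limit


class PeerSketch:
    def __init__(self, key):
        self.key = key        # "ip:port"; port 0 marks a passive (not connectable) peer
        self.connected = True

    def remove(self):
        self.connected = False


def close_excess_peers(connected_peers, limit=CONNECTED_LIMIT):
    # Same selection order as the hunk above: connectable peers first,
    # then the remaining peers if we are still over the limit
    need_to_close = len(connected_peers) - limit
    closed = 0
    if need_to_close > 0:
        for peer in connected_peers:
            if closed >= need_to_close:
                break
            if not peer.key.endswith(":0"):  # Connectable peer
                peer.remove()
                closed += 1
        if closed < need_to_close:
            for peer in connected_peers:
                if closed >= need_to_close:
                    break
                if peer.connected:  # Skip peers already closed in the first pass
                    peer.remove()
                    closed += 1
    return closed


peers = [PeerSketch("10.0.0.%s:%s" % (i, 0 if i % 2 else 15441)) for i in range(20)]
print(close_excess_peers(peers))  # 5 connections closed, 15 left open
```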

@@ -151,7 +151,7 @@ class TorManager:
conn.connect((self.ip, self.port))
res_protocol = self.send("PROTOCOLINFO", conn)
version = re.search('Tor="([0-9\.]+)"', res_protocol).group(1)
version = re.search('Tor="([0-9\.]+)', res_protocol).group(1)
# Version 0.2.7.5 required because ADD_ONION support
assert int(version.replace(".", "0")) >= 20705, "Tor version >=0.2.7.5 required"
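Dropping the closing quote from the pattern makes the version match tolerant of replies where extra text, such as a build suffix, follows the version number inside the quotes. A quick comparison of the two patterns against a made-up PROTOCOLINFO reply:

```python
import re

# Made-up PROTOCOLINFO reply with build info after the version number
res_protocol = 'VERSION Tor="0.2.7.6 (git-605ae665009853bd)"'

old = re.search(r'Tor="([0-9\.]+)"', res_protocol)  # needs a quote right after the digits
new = re.search(r'Tor="([0-9\.]+)', res_protocol)   # stops at the first non-digit/dot

print(old)            # None: the build suffix breaks the old pattern
print(new.group(1))   # 0.2.7.6
```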
@@ -170,7 +170,7 @@ class TorManager:
except Exception, err:
self.conn = None
self.status = "Error (%s)" % err
self.log.error("Tor controller connect error: %s" % err)
self.log.error("Tor controller connect error: %s" % Debug.formatException(err))
self.enabled = False
return self.conn

@@ -495,7 +495,7 @@ class UiRequest(object):
# Send file not found error
def error404(self, path=""):
self.sendHeader(404)
return self.formatError("Not Found", path.encode("utf8"), details=False)
return self.formatError("Not Found", cgi.escape(path.encode("utf8")), details=False)
# Internal server error
def error500(self, message=":("):
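Escaping the requested path before echoing it into the 404 body stops markup in the URL from being reflected into the error page. A minimal before/after of the same idea; the commit uses Python 2's `cgi.escape`, while the sketch below uses `html.escape`, its Python 3 counterpart:

```python
from html import escape  # the commit itself calls cgi.escape (Python 2)

path = '/media/<script>alert("xss")</script>'

unescaped = "Not Found: %s" % path        # old behaviour: markup lands in the page as-is
escaped = "Not Found: %s" % escape(path)  # new behaviour: markup is neutralised

print(unescaped)
print(escaped)  # Not Found: /media/&lt;script&gt;alert(&quot;xss&quot;)&lt;/script&gt;
```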

@@ -322,17 +322,13 @@ class Actions(object):
logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
peer = Peer(peer_ip, peer_port)
for i in range(5):
s = time.time()
print peer.ping(),
print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt)
print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt)
time.sleep(1)
peer.remove()
print "Reconnect test..."
peer = Peer(peer_ip, peer_port)
for i in range(5):
s = time.time()
print peer.ping(),
print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt)
print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt)
time.sleep(1)
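The peerPing action no longer times the call itself; it formats `peer.ping()` with `%.3fs`, which only works if ping() now returns the measured round-trip time. A sketch of a ping helper built that way, in Python 3 syntax, with a fake transport standing in for the real connection:

```python
import time


def fake_transport_ping():
    time.sleep(0.05)  # pretend network round trip
    return "pong"


def ping():
    # Return the measured round-trip time instead of the raw response,
    # so callers can print it directly as in the hunk above
    s = time.time()
    fake_transport_ping()
    return time.time() - s


for i in range(3):
    print("Response time: %.3fs" % ping())
    time.sleep(1)
```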