Version 0.3.5, Rev830, Full Tor mode support with hidden services, Onion stats in Sidebar, GeoDB download fix using Tor, Gray out disabled sites in Stats page, Tor hidden service status in Stats page, Benchmark sha256, Skyts tracker out, explodie in, 2 new trackers using ZeroNet protocol, Keep SSL cert option between restarts, SSL certificate pinning support for connections, Site lock support for connections, Certificate-pinned connections using implicit SSL, Flood protection whitelist support, Foreign key support for DB layer, NOT support for SQL query helper, Zero-length file get bugfix, Pex onion address support, Faster port testing, Faster UPnP port opening, Need connections more often on owned sites, Delay ZeroHello startup message if port check or Tor manager is not ready yet, Use lockfiles to avoid double start, Save original socket on proxy monkey patching to allow connecting to localhost directly, Handle atomic write errors, Broken gevent HTTPS workaround helper, RSA crypt functions, Plugin to bootstrap using ZeroNet protocol

HelloZeroNet 2016-01-05 00:20:52 +01:00
parent c9578e9037
commit e9d2cdfd37
99 changed files with 9476 additions and 267 deletions

src/File/FileRequest.py  View file

@@ -11,11 +11,13 @@ from Config import config
from util import RateLimit
from util import StreamingMsgpack
from util import helper
from Plugin import PluginManager
FILE_BUFF = 1024 * 512
# Request from me
# Incoming requests
@PluginManager.acceptPlugins
class FileRequest(object):
__slots__ = ("server", "connection", "req_id", "sites", "log", "responded")
@@ -50,36 +52,25 @@ class FileRequest(object):
# Route file requests
def route(self, cmd, req_id, params):
self.req_id = req_id
# Don't allow other sites than locked
if "site" in params and self.connection.site_lock and self.connection.site_lock not in (params["site"], "global"):
self.response({"error": "Invalid site"})
self.log.error("Site lock violation: %s != %s" % (self.connection.site_lock != params["site"]))
return False
if cmd == "getFile":
self.actionGetFile(params)
elif cmd == "streamFile":
self.actionStreamFile(params)
elif cmd == "update":
if cmd == "update":
event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
if not RateLimit.isAllowed(event): # There was already an update for this file in the last 10 second
self.response({"ok": "File update queued"})
# If called more than once within 10 sec only keep the last update
RateLimit.callAsync(event, 10, self.actionUpdate, params)
elif cmd == "pex":
self.actionPex(params)
elif cmd == "listModified":
self.actionListModified(params)
elif cmd == "getHashfield":
self.actionGetHashfield(params)
elif cmd == "findHashIds":
self.actionFindHashIds(params)
elif cmd == "setHashfield":
self.actionSetHashfield(params)
elif cmd == "siteReload":
self.actionSiteReload(params)
elif cmd == "sitePublish":
self.actionSitePublish(params)
elif cmd == "ping":
self.actionPing()
else:
self.actionUnknown(cmd, params)
func_name = "action" + cmd[0].upper() + cmd[1:]
func = getattr(self, func_name, None)
if func:
func(params)
else:
self.actionUnknown(cmd, params)
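
The refactor above replaces the hard-coded if/elif command table with dynamic dispatch: the first letter of the command is upper-cased, prefixed with "action", and looked up with getattr, so new handlers (including ones injected by plugins via @PluginManager.acceptPlugins) are routed without touching route(). A minimal standalone sketch of the same pattern; DemoRequest and its handlers are illustrative, not ZeroNet code:

    class DemoRequest(object):
        # Maps "ping" -> actionPing, "getFile" -> actionGetFile, etc.
        def route(self, cmd, params):
            func_name = "action" + cmd[0].upper() + cmd[1:]
            func = getattr(self, func_name, None)  # None if there is no such handler
            if func:
                return func(params)
            return self.actionUnknown(cmd, params)

        def actionPing(self, params):
            return "Pong!"

        def actionUnknown(self, cmd, params):
            return {"error": "Unknown command: %s" % cmd}


    print(DemoRequest().route("ping", {}))    # Pong!
    print(DemoRequest().route("bogus", {}))   # {'error': 'Unknown command: bogus'}
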
# Update a site file request
def actionUpdate(self, params):
@@ -117,7 +108,10 @@ class FileRequest(object):
self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})
elif valid is None: # Not changed
peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer
if params.get("peer"):
peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer
else:
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer
if peer:
self.log.debug(
"Same version, adding new peer for locked files: %s, tasks: %s" %
@@ -148,7 +142,7 @@ class FileRequest(object):
file.seek(params["location"])
file.read_bytes = FILE_BUFF
file_size = os.fstat(file.fileno()).st_size
assert params["location"] < file_size
assert params["location"] <= file_size, "Bad file location"
back = {
"body": file,
@@ -190,7 +184,7 @@ class FileRequest(object):
file.seek(params["location"])
file_size = os.fstat(file.fileno()).st_size
stream_bytes = min(FILE_BUFF, file_size - params["location"])
assert stream_bytes >= 0
assert stream_bytes >= 0, "Stream bytes out of range"
back = {
"size": file_size,
@@ -236,18 +230,36 @@ class FileRequest(object):
connected_peer.connect(self.connection) # Assign current connection to peer
# Add sent peers to site
for packed_address in params["peers"]:
for packed_address in params.get("peers", []):
address = helper.unpackAddress(packed_address)
got_peer_keys.append("%s:%s" % address)
if site.addPeer(*address):
added += 1
# Add sent onion peers to site
for packed_address in params.get("peers_onion", []):
address = helper.unpackOnionAddress(packed_address)
got_peer_keys.append("%s:%s" % address)
if site.addPeer(*address):
added += 1
# Send back peers that are not in the sent list and are connectable (not port 0)
packed_peers = [peer.packMyAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
packed_peers = helper.packPeers(site.getConnectablePeers(params["need"], got_peer_keys))
if added:
site.worker_manager.onPeers()
self.log.debug("Added %s peers to %s using pex, sending back %s" % (added, site, len(packed_peers)))
self.response({"peers": packed_peers})
self.log.debug(
"Added %s peers to %s using pex, sending back %s" %
(added, site, len(packed_peers["ip4"]) + len(packed_peers["onion"]))
)
back = {}
if packed_peers["ip4"]:
back["peers"] = packed_peers["ip4"]
if packed_peers["onion"]:
back["peers_onion"] = packed_peers["onion"]
self.response(back)
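
actionPex now accepts a separate peers_onion list and answers with distinct "peers" and "peers_onion" keys, relying on helper.unpackOnionAddress and helper.packPeers added elsewhere in this commit. The exact wire format is not visible in this hunk; the sketch below assumes a compact encoding (4-byte IPv4 plus 2-byte port, and a base32-decoded onion name plus 2-byte port), so treat it as an illustration rather than the canonical util/helper.py:

    import base64
    import socket
    import struct

    def packAddress(ip, port):
        # 4-byte packed IPv4 followed by a 2-byte port
        return socket.inet_aton(ip) + struct.pack("H", port)

    def unpackAddress(packed):
        return socket.inet_ntoa(packed[:4]), struct.unpack("H", packed[4:6])[0]

    def packOnionAddress(onion, port):
        # Base32-decode the .onion name, then append the 2-byte port
        onion = onion.replace(".onion", "")
        return base64.b32decode(onion.upper()) + struct.pack("H", port)

    def unpackOnionAddress(packed):
        onion = base64.b32encode(packed[:-2]).lower().decode() + ".onion"
        return onion, struct.unpack("H", packed[-2:])[0]

    print(unpackAddress(packAddress("10.0.0.1", 15441)))                     # ('10.0.0.1', 15441)
    print(unpackOnionAddress(packOnionAddress("abcdefghijklmnop.onion", 1)))  # hypothetical onion name
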
# Get modified content.json files since
def actionListModified(self, params):
@@ -316,7 +328,7 @@ class FileRequest(object):
self.response({"error": "Unknown site"})
return False
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection) # Add or get peer
if not peer.connection:
peer.connect(self.connection)
peer.hashfield.replaceFromString(params["hashfield_raw"])
@@ -343,7 +355,7 @@ class FileRequest(object):
self.response({"ok": "Successfuly published to %s peers" % num})
# Send a simple Pong! answer
def actionPing(self):
def actionPing(self, params):
self.response("Pong!")
# Unknown command

src/File/FileServer.py  View file

@@ -49,9 +49,13 @@ class FileServer(ConnectionServer):
if self.port_opened:
return True # Port already opened
if check: # Check first if it's already opened
if self.testOpenport(port)["result"] is True:
time.sleep(1) # Wait for port open
if self.testOpenport(port, use_alternative=False)["result"] is True:
return True # Port already opened
if config.tor == "always": # Port opening won't work in Tor mode
return False
self.log.info("Trying to open port using UpnpPunch...")
try:
upnp_punch = UpnpPunch.open_port(self.port, 'ZeroNet')
@@ -67,15 +71,14 @@ class FileServer(ConnectionServer):
return False
# Test if the port is open
def testOpenport(self, port=None):
time.sleep(1) # Wait for port open
def testOpenport(self, port=None, use_alternative=True):
if not port:
port = self.port
back = self.testOpenportPortchecker(port)
if back["result"] is True: # Successful port check
return back
else: # Alternative port checker
if back["result"] is not True and use_alternative: # If no success try alternative checker
return self.testOpenportCanyouseeme(port)
else:
return back
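
testOpenport() now takes a use_alternative flag: portchecker.co is queried first and canyouseeme.org is only consulted as a fallback, which is what makes the startup port check faster when UPnP has just opened the port. A generic sketch of that primary/fallback pattern; the two checker callables stand in for the real HTTP checks and are not ZeroNet functions:

    def test_open_port(port, check_primary, check_alternative, use_alternative=True):
        back = check_primary(port)
        if back["result"] is not True and use_alternative:  # no success: try the other service
            return check_alternative(port)
        return back

    result = test_open_port(
        15441,
        check_primary=lambda port: {"result": False},                       # pretend the first check failed
        check_alternative=lambda port: {"result": True, "ip": "198.51.100.7"},
    )
    print(result)   # {'result': True, 'ip': '198.51.100.7'}
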
def testOpenportPortchecker(self, port=None):
self.log.info("Checking port %s using portchecker.co..." % port)
@@ -151,16 +154,24 @@ class FileServer(ConnectionServer):
# Check site file integrity
def checkSite(self, site):
if site.settings["serving"]:
site.announce() # Announce site to tracker
site.announce(mode="startup") # Announce site to tracker
site.update() # Update site's content.json and download changed files
site.sendMyHashfield()
site.updateHashfield()
if self.port_opened is False: # In passive mode keep 5 active peer connections to get the updates
site.needConnections()
# Check sites integrity
def checkSites(self):
if self.port_opened is None: # Test and open port if not tested yet
if len(self.sites) <= 2: # Faster announce on first startup
for address, site in self.sites.items():
gevent.spawn(self.checkSite, site)
self.openport()
if not self.port_opened:
self.tor_manager.startOnions()
self.log.debug("Checking sites integrity..")
for address, site in self.sites.items(): # Check sites integrity
gevent.spawn(self.checkSite, site) # Check in new thread
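
checkSites() now fast-paths first startup: with at most two sites it announces and checks them immediately, each in its own greenlet, runs the port test afterwards, and starts Tor onion services if the port stays closed. A small sketch of the per-site greenlet pattern; check_site is a stub for FileServer.checkSite(), and the real code fires and forgets instead of joining:

    import gevent

    def check_site(address):
        # Stub for checkSite(): announce, update, hashfield sync
        print("checking %s" % address)

    sites = {"1SiteAddressA": None, "1SiteAddressB": None}   # hypothetical site addresses
    greenlets = [gevent.spawn(check_site, address) for address in sites]
    gevent.joinall(greenlets)   # only here to make the sketch deterministic
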
@@ -170,36 +181,30 @@ class FileServer(ConnectionServer):
# Announce sites every 20 min
def announceSites(self):
import gc
first_announce = True # First start
while 1:
# Sites healthcare every 20 min
# Sites health care every 20 min
if config.trackers_file:
config.loadTrackersFile()
for address, site in self.sites.items():
if site.settings["serving"]:
if first_announce: # Announce to all trackers on startup
site.announce()
else: # If not first run only use PEX
site.announcePex()
if not site.settings["serving"]:
continue
if site.peers:
site.announcePex()
# Retry failed files
if site.bad_files:
site.retryBadFiles()
# Retry failed files
if site.bad_files:
site.retryBadFiles()
site.cleanupPeers()
site.cleanupPeers()
# In passive mode keep 5 active peer connections to get the updates
if self.port_opened is False:
site.needConnections()
if first_announce: # Send my optional files to peers
site.sendMyHashfield()
site.updateHashfield()
# In passive mode keep 5 active peer connections to get the updates
if self.port_opened is False:
site.needConnections()
time.sleep(2) # Prevent too quick request
site = None
gc.collect() # Implicit grabage collection
gc.collect() # Implicit garbage collection
# Find new peers
for tracker_i in range(len(config.trackers)):
@@ -207,13 +212,15 @@ class FileServer(ConnectionServer):
if config.trackers_file:
config.loadTrackersFile()
for address, site in self.sites.items():
site.announce(num=1, pex=False)
if not site.settings["serving"]:
continue
site.announce(mode="update", pex=False)
if site.settings["own"]: # Check connections more frequently on own sites to speed-up first connections
site.needConnections()
site.sendMyHashfield(3)
site.updateHashfield(1)
time.sleep(2)
first_announce = False
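
The per-tracker announce loop now skips non-serving sites, re-announces with mode="update", and calls needConnections() on owned sites every pass so a freshly published update finds peers quickly (the "Need connections more often on owned sites" changelog item); hashfield exchange is throttled to a few peers per pass. A condensed, runnable sketch of one such pass; FakeSite is a stand-in for ZeroNet's Site class, not the real API:

    import time

    class FakeSite(object):
        # Just enough of Site for the sketch
        def __init__(self, address, own=False, serving=True):
            self.address = address
            self.settings = {"serving": serving, "own": own}
        def announce(self, mode, pex):
            print("%s: announce mode=%s pex=%s" % (self.address, mode, pex))
        def needConnections(self):
            print("%s: topping up peer connections" % self.address)
        def sendMyHashfield(self, limit):
            print("%s: send hashfield to up to %s peers" % (self.address, limit))
        def updateHashfield(self, limit):
            print("%s: request hashfield from up to %s peers" % (self.address, limit))

    def tracker_pass(sites):
        for site in sites:
            if not site.settings["serving"]:
                continue                          # disabled sites are skipped entirely
            site.announce(mode="update", pex=False)
            if site.settings["own"]:              # owned sites reconnect more eagerly
                site.needConnections()
            site.sendMyHashfield(3)
            site.updateHashfield(1)
            time.sleep(0.1)                       # 2 s in the real loop, to spread requests out

    tracker_pass([FakeSite("1OwnedSite", own=True), FakeSite("1OtherSite")])
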
# Detects if the computer is back from sleep (wakeup)
def wakeupWatcher(self):
last_time = time.time()