version 0.2.8, Namecoin domains using internal resolver site, --disable_zeromq option to skip backward compatibility layer and save some memory, connectionserver firstchar error fixes, missing unpacker crash fix, sitemanager class to allow extensions, add loaded plugin list to websocket api, faster content publishing, mark updating file as bad, remove coppersurfer tracker add eddie4, internal server error with error displaying, allow site domains in UiRequest, better progress bar, wait for siteinfo before using localstorage, csslater hide only if opacity is 0

This commit is contained in:
HelloZeroNet 2015-03-30 23:44:29 +02:00
parent 78f97dcbe8
commit b122f47100
26 changed files with 673 additions and 124 deletions

View file

@ -181,10 +181,12 @@ class Site:
# Publish worker
def publisher(self, inner_path, peers, published, limit):
def publisher(self, inner_path, peers, published, limit, event_done):
timeout = 5+int(self.storage.getSize(inner_path)/1024) # Timeout: 5sec + size in kb
while 1:
if not peers or len(published) >= limit: break # All peers done, or published engouht
if not peers or len(published) >= limit:
event_done.set(True)
break # All peers done, or published engouht
peer = peers.pop(0)
result = {"exception": "Timeout"}
@ -216,11 +218,14 @@ class Site:
peers = self.peers.values()
random.shuffle(peers)
for i in range(limit):
publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit)
event_done = gevent.event.AsyncResult()
for i in range(min(1+len(self.peers), limit)/2):
publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, event_done)
publishers.append(publisher)
gevent.joinall(publishers) # Wait for all publishers
event_done.get() # Wait for done
if len(published) < min(len(self.peers), limit): time.sleep(0.2) # If less than we need sleep a bit
if len(published) == 0: gevent.join(publishers) # No successful publish, wait for all publisher
self.log.info("Successfuly published to %s peers" % len(published))
return len(published)
@ -233,6 +238,7 @@ class Site:
elif self.settings["serving"] == False: # Site not serving
return False
else: # Wait until file downloaded
self.bad_files[inner_path] = True # Mark as bad file
if not self.content_manager.contents.get("content.json"): # No content.json, download it first!
self.log.debug("Need content.json first")
self.announce()
@ -348,7 +354,7 @@ class Site:
def fileDone(self, inner_path):
# File downloaded, remove it from bad files
if inner_path in self.bad_files:
self.log.info("Bad file solved: %s" % inner_path)
self.log.debug("Bad file solved: %s" % inner_path)
del(self.bad_files[inner_path])
# Update content.json last downlad time

View file

@ -1,75 +1,90 @@
import json, logging, time, re, os
import gevent
from Plugin import PluginManager
# BitTorrent trackers used for peer discovery, as (protocol, host, port) tuples.
# Per this commit: coppersurfer removed, eddie4 added; disabled entries are
# kept commented out as backups.
TRACKERS = [
    ("udp", "open.demonii.com", 1337),
    ("udp", "sugoi.pomf.se", 2710),
    #("udp", "tracker.coppersurfer.tk", 80),
    ("udp", "tracker.leechers-paradise.org", 6969),
    ("udp", "9.rarbg.com", 2710),
    ("udp", "www.eddie4.nl", 6969),
    #("udp", "trackr.sytes.net", 80), #Backup trackers
    #("udp", "tracker4.piratux.com", 6969)
]
# Load all sites from data/sites.json
# NOTE(review): legacy module-level loader operating on the global `sites`
# dict; superseded by SiteManager.load in this commit.
def load():
# Deferred import: Site imports this module back (circular dependency)
from Site import Site
global sites
if not sites: sites = {}
address_found = []
added = 0
# Register Site objects for new addresses listed in the json file
for address in json.load(open("data/sites.json")):
if address not in sites and os.path.isfile("data/%s/content.json" % address):
sites[address] = Site(address)
added += 1
address_found.append(address)
# Drop registry entries whose address no longer appears in the json file
for address in sites.keys():
if address not in address_found:
del(sites[address])
logging.debug("Removed site: %s" % address)
@PluginManager.acceptPlugins
class SiteManager(object):
    """Registry of known sites; extendable by plugins via acceptPlugins."""

    def __init__(self):
        # address -> Site mapping; stays None until load() populates it lazily
        self.sites = None
if added: logging.debug("SiteManager added %s sites" % added)
# Load all sites from data/sites.json
def load(self):
    """(Re)build self.sites from data/sites.json.

    Creates a Site object for every listed address whose content.json
    already exists on disk, and prunes registry entries whose address is
    no longer listed in the json file.
    """
    from Site import Site  # deferred import: Site imports this module back
    if not self.sites: self.sites = {}
    address_found = []
    added = 0
    # Load new addresses — use `with` so the file handle is closed, not leaked
    with open("data/sites.json") as sites_file:
        addresses = json.load(sites_file)
    for address in addresses:
        # Only register sites that already have their content.json on disk
        if address not in self.sites and os.path.isfile("data/%s/content.json" % address):
            self.sites[address] = Site(address)
            added += 1
        address_found.append(address)
    # Remove deleted addresses — iterate a snapshot of the keys, because the
    # dict is mutated inside the loop (RuntimeError on Python 3 otherwise)
    for address in list(self.sites.keys()):
        if address not in address_found:
            del(self.sites[address])
            logging.debug("Removed site: %s" % address)
    if added: logging.debug("SiteManager added %s sites" % added)
# Checks whether the string looks like a valid site address
def isAddress(address):
    """Return a match object (truthy) for a 26-35 char alphanumeric address, else None."""
    address_pattern = "^[A-Za-z0-9]{26,35}$"
    return re.match(address_pattern, address)
# Checks whether the given string is a plausible site address
def isAddress(self, address):
    """Return a truthy match object when address is 26-35 alphanumeric chars."""
    address_re = re.compile("^[A-Za-z0-9]{26,35}$")
    return address_re.match(address)
# Return site and start download site files
def need(address, all_file=True):
    """Look up (registering it first if unknown) the site for *address*.

    Returns False when *address* is not a valid site address; otherwise
    returns the Site object, optionally starting the full download.
    """
    from Site import Site  # deferred: Site imports this module back
    new = False
    if address not in sites:  # Site not registered yet
        if not isAddress(address):
            return False  # Not a valid address
        logging.debug("Added new site: %s" % address)
        sites[address] = Site(address)
        if not sites[address].settings["serving"]:  # Maybe it was deleted before
            sites[address].settings["serving"] = True
            sites[address].saveSettings()
        new = True
    site = sites[address]
    if all_file:
        site.download()  # Start fetching the site's files
    return site
# Return: Site object or None if not found
def get(self, address):
    """Look up a site without creating it; loads the registry on first use."""
    if self.sites is None:  # Lazy: registry is only built on first access
        self.load()
    return self.sites.get(address)
def delete(address):
    """Drop the site for *address* from the in-memory registry."""
    global sites
    logging.debug("SiteManager deleted site: %s" % address)
    del sites[address]
# Return or create site and start download site files
def need(self, address, all_file=True):
    """Return the Site for *address*, registering a new one when unknown.

    Returns False for an invalid address. With all_file=True the full
    site download is started before returning.
    """
    from Site import Site  # deferred import: Site imports this module back
    new = False
    site = self.get(address)
    if not site:  # Unknown address: register a fresh Site
        if not self.isAddress(address):
            return False  # Not a valid address
        logging.debug("Added new site: %s" % address)
        site = Site(address)
        self.sites[address] = site
        if not site.settings["serving"]:  # Maybe it was deleted before
            site.settings["serving"] = True
            site.saveSettings()
        new = True
    if all_file:
        site.download()  # Fetch every file listed in content.json
    return site
# Lazy load sites
def list():
    """Return the address -> Site registry, loading it on first use."""
    if sites is None:  # Not loaded yet
        load()
    return sites
def delete(self, address):
    """Remove the site for *address* from the registry (disk data untouched)."""
    logging.debug("SiteManager deleted site: %s" % address)
    del self.sites[address]
sites = None
# Lazy load sites
def list(self):
    """Return every known site as an address -> Site dict, loading on demand."""
    if self.sites is None:  # First access: populate the registry
        self.load()
    return self.sites
site_manager = SiteManager() # Singleton: shared site registry for the whole process
peer_blacklist = [] # Don't download from these peers (populated elsewhere — TODO confirm writer)