rev259, Protection against connection flood, Fix site size limit error dialog, Convert ConnectionServer and ContentManager to PEP8 format

parent f63b711972
commit dc791a31ab

6 changed files with 301 additions and 339 deletions

src/Config.py
@@ -4,7 +4,7 @@ import ConfigParser
 class Config(object):
     def __init__(self):
         self.version = "0.3.1"
-        self.rev = 247
+        self.rev = 259
         self.parser = self.createArguments()
         argv = sys.argv[:]  # Copy command line arguments
         argv = self.parseConfig(argv)  # Add arguments from config file

src/Connection/ConnectionServer.py
@@ -1,8 +1,14 @@
-import socket, os, logging, random, string, time, sys
-import gevent, msgpack
-import cStringIO as StringIO
+import logging
+import random
+import string
+import time
+import sys
+
+import gevent
+import msgpack
+from gevent.server import StreamServer
+from gevent.pool import Pool
 
 from Debug import Debug
 from Connection import Connection
 from Config import config
@@ -10,250 +16,163 @@ from Crypt import CryptConnection
 
 
 class ConnectionServer:
-    def __init__(self, ip=None, port=None, request_handler=None):
-        self.ip = ip
-        self.port = port
-        self.last_connection_id = 1 # Connection id incrementer
-        self.log = logging.getLogger("ConnServer")
-        self.port_opened = None
-
-        self.connections = [] # Connections
-        self.ips = {} # Connection by ip
-        self.peer_ids = {} # Connections by peer_ids
-
-        self.running = True
-        self.thread_checker = gevent.spawn(self.checkConnections)
-
-        self.peer_id = "-ZN0"+config.version.replace(".", "")+"-"+''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12)) # Bittorrent style peerid
-        self.bytes_recv = 0
-        self.bytes_sent = 0
-
-        # Check msgpack version
-        if msgpack.version[0] == 0 and msgpack.version[1] < 4:
-            self.log.error("Error: Too old msgpack version: %s (>0.4.0 required), please update using `sudo pip install msgpack-python --upgrade`" % str(msgpack.version))
-            import sys
-            sys.exit(0)
-
-        if port: # Listen server on a port
-            self.pool = Pool(1000) # do not accept more than 1000 connections
-            self.stream_server = StreamServer((ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100)
-            if request_handler: self.handleRequest = request_handler
-
-        CryptConnection.manager.loadCerts()
+    def __init__(self, ip=None, port=None, request_handler=None):
+        self.ip = ip
+        self.port = port
+        self.last_connection_id = 1  # Connection id incrementer
+        self.log = logging.getLogger("ConnServer")
+        self.port_opened = None
+
+        self.running = True
+        self.thread_checker = gevent.spawn(self.checkConnections)
+
+        self.connections = []  # Connections
+        self.ip_incoming = {}  # Incoming connections from ip in the last minute to avoid connection flood
+        self.ips = {}  # Connection by ip
+        self.peer_ids = {}  # Connections by peer_ids
+
+        self.bytes_recv = 0
+        self.bytes_sent = 0
+
+        # Bittorrent style peerid
+        self.peer_id = "-ZN0%s-%s" % (
+            config.version.replace(".", ""),
+            ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12))
+        )
+
+        # Check msgpack version
+        if msgpack.version[0] == 0 and msgpack.version[1] < 4:
+            self.log.error(
+                "Error: Too old msgpack version: %s (>0.4.0 required), please update using `sudo pip install msgpack-python --upgrade`" %
+                str(msgpack.version)
+            )
+            sys.exit(0)
+
+        if port:  # Listen server on a port
+            self.pool = Pool(1000)  # do not accept more than 1000 connections
+            self.stream_server = StreamServer((ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=100)
+            if request_handler:
+                self.handleRequest = request_handler
+
+        CryptConnection.manager.loadCerts()
-    def start(self):
-        self.running = True
-        self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % (self.ip, self.port, ".".join(map(str, msgpack.version)), CryptConnection.manager.crypt_supported ) )
-        try:
-            self.stream_server.serve_forever() # Start normal connection server
-        except Exception, err:
-            self.log.info("StreamServer bind error, must be running already: %s" % err)
-
-    def stop(self):
-        self.running = False
-        self.stream_server.stop()
+    def start(self):
+        self.running = True
+        self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % (
+            self.ip, self.port,
+            ".".join(map(str, msgpack.version)), CryptConnection.manager.crypt_supported)
+        )
+        try:
+            self.stream_server.serve_forever()  # Start normal connection server
+        except Exception, err:
+            self.log.info("StreamServer bind error, must be running already: %s" % err)
+
+    def stop(self):
+        self.running = False
+        self.stream_server.stop()
-    def handleIncomingConnection(self, sock, addr):
-        ip, port = addr
-        connection = Connection(self, ip, port, sock)
-        self.connections.append(connection)
-        self.ips[ip] = connection
-        connection.handleIncomingConnection(sock)
+    def handleIncomingConnection(self, sock, addr):
+        ip, port = addr
+
+        # Connection flood protection
+        if ip in self.ip_incoming:
+            self.ip_incoming[ip] += 1
+            if self.ip_incoming[ip] > 3:  # Allow 3 in 1 minute from same ip
+                self.log.debug("Connection flood detected from %s" % ip)
+                time.sleep(30)
+                sock.close()
+                return False
+        else:
+            self.ip_incoming[ip] = 0
+
+        connection = Connection(self, ip, port, sock)
+        self.connections.append(connection)
+        self.ips[ip] = connection
+        connection.handleIncomingConnection(sock)
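Note on the flood guard: handleIncomingConnection() now counts accepted sockets per source IP and stalls-then-drops an IP that connects too often within the counter window; the counters are wiped once a minute by checkConnections() below. A minimal standalone sketch of the same idea — the class name, thresholds and the self-contained reset are illustrative, not part of the commit:

    import time

    class FloodGuard(object):
        # Allow at most `limit` new connections per source IP in each window.
        def __init__(self, limit=3, window=60):
            self.limit = limit
            self.window = window
            self.counts = {}  # ip -> connections seen in the current window
            self.window_start = time.time()

        def allow(self, ip):
            # The commit resets the counters from its once-a-minute
            # checkConnections() loop; here the guard resets itself.
            if time.time() - self.window_start > self.window:
                self.counts = {}
                self.window_start = time.time()
            self.counts[ip] = self.counts.get(ip, 0) + 1
            return self.counts[ip] <= self.limit

    guard = FloodGuard()
    print guard.allow("1.2.3.4")  # True until the per-window limit is hit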
-    def getConnection(self, ip=None, port=None, peer_id=None, create=True):
-        if peer_id and peer_id in self.peer_ids: # Find connection by peer id
-            connection = self.peer_ids.get(peer_id)
-            if not connection.connected and create:
-                succ = connection.event_connected.get() # Wait for connection
-                if not succ: raise Exception("Connection event return error")
-            return connection
-        # Find connection by ip
-        if ip in self.ips:
-            connection = self.ips[ip]
-            if not connection.connected and create:
-                succ = connection.event_connected.get() # Wait for connection
-                if not succ: raise Exception("Connection event return error")
-            return connection
-        # Recover from connection pool
-        for connection in self.connections:
-            if connection.ip == ip:
-                if not connection.connected and create:
-                    succ = connection.event_connected.get() # Wait for connection
-                    if not succ: raise Exception("Connection event return error")
-                return connection
-
-        # No connection found
-        if create: # Allow to create new connection if not found
-            if port == 0:
-                raise Exception("This peer is not connectable")
-            try:
-                connection = Connection(self, ip, port)
-                self.ips[ip] = connection
-                self.connections.append(connection)
-                succ = connection.connect()
-                if not succ:
-                    connection.close()
-                    raise Exception("Connection event return error")
-            except Exception, err:
-                self.log.debug("%s Connect error: %s" % (ip, Debug.formatException(err)))
-                connection.close()
-                raise err
-            return connection
-        else:
-            return None
+    def getConnection(self, ip=None, port=None, peer_id=None, create=True):
+        if peer_id and peer_id in self.peer_ids:  # Find connection by peer id
+            connection = self.peer_ids.get(peer_id)
+            if not connection.connected and create:
+                succ = connection.event_connected.get()  # Wait for connection
+                if not succ:
+                    raise Exception("Connection event return error")
+            return connection
+        # Find connection by ip
+        if ip in self.ips:
+            connection = self.ips[ip]
+            if not connection.connected and create:
+                succ = connection.event_connected.get()  # Wait for connection
+                if not succ:
+                    raise Exception("Connection event return error")
+            return connection
+        # Recover from connection pool
+        for connection in self.connections:
+            if connection.ip == ip:
+                if not connection.connected and create:
+                    succ = connection.event_connected.get()  # Wait for connection
+                    if not succ:
+                        raise Exception("Connection event return error")
+                return connection
+
+        # No connection found
+        if create:  # Allow to create new connection if not found
+            if port == 0:
+                raise Exception("This peer is not connectable")
+            try:
+                connection = Connection(self, ip, port)
+                self.ips[ip] = connection
+                self.connections.append(connection)
+                succ = connection.connect()
+                if not succ:
+                    connection.close()
+                    raise Exception("Connection event return error")
+            except Exception, err:
+                self.log.debug("%s Connect error: %s" % (ip, Debug.formatException(err)))
+                connection.close()
+                raise err
+            return connection
+        else:
+            return None
 
-    def removeConnection(self, connection):
-        self.log.debug("Removing %s..." % connection)
-        if self.ips.get(connection.ip) == connection: # Delete if same as in registry
-            del self.ips[connection.ip]
-        if connection.peer_id and self.peer_ids.get(connection.peer_id) == connection: # Delete if same as in registry
-            del self.peer_ids[connection.peer_id]
-        if connection in self.connections:
-            self.connections.remove(connection)
+    def removeConnection(self, connection):
+        self.log.debug("Removing %s..." % connection)
+        if self.ips.get(connection.ip) == connection:  # Delete if same as in registry
+            del self.ips[connection.ip]
+        if connection.peer_id and self.peer_ids.get(connection.peer_id) == connection:  # Delete if same as in registry
+            del self.peer_ids[connection.peer_id]
+        if connection in self.connections:
+            self.connections.remove(connection)
 
-    def checkConnections(self):
-        while self.running:
-            time.sleep(60) # Sleep 1 min
-            for connection in self.connections[:]: # Make a copy
-                idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
-
-                if connection.unpacker and idle > 30: # Delete the unpacker if not needed
-                    del connection.unpacker
-                    connection.unpacker = None
-                    connection.log("Unpacker deleted")
-
-                if idle > 60*60: # Wake up after 1h
-                    connection.log("[Cleanup] After wakeup, idle: %s" % idle)
-                    connection.close()
-
-                elif idle > 20*60 and connection.last_send_time < time.time()-10: # Idle more than 20 min and we not send request in last 10 sec
-                    if not connection.ping(): # send ping request
-                        connection.close()
-
-                elif idle > 10 and connection.incomplete_buff_recv > 0: # Incompelte data with more than 10 sec idle
-                    connection.log("[Cleanup] Connection buff stalled")
-                    connection.close()
-
-                elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10: # Sent command and no response in 10 sec
-                    connection.log("[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time))
-                    connection.close()
-
-                elif idle > 60 and connection.protocol == "?": # No connection after 1 min
-                    connection.log("[Cleanup] Connect timeout: %s" % idle)
-                    connection.close()
+    def checkConnections(self):
+        while self.running:
+            time.sleep(60)  # Sleep 1 min
+            self.ip_incoming = {}
+            for connection in self.connections[:]:  # Make a copy
+                idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
+
+                if connection.unpacker and idle > 30:  # Delete the unpacker if not needed
+                    del connection.unpacker
+                    connection.unpacker = None
+                    connection.log("Unpacker deleted")
+
+                if idle > 60 * 60:  # Wake up after 1h
+                    connection.log("[Cleanup] After wakeup, idle: %s" % idle)
+                    connection.close()
+
+                elif idle > 20 * 60 and connection.last_send_time < time.time() - 10:  # Idle more than 20 min and we not send request in last 10 sec
+                    if not connection.ping():  # send ping request
+                        connection.close()
+
+                elif idle > 10 and connection.incomplete_buff_recv > 0:  # Incompelte data with more than 10 sec idle
+                    connection.log("[Cleanup] Connection buff stalled")
+                    connection.close()
+
+                elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10:  # Sent command and no response in 10 sec
+                    connection.log("[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time))
+                    connection.close()
+
+                elif idle > 60 and connection.protocol == "?":  # No connection after 1 min
+                    connection.log("[Cleanup] Connect timeout: %s" % idle)
+                    connection.close()
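checkConnections() is both the reset point for the flood counters (self.ip_incoming is cleared every minute) and an idle reaper for stale sockets. The elif ladder reads as a standalone policy; this sketch restates it with plain arguments (the thresholds are the commit's, the function itself is only an illustration):

    import time

    def cleanup_action(idle, incomplete_buff_recv, waiting_requests, last_send_time, protocol):
        # Mirrors the order of the checks above; first match wins.
        if idle > 60 * 60:
            return "close: idle for over an hour (machine probably woke up)"
        elif idle > 20 * 60 and last_send_time < time.time() - 10:
            return "ping; close if the peer does not answer"
        elif idle > 10 and incomplete_buff_recv > 0:
            return "close: receive buffer stalled"
        elif idle > 10 and waiting_requests and time.time() - last_send_time > 10:
            return "close: sent command got no response in 10s"
        elif idle > 60 and protocol == "?":
            return "close: handshake never completed"
        return "keep"

    print cleanup_action(idle=3700, incomplete_buff_recv=0, waiting_requests={}, last_send_time=0, protocol="v2")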
-# -- TESTING --
-
-def testCreateServer():
-    global server
-    server = ConnectionServer("127.0.0.1", 1234, testRequestHandler)
-    server.start()
-
-
-def testRequestHandler(connection, req):
-    print req
-    if req["cmd"] == "Bigdata":
-        connection.send({"res": "HelloWorld"*1024})
-    else:
-        connection.send({"res": "pong"})
-
-
-def testClient(num):
-    time.sleep(1)
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    s.connect(("localhost", 1234))
-    for i in range(10):
-        print "[C%s] send..." % num
-        s.sendall(msgpack.packb({"cmd": "[C] Ping"}))
-        print "[C%s] recv..." % num
-        print "[C%s] %s" % (num, repr(s.recv(1024)))
-        time.sleep(1)
-
-
-def testSlowClient(num):
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    s.connect(("localhost", 1234))
-    for i in range(1):
-        print "[C%s] send..." % num
-        s.sendall(msgpack.packb({"cmd": "Bigdata"}))
-        print "[C%s] recv..." % num
-        gevent.spawn_later(1, lambda s: s.send(msgpack.packb({"cmd": "[Z] Ping"})), s)
-        while 1:
-            data = s.recv(1000)
-            if not data: break
-            print "[C%s] %s" % (num, data)
-            time.sleep(1)
-            #s.sendall(msgpack.packb({"cmd": "[C] Ping"}))
-
-
-def testZmqClient(num):
-    import zmq.green as zmq
-    c = zmq.Context(1)
-    for i in range(10):
-        s = c.socket(zmq.REQ)
-        s.connect('tcp://127.0.0.1:1234')
-        print "[Z%s] send..." % num
-        s.send(msgpack.packb({"cmd": "[Z] Ping %s" % i}))
-        print "[Z%s] recv..." % num
-        print "[Z%s] %s" % (num, s.recv(1024))
-        s.close()
-        time.sleep(1)
-
-
-def testZmqSlowClient(num):
-    import zmq.green as zmq
-    c = zmq.Context(1)
-    s = c.socket(zmq.REQ)
-    for i in range(1):
-        s.connect('tcp://127.0.0.1:1234')
-        print "[Z%s] send..." % num
-        s.send(msgpack.packb({"cmd": "Bigdata"}))
-        print "[Z%s] recv..." % num
-        #gevent.spawn_later(1, lambda s: s.send(msgpack.packb({"cmd": "[Z] Ping"})), s)
-        while 1:
-            data = s.recv(1024*1024)
-            if not data: break
-            print "[Z%s] %s" % (num, data)
-            time.sleep(1)
-            s.send(msgpack.packb({"cmd": "[Z] Ping"}))
-
-
-def testConnection():
-    global server
-    connection = server.getConnection("127.0.0.1", 1234)
-    connection.send({"res": "Sending: Hello!"})
-    print connection
-
-
-def greenletsNum():
-    from greenlet import greenlet
-    import gc
-    while 1:
-        print len([ob for ob in gc.get_objects() if isinstance(ob, greenlet)])
-        time.sleep(1)
-
-
-if __name__ == "__main__":
-    from gevent import monkey; monkey.patch_all(thread=False)
-    import sys, time
-    logging.getLogger().setLevel(logging.DEBUG)
-
-    gevent.spawn(testZmqClient, 1)
-    gevent.spawn(greenletsNum)
-    #gevent.spawn(testClient, 1)
-    #gevent.spawn_later(1, testConnection)
-    print "Running server..."
-    server = None
-    testCreateServer()
src/Content/ContentManager.py
@@ -1,20 +1,29 @@
-import json, time, re, os, gevent, copy
+import json
+import time
+import re
+import os
+import copy
+
+import gevent
 
 from Debug import Debug
 from Crypt import CryptHash
 from Config import config
 
 
 class ContentManager(object):
 
     def __init__(self, site):
         self.site = site
         self.log = self.site.log
-        self.contents = {} # Known content.json (without files and includes)
+        self.contents = {}  # Known content.json (without files and includes)
         self.loadContent(add_bad_files=False)
         self.site.settings["size"] = self.getTotalSize()
 
     # Load content.json to self.content
     # Return: Changed files ["index.html", "data/messages.json"]
     def loadContent(self, content_inner_path="content.json", add_bad_files=True, load_includes=True):
-        content_inner_path = content_inner_path.strip("/") # Remove / from begning
+        content_inner_path = content_inner_path.strip("/")  # Remove / from begning
         old_content = self.contents.get(content_inner_path)
         content_path = self.site.storage.getPath(content_inner_path)
         content_path_dir = self.toDir(self.site.storage.getPath(content_inner_path))
@@ -28,7 +37,7 @@ class ContentManager(object):
             return False
         else:
             self.log.error("Content.json not exist: %s" % content_path)
-            return False # Content.json not exist
+            return False  # Content.json not exist
 
         try:
             # Get the files where the sha512 changed
@@ -36,40 +45,44 @@ class ContentManager(object):
             for relative_path, info in new_content.get("files", {}).items():
                 if "sha512" in info:
                     hash_type = "sha512"
-                else: # Backward compatiblity
+                else:  # Backward compatiblity
                     hash_type = "sha1"
 
                 new_hash = info[hash_type]
-                if old_content and old_content["files"].get(relative_path): # We have the file in the old content
+                if old_content and old_content["files"].get(relative_path):  # We have the file in the old content
                     old_hash = old_content["files"][relative_path].get(hash_type)
-                else: # The file is not in the old content
+                else:  # The file is not in the old content
                     old_hash = None
-                if old_hash != new_hash: changed.append(content_dir+relative_path)
+                if old_hash != new_hash:
+                    changed.append(content_dir + relative_path)
 
             # Load includes
             if load_includes and "includes" in new_content:
                 for relative_path, info in new_content["includes"].items():
-                    include_inner_path = content_dir+relative_path
-                    if self.site.storage.isFile(include_inner_path): # Content.json exists, load it
+                    include_inner_path = content_dir + relative_path
+                    if self.site.storage.isFile(include_inner_path):  # Content.json exists, load it
                         success = self.loadContent(include_inner_path, add_bad_files=add_bad_files)
-                        if success: changed += success # Add changed files
-                    else: # Content.json not exist, add to changed files
+                        if success:
+                            changed += success  # Add changed files
+                    else:  # Content.json not exist, add to changed files
                         self.log.debug("Missing include: %s" % include_inner_path)
                         changed += [include_inner_path]
 
             # Load blind user includes (all subdir)
             if load_includes and "user_contents" in new_content:
                 for relative_dir in os.listdir(content_path_dir):
-                    include_inner_path = content_dir+relative_dir+"/content.json"
-                    if not self.site.storage.isFile(include_inner_path): continue # Content.json not exist
+                    include_inner_path = content_dir + relative_dir + "/content.json"
+                    if not self.site.storage.isFile(include_inner_path):
+                        continue  # Content.json not exist
                     success = self.loadContent(include_inner_path, add_bad_files=add_bad_files, load_includes=False)
-                    if success: changed += success # Add changed files
+                    if success:
+                        changed += success  # Add changed files
 
             # Update the content
             self.contents[content_inner_path] = new_content
         except Exception, err:
             self.log.error("Content.json parse error: %s" % Debug.formatException(err))
-            return False # Content.json parse error
+            return False  # Content.json parse error
 
         # Add changed files to bad files
         if add_bad_files:
@@ -77,7 +90,8 @@ class ContentManager(object):
                 self.site.bad_files[inner_path] = True
 
         if new_content["modified"] > self.site.settings.get("modified", 0):
-            self.site.settings["modified"] = min(time.time()+60*10, new_content["modified"]) # Dont store modifications in the far future (more than 10 minute)
+            # Dont store modifications in the far future (more than 10 minute)
+            self.site.settings["modified"] = min(time.time() + 60 * 10, new_content["modified"])
 
         return changed
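Clamping the stored modified value keeps a peer from pushing the site's update state days into the future: anything more than ten minutes ahead of the local clock is cut back. A toy illustration (timestamps invented):

    import time

    def clamp_modified(new_modified, now=None):
        # Same rule as above: never store more than 10 minutes ahead.
        if now is None:
            now = time.time()
        return min(now + 60 * 10, new_modified)

    now = 1420000000.0
    print clamp_modified(now + 30, now)     # honest timestamp: kept as-is
    print clamp_modified(now + 86400, now)  # a day ahead: clamped to now + 600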
@@ -86,8 +100,9 @@ class ContentManager(object):
     def getTotalSize(self, ignore=None):
         total_size = 0
         for inner_path, content in self.contents.iteritems():
-            if inner_path == ignore: continue
-            total_size += self.site.storage.getSize(inner_path) # Size of content.json
+            if inner_path == ignore:
+                continue
+            total_size += self.site.storage.getSize(inner_path)  # Size of content.json
             for file, info in content.get("files", {}).iteritems():
                 total_size += info["size"]
         return total_size
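Site size is thus the on-disk size of every known content.json plus the sizes their file maps declare; regular files are never stat()ed here. The same accounting over plain dicts, as a self-contained sketch (data invented):

    def total_size(contents, content_json_sizes, ignore=None):
        # contents: inner_path -> parsed content.json dict
        # content_json_sizes: inner_path -> on-disk size of that content.json
        total = 0
        for inner_path, content in contents.iteritems():
            if inner_path == ignore:
                continue
            total += content_json_sizes[inner_path]
            for file_name, info in content.get("files", {}).iteritems():
                total += info["size"]
        return total

    contents = {"content.json": {"files": {"index.html": {"size": 1024}}}}
    print total_size(contents, {"content.json": 300})  # -> 1324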
@@ -95,12 +110,12 @@ class ContentManager(object):
     # Find the file info line from self.contents
     # Return: { "sha512": "c29d73d30ee8c9c1b5600e8a84447a6de15a3c3db6869aca4a2a578c1721f518", "size": 41 , "content_inner_path": "content.json"}
     def getFileInfo(self, inner_path):
-        dirs = inner_path.split("/") # Parent dirs of content.json
-        inner_path_parts = [dirs.pop()] # Filename relative to content.json
+        dirs = inner_path.split("/")  # Parent dirs of content.json
+        inner_path_parts = [dirs.pop()]  # Filename relative to content.json
         while True:
             content_inner_path = "%s/content.json" % "/".join(dirs)
             content = self.contents.get(content_inner_path.strip("/"))
-            if content and "files" in content: # Check if content.json exists
+            if content and "files" in content:  # Check if content.json exists
                 back = content["files"].get("/".join(inner_path_parts))
                 if back:
                     back["content_inner_path"] = content_inner_path
@@ -124,13 +139,14 @@ class ContentManager(object):
     # Get rules for the file
     # Return: The rules for the file or False if not allowed
     def getRules(self, inner_path, content=None):
-        if not inner_path.endswith("content.json"): # Find the files content.json first
+        if not inner_path.endswith("content.json"):  # Find the files content.json first
             file_info = self.getFileInfo(inner_path)
-            if not file_info: return False # File not found
+            if not file_info:
+                return False  # File not found
             inner_path = file_info["content_inner_path"]
-        dirs = inner_path.split("/") # Parent dirs of content.json
-        inner_path_parts = [dirs.pop()] # Filename relative to content.json
-        inner_path_parts.insert(0, dirs.pop()) # Dont check in self dir
+        dirs = inner_path.split("/")  # Parent dirs of content.json
+        inner_path_parts = [dirs.pop()]  # Filename relative to content.json
+        inner_path_parts.insert(0, dirs.pop())  # Dont check in self dir
         while True:
             content_inner_path = "%s/content.json" % "/".join(dirs)
             parent_content = self.contents.get(content_inner_path.strip("/"))
@@ -138,23 +154,23 @@ class ContentManager(object):
                 return parent_content["includes"].get("/".join(inner_path_parts))
             elif parent_content and "user_contents" in parent_content:
                 return self.getUserContentRules(parent_content, inner_path, content)
-            else: # No inner path in this dir, lets try the parent dir
+            else:  # No inner path in this dir, lets try the parent dir
                 if dirs:
                     inner_path_parts.insert(0, dirs.pop())
-                else: # No more parent dirs
+                else:  # No more parent dirs
                     break
 
         return False
 
     # Get rules for a user file
     # Return: The rules of the file or False if not allowed
     def getUserContentRules(self, parent_content, inner_path, content):
         user_contents = parent_content["user_contents"]
-        user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) # Delivered for directory
+        user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1)  # Delivered for directory
 
         try:
-            if not content: content = self.site.storage.loadJson(inner_path) # Read the file if no content specified
+            if not content:
+                content = self.site.storage.loadJson(inner_path)  # Read the file if no content specified
         except (Exception, ): # Content.json not exist
             return {"signers": [user_address], "user_address": user_address} # Return information that we know for sure
@@ -162,15 +178,16 @@ class ContentManager(object):
             content["cert_auth_type"] = "unknown"
             content["cert_user_name"] = "unknown@unknown"
             """
-        user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"]) # web/nofish@zeroid.bit
+        user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"])  # web/nofish@zeroid.bit
 
-        rules = copy.copy(user_contents["permissions"].get(content["cert_user_id"], {})) # Default rules by username
-        if rules == False:
+        rules = copy.copy(user_contents["permissions"].get(content["cert_user_id"], {}))  # Default rules by username
+        if rules is False:
             return False # User banned
         if "signers" in rules:
             rules["signers"] = rules["signers"][:] # Make copy of the signers
-        for permission_pattern, permission_rules in user_contents["permission_rules"].items(): # Regexp rules
-            if not re.match(permission_pattern, user_urn): continue # Rule is not valid for user
+        for permission_pattern, permission_rules in user_contents["permission_rules"].items():  # Regexp rules
+            if not re.match(permission_pattern, user_urn):
+                continue  # Rule is not valid for user
             # Update rules if its better than current recorded ones
             for key, val in permission_rules.iteritems():
                 if key not in rules:
@@ -182,13 +199,15 @@ class ContentManager(object):
                     if val > rules[key]:
                         rules[key] = val
                 elif hasattr(val, "startswith"): # String, update if longer
-                    if len(val) > len(rules[key]): rules[key] = val
+                    if len(val) > len(rules[key]):
+                        rules[key] = val
                 elif type(val) is list: # List, append
                     rules[key] += val
 
-        rules["cert_signers"] = user_contents["cert_signers"] # Add valid cert signers
-        if "signers" not in rules: rules["signers"] = []
-        rules["signers"].append(user_address) # Add user as valid signer
+        rules["cert_signers"] = user_contents["cert_signers"]  # Add valid cert signers
+        if "signers" not in rules:
+            rules["signers"] = []
+        rules["signers"].append(user_address)  # Add user as valid signer
         rules["user_address"] = user_address
 
         return rules
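The regexp permission rules merge "most permissive wins" per key: larger numbers replace smaller ones, longer strings replace shorter ones, lists are concatenated. A standalone sketch of that merge (the type dispatch is spelled out here; the rule data is invented):

    def merge_rules(rules, permission_rules):
        for key, val in permission_rules.iteritems():
            if key not in rules:
                rules[key] = val
            elif isinstance(val, (int, float)):
                if val > rules[key]:
                    rules[key] = val  # Number: update if larger
            elif hasattr(val, "startswith"):
                if len(val) > len(rules[key]):
                    rules[key] = val  # String: update if longer
            elif type(val) is list:
                rules[key] += val  # List: append
        return rules

    print merge_rules(
        {"max_size": 10000, "files_allowed": "data.json"},
        {"max_size": 20000, "files_allowed": "data.json|avatar.png"}
    )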
@@ -197,7 +216,7 @@ class ContentManager(object):
     # Return: The new content if filewrite = False
     def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None):
         content = self.contents.get(inner_path)
-        if not content: # Content not exist yet, load default one
+        if not content:  # Content not exist yet, load default one
             self.log.info("File %s not exist yet, loading default values..." % inner_path)
             content = {"files": {}, "signs": {}} # Default content.json
             if inner_path == "content.json": # It's the root content.json, add some more fields
@@ -205,7 +224,8 @@ class ContentManager(object):
                 content["description"] = ""
                 content["signs_required"] = 1
                 content["ignore"] = ""
-        if extend: content.update(extend) # Add custom fields
+        if extend:
+            content.update(extend)  # Add custom fields
 
         directory = self.toDir(self.site.storage.getPath(inner_path))
         self.log.info("Opening site data directory: %s..." % directory)
@@ -217,18 +237,25 @@ class ContentManager(object):
                 file_path = self.site.storage.getPath("%s/%s" % (root.strip("/"), file_name))
                 file_inner_path = re.sub(re.escape(directory), "", file_path)
 
-                if file_name == "content.json": ignored = True
-                elif content.get("ignore") and re.match(content["ignore"], file_inner_path): ignored = True
-                elif file_name.startswith("."): ignored = True
-                else: ignored = False
+                if file_name == "content.json":
+                    ignored = True
+                elif content.get("ignore") and re.match(content["ignore"], file_inner_path):
+                    ignored = True
+                elif file_name.startswith("."):
+                    ignored = True
+                else:
+                    ignored = False
 
-                if ignored: # Ignore content.json, definied regexp and files starting with .
+                if ignored:  # Ignore content.json, definied regexp and files starting with .
                     self.log.info("- [SKIPPED] %s" % file_inner_path)
                 else:
-                    sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file
+                    sha512sum = CryptHash.sha512sum(file_path)  # Calculate sha512 sum of file
                     self.log.info("- %s (SHA512: %s)" % (file_inner_path, sha512sum))
                     hashed_files[file_inner_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
-                    if file_inner_path in content["files"].keys() and hashed_files[file_inner_path]["sha512"] != content["files"][file_inner_path].get("sha512"):
+                    if (
+                        file_inner_path in content["files"].keys()
+                        and hashed_files[file_inner_path]["sha512"] != content["files"][file_inner_path].get("sha512")
+                    ):
                         changed_files.append(file_path)
 
         self.log.debug("Changed files: %s" % changed_files)
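Signing starts by re-hashing the site: content.json itself, dotfiles and anything matching the optional ignore regexp are skipped; everything else gets a sha512 digest and a size. A compact sketch of the same walk using only the standard library (ZeroNet's CryptHash helper is replaced by hashlib here; judging by the 64-hex-character example digest in getFileInfo above, it stores a truncated sha512):

    import hashlib
    import os
    import re

    def hash_site_files(directory, ignore_pattern=None):
        hashed_files = {}
        for root, dirs, files in os.walk(directory):
            for file_name in files:
                file_path = os.path.join(root, file_name)
                file_inner_path = os.path.relpath(file_path, directory).replace("\\", "/")
                if file_name == "content.json" or file_name.startswith("."):
                    continue  # Never hash the manifest itself or dotfiles
                if ignore_pattern and re.match(ignore_pattern, file_inner_path):
                    continue  # Site-defined ignore regexp
                with open(file_path, "rb") as f:
                    sha512sum = hashlib.sha512(f.read()).hexdigest()[0:64]
                hashed_files[file_inner_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
        return hashed_files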
@@ -239,9 +266,9 @@ class ContentManager(object):
         # Generate new content.json
         self.log.info("Adding timestamp and sha512sums to new content.json...")
 
-        new_content = content.copy() # Create a copy of current content.json
-        new_content["files"] = hashed_files # Add files sha512 hash
-        new_content["modified"] = time.time() # Add timestamp
+        new_content = content.copy()  # Create a copy of current content.json
+        new_content["files"] = hashed_files  # Add files sha512 hash
+        new_content["modified"] = time.time()  # Add timestamp
         if inner_path == "content.json":
             new_content["address"] = self.site.address
             new_content["zeronet_version"] = config.version
@@ -255,19 +282,22 @@ class ContentManager(object):
             return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address))
         self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))
 
-        if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key sign the valid signers
+        if inner_path == "content.json" and privatekey_address == self.site.address:  # If signing using the root key sign the valid signers
             new_content["signers_sign"] = CryptBitcoin.sign("%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey)
-            if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none")
+            if not new_content["signers_sign"]:
+                self.log.info("Old style address, signers_sign is none")
 
         self.log.info("Signing %s..." % inner_path)
 
-        if "signs" in new_content: del(new_content["signs"]) # Delete old signs
-        if "sign" in new_content: del(new_content["sign"]) # Delete old sign (backward compatibility)
+        if "signs" in new_content:
+            del(new_content["signs"])  # Delete old signs
+        if "sign" in new_content:
+            del(new_content["sign"])  # Delete old sign (backward compatibility)
 
         sign_content = json.dumps(new_content, sort_keys=True)
         sign = CryptBitcoin.sign(sign_content, privatekey)
-        #new_content["signs"] = content.get("signs", {}) # TODO: Multisig
-        if sign: # If signing is successful (not an old address)
+        # new_content["signs"] = content.get("signs", {})  # TODO: Multisig
+        if sign:  # If signing is successful (not an old address)
             new_content["signs"] = {}
             new_content["signs"][privatekey_address] = sign
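Whatever signs, the signature always covers json.dumps(new_content, sort_keys=True) with any existing signs/sign keys deleted first, so signer and verifier serialize byte-identical text. A sketch of just that canonical-form step (hashlib stands in for CryptBitcoin, which is not reproduced here):

    import hashlib
    import json

    def canonical_form(content):
        # Strip old signatures, then dump with sorted keys so every peer
        # agrees byte-for-byte on what was signed.
        content = dict(content)
        content.pop("signs", None)
        content.pop("sign", None)  # backward compatibility
        return json.dumps(content, sort_keys=True)

    content = {"modified": 1420000000.0, "files": {}, "signs": {"1ABC": "..."}}
    print hashlib.sha256(canonical_form(content)).hexdigest()  # stable digest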
@@ -294,7 +324,7 @@ class ContentManager(object):
     # Return: ["1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6", "13ReyhCsjhpuCVahn1DHdf6eMqqEVev162"]
     def getValidSigners(self, inner_path, content=None):
         valid_signers = []
-        if inner_path == "content.json": # Root content.json
+        if inner_path == "content.json":  # Root content.json
             if "content.json" in self.contents and "signers" in self.contents["content.json"]:
                 valid_signers += self.contents["content.json"]["signers"].keys()
         else:
@@ -314,11 +344,12 @@ class ContentManager(object):
         from Crypt import CryptBitcoin
 
         rules = self.getRules(inner_path, content)
-        if not rules.get("cert_signers"): return True # Does not need cert
+        if not rules.get("cert_signers"):
+            return True  # Does not need cert
 
         name, domain = content["cert_user_id"].split("@")
         cert_address = rules["cert_signers"].get(domain)
-        if not cert_address: # Cert signer not allowed
+        if not cert_address:  # Cert signer not allowed
             self.log.error("Invalid cert signer: %s" % domain)
             return False
         return CryptBitcoin.verify("%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"])
@@ -326,21 +357,23 @@ class ContentManager(object):
     # Checks if the content.json content is valid
     # Return: True or False
     def validContent(self, inner_path, content):
-        content_size = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()]) # Size of new content
-        site_size = self.getTotalSize(ignore=inner_path)+content_size # Site size without old content
-        if site_size > self.site.settings.get("size", 0): self.site.settings["size"] = site_size # Save to settings if larger
+        content_size = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])  # Size of new content
+        site_size = self.getTotalSize(ignore=inner_path) + content_size  # Site size without old content
+        if site_size > self.site.settings.get("size", 0):
+            self.site.settings["size"] = site_size  # Save to settings if larger
 
-        site_size_limit = self.site.getSizeLimit()*1024*1024
+        site_size_limit = self.site.getSizeLimit() * 1024 * 1024
 
         # Check total site size limit
         if site_size > site_size_limit:
             self.log.error("%s: Site too large %s > %s, aborting task..." % (inner_path, site_size, site_size_limit))
             task = self.site.worker_manager.findTask(inner_path)
-            if task: # Dont try to download from other peers
+            if task:  # Dont try to download from other peers
                 self.site.worker_manager.failTask(task)
             return False
 
-        if inner_path == "content.json": return True # Root content.json is passed
+        if inner_path == "content.json":
+            return True  # Root content.json is passed
 
         # Load include details
         rules = self.getRules(inner_path, content)
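This is the server-side half of the size-limit behaviour the commit's UI changes surface: the candidate site size is the incoming content.json (its JSON length plus every declared file size) added to everything already tracked, and it must stay under getSizeLimit() megabytes. A self-contained sketch of the check (limit and sizes invented):

    import json

    def valid_size(new_content, existing_size, size_limit_mb):
        # Size the new content.json contributes: its JSON text plus declared files.
        content_size = len(json.dumps(new_content)) + sum(
            f["size"] for f in new_content["files"].values())
        site_size = existing_size + content_size
        return site_size <= size_limit_mb * 1024 * 1024, site_size

    new_content = {"files": {"index.html": {"size": 2048}}}
    ok, size = valid_size(new_content, existing_size=9 * 1024 * 1024, size_limit_mb=10)
    print ok, size  # still under the example 10MB limit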
@@ -349,15 +382,15 @@ class ContentManager(object):
             return False
 
         # Check include size limit
-        if rules.get("max_size"): # Include size limit
+        if rules.get("max_size"):  # Include size limit
             if content_size > rules["max_size"]:
                 self.log.error("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"]))
                 return False
 
         # Check if content includes allowed
-        if rules.get("includes_allowed") == False and content.get("includes"):
+        if rules.get("includes_allowed") is False and content.get("includes"):
             self.log.error("%s: Includes not allowed" % inner_path)
-            return False # Includes not allowed
+            return False  # Includes not allowed
 
         # Filename limit
         if rules.get("files_allowed"):
@@ -370,54 +403,57 @@ class ContentManager(object):
 
     # Verify file validity
     # Return: None = Same as before, False = Invalid, True = Valid
-    def verifyFile(self, inner_path, file, ignore_same = True):
-        if inner_path.endswith("content.json"): # content.json: Check using sign
+    def verifyFile(self, inner_path, file, ignore_same=True):
+        if inner_path.endswith("content.json"):  # content.json: Check using sign
             from Crypt import CryptBitcoin
             try:
                 new_content = json.load(file)
                 if inner_path in self.contents:
                     old_content = self.contents.get(inner_path)
                     # Checks if its newer the ours
-                    if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json
+                    if old_content["modified"] == new_content["modified"] and ignore_same:  # Ignore, have the same content.json
                         return None
-                    elif old_content["modified"] > new_content["modified"]: # We have newer
+                    elif old_content["modified"] > new_content["modified"]:  # We have newer
                         self.log.debug("We have newer %s (Our: %s, Sent: %s)" % (inner_path, old_content["modified"], new_content["modified"]))
-                        gevent.spawn(self.site.publish, inner_path=inner_path) # Try to fix the broken peers
+                        gevent.spawn(self.site.publish, inner_path=inner_path)  # Try to fix the broken peers
                         return False
-                if new_content["modified"] > time.time()+60*60*24: # Content modified in the far future (allow 1 day window)
+                if new_content["modified"] > time.time() + 60 * 60 * 24:  # Content modified in the far future (allow 1 day window)
                     self.log.error("%s modify is in the future!" % inner_path)
                     return False
                 # Check sign
                 sign = new_content.get("sign")
                 signs = new_content.get("signs", {})
-                if "sign" in new_content: del(new_content["sign"]) # The file signed without the sign
-                if "signs" in new_content: del(new_content["signs"]) # The file signed without the signs
-                sign_content = json.dumps(new_content, sort_keys=True) # Dump the json to string to remove whitepsace
+                if "sign" in new_content:
+                    del(new_content["sign"])  # The file signed without the sign
+                if "signs" in new_content:
+                    del(new_content["signs"])  # The file signed without the signs
+                sign_content = json.dumps(new_content, sort_keys=True)  # Dump the json to string to remove whitepsace
 
-                if not self.validContent(inner_path, new_content): return False # Content not valid (files too large, invalid files)
+                if not self.validContent(inner_path, new_content):
+                    return False  # Content not valid (files too large, invalid files)
 
-                if signs: # New style signing
+                if signs:  # New style signing
                     valid_signers = self.getValidSigners(inner_path, new_content)
                     signs_required = self.getSignsRequired(inner_path, new_content)
 
-                    if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json
+                    if inner_path == "content.json" and len(valid_signers) > 1:  # Check signers_sign on root content.json
                         if not CryptBitcoin.verify("%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]):
                             self.log.error("%s invalid signers_sign!" % inner_path)
                             return False
 
-                    if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid
+                    if inner_path != "content.json" and not self.verifyCert(inner_path, new_content):  # Check if cert valid
                         self.log.error("%s invalid cert!" % inner_path)
                         return False
 
                     valid_signs = 0
                     for address in valid_signers:
-                        if address in signs: valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
-                        if valid_signs >= signs_required: break # Break if we has enough signs
+                        if address in signs:
+                            valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
+                        if valid_signs >= signs_required:
+                            break  # Break if we has enough signs
 
                     return valid_signs >= signs_required
-                else: # Old style signing
+                else:  # Old style signing
                     return CryptBitcoin.verify(sign_content, self.site.address, sign)
 
             except Exception, err:
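Multi-signature acceptance is a plain counter: walk the allowed signer addresses, add one per valid signature, and stop as soon as signs_required is reached. A sketch with a stubbed verifier (CryptBitcoin.verify is not reproduced; the stub only marks which signatures count):

    def enough_signs(sign_content, signs, valid_signers, signs_required, verify):
        # verify(data, address, sign) -> bool, standing in for CryptBitcoin.verify
        valid_signs = 0
        for address in valid_signers:
            if address in signs and verify(sign_content, address, signs[address]):
                valid_signs += 1
            if valid_signs >= signs_required:
                break
        return valid_signs >= signs_required

    fake_verify = lambda data, address, sign: sign == "good"
    print enough_signs("{}", {"1A": "good", "1B": "bad"}, ["1A", "1B"], 1, fake_verify)  # True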
@@ -429,7 +465,7 @@ class ContentManager(object):
             if file_info:
                 if "sha512" in file_info:
                     hash_valid = CryptHash.sha512sum(file) == file_info["sha512"]
-                elif "sha1" in file_info: # Backward compatibility
+                elif "sha1" in file_info:  # Backward compatibility
                     hash_valid = CryptHash.sha1sum(file) == file_info["sha1"]
                 else:
                     hash_valid = False
@@ -439,22 +475,21 @@ class ContentManager(object):
                     return False
                 return hash_valid
 
-            else: # File not in content.json
+            else:  # File not in content.json
                 self.log.error("File not in content.json: %s" % inner_path)
                 return False
 
 
     # Get dir from file
     # Return: data/site/content.json -> data/site
     def toDir(self, inner_path):
         file_dir = re.sub("[^/]*?$", "", inner_path).strip("/")
-        if file_dir: file_dir += "/" # Add / at end if its not the root
+        if file_dir:
+            file_dir += "/"  # Add / at end if its not the root
         return file_dir
 
 
 def testSign():
-    global config
     from Config import config
     from Site import Site
     site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
     content_manager = ContentManager(site)
@@ -462,9 +497,7 @@ def testSign():
 
 
 def testVerify():
-    from Config import config
     from Site import Site
-    #site = Site("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
     site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
 
     content_manager = ContentManager(site)
@@ -478,7 +511,6 @@ def testVerify():
 
 
 def testInfo():
-    from Config import config
     from Site import Site
     site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
 
@@ -493,14 +525,13 @@ def testInfo():
 
 
 if __name__ == "__main__":
-    import os, sys, logging
+    import sys
+    import logging
     os.chdir("../..")
     sys.path.insert(0, os.path.abspath("."))
     sys.path.insert(0, os.path.abspath("src"))
     logging.basicConfig(level=logging.DEBUG)
     from Debug import Debug
     from Crypt import CryptHash
 
-    #testSign()
+    # testSign()
     testVerify()
-    #testInfo()
+    # testInfo()

src/Peer/Peer.py
@@ -185,7 +185,7 @@ class Peer(object):
     # Stop and remove from site
     def remove(self):
         self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed))
-        if self.key in self.site.peers: del(self.site.peers[self.key])
+        if self.site and self.key in self.site.peers: del(self.site.peers[self.key])
         if self.connection:
             self.connection.close()

src/Ui/media/Wrapper.coffee
@@ -242,7 +242,12 @@ class Wrapper
 		@setSiteInfo site_info
 
 		if site_info.settings.size > site_info.size_limit*1024*1024 # Site size too large and not displaying it yet
-			@loading.showTooLarge(site_info)
+			if @loading.screen_visible
+				@loading.showTooLarge(site_info)
+			else
+				@displayConfirm "Site is larger than allowed: #{(site_info.settings.size/1024/1024).toFixed(1)}MB/#{site_info.size_limit}MB", "Set limit to #{site_info.next_size_limit}MB", =>
+					@ws.cmd "siteSetLimit", [site_info.next_size_limit], (res) =>
+						@notifications.add("size_limit", "done", res, 5000)
 
 		if site_info.content
 			window.document.title = site_info.content.title+" - ZeroNet"
@@ -286,8 +291,8 @@ class Wrapper
 			@loading.printLine "No peers found"
 
 		if not @site_info and not @loading.screen_visible and $("#inner-iframe").attr("src").indexOf("?") == -1 # First site info and mainpage
-			if site_info.size_limit < site_info.next_size_limit # Need upgrade soon
-				@wrapperConfirm "Running out of size limit (#{(site_info.settings.size/1024/1024).toFixed(1)}MB/#{site_info.size_limit}MB)", "Set limit to #{site_info.next_size_limit}MB", =>
+			if site_info.size_limit*1.1 < site_info.next_size_limit # Need upgrade soon
+				@actionConfirm "Running out of size limit (#{(site_info.settings.size/1024/1024).toFixed(1)}MB/#{site_info.size_limit}MB)", "Set limit to #{site_info.next_size_limit}MB", =>
 				@ws.cmd "siteSetLimit", [site_info.next_size_limit], (res) =>
 					@notifications.add("size_limit", "done", res, 5000)
 			return false
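Two dialogs are in play on the UI side: the "site too large" confirm (above, now only taking over the page when the loading screen is still visible) and the early "running out of size limit" prompt, which after this commit fires only when the next limit step is at least 10% above the current one. The decision logic restated as a small Python sketch (field names follow the site_info object used above):

    def size_limit_action(settings_size, size_limit_mb, next_size_limit_mb, loading_screen_visible):
        # What the wrapper does with a site_info update, per the diff above.
        if settings_size > size_limit_mb * 1024 * 1024:
            if loading_screen_visible:
                return "show 'too large' loading screen"
            return "confirm: set limit to %sMB" % next_size_limit_mb
        if size_limit_mb * 1.1 < next_size_limit_mb:
            return "confirm: running out of size limit"
        return "nothing"

    print size_limit_action(11 * 1024 * 1024, 10, 20, loading_screen_visible=False)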
src/Ui/media/all.js
@@ -681,7 +681,6 @@ jQuery.extend( jQuery.easing,
 }).call(this);
 
 
-
 /* ---- src/Ui/media/Sidebar.coffee ---- */
 
 
@@ -1069,7 +1068,15 @@ jQuery.extend( jQuery.easing,
         _this.address = site_info.address;
         _this.setSiteInfo(site_info);
         if (site_info.settings.size > site_info.size_limit * 1024 * 1024) {
-          _this.loading.showTooLarge(site_info);
+          if (_this.loading.screen_visible) {
+            _this.loading.showTooLarge(site_info);
+          } else {
+            _this.displayConfirm("Site is larger than allowed: " + ((site_info.settings.size / 1024 / 1024).toFixed(1)) + "MB/" + site_info.size_limit + "MB", "Set limit to " + site_info.next_size_limit + "MB", function() {
+              return _this.ws.cmd("siteSetLimit", [site_info.next_size_limit], function(res) {
+                return _this.notifications.add("size_limit", "done", res, 5000);
+              });
+            });
+          }
         }
         if (site_info.content) {
           window.document.title = site_info.content.title + " - ZeroNet";
@@ -1118,8 +1125,8 @@ jQuery.extend( jQuery.easing,
           }
         }
         if (!this.site_info && !this.loading.screen_visible && $("#inner-iframe").attr("src").indexOf("?") === -1) {
-          if (site_info.size_limit < site_info.next_size_limit) {
-            this.wrapperConfirm("Running out of size limit (" + ((site_info.settings.size / 1024 / 1024).toFixed(1)) + "MB/" + site_info.size_limit + "MB)", "Set limit to " + site_info.next_size_limit + "MB", (function(_this) {
+          if (site_info.size_limit * 1.1 < site_info.next_size_limit) {
+            this.actionConfirm("Running out of size limit (" + ((site_info.settings.size / 1024 / 1024).toFixed(1)) + "MB/" + site_info.size_limit + "MB)", "Set limit to " + site_info.next_size_limit + "MB", (function(_this) {
               return function() {
                 _this.ws.cmd("siteSetLimit", [site_info.next_size_limit], function(res) {
                   return _this.notifications.add("size_limit", "done", res, 5000);
@@ -1211,4 +1218,4 @@ jQuery.extend( jQuery.easing,
 
 window.wrapper = new Wrapper(ws_url);
 
-}).call(this);
+}).call(this);