rev242, Implicit SSL support for the future, Exist typo fix
parent ea921e4ad7
commit b2e2453e58

12 changed files with 51 additions and 37 deletions

@@ -4,7 +4,7 @@ import ConfigParser
 class Config(object):
     def __init__(self):
         self.version = "0.3.1"
-        self.rev = 238
+        self.rev = 242
         self.parser = self.createArguments()
         argv = sys.argv[:] # Copy command line arguments
         argv = self.parseConfig(argv) # Add arguments from config file
@@ -7,7 +7,7 @@ from util import StreamingMsgpack
 from Crypt import CryptConnection
 
 class Connection(object):
-    __slots__ = ("sock", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id", "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time", "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests")
+    __slots__ = ("sock", "sock_wrapped", "ip", "port", "peer_id", "id", "protocol", "type", "server", "unpacker", "req_id", "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time", "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "last_ping_delay", "last_req_time", "last_cmd", "name", "updateName", "waiting_requests")
 
     def __init__(self, server, ip, port, sock=None):
         self.sock = sock
@@ -24,6 +24,7 @@ class Connection(object):
         self.req_id = 0 # Last request id
         self.handshake = {} # Handshake info got from peer
         self.crypt = None # Connection encryption method
+        self.sock_wrapped = False # Socket wrapped to encryption
 
         self.connected = False
         self.event_connected = gevent.event.AsyncResult() # Solves on handshake received
@@ -70,6 +71,11 @@ class Connection(object):
         self.type = "out"
         self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         self.sock.connect((self.ip, int(self.port)))
+        # Implicit SSL in the future
+        #self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
+        #self.sock.do_handshake()
+        #self.crypt = "tls-rsa"
+        #self.sock_wrapped = True
         # Detect protocol
         self.send({"cmd": "handshake", "req_id": 0, "params": self.handshakeInfo()})
         gevent.spawn(self.messageLoop)
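The commented-out block above marks where an outgoing connection would be wrapped into TLS right after connect(), before any plaintext is sent. A rough sketch of that idea using only the Python 2 era standard library (the project's own CryptConnection.manager.wrapSocket API is not reproduced here, and the peer address/port are hypothetical):

    import socket
    import ssl

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("203.0.113.5", 15441))  # hypothetical peer ip/port
    # Wrap first, then handshake explicitly, mirroring the wrapSocket() + do_handshake() pair
    sock = ssl.wrap_socket(sock, do_handshake_on_connect=False)
    sock.do_handshake()
    # From here on, everything sent over this socket is encrypted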
@@ -81,6 +87,11 @@ class Connection(object):
     def handleIncomingConnection(self, sock):
         self.log("Incoming connection...")
         self.type = "in"
+        if sock.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
+            self.log("Crypt in connection using implicit SSL")
+            self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
+            self.sock_wrapped = True
+            self.crypt = "tls-rsa"
         self.messageLoop()
 
 
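The incoming side can serve TLS and plaintext peers on the same port because every TLS session opens with a handshake record whose first byte (the record content type) is 0x16; peeking with MSG_PEEK leaves that byte in the buffer for the TLS layer to consume. A minimal standalone sketch of the same check (hypothetical helper name, plain socket module instead of gevent):

    import socket

    def looks_like_tls(client_sock):
        # Peek without consuming: the byte stays queued for the real reader
        first_byte = client_sock.recv(1, socket.MSG_PEEK)
        return first_byte == "\x16"  # 0x16 = TLS handshake record type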
@@ -158,8 +169,8 @@ class Connection(object):
                 ping = time.time()-self.start_time
                 if config.debug_socket: self.log("Handshake response: %s, ping: %s" % (message, ping))
                 self.last_ping_delay = ping
-                # Server switched to crypt, lets do it also
-                if message.get("crypt"):
+                # Server switched to crypt, lets do it also if not crypted already
+                if message.get("crypt") and not self.sock_wrapped:
                     self.crypt = message["crypt"]
                     server = (self.type == "in")
                     self.log("Crypt out connection using: %s (server side: %s)..." % (self.crypt, server))
@@ -177,10 +188,11 @@ class Connection(object):
                 data["to"] = message["req_id"]
                 self.send(data) # Send response to handshake
                 # Sent crypt request to client
-                if self.crypt:
+                if self.crypt and not self.sock_wrapped:
                     server = (self.type == "in")
                     self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server))
                     self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server)
+                    self.sock_wrapped = True
             else:
                 self.server.handleRequest(self, message)
         else: # Old style response, no req_id definied
@@ -28,8 +28,8 @@ class ContentManager:
                 self.log.error("%s load error: %s" % (content_path, Debug.formatException(err)))
                 return False
         else:
-            self.log.error("Content.json not exits: %s" % content_path)
-            return False # Content.json not exits
+            self.log.error("Content.json not exist: %s" % content_path)
+            return False # Content.json not exist
 
 
         try:
@@ -55,7 +55,7 @@ class ContentManager:
                 if self.site.storage.isFile(include_inner_path): # Content.json exists, load it
                     success = self.loadContent(include_inner_path, add_bad_files=add_bad_files)
                     if success: changed += success # Add changed files
-                else: # Content.json not exits, add to changed files
+                else: # Content.json not exist, add to changed files
                     self.log.debug("Missing include: %s" % include_inner_path)
                     changed += [include_inner_path]
 
@@ -63,7 +63,7 @@ class ContentManager:
         if load_includes and "user_contents" in new_content:
             for relative_dir in os.listdir(content_path_dir):
                 include_inner_path = content_dir+relative_dir+"/content.json"
-                if not self.site.storage.isFile(include_inner_path): continue # Content.json not exits
+                if not self.site.storage.isFile(include_inner_path): continue # Content.json not exist
                 success = self.loadContent(include_inner_path, add_bad_files=add_bad_files, load_includes=False)
                 if success: changed += success # Add changed files
 
@@ -158,7 +158,7 @@ class ContentManager:
 
         try:
             if not content: content = self.site.storage.loadJson(inner_path) # Read the file if no content specificed
-        except: # Content.json not exits
+        except: # Content.json not exist
             return { "signers": [user_address], "user_address": user_address } # Return information that we know for sure
 
         """if not "cert_user_name" in content: # New file, unknown user
@@ -200,8 +200,8 @@ class ContentManager:
     # Return: The new content if filewrite = False
     def sign(self, inner_path = "content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None):
         content = self.contents.get(inner_path)
-        if not content: # Content not exits yet, load default one
-            self.log.info("File %s not exits yet, loading default values..." % inner_path)
+        if not content: # Content not exist yet, load default one
+            self.log.info("File %s not exist yet, loading default values..." % inner_path)
             content = {"files": {}, "signs": {}} # Default content.json
             if inner_path == "content.json": # Its the root content.json, add some more fields
                 content["title"] = "%s - ZeroNet_" % self.site.address

src/Db/Db.py
@@ -18,11 +18,11 @@ class Db:
 
     def connect(self):
         self.log.debug("Connecting to %s (sqlite version: %s)..." % (self.db_path, sqlite3.version))
-        if not os.path.isdir(self.db_dir): # Directory not exits yet
+        if not os.path.isdir(self.db_dir): # Directory not exist yet
             os.makedirs(self.db_dir)
             self.log.debug("Created Db path: %s" % self.db_dir)
         if not os.path.isfile(self.db_path):
-            self.log.debug("Db file not exits yet: %s" % self.db_path)
+            self.log.debug("Db file not exist yet: %s" % self.db_path)
         self.conn = sqlite3.connect(self.db_path)
         self.conn.row_factory = sqlite3.Row
         self.conn.isolation_level = None
@@ -53,9 +53,9 @@ class Db:
 
 
     # Get the table version
-    # Return: Table version or None if not exits
+    # Return: Table version or None if not exist
     def getTableVersion(self, table_name):
-        """if not self.table_names: # Get exitsing table names
+        """if not self.table_names: # Get existing table names
             res = self.cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
             self.table_names = [row["name"] for row in res]
         if table_name not in self.table_names:
@@ -65,7 +65,7 @@ class Db:
         if not self.db_keyvalues: # Get db keyvalues
             try:
                 res = self.cur.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
-            except sqlite3.OperationalError, err: # Table not exits
+            except sqlite3.OperationalError, err: # Table not exist
                 self.log.debug("Query error: %s" % err)
                 return False
 
@@ -169,7 +169,7 @@ class Db:
                 current_keyvalue_id[row["key"]] = row["keyvalue_id"]
 
             for key in map["to_keyvalue"]:
-                if key not in current_keyvalue: # Keyvalue not exits yet in the db
+                if key not in current_keyvalue: # Keyvalue not exist yet in the db
                     cur.execute("INSERT INTO keyvalue ?",
                         {"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
                     )
@@ -75,7 +75,7 @@ class DbCursor:
                 self.execute(index)
 
 
-    # Create table if not exits
+    # Create table if not exist
     # Return: True if updated
     def needTable(self, table, cols, indexes=None, version=1):
         current_version = self.db.getTableVersion(table)
@@ -20,7 +20,7 @@ def merge(merged_path):
     else:
         find_ext = [ext]
 
-    # If exits check the other files modification date
+    # If exist check the other files modification date
     if os.path.isfile(merged_path):
         merged_mtime = os.path.getmtime(merged_path)
     else:
@@ -325,7 +325,7 @@ class Site:
         self.log.debug("Cloning to %s, ignore dirs: %s" % (address, default_dirs))
 
         # Copy root content.json
-        if not new_site.storage.isFile("content.json") and not overwrite: # Content.json not exits yet, create a new one from source site
+        if not new_site.storage.isFile("content.json") and not overwrite: # Content.json not exist yet, create a new one from source site
             content_json = self.storage.loadJson("content.json")
             del content_json["domain"]
             content_json["title"] = "my"+content_json["title"]
@@ -355,7 +355,7 @@ class Site:
             if "-default" in file_inner_path:
                 file_path_dest = new_site.storage.getPath(file_inner_path.replace("-default", ""))
                 if new_site.storage.isFile(file_path_dest) and not overwrite: # Don't overwrite site files with default ones
-                    self.log.debug("[SKIP] Default file: %s (already exits)" % file_inner_path)
+                    self.log.debug("[SKIP] Default file: %s (already exist)" % file_inner_path)
                     continue
                 self.log.debug("[COPY] Default file: %s to %s..." % (file_inner_path, file_path_dest))
                 dest_dir = os.path.dirname(file_path_dest)
@@ -376,9 +376,9 @@ class Site:
         return new_site
 
 
-    # Check and download if file not exits
+    # Check and download if file not exist
     def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0):
-        if self.storage.isFile(inner_path) and not update: # File exits, no need to do anything
+        if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything
             return True
         elif self.settings["serving"] == False: # Site not serving
             return False
@@ -67,7 +67,7 @@ class SiteManager(object):
         from Site import Site
         new = False
         site = self.get(address)
-        if not site: # Site not exits yet
+        if not site: # Site not exist yet
             if not self.isAddress(address): return False # Not address: %s % address
             logging.debug("Added new site: %s" % address)
             site = Site(address)
@@ -27,7 +27,7 @@ class SiteStorage:
         schema = self.loadJson("dbschema.json")
         db_path = self.getPath(schema["db_file"])
         if check:
-            if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Not exits or null
+            if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Not exist or null
                 self.rebuildDb()
         self.db = Db(schema, db_path)
         if check and not self.db_checked:
@@ -46,7 +46,7 @@ class SiteStorage:
         if not self.db:
             self.log.debug("No database, waiting for dbschema.json...")
             self.site.needFile("dbschema.json", priority=1)
-            self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exits
+            self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist
             if self.has_db: self.openDb()
         return self.db
 
@@ -125,7 +125,7 @@ class SiteStorage:
     # Write content to file
     def write(self, inner_path, content):
         file_path = self.getPath(inner_path)
-        # Create dir if not exits
+        # Create dir if not exist
         file_dir = os.path.dirname(file_path)
         if not os.path.isdir(file_dir):
             os.makedirs(file_dir)
@@ -182,12 +182,12 @@ class SiteStorage:
         return os.path.getsize(self.getPath(inner_path))
 
 
-    # File exits
+    # File exist
     def isFile(self, inner_path):
         return os.path.isfile(self.getPath(inner_path))
 
 
-    # Dir exits
+    # Dir exist
     def isDir(self, inner_path):
         return os.path.isdir(self.getPath(inner_path))
 
@@ -217,9 +217,9 @@ class TestCase(unittest.TestCase):
         site = Site("1Hb9rY98TNnA6TYeozJv4w36bqEiBn6x8Y")
         user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
 
-        # File info for not exits file
-        self.assertEqual(site.content_manager.getFileInfo("data/users/notexits/data.json")["content_inner_path"], "data/users/notexits/content.json")
-        self.assertEqual(site.content_manager.getValidSigners("data/users/notexits/data.json"), ["notexits", "1Hb9rY98TNnA6TYeozJv4w36bqEiBn6x8Y"])
+        # File info for not exist file
+        self.assertEqual(site.content_manager.getFileInfo("data/users/notexist/data.json")["content_inner_path"], "data/users/notexist/content.json")
+        self.assertEqual(site.content_manager.getValidSigners("data/users/notexist/data.json"), ["notexist", "1Hb9rY98TNnA6TYeozJv4w36bqEiBn6x8Y"])
 
         # File info for exsitsing file
         file_info = site.content_manager.getFileInfo("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/data.json")
@@ -25,7 +25,7 @@ class User(object):
     # Save to data/users.json
     def save(self):
        users = json.load(open("%s/users.json" % config.data_dir))
-        if not self.master_address in users: users[self.master_address] = {} # Create if not exits
+        if not self.master_address in users: users[self.master_address] = {} # Create if not exist
         user_data = users[self.master_address]
         if self.master_seed: user_data["master_seed"] = self.master_seed
         user_data["sites"] = self.sites
@@ -62,7 +62,7 @@ class User(object):
         bip32_index = random.randrange(2**256) % 100000000
         site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index)
         site_address = CryptBitcoin.privatekeyToAddress(site_privatekey)
-        if site_address in self.sites: raise Exception("Random error: site exits!")
+        if site_address in self.sites: raise Exception("Random error: site exist!")
         # Save to sites
         self.getSiteData(site_address)
         self.sites[site_address]["privatekey"] = site_privatekey
@@ -104,7 +104,7 @@ class User(object):
             raise Exception("You already have certificate for this domain: %s/%s@%s" % (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain))
         elif self.certs.get(domain) == cert_node: # Same, not updated
             return None
-        else: # Not exits yet, add
+        else: # Not exist yet, add
             self.certs[domain] = cert_node
             self.save()
         return True
@@ -74,4 +74,6 @@ if not hasattr(_ssl, 'sslwrap'):
     import inspect
     _ssl.sslwrap = new_sslwrap
     __ssl__.SSLSocket = NewSSLSocket
-    logging.debug("Missing sslwrap, readded.")
+    logging.debug("Missing SSLwrap, readded.")
+
+logging.debug("Python SSL version: %s" % __ssl__.OPENSSL_VERSION)