Merge pull request #119 from mattseh/cleanup

Changing to PEP8 coding style

commit c565a12fc3
9 changed files with 1278 additions and 1282 deletions
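The diff is large but almost entirely mechanical restyling: old-style classes become new-style, spaces around keyword-argument defaults are dropped, long lines are wrapped, and some one-line compound statements are split out. A minimal before/after sketch of the conversions applied throughout, with names taken from the diff below:

    # Before
    class ContentManager:
        def loadContent(self, content_inner_path = "content.json", add_bad_files = True):
            ...

    # After (PEP8)
    class ContentManager(object):
        def loadContent(self, content_inner_path="content.json", add_bad_files=True):
            ...

    # One-liners like this (from getUserContentRules):
    if rules == False: return False  # User banned
    # become:
    if not rules:
        return False  # User banned

Note that the last conversion is not purely cosmetic: "not rules" is also true for an empty rules dict, while "rules == False" is not.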
@@ -3,514 +3,504 @@ from Debug import Debug

from Crypt import CryptHash
from Config import config


class ContentManager(object):
    def __init__(self, site):
        self.site = site
        self.log = self.site.log
        self.contents = {}  # Known content.json (without files and includes)
        self.loadContent(add_bad_files=False)
        self.site.settings["size"] = self.getTotalSize()

    # Load content.json to self.content
    # Return: Changed files ["index.html", "data/messages.json"]
    def loadContent(self, content_inner_path="content.json", add_bad_files=True, load_includes=True):
        content_inner_path = content_inner_path.strip("/")  # Remove / from beginning
        old_content = self.contents.get(content_inner_path)
        content_path = self.site.storage.getPath(content_inner_path)
        content_path_dir = self.toDir(self.site.storage.getPath(content_inner_path))
        content_dir = self.toDir(content_inner_path)

        if os.path.isfile(content_path):
            try:
                new_content = json.load(open(content_path))
            except Exception, err:
                self.log.error("%s load error: %s" % (content_path, Debug.formatException(err)))
                return False
        else:
            self.log.error("Content.json not exist: %s" % content_path)
            return False  # Content.json not exist

        try:
            # Get the files where the sha512 changed
            changed = []
            for relative_path, info in new_content.get("files", {}).items():
                if "sha512" in info:
                    hash_type = "sha512"
                else:  # Backward compatibility
                    hash_type = "sha1"

                new_hash = info[hash_type]
                if old_content and old_content["files"].get(relative_path):  # We have the file in the old content
                    old_hash = old_content["files"][relative_path].get(hash_type)
                else:  # The file is not in the old content
                    old_hash = None
                if old_hash != new_hash: changed.append(content_dir+relative_path)

            # Load includes
            if load_includes and "includes" in new_content:
                for relative_path, info in new_content["includes"].items():
                    include_inner_path = content_dir+relative_path
                    if self.site.storage.isFile(include_inner_path):  # Content.json exists, load it
                        success = self.loadContent(include_inner_path, add_bad_files=add_bad_files)
                        if success: changed += success  # Add changed files
                    else:  # Content.json not exist, add to changed files
                        self.log.debug("Missing include: %s" % include_inner_path)
                        changed += [include_inner_path]

            # Load blind user includes (all subdir)
            if load_includes and "user_contents" in new_content:
                for relative_dir in os.listdir(content_path_dir):
                    include_inner_path = content_dir+relative_dir+"/content.json"
                    if not self.site.storage.isFile(include_inner_path): continue  # Content.json not exist
                    success = self.loadContent(include_inner_path, add_bad_files=add_bad_files, load_includes=False)
                    if success: changed += success  # Add changed files

            # Update the content
            self.contents[content_inner_path] = new_content
        except Exception, err:
            self.log.error("Content.json parse error: %s" % Debug.formatException(err))
            return False  # Content.json parse error

        # Add changed files to bad files
        if add_bad_files:
            for inner_path in changed:
                self.site.bad_files[inner_path] = True

        if new_content["modified"] > self.site.settings.get("modified", 0):
            self.site.settings["modified"] = min(time.time()+60*10, new_content["modified"])  # Don't store modifications in the far future (more than 10 minutes)

        return changed

    # Get total size of site
    # Return: 32819 (size of files in kb)
    def getTotalSize(self, ignore=None):
        total_size = 0
        for inner_path, content in self.contents.iteritems():
            if inner_path == ignore: continue
            total_size += self.site.storage.getSize(inner_path)  # Size of content.json
            for file, info in content.get("files", {}).iteritems():
                total_size += info["size"]
        return total_size

    # Find the file info line from self.contents
    # Return: {"sha512": "c29d73d30ee8c9c1b5600e8a84447a6de15a3c3db6869aca4a2a578c1721f518", "size": 41, "content_inner_path": "content.json"}
    def getFileInfo(self, inner_path):
        dirs = inner_path.split("/")  # Parent dirs of content.json
        inner_path_parts = [dirs.pop()]  # Filename relative to content.json
        while True:
            content_inner_path = "%s/content.json" % "/".join(dirs)
            content = self.contents.get(content_inner_path.strip("/"))
            if content and "files" in content:  # Check if content.json exists
                back = content["files"].get("/".join(inner_path_parts))
                if back:
                    back["content_inner_path"] = content_inner_path
                    return back

            if content and "user_contents" in content:  # User dir
                back = content["user_contents"]
                # Content.json is in the users dir
                back["content_inner_path"] = re.sub("(.*)/.*?$", "\\1/content.json", inner_path)
                return back

            # No inner path in this dir, let's try the parent dir
            if dirs:
                inner_path_parts.insert(0, dirs.pop())
            else:  # No more parent dirs
                break

        return False  # Not found

    # Get rules for the file
    # Return: The rules for the file or False if not allowed
    def getRules(self, inner_path, content=None):
        if not inner_path.endswith("content.json"):  # Find the file's content.json first
            file_info = self.getFileInfo(inner_path)
            if not file_info: return False  # File not found
            inner_path = file_info["content_inner_path"]
        dirs = inner_path.split("/")  # Parent dirs of content.json
        inner_path_parts = [dirs.pop()]  # Filename relative to content.json
        inner_path_parts.insert(0, dirs.pop())  # Don't check in self dir
        while True:
            content_inner_path = "%s/content.json" % "/".join(dirs)
            parent_content = self.contents.get(content_inner_path.strip("/"))
            if parent_content and "includes" in parent_content:
                return parent_content["includes"].get("/".join(inner_path_parts))
            elif parent_content and "user_contents" in parent_content:
                return self.getUserContentRules(parent_content, inner_path, content)
            else:  # No inner path in this dir, let's try the parent dir
                if dirs:
                    inner_path_parts.insert(0, dirs.pop())
                else:  # No more parent dirs
                    break

        return False

    # Get rules for a user file
    # Return: The rules of the file or False if not allowed
    def getUserContentRules(self, parent_content, inner_path, content):
        user_contents = parent_content["user_contents"]
        user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1)  # Delivered for directory

        try:
            if not content: content = self.site.storage.loadJson(inner_path)  # Read the file if no content specified
        except (Exception, ):  # Content.json not exist
            return {"signers": [user_address], "user_address": user_address}  # Return information that we know for sure

        """if not "cert_user_name" in content: # New file, unknown user
            content["cert_auth_type"] = "unknown"
            content["cert_user_name"] = "unknown@unknown"
        """
        user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"])  # web/nofish@zeroid.bit

        rules = copy.copy(user_contents["permissions"].get(content["cert_user_id"], {}))  # Default rules by username
        if not rules:
            return False  # User banned
        if "signers" in rules:
            rules["signers"] = rules["signers"][:]  # Make copy of the signers
        for permission_pattern, permission_rules in user_contents["permission_rules"].items():  # Regexp rules
            if not re.match(permission_pattern, user_urn): continue  # Rule is not valid for user
            # Update rules if they are better than the current recorded ones
            for key, val in permission_rules.iteritems():
                if key not in rules:
                    if type(val) is list:
                        rules[key] = val[:]  # Make copy
                    else:
                        rules[key] = val
                elif type(val) is int:  # Int, update if larger
                    if val > rules[key]:
                        rules[key] = val
                elif hasattr(val, "startswith"):  # String, update if longer
                    if len(val) > len(rules[key]): rules[key] = val
                elif type(val) is list:  # List, append
                    rules[key] += val

        rules["cert_signers"] = user_contents["cert_signers"]  # Add valid cert signers
        if "signers" not in rules: rules["signers"] = []
        rules["signers"].append(user_address)  # Add user as valid signer
        rules["user_address"] = user_address

        return rules

    # Create and sign a content.json
    # Return: The new content if filewrite = False
    def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None):
        content = self.contents.get(inner_path)
        if not content:  # Content not exist yet, load default one
            self.log.info("File %s not exist yet, loading default values..." % inner_path)
            content = {"files": {}, "signs": {}}  # Default content.json
            if inner_path == "content.json":  # It's the root content.json, add some more fields
                content["title"] = "%s - ZeroNet_" % self.site.address
                content["description"] = ""
                content["signs_required"] = 1
                content["ignore"] = ""
            if extend: content.update(extend)  # Add custom fields

        directory = self.toDir(self.site.storage.getPath(inner_path))
        self.log.info("Opening site data directory: %s..." % directory)

        hashed_files = {}
        changed_files = [inner_path]
        for root, dirs, files in os.walk(directory):
            for file_name in files:
                file_path = self.site.storage.getPath("%s/%s" % (root.strip("/"), file_name))
                file_inner_path = re.sub(re.escape(directory), "", file_path)

                if file_name == "content.json": ignored = True
                elif content.get("ignore") and re.match(content["ignore"], file_inner_path): ignored = True
                elif file_name.startswith("."): ignored = True
                else: ignored = False

                if ignored:  # Ignore content.json, defined regexp and files starting with .
                    self.log.info("- [SKIPPED] %s" % file_inner_path)
                else:
                    sha512sum = CryptHash.sha512sum(file_path)  # Calculate sha512 sum of file
                    self.log.info("- %s (SHA512: %s)" % (file_inner_path, sha512sum))
                    hashed_files[file_inner_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
                    if file_inner_path in content["files"].keys() and hashed_files[file_inner_path]["sha512"] != content["files"][file_inner_path].get("sha512"):
                        changed_files.append(file_path)

        self.log.debug("Changed files: %s" % changed_files)
        if update_changed_files:
            for file_path in changed_files:
                self.site.storage.onUpdated(file_path)

        # Generate new content.json
        self.log.info("Adding timestamp and sha512sums to new content.json...")

        new_content = content.copy()  # Create a copy of current content.json
        new_content["files"] = hashed_files  # Add files sha512 hash
        new_content["modified"] = time.time()  # Add timestamp
        if inner_path == "content.json":
            new_content["address"] = self.site.address
            new_content["zeronet_version"] = config.version
            new_content["signs_required"] = content.get("signs_required", 1)

        from Crypt import CryptBitcoin
        self.log.info("Verifying private key...")
        privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
        valid_signers = self.getValidSigners(inner_path, new_content)
        if privatekey_address not in valid_signers:
            return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address))
        self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))

        if inner_path == "content.json" and privatekey_address == self.site.address:  # If signing using the root key sign the valid signers
            new_content["signers_sign"] = CryptBitcoin.sign("%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey)
            if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none")

        self.log.info("Signing %s..." % inner_path)

        if "signs" in new_content: del(new_content["signs"])  # Delete old signs
        if "sign" in new_content: del(new_content["sign"])  # Delete old sign (backward compatibility)

        sign_content = json.dumps(new_content, sort_keys=True)
        sign = CryptBitcoin.sign(sign_content, privatekey)
        #new_content["signs"] = content.get("signs", {}) # TODO: Multisig
        if sign:  # If signing is successful (not an old address)
            new_content["signs"] = {}
            new_content["signs"][privatekey_address] = sign

        if inner_path == "content.json":  # To root content.json add old format sign for backward compatibility
            oldsign_content = json.dumps(new_content, sort_keys=True)
            new_content["sign"] = CryptBitcoin.signOld(oldsign_content, privatekey)

        if not self.validContent(inner_path, new_content):
            self.log.error("Sign failed: Invalid content")
            return False

        if filewrite:
            self.log.info("Saving to %s..." % inner_path)
            self.site.storage.writeJson(inner_path, new_content)

        self.log.info("File %s signed!" % inner_path)

        if filewrite:  # Written to file
            return True
        else:  # Return the new content
            return new_content

    # The valid signers of content.json file
    # Return: ["1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6", "13ReyhCsjhpuCVahn1DHdf6eMqqEVev162"]
    def getValidSigners(self, inner_path, content=None):
        valid_signers = []
        if inner_path == "content.json":  # Root content.json
            if "content.json" in self.contents and "signers" in self.contents["content.json"]:
                valid_signers += self.contents["content.json"]["signers"].keys()
        else:
            rules = self.getRules(inner_path, content)
            if rules and "signers" in rules:
                valid_signers += rules["signers"]

        if self.site.address not in valid_signers:
            valid_signers.append(self.site.address)  # Site address always valid
        return valid_signers

    # Return: The required number of valid signs for the content.json
    def getSignsRequired(self, inner_path, content=None):
        return 1  # Todo: Multisig

    def verifyCert(self, inner_path, content):
        from Crypt import CryptBitcoin

        rules = self.getRules(inner_path, content)
        if not rules.get("cert_signers"): return True  # Does not need cert

        name, domain = content["cert_user_id"].split("@")
        cert_address = rules["cert_signers"].get(domain)
        if not cert_address:  # Cert signer not allowed
            self.log.error("Invalid cert signer: %s" % domain)
            return False
        return CryptBitcoin.verify("%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"])

    # Checks if the content.json content is valid
    # Return: True or False
    def validContent(self, inner_path, content):
        content_size = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])  # Size of new content
        site_size = self.getTotalSize(ignore=inner_path)+content_size  # Site size without old content
        if site_size > self.site.settings.get("size", 0): self.site.settings["size"] = site_size  # Save to settings if larger

        site_size_limit = self.site.getSizeLimit()*1024*1024

        # Check total site size limit
        if site_size > site_size_limit:
            self.log.error("%s: Site too large %s > %s, aborting task..." % (inner_path, site_size, site_size_limit))
            task = self.site.worker_manager.findTask(inner_path)
            if task:  # Don't try to download from other peers
                self.site.worker_manager.failTask(task)
            return False

        if inner_path == "content.json": return True  # Root content.json is passed

        # Load include details
        rules = self.getRules(inner_path, content)
        if not rules:
            self.log.error("%s: No rules" % inner_path)
            return False

        # Check include size limit
        if rules.get("max_size"):  # Include size limit
            if content_size > rules["max_size"]:
                self.log.error("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"]))
                return False

        # Check if content includes allowed
        if rules.get("includes_allowed") == False and content.get("includes"):
            self.log.error("%s: Includes not allowed" % inner_path)
            return False  # Includes not allowed

        # Filename limit
        if rules.get("files_allowed"):
            for file_inner_path in content["files"].keys():
                if not re.match("^%s$" % rules["files_allowed"], file_inner_path):
                    self.log.error("%s: File not allowed" % file_inner_path)
                    return False

        return True  # All good

    # Verify file validity
    # Return: None = Same as before, False = Invalid, True = Valid
    def verifyFile(self, inner_path, file, ignore_same=True):
        if inner_path.endswith("content.json"):  # content.json: Check using sign
            from Crypt import CryptBitcoin
            try:
                new_content = json.load(file)
                if inner_path in self.contents:
                    old_content = self.contents.get(inner_path)
                    # Check if it's newer than ours
                    if old_content["modified"] == new_content["modified"] and ignore_same:  # Ignore, have the same content.json
                        return None
                    elif old_content["modified"] > new_content["modified"]:  # We have newer
                        self.log.debug("We have newer %s (Our: %s, Sent: %s)" % (inner_path, old_content["modified"], new_content["modified"]))
                        gevent.spawn(self.site.publish, inner_path=inner_path)  # Try to fix the broken peers
                        return False
                if new_content["modified"] > time.time()+60*60*24:  # Content modified in the far future (allow 1 day window)
                    self.log.error("%s modify is in the future!" % inner_path)
                    return False
                # Check sign
                sign = new_content.get("sign")
                signs = new_content.get("signs", {})
                if "sign" in new_content: del(new_content["sign"])  # The file signed without the sign
                if "signs" in new_content: del(new_content["signs"])  # The file signed without the signs
                sign_content = json.dumps(new_content, sort_keys=True)  # Dump the json to string to remove whitespace

                if not self.validContent(inner_path, new_content): return False  # Content not valid (files too large, invalid files)

                if signs:  # New style signing
                    valid_signers = self.getValidSigners(inner_path, new_content)
                    signs_required = self.getSignsRequired(inner_path, new_content)

                    if inner_path == "content.json" and len(valid_signers) > 1:  # Check signers_sign on root content.json
                        if not CryptBitcoin.verify("%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]):
                            self.log.error("%s invalid signers_sign!" % inner_path)
                            return False

                    if inner_path != "content.json" and not self.verifyCert(inner_path, new_content):  # Check if cert valid
                        self.log.error("%s invalid cert!" % inner_path)
                        return False

                    valid_signs = 0
                    for address in valid_signers:
                        if address in signs: valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
                        if valid_signs >= signs_required: break  # Break if we have enough signs

                    return valid_signs >= signs_required
                else:  # Old style signing
                    return CryptBitcoin.verify(sign_content, self.site.address, sign)

            except Exception, err:
                self.log.error("Verify sign error: %s" % Debug.formatException(err))
                return False

        else:  # Check using sha512 hash
            file_info = self.getFileInfo(inner_path)
            if file_info:
                if "sha512" in file_info:
                    hash_valid = CryptHash.sha512sum(file) == file_info["sha512"]
                elif "sha1" in file_info:  # Backward compatibility
                    hash_valid = CryptHash.sha1sum(file) == file_info["sha1"]
                else:
                    hash_valid = False
                if file_info["size"] != file.tell():
                    self.log.error("%s file size does not match %s <> %s, Hash: %s" % (inner_path, file.tell(),
                                                                                       file_info["size"], hash_valid))
                    return False
                return hash_valid

            else:  # File not in content.json
                self.log.error("File not in content.json: %s" % inner_path)
                return False

    # Get dir from file
    # Return: data/site/content.json -> data/site
    def toDir(self, inner_path):
        file_dir = re.sub("[^/]*?$", "", inner_path).strip("/")
        if file_dir: file_dir += "/"  # Add / at end if it's not the root
        return file_dir


def testSign():
    global config
    from Config import config
    from Site import Site
    site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
    content_manager = ContentManager(site)
    content_manager.sign("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR")


def testVerify():
    from Config import config
    from Site import Site
    #site = Site("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
    site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")

    content_manager = ContentManager(site)
    print "Loaded contents:", content_manager.contents.keys()

    file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json"))
    print "content.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False)

    file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json"))
    print "messages.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False)


def testInfo():
    from Config import config
    from Site import Site
    site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")

    content_manager = ContentManager(site)
    print content_manager.contents.keys()

    print content_manager.getFileInfo("index.html")
    print content_manager.getIncludeInfo("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json")
    print content_manager.getValidSigners("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json")
    print content_manager.getValidSigners("data/users/content.json")
    print content_manager.getValidSigners("content.json")


if __name__ == "__main__":
    import os, sys, logging
    os.chdir("../..")
    sys.path.insert(0, os.path.abspath("."))
    sys.path.insert(0, os.path.abspath("src"))
    logging.basicConfig(level=logging.DEBUG)
    from Debug import Debug
    from Crypt import CryptHash

    #testSign()
    testVerify()
    #testInfo()
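For anything other than a content.json file, verifyFile above reduces to a streamed hash comparison against the digest recorded in content.json. A minimal standalone sketch of that check, assuming CryptHash.sha512sum is a plain hashlib wrapper (the digests stored in content.json are 64 hex characters, so truncation of SHA-512 to 256 bits is assumed here):

    import hashlib

    def sha512sum(file_object, block_size=65536):
        # Stream the file through SHA-512 and keep the first 64 hex characters,
        # matching the digest length shown in the content.json example above.
        digest = hashlib.sha512()
        for block in iter(lambda: file_object.read(block_size), ""):
            digest.update(block)
        return digest.hexdigest()[:64]

    # Equivalent of the hash branch in verifyFile:
    # hash_valid = sha512sum(file) == file_info["sha512"]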
@@ -1,5 +1,12 @@
# Included modules
import socket
import struct
import os
from cStringIO import StringIO

# Third party modules
import gevent

from Debug import Debug
from Config import config
from util import RateLimit, StreamingMsgpack

@@ -8,183 +15,179 @@ FILE_BUFF = 1024*512


# Request from me
class FileRequest(object):
    __slots__ = ("server", "connection", "req_id", "sites", "log", "responded")

    def __init__(self, server, connection):
        self.server = server
        self.connection = connection

        self.req_id = None
        self.sites = self.server.sites
        self.log = server.log
        self.responded = False  # Responded to the request

    def unpackAddress(self, packed):
        return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]

    def send(self, msg, streaming=False):
        if not self.connection.closed:
            self.connection.send(msg, streaming)

    def response(self, msg, streaming=False):
        if self.responded:
            self.log.debug("Req id %s already responded" % self.req_id)
            return
        if not isinstance(msg, dict):  # If msg not a dict create a {"body": msg}
            msg = {"body": msg}
        msg["cmd"] = "response"
        msg["to"] = self.req_id
        self.responded = True
        self.send(msg, streaming=streaming)

    # Route file requests
    def route(self, cmd, req_id, params):
        self.req_id = req_id

        if cmd == "getFile":
            self.actionGetFile(params)
        elif cmd == "update":
            event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
            if not RateLimit.isAllowed(event):  # There was already an update for this file in the last 10 seconds
                self.response({"ok": "File update queued"})
            # If called more than once within 10 sec only keep the last update
            RateLimit.callAsync(event, 10, self.actionUpdate, params)
        elif cmd == "pex":
            self.actionPex(params)
        elif cmd == "listModified":
            self.actionListModified(params)
        elif cmd == "ping":
            self.actionPing()
        else:
            self.actionUnknown(cmd, params)

    # Update a site file request
    def actionUpdate(self, params):
        site = self.sites.get(params["site"])
        if not site or not site.settings["serving"]:  # Site unknown or not serving
            self.response({"error": "Unknown site"})
            return False
        if site.settings["own"] and params["inner_path"].endswith("content.json"):
            self.log.debug("Someone trying to push a file to own site %s, reload local %s first" % (site.address, params["inner_path"]))
            changed = site.content_manager.loadContent(params["inner_path"], add_bad_files=False)
            if changed:  # Content.json changed locally
                site.settings["size"] = site.content_manager.getTotalSize()  # Update site size
        buff = StringIO(params["body"])
        valid = site.content_manager.verifyFile(params["inner_path"], buff)
        if valid == True:  # Valid and changed
            self.log.info("Update for %s looks valid, saving..." % params["inner_path"])
            buff.seek(0)
            site.storage.write(params["inner_path"], buff)

            site.onFileDone(params["inner_path"])  # Trigger filedone

            if params["inner_path"].endswith("content.json"):  # Download every changed file from peer
                peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True)  # Add or get peer
                site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"]), "publish_%s" % params["inner_path"])  # On complete publish to other peers
                gevent.spawn(
                    lambda: site.downloadContent(params["inner_path"], peer=peer)
                )  # Load new content file and download changed files in new thread

            self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})

        elif valid == None:  # Not changed
            peer = site.addPeer(*params["peer"], return_peer=True)  # Add or get peer
            if peer:
                self.log.debug("Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)))
                for task in site.worker_manager.tasks:  # Add new peer to every ongoing task
                    if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)  # Download file from this peer too if it's peer locked

            self.response({"ok": "File not changed"})

        else:  # Invalid sign or sha1 hash
            self.log.debug("Update for %s is invalid" % params["inner_path"])
            self.response({"error": "File invalid"})

    # Send file content request
    def actionGetFile(self, params):
        site = self.sites.get(params["site"])
        if not site or not site.settings["serving"]:  # Site unknown or not serving
            self.response({"error": "Unknown site"})
            return False
        try:
            file_path = site.storage.getPath(params["inner_path"])
            if config.debug_socket: self.log.debug("Opening file: %s" % file_path)
            with StreamingMsgpack.FilePart(file_path, "rb") as file:
                file.seek(params["location"])
                file.read_bytes = FILE_BUFF
                back = {"body": file,
                        "size": os.fstat(file.fileno()).st_size,
                        "location": min(file.tell()+FILE_BUFF, os.fstat(file.fileno()).st_size)
                        }
                if config.debug_socket:
                    self.log.debug("Sending file %s from position %s to %s" % (file_path,
                                                                               params["location"],
                                                                               back["location"]))
                self.response(back, streaming=True)
                if config.debug_socket:
                    self.log.debug("File %s sent" % file_path)

            # Add peer to site if not added before
            connected_peer = site.addPeer(self.connection.ip, self.connection.port)
            if connected_peer:  # Just added
                connected_peer.connect(self.connection)  # Assign current connection to peer

        except Exception, err:
            self.log.debug("GetFile read error: %s" % Debug.formatException(err))
            self.response({"error": "File read error: %s" % Debug.formatException(err)})
            return False

    # Peer exchange request
    def actionPex(self, params):
        site = self.sites.get(params["site"])
        if not site or not site.settings["serving"]:  # Site unknown or not serving
            self.response({"error": "Unknown site"})
            return False

        got_peer_keys = []
        added = 0
        connected_peer = site.addPeer(self.connection.ip, self.connection.port)  # Add requester peer to site
        if connected_peer:  # Just added
            added += 1
            connected_peer.connect(self.connection)  # Assign current connection to peer

        for peer in params["peers"]:  # Add sent peers to site
            address = self.unpackAddress(peer)
            got_peer_keys.append("%s:%s" % address)
            if site.addPeer(*address): added += 1
        # Send back peers that are not in the sent list and connectable (not port 0)
        packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
        if added:
            site.worker_manager.onPeers()
            self.log.debug("Added %s peers to %s using pex, sending back %s" % (added, site, len(packed_peers)))
        self.response({"peers": packed_peers})

    # Get modified content.json files since
    def actionListModified(self, params):
        site = self.sites.get(params["site"])
        if not site or not site.settings["serving"]:  # Site unknown or not serving
            self.response({"error": "Unknown site"})
            return False
        modified_files = {inner_path: content["modified"]
                          for inner_path, content in site.content_manager.contents.iteritems()
                          if content["modified"] > params["since"]}
|
connected_peer.connect(self.connection) # Assign current connection to peer
|
||||||
|
|
||||||
except Exception, err:
|
self.response({"modified_files": modified_files})
|
||||||
self.log.debug("GetFile read error: %s" % Debug.formatException(err))
|
|
||||||
self.response({"error": "File read error: %s" % Debug.formatException(err)})
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
# Send a simple Pong! answer
|
||||||
|
def actionPing(self):
|
||||||
|
self.response("Pong!")
|
||||||
|
|
||||||
# Peer exchange request
|
# Unknown command
|
||||||
def actionPex(self, params):
|
def actionUnknown(self, cmd, params):
|
||||||
site = self.sites.get(params["site"])
|
self.response({"error": "Unknown command: %s" % cmd})
|
||||||
if not site or not site.settings["serving"]: # Site unknown or not serving
|
|
||||||
self.response({"error": "Unknown site"})
|
|
||||||
return False
|
|
||||||
|
|
||||||
got_peer_keys = []
|
|
||||||
added = 0
|
|
||||||
connected_peer = site.addPeer(self.connection.ip, self.connection.port) # Add requester peer to site
|
|
||||||
if connected_peer: # Just added
|
|
||||||
added +=1
|
|
||||||
connected_peer.connect(self.connection) # Assign current connection to peer
|
|
||||||
|
|
||||||
for peer in params["peers"]: # Add sent peers to site
|
|
||||||
address = self.unpackAddress(peer)
|
|
||||||
got_peer_keys.append("%s:%s" % address)
|
|
||||||
if site.addPeer(*address): added += 1
|
|
||||||
# Send back peers that is not in the sent list and connectable (not port 0)
|
|
||||||
packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
|
|
||||||
if added:
|
|
||||||
site.worker_manager.onPeers()
|
|
||||||
self.log.debug("Added %s peers to %s using pex, sending back %s" % (added, site, len(packed_peers)))
|
|
||||||
self.response({"peers": packed_peers})
|
|
||||||
|
|
||||||
|
|
||||||
# Get modified content.json files since
|
|
||||||
def actionListModified(self, params):
|
|
||||||
site = self.sites.get(params["site"])
|
|
||||||
if not site or not site.settings["serving"]: # Site unknown or not serving
|
|
||||||
self.response({"error": "Unknown site"})
|
|
||||||
return False
|
|
||||||
modified_files = {inner_path: content["modified"] for inner_path, content in site.content_manager.contents.iteritems() if content["modified"] > params["since"]}
|
|
||||||
|
|
||||||
# Add peer to site if not added before
|
|
||||||
connected_peer = site.addPeer(self.connection.ip, self.connection.port)
|
|
||||||
if connected_peer: # Just added
|
|
||||||
connected_peer.connect(self.connection) # Assign current connection to peer
|
|
||||||
|
|
||||||
self.response({"modified_files": modified_files})
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Send a simple Pong! answer
|
|
||||||
def actionPing(self):
|
|
||||||
self.response("Pong!")
|
|
||||||
|
|
||||||
|
|
||||||
# Unknown command
|
|
||||||
def actionUnknown(self, cmd, params):
|
|
||||||
self.response({"error": "Unknown command: %s" % cmd})
|
|
||||||
|
|
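A note on the 6-byte packed peer address handed around by actionPex above (and produced by Peer.packAddress below): four raw IPv4 bytes followed by a two-byte port in the machine's native byte order, since the "H" struct format carries no byte-order prefix. A standalone round-trip sketch, not part of the diff:

import socket
import struct

packed = socket.inet_aton("192.168.1.2") + struct.pack("H", 15441)  # 4 ip bytes + 2 port bytes
ip = socket.inet_ntoa(packed[0:4])
port = struct.unpack_from("H", packed, 4)[0]
assert (ip, port) == ("192.168.1.2", 15441) and len(packed) == 6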
345 src/Peer/Peer.py

@ -5,207 +5,198 @@ from Debug import Debug
# Communicate remote peers
class Peer(object):
    __slots__ = ("ip", "port", "site", "key", "connection_server", "connection", "last_found", "last_response",
                 "last_ping", "added", "connection_error", "hash_failed", "download_bytes", "download_time")

    def __init__(self, ip, port, site=None):
        self.ip = ip
        self.port = port
        self.site = site
        self.key = "%s:%s" % (ip, port)
        self.connection_server = sys.modules["main"].file_server

        self.connection = None
        self.last_found = None  # Time of last found in the torrent tracker
        self.last_response = None  # Time of last successful response from peer
        self.last_ping = None  # Last response time for ping
        self.added = time.time()

        self.connection_error = 0  # Series of connection error
        self.hash_failed = 0  # Number of bad files from peer
        self.download_bytes = 0  # Bytes downloaded
        self.download_time = 0  # Time spent to download

    def log(self, text):
        if self.site:
            self.site.log.debug("%s:%s %s" % (self.ip, self.port, text))
        else:
            logging.debug("%s:%s %s" % (self.ip, self.port, text))

    # Connect to host
    def connect(self, connection=None):
        if self.connection:
            self.log("Getting connection (Closing %s)..." % self.connection)
            self.connection.close()
        else:
            self.log("Getting connection...")

        if connection:  # Connection specified
            self.connection = connection
        else:  # Try to find from connection pool or create new connection
            self.connection = None

            try:
                self.connection = self.connection_server.getConnection(self.ip, self.port)
            except Exception, err:
                self.onConnectionError()
                self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
                         (Debug.formatException(err), self.connection_error, self.hash_failed))
                self.connection = None

    # Check if we have connection to peer
    def findConnection(self):
        if self.connection and self.connection.connected:  # We have connection to peer
            return self.connection
        else:  # Try to find from other sites connections
            self.connection = self.connection_server.getConnection(self.ip, self.port, create=False)  # Do not create new connection if not found
        return self.connection

    def __str__(self):
        return "Peer %-12s" % self.ip

    def __repr__(self):
        return "<%s>" % self.__str__()

    # Peer ip:port to packed 6byte format
    def packAddress(self):
        return socket.inet_aton(self.ip) + struct.pack("H", self.port)

    def unpackAddress(self, packed):
        return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]

    # Found a peer on tracker
    def found(self):
        self.last_found = time.time()

    # Send a command to peer
    def request(self, cmd, params={}):
        if not self.connection or self.connection.closed:
            self.connect()
            if not self.connection:
                self.onConnectionError()
                return None  # Connection failed

        # if cmd != "ping" and self.last_response and time.time() - self.last_response > 20*60:  # If last response is older than 20 minute, ping first to see if still alive
        #     if not self.ping(): return None

        for retry in range(1, 3):  # Retry 3 times
            # if config.debug_socket: self.log.debug("sendCmd: %s %s" % (cmd, params.get("inner_path")))
            try:
                response = self.connection.request(cmd, params)
                if not response:
                    raise Exception("Send error")
                # if config.debug_socket: self.log.debug("Got response to: %s" % cmd)
                if "error" in response:
                    self.log("%s error: %s" % (cmd, response["error"]))
                    self.onConnectionError()
                else:  # Successful request, reset connection error num
                    self.connection_error = 0
                self.last_response = time.time()
                return response
            except Exception, err:
                if type(err).__name__ == "Notify":  # Greenlet killed by worker
                    self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd))
                    break
                else:
                    self.onConnectionError()
                    self.log("%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err),
                                                                                        self.connection_error,
                                                                                        self.hash_failed, retry))
                    time.sleep(1 * retry)
                    self.connect()
        return None  # Failed after 4 retry

    # Get a file content from peer
    def getFile(self, site, inner_path):
        location = 0
        buff = StringIO()
        s = time.time()
        while True:  # Read in 512k parts
            back = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location})  # Get file content from last location
            if not back or "body" not in back:  # Error
                return False

            buff.write(back["body"])
            back["body"] = None  # Save memory
            if back["location"] == back["size"]:  # End of file
                break
            else:
                location = back["location"]
        self.download_bytes += back["location"]
        self.download_time += (time.time() - s)
        buff.seek(0)
        return buff

    # Send a ping request
    def ping(self):
        response_time = None
        for retry in range(1, 3):  # Retry 3 times
            s = time.time()
            with gevent.Timeout(10.0, False):  # 10 sec timeout, don't raise exception
                response = self.request("ping")

                if response and "body" in response and response["body"] == "Pong!":
                    response_time = time.time() - s
                    break  # All fine, exit from for loop
            # Timeout reached or bad response
            self.onConnectionError()
            self.connect()
            time.sleep(1)

        if response_time:
            self.log("Ping: %.3f" % response_time)
        else:
            self.log("Ping failed")
        self.last_ping = response_time
        return response_time

    # Request peer exchange from peer
    def pex(self, site=None, need_num=5):
        if not site:
            site = self.site  # If no site defined request peers for this site
        # give him/her 5 connectible peers
        packed_peers = [peer.packAddress() for peer in self.site.getConnectablePeers(5)]
        response = self.request("pex", {"site": site.address, "peers": packed_peers, "need": need_num})
        if not response or "error" in response:
            return False
        added = 0
        for peer in response.get("peers", []):
            address = self.unpackAddress(peer)
            if site.addPeer(*address):
                added += 1
        if added:
            self.log("Added peers using pex: %s" % added)
        return added

    # List modified files since the date
    # Return: {inner_path: modification date,...}
    def listModified(self, since):
        return self.request("listModified", {"since": since, "site": self.site.address})

    # Stop and remove from site
    def remove(self):
        self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed))
        if self.key in self.site.peers:
            del(self.site.peers[self.key])
        if self.connection:
            self.connection.close()

    # - EVENTS -

    # On connection error
    def onConnectionError(self):
        self.connection_error += 1
        if self.connection_error >= 3:  # Dead peer
            self.remove()

    # Done working with peer
    def onWorkerDone(self):
        pass
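To make Peer's flow concrete, a hedged usage sketch (the address is made up; it assumes a running connection server and a Site instance, as set up by the peerPing/peerGetFile actions in src/main.py further down):

peer = Peer("192.168.1.2", 15441, site=my_site)  # my_site: a hypothetical Site instance
if peer.ping():                                  # Retries internally, sets peer.last_ping on success
    buff = peer.getFile(my_site.address, "content.json")  # Full file as StringIO, or False
    if buff:
        print buff.read()
peer.pex(need_num=5)                             # Trade peer lists for the bound site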
229 src/User/User.py

@ -6,133 +6,124 @@ from Config import config
@PluginManager.acceptPlugins
class User(object):
    def __init__(self, master_address=None, master_seed=None, data={}):
        if master_seed:
            self.master_seed = master_seed
            self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
        elif master_address:
            self.master_address = master_address
            self.master_seed = data.get("master_seed")
        else:
            self.master_seed = CryptBitcoin.newSeed()
            self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
        self.sites = data.get("sites", {})
        self.certs = data.get("certs", {})

        self.log = logging.getLogger("User:%s" % self.master_address)

    # Save to data/users.json
    def save(self):
        users = json.load(open("%s/users.json" % config.data_dir))
        if self.master_address not in users:
            users[self.master_address] = {}  # Create if not exist
        user_data = users[self.master_address]
        if self.master_seed:
            user_data["master_seed"] = self.master_seed
        user_data["sites"] = self.sites
        user_data["certs"] = self.certs
        open("%s/users.json" % config.data_dir, "w").write(json.dumps(users, indent=2, sort_keys=True))
        self.log.debug("Saved")

    def getAddressAuthIndex(self, address):
        return int(address.encode("hex"), 16)

    # Get user site data
    # Return: {"auth_address": "xxx", "auth_privatekey": "xxx"}
    def getSiteData(self, address, create=True):
        if address not in self.sites:  # Generate new BIP32 child key based on site address
            if not create:
                return {"auth_address": None, "auth_privatekey": None}  # Dont create user yet
            s = time.time()
            address_id = self.getAddressAuthIndex(address)  # Convert site address to int
            auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
            self.sites[address] = {
                "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey),
                "auth_privatekey": auth_privatekey
            }
            self.save()
            self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s))
        return self.sites[address]

    # Get data for a new, unique site
    # Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}]
    def getNewSiteData(self):
        import random
        bip32_index = random.randrange(2 ** 256) % 100000000
        site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index)
        site_address = CryptBitcoin.privatekeyToAddress(site_privatekey)
        if site_address in self.sites:
            raise Exception("Random error: site exist!")
        # Save to sites
        self.getSiteData(site_address)
        self.sites[site_address]["privatekey"] = site_privatekey
        self.save()
        return site_address, bip32_index, self.sites[site_address]

    # Get BIP32 address from site address
    # Return: BIP32 auth address
    def getAuthAddress(self, address, create=True):
        cert = self.getCert(address)
        if cert:
            return cert["auth_address"]
        else:
            return self.getSiteData(address, create)["auth_address"]

    def getAuthPrivatekey(self, address, create=True):
        cert = self.getCert(address)
        if cert:
            return cert["auth_privatekey"]
        else:
            return self.getSiteData(address, create)["auth_privatekey"]

    # Add cert for the user
    def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
        domain = domain.lower()
        # Find privatekey by auth address
        auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0]
        cert_node = {
            "auth_address": auth_address,
            "auth_privatekey": auth_privatekey,
            "auth_type": auth_type,
            "auth_user_name": auth_user_name,
            "cert_sign": cert_sign
        }
        # Check if we have already cert for that domain and its not the same
        if self.certs.get(domain) and self.certs[domain] != cert_node:
            raise Exception("You already have certificate for this domain: %s/%s@%s" % (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain))
        elif self.certs.get(domain) == cert_node:  # Same, not updated
            return None
        else:  # Not exist yet, add
            self.certs[domain] = cert_node
            self.save()
            return True

    def setCert(self, address, domain):
        site_data = self.getSiteData(address)
        if domain:
            site_data["cert"] = domain
        else:
            del site_data["cert"]
        self.save()
        return site_data

    # Get cert for the site address
    # Return: { "auth_address": ..., "auth_privatekey": ..., "auth_type": "web", "auth_user_name": "nofish", "cert_sign": ... } or None
    def getCert(self, address):
        site_data = self.getSiteData(address, create=False)
        if not site_data or "cert" not in site_data:
            return None  # Site dont have cert
        return self.certs.get(site_data["cert"])

    # Get cert user name for the site address
    # Return: user@certprovider.bit or None
    def getCertUserId(self, address):
        site_data = self.getSiteData(address, create=False)
        if not site_data or "cert" not in site_data:
            return None  # Site dont have cert
        cert = self.certs.get(site_data["cert"])
        if cert:
            return cert["auth_user_name"] + "@" + site_data["cert"]
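The per-site auth keypair above is deterministic: getSiteData converts the site address string into a single large integer and uses it as the BIP32 child index, so the same master seed always reproduces the same auth key for a given site. A small sketch of the index step (Python 2; the address is made up):

site_address = "1ExampleSiteAddressxxxxxxxxxxxxxx"            # hypothetical site address
address_id = int(site_address.encode("hex"), 16)              # address bytes read as one big integer
# auth_privatekey = CryptBitcoin.hdPrivatekey(master_seed, address_id)  # same inputs -> same key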
@ -1,4 +1,9 @@
# Included modules
import os
import json
import logging

# ZeroNet Modules
from User import User
from Plugin import PluginManager
from Config import config

@ -6,74 +11,73 @@ from Config import config
@PluginManager.acceptPlugins
class UserManager(object):
    def __init__(self):
        self.users = {}

    # Load all user from data/users.json
    def load(self):
        if not self.users:
            self.users = {}

        user_found = []
        added = 0
        # Load new users
        for master_address, data in json.load(open("%s/users.json" % config.data_dir)).items():
            if master_address not in self.users:
                user = User(master_address, data=data)
                self.users[master_address] = user
                added += 1
            user_found.append(master_address)

        # Remove deleted adresses
        for master_address in self.users.keys():
            if master_address not in user_found:
                del(self.users[master_address])
                logging.debug("Removed user: %s" % master_address)

        if added:
            logging.debug("UserManager added %s users" % added)

    # Create new user
    # Return: User
    def create(self, master_address=None, master_seed=None):
        user = User(master_address, master_seed)
        logging.debug("Created user: %s" % user.master_address)
        if user.master_address:  # If successfully created
            self.users[user.master_address] = user
            user.save()
        return user

    # List all users from data/users.json
    # Return: {"usermasteraddr": User}
    def list(self):
        if self.users == {}:  # Not loaded yet
            self.load()
        return self.users

    # Get user based on master_address
    # Return: User or None
    def get(self, master_address=None):
        users = self.list()
        if users:
            return users.values()[0]  # Single user mode, always return the first
        else:
            return None


user_manager = UserManager()  # Singleton


# Debug: Reload User.py
def reloadModule():
    return "Not used"

    import imp
    global User, UserManager, user_manager
    User = imp.load_source("User", "src/User/User.py").User  # Reload source
    # module = imp.load_source("UserManager", "src/User/UserManager.py")  # Reload module
    # UserManager = module.UserManager
    # user_manager = module.user_manager
    # Reload users
    user_manager = UserManager()
    user_manager.load()
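Callers are meant to go through the module-level user_manager singleton rather than construct UserManager themselves; in the current single-user mode get() simply returns the first loaded user. A hedged usage sketch (import path assumed from the src layout):

from User import UserManager

user = UserManager.user_manager.get()         # First (and only) user, loading users.json on demand
if not user:
    user = UserManager.user_manager.create()  # Generates a new master seed and saves it
site_data = user.getSiteData("1ExampleSiteAddressxxxxxxxxxxxxxx")  # hypothetical address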
@ -2,7 +2,7 @@ import gevent, time, logging, shutil, os
from Peer import Peer
from Debug import Debug


class Worker(object):
    def __init__(self, manager, peer):
        self.manager = manager
        self.peer = peer
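The one substantive change in this hunk, repeated across the whole commit, is `class Worker:` becoming `class Worker(object):`. In Python 2 that opts in to new-style classes, which is what makes descriptors, cooperative super(), and __slots__ (used by Peer above) actually work. A quick illustration of the difference:

class Old:               # Python 2 classic class
    __slots__ = ("x",)   # Silently ignored: instances still get a __dict__

class New(object):       # New-style class
    __slots__ = ("x",)   # Enforced: no per-instance __dict__

Old().y = 1              # Works; __slots__ had no effect
try:
    New().y = 1
except AttributeError:
    pass                 # Raised: "y" is not a declared slot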
462 src/main.py

@ -1,5 +1,19 @@
# Included modules
import os
import sys
import time
import urllib2

# Third party modules
import gevent
from gevent import monkey

# ZeroNet modules
import logging

update_after_shutdown = False  # If set True then update and restart zeronet after main loop ended

# Load config
from Config import config

@ -7,41 +21,45 @@ from Config import config

# Create necessary files and dirs
if not os.path.isdir(config.log_dir): os.mkdir(config.log_dir)
if not os.path.isdir(config.data_dir): os.mkdir(config.data_dir)
if not os.path.isfile("%s/sites.json" % config.data_dir):
    open("%s/sites.json" % config.data_dir, "w").write("{}")
if not os.path.isfile("%s/users.json" % config.data_dir):
    open("%s/users.json" % config.data_dir, "w").write("{}")

# Setup logging
if config.action == "main":
    if os.path.isfile("%s/debug.log" % config.log_dir):  # Simple logrotate
        if os.path.isfile("%s/debug-last.log" % config.log_dir):
            os.unlink("%s/debug-last.log" % config.log_dir)
        os.rename("%s/debug.log" % config.log_dir, "%s/debug-last.log" % config.log_dir)
    logging.basicConfig(format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s',
                        level=logging.DEBUG, filename="%s/debug.log" % config.log_dir)
else:
    logging.basicConfig(level=logging.DEBUG, stream=open(os.devnull, "w"))  # No file logging if action is not main

# Console logger
console_log = logging.StreamHandler()
if config.action == "main":  # Add time if main action
    console_log.setFormatter(logging.Formatter('[%(asctime)s] %(name)s %(message)s', "%H:%M:%S"))
else:
    console_log.setFormatter(logging.Formatter('%(name)s %(message)s', "%H:%M:%S"))

logging.getLogger('').addHandler(console_log)  # Add console logger
logging.getLogger('').name = "-"  # Remove root prefix

# Debug dependent configuration
from Debug import DebugHook
if config.debug:
    console_log.setLevel(logging.DEBUG)  # Display everything to console
else:
    console_log.setLevel(logging.INFO)  # Display only important info to console

monkey.patch_all(thread=False, ssl=False)  # Make time, socket gevent compatible. Not thread: pyfilesystem and system tray icon not compatible, Not ssl: broken in 2.7.9

# Log current config
logging.debug("Config: %s" % config)

@ -49,11 +67,11 @@ logging.debug("Config: %s" % config)

# Socks Proxy monkey patch
if config.proxy:
    from util import SocksProxy
    logging.info("Patching sockets to socks proxy: %s" % config.proxy)
    config.fileserver_ip = '127.0.0.1'  # Do not accept connections anywhere but localhost
    SocksProxy.monkeyPath(*config.proxy.split(":"))

# Load plugins
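One detail worth calling out in the setup above: monkey.patch_all now runs from the top-level import block, and it swaps blocking stdlib primitives (socket, time.sleep, ...) for gevent-cooperative ones, so it has to execute before code that captures the real versions; thread and ssl stay unpatched for the reasons given in its comment. A minimal sketch of the effect:

from gevent import monkey
monkey.patch_all(thread=False, ssl=False)

import time
time.sleep(1)  # Now yields to the gevent hub instead of blocking the whole process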
@ -64,230 +82,216 @@ PluginManager.plugin_manager.loadPlugins()
|
||||||
# -- Actions --
|
# -- Actions --
|
||||||
|
|
||||||
@PluginManager.acceptPlugins
|
@PluginManager.acceptPlugins
|
||||||
class Actions:
|
class Actions(object):
|
||||||
# Default action: Start serving UiServer and FileServer
|
# Default action: Start serving UiServer and FileServer
|
||||||
def main(self):
|
def main(self):
|
||||||
logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__))
|
logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__))
|
||||||
global ui_server, file_server
|
global ui_server, file_server
|
||||||
from File import FileServer
|
from File import FileServer
|
||||||
from Ui import UiServer
|
from Ui import UiServer
|
||||||
logging.info("Creating UiServer....")
|
logging.info("Creating UiServer....")
|
||||||
ui_server = UiServer()
|
ui_server = UiServer()
|
||||||
|
|
||||||
logging.info("Removing old SSL certs...")
|
logging.info("Removing old SSL certs...")
|
||||||
from Crypt import CryptConnection
|
from Crypt import CryptConnection
|
||||||
CryptConnection.manager.removeCerts()
|
CryptConnection.manager.removeCerts()
|
||||||
|
|
||||||
logging.info("Creating FileServer....")
|
logging.info("Creating FileServer....")
|
||||||
file_server = FileServer()
|
file_server = FileServer()
|
||||||
|
|
||||||
logging.info("Starting servers....")
|
logging.info("Starting servers....")
|
||||||
gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
|
gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)])
|
||||||
|
|
||||||
|
# Site commands
|
||||||
|
|
||||||
|
def siteCreate(self):
|
||||||
|
logging.info("Generating new privatekey...")
|
||||||
|
from Crypt import CryptBitcoin
|
||||||
|
privatekey = CryptBitcoin.newPrivatekey()
|
||||||
|
logging.info("----------------------------------------------------------------------")
|
||||||
|
logging.info("Site private key: %s" % privatekey)
|
||||||
|
logging.info(" !!! ^ Save it now, required to modify the site ^ !!!")
|
||||||
|
address = CryptBitcoin.privatekeyToAddress(privatekey)
|
||||||
|
logging.info("Site address: %s" % address)
|
||||||
|
logging.info("----------------------------------------------------------------------")
|
||||||
|
|
||||||
|
while True:
|
||||||
|
if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break
|
||||||
|
else: logging.info("Please, secure it now, you going to need it to modify your site!")
|
||||||
|
|
||||||
|
logging.info("Creating directory structure...")
|
||||||
|
from Site import Site
|
||||||
|
os.mkdir("%s/%s" % (config.data_dir, address))
|
||||||
|
open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address)
|
||||||
|
|
||||||
|
logging.info("Creating content.json...")
|
||||||
|
site = Site(address)
|
||||||
|
site.content_manager.sign(privatekey=privatekey)
|
||||||
|
site.settings["own"] = True
|
||||||
|
site.saveSettings()
|
||||||
|
|
||||||
|
logging.info("Site created!")
|
||||||
|
|
||||||
|
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
|
||||||
|
from Site import Site
|
||||||
|
logging.info("Signing site: %s..." % address)
|
||||||
|
site = Site(address, allow_create = False)
|
||||||
|
|
||||||
|
if not privatekey: # If no privatekey in args then ask it now
|
||||||
|
import getpass
|
||||||
|
privatekey = getpass.getpass("Private key (input hidden):")
|
||||||
|
succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True)
|
||||||
|
if succ and publish:
|
||||||
|
self.sitePublish(address, inner_path=inner_path)
|
||||||
|
|
||||||
|
def siteVerify(self, address):
|
||||||
|
import time
|
||||||
|
from Site import Site
|
||||||
|
s = time.time()
|
||||||
|
logging.info("Verifing site: %s..." % address)
|
||||||
|
site = Site(address)
|
||||||
|
bad_files = []
|
||||||
|
|
||||||
|
for content_inner_path in site.content_manager.contents:
|
||||||
|
logging.info("Verifing %s signature..." % content_inner_path)
|
||||||
|
if site.content_manager.verifyFile(content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False) == True:
|
||||||
|
logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
|
||||||
|
else:
|
||||||
|
logging.error("[ERROR] %s: invalid file!" % content_inner_path)
|
||||||
|
bad_files += content_inner_path
|
||||||
|
|
||||||
|
logging.info("Verifying site files...")
|
||||||
|
bad_files += site.storage.verifyFiles()
|
||||||
|
if not bad_files:
|
||||||
|
logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time()-s))
|
||||||
|
else:
|
||||||
|
logging.error("[ERROR] Error during verifying site files!")
|
||||||
|
|
||||||
|
def dbRebuild(self, address):
|
||||||
|
from Site import Site
|
||||||
|
logging.info("Rebuilding site sql cache: %s..." % address)
|
||||||
|
site = Site(address)
|
||||||
|
s = time.time()
|
||||||
|
site.storage.rebuildDb()
|
||||||
|
logging.info("Done in %.3fs" % (time.time()-s))
|
||||||
|
|
||||||
|
def dbQuery(self, address, query):
|
||||||
|
from Site import Site
|
||||||
|
import json
|
||||||
|
site = Site(address)
|
||||||
|
result = []
|
||||||
|
for row in site.storage.query(query):
|
||||||
|
result.append(dict(row))
|
||||||
|
print json.dumps(result, indent=4)
|
||||||
|
|
||||||
|
def siteAnnounce(self, address):
|
||||||
|
from Site.Site import Site
|
||||||
|
logging.info("Announcing site %s to tracker..." % address)
|
||||||
|
site = Site(address)
|
||||||
|
|
||||||
|
s = time.time()
|
||||||
|
site.announce()
|
||||||
|
print "Response time: %.3fs" % (time.time()-s)
|
||||||
|
print site.peers
|
||||||
|
|
||||||
|
|
||||||
# Site commands
|
def siteNeedFile(self, address, inner_path):
|
||||||
|
from Site import Site
|
||||||
def siteCreate(self):
|
site = Site(address)
|
||||||
logging.info("Generating new privatekey...")
|
site.announce()
|
||||||
from Crypt import CryptBitcoin
|
print site.needFile(inner_path, update=True)
|
||||||
privatekey = CryptBitcoin.newPrivatekey()
|
|
||||||
logging.info("----------------------------------------------------------------------")
|
|
||||||
logging.info("Site private key: %s" % privatekey)
|
|
||||||
logging.info(" !!! ^ Save it now, required to modify the site ^ !!!")
|
|
||||||
address = CryptBitcoin.privatekeyToAddress(privatekey)
|
|
||||||
logging.info("Site address: %s" % address)
|
|
||||||
logging.info("----------------------------------------------------------------------")
|
|
||||||
|
|
||||||
while True:
|
|
||||||
if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break
|
|
||||||
else: logging.info("Please, secure it now, you going to need it to modify your site!")
|
|
||||||
|
|
||||||
logging.info("Creating directory structure...")
|
|
||||||
from Site import Site
|
|
||||||
os.mkdir("%s/%s" % (config.data_dir, address))
|
|
||||||
open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address)
|
|
||||||
|
|
||||||
logging.info("Creating content.json...")
|
|
||||||
site = Site(address)
|
|
||||||
site.content_manager.sign(privatekey=privatekey)
|
|
||||||
site.settings["own"] = True
|
|
||||||
site.saveSettings()
|
|
||||||
|
|
||||||
logging.info("Site created!")
|
|
||||||
|
|
||||||
|
|
||||||
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
|
def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
|
||||||
from Site import Site
|
global file_server
|
||||||
logging.info("Signing site: %s..." % address)
|
from Site import Site
|
||||||
site = Site(address, allow_create = False)
|
from File import FileServer # We need fileserver to handle incoming file requests
|
||||||
|
|
||||||
if not privatekey: # If no privatekey in args then ask it now
|
logging.info("Creating FileServer....")
|
||||||
import getpass
|
file_server = FileServer()
|
||||||
privatekey = getpass.getpass("Private key (input hidden):")
|
file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity
|
||||||
succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True)
|
file_server.openport()
|
||||||
if succ and publish:
|
site = file_server.sites[address]
|
||||||
self.sitePublish(address, inner_path=inner_path)
|
site.settings["serving"] = True # Serving the site even if its disabled
|
||||||
|
if peer_ip: # Announce ip specificed
|
||||||
|
site.addPeer(peer_ip, peer_port)
|
||||||
|
else: # Just ask the tracker
|
||||||
|
logging.info("Gathering peers from tracker")
|
||||||
|
site.announce() # Gather peers
|
||||||
|
published = site.publish(20, inner_path) # Push to 20 peers
|
||||||
|
if published > 0:
|
||||||
|
time.sleep(3)
|
||||||
|
logging.info("Serving files (max 60s)...")
|
||||||
|
gevent.joinall([file_server_thread], timeout=60)
|
||||||
|
logging.info("Done.")
|
||||||
|
else:
|
||||||
|
logging.info("No peers found for this site, sitePublish command only works if you already have peers serving your site")
|
||||||
|
|
||||||
|
# Crypto commands
|
||||||
|
def cryptPrivatekeyToAddress(self, privatekey=None):
|
||||||
|
from Crypt import CryptBitcoin
|
||||||
|
if not privatekey: # If no privatekey in args then ask it now
|
||||||
|
import getpass
|
||||||
|
privatekey = getpass.getpass("Private key (input hidden):")
|
||||||
|
|
    def siteVerify(self, address):
        import time
        from Site import Site
        s = time.time()
        logging.info("Verifying site: %s..." % address)
        site = Site(address)
        bad_files = []

        for content_inner_path in site.content_manager.contents:
            logging.info("Verifying %s signature..." % content_inner_path)
            if site.content_manager.verifyFile(content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False):
                logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
            else:
                logging.error("[ERROR] %s: invalid file!" % content_inner_path)
                bad_files.append(content_inner_path)  # append, not +=: += would add the path character by character

        logging.info("Verifying site files...")
        bad_files += site.storage.verifyFiles()
        if not bad_files:
            logging.info("[OK] All file sha512sums match! (%.3fs)" % (time.time() - s))
        else:
            logging.error("[ERROR] Error during site file verification!")

    def dbRebuild(self, address):
        from Site import Site
        logging.info("Rebuilding site sql cache: %s..." % address)
        site = Site(address)
        s = time.time()
        site.storage.rebuildDb()
        logging.info("Done in %.3fs" % (time.time() - s))

    def dbQuery(self, address, query):
        from Site import Site
        import json
        site = Site(address)
        result = []
        for row in site.storage.query(query):
            result.append(dict(row))
        print json.dumps(result, indent=4)

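    # Usage sketch (hypothetical address and query; the available tables depend
    # on the site's dbschema.json). Prints the matching rows as a JSON list:
    #   python zeronet.py dbQuery 13DNDkMUPd1d172wrNvsV5eu37frASLzSo "SELECT * FROM json LIMIT 5"
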
    def siteAnnounce(self, address):
        from Site.Site import Site
        logging.info("Announcing site %s to tracker..." % address)
        site = Site(address)

        s = time.time()
        site.announce()
        print "Response time: %.3fs" % (time.time() - s)
        print site.peers

    def siteNeedFile(self, address, inner_path):
        from Site import Site
        site = Site(address)
        site.announce()
        print site.needFile(inner_path, update=True)

    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
        global file_server
        from Site import Site
        from File import FileServer  # We need a fileserver to handle incoming file requests

        logging.info("Creating FileServer....")
        file_server = FileServer()
        file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
        file_server.openport()
        site = file_server.sites[address]
        site.settings["serving"] = True  # Serve the site even if it's disabled
        if peer_ip:  # Announce ip specified
            site.addPeer(peer_ip, peer_port)
        else:  # Just ask the tracker
            logging.info("Gathering peers from tracker")
            site.announce()  # Gather peers
        published = site.publish(20, inner_path)  # Push to 20 peers
        if published > 0:
            time.sleep(3)
            logging.info("Serving files (max 60s)...")
            gevent.joinall([file_server_thread], timeout=60)
            logging.info("Done.")
        else:
            logging.info("No peers found for this site; sitePublish only works if you already have peers serving your site")

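    # Note: gevent.joinall(..., timeout=60) above keeps the freshly started
    # FileServer alive for up to 60 seconds, giving the notified peers a window
    # to fetch the updated files before the command exits.
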
    # Crypto commands

    def cryptPrivatekeyToAddress(self, privatekey=None):
        from Crypt import CryptBitcoin
        if not privatekey:  # If no privatekey in args then ask for it now
            import getpass
            privatekey = getpass.getpass("Private key (input hidden):")

        print CryptBitcoin.privatekeyToAddress(privatekey)

    def cryptSign(self, message, privatekey):
        from Crypt import CryptBitcoin
        print CryptBitcoin.sign(message, privatekey)

    # Peer

    def peerPing(self, peer_ip, peer_port=None):
        if not peer_port:
            peer_port = config.fileserver_port
        logging.info("Opening a simple connection server")
        global file_server
        from Connection import ConnectionServer
        file_server = ConnectionServer("127.0.0.1", 1234)

        from Peer import Peer
        logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port)))
        peer = Peer(peer_ip, peer_port)
        for i in range(5):
            s = time.time()
            print peer.ping(),
            print "Response time: %.3fs (crypt: %s)" % (time.time() - s, peer.connection.crypt)
            time.sleep(1)

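    # Illustrative run (hypothetical peer; output shape may differ):
    #   python zeronet.py peerPing 192.168.1.42 15441
    # The trailing comma in "print peer.ping()," is Python 2's way to suppress
    # the newline, so each ping result and its timing share one line.
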
    def peerGetFile(self, peer_ip, peer_port, site, filename):
        logging.info("Opening a simple connection server")
        global file_server
        from Connection import ConnectionServer
        file_server = ConnectionServer()

        from Peer import Peer
        logging.info("Getting %s/%s from peer: %s:%s..." % (site, filename, peer_ip, peer_port))
        peer = Peer(peer_ip, peer_port)
        s = time.time()
        print peer.getFile(site, filename).read()
        print "Response time: %.3fs" % (time.time() - s)

    def peerCmd(self, peer_ip, peer_port, cmd, parameters):
        logging.info("Opening a simple connection server")
        global file_server
        from Connection import ConnectionServer
        file_server = ConnectionServer()
        from Peer import Peer
        peer = Peer(peer_ip, peer_port)

        import json
        if parameters:
            parameters = json.loads(parameters.replace("'", '"'))
        else:
            parameters = {}
        logging.info("Response: %s" % peer.request(cmd, parameters))

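    # The replace("'", '"') lets the JSON parameters be single-quoted on the
    # shell command line, e.g. (hypothetical requests, field names assumed):
    #   python zeronet.py peerCmd 192.168.1.42 15441 ping "{}"
    #   python zeronet.py peerCmd 192.168.1.42 15441 getFile "{'site': '13DNDkMUPd1d172wrNvsV5eu37frASLzSo', 'inner_path': 'content.json', 'location': 0}"
    # The quote swap makes the argument valid JSON before json.loads, but it
    # breaks on values that legitimately contain apostrophes.
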

actions = Actions()


# Starts here when running zeronet.py
def start():
    # Call function
    func = getattr(actions, config.action, None)
    action_kwargs = config.getActionArguments()
    func(**action_kwargs)

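start() resolves the parsed subcommand name to an Actions method via getattr and calls it with the parsed keyword arguments. A minimal self-contained sketch of the same dispatch pattern (the names below are illustrative, not ZeroNet's actual parser setup):

import argparse


class Actions(object):
    def greet(self, name):
        print "Hello, %s!" % name


def start():
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="action")
    greet_parser = subparsers.add_parser("greet")
    greet_parser.add_argument("name")
    args = parser.parse_args()

    action_kwargs = vars(args).copy()  # stands in for config.getActionArguments()
    action_kwargs.pop("action")
    func = getattr(Actions(), args.action, None)  # subcommand name -> method
    func(**action_kwargs)

if __name__ == "__main__":
    start()  # e.g. "python dispatch.py greet World" prints: Hello, World!
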

start.py (12 changes)

@@ -1,10 +1,16 @@

#!/usr/bin/env python

# Included modules
import sys

# ZeroNet Modules
import zeronet


def main():
    sys.argv += ["--open_browser", "default_browser"]
    zeronet.main()

if __name__ == '__main__':
    main()

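Running start.py is therefore equivalent to "python zeronet.py --open_browser default_browser": the two extra argv entries are consumed by ZeroNet's argument parser before main() takes over, so the launcher's only job is to request that the UI open in the default browser.
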
zeronet.py (85 changes)

@@ -1,49 +1,56 @@

The new file hoists the imports previously done inside main() (import sys, os / import update, sys, os, gc / import traceback) to module level, and the outer exception handler no longer binds the error, becoming except (Exception, ):

#!/usr/bin/env python

# Included modules
import os
import gc
import sys
import traceback

# ZeroNet Modules
import update


def main():
    print "- Starting ZeroNet..."
    main = None
    try:
        sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src"))  # Imports relative to src
        import main
        main.start()
        if main.update_after_shutdown:  # Updater
            # Try cleanup openssl
            try:
                if "lib.opensslVerify" in sys.modules:
                    sys.modules["lib.opensslVerify"].opensslVerify.closeLibrary()
            except Exception, err:
                print "Error closing openssl", err
            # Update
            update.update()
            # Close log files
            logger = sys.modules["main"].logging.getLogger()
            for handler in logger.handlers[:]:
                handler.flush()
                handler.close()
                logger.removeHandler(handler)
    except (Exception, ):  # Prevent closing
        traceback.print_exc()
        traceback.print_exc(file=open("log/error.log", "a"))

    if main and main.update_after_shutdown:  # Updater
        # Restart
        gc.collect()  # Garbage collect
        print "Restarting..."
        args = sys.argv[:]
        args.insert(0, sys.executable)
        if sys.platform == 'win32':
            args = ['"%s"' % arg for arg in args]
        os.execv(sys.executable, args)
    print "Bye."


if __name__ == '__main__':
    main()

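The restart branch works because os.execv replaces the current process image in place: nothing after a successful execv runs, which is why the log handlers and the OpenSSL library are closed beforehand. A standalone sketch of the same self-restart idiom (illustrative, not part of the patch):

import os
import sys


def restart_self():
    args = sys.argv[:]
    args.insert(0, sys.executable)  # argv[0] for the new process image
    if sys.platform == 'win32':
        args = ['"%s"' % arg for arg in args]  # Windows: execv expects pre-quoted args
    os.execv(sys.executable, args)  # Never returns on success
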