Rev467, requirements.txt accept newer dependencies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
parent a7d8d488da
commit 9d7d4f1552
22 changed files with 486 additions and 220 deletions
@@ -1,2 +1,2 @@
-gevent==1.0.1
+gevent>=1.0.1
-msgpack-python==0.4.4
+msgpack-python>=0.4.4
@@ -8,7 +8,7 @@ class Config(object):
     def __init__(self, argv):
         self.version = "0.3.2"
-        self.rev = 465
+        self.rev = 467
         self.argv = argv
         self.action = None
         self.createParser()
@@ -9,7 +9,7 @@ import gevent
 from Debug import Debug
 from Crypt import CryptHash
 from Config import config
+from util import helper


 class ContentManager(object):
@@ -26,8 +26,8 @@ class ContentManager(object):
         content_inner_path = content_inner_path.strip("/")  # Remove / from begning
         old_content = self.contents.get(content_inner_path)
         content_path = self.site.storage.getPath(content_inner_path)
-        content_dir = self.toDir(self.site.storage.getPath(content_inner_path))
-        content_inner_dir = self.toDir(content_inner_path)
+        content_dir = helper.getDirname(self.site.storage.getPath(content_inner_path))
+        content_inner_dir = helper.getDirname(content_inner_path)

         if os.path.isfile(content_path):
             try:
@@ -140,16 +140,29 @@ class ContentManager(object):
         while True:
             content_inner_path = "%s/content.json" % "/".join(dirs)
             content = self.contents.get(content_inner_path.strip("/"))
-            if content and "files" in content:  # Check if content.json exists
+
+            # Check in files
+            if content and "files" in content:
                 back = content["files"].get("/".join(inner_path_parts))
                 if back:
                     back["content_inner_path"] = content_inner_path
+                    back["optional"] = False
                     return back

-            if content and "user_contents" in content:  # User dir
+            # Check in optional files
+            if content and "files_optional" in content:  # Check if file in this content.json
+                back = content["files_optional"].get("/".join(inner_path_parts))
+                if back:
+                    back["content_inner_path"] = content_inner_path
+                    back["optional"] = True
+                    return back
+
+            # Return the rules if user dir
+            if content and "user_contents" in content:
                 back = content["user_contents"]
                 # Content.json is in the users dir
                 back["content_inner_path"] = re.sub("(.*)/.*?$", "\\1/content.json", inner_path)
+                back["optional"] = None
                 return back

             # No inner path in this dir, lets try the parent dir
@@ -234,6 +247,7 @@ class ContentManager(object):
             rules["signers"] = []
         rules["signers"].append(user_address)  # Add user as valid signer
         rules["user_address"] = user_address
+        rules["includes_allowed"] = False

         return rules
@@ -243,7 +257,7 @@ class ContentManager(object):
         files_optional_node = {}

         for file_relative_path in self.site.storage.list(dir_inner_path):
-            file_name = self.toFilename(file_relative_path)
+            file_name = helper.getFilename(file_relative_path)

             ignored = optional = False
             if file_name == "content.json":
@@ -283,12 +297,12 @@ class ContentManager(object):
         if extend:
             content.update(extend)  # Add custom fields

-        directory = self.toDir(self.site.storage.getPath(inner_path))
-        inner_directory = self.toDir(inner_path)
+        directory = helper.getDirname(self.site.storage.getPath(inner_path))
+        inner_directory = helper.getDirname(inner_path)
         self.log.info("Opening site data directory: %s..." % directory)

         changed_files = [inner_path]
-        files_node, files_optional_node = self.hashFiles(self.toDir(inner_path), content.get("ignore"), content.get("optional"))
+        files_node, files_optional_node = self.hashFiles(helper.getDirname(inner_path), content.get("ignore"), content.get("optional"))

         # Find changed files
         files_merged = files_node.copy()
@@ -310,13 +324,17 @@ class ContentManager(object):
         new_content = content.copy()  # Create a copy of current content.json
         new_content["files"] = files_node  # Add files sha512 hash
         if files_optional_node:
-            new_content["files_optional_node"] = files_optional_node
+            new_content["files_optional"] = files_optional_node
+        elif "files_optional" in new_content:
+            del new_content["files_optional"]
+
         new_content["modified"] = time.time()  # Add timestamp
         if inner_path == "content.json":
             new_content["address"] = self.site.address
             new_content["zeronet_version"] = config.version
             new_content["signs_required"] = content.get("signs_required", 1)

+        # Verify private key
         from Crypt import CryptBitcoin
         self.log.info("Verifying private key...")
         privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
@@ -409,6 +427,7 @@ class ContentManager(object):
     # Return: True or False
     def verifyContent(self, inner_path, content):
         content_size = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])  # Size of new content
+        content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values()])
         site_size = self.getTotalSize(ignore=inner_path) + content_size  # Site size without old content
         if site_size > self.site.settings.get("size", 0):
             self.site.settings["size"] = site_size  # Save to settings if larger
@@ -433,23 +452,34 @@ class ContentManager(object):
             return False

         # Check include size limit
-        if rules.get("max_size"):  # Include size limit
+        if rules.get("max_size") is not None:  # Include size limit
             if content_size > rules["max_size"]:
                 self.log.error("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"]))
                 return False

-        # Check if content includes allowed
-        if rules.get("includes_allowed") is False and content.get("includes"):
-            self.log.error("%s: Includes not allowed" % inner_path)
-            return False  # Includes not allowed
+        if rules.get("max_size_optional") is not None:  # Include optional files limit
+            if content_size_optional > rules["max_size_optional"]:
+                self.log.error("%s: Include optional files too large %s > %s" % (inner_path, content_size_optional, rules["max_size_optional"]))
+                return False

         # Filename limit
         if rules.get("files_allowed"):
             for file_inner_path in content["files"].keys():
                 if not re.match("^%s$" % rules["files_allowed"], file_inner_path):
-                    self.log.error("%s: File not allowed" % file_inner_path)
+                    self.log.error("%s %s: File not allowed" % (inner_path, file_inner_path))
                     return False

+        if rules.get("files_allowed_optional"):
+            for file_inner_path in content.get("files_optional", {}).keys():
+                if not re.match("^%s$" % rules["files_allowed_optional"], file_inner_path):
+                    self.log.error("%s %s: Optional file not allowed" % (inner_path, file_inner_path))
+                    return False
+
+        # Check if content includes allowed
+        if rules.get("includes_allowed") is False and content.get("includes"):
+            self.log.error("%s: Includes not allowed" % inner_path)
+            return False  # Includes not allowed
+
         return True  # All good

     # Verify file validity
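Note: the verifyContent() changes above split the size and filename checks into a normal and an optional variant. A minimal sketch of the rule/content shapes these checks evaluate; the field names come from this diff, the concrete values are illustrative only (borrowed from the test data):

    rules = {
        "max_size": 10000,                               # limit on content.json + normal files
        "max_size_optional": 10000000,                   # new: limit on the sum of files_optional sizes
        "files_allowed": "data.json",                    # regexp for normal file names
        "files_allowed_optional": ".*\\.(png|jpg|gif)",  # new: regexp for optional file names
        "includes_allowed": False                        # new: forced to False for user content.json files
    }
    content = {
        "files": {"data.json": {"size": 168}},                              # counted against max_size
        "files_optional": {"peanut-butter-jelly-time.gif": {"size": 1606}}  # counted against max_size_optional
    }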
@@ -507,7 +537,7 @@ class ContentManager(object):
                 valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
                 if valid_signs >= signs_required:
                     break  # Break if we has enough signs
+            self.log.debug("%s: Valid signs: %s/%s" % (inner_path, valid_signs, signs_required))
             return valid_signs >= signs_required
         else:  # Old style signing
             return CryptBitcoin.verify(sign_content, self.site.address, sign)
@@ -537,19 +567,6 @@ class ContentManager(object):
             self.log.error("File not in content.json: %s" % inner_path)
             return False

-    # Get dir from file
-    # Return: data/site/content.json -> data/site
-    def toDir(self, inner_path):
-        file_dir = re.sub("[^/]*?$", "", inner_path).strip("/")
-        if file_dir:
-            file_dir += "/"  # Add / at end if its not the root
-        return file_dir
-
-    # Get dir from file
-    # Return: data/site/content.json -> data/site
-    def toFilename(self, inner_path):
-        return re.sub("^.*/", "", inner_path)
-

 if __name__ == "__main__":
     def testSign():
@@ -1,6 +1,8 @@
 import logging
-import gevent
 import time
+import array
+
+import gevent

 from cStringIO import StringIO
 from Debug import Debug
@@ -14,8 +16,8 @@ if config.use_tempfiles:
 # Communicate remote peers
 class Peer(object):
     __slots__ = (
-        "ip", "port", "site", "key", "connection", "last_found", "last_response",
-        "last_ping", "added", "connection_error", "hash_failed", "download_bytes", "download_time"
+        "ip", "port", "site", "key", "connection", "last_found", "last_response", "last_ping", "last_hashfield",
+        "hashfield", "added", "connection_error", "hash_failed", "download_bytes", "download_time"
     )

     def __init__(self, ip, port, site=None):
@@ -25,6 +27,8 @@ class Peer(object):
         self.key = "%s:%s" % (ip, port)

         self.connection = None
+        self.hashfield = array.array("H")  # Got optional files hash_id
+        self.last_hashfield = None  # Last time hashfiled downloaded
         self.last_found = time.time()  # Time of last found in the torrent tracker
         self.last_response = None  # Time of last successful response from peer
         self.last_ping = None  # Last response time for ping
@@ -230,6 +234,34 @@ class Peer(object):
         if self.connection:
             self.connection.close()

+    # - HASHFIELD -
+
+    def updateHashfield(self, force=False):
+        # Don't update hashfield again in 15 min
+        if self.last_hashfield and time.time() - self.last_hashfield > 60 * 15 and not force:
+            return False
+
+        response = self.request("getHashfield", {"site": self.site.address})
+        if not response or "error" in response:
+            return False
+        self.last_hashfield = time.time()
+        self.hashfield = response["hashfield"]
+
+        return self.hashfield
+
+    def setHashfield(self, hashfield_dump):
+        self.hashfield.fromstring(hashfield_dump)
+
+    def hasHash(self, hash_id):
+        return hash_id in self.hashfield
+
+    # Return: ["ip:port", "ip:port",...]
+    def findHash(self, hash_id):
+        response = self.request("findHash", {"site": self.site.address, "hash_id": hash_id})
+        if not response or "error" in response:
+            return False
+        return [helper.unpackAddress(peer) for peer in response["peers"]]
+
     # - EVENTS -

     # On connection error
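Note: a rough usage sketch of the new Peer hashfield methods (not part of the commit; "peers" is assumed to be a list of Peer objects and "hash_id" the short id of an optional file stored in their hashfields):

    def peersWithOptionalFile(peers, hash_id):
        found = []
        for peer in peers:
            peer.updateHashfield()      # fetch the peer's optional-file hashfield ("getHashfield" request)
            if peer.hasHash(hash_id):   # True if hash_id is present in the peer's array("H") hashfield
                found.append(peer)
        if not found and peers:
            # Fall back to asking one peer for addresses of other peers that have the file ("findHash" request)
            found = peers[0].findHash(hash_id) or []
        return found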
@@ -112,7 +112,7 @@ class Site:
         s = time.time()
         self.log.debug("Downloading %s..." % inner_path)
         found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
-        content_inner_dir = self.content_manager.toDir(inner_path)
+        content_inner_dir = helper.getDirname(inner_path)
         if not found:
             self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications))
             if check_modifications:  # Download failed, but check modifications if its succed later
@@ -386,7 +386,7 @@ class Site:
         # Copy files
         for content_inner_path, content in self.content_manager.contents.items():
             for file_relative_path in sorted(content["files"].keys()):
-                file_inner_path = self.content_manager.toDir(content_inner_path) + file_relative_path  # Relative to content.json
+                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to content.json
                 file_inner_path = file_inner_path.strip("/")  # Strip leading /
                 if file_inner_path.split("/")[0] in default_dirs:  # Dont copy directories that has -default postfixed alternative
                     self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path)
@@ -3,7 +3,6 @@ import re
 import shutil
 import json
 import time
-import sys

 import sqlite3
 import gevent.event
@@ -11,6 +10,7 @@ import gevent.event
 from Db import Db
 from Debug import Debug
 from Config import config
+from util import helper


 class SiteStorage:
@@ -98,7 +98,7 @@ class SiteStorage:
             for file_relative_path in content["files"].keys():
                 if not file_relative_path.endswith(".json"):
                     continue  # We only interesed in json files
-                content_inner_path_dir = self.site.content_manager.toDir(content_inner_path)  # Content.json dir relative to site
+                content_inner_path_dir = helper.getDirname(content_inner_path)  # Content.json dir relative to site
                 file_inner_path = content_inner_path_dir + file_relative_path  # File Relative to site dir
                 file_inner_path = file_inner_path.strip("/")  # Strip leading /
                 file_path = self.getPath(file_inner_path)
@@ -170,7 +170,6 @@ class SiteStorage:
             else:
                 yield file_name

-
     # Site content updated
     def onUpdated(self, inner_path):
         file_path = self.getPath(inner_path)
@@ -255,7 +254,7 @@ class SiteStorage:
                 self.log.debug("[MISSING] %s" % content_inner_path)
                 bad_files.append(content_inner_path)
             for file_relative_path in content["files"].keys():
-                file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path  # Relative to site dir
+                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                 file_inner_path = file_inner_path.strip("/")  # Strip leading /
                 file_path = self.getPath(file_inner_path)
                 if not os.path.isfile(file_path):
@@ -304,8 +303,13 @@ class SiteStorage:
         files = []  # Get filenames
         for content_inner_path, content in self.site.content_manager.contents.items():
             files.append(content_inner_path)
-            for file_relative_path in content["files"].keys():
-                file_inner_path = self.site.content_manager.toDir(content_inner_path) + file_relative_path  # Relative to site dir
+            # Add normal files
+            for file_relative_path in content.get("files", {}).keys():
+                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
+                files.append(file_inner_path)
+            # Add optional files
+            for file_relative_path in content.get("files_optional", {}).keys():
+                file_inner_path = helper.getDirname(content_inner_path) + file_relative_path  # Relative to site dir
                 files.append(file_inner_path)

         for inner_path in files:
src/Test/Spy.py (new file, 17 lines)
@@ -0,0 +1,17 @@
+class Spy:
+    def __init__(self, obj, func_name):
+        self.obj = obj
+        self.func_name = func_name
+        self.func_original = getattr(self.obj, func_name)
+        self.calls = []
+
+    def __enter__(self, *args, **kwargs):
+        def loggedFunc(cls, *args, **kwags):
+            print "Logging", self, args, kwargs
+            self.calls.append(args)
+            return self.func_original(cls, *args, **kwargs)
+        setattr(self.obj, self.func_name, loggedFunc)
+        return self.calls
+
+    def __exit__(self, *args, **kwargs):
+        setattr(self.obj, self.func_name, self.func_original)
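Note: Spy is a minimal monkey-patching context manager; it is used later in this commit (TestSiteDownload.py) roughly like this, recording the argument tuple of every FileRequest.route() call made inside the block:

    with Spy.Spy(FileRequest, "route") as requests:
        site_temp.download(blind_includes=True).join(timeout=5)
    # Each entry in "requests" holds the route() arguments; the test reads the command
    # name from request[0] and the requested path from request[2]["inner_path"].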
@@ -9,7 +9,7 @@ from Crypt import CryptBitcoin

 @pytest.mark.usefixtures("resetSettings")
 class TestContent:
-    def testIncludes(self, site):
+    def testInclude(self, site):
         # Rules defined in parent content.json
         rules = site.content_manager.getRules("data/test_include/content.json")
@@ -34,7 +34,7 @@ class TestContent:
         # Valid signers for root content.json
         assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]

-    def testLimits(self, site):
+    def testInlcudeLimits(self, site):
         privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
         # Data validation
         data_dict = {
@@ -48,7 +48,7 @@ class TestContent:
         }

         # Normal data
-        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey) }
+        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)}
         data = StringIO(json.dumps(data_dict))
         assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
         # Reset
@@ -56,7 +56,7 @@ class TestContent:

         # Too large
         data_dict["files"]["data.json"]["size"] = 200000  # Emulate 2MB sized data.json
-        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey) }
+        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)}
         data = StringIO(json.dumps(data_dict))
         assert not site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
         # Reset
@@ -65,7 +65,7 @@ class TestContent:

         # Not allowed file
         data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"]
-        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey) }
+        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)}
         data = StringIO(json.dumps(data_dict))
         assert not site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
         # Reset
@@ -73,6 +73,58 @@ class TestContent:
         del data_dict["signs"]

         # Should work again
-        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey) }
+        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)}
         data = StringIO(json.dumps(data_dict))
         assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)

+    @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])
+    def testSign(self, site, inner_path):
+        # Bad privatekey
+        assert not site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False)
+
+        # Good privatekey
+        content = site.content_manager.sign(inner_path, privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False)
+        content_old = site.content_manager.contents[inner_path]  # Content before the sign
+        assert not content_old == content  # Timestamp changed
+        assert site.address in content["signs"]  # Used the site's private key to sign
+        if inner_path == "content.json":
+            assert len(content["files"]) == 17
+        elif inner_path == "data/test-include/content.json":
+            assert len(content["files"]) == 1
+        elif inner_path == "data/users/content.json":
+            assert len(content["files"]) == 0
+
+        # Everything should be same as before except the modified timestamp and the signs
+        assert (
+            {key: val for key, val in content_old.items() if key not in ["modified", "signs", "sign"]}
+            ==
+            {key: val for key, val in content.items() if key not in ["modified", "signs", "sign"]}
+        )
+
+    def testSignOptionalFiles(self, site):
+        site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))"
+        content_optional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False)
+
+        del site.content_manager.contents["content.json"]["optional"]
+        content_nooptional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False)
+
+        assert len(content_nooptional.get("files_optional", {})) == 0
+        assert len(content_optional["files_optional"]) > 0
+        assert len(content_nooptional["files"]) > len(content_optional["files"])
+
+    def testFileInfo(self, site):
+        assert "sha512" in site.content_manager.getFileInfo("index.html")
+        assert not site.content_manager.getFileInfo("notexist")
+
+        # Optional file
+        file_info_optional = site.content_manager.getFileInfo("data/optional.txt")
+        assert "sha512" in file_info_optional
+        assert file_info_optional["optional"] is True
+
+        # Not exists yet user content.json
+        assert "cert_signers" in site.content_manager.getFileInfo("data/users/unknown/content.json")
+
+        # Optional user file
+        file_info_optional = site.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+        assert "sha512" in file_info_optional
+        assert file_info_optional["optional"] is True
@@ -50,6 +50,66 @@ class TestUserContent:
         rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
         assert rules is False

+    def testVerify(self, site):
+        privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"  # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
+        user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
+        data_dict = site.content_manager.contents[user_inner_path]
+        users_content = site.content_manager.contents["data/users/content.json"]
+
+        data = StringIO(json.dumps(data_dict))
+        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
+
+        # Test max size exception by setting allowed to 0
+        rules = site.content_manager.getRules(user_inner_path, data_dict)
+        assert rules["max_size"] == 10000
+        assert users_content["user_contents"]["permission_rules"][".*"]["max_size"] == 10000
+
+        users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 0
+        rules = site.content_manager.getRules(user_inner_path, data_dict)
+        assert rules["max_size"] == 0
+        data = StringIO(json.dumps(data_dict))
+        assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
+        users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000  # Reset
+
+        # Test max optional size exception
+        # 1 MB gif = Allowed
+        data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024
+        del data_dict["signs"]  # Remove signs before signing
+        data_dict["signs"] = {
+            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
+        }
+        data = StringIO(json.dumps(data_dict))
+        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
+
+        # 100 MB gif = Not allowed
+        data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 100 * 1024 * 1024
+        del data_dict["signs"]  # Remove signs before signing
+        data_dict["signs"] = {
+            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
+        }
+        data = StringIO(json.dumps(data_dict))
+        assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
+        data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024  # Reset
+
+        # hello.exe = Not allowed
+        data_dict["files_optional"]["hello.exe"] = data_dict["files_optional"]["peanut-butter-jelly-time.gif"]
+        del data_dict["signs"]  # Remove signs before signing
+        data_dict["signs"] = {
+            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
+        }
+        data = StringIO(json.dumps(data_dict))
+        assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
+        del data_dict["files_optional"]["hello.exe"]  # Reset
+
+        # Includes not allowed in user content
+        data_dict["includes"] = {"other.json": {}}
+        del data_dict["signs"]  # Remove signs before signing
+        data_dict["signs"] = {
+            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
+        }
+        data = StringIO(json.dumps(data_dict))
+        assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
+
     def testCert(self, site):
         # user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C"
         user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
@@ -1,17 +1,39 @@
-class Spy:
-    def __init__(self, obj, func_name):
-        self.obj = obj
-        self.func_name = func_name
-        self.func_original = getattr(self.obj, func_name)
-        self.calls = []
-
-    def __enter__(self, *args, **kwargs):
-        def loggedFunc(cls, *args, **kwags):
-            print "Logging", self, args, kwargs
-            self.calls.append(args)
-            return self.func_original(cls, *args, **kwargs)
-        setattr(self.obj, self.func_name, loggedFunc)
-        return self.calls
-
-    def __exit__(self, *args, **kwargs):
-        setattr(self.obj, self.func_name, self.func_original)
+import socket
+
+import pytest
+from util import helper
+
+
+@pytest.mark.usefixtures("resetSettings")
+class TestHelper:
+    def testShellquote(self):
+        assert helper.shellquote("hel'lo") == "\"hel'lo\""  # Allow '
+        assert helper.shellquote('hel"lo') == '"hello"'  # Remove "
+        assert helper.shellquote("hel'lo", 'hel"lo') == ('"hel\'lo"', '"hello"')
+
+    def testPackAddress(self):
+        assert len(helper.packAddress("1.1.1.1", 1)) == 6
+        assert helper.unpackAddress(helper.packAddress("1.1.1.1", 1)) == ("1.1.1.1", 1)
+
+        with pytest.raises(socket.error):
+            helper.packAddress("999.1.1.1", 1)
+
+        with pytest.raises(socket.error):
+            helper.unpackAddress("X")
+
+    def testGetDirname(self):
+        assert helper.getDirname("data/users/content.json") == "data/users/"
+        assert helper.getDirname("data/users") == "data/"
+        assert helper.getDirname("") == ""
+        assert helper.getDirname("content.json") == ""
+        assert helper.getDirname("data/users/") == "data/users/"
+        assert helper.getDirname("/data/users/content.json") == "/data/users/"
+
+    def testGetFilename(self):
+        assert helper.getFilename("data/users/content.json") == "content.json"
+        assert helper.getFilename("data/users") == "users"
+        assert helper.getFilename("") == ""
+        assert helper.getFilename("content.json") == "content.json"
+        assert helper.getFilename("data/users/") == ""
+        assert helper.getFilename("/data/users/content.json") == "content.json"
@@ -62,36 +62,3 @@ class TestSite:
         assert new_site.address in SiteManager.site_manager.sites
         SiteManager.site_manager.delete(new_site.address)
         assert new_site.address not in SiteManager.site_manager.sites
-
-    @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])
-    def testSign(self, site, inner_path):
-        # Bad privatekey
-        assert not site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False)
-
-        # Good privatekey
-        content = site.content_manager.sign(inner_path, privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False)
-        content_old = site.content_manager.contents[inner_path]  # Content before the sign
-        assert not content_old == content  # Timestamp changed
-        assert site.address in content["signs"]  # Used the site's private key to sign
-        if inner_path == "content.json":
-            assert len(content["files"]) == 24
-        elif inner_path == "data/test-include/content.json":
-            assert len(content["files"]) == 1
-        elif inner_path == "data/users/content.json":
-            assert len(content["files"]) == 0
-
-        # Everything should be same as before except the modified timestamp and the signs
-        assert (
-            {key: val for key, val in content_old.items() if key not in ["modified", "signs", "sign"]}
-            ==
-            {key: val for key, val in content.items() if key not in ["modified", "signs", "sign"]}
-        )
-
-    def testSignOptionalFiles(self, site):
-        site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))"
-        content_optional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False)
-
-        del site.content_manager.contents["content.json"]["optional"]
-        content_nooptional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False)
-
-        assert len(content_nooptional["files"]) > len(content_optional["files"])
src/Test/TestSiteDownload.py (new file, 63 lines)
@@ -0,0 +1,63 @@
+import pytest
+import mock
+import time
+
+from Connection import ConnectionServer
+from Config import config
+from Site import Site
+from File import FileRequest
+import Spy
+
+
+@pytest.mark.usefixtures("resetTempSettings")
+@pytest.mark.usefixtures("resetSettings")
+class TestWorker:
+    def testDownload(self, file_server, site, site_temp):
+        client = ConnectionServer("127.0.0.1", 1545)
+        assert site.storage.directory == config.data_dir + "/" + site.address
+        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
+
+        # Init source server
+        site.connection_server = file_server
+        file_server.sites[site.address] = site
+
+        # Init client server
+        site_temp.connection_server = client
+        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
+
+        site_temp.addPeer("127.0.0.1", 1544)
+        with Spy.Spy(FileRequest, "route") as requests:
+            def boostRequest(inner_path):
+                # I really want these file
+                if inner_path == "index.html":
+                    print "needFile"
+                    site_temp.needFile("data/img/multiuser.png", priority=9, blocking=False)
+                    site_temp.needFile("data/img/direct_domains.png", priority=10, blocking=False)
+            site_temp.onFileDone.append(boostRequest)
+            site_temp.download(blind_includes=True).join(timeout=5)
+            file_requests = [request[2]["inner_path"] for request in requests if request[0] in ("getFile", "streamFile")]
+            # Test priority
+            assert file_requests[0:2] == ["content.json", "index.html"]  # Must-have files
+            assert file_requests[2:4] == ["data/img/direct_domains.png", "data/img/multiuser.png"]  # Directly requested files
+            assert file_requests[4:6] == ["css/all.css", "js/all.js"]  # Important assets
+            assert file_requests[6] == "dbschema.json"  # Database map
+            assert "-default" in file_requests[-1]  # Put default files for cloning to the end
+
+        # Check files
+        bad_files = site_temp.storage.verifyFiles(quick_check=True)
+
+        # -1 because data/users/1J6... user has invalid cert
+        assert len(site_temp.content_manager.contents) == len(site.content_manager.contents) - 1
+        assert not bad_files
+
+        # Optional file
+        assert not site_temp.storage.isFile("data/optional.txt")
+        assert site.storage.isFile("data/optional.txt")
+        site_temp.needFile("data/optional.txt")
+        assert site_temp.storage.isFile("data/optional.txt")
+
+        # Optional user file
+        assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+        site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+        assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+
+        assert site_temp.storage.deleteFiles()
@@ -1,15 +1,13 @@
-import shutil
-import os
-
 import pytest
-from Site import SiteManager


 @pytest.mark.usefixtures("resetSettings")
 class TestSiteStorage:
     def testList(self, site):
+        # Rootdir
         list_root = list(site.storage.list(""))
         assert "content.json" in list_root
         assert "css/all.css" in list_root
+
+        # Subdir
         assert list(site.storage.list("data-default")) == ["data.json", "users/content-default.json"]
@@ -22,6 +22,7 @@ from Config import config
 config.argv = ["none"]  # Dont pass any argv to config parser
 config.parse()
 config.data_dir = "src/Test/testdata"  # Use test data for unittests
+config.debug_socket = True  # Use test data for unittests
 logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)

 from Site import Site
@@ -76,7 +77,7 @@ def resetTempSettings(request):
     request.addfinalizer(cleanup)


-@pytest.fixture(scope="session")
+@pytest.fixture()
 def site():
     site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
     return site
@@ -1,133 +1,136 @@
 {
  "address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT",
  "background-color": "white",
  "description": "Blogging platform Demo",
  "domain": "Blog.ZeroNetwork.bit",
  "files": {
   "css/all.css": {
    "sha512": "65ddd3a2071a0f48c34783aa3b1bde4424bdea344630af05a237557a62bd55dc",
    "size": 112710
   },
   "data-default/data.json": {
    "sha512": "3f5c5a220bde41b464ab116cce0bd670dd0b4ff5fe4a73d1dffc4719140038f2",
    "size": 196
   },
   "data-default/users/content-default.json": {
    "sha512": "0603ce08f7abb92b3840ad0cf40e95ea0b3ed3511b31524d4d70e88adba83daa",
    "size": 679
   },
   "data/data.json": {
    "sha512": "0f2321c905b761a05c360a389e1de149d952b16097c4ccf8310158356e85fb52",
    "size": 31126
   },
   "data/img/autoupdate.png": {
    "sha512": "d2b4dc8e0da2861ea051c0c13490a4eccf8933d77383a5b43de447c49d816e71",
    "size": 24460
   },
   "data/img/direct_domains.png": {
    "sha512": "5f14b30c1852735ab329b22496b1e2ea751cb04704789443ad73a70587c59719",
    "size": 16185
   },
   "data/img/domain.png": {
    "sha512": "ce87e0831f4d1e95a95d7120ca4d33f8273c6fce9f5bbedf7209396ea0b57b6a",
    "size": 11881
   },
   "data/img/memory.png": {
    "sha512": "dd56515085b4a79b5809716f76f267ec3a204be3ee0d215591a77bf0f390fa4e",
    "size": 12775
   },
   "data/img/multiuser.png": {
    "sha512": "88e3f795f9b86583640867897de6efc14e1aa42f93e848ed1645213e6cc210c6",
    "size": 29480
   },
   "data/img/progressbar.png": {
    "sha512": "23d592ae386ce14158cec34d32a3556771725e331c14d5a4905c59e0fe980ebf",
    "size": 13294
   },
   "data/img/slides.png": {
    "sha512": "1933db3b90ab93465befa1bd0843babe38173975e306286e08151be9992f767e",
    "size": 14439
   },
   "data/img/slots_memory.png": {
    "sha512": "82a250e6da909d7f66341e5b5c443353958f86728cd3f06e988b6441e6847c29",
    "size": 9488
   },
   "data/img/trayicon.png": {
    "sha512": "e7ae65bf280f13fb7175c1293dad7d18f1fcb186ebc9e1e33850cdaccb897b8f",
    "size": 19040
   },
-  "data/img/zeroblog-comments.png": {
-   "sha512": "efe4e815a260e555303e5c49e550a689d27a8361f64667bd4a91dbcccb83d2b4",
-   "size": 24001
-  },
-  "data/img/zeroid.png": {
-   "sha512": "b46d541a9e51ba2ddc8a49955b7debbc3b45fd13467d3c20ef104e9d938d052b",
-   "size": 18875
-  },
-  "data/img/zeroname.png": {
-   "sha512": "bab45a1bb2087b64e4f69f756b2ffa5ad39b7fdc48c83609cdde44028a7a155d",
-   "size": 36031
-  },
-  "data/img/zerotalk-mark.png": {
-   "sha512": "a335b2fedeb8d291ca68d3091f567c180628e80f41de4331a5feb19601d078af",
-   "size": 44862
-  },
-  "data/img/zerotalk-upvote.png": {
-   "sha512": "b1ffd7f948b4f99248dde7efe256c2efdfd997f7e876fb9734f986ef2b561732",
-   "size": 41092
-  },
-  "data/img/zerotalk.png": {
-   "sha512": "54d10497a1ffca9a4780092fd1bd158c15f639856d654d2eb33a42f9d8e33cd8",
-   "size": 26606
-  },
-  "data/test_include/data.json": {
-   "sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906",
-   "size": 505
-  },
   "dbschema.json": {
    "sha512": "7b756e8e475d4d6b345a24e2ae14254f5c6f4aa67391a94491a026550fe00df8",
    "size": 1529
   },
   "img/loading.gif": {
    "sha512": "8a42b98962faea74618113166886be488c09dad10ca47fe97005edc5fb40cc00",
    "size": 723
   },
   "index.html": {
    "sha512": "c4039ebfc4cb6f116cac05e803a18644ed70404474a572f0d8473f4572f05df3",
    "size": 4667
   },
   "js/all.js": {
    "sha512": "034c97535f3c9b3fbebf2dcf61a38711dae762acf1a99168ae7ddc7e265f582c",
    "size": 201178
   }
  },
- "ignore": "((js|css)/(?!all.(js|css))|data/.*db|data/users/.*/.*)",
+ "files_optional": {
+  "data/img/zeroblog-comments.png": {
+   "sha512": "efe4e815a260e555303e5c49e550a689d27a8361f64667bd4a91dbcccb83d2b4",
+   "size": 24001
+  },
+  "data/img/zeroid.png": {
+   "sha512": "b46d541a9e51ba2ddc8a49955b7debbc3b45fd13467d3c20ef104e9d938d052b",
+   "size": 18875
+  },
+  "data/img/zeroname.png": {
+   "sha512": "bab45a1bb2087b64e4f69f756b2ffa5ad39b7fdc48c83609cdde44028a7a155d",
+   "size": 36031
+  },
+  "data/img/zerotalk-mark.png": {
+   "sha512": "a335b2fedeb8d291ca68d3091f567c180628e80f41de4331a5feb19601d078af",
+   "size": 44862
+  },
+  "data/img/zerotalk-upvote.png": {
+   "sha512": "b1ffd7f948b4f99248dde7efe256c2efdfd997f7e876fb9734f986ef2b561732",
+   "size": 41092
+  },
+  "data/img/zerotalk.png": {
+   "sha512": "54d10497a1ffca9a4780092fd1bd158c15f639856d654d2eb33a42f9d8e33cd8",
+   "size": 26606
+  },
+  "data/optional.txt": {
+   "sha512": "c6f81db0e9f8206c971c9e5826e3ba823ffbb1a3a900f8047652a8bf78ea98fd",
+   "size": 6
+  }
+ },
+ "ignore": "((js|css)/(?!all.(js|css))|data/.*db|data/users/.*/.*|data/test_include/.*)",
  "includes": {
   "data/test_include/content.json": {
    "added": 1424976057,
    "files_allowed": "data.json",
    "includes_allowed": false,
    "max_size": 20000,
    "signers": [ "15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo" ],
    "signers_required": 1,
    "user_id": 47,
    "user_name": "test"
   },
   "data/users/content.json": {
    "signers": [ "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f" ],
    "signers_required": 1
   }
  },
- "modified": 1443393859.801,
+ "modified": 1443645832.748,
+ "optional": "(data/img/zero.*|data/optional.txt)",
 "sign": [
-  30041653970398729892154852118727733790145614202537425646336077462070348808967,
-  96823925597554846684463773054016176426938620086211253074026312396122955360853
+  33155653220731268227776289017011639520872180216646876377169089096034035969487,
+  36744504416132878244552522451563313660303086381031784548929582417244124447603
 ],
 "signers_sign": "HDNmWJHM2diYln4pkdL+qYOvgE7MdwayzeG+xEUZBgp1HtOjBJS+knDEVQsBkjcOPicDG2it1r6R1eQrmogqSP0=",
 "signs": {
-  "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "HKBBvaOi1v20ZuSVORtD4bBRRgf/85QDVy4HaaPX3fFDAKYmvUWK+Jbp3yIGElMmPoO2+YljFLyromAoEwWd6Eg="
+  "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "HP+qomOGy0hTFX1HjHv8iQIL6E22qNynb+IijEblL2lm8SgsyiOxKGaVkD6/eE6xYGeYHSnhSii2Gw/04z3okNM="
 },
 "signs_required": 1,
 "title": "ZeroBlog",
 "zeronet_version": "0.3.2"
 }
src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT/data/optional.txt (vendored, new file, 1 line)
@@ -0,0 +1 @@
+hello!
@@ -1,15 +1,22 @@
 {
  "cert_auth_type": "web",
  "cert_sign": "HBsTrjTmv+zD1iY93tSci8n9DqdEtYwzxJmRppn4/b+RYktcANGm5tXPOb+Duw3AJcgWDcGUvQVgN1D9QAwIlCw=",
  "cert_user_id": "toruser@zeroid.bit",
  "files": {
   "data.json": {
    "sha512": "4868b5e6d70a55d137db71c2e276bda80437e0235ac670962acc238071296b45",
    "size": 168
-  }
- },
- "modified": 1432491109.11,
- "signs": {
-  "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9": "HMy7ZwwqE0Sk8O+5hTx/ejFW6KtIDbID6fGblCodUTpz4mJZ5GwApBHSVLMYL43vvGT/vKZOiQoJ5tQTeFVbbkk="
  }
+ },
+ "files_optional": {
+  "peanut-butter-jelly-time.gif": {
+   "sha512": "a238fd27bda2a06f07f9f246954b34dcf82e6472aebdecc2c5dc1f01a50721ef",
+   "size": 1606
+  }
+ },
+ "modified": 1443645834.763,
+ "optional": ".*\\.(jpg|png|gif)",
+ "signs": {
+  "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "HD+/5Jmew6BotfeWsfpTFKXUVIY5MyjKQx5KRnT6WO0nMBLxaI6/sTb+6ZXq0tXjXNkmlt36/UICYQcYQjCRhkY="
+ }
 }

Binary file not shown (new image added, 1.6 KiB).
@@ -1,15 +1,15 @@
 {
  "cert_auth_type": "web",
  "cert_sign": "HBsTrjTmv+zD1iY93tSci8n9DqdEtYwzxJmRppn4/b+RYktcANGm5tXPOb+Duw3AJcgWDcGUvQVgN1D9QAwIlCw=",
  "cert_user_id": "toruser@zeroid.bit",
  "files": {
   "data.json": {
    "sha512": "4868b5e6d70a55d137db71c2e276bda80437e0235ac670962acc238071296b45",
    "size": 168
-  }
- },
- "modified": 1432491109.11,
- "signs": {
-  "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9": "HMy7ZwwqE0Sk8O+5hTx/ejFW6KtIDbID6fGblCodUTpz4mJZ5GwApBHSVLMYL43vvGT/vKZOiQoJ5tQTeFVbbkk="
  }
+ },
+ "modified": 1443645835.157,
+ "signs": {
+  "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "HNIXDWV1kpqKtsJ+yrNKLvks/FDYIpVmx7xgkXPJ6NZiajCMHrgEwLH9QRiq6rs3nOCs0P08eRhlgZLvC+3U6ps="
+ }
 }
@@ -1,9 +1,9 @@
 {
  "files": {},
  "ignore": ".*",
- "modified": 1443088330.941,
+ "modified": 1443645833.247,
  "signs": {
-  "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G/YCfchtojDA7EjXk5Xa6af5EaEME14LDAvVE9P8PCDb2ncWN79ZTMsczAx7N3HYyM9Vdqn+8or4hh28z4ITKqU="
+  "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G2EcaEKdzzfbpRITDcaBajNwjaIIJW3zp1YQGIMJcxfw3tLnn6uv/goImvbzvuTXKkl5fQKmBowK2Bg1xXJ3078="
  },
  "user_contents": {
   "cert_signers": {
@@ -12,7 +12,9 @@
   "permission_rules": {
    ".*": {
     "files_allowed": "data.json",
+    "files_allowed_optional": ".*\\.(png|jpg|gif)",
     "max_size": 10000,
+    "max_size_optional": 10000000,
     "signers": [ "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" ]
    },
    "bitid/.*@zeroid.bit": { "max_size": 40000 },
@@ -83,7 +83,9 @@ class WorkerManager:
         elif inner_path.endswith(".css"):
             priority += 5  # boost css files priority
         elif inner_path.endswith(".js"):
-            priority += 3  # boost js files priority
+            priority += 4  # boost js files priority
+        elif inner_path.endswith("dbschema.json"):
+            priority += 3  # boost database specification
         elif inner_path.endswith("content.json"):
             priority += 1  # boost included content.json files priority a bit
         elif inner_path.endswith(".json"):
@@ -1,6 +1,7 @@
 import os
 import socket
 import struct
+import re


 def atomicWrite(dest, content, mode="w"):
@@ -16,10 +17,27 @@ def shellquote(*args):
     else:
         return tuple(['"%s"' % arg.replace('"', "") for arg in args])


 # ip, port to packed 6byte format
 def packAddress(ip, port):
     return socket.inet_aton(ip) + struct.pack("H", port)


 # From 6byte format to ip, port
 def unpackAddress(packed):
     return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0]
+
+
+# Get dir from file
+# Return: data/site/content.json -> data/site
+def getDirname(path):
+    file_dir = re.sub("[^/]*?$", "", path).rstrip("/")
+    if file_dir:
+        file_dir += "/"  # Add / at end if its not the root
+    return file_dir
+
+
+# Get dir from file
+# Return: data/site/content.json -> content.json
+def getFilename(path):
+    return re.sub("^.*/", "", path)
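Note: a quick illustration (not part of the diff) of the two helpers that replace ContentManager.toDir()/toFilename() throughout this commit; the expected results mirror the assertions in the new TestHelper tests above:

    from util import helper

    inner_path = "data/users/content.json"
    print helper.getDirname(inner_path)      # -> "data/users/"
    print helper.getFilename(inner_path)     # -> "content.json"
    print helper.getDirname("content.json")  # -> "" (file sits in the site root)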