version 0.3.0, rev187, Trusted authorization sites support, --publish option on signing, cryptSign command line option, OpenSSL enabled on OSX, Crypt verify allows a list of valid addresses, Option for version 2 json DB tables, DbCursor SELECT parameters bugfix, Add peer to site on listModified, Download blind includes when a new site is added, Better publish command messages, Multi-threaded announce, New HTTP torrent trackers, Wait for dbschema.json on query, Handle json import errors, More compact writeJson storage command, Testcase for signing and verifying, Workaround to make non target=_top links work, Cleaner UiWebsocket command route, Send cert_user_id on siteinfo, Notify other local clients on local file modify, Option to wait for file download before SQL query, fileRules websocket API command, certAdd, certSelect and certSet websocket API commands, Put focus on inner iframe, innerLoaded wrapper API command to add hashtag, Allow more file errors on big sites, Keep worker running after getting stuck on a done task, New, more stable OpenSSL layer that works on OSX, Noparallel parameter bugfix, RateLimit allowed-again interval bugfix, Updater skips non-writable files, Try to close OpenSSL DLL before update
This commit is contained in:
parent: c874726aba
commit: 7e4f6bd38e
33 changed files with 1716 additions and 595 deletions
src/Config.py

@@ -3,8 +3,8 @@ import ConfigParser
 class Config(object):
     def __init__(self):
-        self.version = "0.2.9"
-        self.rev = 134
+        self.version = "0.3.0"
+        self.rev = 187
         self.parser = self.createArguments()
         argv = sys.argv[:] # Copy command line arguments
         argv = self.parseConfig(argv) # Add arguments from config file

@@ -28,10 +28,13 @@ class Config(object):
             coffeescript = "type %s | tools\\coffee\\coffee.cmd"
         else:
             coffeescript = None
-        if sys.platform.startswith("Darwin"): # For some reasons openssl doesnt works on mac yet (https://github.com/HelloZeroNet/ZeroNet/issues/94)
+        """ Probably fixed
+        if sys.platform.lower().startswith("darwin"): # For some reasons openssl doesnt works on mac yet (https://github.com/HelloZeroNet/ZeroNet/issues/94)
             use_openssl = False
         else:
             use_openssl = True
+        """
+        use_openssl = True

         # Create parser
         parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)

@@ -49,6 +52,7 @@ class Config(object):
         action.add_argument('address', help='Site to sign')
         action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
         action.add_argument('--inner_path', help='File you want to sign (default: content.json)', default="content.json", metavar="inner_path")
+        action.add_argument('--publish', help='Publish site after the signing', action='store_true')

         # SitePublish
         action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address')

@@ -89,6 +93,10 @@ class Config(object):
         action.add_argument('cmd', help='Command to execute')
         action.add_argument('parameters', help='Parameters to command', nargs='?')

+        # CryptSign
+        action = subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
+        action.add_argument('message', help='Message to sign')
+        action.add_argument('privatekey', help='Private key')

         # Config parameters
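The siteSign --publish flag and the cryptSign subcommand are the CLI side of this release. A minimal sketch of how the parsed options could drive the flow — the dispatch code itself is not part of this diff and is assumed; the attribute names come from the parser above:

    config = Config()
    if config.action == "siteSign":
        site = Site(config.address)
        site.content_manager.sign(config.inner_path, config.privatekey)
        if config.publish: # New --publish option: push the signed content right away
            site.publish(inner_path=config.inner_path)
    elif config.action == "cryptSign":
        from Crypt import CryptBitcoin
        print CryptBitcoin.sign(config.message, config.privatekey) # Base64 signature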
src/Connection/Connection.py

@@ -106,7 +106,7 @@ class Connection(object):
                 self.incomplete_buff_recv = 0
                 self.handleMessage(message)
                 message = None
-                buf = None
+                buff = None
         except Exception, err:
             if not self.closed: self.log("Socket error: %s" % Debug.formatException(err))
         self.close() # MessageLoop ended, close connection
src/Content/ContentManager.py

@@ -1,4 +1,4 @@
-import json, time, re, os, gevent
+import json, time, re, os, gevent, copy
 from Debug import Debug
 from Crypt import CryptHash
 from Config import config

@@ -18,13 +18,14 @@ class ContentManager:
         content_inner_path = content_inner_path.strip("/") # Remove / from begning
         old_content = self.contents.get(content_inner_path)
         content_path = self.site.storage.getPath(content_inner_path)
+        content_path_dir = self.toDir(self.site.storage.getPath(content_inner_path))
+        content_dir = self.toDir(content_inner_path)

         if os.path.isfile(content_path):
             try:
                 new_content = json.load(open(content_path))
             except Exception, err:
-                self.log.error("Content.json load error: %s" % Debug.formatException(err))
+                self.log.error("%s load error: %s" % (content_path, Debug.formatException(err)))
                 return False
         else:
             self.log.error("Content.json not exits: %s" % content_path)

@@ -58,6 +59,14 @@ class ContentManager:
                 self.log.debug("Missing include: %s" % include_inner_path)
                 changed += [include_inner_path]

+            # Load blind user includes (all subdir)
+            if load_includes and "user_contents" in new_content:
+                for relative_dir in os.listdir(content_path_dir):
+                    include_inner_path = content_dir+relative_dir+"/content.json"
+                    if not self.site.storage.isFile(include_inner_path): continue # Content.json not exits
+                    success = self.loadContent(include_inner_path, add_bad_files=add_bad_files, load_includes=False)
+                    if success: changed += success # Add changed files
+
             # Update the content
             self.contents[content_inner_path] = new_content
         except Exception, err:

@@ -97,19 +106,27 @@ class ContentManager:
             content = self.contents.get(content_inner_path.strip("/"))
             if content and "files" in content: # Check if content.json exists
                 back = content["files"].get("/".join(inner_path_parts))
-                if not back: return False
-                back["content_inner_path"] = content_inner_path
-                return back
-            else: # No inner path in this dir, lets try the parent dir
-                if dirs:
-                    inner_path_parts.insert(0, dirs.pop())
-                else: # No more parent dirs
-                    break
+                if back:
+                    back["content_inner_path"] = content_inner_path
+                    return back
+
+            if content and "user_contents" in content: # User dir
+                back = content["user_contents"]
+                back["content_inner_path"] = re.sub("(.*)/.*?$", "\\1/content.json", inner_path) # Content.json is in the users dir
+                return back
+
+            # No inner path in this dir, lets try the parent dir
+            if dirs:
+                inner_path_parts.insert(0, dirs.pop())
+            else: # No more parent dirs
+                break

         return False # Not found


-    def getIncludeInfo(self, inner_path):
+    # Get rules for the file
+    # Return: The rules for the file or False if not allowed
+    def getRules(self, inner_path, content=None):
         if not inner_path.endswith("content.json"): # Find the files content.json first
             file_info = self.getFileInfo(inner_path)
             if not file_info: return False # File not found

@@ -119,9 +136,11 @@ class ContentManager:
         inner_path_parts.insert(0, dirs.pop()) # Dont check in self dir
         while True:
             content_inner_path = "%s/content.json" % "/".join(dirs)
-            content = self.contents.get(content_inner_path.strip("/"))
-            if content and "includes" in content:
-                return content["includes"].get("/".join(inner_path_parts))
+            parent_content = self.contents.get(content_inner_path.strip("/"))
+            if parent_content and "includes" in parent_content:
+                return parent_content["includes"].get("/".join(inner_path_parts))
+            elif parent_content and "user_contents" in parent_content:
+                return self.getUserContentRules(parent_content, inner_path, content)
             else: # No inner path in this dir, lets try the parent dir
                 if dirs:
                     inner_path_parts.insert(0, dirs.pop())

@@ -131,10 +150,55 @@ class ContentManager:
         return False


+    # Get rules for a user file
+    # Return: The rules of the file or False if not allowed
+    def getUserContentRules(self, parent_content, inner_path, content):
+        user_contents = parent_content["user_contents"]
+        user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) # Delivered for directory
+
+        try:
+            if not content: content = self.site.storage.loadJson(inner_path) # Read the file if no content specificed
+        except: # Content.json not exits
+            return { "signers": [user_address], "user_address": user_address } # Return information that we know for sure
+
+        """if not "cert_user_name" in content: # New file, unknown user
+            content["cert_auth_type"] = "unknown"
+            content["cert_user_name"] = "unknown@unknown"
+        """
+        user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"]) # web/nofish@zeroid.bit
+
+        rules = copy.copy(user_contents["permissions"].get(content["cert_user_id"], {})) # Default rules by username
+        if rules == False: return False # User banned
+        if "signers" in rules: rules["signers"] = rules["signers"][:] # Make copy of the signers
+        for permission_pattern, permission_rules in user_contents["permission_rules"].items(): # Regexp rules
+            if not re.match(permission_pattern, user_urn): continue # Rule is not valid for user
+            # Update rules if its better than current recorded ones
+            for key, val in permission_rules.iteritems():
+                if key not in rules:
+                    if type(val) is list:
+                        rules[key] = val[:] # Make copy
+                    else:
+                        rules[key] = val
+                elif type(val) is int: # Int, update if larger
+                    if val > rules[key]: rules[key] = val
+                elif hasattr(val, "startswith"): # String, update if longer
+                    if len(val) > len(rules[key]): rules[key] = val
+                elif type(val) is list: # List, append
+                    rules[key] += val
+
+        rules["cert_signers"] = user_contents["cert_signers"] # Add valid cert signers
+        if "signers" not in rules: rules["signers"] = []
+        rules["signers"].append(user_address) # Add user as valid signer
+        rules["user_address"] = user_address
+
+        return rules
+
+
     # Create and sign a content.json
     # Return: The new content if filewrite = False
-    def sign(self, inner_path = "content.json", privatekey=None, filewrite=True, update_changed_files=False):
+    def sign(self, inner_path = "content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None):
         content = self.contents.get(inner_path)
         if not content: # Content not exits yet, load default one
             self.log.info("File %s not exits yet, loading default values..." % inner_path)

@@ -144,6 +208,7 @@ class ContentManager:
             content["description"] = ""
             content["signs_required"] = 1
             content["ignore"] = ""
+        if extend: content.update(extend) # Add custom fields

         directory = self.toDir(self.site.storage.getPath(inner_path))
         self.log.info("Opening site data directory: %s..." % directory)

@@ -154,8 +219,13 @@ class ContentManager:
             for file_name in files:
                 file_path = self.site.storage.getPath("%s/%s" % (root.strip("/"), file_name))
                 file_inner_path = re.sub(re.escape(directory), "", file_path)
-                if file_name == "content.json" or (content.get("ignore") and re.match(content["ignore"], file_inner_path)) or file_name.startswith("."): # Ignore content.json, definied regexp and files starting with .
+
+                if file_name == "content.json": ignored = True
+                elif content.get("ignore") and re.match(content["ignore"], file_inner_path): ignored = True
+                elif file_name.startswith("."): ignored = True
+                else: ignored = False
+
+                if ignored: # Ignore content.json, definied regexp and files starting with .
                     self.log.info("- [SKIPPED] %s" % file_inner_path)
                 else:
                     sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file

@@ -184,7 +254,7 @@ class ContentManager:
         from Crypt import CryptBitcoin
         self.log.info("Verifying private key...")
         privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
-        valid_signers = self.getValidSigners(inner_path)
+        valid_signers = self.getValidSigners(inner_path, new_content)
         if privatekey_address not in valid_signers:
             return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address))
         self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))

@@ -215,7 +285,7 @@ class ContentManager:
         if filewrite:
             self.log.info("Saving to %s..." % inner_path)
-            json.dump(new_content, open(self.site.storage.getPath(inner_path), "w"), indent=2, sort_keys=True)
+            self.site.storage.writeJson(inner_path, new_content)

         self.log.info("File %s signed!" % inner_path)

@@ -227,25 +297,39 @@ class ContentManager:
     # The valid signers of content.json file
     # Return: ["1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6", "13ReyhCsjhpuCVahn1DHdf6eMqqEVev162"]
-    def getValidSigners(self, inner_path):
+    def getValidSigners(self, inner_path, content=None):
         valid_signers = []
         if inner_path == "content.json": # Root content.json
             if "content.json" in self.contents and "signers" in self.contents["content.json"]:
                 valid_signers += self.contents["content.json"]["signers"].keys()
         else:
-            include_info = self.getIncludeInfo(inner_path)
-            if include_info and "signers" in include_info:
-                valid_signers += include_info["signers"]
+            rules = self.getRules(inner_path, content)
+            if rules and "signers" in rules:
+                valid_signers += rules["signers"]

         if self.site.address not in valid_signers: valid_signers.append(self.site.address) # Site address always valid
         return valid_signers


     # Return: The required number of valid signs for the content.json
-    def getSignsRequired(self, inner_path):
+    def getSignsRequired(self, inner_path, content=None):
         return 1 # Todo: Multisig


+    def verifyCert(self, inner_path, content):
+        from Crypt import CryptBitcoin
+
+        rules = self.getRules(inner_path, content)
+        if not rules.get("cert_signers"): return True # Does not need cert
+
+        name, domain = content["cert_user_id"].split("@")
+        cert_address = rules["cert_signers"].get(domain)
+        if not cert_address: # Cert signer not allowed
+            self.log.error("Invalid cert signer: %s" % domain)
+            return False
+        return CryptBitcoin.verify("%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"])
+
+
     # Checks if the content.json content is valid
     # Return: True or False
     def validContent(self, inner_path, content):

@@ -266,26 +350,26 @@ class ContentManager:
         if inner_path == "content.json": return True # Root content.json is passed

         # Load include details
-        include_info = self.getIncludeInfo(inner_path)
-        if not include_info:
-            self.log.error("%s: No include info" % inner_path)
+        rules = self.getRules(inner_path, content)
+        if not rules:
+            self.log.error("%s: No rules" % inner_path)
             return False

         # Check include size limit
-        if include_info.get("max_size"): # Include size limit
-            if content_size > include_info["max_size"]:
-                self.log.error("%s: Include too large %s > %s" % (inner_path, content_size, include_info["max_size"]))
+        if rules.get("max_size"): # Include size limit
+            if content_size > rules["max_size"]:
+                self.log.error("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"]))
                 return False

         # Check if content includes allowed
-        if include_info.get("includes_allowed") == False and content.get("includes"):
+        if rules.get("includes_allowed") == False and content.get("includes"):
             self.log.error("%s: Includes not allowed" % inner_path)
             return False # Includes not allowed

         # Filename limit
-        if include_info.get("files_allowed"):
+        if rules.get("files_allowed"):
             for file_inner_path in content["files"].keys():
-                if not re.match("^%s$" % include_info["files_allowed"], file_inner_path):
+                if not re.match("^%s$" % rules["files_allowed"], file_inner_path):
                     self.log.error("%s: File not allowed" % file_inner_path)
                     return False

@@ -322,19 +406,25 @@ class ContentManager:
         if not self.validContent(inner_path, new_content): return False # Content not valid (files too large, invalid files)

         if signs: # New style signing
-            valid_signers = self.getValidSigners(inner_path)
-            signs_required = self.getSignsRequired(inner_path)
+            valid_signers = self.getValidSigners(inner_path, new_content)
+            signs_required = self.getSignsRequired(inner_path, new_content)

             if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json
                 if not CryptBitcoin.verify("%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]):
                     self.log.error("%s invalid signers_sign!" % inner_path)
                     return False

+            if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid
+                self.log.error("%s invalid cert!" % inner_path)
+                return False
+
             valid_signs = 0
             for address in valid_signers:
                 if address in signs: valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
                 if valid_signs >= signs_required: break # Break if we has enough signs

             return valid_signs >= signs_required
         else: # Old style signing
             return CryptBitcoin.verify(sign_content, self.site.address, sign)

@@ -348,8 +438,10 @@ class ContentManager:
             if file_info:
                 if "sha512" in file_info:
                     hash_valid = CryptHash.sha512sum(file) == file_info["sha512"]
-                else: # Backward compatibility
+                elif "sha1" in file_info: # Backward compatibility
                     hash_valid = CryptHash.sha1sum(file) == file_info["sha1"]
+                else:
+                    hash_valid = False
                 if file_info["size"] != file.tell():
                     self.log.error("%s file size does not match %s <> %s, Hash: %s" % (inner_path, file.tell(), file_info["size"], hash_valid))
                     return False
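The permission merge in getUserContentRules is easiest to follow with concrete data. A minimal sketch with assumed values, shaped after the limits exercised in src/Test/test.py below (10000/20000/100000):

    # Assumed user_contents block from a data/users/content.json
    user_contents = {
        "cert_signers": {"zeroid.bit": ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"]},
        "permissions": {"nofish@zeroid.bit": {"max_size": 100000}},  # Per-username override
        "permission_rules": {
            ".*": {"max_size": 10000, "files_allowed": "data.json"}, # Matches every user_urn
            "bitmsg/.*": {"max_size": 20000}                         # Matches by cert_auth_type
        }
    }
    # user_urn "web/noone@zeroid.bit"    matches only ".*"        -> max_size 10000
    # user_urn "bitmsg/noone@zeroid.bit" matches both rules       -> max_size 20000 (ints: larger wins)
    # "nofish@zeroid.bit" starts from its "permissions" entry     -> max_size 100000
    # a username mapped to False in "permissions" is banned       -> getRules returns False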
src/Crypt/CryptBitcoin.py

@@ -62,7 +62,11 @@ def verify(data, address, sign): # Verify data using address and sign
         else: # Use pure-python
             pub = btctools.ecdsa_recover(data, sign)
             sign_address = btctools.pubtoaddr(pub)
-            return sign_address == address
+
+            if type(address) is list: # Any address in the list
+                return sign_address in address
+            else: # One possible address
+                return sign_address == address
     else: # Backward compatible old style
         bitcoin = BitcoinECC.Bitcoin()
         return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign)
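verify() now also accepts a list of candidate addresses, so a signature can be checked against several valid signers in one call (the hunk above covers the pure-python path; addresses reused from the getValidSigners docstring in ContentManager.py):

    CryptBitcoin.verify(data, "1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6", sign) # Single address, as before
    CryptBitcoin.verify(data, ["1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6",
                               "13ReyhCsjhpuCVahn1DHdf6eMqqEVev162"], sign) # True if the recovered signer is either one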
src/Db/Db.py — 26 changes

@@ -6,6 +6,7 @@ class Db:
         self.db_path = db_path
         self.db_dir = os.path.dirname(db_path)+"/"
         self.schema = schema
+        self.schema["version"] = self.schema.get("version", 1)
         self.conn = None
         self.cur = None
         self.log = logging.getLogger("Db:%s" % schema["db_name"])

@@ -85,6 +86,7 @@ class Db:
         cur.execute("BEGIN")

         # Check internal tables
+        # Check keyvalue table
         changed = cur.needTable("keyvalue", [
             ["keyvalue_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
             ["key", "TEXT"],

@@ -92,15 +94,25 @@ class Db:
             ["json_id", "INTEGER REFERENCES json (json_id)"],
         ],[
             "CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
-        ], version=1)
+        ], version=self.schema["version"])
         if changed: changed_tables.append("keyvalue")

-        changed = cur.needTable("json", [
-            ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
-            ["path", "VARCHAR(255)"]
-        ], [
-            "CREATE UNIQUE INDEX path ON json(path)"
-        ], version=1)
+        # Check json table
+        if self.schema["version"] == 1:
+            changed = cur.needTable("json", [
+                ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
+                ["path", "VARCHAR(255)"]
+            ], [
+                "CREATE UNIQUE INDEX path ON json(path)"
+            ], version=self.schema["version"])
+        else:
+            changed = cur.needTable("json", [
+                ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
+                ["directory", "VARCHAR(255)"],
+                ["file_name", "VARCHAR(255)"]
+            ], [
+                "CREATE UNIQUE INDEX path ON json(directory, file_name)"
+            ], version=self.schema["version"])
         if changed: changed_tables.append("json")

         # Check schema tables
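Since schema["version"] defaults to 1, existing dbschema.json files keep the single-path json table; declaring version 2 switches to the directory/file_name layout. A sketch of the relevant fields — db_name and version are the ones read above, db_file is an assumed field name for illustration:

    schema = {
        "db_name": "ZeroTalk",         # Used in the logger name
        "db_file": "data/zerotalk.db", # Assumed for illustration
        "version": 2                   # Omitted -> treated as 1 via schema.get("version", 1)
    }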
src/Db/DbCursor.py

@@ -1,4 +1,4 @@
-import time
+import time, re

 # Special sqlite cursor
 class DbCursor:

@@ -12,7 +12,7 @@ class DbCursor:
     def execute(self, query, params=None):
         if isinstance(params, dict): # Make easier select and insert by allowing dict params
             if query.startswith("SELECT") or query.startswith("DELETE"): # Convert param dict to SELECT * FROM table WHERE key = ?, key2 = ? format
-                wheres = ", ".join([key+" = ?" for key in params])
+                wheres = "AND ".join([key+" = ?" for key in params])
                 query = query.replace("?", wheres)
                 params = params.values()
             else: # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format

@@ -94,12 +94,21 @@ class DbCursor:
     # Get or create a row for json file
     # Return: The database row
     def getJsonRow(self, file_path):
-        res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
-        row = res.fetchone()
-        if not row: # No row yet, create it
-            self.execute("INSERT INTO json ?", {"path": file_path})
-            res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
-            row = res.fetchone()
+        directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups()
+        if self.db.schema["version"] == 1:
+            res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
+            row = res.fetchone()
+            if not row: # No row yet, create it
+                self.execute("INSERT INTO json ?", {"path": file_path})
+                res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
+                row = res.fetchone()
+        else:
+            res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
+            row = res.fetchone()
+            if not row: # No row yet, create it
+                self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name})
+                res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
+                row = res.fetchone()
         return row

     def close(self):
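The execute() fix matters once a param dict carries more than one key: the keys used to be comma-joined, which is not valid SQL in a WHERE clause; now they are AND-joined. For example, the version-2 lookup in getJsonRow above:

    cur.execute("SELECT * FROM json WHERE ? LIMIT 1",
                {"directory": "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C", "file_name": "content.json"})
    # The single ? expands to an AND-joined key = ? list (directory = ? AND file_name = ?)
    # and params.values() supplies the bound values in the same key order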
src/File/FileRequest.py

@@ -170,6 +170,12 @@ class FileRequest(object):
             self.response({"error": "Unknown site"})
             return False
         modified_files = {inner_path: content["modified"] for inner_path, content in site.content_manager.contents.iteritems() if content["modified"] > params["since"]}
+
+        # Add peer to site if not added before
+        connected_peer = site.addPeer(self.connection.ip, self.connection.port)
+        if connected_peer: # Just added
+            connected_peer.connect(self.connection) # Assign current connection to peer
+
         self.response({"modified_files": modified_files})
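For reference, a sketch of the exchange this handler serves; the command name follows the commit message ("Add peer to site on listModified") and the site/since fields used above, while the exact envelope shape is assumed:

    request = {"cmd": "listModified", "params": {"site": "1Hb9rY98TNnA6TYeozJv4w36bqEiBn6x8Y", "since": 1420000000}}
    # Reply: every content.json modified after "since"; as a side effect the asking
    # client is now registered as a peer of the site (the addPeer call above)
    response = {"modified_files": {"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json": 1420070000}}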
src/Peer/Peer.py

@@ -131,6 +131,7 @@ class Peer(object):
                 return False

             buff.write(back["body"])
+            back["body"] = None # Save memory
             if back["location"] == back["size"]: # End of file
                 break
             else:
src/Site/Site.py — 188 changes

@@ -151,17 +151,18 @@ class Site:
         self.log.debug("Start downloading...%s" % self.bad_files)
         gevent.spawn(self.announce)
         if check_size: # Check the size first
-            valid = downloadContent(download_files=False)
+            valid = downloadContent(download_files=False) # Just download content.json files
             if not valid: return False # Cant download content.jsons or size is not fits

         # Download everything
         found = self.downloadContent("content.json")
+        self.checkModifications(0) # Download multiuser blind includes

         return found


     # Update worker, try to find client that supports listModifications command
-    def updater(self, peers_try, queried):
-        since = self.settings.get("modified", 60*60*24)-60*60*24 # Get modified since last update - 1day
+    def updater(self, peers_try, queried, since):
         while 1:
             if not peers_try or len(queried) >= 3: # Stop after 3 successful query
                 break

@@ -179,16 +180,9 @@ class Site:
                 gevent.spawn(self.downloadContent, inner_path) # Download the content.json + the changed files


-    # Update content.json from peers and download changed files
-    # Return: None
-    @util.Noparallel()
-    def update(self, announce=False):
-        self.content_manager.loadContent("content.json") # Reload content.json
-        self.content_updated = None # Reset content updated time
-        self.updateWebsocket(updating=True)
-        if announce: self.announce()
-
+    # Check modified content.json files from peers and add modified files to bad_files
+    # Return: Successfully queried peers [Peer, Peer...]
+    def checkModifications(self, since=None):
         peers_try = [] # Try these peers
         queried = [] # Successfully queried from these peers

@@ -200,15 +194,30 @@ class Site:
             elif len(peers_try) < 5: # Backup peers, add to end of the try list
                 peers_try.append(peer)

-        self.log.debug("Try to get listModifications from peers: %s" % peers_try)
+        if since == None: # No since definied, download from last modification time-1day
+            since = self.settings.get("modified", 60*60*24)-60*60*24
+        self.log.debug("Try to get listModifications from peers: %s since: %s" % (peers_try, since))

         updaters = []
         for i in range(3):
-            updaters.append(gevent.spawn(self.updater, peers_try, queried))
+            updaters.append(gevent.spawn(self.updater, peers_try, queried, since))

         gevent.joinall(updaters, timeout=5) # Wait 5 sec to workers
         time.sleep(0.1)
         self.log.debug("Queried listModifications from: %s" % queried)
         return queried


+    # Update content.json from peers and download changed files
+    # Return: None
+    @util.Noparallel()
+    def update(self, announce=False):
+        self.content_manager.loadContent("content.json") # Reload content.json
+        self.content_updated = None # Reset content updated time
+        self.updateWebsocket(updating=True)
+        if announce: self.announce()
+
+        queried = self.checkModifications()
+
         if not queried: # Not found any client that supports listModifications
             self.log.debug("Fallback to old-style update")

@@ -279,10 +288,11 @@ class Site:
     # Update content.json on peers
     @util.Noparallel()
     def publish(self, limit=5, inner_path="content.json"):
-        self.log.info( "Publishing to %s/%s peers..." % (limit, len(self.peers)) )
+        self.log.info( "Publishing to %s/%s peers..." % (min(len(self.peers), limit), len(self.peers)) )
         published = [] # Successfully published (Peer)
         publishers = [] # Publisher threads
         peers = self.peers.values()
+        if not peers: return 0 # No peers found

         random.shuffle(peers)
         event_done = gevent.event.AsyncResult()

@@ -381,12 +391,79 @@ class Site:
         self.log.debug("Queried pex from %s peers got %s new peers." % (done, added))


+    # Gather peers from tracker
+    # Return: Complete time or False on error
+    def announceTracker(self, protocol, ip, port, fileserver_port, address_hash, my_peer_id):
+        s = time.time()
+        if protocol == "udp": # Udp tracker
+            if config.disable_udp: return False # No udp supported
+            tracker = UdpTrackerClient(ip, port)
+            tracker.peer_port = fileserver_port
+            try:
+                tracker.connect()
+                tracker.poll_once()
+                tracker.announce(info_hash=address_hash, num_want=50)
+                back = tracker.poll_once()
+                peers = back["response"]["peers"]
+            except Exception, err:
+                return False
+
+        else: # Http tracker
+            params = {
+                'info_hash': binascii.a2b_hex(address_hash),
+                'peer_id': my_peer_id, 'port': fileserver_port,
+                'uploaded': 0, 'downloaded': 0, 'left': 0, 'compact': 1, 'numwant': 30,
+                'event': 'started'
+            }
+            req = None
+            try:
+                url = "http://"+ip+"?"+urllib.urlencode(params)
+                # Load url
+                with gevent.Timeout(10, False): # Make sure of timeout
+                    req = urllib2.urlopen(url, timeout=8)
+                    response = req.read()
+                    req.fp._sock.recv=None # Hacky avoidance of memory leak for older python versions
+                    req.close()
+                    req = None
+                if not response:
+                    self.log.debug("Http tracker %s response error" % url)
+                    return False
+                # Decode peers
+                peer_data = bencode.decode(response)["peers"]
+                response = None
+                peer_count = len(peer_data) / 6
+                peers = []
+                for peer_offset in xrange(peer_count):
+                    off = 6 * peer_offset
+                    peer = peer_data[off:off + 6]
+                    addr, port = struct.unpack('!LH', peer)
+                    peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port})
+            except Exception, err:
+                self.log.debug("Http tracker %s error: %s" % (url, err))
+                if req:
+                    req.close()
+                    req = None
+                return False
+
+        # Adding peers
+        added = 0
+        for peer in peers:
+            if not peer["port"]: continue # Dont add peers with port 0
+            if self.addPeer(peer["addr"], peer["port"]): added += 1
+        if added:
+            self.worker_manager.onPeers()
+            self.updateWebsocket(peers_added=added)
+        self.log.debug("Found %s peers, new: %s" % (len(peers), added))
+        return time.time()-s
+
+
     # Add myself and get other peers from tracker
     def announce(self, force=False):
         if time.time() < self.last_announce+30 and not force: return # No reannouncing within 30 secs
         self.last_announce = time.time()
         errors = []
-        address_hash = hashlib.sha1(self.address).hexdigest()
+        slow = []
+        address_hash = hashlib.sha1(self.address).hexdigest() # Site address hash
         my_peer_id = sys.modules["main"].file_server.peer_id

         if sys.modules["main"].file_server.port_opened:

@@ -396,73 +473,30 @@ class Site:
         s = time.time()
         announced = 0
+        threads = []

-        for protocol, ip, port in SiteManager.TRACKERS:
-            if protocol == "udp": # Udp tracker
-                if config.disable_udp: continue # No udp supported
-                tracker = UdpTrackerClient(ip, port)
-                tracker.peer_port = fileserver_port
-                try:
-                    tracker.connect()
-                    tracker.poll_once()
-                    tracker.announce(info_hash=address_hash, num_want=50)
-                    back = tracker.poll_once()
-                    peers = back["response"]["peers"]
-                except Exception, err:
-                    errors.append("%s://%s:%s" % (protocol, ip, port))
-                    continue
-
-            else: # Http tracker
-                params = {
-                    'info_hash': binascii.a2b_hex(address_hash),
-                    'peer_id': my_peer_id, 'port': fileserver_port,
-                    'uploaded': 0, 'downloaded': 0, 'left': 0, 'compact': 1, 'numwant': 30,
-                    'event': 'started'
-                }
-                req = None
-                try:
-                    url = "http://"+ip+"?"+urllib.urlencode(params)
-                    # Load url
-                    req = urllib2.urlopen(url, timeout=10)
-                    response = req.read()
-                    req.fp._sock.recv=None # Hacky avoidance of memory leak for older python versions
-                    req.close()
-                    req = None
-                    # Decode peers
-                    peer_data = bencode.decode(response)["peers"]
-                    response = None
-                    peer_count = len(peer_data) / 6
-                    peers = []
-                    for peer_offset in xrange(peer_count):
-                        off = 6 * peer_offset
-                        peer = peer_data[off:off + 6]
-                        addr, port = struct.unpack('!LH', peer)
-                        peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port})
-                except Exception, err:
-                    self.log.debug("Http tracker %s error: %s" % (url, err))
-                    errors.append("%s://%s" % (protocol, ip))
-                    if req:
-                        req.close()
-                        req = None
-                    continue
-
-            # Adding peers
-            added = 0
-            for peer in peers:
-                if not peer["port"]: continue # Dont add peers with port 0
-                if self.addPeer(peer["addr"], peer["port"]): added += 1
-            if added:
-                self.worker_manager.onPeers()
-                self.updateWebsocket(peers_added=added)
-                self.log.debug("Found %s peers, new: %s" % (len(peers), added))
-            announced += 1
+        for protocol, ip, port in SiteManager.TRACKERS: # Start announce threads
+            thread = gevent.spawn(self.announceTracker, protocol, ip, port, fileserver_port, address_hash, my_peer_id)
+            threads.append(thread)
+            thread.ip = ip
+            thread.protocol = protocol
+
+        gevent.joinall(threads) # Wait for announce finish
+
+        for thread in threads:
+            if thread.value:
+                if thread.value > 1:
+                    slow.append("%.2fs %s://%s" % (thread.value, thread.protocol, thread.ip))
+                announced += 1
+            else:
+                errors.append("%s://%s" % (thread.protocol, thread.ip))

         # Save peers num
         self.settings["peers"] = len(self.peers)
         self.saveSettings()

         if len(errors) < len(SiteManager.TRACKERS): # Less errors than total tracker nums
-            self.log.debug("Announced port %s to %s trackers in %.3fs, errors: %s" % (fileserver_port, announced, time.time()-s, errors))
+            self.log.debug("Announced port %s to %s trackers in %.3fs, errors: %s, slow: %s" % (fileserver_port, announced, time.time()-s, errors, slow))
         else:
             self.log.error("Announced to %s trackers in %.3fs, failed" % (announced, time.time()-s))
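announceTracker returns the elapsed time on success (feeding the new slow list) or False on failure, and announce() now fans the calls out one greenlet per tracker. The aggregation pattern in isolation, as a runnable sketch with the tracker work stubbed out:

    import gevent

    def announce_one(ip): # Stand-in for Site.announceTracker: elapsed seconds or False
        gevent.sleep(0.1)
        return 0.1

    threads = [gevent.spawn(announce_one, ip) for ip in ("tracker-a", "tracker-b")]
    gevent.joinall(threads)                      # Wait for every announce to finish
    done = [t.value for t in threads if t.value] # Greenlet.value holds the return value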
src/Site/SiteManager.py

@@ -12,9 +12,12 @@ TRACKERS = [
     #("udp", "trackr.sytes.net", 80),
     #("udp", "tracker4.piratux.com", 6969)
-    ("http", "announce.torrentsmd.com:6969/announce", None),
-    #("http", "i.bandito.org/announce", None),
-    #("http", "tracker.tfile.me/announce", None),
+    ("http", "exodus.desync.com:80/announce", None),
+    ("http", "tracker.aletorrenty.pl:2710/announce", None),
+    #("http", "torrent.gresille.org/announce", None), # Slow
+    #("http", "announce.torrentsmd.com:6969/announce", None), # Off
+    #("http", "i.bandito.org/announce", None), # Off
     ("http", "retracker.telecom.kz/announce", None)
 ]
src/Site/SiteStorage.py

@@ -1,6 +1,8 @@
 import os, re, shutil, json, time, sqlite3
+import gevent.event
 from Db import Db
+from Debug import Debug


 class SiteStorage:
     def __init__(self, site, allow_create=True):

@@ -36,13 +38,17 @@ class SiteStorage:
     def closeDb(self):
         if self.db: self.db.close()
+        self.event_db_busy = None
         self.db = None


     # Return db class
     def getDb(self):
-        if not self.db and self.has_db:
-            self.openDb()
+        if not self.db:
+            self.log.debug("No database, waiting for dbschema.json...")
+            self.site.needFile("dbschema.json", priority=1)
+            self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exits
+            if self.has_db: self.openDb()
         return self.db

@@ -143,10 +149,13 @@ class SiteStorage:
         if inner_path == "dbschema.json":
             self.has_db = self.isFile("dbschema.json")
             self.getDb().checkTables() # Check if any if table schema changed
-        elif inner_path != "content.json" and inner_path.endswith(".json") and self.has_db: # Load json file to db
+        elif inner_path.endswith(".json") and self.has_db: # Load json file to db
             self.log.debug("Loading json file to db: %s" % inner_path)
-            self.getDb().loadJson(file_path)
+            try:
+                self.getDb().loadJson(file_path)
+            except Exception, err:
+                self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
+                self.closeDb()


     # Load and parse json file

@@ -154,6 +163,21 @@ class SiteStorage:
         with self.open(inner_path) as file:
             return json.load(file)

+    # Write formatted json file
+    def writeJson(self, inner_path, data):
+        content = json.dumps(data, indent=2, sort_keys=True)
+        # Make it a little more compact by removing unnecessary white space
+        def compact_list(match):
+            return "[ "+match.group(1).strip()+" ]"
+
+        def compact_dict(match):
+            return "{ "+match.group(1).strip()+" }"
+
+        content = re.sub("\[([^,\{\[]{10,100}?)\]", compact_list, content, flags=re.DOTALL)
+        content = re.sub("\{([^,\[\{]{10,100}?)\}", compact_dict, content, flags=re.DOTALL)
+        # Write to disk
+        self.write(inner_path, content)
+

     # Get file size
     def getSize(self, inner_path):
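The two regexps only touch short (10-100 character), comma-free [...] and {...} spans, so multi-entry structures keep their indentation while one-element values collapse onto a single line. A sketch, with storage standing for a SiteStorage instance and the signers list borrowed from the testcases:

    storage.writeJson("content.json", {"signers": ["1BhcaqWViN1YBnNgXb5aq5NtEhKtKdKZMB"]})
    # json.dumps(..., indent=2) alone writes:
    #   "signers": [
    #     "1BhcaqWViN1YBnNgXb5aq5NtEhKtKdKZMB"
    #   ]
    # after the compact_list pass the same span becomes:
    #   "signers": [ "1BhcaqWViN1YBnNgXb5aq5NtEhKtKdKZMB" ]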
src/Test/test.py — 114 changes

@@ -152,7 +152,6 @@ class TestCase(unittest.TestCase):
         # Cleanup
         os.unlink("data/test/zeronet.db")
         os.rmdir("data/test/")
-        print "ok"


     def testContentManagerIncludes(self):

@@ -162,12 +161,12 @@ class TestCase(unittest.TestCase):
         site = Site("1TaLk3zM7ZRskJvrh3ZNCDVGXvkJusPKQ")
         # Include info
-        include_info = site.content_manager.getIncludeInfo("data/users/1BhcaqWViN1YBnNgXb5aq5NtEhKtKdKZMB/content.json")
-        self.assertEqual(include_info["signers"], ['1BhcaqWViN1YBnNgXb5aq5NtEhKtKdKZMB'])
-        self.assertEqual(include_info["user_name"], 'testuser4')
-        self.assertEqual(include_info["max_size"], 10000)
-        self.assertEqual(include_info["includes_allowed"], False)
-        self.assertEqual(include_info["files_allowed"], 'data.json')
+        rules = site.content_manager.getRules("data/users/1BhcaqWViN1YBnNgXb5aq5NtEhKtKdKZMB/content.json")
+        self.assertEqual(rules["signers"], ['1BhcaqWViN1YBnNgXb5aq5NtEhKtKdKZMB'])
+        self.assertEqual(rules["user_name"], 'testuser4')
+        self.assertEqual(rules["max_size"], 10000)
+        self.assertEqual(rules["includes_allowed"], False)
+        self.assertEqual(rules["files_allowed"], 'data.json')
         # Valid signers
         self.assertEqual(
             site.content_manager.getValidSigners("data/users/1BhcaqWViN1YBnNgXb5aq5NtEhKtKdKZMB/content.json"),

@@ -207,9 +206,106 @@ class TestCase(unittest.TestCase):
         self.assertEqual(site.content_manager.verifyFile("data/users/1BhcaqWViN1YBnNgXb5aq5NtEhKtKdKZMB/content.json", data, ignore_same=False), True)


+    def testUserContentRules(self):
+        from Site import Site
+        from cStringIO import StringIO
+        import json
+
+        site = Site("1Hb9rY98TNnA6TYeozJv4w36bqEiBn6x8Y")
+        user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
+
+        # File info for not exits file
+        self.assertEqual(site.content_manager.getFileInfo("data/users/notexits/data.json")["content_inner_path"], "data/users/notexits/content.json")
+        self.assertEqual(site.content_manager.getValidSigners("data/users/notexits/data.json"), ["notexits", "1Hb9rY98TNnA6TYeozJv4w36bqEiBn6x8Y"])
+
+        # File info for exsitsing file
+        file_info = site.content_manager.getFileInfo("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/data.json")
+        valid_signers = site.content_manager.getValidSigners(file_info["content_inner_path"], user_content)
+        self.assertEqual(valid_signers, ['14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet', '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C', '1Hb9rY98TNnA6TYeozJv4w36bqEiBn6x8Y'])
+
+        # Known user
+        user_content["cert_auth_type"] = "web"
+        user_content["cert_user_id"] = "nofish@zeroid.bit"
+        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
+        self.assertEqual(rules["max_size"], 100000)
+
+        # Unknown user
+        user_content["cert_auth_type"] = "web"
+        user_content["cert_user_id"] = "noone@zeroid.bit"
+        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
+        self.assertEqual(rules["max_size"], 10000)
+
+        # User with more size limit by auth type
+        user_content["cert_auth_type"] = "bitmsg"
+        user_content["cert_user_id"] = "noone@zeroid.bit"
+        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
+        self.assertEqual(rules["max_size"], 20000)
+
+        # Banned user
+        user_content["cert_auth_type"] = "web"
+        user_content["cert_user_id"] = "bad@zeroid.bit"
+        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
+        self.assertFalse(rules)
+
+
+    def testUserContentCert(self):
+        from Site import Site
+        from cStringIO import StringIO
+        import json
+        user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C"
+        user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
+        cert_addr = "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"
+        cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"
+
+        site = Site("1Hb9rY98TNnA6TYeozJv4w36bqEiBn6x8Y")
+        #user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
+        # site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"] = user_content # Add to content manager
+        # Check if the user file is loaded
+        self.assertTrue("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json" in site.content_manager.contents)
+        user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
+        cert_content = site.content_manager.contents["data/users/content.json"]
+        # Override cert signer
+        cert_content["user_contents"]["cert_signers"]["zeroid.bit"] = ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"]
+
+
+        # Valid cert providers
+        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
+        self.assertEqual(rules["cert_signers"], {"zeroid.bit": ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"]} )
+
+        # Add cert
+        user_content["cert_sign"] = CryptBitcoin.sign(
+            "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (user_content["cert_auth_type"], user_content["cert_user_id"].split("@")[0]), cert_priv
+        )
+
+        # Verify cert
+        self.assertTrue(site.content_manager.verifyCert("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content))
+        self.assertFalse(site.content_manager.verifyCert("data/users/badaddress/content.json", user_content))
+
+
+        # Sign user content
+        #signed_content = site.content_manager.sign("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False)
+        signed_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
+
+        # Test user cert
+        self.assertTrue(site.content_manager.verifyFile("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", StringIO(json.dumps(signed_content)), ignore_same=False))
+
+        # Test banned user
+        site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][user_content["cert_user_id"]] = False
+        self.assertFalse(site.content_manager.verifyFile("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", StringIO(json.dumps(signed_content)), ignore_same=False))
+
+        # Test invalid cert
+        user_content["cert_sign"] = CryptBitcoin.sign(
+            "badaddress#%s/%s" % (user_content["cert_auth_type"], user_content["cert_user_id"]), cert_priv
+        )
+        signed_content = site.content_manager.sign("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False)
+        self.assertFalse(site.content_manager.verifyFile("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", StringIO(json.dumps(signed_content)), ignore_same=False))
+
+
 if __name__ == "__main__":
     import logging
-    logging.getLogger().setLevel(level=logging.CRITICAL)
-    unittest.main(verbosity=2, defaultTest="TestCase.testContentManagerIncludes")
+    logging.getLogger().setLevel(level=logging.FATAL)
+    unittest.main(verbosity=2)
+    #unittest.main(verbosity=2, defaultTest="TestCase.testUserContentCert")
src/Ui/UiRequest.py

@@ -170,7 +170,7 @@ class UiRequest(object):
         if not site: return False

-        extra_headers.append(("X-Frame-Options", "DENY"))
+        #extra_headers.append(("X-Frame-Options", "DENY"))

         self.sendHeader(extra_headers=extra_headers[:])
src/Ui/UiWebsocket.py

@@ -96,44 +96,13 @@ class UiWebsocket(object):
             permissions = permissions[:]
             permissions.append("ADMIN")

+        admin_commands = ("sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "channelJoinAllsite", "serverUpdate", "certSet")
+
         if cmd == "response": # It's a response to a command
             return self.actionResponse(req["to"], req["result"])
-        elif cmd == "ping":
-            func = self.actionPing
-        elif cmd == "channelJoin":
-            func = self.actionChannelJoin
-        elif cmd == "siteInfo":
-            func = self.actionSiteInfo
-        elif cmd == "serverInfo":
-            func = self.actionServerInfo
-        elif cmd == "siteUpdate":
-            func = self.actionSiteUpdate
-        elif cmd == "sitePublish":
-            func = self.actionSitePublish
-        elif cmd == "fileWrite":
-            func = self.actionFileWrite
-        elif cmd == "fileGet":
-            func = self.actionFileGet
-        elif cmd == "fileQuery":
-            func = self.actionFileQuery
-        elif cmd == "dbQuery":
-            func = self.actionDbQuery
-        # Admin commands
-        elif cmd == "sitePause" and "ADMIN" in permissions:
-            func = self.actionSitePause
-        elif cmd == "siteResume" and "ADMIN" in permissions:
-            func = self.actionSiteResume
-        elif cmd == "siteDelete" and "ADMIN" in permissions:
-            func = self.actionSiteDelete
-        elif cmd == "siteList" and "ADMIN" in permissions:
-            func = self.actionSiteList
-        elif cmd == "siteSetLimit" and "ADMIN" in permissions:
-            func = self.actionSiteSetLimit
-        elif cmd == "channelJoinAllsite" and "ADMIN" in permissions:
-            func = self.actionChannelJoinAllsite
-        elif cmd == "serverUpdate" and "ADMIN" in permissions:
-            func = self.actionServerUpdate
-        else:
+        elif cmd in admin_commands and "ADMIN" not in permissions: # Admin commands
+            return self.response(req["id"], "You don't have permission to run %s" % cmd)
+        else: # Normal command
             func_name = "action" + cmd[0].upper() + cmd[1:]
             func = getattr(self, func_name, None)
             if not func: # Unknown command

@@ -158,6 +127,7 @@ class UiWebsocket(object):
         content["includes"] = len(content.get("includes", {}))
         if "sign" in content: del(content["sign"])
         if "signs" in content: del(content["signs"])
+        if "signers_sign" in content: del(content["signers_sign"])

         settings = site.settings.copy()
         del settings["wrapper_key"] # Dont expose wrapper key

@@ -167,6 +137,7 @@ class UiWebsocket(object):
             "auth_key": self.site.settings["auth_key"], # Obsolete, will be removed
             "auth_key_sha512": hashlib.sha512(self.site.settings["auth_key"]).hexdigest()[0:64], # Obsolete, will be removed
             "auth_address": self.user.getAuthAddress(site.address, create=create_user),
+            "cert_user_id": self.user.getCertUserId(site.address),
             "address": site.address,
             "settings": settings,
             "content_updated": site.content_updated,

@@ -236,8 +207,16 @@ class UiWebsocket(object):
     def actionSitePublish(self, to, privatekey=None, inner_path="content.json"):
         site = self.site
+        extend = {} # Extended info for signing
         if not inner_path.endswith("content.json"): # Find the content.json first
-            inner_path = site.content_manager.getFileInfo(inner_path)["content_inner_path"]
+            file_info = site.content_manager.getFileInfo(inner_path)
+            inner_path = file_info["content_inner_path"]
+            if "cert_signers" in file_info: # Its an user dir file
+                cert = self.user.getCert(self.site.address)
+                extend["cert_auth_type"] = cert["auth_type"]
+                extend["cert_user_id"] = self.user.getCertUserId(site.address)
+                extend["cert_sign"] = cert["cert_sign"]

         if not site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path):
             return self.response(to, "Forbidden, you can only modify your own sites")

@@ -246,7 +225,7 @@ class UiWebsocket(object):
         # Signing
         site.content_manager.loadContent(add_bad_files=False) # Reload content.json, ignore errors to make it up-to-date
-        signed = site.content_manager.sign(inner_path, privatekey) # Sign using private key sent by user
+        signed = site.content_manager.sign(inner_path, privatekey, extend=extend) # Sign using private key sent by user
         if signed:
             if inner_path == "content_json": self.cmd("notification", ["done", "Private key correct, content signed!", 5000]) # Display message for 5 sec
         else:

@@ -301,7 +280,13 @@ class UiWebsocket(object):
         if inner_path.endswith("content.json"):
             self.site.content_manager.loadContent(inner_path, add_bad_files=False)

-        return self.response(to, "ok")
+        self.response(to, "ok")
+
+        # Send sitechanged to other local users
+        for ws in self.site.websockets:
+            if ws != self:
+                ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]})


     # Find data in json files

@@ -314,7 +299,7 @@ class UiWebsocket(object):
     # Sql query
-    def actionDbQuery(self, to, query, params=None):
+    def actionDbQuery(self, to, query, params=None, wait_for=None):
         rows = []
         try:
             res = self.site.storage.query(query, params)

@@ -327,15 +312,95 @@ class UiWebsocket(object):
     # Return file content
-    def actionFileGet(self, to, inner_path):
+    def actionFileGet(self, to, inner_path, required=True):
         try:
-            self.site.needFile(inner_path, priority=1)
+            if required: self.site.needFile(inner_path, priority=1)
             body = self.site.storage.read(inner_path)
         except:
             body = None
         return self.response(to, body)


+    def actionFileRules(self, to, inner_path):
+        rules = self.site.content_manager.getRules(inner_path)
+        if inner_path.endswith("content.json"):
+            content = self.site.content_manager.contents.get(inner_path)
+            if content:
+                rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])
+            else:
+                rules["current_size"] = 0
+        return self.response(to, rules)
+
+
+    # Add certificate to user
+    def actionCertAdd(self, to, domain, auth_type, auth_user_name, cert):
+        try:
+            res = self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert)
+            if res == True:
+                self.cmd("notification", ["done", "New certificate added: <b>%s/%s@%s</b>." % (auth_type, auth_user_name, domain)])
+                self.response(to, "ok")
+            else:
+                self.response(to, "Not changed")
+        except Exception, err:
+            self.response(to, {"error": err.message})
+
+
+    # Select certificate for site
+    def actionCertSelect(self, to, accepted_domains=[]):
+        accounts = []
+        accounts.append(["", "Unique to site", ""]) # Default option
+        active = "" # Make it active if no other option found
+
+        # Add my certs
+        auth_address = self.user.getAuthAddress(self.site.address) # Current auth address
+        for domain, cert in self.user.certs.items():
+            if auth_address == cert["auth_address"]:
+                active = domain
+            title = cert["auth_user_name"]+"@"+domain
+            if domain in accepted_domains:
+                accounts.append([domain, title, ""])
+            else:
+                accounts.append([domain, title, "disabled"])
+
+        # Render the html
+        body = "<span style='padding-bottom: 5px; display: inline-block'>Select account you want to use in this site:</span>"
+        # Accounts
+        for domain, account, css_class in accounts:
+            if domain == active:
+                css_class += " active" # Currently selected option
+                title = "<b>%s</b> <small>(currently selected)</small>" % account
+            else:
+                title = "<b>%s</b>" % account
+            body += "<a href='#Select+account' class='select select-close cert %s' title='%s'>%s</a>" % (css_class, domain, title)
+        # More avalible providers
+        more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domainains we not displayed yet
+        if more_domains:
+            # body+= "<small style='margin-top: 10px; display: block'>Accepted authorization providers by the site:</small>"
+            body+= "<div style='background-color: #F7F7F7; margin-right: -30px'>"
+            for domain in more_domains:
+                body += "<a href='/%s' onclick='wrapper.gotoSite(this)' target='_blank' class='select'><small style='float: right; margin-right: 40px; margin-top: -1px'>Register »</small>%s</a>" % (domain, domain)
+            body+= "</div>"
+
+        body += """
+            <script>
+                $(".notification .select.cert").on("click", function() {
+                    $(".notification .select").removeClass('active')
+                    wrapper.ws.cmd('certSet', [this.title])
+                    return false
+                })
+            </script>
+        """
+
+        # Send the notification
+        self.cmd("notification", ["ask", body])
+
+
+    # Set certificate that used for authenticate user for site
+    def actionCertSet(self, to, domain):
+        self.user.setCert(self.site.address, domain)
+        self.site.updateWebsocket(cert_changed=domain)
+
+
    # - Admin actions -
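With the getattr route, any new action* method is callable without touching the dispatcher, and only admin_commands stays gated. A sketch of the new API calls as websocket messages — the cmd/params envelope shape is assumed from the req fields used above, parameter order comes from the method signatures:

    # "fileRules" -> "action" + "F" + "ileRules" -> self.actionFileRules
    {"cmd": "fileRules", "params": ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]}
    {"cmd": "certAdd", "params": ["zeroid.bit", "web", "nofish", cert_sign]} # domain, auth_type, auth_user_name, cert
    {"cmd": "certSelect", "params": [["zeroid.bit"]]}                        # accepted_domains
    {"cmd": "certSet", "params": ["zeroid.bit"]}                             # as sent by the notification HTML above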
src/Ui/media/Notifications.coffee

@@ -13,7 +13,6 @@ class Notifications
    add: (id, type, body, timeout=0) ->
-       @log id, type, body, timeout
        # Close notifications with same id
        for elem in $(".notification-#{id}")
            @close $(elem)

@@ -55,15 +54,14 @@ class Notifications
        elem.animate({"width": width}, 700, "easeInOutCubic")
        $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000)

-       # Close button
-       $(".close", elem).on "click", =>
+       # Close button or Confirm button
+       $(".close, .button", elem).on "click", =>
            @close elem
            return false

-       # Close on button click within body (confirm dialog)
-       $(".button", elem).on "click", =>
+       # Select list
+       $(".select", elem).on "click", =>
            @close elem
            return false


    close: (elem) ->
@ -30,6 +30,12 @@ class Wrapper
|
|||
if window.location.hash
|
||||
src = $("#inner-iframe").attr("src").replace(/#.*/, "")+window.location.hash
|
||||
$("#inner-iframe").attr("src", src)
|
||||
|
||||
###setInterval (->
|
||||
console.log document.hasFocus()
|
||||
), 1000###
|
||||
$("#inner-iframe").focus()
|
||||
|
||||
@
|
||||
|
||||
|
||||
|
@ -67,6 +73,10 @@ class Wrapper
      if @ws.ws.readyState == 1 and not @wrapperWsInited # If ws already opened
        @sendInner {"cmd": "wrapperOpenedWebsocket"}
        @wrapperWsInited = true
    else if cmd == "innerLoaded"
      if window.location.hash
        $("#inner-iframe")[0].src += window.location.hash # Hash tag
        @log "Added hash to location", $("#inner-iframe")[0].src
    else if cmd == "wrapperNotification" # Display notification
      @actionNotification(message)
    else if cmd == "wrapperConfirm" # Display confirm message
@ -208,7 +218,6 @@ class Wrapper
    @inner_loaded = true
    if not @inner_ready then @sendInner {"cmd": "wrapperReady"} # Inner frame loaded before wrapper
    #if not @site_error then @loading.hideScreen() # Hide loading screen
    if window.location.hash then $("#inner-iframe")[0].src += window.location.hash # Hash tag
    if @ws.ws.readyState == 1 and not @site_info # Ws opened
      @reloadSiteInfo()
    else if @site_info and @site_info.content?.title?
@ -313,13 +322,27 @@ class Wrapper
    return false


  isProxyRequest: ->
    return window.location.pathname == "/"


  gotoSite: (elem) =>
    href = $(elem).attr("href")
    if @isProxyRequest() # Fix for proxy request
      $(elem).attr("href", "http://zero#{href}")


  log: (args...) ->
    console.log "[Wrapper]", args...

origin = window.server_url or window.location.origin

if window.server_url
  ws_url = "ws://#{window.server_url.replace('http://', '')}/Websocket?wrapper_key=#{window.wrapper_key}"
if origin.indexOf("https:") == 0
  proto = { ws: 'wss', http: 'https' }
else
  ws_url = "ws://#{window.location.hostname}:#{window.location.port}/Websocket?wrapper_key=#{window.wrapper_key}"
  proto = { ws: 'ws', http: 'http' }

ws_url = proto.ws + ":" + origin.replace(proto.http+":", "") + "/Websocket?wrapper_key=" + window.wrapper_key

window.wrapper = new Wrapper(ws_url)
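The hunk above derives the websocket endpoint from the page origin instead of hardcoding ws://, so a wrapper served over https automatically talks wss. A standalone restatement in plain Python (hypothetical helper, not part of this commit):

def get_ws_url(origin, wrapper_key):
    # https origins get wss, plain http gets ws; the scheme prefix is swapped in place
    if origin.startswith("https:"):
        proto = {"ws": "wss", "http": "https"}
    else:
        proto = {"ws": "ws", "http": "http"}
    return proto["ws"] + ":" + origin.replace(proto["http"] + ":", "", 1) + "/Websocket?wrapper_key=" + wrapper_key

assert get_ws_url("https://example.com", "k") == "wss://example.com/Websocket?wrapper_key=k"
assert get_ws_url("http://127.0.0.1:43110", "k") == "ws://127.0.0.1:43110/Websocket?wrapper_key=k"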
@ -58,6 +58,16 @@ a { color: black }
.notification small { color: #AAA }
.body-white .notification { box-shadow: 0px 1px 9px rgba(0,0,0,0.1) }

/* Notification select */
.notification .select {
    display: block; padding: 10px; margin-right: -32px; text-decoration: none; border-left: 3px solid #EEE;
    margin-top: 1px; transition: all 0.3s; color: #666
}
.notification .select:hover, .notification .select.active { background-color: #007AFF; border-left: 3px solid #5D68FF; color: white; transition: none }
.notification .select:active, .notification .select:focus { background-color: #3396FF; color: white; transition: none; border-left-color: #3396FF }
.notification .select.disabled { opacity: 0.5; pointer-events: none }
.notification .select small { color: inherit; }

/* Notification types */
.notification-ask .notification-icon { background-color: #f39c12; }
.notification-info .notification-icon { font-size: 22px; font-weight: bold; background-color: #2980b9; line-height: 48px }

@ -115,6 +125,10 @@ a { color: black }
    box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; opacity: 1.0; transform: rotate(3deg) translate(0px, -4px);
}

/* Icons */
.icon-profile { font-size: 6px; top: 0em; border-radius: 0.7em 0.7em 0 0; background: #FFFFFF; width: 1.5em; height: 0.7em; position: relative; display: inline-block; margin-right: 4px }
.icon-profile::before { position: absolute; content: ""; top: -1em; left: 0.38em; width: 0.8em; height: 0.85em; border-radius: 50%; background: #FFFFFF }

/* Animations */

@keyframes flip {

@ -130,3 +144,9 @@ a { color: black }
    70% { opacity: 0 }
    100% { opacity: 0 }
}

/* Print styles */
@media print {
    #inner-iframe { position: fixed; }
    .progressbar, .fixbutton, .notifications, .loadingscreen { visibility: hidden; }
}
@ -63,6 +63,16 @@ a { color: black }
.notification small { color: #AAA }
.body-white .notification { -webkit-box-shadow: 0px 1px 9px rgba(0,0,0,0.1) ; -moz-box-shadow: 0px 1px 9px rgba(0,0,0,0.1) ; -o-box-shadow: 0px 1px 9px rgba(0,0,0,0.1) ; -ms-box-shadow: 0px 1px 9px rgba(0,0,0,0.1) ; box-shadow: 0px 1px 9px rgba(0,0,0,0.1) }

/* Notification select */
.notification .select {
    display: block; padding: 10px; margin-right: -32px; text-decoration: none; border-left: 3px solid #EEE;
    margin-top: 1px; -webkit-transition: all 0.3s; -moz-transition: all 0.3s; -o-transition: all 0.3s; -ms-transition: all 0.3s; transition: all 0.3s ; color: #666
}
.notification .select:hover, .notification .select.active { background-color: #007AFF; border-left: 3px solid #5D68FF; color: white; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none }
.notification .select:active, .notification .select:focus { background-color: #3396FF; color: white; -webkit-transition: none; -moz-transition: none; -o-transition: none; -ms-transition: none; transition: none ; border-left-color: #3396FF }
.notification .select.disabled { opacity: 0.5; pointer-events: none }
.notification .select small { color: inherit; }

/* Notification types */
.notification-ask .notification-icon { background-color: #f39c12; }
.notification-info .notification-icon { font-size: 22px; font-weight: bold; background-color: #2980b9; line-height: 48px }

@ -120,6 +130,10 @@ a { color: black }
    -webkit-box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; -moz-box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; -o-box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; -ms-box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d; box-shadow: 0 0 10px #AF3BFF, 0 0 5px #29d ; opacity: 1.0; -webkit-transform: rotate(3deg) translate(0px, -4px); -moz-transform: rotate(3deg) translate(0px, -4px); -o-transform: rotate(3deg) translate(0px, -4px); -ms-transform: rotate(3deg) translate(0px, -4px); transform: rotate(3deg) translate(0px, -4px) ;
}

/* Icons */
.icon-profile { font-size: 6px; top: 0em; -webkit-border-radius: 0.7em 0.7em 0 0; -moz-border-radius: 0.7em 0.7em 0 0; -o-border-radius: 0.7em 0.7em 0 0; -ms-border-radius: 0.7em 0.7em 0 0; border-radius: 0.7em 0.7em 0 0 ; background: #FFFFFF; width: 1.5em; height: 0.7em; position: relative; display: inline-block; margin-right: 4px }
.icon-profile::before { position: absolute; content: ""; top: -1em; left: 0.38em; width: 0.8em; height: 0.85em; -webkit-border-radius: 50%; -moz-border-radius: 50%; -o-border-radius: 50%; -ms-border-radius: 50%; border-radius: 50% ; background: #FFFFFF }

/* Animations */

@keyframes flip {

@ -161,8 +175,9 @@ a { color: black }
    100% { opacity: 0 }
}


/* Print styles */
@media print {
    #inner-iframe { position: fixed; }
    .progressbar, .fixbutton, .notifications, .loadingscreen { visibility: hidden; }
}
}
@ -595,7 +595,6 @@ jQuery.extend( jQuery.easing,
      if (timeout == null) {
        timeout = 0;
      }
      this.log(id, type, body, timeout);
      _ref = $(".notification-" + id);
      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
        elem = _ref[_i];

@ -644,16 +643,15 @@ jQuery.extend( jQuery.easing,
        "width": width
      }, 700, "easeInOutCubic");
      $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000);
      $(".close", elem).on("click", (function(_this) {
      $(".close, .button", elem).on("click", (function(_this) {
        return function() {
          _this.close(elem);
          return false;
        };
      })(this));
      return $(".button", elem).on("click", (function(_this) {
      return $(".select", elem).on("click", (function(_this) {
        return function() {
          _this.close(elem);
          return false;
          return _this.close(elem);
        };
      })(this));
    };
@ -683,6 +681,7 @@ jQuery.extend( jQuery.easing,
}).call(this);


/* ---- src/Ui/media/Sidebar.coffee ---- */

@ -742,12 +741,13 @@ jQuery.extend( jQuery.easing,

(function() {
  var Wrapper, ws_url,
  var Wrapper, origin, proto, ws_url,
    __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; },
    __slice = [].slice;

  Wrapper = (function() {
    function Wrapper(ws_url) {
      this.gotoSite = __bind(this.gotoSite, this);
      this.setSizeLimit = __bind(this.setSizeLimit, this);
      this.onLoad = __bind(this.onLoad, this);
      this.onCloseWebsocket = __bind(this.onCloseWebsocket, this);
@ -785,6 +785,12 @@ jQuery.extend( jQuery.easing,
        }
      };
    })(this));

    /*setInterval (->
      console.log document.hasFocus()
    ), 1000
     */
    $("#inner-iframe").focus();
    this;
  }
@ -831,6 +837,11 @@ jQuery.extend( jQuery.easing,
        });
        return this.wrapperWsInited = true;
      }
    } else if (cmd === "innerLoaded") {
      if (window.location.hash) {
        $("#inner-iframe")[0].src += window.location.hash;
        return this.log("Added hash to location", $("#inner-iframe")[0].src);
      }
    } else if (cmd === "wrapperNotification") {
      return this.actionNotification(message);
    } else if (cmd === "wrapperConfirm") {
@ -1032,9 +1043,6 @@ jQuery.extend( jQuery.easing,
          "cmd": "wrapperReady"
        });
      }
      if (window.location.hash) {
        $("#inner-iframe")[0].src += window.location.hash;
      }
      if (this.ws.ws.readyState === 1 && !this.site_info) {
        return this.reloadSiteInfo();
      } else if (this.site_info && (((_ref = this.site_info.content) != null ? _ref.title : void 0) != null)) {
@ -1163,6 +1171,18 @@ jQuery.extend( jQuery.easing,
      return false;
    };

    Wrapper.prototype.isProxyRequest = function() {
      return window.location.pathname === "/";
    };

    Wrapper.prototype.gotoSite = function(elem) {
      var href;
      href = $(elem).attr("href");
      if (this.isProxyRequest()) {
        return $(elem).attr("href", "http://zero" + href);
      }
    };

    Wrapper.prototype.log = function() {
      var args;
      args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
@ -1173,15 +1193,22 @@ jQuery.extend( jQuery.easing,

  })();

  var origin = window.server_url || window.location.origin;
  var proto;
  if (origin.indexOf('https:') === 0) {
    proto = { ws: 'wss', ht: 'https' };
  origin = window.server_url || window.location.origin;

  if (origin.indexOf("https:") === 0) {
    proto = {
      ws: 'wss',
      http: 'https'
    };
  } else {
    proto = { ws: 'ws', ht: 'http' };
    proto = {
      ws: 'ws',
      http: 'http'
    };
  }
  ws_url = proto.ws + ":" + (origin.replace(proto.ht + ':', '')) + "/Websocket?wrapper_key=" + window.wrapper_key;

  ws_url = proto.ws + ":" + origin.replace(proto.http + ":", "") + "/Websocket?wrapper_key=" + window.wrapper_key;

  window.wrapper = new Wrapper(ws_url);

}).call(this);
}).call(this);
@ -11,6 +11,12 @@
</head>
<body style="{body_style}">

<script>
// If we are inside iframe escape from it
if (window.self !== window.top) window.open(window.location.toString(), "_top");
if (window.self !== window.top) window.stop();
</script>

<div class="progressbar">
    <div class="peg"></div>
</div>
@ -40,15 +46,17 @@


<!-- Site Iframe -->
<iframe src='{inner_path}{query_string}' id='inner-iframe' sandbox="allow-forms allow-scripts allow-top-navigation"></iframe>
<iframe src='{inner_path}{query_string}' id='inner-iframe' sandbox="allow-forms allow-scripts allow-top-navigation allow-popups"></iframe>

<!-- Site info -->
<script>address = "{address}"</script>
<script>wrapper_key = "{wrapper_key}"</script>
<script>file_inner_path = "{file_inner_path}"</script>
<script>permissions = {permissions}</script>
<script>show_loadingscreen = {show_loadingscreen}</script>
<script>server_url = '{server_url}'</script>
<script>
address = "{address}"
wrapper_key = "{wrapper_key}"
file_inner_path = "{file_inner_path}"
permissions = {permissions}
show_loadingscreen = {show_loadingscreen}
server_url = '{server_url}'
</script>
<script type="text/javascript" src="{server_url}/uimedia/all.js" async></script>

</body>
@ -4,17 +4,19 @@ from Plugin import PluginManager

@PluginManager.acceptPlugins
class User(object):
    def __init__(self, master_address=None, master_seed=None):
    def __init__(self, master_address=None, master_seed=None, data={}):
        if master_seed:
            self.master_seed = master_seed
            self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
        elif master_address:
            self.master_address = master_address
            self.master_seed = None
            self.master_seed = data.get("master_seed")
        else:
            self.master_seed = CryptBitcoin.newSeed()
            self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
        self.sites = {}
        self.sites = data.get("sites", {})
        self.certs = data.get("certs", {})

        self.log = logging.getLogger("User:%s" % self.master_address)
@ -22,10 +24,10 @@ class User(object):
    def save(self):
        users = json.load(open("data/users.json"))
        if not self.master_address in users: users[self.master_address] = {}  # Create if not exists

        user_data = users[self.master_address]
        if self.master_seed: user_data["master_seed"] = self.master_seed
        user_data["sites"] = self.sites
        user_data["certs"] = self.certs
        open("data/users.json", "w").write(json.dumps(users, indent=2, sort_keys=True))
        self.log.debug("Saved")
@ -50,14 +52,66 @@ class User(object):
    # Get BIP32 address from site address
    # Return: BIP32 auth address
    def getAuthAddress(self, address, create=True):
        return self.getSiteData(address, create)["auth_address"]
        cert = self.getCert(address)
        if cert:
            return cert["auth_address"]
        else:
            return self.getSiteData(address, create)["auth_address"]


    def getAuthPrivatekey(self, address, create=True):
        return self.getSiteData(address, create)["auth_privatekey"]
        cert = self.getCert(address)
        if cert:
            return cert["auth_privatekey"]
        else:
            return self.getSiteData(address, create)["auth_privatekey"]


    # Set user attributes from dict
    def setData(self, data):
        for key, val in data.items():
            setattr(self, key, val)
    # Add cert for the user
    def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign):
        domain = domain.lower()
        auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0]  # Find privatekey by auth address
        cert_node = {
            "auth_address": auth_address,
            "auth_privatekey": auth_privatekey,
            "auth_type": auth_type,
            "auth_user_name": auth_user_name,
            "cert_sign": cert_sign
        }
        # Check if we already have a cert for that domain and it's not the same
        if self.certs.get(domain) and self.certs[domain] != cert_node:
            raise Exception("You already have a certificate for this domain: %s/%s@%s" % (self.certs[domain]["auth_type"], self.certs[domain]["auth_user_name"], domain))
        elif self.certs.get(domain) == cert_node:  # Same, not updated
            return None
        else:  # Does not exist yet, add
            self.certs[domain] = cert_node
            self.save()
            return True


    def setCert(self, address, domain):
        site_data = self.getSiteData(address)
        if domain:
            site_data["cert"] = domain
        else:
            del site_data["cert"]
        self.save()
        return site_data


    # Get cert for the site address
    # Return: { "auth_address": ..., "auth_privatekey": ..., "auth_type": "web", "auth_user_name": "nofish", "cert_sign": ... } or None
    def getCert(self, address):
        site_data = self.getSiteData(address, create=False)
        if not site_data or not "cert" in site_data: return None  # Site has no cert
        return self.certs.get(site_data["cert"])


    # Get cert user name for the site address
    # Return: user@certprovider.bit or None
    def getCertUserId(self, address):
        site_data = self.getSiteData(address, create=False)
        if not site_data or not "cert" in site_data: return None  # Site has no cert
        cert = self.certs.get(site_data["cert"])
        if cert:
            return cert["auth_user_name"]+"@"+site_data["cert"]
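getAuthAddress and getAuthPrivatekey above now prefer the keypair of the selected cert and only fall back to the per-site keypair. A condensed sketch of that resolution order (hypothetical helper name; user is any object with the two methods used above):

def resolve_auth_keypair(user, address):
    cert = user.getCert(address)
    if cert:  # A selected certificate wins over the per-site identity
        return cert["auth_address"], cert["auth_privatekey"]
    site_data = user.getSiteData(address)  # Fallback: per-site BIP32 keypair
    return site_data["auth_address"], site_data["auth_privatekey"]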
@ -18,8 +18,7 @@ class UserManager(object):
        # Load new users
        for master_address, data in json.load(open("data/users.json")).items():
            if master_address not in self.users:
                user = User(master_address)
                user.setData(data)
                user = User(master_address, data=data)
                self.users[master_address] = user
                added += 1
            user_found.append(master_address)
@ -62,7 +62,7 @@ class Worker:
                task["failed"].append(self.peer)
                self.task = None
                self.peer.hash_failed += 1
                if self.peer.hash_failed >= 3:  # Broken peer
                if self.peer.hash_failed >= max(len(self.manager.tasks), 3):  # More fails than queued tasks, but at least 3: broken peer
                    break
            task["workers_num"] -= 1
            time.sleep(1)
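The changed condition above scales the broken-peer cutoff with the size of the task queue instead of a flat limit of 3 failures. Restated standalone (example values assumed):

def is_broken_peer(hash_failed, tasks_num):
    # More failures than queued tasks, but never fewer than 3, marks the peer broken
    return hash_failed >= max(tasks_num, 3)

assert is_broken_peer(3, 1)        # Small queue: 3 fails is already enough
assert not is_broken_peer(5, 10)   # Large queue: a busy peer gets more slack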
@ -77,9 +77,17 @@ class Worker:
        self.thread = gevent.spawn(self.downloader)


    # Skip current task
    def skip(self):
        self.manager.log.debug("%s: Force skipping" % self.key)
        if self.thread:
            self.thread.kill(exception=Debug.Notify("Worker stopped"))
        self.start()


    # Force stop the worker
    def stop(self):
        self.manager.log.debug("%s: Force stopping, thread" % self.key)
        self.manager.log.debug("%s: Force stopping" % self.key)
        self.running = False
        if self.thread:
            self.thread.kill(exception=Debug.Notify("Worker stopped"))
@ -32,18 +32,23 @@ class WorkerManager:

            # Clean up workers
            for worker in self.workers.values():
                if worker.task and worker.task["done"]: worker.stop()  # Stop workers whose task is done
                if worker.task and worker.task["done"]: worker.skip()  # Restart workers whose task is done

            if not self.tasks: continue

            tasks = self.tasks[:]  # Copy it so removing elements won't cause any problem
            for task in tasks:
                if (task["time_started"] and time.time() >= task["time_started"]+60) or (time.time() >= task["time_added"]+60 and not self.workers):  # Task taking too long, or no peer after 60 sec: kill it
                    self.log.debug("Timeout, Cleaning up task: %s" % task)
                    # Clean up workers
                if task["time_started"] and time.time() >= task["time_started"]+60:  # Task taking too long: skip it
                    self.log.debug("Timeout, Skipping: %s" % task)
                    # Skip the workers to the next file
                    workers = self.findWorkers(task)
                    for worker in workers:
                        worker.stop()
                    if workers:
                        for worker in workers:
                            worker.skip()
                    else:
                        self.failTask(task)
                elif time.time() >= task["time_added"]+60 and not self.workers:  # No workers left
                    self.log.debug("Timeout, Cleanup task: %s" % task)
                    # Remove task
                    self.failTask(task)
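The loop above now separates two timeout cases: a started task running over 60 seconds gets its workers skipped to other files (and fails only if nobody is serving it), while a task idle for 60 seconds with no workers at all fails outright. A minimal decision sketch (hypothetical function, simplified task dict):

import time

def timeout_action(task, has_workers, now=None):
    now = now if now is not None else time.time()
    if task["time_started"] and now >= task["time_started"] + 60:
        return "skip" if has_workers else "fail"  # Skip workers; fail if no one serves it
    if now >= task["time_added"] + 60 and not has_workers:
        return "fail"  # Never started and nobody left to try
    return "wait"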
@ -178,12 +183,13 @@ class WorkerManager:

    # Mark a task failed
    def failTask(self, task):
        task["done"] = True
        self.tasks.remove(task)  # Remove from queue
        self.site.onFileFail(task["inner_path"])
        task["evt"].set(False)
        if not self.tasks:
            self.started_task_num = 0
        if task in self.tasks:
            task["done"] = True
            self.tasks.remove(task)  # Remove from queue
            self.site.onFileFail(task["inner_path"])
            task["evt"].set(False)
            if not self.tasks:
                self.started_task_num = 0


    # Mark a task done
393
src/lib/opensslVerify/opensslVerify-alter.py
Normal file
@ -0,0 +1,393 @@
# Code is borrowed from https://github.com/blocktrail/python-bitcoinlib
# Thanks!

import base64, hashlib

import ctypes
import ctypes.util
_bchr = chr
_bord = ord
try:
    _ssl = ctypes.CDLL("src/lib/opensslVerify/libeay32.dll")
except:
    _ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32')

import sys

openssl_version = "%.9X" % _ssl.SSLeay()


# this specifies the curve used with ECDSA.
_NID_secp256k1 = 714  # from openssl/obj_mac.h

# Thx to Sam Devlin for the ctypes magic 64-bit fix.
def _check_result(val, func, args):
    if val == 0:
        raise ValueError
    else:
        return ctypes.c_void_p(val)

_ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
_ssl.EC_KEY_new_by_curve_name.errcheck = _check_result

# From openssl/ecdsa.h
class ECDSA_SIG_st(ctypes.Structure):
    _fields_ = [("r", ctypes.c_void_p),
                ("s", ctypes.c_void_p)]

class CECKey:
    """Wrapper around OpenSSL's EC_KEY"""

    POINT_CONVERSION_COMPRESSED = 2
    POINT_CONVERSION_UNCOMPRESSED = 4

    def __init__(self):
        self.k = _ssl.EC_KEY_new_by_curve_name(_NID_secp256k1)

    def __del__(self):
        if _ssl:
            _ssl.EC_KEY_free(self.k)
        self.k = None

    def set_secretbytes(self, secret):
        priv_key = _ssl.BN_bin2bn(secret, 32, _ssl.BN_new())
        group = _ssl.EC_KEY_get0_group(self.k)
        pub_key = _ssl.EC_POINT_new(group)
        ctx = _ssl.BN_CTX_new()
        if not _ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx):
            raise ValueError("Could not derive public key from the supplied secret.")
        _ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx)
        _ssl.EC_KEY_set_private_key(self.k, priv_key)
        _ssl.EC_KEY_set_public_key(self.k, pub_key)
        _ssl.EC_POINT_free(pub_key)
        _ssl.BN_CTX_free(ctx)
        return self.k

    def set_privkey(self, key):
        self.mb = ctypes.create_string_buffer(key)
        return _ssl.d2i_ECPrivateKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))

    def set_pubkey(self, key):
        self.mb = ctypes.create_string_buffer(key)
        return _ssl.o2i_ECPublicKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))

    def get_privkey(self):
        size = _ssl.i2d_ECPrivateKey(self.k, 0)
        mb_pri = ctypes.create_string_buffer(size)
        _ssl.i2d_ECPrivateKey(self.k, ctypes.byref(ctypes.pointer(mb_pri)))
        return mb_pri.raw

    def get_pubkey(self):
        size = _ssl.i2o_ECPublicKey(self.k, 0)
        mb = ctypes.create_string_buffer(size)
        _ssl.i2o_ECPublicKey(self.k, ctypes.byref(ctypes.pointer(mb)))
        return mb.raw

    def get_raw_ecdh_key(self, other_pubkey):
        ecdh_keybuffer = ctypes.create_string_buffer(32)
        r = _ssl.ECDH_compute_key(ctypes.pointer(ecdh_keybuffer), 32,
                                  _ssl.EC_KEY_get0_public_key(other_pubkey.k),
                                  self.k, 0)
        if r != 32:
            raise Exception('CKey.get_ecdh_key(): ECDH_compute_key() failed')
        return ecdh_keybuffer.raw

    def get_ecdh_key(self, other_pubkey, kdf=lambda k: hashlib.sha256(k).digest()):
        # FIXME: be warned it's not clear what the kdf should be as a default
        r = self.get_raw_ecdh_key(other_pubkey)
        return kdf(r)

    def sign(self, hash):
        if not isinstance(hash, bytes):
            raise TypeError('Hash must be bytes instance; got %r' % hash.__class__)
        if len(hash) != 32:
            raise ValueError('Hash must be exactly 32 bytes long')

        sig_size0 = ctypes.c_uint32()
        sig_size0.value = _ssl.ECDSA_size(self.k)
        mb_sig = ctypes.create_string_buffer(sig_size0.value)
        result = _ssl.ECDSA_sign(0, hash, len(hash), mb_sig, ctypes.byref(sig_size0), self.k)
        assert 1 == result
        if bitcoin.core.script.IsLowDERSignature(mb_sig.raw[:sig_size0.value]):
            return mb_sig.raw[:sig_size0.value]
        else:
            return self.signature_to_low_s(mb_sig.raw[:sig_size0.value])

    def sign_compact(self, hash):
        if not isinstance(hash, bytes):
            raise TypeError('Hash must be bytes instance; got %r' % hash.__class__)
        if len(hash) != 32:
            raise ValueError('Hash must be exactly 32 bytes long')

        sig_size0 = ctypes.c_uint32()
        sig_size0.value = _ssl.ECDSA_size(self.k)
        mb_sig = ctypes.create_string_buffer(sig_size0.value)
        result = _ssl.ECDSA_sign(0, hash, len(hash), mb_sig, ctypes.byref(sig_size0), self.k)
        assert 1 == result

        if bitcoin.core.script.IsLowDERSignature(mb_sig.raw[:sig_size0.value]):
            sig = mb_sig.raw[:sig_size0.value]
        else:
            sig = self.signature_to_low_s(mb_sig.raw[:sig_size0.value])

        sig = bitcoin.core.DERSignature.deserialize(sig)

        r_val = sig.r
        s_val = sig.s

        # assert that the r and s are less than 32 long, excluding leading 0s
        assert len(r_val) <= 32 or r_val[0:-32] == b'\x00'
        assert len(s_val) <= 32 or s_val[0:-32] == b'\x00'

        # ensure r and s are always 32 chars long by 0padding
        r_val = ((b'\x00' * 32) + r_val)[-32:]
        s_val = ((b'\x00' * 32) + s_val)[-32:]

        # tmp pubkey of self, but always compressed
        pubkey = CECKey()
        pubkey.set_pubkey(self.get_pubkey())
        pubkey.set_compressed(True)

        # bitcoin core does <4, but I've seen other places do <2 and I've never seen a i > 1 so far
        for i in range(0, 4):
            cec_key = CECKey()
            cec_key.set_compressed(True)

            result = cec_key.recover(r_val, s_val, hash, len(hash), i, 1)
            if result == 1:
                if cec_key.get_pubkey() == pubkey.get_pubkey():
                    return r_val + s_val, i

        raise ValueError

    def signature_to_low_s(self, sig):
        der_sig = ECDSA_SIG_st()
        _ssl.d2i_ECDSA_SIG(ctypes.byref(ctypes.pointer(der_sig)), ctypes.byref(ctypes.c_char_p(sig)), len(sig))
        group = _ssl.EC_KEY_get0_group(self.k)
        order = _ssl.BN_new()
        halforder = _ssl.BN_new()
        ctx = _ssl.BN_CTX_new()
        _ssl.EC_GROUP_get_order(group, order, ctx)
        _ssl.BN_rshift1(halforder, order)

        # Verify that s is over half the order of the curve before we actually subtract anything from it
        if _ssl.BN_cmp(der_sig.s, halforder) > 0:
            _ssl.BN_sub(der_sig.s, order, der_sig.s)

        _ssl.BN_free(halforder)
        _ssl.BN_free(order)
        _ssl.BN_CTX_free(ctx)

        derlen = _ssl.i2d_ECDSA_SIG(ctypes.pointer(der_sig), 0)
        if derlen == 0:
            _ssl.ECDSA_SIG_free(der_sig)
            return None
        new_sig = ctypes.create_string_buffer(derlen)
        _ssl.i2d_ECDSA_SIG(ctypes.pointer(der_sig), ctypes.byref(ctypes.pointer(new_sig)))
        _ssl.BN_free(der_sig.r)
        _ssl.BN_free(der_sig.s)

        return new_sig.raw

    def verify(self, hash, sig):
        """Verify a DER signature"""
        if not sig:
            return False

        # New versions of OpenSSL will reject non-canonical DER signatures. de/re-serialize first.
        norm_sig = ctypes.c_void_p(0)
        _ssl.d2i_ECDSA_SIG(ctypes.byref(norm_sig), ctypes.byref(ctypes.c_char_p(sig)), len(sig))

        derlen = _ssl.i2d_ECDSA_SIG(norm_sig, 0)
        if derlen == 0:
            _ssl.ECDSA_SIG_free(norm_sig)
            return False

        norm_der = ctypes.create_string_buffer(derlen)
        _ssl.i2d_ECDSA_SIG(norm_sig, ctypes.byref(ctypes.pointer(norm_der)))
        _ssl.ECDSA_SIG_free(norm_sig)

        # -1 = error, 0 = bad sig, 1 = good
        return _ssl.ECDSA_verify(0, hash, len(hash), norm_der, derlen, self.k) == 1

    def set_compressed(self, compressed):
        if compressed:
            form = self.POINT_CONVERSION_COMPRESSED
        else:
            form = self.POINT_CONVERSION_UNCOMPRESSED
        _ssl.EC_KEY_set_conv_form(self.k, form)

    def recover(self, sigR, sigS, msg, msglen, recid, check):
        """
        Perform ECDSA key recovery (see SEC1 4.1.6) for curves over (mod p)-fields
        recid selects which key is recovered
        if check is non-zero, additional checks are performed
        """
        i = int(recid / 2)

        r = None
        s = None
        ctx = None
        R = None
        O = None
        Q = None

        assert len(sigR) == 32, len(sigR)
        assert len(sigS) == 32, len(sigS)

        try:
            r = _ssl.BN_bin2bn(bytes(sigR), len(sigR), _ssl.BN_new())
            s = _ssl.BN_bin2bn(bytes(sigS), len(sigS), _ssl.BN_new())

            group = _ssl.EC_KEY_get0_group(self.k)
            ctx = _ssl.BN_CTX_new()
            order = _ssl.BN_CTX_get(ctx)
            ctx = _ssl.BN_CTX_new()

            if not _ssl.EC_GROUP_get_order(group, order, ctx):
                return -2

            x = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_copy(x, order):
                return -1
            if not _ssl.BN_mul_word(x, i):
                return -1
            if not _ssl.BN_add(x, x, r):
                return -1

            field = _ssl.BN_CTX_get(ctx)
            if not _ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx):
                return -2

            if _ssl.BN_cmp(x, field) >= 0:
                return 0

            R = _ssl.EC_POINT_new(group)
            if R is None:
                return -2
            if not _ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx):
                return 0

            if check:
                O = _ssl.EC_POINT_new(group)
                if O is None:
                    return -2
                if not _ssl.EC_POINT_mul(group, O, None, R, order, ctx):
                    return -2
                if not _ssl.EC_POINT_is_at_infinity(group, O):
                    return 0

            Q = _ssl.EC_POINT_new(group)
            if Q is None:
                return -2

            n = _ssl.EC_GROUP_get_degree(group)
            e = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_bin2bn(msg, msglen, e):
                return -1

            if 8 * msglen > n:
                _ssl.BN_rshift(e, e, 8 - (n & 7))

            zero = _ssl.BN_CTX_get(ctx)
            # if not _ssl.BN_zero(zero):
            #     return -1
            if not _ssl.BN_mod_sub(e, zero, e, order, ctx):
                return -1
            rr = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_mod_inverse(rr, r, order, ctx):
                return -1
            sor = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_mod_mul(sor, s, rr, order, ctx):
                return -1
            eor = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_mod_mul(eor, e, rr, order, ctx):
                return -1
            if not _ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx):
                return -2

            if not _ssl.EC_KEY_set_public_key(self.k, Q):
                return -2

            return 1
        finally:
            if r: _ssl.BN_free(r)
            if s: _ssl.BN_free(s)
            if ctx: _ssl.BN_CTX_free(ctx)
            if R: _ssl.EC_POINT_free(R)
            if O: _ssl.EC_POINT_free(O)
            if Q: _ssl.EC_POINT_free(Q)


def recover_compact(hash, sig):
    """Recover a public key from a compact signature."""
    if len(sig) != 65:
        raise ValueError("Signature should be 65 characters, not [%d]" % (len(sig), ))

    recid = (_bord(sig[0]) - 27) & 3
    compressed = (_bord(sig[0]) - 27) & 4 != 0

    cec_key = CECKey()
    cec_key.set_compressed(compressed)

    sigR = sig[1:33]
    sigS = sig[33:65]

    result = cec_key.recover(sigR, sigS, hash, len(hash), recid, 0)

    if result < 1:
        return False

    pubkey = cec_key.get_pubkey()

    return pubkey

def encode(val, base, minlen=0):
    base, minlen = int(base), int(minlen)
    code_string = ''.join([chr(x) for x in range(256)])
    result = ""
    while val > 0:
        result = code_string[val % base] + result
        val //= base
    return code_string[0] * max(minlen - len(result), 0) + result

def num_to_var_int(x):
    x = int(x)
    if x < 253: return chr(x)
    elif x < 65536: return chr(253)+encode(x, 256, 2)[::-1]
    elif x < 4294967296: return chr(254) + encode(x, 256, 4)[::-1]
    else: return chr(255) + encode(x, 256, 8)[::-1]


def msg_magic(message):
    return "\x18Bitcoin Signed Message:\n" + num_to_var_int( len(message) ) + message
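Worked example of the digest the compact signature covers, using msg_magic and num_to_var_int from above: the message is prefixed with the magic string plus a varint length, then double-SHA256 hashed (this block is illustration, not part of the original file):

wrapped = msg_magic("hello")
assert wrapped == "\x18Bitcoin Signed Message:\n" + "\x05" + "hello"  # 5 < 253: single length byte
digest = hashlib.sha256(hashlib.sha256(wrapped).digest()).digest()
assert len(digest) == 32  # this is the value getMessagePubkey recovers the key from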
def getMessagePubkey(message, sig):
    message = msg_magic(message)
    hash = hashlib.sha256(hashlib.sha256(message).digest()).digest()
    sig = base64.b64decode(sig)

    pubkey = recover_compact(hash, sig)
    return pubkey

def test():
    sign = "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ="
    pubkey = "044827c756561b8ef6b28b5e53a000805adbf4938ab82e1c2b7f7ea16a0d6face9a509a0a13e794d742210b00581f3e249ebcc705240af2540ea19591091ac1d41"
    assert getMessagePubkey("hello", sign).encode("hex") == pubkey

test()  # Make sure it works right

if __name__ == "__main__":
    import time, sys
    sys.path.append("..")
    from pybitcointools import bitcoin as btctools
    priv = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
    address = "1N2XWu5soeppX2qUjvrf81rpdbShKJrjTr"
    sign = btctools.ecdsa_sign("hello", priv)  # HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=

    s = time.time()
    for i in range(100):
        pubkey = getMessagePubkey("hello", sign)
        verified = btctools.pubkey_to_address(pubkey) == address
    print "100x Verified", verified, time.time()-s
192
src/lib/opensslVerify/opensslVerify-alter2.py
Normal file
@ -0,0 +1,192 @@
#!/usr/bin/env python
##
## @file contrib/verifymessage/python/terracoin_verifymessage.py
## @brief terracoin signed message verification sample script.
## @author unknown author ; found on pastebin
##

import ctypes
import ctypes.util
import hashlib
import base64
addrtype = 0

try:
    ssl = ctypes.CDLL("src/lib/opensslVerify/libeay32.dll")
except:
    ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32')

openssl_version = "%.9X" % ssl.SSLeay()

NID_secp256k1 = 714

def check_result(val, func, args):
    if val == 0:
        raise ValueError
    else:
        return ctypes.c_void_p(val)

ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
ssl.EC_KEY_new_by_curve_name.errcheck = check_result

POINT_CONVERSION_COMPRESSED = 2
POINT_CONVERSION_UNCOMPRESSED = 4

__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)

def b58encode(v):
    """ encode v, which is a string of bytes, to base58.
    """

    long_value = 0L
    for (i, c) in enumerate(v[::-1]):
        long_value += (256**i) * ord(c)

    result = ''
    while long_value >= __b58base:
        div, mod = divmod(long_value, __b58base)
        result = __b58chars[mod] + result
        long_value = div
    result = __b58chars[long_value] + result

    # Bitcoin does a little leading-zero-compression:
    # leading 0-bytes in the input become leading-1s
    nPad = 0
    for c in v:
        if c == '\0': nPad += 1
        else: break

    return (__b58chars[0]*nPad) + result

def hash_160(public_key):
    md = hashlib.new('ripemd160')
    md.update(hashlib.sha256(public_key).digest())
    return md.digest()

def hash_160_to_bc_address(h160):
    vh160 = chr(addrtype) + h160
    h = Hash(vh160)
    addr = vh160 + h[0:4]
    return b58encode(addr)

def public_key_to_bc_address(public_key):
    h160 = hash_160(public_key)
    return hash_160_to_bc_address(h160)
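Worked example tying the helpers above together, reusing this commit's own test vector: a Bitcoin address is b58encode(version byte + HASH160(pubkey) + first 4 checksum bytes), which is what public_key_to_bc_address computes. Note that it depends on Hash(), defined a few lines below; the pubkey/address pair is the one asserted in this file's test and __main__ block:

pub = "044827c756561b8ef6b28b5e53a000805adbf4938ab82e1c2b7f7ea16a0d6face9a509a0a13e794d742210b00581f3e249ebcc705240af2540ea19591091ac1d41".decode("hex")
assert public_key_to_bc_address(pub) == "1N2XWu5soeppX2qUjvrf81rpdbShKJrjTr"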
def msg_magic(message):
    #return "\x18Bitcoin Signed Message:\n" + chr( len(message) ) + message
    return "\x18Bitcoin Signed Message:\n" + chr( len(message) ) + message

def get_address(eckey):
    size = ssl.i2o_ECPublicKey(eckey, 0)
    mb = ctypes.create_string_buffer(size)
    ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb)))
    return public_key_to_bc_address(mb.raw)

def Hash(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def bx(bn, size=32):
    b = ctypes.create_string_buffer(size)
    ssl.BN_bn2bin(bn, b);
    return b.raw.encode('hex')

def verify_message(address, signature, message):
    pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
    eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), signature)
    addr = get_address(eckey)
    return (address == addr)

def SetCompactSignature(pkey, hash, signature):
    sig = base64.b64decode(signature)
    if len(sig) != 65:
        raise BaseException("Wrong encoding")
    nV = ord(sig[0])
    if nV < 27 or nV >= 35:
        return False
    if nV >= 31:
        ssl.EC_KEY_set_conv_form(pkey, POINT_CONVERSION_COMPRESSED)
        nV -= 4
    r = ssl.BN_bin2bn(sig[1:33], 32, ssl.BN_new())
    s = ssl.BN_bin2bn(sig[33:], 32, ssl.BN_new())
    eckey = ECDSA_SIG_recover_key_GFp(pkey, r, s, hash, len(hash), nV - 27, False);
    return eckey

def ECDSA_SIG_recover_key_GFp(eckey, r, s, msg, msglen, recid, check):
    n = 0
    i = recid / 2

    group = ssl.EC_KEY_get0_group(eckey)
    ctx = ssl.BN_CTX_new()
    ssl.BN_CTX_start(ctx)
    order = ssl.BN_CTX_get(ctx)
    ssl.EC_GROUP_get_order(group, order, ctx)
    x = ssl.BN_CTX_get(ctx)
    ssl.BN_copy(x, order);
    ssl.BN_mul_word(x, i);
    ssl.BN_add(x, x, r)
    field = ssl.BN_CTX_get(ctx)
    ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx)

    if (ssl.BN_cmp(x, field) >= 0):
        return False

    R = ssl.EC_POINT_new(group)
    ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx)

    if check:
        O = ssl.EC_POINT_new(group)
        ssl.EC_POINT_mul(group, O, None, R, order, ctx)
        if ssl.EC_POINT_is_at_infinity(group, O):
            return False

    Q = ssl.EC_POINT_new(group)
    n = ssl.EC_GROUP_get_degree(group)
    e = ssl.BN_CTX_get(ctx)
    ssl.BN_bin2bn(msg, msglen, e)
    if 8 * msglen > n: ssl.BN_rshift(e, e, 8 - (n & 7))

    zero = ssl.BN_CTX_get(ctx)
    ssl.BN_set_word(zero, 0)
    ssl.BN_mod_sub(e, zero, e, order, ctx)
    rr = ssl.BN_CTX_get(ctx);
    ssl.BN_mod_inverse(rr, r, order, ctx)
    sor = ssl.BN_CTX_get(ctx)
    ssl.BN_mod_mul(sor, s, rr, order, ctx)
    eor = ssl.BN_CTX_get(ctx)
    ssl.BN_mod_mul(eor, e, rr, order, ctx)
    ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx)
    ssl.EC_KEY_set_public_key(eckey, Q)
    return eckey
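For reference, the recovery implemented by ECDSA_SIG_recover_key_GFp above is the standard SEC 1 construction (section 4.1.6); stated compactly (textbook material, not taken from this commit), with group order $n$, base point $G$ and message digest $e$:

x = r + i \cdot n, \qquad R = \text{the point with abscissa } x \text{ selected by } \mathrm{recid} \bmod 2, \qquad Q = r^{-1} (s R - e G)

In the code, $-e r^{-1} \bmod n$ is eor, $s r^{-1} \bmod n$ is sor, and $Q$ is installed as the public key of the returned EC_KEY.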
|
||||
def getMessagePubkey(message, sig):
|
||||
pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
|
||||
eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), sig)
|
||||
size = ssl.i2o_ECPublicKey (eckey, 0)
|
||||
mb = ctypes.create_string_buffer (size)
|
||||
ssl.i2o_ECPublicKey (eckey, ctypes.byref (ctypes.pointer (mb)))
|
||||
return mb.raw
|
||||
|
||||
def test():
|
||||
sign = "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ="
|
||||
pubkey = "044827c756561b8ef6b28b5e53a000805adbf4938ab82e1c2b7f7ea16a0d6face9a509a0a13e794d742210b00581f3e249ebcc705240af2540ea19591091ac1d41"
|
||||
assert getMessagePubkey("hello", sign).encode("hex") == pubkey
|
||||
|
||||
test() # Make sure it working right
|
||||
|
||||
if __name__ == "__main__":
|
||||
import time, os, sys
|
||||
sys.path.append("..")
|
||||
from pybitcointools import bitcoin as btctools
|
||||
priv = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
|
||||
address = "1N2XWu5soeppX2qUjvrf81rpdbShKJrjTr"
|
||||
sign = btctools.ecdsa_sign("hello", priv) # HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=
|
||||
|
||||
s = time.time()
|
||||
for i in range(100):
|
||||
pubkey = getMessagePubkey("hello", sign)
|
||||
verified = btctools.pubkey_to_address(pubkey) == address
|
||||
print "100x Verified", verified, time.time()-s
|
|
@ -1,346 +1,242 @@
|
|||
# Code is borrowed from https://github.com/blocktrail/python-bitcoinlib
|
||||
# Thanks!
|
||||
# via http://pastebin.com/H1XikJFd
|
||||
# -*- Mode: Python -*-
|
||||
|
||||
import base64, hashlib
|
||||
# This is a combination of http://pastebin.com/bQtdDzHx and
|
||||
# https://github.com/Bitmessage/PyBitmessage/blob/master/src/pyelliptic/openssl.py
|
||||
# that doesn't crash on OSX.
|
||||
# Long message bug fixed by ZeroNet
|
||||
|
||||
import ctypes
|
||||
import ctypes.util
|
||||
_bchr = chr
|
||||
_bord = ord
|
||||
import hashlib
|
||||
import base64
|
||||
addrtype = 0
|
||||
|
||||
class _OpenSSL:
|
||||
"""
|
||||
Wrapper for OpenSSL using ctypes
|
||||
"""
|
||||
def __init__(self, library):
|
||||
"""
|
||||
Build the wrapper
|
||||
"""
|
||||
try:
|
||||
self._lib = ctypes.CDLL(library)
|
||||
except:
|
||||
self._lib = ctypes.cdll.LoadLibrary(library)
|
||||
|
||||
self.pointer = ctypes.pointer
|
||||
self.c_int = ctypes.c_int
|
||||
self.byref = ctypes.byref
|
||||
self.create_string_buffer = ctypes.create_string_buffer
|
||||
|
||||
self.BN_new = self._lib.BN_new
|
||||
self.BN_new.restype = ctypes.c_void_p
|
||||
self.BN_new.argtypes = []
|
||||
|
||||
self.BN_copy = self._lib.BN_copy
|
||||
self.BN_copy.restype = ctypes.c_void_p
|
||||
self.BN_copy.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
|
||||
|
||||
self.BN_mul_word = self._lib.BN_mul_word
|
||||
self.BN_mul_word.restype = ctypes.c_int
|
||||
self.BN_mul_word.argtypes = [ctypes.c_void_p, ctypes.c_int]
|
||||
|
||||
self.BN_set_word = self._lib.BN_set_word
|
||||
self.BN_set_word.restype = ctypes.c_int
|
||||
self.BN_set_word.argtypes = [ctypes.c_void_p, ctypes.c_int]
|
||||
|
||||
self.BN_add = self._lib.BN_add
|
||||
self.BN_add.restype = ctypes.c_void_p
|
||||
self.BN_add.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
|
||||
ctypes.c_void_p]
|
||||
|
||||
self.BN_mod_sub = self._lib.BN_mod_sub
|
||||
self.BN_mod_sub.restype = ctypes.c_int
|
||||
self.BN_mod_sub.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
|
||||
ctypes.c_void_p,
|
||||
ctypes.c_void_p,
|
||||
ctypes.c_void_p]
|
||||
|
||||
self.BN_mod_mul = self._lib.BN_mod_mul
|
||||
self.BN_mod_mul.restype = ctypes.c_int
|
||||
self.BN_mod_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
|
||||
ctypes.c_void_p,
|
||||
ctypes.c_void_p,
|
||||
ctypes.c_void_p]
|
||||
|
||||
self.BN_mod_inverse = self._lib.BN_mod_inverse
|
||||
self.BN_mod_inverse.restype = ctypes.c_void_p
|
||||
self.BN_mod_inverse.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
|
||||
ctypes.c_void_p,
|
||||
ctypes.c_void_p]
|
||||
|
||||
self.BN_cmp = self._lib.BN_cmp
|
||||
self.BN_cmp.restype = ctypes.c_int
|
||||
self.BN_cmp.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
|
||||
|
||||
self.BN_bn2bin = self._lib.BN_bn2bin
|
||||
self.BN_bn2bin.restype = ctypes.c_int
|
||||
self.BN_bn2bin.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
|
||||
|
||||
self.BN_bin2bn = self._lib.BN_bin2bn
|
||||
self.BN_bin2bn.restype = ctypes.c_void_p
|
||||
self.BN_bin2bn.argtypes = [ctypes.c_void_p, ctypes.c_int,
|
||||
ctypes.c_void_p]
|
||||
|
||||
self.EC_KEY_new_by_curve_name = self._lib.EC_KEY_new_by_curve_name
|
||||
self.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
|
||||
self.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int]
|
||||
|
||||
self.EC_KEY_get0_group = self._lib.EC_KEY_get0_group
|
||||
self.EC_KEY_get0_group.restype = ctypes.c_void_p
|
||||
self.EC_KEY_get0_group.argtypes = [ctypes.c_void_p]
|
||||
|
||||
self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key
|
||||
self.EC_KEY_set_private_key.restype = ctypes.c_int
|
||||
self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p,
|
||||
ctypes.c_void_p]
|
||||
|
||||
self.EC_KEY_set_public_key = self._lib.EC_KEY_set_public_key
|
||||
self.EC_KEY_set_public_key.restype = ctypes.c_int
|
||||
self.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p,
|
||||
ctypes.c_void_p]
|
||||
|
||||
self.EC_POINT_set_compressed_coordinates_GFp = self._lib.EC_POINT_set_compressed_coordinates_GFp
|
||||
self.EC_POINT_set_compressed_coordinates_GFp.restype = ctypes.c_int
|
||||
self.EC_POINT_set_compressed_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
|
||||
|
||||
self.EC_POINT_new = self._lib.EC_POINT_new
|
||||
self.EC_POINT_new.restype = ctypes.c_void_p
|
||||
self.EC_POINT_new.argtypes = [ctypes.c_void_p]
|
||||
|
||||
self.EC_POINT_free = self._lib.EC_POINT_free
|
||||
self.EC_POINT_free.restype = None
|
||||
self.EC_POINT_free.argtypes = [ctypes.c_void_p]
|
||||
|
||||
self.EC_GROUP_get_order = self._lib.EC_GROUP_get_order
|
||||
self.EC_GROUP_get_order.restype = ctypes.c_void_p
|
||||
self.EC_GROUP_get_order.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
|
||||
|
||||
self.EC_GROUP_get_degree = self._lib.EC_GROUP_get_degree
|
||||
self.EC_GROUP_get_degree.restype = ctypes.c_void_p
|
||||
self.EC_GROUP_get_degree.argtypes = [ctypes.c_void_p]
|
||||
|
||||
self.EC_GROUP_get_curve_GFp = self._lib.EC_GROUP_get_curve_GFp
|
||||
self.EC_GROUP_get_curve_GFp.restype = ctypes.c_void_p
|
||||
self.EC_GROUP_get_curve_GFp.argtypes = [ctypes.c_void_p,
|
||||
ctypes.c_void_p,
|
||||
ctypes.c_void_p,
|
||||
ctypes.c_void_p,
|
||||
ctypes.c_void_p]
|
||||
|
||||
self.EC_POINT_mul = self._lib.EC_POINT_mul
|
||||
self.EC_POINT_mul.restype = ctypes.c_int
|
||||
self.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
|
||||
ctypes.c_void_p, ctypes.c_void_p,
|
||||
ctypes.c_void_p, ctypes.c_void_p]
|
||||
|
||||
self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key
|
||||
self.EC_KEY_set_private_key.restype = ctypes.c_int
|
||||
self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p,
|
||||
ctypes.c_void_p]
|
||||
|
||||
self.EC_KEY_set_conv_form = self._lib.EC_KEY_set_conv_form
|
||||
self.EC_KEY_set_conv_form.restype = None
|
||||
self.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p,
|
||||
ctypes.c_int]
|
||||
|
||||
self.BN_CTX_new = self._lib.BN_CTX_new
|
||||
self._lib.BN_CTX_new.restype = ctypes.c_void_p
|
||||
self._lib.BN_CTX_new.argtypes = []
|
||||
|
||||
self.BN_CTX_start = self._lib.BN_CTX_start
|
||||
self._lib.BN_CTX_start.restype = ctypes.c_void_p
|
||||
self._lib.BN_CTX_start.argtypes = [ctypes.c_void_p]
|
||||
|
||||
self.BN_CTX_get = self._lib.BN_CTX_get
|
||||
self._lib.BN_CTX_get.restype = ctypes.c_void_p
|
||||
self._lib.BN_CTX_get.argtypes = [ctypes.c_void_p]
|
||||
|
||||
self.ECDSA_sign = self._lib.ECDSA_sign
|
||||
self.ECDSA_sign.restype = ctypes.c_int
|
||||
self.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p,
|
||||
ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
|
||||
|
||||
self.ECDSA_verify = self._lib.ECDSA_verify
|
||||
self.ECDSA_verify.restype = ctypes.c_int
|
||||
self.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p,
|
||||
ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
|
||||
|
||||
self.i2o_ECPublicKey = self._lib.i2o_ECPublicKey
|
||||
self.i2o_ECPublicKey.restype = ctypes.c_void_p
|
||||
self.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
|
||||
|
||||
|
||||
|
||||
|
||||
try:
|
||||
_ssl = ctypes.CDLL("src/lib/opensslVerify/libeay32.dll")
|
||||
ssl = _OpenSSL("src/lib/opensslVerify/libeay32.dll")
|
||||
except:
|
||||
_ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32')
|
||||
ssl = _OpenSSL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32')
|
||||
|
||||
import sys
|
||||
openssl_version = "%.9X" % ssl._lib.SSLeay()
|
||||
|
||||
openssl_version = "%.9X" % _ssl.SSLeay()
|
||||
NID_secp256k1 = 714
|
||||
|
||||
|
||||
# this specifies the curve used with ECDSA.
|
||||
_NID_secp256k1 = 714 # from openssl/obj_mac.h
|
||||
|
||||
# Thx to Sam Devlin for the ctypes magic 64-bit fix.
|
||||
def _check_result (val, func, args):
|
||||
def check_result (val, func, args):
|
||||
if val == 0:
|
||||
raise ValueError
|
||||
else:
|
||||
return ctypes.c_void_p(val)
|
||||
return ctypes.c_void_p (val)
|
||||
|
||||
_ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
|
||||
_ssl.EC_KEY_new_by_curve_name.errcheck = _check_result
|
||||
ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
|
||||
ssl.EC_KEY_new_by_curve_name.errcheck = check_result
|
||||
|
||||
# From openssl/ecdsa.h
|
||||
class ECDSA_SIG_st(ctypes.Structure):
|
||||
_fields_ = [("r", ctypes.c_void_p),
|
||||
("s", ctypes.c_void_p)]
|
||||
POINT_CONVERSION_COMPRESSED = 2
|
||||
POINT_CONVERSION_UNCOMPRESSED = 4
|
||||
|
||||
class CECKey:
|
||||
"""Wrapper around OpenSSL's EC_KEY"""
|
||||
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
|
||||
__b58base = len(__b58chars)
|
||||
|
||||
POINT_CONVERSION_COMPRESSED = 2
|
||||
POINT_CONVERSION_UNCOMPRESSED = 4
|
||||
def b58encode(v):
|
||||
""" encode v, which is a string of bytes, to base58.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.k = _ssl.EC_KEY_new_by_curve_name(_NID_secp256k1)
|
||||
long_value = 0L
|
||||
for (i, c) in enumerate(v[::-1]):
|
||||
long_value += (256**i) * ord(c)
|
||||
|
||||
def __del__(self):
|
||||
if _ssl:
|
||||
_ssl.EC_KEY_free(self.k)
|
||||
self.k = None
|
||||
result = ''
|
||||
while long_value >= __b58base:
|
||||
div, mod = divmod(long_value, __b58base)
|
||||
result = __b58chars[mod] + result
|
||||
long_value = div
|
||||
result = __b58chars[long_value] + result
|
||||
|
||||
def set_secretbytes(self, secret):
|
||||
priv_key = _ssl.BN_bin2bn(secret, 32, _ssl.BN_new())
|
||||
group = _ssl.EC_KEY_get0_group(self.k)
|
||||
pub_key = _ssl.EC_POINT_new(group)
|
||||
ctx = _ssl.BN_CTX_new()
|
||||
if not _ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx):
|
||||
raise ValueError("Could not derive public key from the supplied secret.")
|
||||
_ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx)
|
||||
_ssl.EC_KEY_set_private_key(self.k, priv_key)
|
||||
_ssl.EC_KEY_set_public_key(self.k, pub_key)
|
||||
_ssl.EC_POINT_free(pub_key)
|
||||
_ssl.BN_CTX_free(ctx)
|
||||
return self.k
|
||||
# Bitcoin does a little leading-zero-compression:
|
||||
# leading 0-bytes in the input become leading-1s
|
||||
nPad = 0
|
||||
for c in v:
|
||||
if c == '\0': nPad += 1
|
||||
else: break
|
||||
|
||||
def set_privkey(self, key):
|
||||
self.mb = ctypes.create_string_buffer(key)
|
||||
return _ssl.d2i_ECPrivateKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
|
||||
return (__b58chars[0]*nPad) + result
|
||||
|
||||
def set_pubkey(self, key):
|
||||
self.mb = ctypes.create_string_buffer(key)
|
||||
return _ssl.o2i_ECPublicKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
|
||||
def hash_160(public_key):
|
||||
md = hashlib.new('ripemd160')
|
||||
md.update(hashlib.sha256(public_key).digest())
|
||||
return md.digest()
|
||||
|
||||
def get_privkey(self):
|
||||
size = _ssl.i2d_ECPrivateKey(self.k, 0)
|
||||
mb_pri = ctypes.create_string_buffer(size)
|
||||
_ssl.i2d_ECPrivateKey(self.k, ctypes.byref(ctypes.pointer(mb_pri)))
|
||||
return mb_pri.raw
|
||||
def hash_160_to_bc_address(h160):
|
||||
vh160 = chr(addrtype) + h160
|
||||
h = Hash(vh160)
|
||||
addr = vh160 + h[0:4]
|
||||
return b58encode(addr)
|
||||
|
||||
def get_pubkey(self):
|
||||
size = _ssl.i2o_ECPublicKey(self.k, 0)
|
||||
mb = ctypes.create_string_buffer(size)
|
||||
_ssl.i2o_ECPublicKey(self.k, ctypes.byref(ctypes.pointer(mb)))
|
||||
return mb.raw
|
||||
|
||||
def get_raw_ecdh_key(self, other_pubkey):
|
||||
ecdh_keybuffer = ctypes.create_string_buffer(32)
|
||||
r = _ssl.ECDH_compute_key(ctypes.pointer(ecdh_keybuffer), 32,
|
||||
_ssl.EC_KEY_get0_public_key(other_pubkey.k),
|
||||
self.k, 0)
|
||||
if r != 32:
|
||||
raise Exception('CKey.get_ecdh_key(): ECDH_compute_key() failed')
|
||||
return ecdh_keybuffer.raw
|
||||
|
||||
def get_ecdh_key(self, other_pubkey, kdf=lambda k: hashlib.sha256(k).digest()):
|
||||
# FIXME: be warned it's not clear what the kdf should be as a default
|
||||
r = self.get_raw_ecdh_key(other_pubkey)
|
||||
return kdf(r)
|
||||
|
||||
def sign(self, hash):
|
||||
if not isinstance(hash, bytes):
|
||||
raise TypeError('Hash must be bytes instance; got %r' % hash.__class__)
|
||||
if len(hash) != 32:
|
||||
raise ValueError('Hash must be exactly 32 bytes long')
|
||||
|
||||
sig_size0 = ctypes.c_uint32()
|
||||
sig_size0.value = _ssl.ECDSA_size(self.k)
|
||||
mb_sig = ctypes.create_string_buffer(sig_size0.value)
|
||||
result = _ssl.ECDSA_sign(0, hash, len(hash), mb_sig, ctypes.byref(sig_size0), self.k)
|
||||
assert 1 == result
|
||||
if bitcoin.core.script.IsLowDERSignature(mb_sig.raw[:sig_size0.value]):
|
||||
return mb_sig.raw[:sig_size0.value]
|
||||
else:
|
||||
return self.signature_to_low_s(mb_sig.raw[:sig_size0.value])
|
||||
|
||||
def sign_compact(self, hash):
|
||||
if not isinstance(hash, bytes):
|
||||
raise TypeError('Hash must be bytes instance; got %r' % hash.__class__)
|
||||
if len(hash) != 32:
|
||||
raise ValueError('Hash must be exactly 32 bytes long')
|
||||
|
||||
sig_size0 = ctypes.c_uint32()
|
||||
sig_size0.value = _ssl.ECDSA_size(self.k)
|
||||
mb_sig = ctypes.create_string_buffer(sig_size0.value)
|
||||
result = _ssl.ECDSA_sign(0, hash, len(hash), mb_sig, ctypes.byref(sig_size0), self.k)
|
||||
assert 1 == result
|
||||
|
||||
if bitcoin.core.script.IsLowDERSignature(mb_sig.raw[:sig_size0.value]):
|
||||
sig = mb_sig.raw[:sig_size0.value]
|
||||
else:
|
||||
sig = self.signature_to_low_s(mb_sig.raw[:sig_size0.value])
|
||||
|
||||
sig = bitcoin.core.DERSignature.deserialize(sig)
|
||||
|
||||
r_val = sig.r
|
||||
s_val = sig.s
|
||||
|
||||
# assert that the r and s are less than 32 long, excluding leading 0s
|
||||
assert len(r_val) <= 32 or r_val[0:-32] == b'\x00'
|
||||
assert len(s_val) <= 32 or s_val[0:-32] == b'\x00'
|
||||
|
||||
# ensure r and s are always 32 chars long by 0padding
|
||||
r_val = ((b'\x00' * 32) + r_val)[-32:]
|
||||
s_val = ((b'\x00' * 32) + s_val)[-32:]
|
||||
|
||||
# tmp pubkey of self, but always compressed
|
||||
pubkey = CECKey()
|
||||
pubkey.set_pubkey(self.get_pubkey())
|
||||
pubkey.set_compressed(True)
|
||||
|
||||
# bitcoin core does <4, but I've seen other places do <2 and I've never seen a i > 1 so far
|
||||
for i in range(0, 4):
|
||||
cec_key = CECKey()
|
||||
cec_key.set_compressed(True)
|
||||
|
||||
result = cec_key.recover(r_val, s_val, hash, len(hash), i, 1)
|
||||
if result == 1:
|
||||
if cec_key.get_pubkey() == pubkey.get_pubkey():
|
||||
return r_val + s_val, i
|
||||
|
||||
raise ValueError
|
||||
|
||||
    def signature_to_low_s(self, sig):
        der_sig = ECDSA_SIG_st()
        _ssl.d2i_ECDSA_SIG(ctypes.byref(ctypes.pointer(der_sig)), ctypes.byref(ctypes.c_char_p(sig)), len(sig))
        group = _ssl.EC_KEY_get0_group(self.k)
        order = _ssl.BN_new()
        halforder = _ssl.BN_new()
        ctx = _ssl.BN_CTX_new()
        _ssl.EC_GROUP_get_order(group, order, ctx)
        _ssl.BN_rshift1(halforder, order)

        # Verify that s is over half the order of the curve before we actually subtract anything from it
        if _ssl.BN_cmp(der_sig.s, halforder) > 0:
            _ssl.BN_sub(der_sig.s, order, der_sig.s)

        _ssl.BN_free(halforder)
        _ssl.BN_free(order)
        _ssl.BN_CTX_free(ctx)

        derlen = _ssl.i2d_ECDSA_SIG(ctypes.pointer(der_sig), 0)
        if derlen == 0:
            _ssl.ECDSA_SIG_free(der_sig)
            return None
        new_sig = ctypes.create_string_buffer(derlen)
        _ssl.i2d_ECDSA_SIG(ctypes.pointer(der_sig), ctypes.byref(ctypes.pointer(new_sig)))
        _ssl.BN_free(der_sig.r)
        _ssl.BN_free(der_sig.s)

        return new_sig.raw
    def verify(self, hash, sig):
        """Verify a DER signature"""
        if not sig:
            return False

        # New versions of OpenSSL will reject non-canonical DER signatures. de/re-serialize first.
        norm_sig = ctypes.c_void_p(0)
        _ssl.d2i_ECDSA_SIG(ctypes.byref(norm_sig), ctypes.byref(ctypes.c_char_p(sig)), len(sig))

        derlen = _ssl.i2d_ECDSA_SIG(norm_sig, 0)
        if derlen == 0:
            _ssl.ECDSA_SIG_free(norm_sig)
            return False

        norm_der = ctypes.create_string_buffer(derlen)
        _ssl.i2d_ECDSA_SIG(norm_sig, ctypes.byref(ctypes.pointer(norm_der)))
        _ssl.ECDSA_SIG_free(norm_sig)

        # -1 = error, 0 = bad sig, 1 = good
        return _ssl.ECDSA_verify(0, hash, len(hash), norm_der, derlen, self.k) == 1
    def set_compressed(self, compressed):
        if compressed:
            form = self.POINT_CONVERSION_COMPRESSED
        else:
            form = self.POINT_CONVERSION_UNCOMPRESSED
        _ssl.EC_KEY_set_conv_form(self.k, form)
    def recover(self, sigR, sigS, msg, msglen, recid, check):
        """
        Perform ECDSA key recovery (see SEC1 4.1.6) for curves over (mod p)-fields
        recid selects which key is recovered
        if check is non-zero, additional checks are performed
        """
        i = int(recid / 2)

        r = None
        s = None
        ctx = None
        R = None
        O = None
        Q = None

        assert len(sigR) == 32, len(sigR)
        assert len(sigS) == 32, len(sigS)

        try:
            r = _ssl.BN_bin2bn(bytes(sigR), len(sigR), _ssl.BN_new())
            s = _ssl.BN_bin2bn(bytes(sigS), len(sigS), _ssl.BN_new())

            group = _ssl.EC_KEY_get0_group(self.k)
            ctx = _ssl.BN_CTX_new()
            order = _ssl.BN_CTX_get(ctx)

            if not _ssl.EC_GROUP_get_order(group, order, ctx):
                return -2

            x = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_copy(x, order):
                return -1
            if not _ssl.BN_mul_word(x, i):
                return -1
            if not _ssl.BN_add(x, x, r):
                return -1

            field = _ssl.BN_CTX_get(ctx)
            if not _ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx):
                return -2

            if _ssl.BN_cmp(x, field) >= 0:
                return 0

            R = _ssl.EC_POINT_new(group)
            if R is None:
                return -2
            if not _ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx):
                return 0

            if check:
                O = _ssl.EC_POINT_new(group)
                if O is None:
                    return -2
                if not _ssl.EC_POINT_mul(group, O, None, R, order, ctx):
                    return -2
                if not _ssl.EC_POINT_is_at_infinity(group, O):
                    return 0

            Q = _ssl.EC_POINT_new(group)
            if Q is None:
                return -2

            n = _ssl.EC_GROUP_get_degree(group)
            e = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_bin2bn(msg, msglen, e):
                return -1

            if 8 * msglen > n:
                _ssl.BN_rshift(e, e, 8 - (n & 7))

            zero = _ssl.BN_CTX_get(ctx)
            # if not _ssl.BN_zero(zero):
            #     return -1
            if not _ssl.BN_mod_sub(e, zero, e, order, ctx):
                return -1
            rr = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_mod_inverse(rr, r, order, ctx):
                return -1
            sor = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_mod_mul(sor, s, rr, order, ctx):
                return -1
            eor = _ssl.BN_CTX_get(ctx)
            if not _ssl.BN_mod_mul(eor, e, rr, order, ctx):
                return -1
            if not _ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx):
                return -2

            if not _ssl.EC_KEY_set_public_key(self.k, Q):
                return -2

            return 1
        finally:
            if r: _ssl.BN_free(r)
            if s: _ssl.BN_free(s)
            if ctx: _ssl.BN_CTX_free(ctx)
            if R: _ssl.EC_POINT_free(R)
            if O: _ssl.EC_POINT_free(O)
            if Q: _ssl.EC_POINT_free(Q)
def recover_compact(hash, sig):
    """Recover a public key from a compact signature."""
    if len(sig) != 65:
        raise ValueError("Signature should be 65 characters, not [%d]" % (len(sig), ))

    recid = (_bord(sig[0]) - 27) & 3
    compressed = (_bord(sig[0]) - 27) & 4 != 0

    cec_key = CECKey()
    cec_key.set_compressed(compressed)

    sigR = sig[1:33]
    sigS = sig[33:65]

    result = cec_key.recover(sigR, sigS, hash, len(hash), recid, 0)

    if result < 1:
        return False

    pubkey = cec_key.get_pubkey()

    return pubkey
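# Usage sketch (hedged illustration; reuses the sample message and base64
# signature from the test() block further down this file):
#
#   import base64, hashlib
#   sig = base64.b64decode("HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=")
#   hash = hashlib.sha256(hashlib.sha256(msg_magic("hello")).digest()).digest()
#   pubkey = recover_compact(hash, sig)  # public key bytes, or False on failure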
def public_key_to_bc_address(public_key):
    h160 = hash_160(public_key)
    return hash_160_to_bc_address(h160)

def encode(val, base, minlen=0):
    base, minlen = int(base), int(minlen)
@@ -358,18 +254,107 @@ def num_to_var_int(x):
    elif x < 4294967296: return chr(254) + encode(x, 256, 4)[::-1]
    else: return chr(255) + encode(x, 256, 8)[::-1]


def msg_magic(message):
    return "\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + message
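# Illustration (hedged: assumes the elided first branch of num_to_var_int()
# returns chr(x) for x < 253, the standard Bitcoin varint encoding):
#   msg_magic("hello") == "\x18Bitcoin Signed Message:\n\x05hello"
#
# Note: the ssl.*-based helpers below (get_address through
# ECDSA_SIG_recover_key_GFp) are the previous OpenSSL layer that the
# _ssl/CECKey implementation above supersedes.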
def get_address(eckey):
    size = ssl.i2o_ECPublicKey(eckey, 0)
    mb = ctypes.create_string_buffer(size)
    ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb)))
    return public_key_to_bc_address(mb.raw)

def Hash(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def bx(bn, size=32):
    b = ctypes.create_string_buffer(size)
    ssl.BN_bn2bin(bn, b)
    return b.raw.encode('hex')
def verify_message(address, signature, message):
    pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
    eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), signature)
    addr = get_address(eckey)
    return (address == addr)

def SetCompactSignature(pkey, hash, signature):
    sig = base64.b64decode(signature)
    if len(sig) != 65:
        raise BaseException("Wrong encoding")
    nV = ord(sig[0])
    if nV < 27 or nV >= 35:
        return False
    if nV >= 31:
        ssl.EC_KEY_set_conv_form(pkey, POINT_CONVERSION_COMPRESSED)
        nV -= 4
    r = ssl.BN_bin2bn(sig[1:33], 32, None)
    s = ssl.BN_bin2bn(sig[33:], 32, None)
    eckey = ECDSA_SIG_recover_key_GFp(pkey, r, s, hash, len(hash), nV - 27, False)
    return eckey
def ECDSA_SIG_recover_key_GFp(eckey, r, s, msg, msglen, recid, check):
    n = 0
    i = recid / 2

    group = ssl.EC_KEY_get0_group(eckey)
    ctx = ssl.BN_CTX_new()
    ssl.BN_CTX_start(ctx)
    order = ssl.BN_CTX_get(ctx)
    ssl.EC_GROUP_get_order(group, order, ctx)
    x = ssl.BN_CTX_get(ctx)
    ssl.BN_copy(x, order)
    ssl.BN_mul_word(x, i)
    ssl.BN_add(x, x, r)
    field = ssl.BN_CTX_get(ctx)
    ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx)

    if ssl.BN_cmp(x, field) >= 0:
        return False

    R = ssl.EC_POINT_new(group)
    ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx)

    if check:
        O = ssl.EC_POINT_new(group)
        ssl.EC_POINT_mul(group, O, None, R, order, ctx)
        if ssl.EC_POINT_is_at_infinity(group, O):
            return False

    Q = ssl.EC_POINT_new(group)
    n = ssl.EC_GROUP_get_degree(group)
    e = ssl.BN_CTX_get(ctx)
    ssl.BN_bin2bn(msg, msglen, e)
    if 8 * msglen > n: ssl.BN_rshift(e, e, 8 - (n & 7))

    zero = ssl.BN_CTX_get(ctx)
    ssl.BN_set_word(zero, 0)
    ssl.BN_mod_sub(e, zero, e, order, ctx)
    rr = ssl.BN_CTX_get(ctx)
    ssl.BN_mod_inverse(rr, r, order, ctx)
    sor = ssl.BN_CTX_get(ctx)
    ssl.BN_mod_mul(sor, s, rr, order, ctx)
    eor = ssl.BN_CTX_get(ctx)
    ssl.BN_mod_mul(eor, e, rr, order, ctx)
    ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx)
    ssl.EC_KEY_set_public_key(eckey, Q)
    return eckey
def close():
    import _ctypes
    if "FreeLibrary" in dir(_ctypes):
        _ctypes.FreeLibrary(ssl._lib._handle)
    else:
        _ctypes.dlclose(ssl._lib._handle)
def getMessagePubkey(message, sig):
    message = msg_magic(message)
    hash = hashlib.sha256(hashlib.sha256(message).digest()).digest()
    sig = base64.b64decode(sig)

    pubkey = recover_compact(hash, sig)
    return pubkey
    pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
    eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), sig)
    size = ssl.i2o_ECPublicKey(eckey, 0)
    mb = ctypes.create_string_buffer(size)
    ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb)))
    return mb.raw
def test():
    sign = "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ="

@@ -379,8 +364,10 @@ def test():
test()  # Make sure it is working right

if __name__ == "__main__":
    import time
    import time, os, sys
    sys.path.append("..")
    from pybitcointools import bitcoin as btctools
    print "OpenSSL version %s" % openssl_version
    priv = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
    address = "1N2XWu5soeppX2qUjvrf81rpdbShKJrjTr"
    sign = btctools.ecdsa_sign("hello", priv)  # HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=
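A plausible round-trip for the module above (a hedged sketch: it assumes the
__main__ block's pybitcointools import and sample key, plus the module's
hash_160 helpers that this hunk does not show):

    sign = btctools.ecdsa_sign("hello", priv)
    pub = getMessagePubkey("hello", sign)
    print public_key_to_bc_address(pub) == address  # expected: True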
16 src/lib/opensslVerify/stablityTest.py (new file)
@@ -0,0 +1,16 @@
import opensslVerify, gevent, time
from gevent import monkey; monkey.patch_all(thread=False, ssl=False)

def test():
    data = "A" * 1024
    sign = "G2Jo8dDa+jqvJipft9E3kfrAxjESWLBpVtuGIiEBCD/UUyHmRMYNqnlWeOiaHHpja5LOP+U5CanRALfOjCSYIa8="
    for i in range(5 * 1000):
        if i % 1000 == 0:
            print i, len(data)
            data += data + "A"
            time.sleep(0)
        pub = opensslVerify.getMessagePubkey(data, sign)

    print repr(pub), len(data)

gevent.joinall([gevent.spawn(test), gevent.spawn(test)])
32 src/main.py
@@ -111,7 +111,7 @@ class Actions:
logging.info("Site created!")
|
||||
|
||||
|
||||
def siteSign(self, address, privatekey=None, inner_path="content.json"):
|
||||
def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False):
|
||||
from Site import Site
|
||||
logging.info("Signing site: %s..." % address)
|
||||
site = Site(address, allow_create = False)
|
||||
|
@@ -119,7 +119,9 @@ class Actions:
		if not privatekey:  # If no privatekey in args then ask it now
			import getpass
			privatekey = getpass.getpass("Private key (input hidden):")
		site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True)
		succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True)
		if succ and publish:
			self.sitePublish(address, inner_path=inner_path)


	def siteVerify(self, address):
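Hedged usage note: with publish wired through siteSign, a single command signs
and then pushes the updated content.json (assuming zeronet.py as the entry
point; the address is the sample one from the opensslVerify test block):

    python zeronet.py siteSign 1N2XWu5soeppX2qUjvrf81rpdbShKJrjTr --publish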
@@ -128,16 +130,18 @@ class Actions:
		s = time.time()
		logging.info("Verifying site: %s..." % address)
		site = Site(address)
		bad_files = []

		for content_inner_path in site.content_manager.contents:
			logging.info("Verifying %s signature..." % content_inner_path)
			if site.content_manager.verifyFile(content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False) == True:
				logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
			else:
				logging.error("[ERROR] %s not signed by address %s!" % (content_inner_path, address))
				logging.error("[ERROR] %s: invalid file!" % content_inner_path)
				bad_files.append(content_inner_path)

		logging.info("Verifying site files...")
		bad_files = site.storage.verifyFiles()
		bad_files += site.storage.verifyFiles()
		if not bad_files:
			logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
		else:
@@ -197,17 +201,20 @@ class Actions:
		else:  # Just ask the tracker
			logging.info("Gathering peers from tracker")
			site.announce()  # Gather peers
		site.publish(20, inner_path)  # Push to 20 peers
		time.sleep(3)
		logging.info("Serving files (max 60s)...")
		gevent.joinall([file_server_thread], timeout=60)
		logging.info("Done.")
		published = site.publish(20, inner_path)  # Push to 20 peers
		if published > 0:
			time.sleep(3)
			logging.info("Serving files (max 60s)...")
			gevent.joinall([file_server_thread], timeout=60)
			logging.info("Done.")
		else:
			logging.info("No peers found for this site, sitePublish command only works if you already have peers serving your site")


	# Crypto commands

	def cryptoPrivatekeyToAddress(self, privatekey=None):
	def cryptPrivatekeyToAddress(self, privatekey=None):
		from Crypt import CryptBitcoin
		if not privatekey:  # If no privatekey in args then ask it now
			import getpass
@@ -216,6 +223,11 @@ class Actions:
		print CryptBitcoin.privatekeyToAddress(privatekey)


	def cryptSign(self, message, privatekey):
		from Crypt import CryptBitcoin
		print CryptBitcoin.sign(message, privatekey)


	# Peer

	def peerPing(self, peer_ip, peer_port):
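Hedged usage note for the new cryptSign action (an illustrative invocation,
reusing the sample private key from the opensslVerify test block; it prints
the base64-encoded compact signature):

    python zeronet.py cryptSign "hello" 5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk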
src/util/Noparallel.py
@@ -9,7 +9,7 @@ class Noparallel(object):  # Only allow function running once in same time

	def __call__(self, func):
		def wrapper(*args, **kwargs):
			key = (func, tuple(args), tuple(kwargs))  # Unique key for function including parameters
			key = (func, tuple(args), tuple(kwargs.items()))  # Unique key for function including parameters
			if key in self.threads:  # Thread already running (if using blocking mode)
				thread = self.threads[key]
				if self.blocking:
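# Why the key change matters (illustrative values):
#   tuple({"num": 1})          -> ('num',)        keys only, values ignored
#   tuple({"num": 1}.items())  -> (('num', 1),)   values are part of the key
# so calls that differ only in keyword argument values no longer collide.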
@@ -24,14 +24,13 @@ class Noparallel(object):  # Only allow function running once in same time
				return thread
		else:  # Thread not running
			thread = gevent.spawn(func, *args, **kwargs)  # Spawning new thread
			thread.link(lambda thread: self.cleanup(key, thread))
			self.threads[key] = thread
			if self.blocking:  # Wait for finish
				thread.join()
				ret = thread.value
				if key in self.threads: del(self.threads[key])  # Allowing it to run again
				return ret
			else:  # No blocking just return the thread
				thread.link(lambda thread: self.cleanup(key, thread))
				return thread
		wrapper.func_name = func.func_name
src/util/RateLimit.py
@@ -38,7 +38,7 @@ def callQueue(event):

# Rate limit and delay function call if needed. If the function is called again within the rate limit interval, the previously queued call is dropped
# Return: Immediately returned gevent thread
def callAsync(event, allowed_again=10, func=None, *args, **kwargs):
	if isAllowed(event):  # Not called recently, call it now
	if isAllowed(event, allowed_again):  # Not called recently, call it now
		called(event)
		# print "Calling now"
		return gevent.spawn(func, *args, **kwargs)
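# Effect of the fix (illustrative): callAsync("announce", 30, site.announce)
# now checks isAllowed against the caller's 30s interval; before, the
# allowed_again argument was accepted but never forwarded, so isAllowed
# always fell back to its own default interval.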
update.py
@@ -56,7 +56,10 @@ def update():

		if dest_dir != dest_path.strip("/"):
			data = zip.read(inner_path)
			open(dest_path, 'wb').write(data)
			try:
				open(dest_path, 'wb').write(data)
			except Exception, err:
				print dest_path, err

	print "Done."
zeronet.py
@@ -10,6 +10,13 @@ def main():

	main.start()
	if main.update_after_shutdown:  # Updater
		import update, sys, os, gc
		# Try to clean up openssl
		try:
			if "lib.opensslVerify" in sys.modules:
				sys.modules["lib.opensslVerify"].opensslVerify.close()
		except Exception, err:
			print "Error closing openssl", err

		# Update
		update.update()
@@ -24,7 +31,7 @@ def main():

	except Exception, err:  # Prevent closing
		import traceback
		traceback.print_exc()
		raw_input("-- Error happened, press enter to close --")
		traceback.print_exc(file=open("log/error.log", "a"))

	if main and main.update_after_shutdown:  # Updater
		# Restart