diff --git a/src/Config.py b/src/Config.py
index dad7d4e9..a5ca95c9 100644
--- a/src/Config.py
+++ b/src/Config.py
@@ -3,7 +3,7 @@ import ConfigParser
 
 class Config(object):
 	def __init__(self):
-		self.version = "0.1.6"
+		self.version = "0.2.0"
 		self.parser = self.createArguments()
 		argv = sys.argv[:] # Copy command line arguments
 		argv = self.parseConfig(argv) # Add arguments from config file
@@ -37,14 +37,16 @@ class Config(object):
 
 		# SiteSign
 		action = subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
-		action.add_argument('address', 		help='Site to sign')
-		action.add_argument('privatekey',	help='Private key (default: ask on execute)', nargs='?')
+		action.add_argument('address', 			help='Site to sign')
+		action.add_argument('privatekey',		help='Private key (default: ask on execute)', nargs='?')
+		action.add_argument('--inner_path',		help='File you want to sign (default: content.json)', default="content.json", metavar="inner_path")
 
 		# SitePublish
 		action = subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
 		action.add_argument('address', 		help='Site to publish')
 		action.add_argument('peer_ip',		help='Peer ip to publish (default: random peers ip from tracker)', default=None, nargs='?')
 		action.add_argument('peer_port',	help='Peer port to publish (default: random peer port from tracker)', default=15441, nargs='?')
+		action.add_argument('--inner_path',	help='Content.json you want to publish (default: content.json)', default="content.json", metavar="inner_path")
 
 		# SiteVerify
 		action = subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py
new file mode 100644
index 00000000..96e49671
--- /dev/null
+++ b/src/Content/ContentManager.py
@@ -0,0 +1,373 @@
+import json, time, re, os
+from Debug import Debug
+from Crypt import CryptHash
+from Config import config
+
+class ContentManager:
+	def __init__(self, site):
+		self.site = site
+		self.log = self.site.log
+		self.contents = {} # Known content.json (without files and includes)
+		self.loadContent(add_bad_files = False)
+
+
+	# Load content.json into self.contents
+	# Return: Changed files ["index.html", "data/messages.json"]
+	def loadContent(self, content_inner_path = "content.json", add_bad_files = True, load_includes = True):
+		content_inner_path = content_inner_path.strip("/") # Remove / from beginning
+		old_content = self.contents.get(content_inner_path)
+		content_path = self.site.getPath(content_inner_path)
+		content_dir = self.toDir(content_inner_path)
+
+		if os.path.isfile(content_path):
+			try:
+				new_content = json.load(open(content_path))
+			except Exception, err:
+				self.log.error("Content.json load error: %s" % Debug.formatException(err))
+				return False
+		else:
+			self.log.error("Content.json not exits: %s" % content_path)
+			return False # Content.json not exits
+
+
+		try:
+			# Get the files where the sha512 changed
+			changed = []
+			for relative_path, info in new_content.get("files", {}).items():
+				if "sha512" in info:
+					hash_type = "sha512"
+				else: # Backward compatibility
+					hash_type = "sha1"
+
+				new_hash = info[hash_type]
+				if old_content and old_content["files"].get(relative_path): # We have the file in the old content
+					old_hash = old_content["files"][relative_path][hash_type]
+				else: # The file is not in the old content
+					old_hash = None
+				if old_hash != new_hash: changed.append(content_dir+relative_path)
+
+			# Load includes
+			if load_includes and "includes" in new_content:
+				for relative_path, info in new_content["includes"].items():
+					include_inner_path = content_dir+relative_path
+					if os.path.isfile(self.site.getPath(include_inner_path)): # Content.json exists, load it
+						success = self.loadContent(include_inner_path, add_bad_files=add_bad_files)
+						if success: changed += success # Add changed files
+					else: # Content.json does not exist, add to changed files
+						self.log.debug("Missing include: %s" % include_inner_path)
+						changed += [include_inner_path]
+
+			# Update the content
+			self.contents[content_inner_path] = new_content
+		except Exception, err:
+			self.log.error("Content.json parse error: %s" % Debug.formatException(err))
+			return False # Content.json parse error
+
+		# Add changed files to bad files
+		if add_bad_files:
+			for inner_path in changed:
+				self.site.bad_files[inner_path] = True
+
+		return changed
+
+
+	# Find the file info entry for inner_path in self.contents
+	# Return: { "sha512": "c29d73d30ee8c9c1b5600e8a84447a6de15a3c3db6869aca4a2a578c1721f518", "size": 41 , "content_inner_path": "content.json"}
+	def getFileInfo(self, inner_path):
+		dirs = inner_path.split("/") # Parent dirs of content.json
+		inner_path_parts = [dirs.pop()] # Filename relative to content.json
+		while True:
+			content_inner_path = "%s/content.json" % "/".join(dirs)
+			content = self.contents.get(content_inner_path.strip("/"))
+			if content and "files" in content: # Check if content.json exists
+				back = content["files"].get("/".join(inner_path_parts))
+				back["content_inner_path"] = content_inner_path
+				return back
+			else: # No inner path in this dir, lets try the parent dir
+				if dirs: 
+					inner_path_parts.insert(0, dirs.pop())
+				else: # No more parent dirs
+					break
+
+		return False # Not found
+
+
+	def getIncludeInfo(self, inner_path):
+		if not inner_path.endswith("content.json"): # Find the files content.json first
+			inner_path = self.getFileInfo(inner_path)["content_inner_path"]
+		dirs = inner_path.split("/") # Parent dirs of content.json
+		inner_path_parts = [dirs.pop()] # Filename relative to content.json
+		inner_path_parts.insert(0, dirs.pop()) # Dont check in self dir
+		while True:
+			content_inner_path = "%s/content.json" % "/".join(dirs)
+			content = self.contents.get(content_inner_path.strip("/"))
+			if content and "includes" in content:
+				return content["includes"].get("/".join(inner_path_parts))
+			else: # No inner path in this dir, let's try the parent dir
+				if dirs:
+					inner_path_parts.insert(0, dirs.pop())
+				else: # No more parent dirs
+					break
+
+		return False
+
+
+
+	# Create and sign a content.json
+	# Return: The new content if filewrite = False
+	def sign(self, inner_path = "content.json", privatekey=None, filewrite=True):
+		content = self.contents.get(inner_path)
+		if not content: # Content does not exist yet, load default one
+			self.log.info("File %s does not exist yet, loading default values..." % inner_path)
+			content = {"files": {}, "signs": {}} # Default content.json
+			if inner_path == "content.json": # Its the root content.json, add some more fields
+				content["title"] = "%s - ZeroNet_" % self.site.address
+				content["description"] = ""
+				content["signs_required"] = 1
+				content["ignore"] = ""
+
+		directory = self.toDir(self.site.getPath(inner_path))
+		self.log.info("Opening site data directory: %s..." % directory)
+
+		hashed_files = {}
+		for root, dirs, files in os.walk(directory):
+			for file_name in files:
+				file_path = self.site.getPath("%s/%s" % (root.strip("/"), file_name))
+				file_inner_path = re.sub(re.escape(directory), "", file_path)
+				
+				if file_name == "content.json" or (content.get("ignore") and re.match(content["ignore"], file_inner_path)) or file_name.startswith("."): # Ignore content.json, definied regexp and files starting with .
+					self.log.info("- [SKIPPED] %s" % file_inner_path)
+				else:
+					sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file
+					self.log.info("- %s (SHA512: %s)" % (file_inner_path, sha512sum))
+					hashed_files[file_inner_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
+					if inner_path == "content.json": # Backward compatibility to root conten.json
+						hashed_files[file_inner_path]["sha1"] = CryptHash.sha1sum(file_path)
+
+		# Generate new content.json
+		self.log.info("Adding timestamp and sha512sums to new content.json...")
+
+		new_content = content.copy() # Create a copy of current content.json
+		new_content["files"] = hashed_files # Add files sha512 hash
+		new_content["modified"] = time.time() # Add timestamp
+		if inner_path == "content.json": 
+			new_content["address"] = self.site.address
+			new_content["zeronet_version"] = config.version
+			new_content["signs_required"] = content.get("signs_required", 1)
+
+		from Crypt import CryptBitcoin
+		self.log.info("Verifying private key...")
+		privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
+		valid_signers = self.getValidSigners(inner_path)
+		if privatekey_address not in valid_signers:
+			return self.log.error("Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address))
+		self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))
+
+		if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key sign the valid signers
+			new_content["signers_sign"] = CryptBitcoin.sign("%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey)
+			if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none")
+
+		self.log.info("Signing %s..." % inner_path)
+
+		if "signs" in new_content: del(new_content["signs"]) # Delete old signs
+		if "sign" in new_content: del(new_content["sign"]) # Delete old sign (backward compatibility)
+
+		sign_content = json.dumps(new_content, sort_keys=True)
+		sign = CryptBitcoin.sign(sign_content, privatekey)
+		#new_content["signs"] = content.get("signs", {}) # TODO: Multisig
+		if sign: # If signing is successful (not an old address)
+			new_content["signs"] = {}
+			new_content["signs"][privatekey_address] = sign
+			
+		if inner_path == "content.json":  # To root content.json add old format sign for backward compatibility
+			oldsign_content = json.dumps(new_content, sort_keys=True)
+			new_content["sign"] = CryptBitcoin.signOld(oldsign_content, privatekey)
+
+		if filewrite:
+			self.log.info("Saving to %s..." % inner_path)
+			json.dump(new_content, open(self.site.getPath(inner_path), "w"), indent=2, sort_keys=True)
+
+		self.log.info("File %s signed!" % inner_path)
+
+		if filewrite: # Written to file
+			return True
+		else: # Return the new content
+			return new_content
+
+
+	# The valid signers of content.json file
+	# Return: ["1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6", "13ReyhCsjhpuCVahn1DHdf6eMqqEVev162"]
+	def getValidSigners(self, inner_path):
+		valid_signers = []
+		if inner_path == "content.json": # Root content.json
+			if "content.json" in self.contents and "signers" in self.contents["content.json"]:
+				valid_signers += self.contents["content.json"]["signers"].keys()
+		else:
+			include_info = self.getIncludeInfo(inner_path)
+			if include_info and "signers" in include_info:
+				valid_signers += include_info["signers"]
+
+		if self.site.address not in valid_signers: valid_signers.append(self.site.address) # Site address always valid
+		return valid_signers
+
+
+	# Return: The required number of valid signs for the content.json
+	def getSignsRequired(self, inner_path):
+		return 1 # Todo: Multisig
+
+
+	def validContent(self, inner_path, content):
+		if inner_path == "content.json": return True # Always ok
+		include_info = self.getIncludeInfo(inner_path)
+		if not include_info: 
+			self.log.error("%s: No include info" % inner_path)
+			return False
+
+		if include_info.get("max_size"): # Size limit
+			total_size = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])
+			if total_size > include_info["max_size"]: 
+				self.log.error("%s: Too large %s > %s" % (inner_path, total_size, include_info["max_size"]))
+				return False
+
+		if include_info.get("includes_allowed") == False and content.get("includes"): 
+			self.log.error("%s: Includes not allowed" % inner_path)
+			return False # Includes not allowed
+
+		if include_info.get("files_allowed"): # Filename limit
+			for file_inner_path in content["files"].keys():
+				if not re.match("^%s$" % include_info["files_allowed"], file_inner_path):
+					self.log.error("%s: File not allowed: " % file_inner_path)
+					return False
+
+		return True
+
+
+
+	# Verify file validity
+	# Return: None = Same as before, False = Invalid, True = Valid
+	def verifyFile(self, inner_path, file, ignore_same = True):
+		if inner_path.endswith("content.json"): # content.json: Check using sign
+			from Crypt import CryptBitcoin
+			try:
+				new_content = json.load(file)
+				if inner_path in self.contents: 
+					old_content = self.contents.get(inner_path)
+					# Check if it's newer than ours
+					if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json
+						return None
+					elif old_content["modified"] > new_content["modified"]: # We have newer
+						self.log.debug("We have newer %s (Our: %s, Sent: %s)" % (inner_path, old_content["modified"], new_content["modified"]))
+						return False
+				if new_content["modified"] > time.time()+60*60*24: # Content modified in the far future (allow 1 day window)
+					self.log.error("%s modify is in the future!" % inner_path)
+					return False
+				# Check sign
+				sign = new_content.get("sign")
+				signs = new_content.get("signs", {})
+				if "sign" in new_content: del(new_content["sign"]) # The file signed without the sign
+				if "signs" in new_content: del(new_content["signs"]) # The file signed without the signs
+				sign_content = json.dumps(new_content, sort_keys=True) # Dump the json to string to remove whitepsace
+
+				if not self.validContent(inner_path, new_content): return False # Content not valid (files too large, invalid files)
+
+				if signs: # New style signing
+					valid_signers = self.getValidSigners(inner_path)
+					signs_required = self.getSignsRequired(inner_path)
+
+					if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json 
+						if not CryptBitcoin.verify("%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"]):
+							self.log.error("%s invalid signers_sign!" % inner_path)
+							return False
+
+					valid_signs = 0
+					for address in valid_signers:
+						if address in signs: valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
+						if valid_signs >= signs_required: break # Break if we have enough signs
+
+					return valid_signs >= signs_required
+				else: # Old style signing
+					return CryptBitcoin.verify(sign_content, self.site.address, sign)
+
+			except Exception, err:
+				self.log.error("Verify sign error: %s" % Debug.formatException(err))
+				return False
+
+		else: # Check using file hash (sha512, or sha1 for backward compatibility)
+			file_info = self.getFileInfo(inner_path)
+			if file_info:
+				if "sha512" in file_info:
+					hash_valid = CryptHash.sha512sum(file) == file_info["sha512"]
+				else: # Backward compatibility
+					hash_valid = CryptHash.sha1sum(file) == file_info["sha1"]
+				if file_info["size"] != file.tell():
+					self.log.error("%s file size does not match %s <> %s, Hash: %s" % (inner_path, file.tell(), file_info["size"], hash_valid))
+					return False
+				return hash_valid
+				
+			else: # File not in content.json
+				self.log.error("File not in content.json: %s" % inner_path)
+				return False
+
+
+	# Get dir from file
+	# Return: data/site/content.json -> data/site
+	def toDir(self, inner_path):
+		file_dir = re.sub("[^/]*?$", "", inner_path).strip("/")
+		if file_dir: file_dir += "/" # Add / at end if its not the root
+		return file_dir
+
+
+
+
+def testSign():
+	global config
+	from Config import config
+	from Site import Site
+	site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
+	content_manager = ContentManager(site)
+	content_manager.sign("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR")
+
+
+def testVerify():
+	from Config import config
+	from Site import Site
+	#site = Site("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
+	site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
+
+	content_manager = ContentManager(site)
+	print "Loaded contents:", content_manager.contents.keys()
+
+	file = open(site.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json"))
+	print "content.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False)
+
+	file = open(site.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json"))
+	print "messages.json valid:", content_manager.verifyFile("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False)
+
+
+def testInfo():
+	from Config import config
+	from Site import Site
+	site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH")
+
+	content_manager = ContentManager(site)
+	print content_manager.contents.keys()
+
+	print content_manager.getFileInfo("index.html")
+	print content_manager.getIncludeInfo("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json")
+	print content_manager.getValidSigners("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json")
+	print content_manager.getValidSigners("data/users/content.json")
+	print content_manager.getValidSigners("content.json")
+
+
+if __name__ == "__main__":
+	import os, sys, logging
+	os.chdir("../..")
+	sys.path.insert(0, os.path.abspath("."))
+	sys.path.insert(0, os.path.abspath("src"))
+	logging.basicConfig(level=logging.DEBUG)
+	from Debug import Debug
+	from Crypt import CryptHash
+
+	#testSign()
+	testVerify()
+	#testInfo()
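
The getFileInfo walk above resolves a file to the nearest governing content.json by climbing parent directories. A standalone sketch of the same lookup over a plain dict, assuming the {"<dir>/content.json": {"files": {...}}} layout ContentManager builds; the function name and sample entries are illustrative:

def find_file_info(contents, inner_path):
    # Walk from the file's own directory up to the site root, looking for the
    # content.json that lists it (mirrors ContentManager.getFileInfo)
    dirs = inner_path.split("/")
    file_parts = [dirs.pop()]  # Path of the file relative to the candidate content.json
    while True:
        content_inner_path = ("/".join(dirs) + "/content.json").strip("/")
        content = contents.get(content_inner_path)
        if content and "files" in content:
            info = content["files"].get("/".join(file_parts))
            if info:  # Found the governing content.json
                return content_inner_path, info
        if not dirs:  # Reached the site root without a match
            return None, None
        file_parts.insert(0, dirs.pop())  # Retry one directory level up

contents = {
    "content.json": {"files": {"index.html": {"sha512": "aa...", "size": 41}}},
    "data/users/content.json": {"files": {"messages.json": {"sha512": "bb...", "size": 12}}},
}
print(find_file_info(contents, "data/users/messages.json"))  # ('data/users/content.json', {...})
print(find_file_info(contents, "index.html"))                # ('content.json', {...})
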
diff --git a/src/Content/__init__.py b/src/Content/__init__.py
new file mode 100644
index 00000000..fab39f93
--- /dev/null
+++ b/src/Content/__init__.py
@@ -0,0 +1 @@
+from ContentManager import ContentManager
\ No newline at end of file
diff --git a/src/Crypt/CryptBitcoin.py b/src/Crypt/CryptBitcoin.py
index 63199ffe..19c6fc1e 100644
--- a/src/Crypt/CryptBitcoin.py
+++ b/src/Crypt/CryptBitcoin.py
@@ -1,21 +1,42 @@
 from src.lib.BitcoinECC import BitcoinECC
+from src.lib.pybitcointools import bitcoin as btctools
 
 
 def newPrivatekey(uncompressed=True): # Return new private key
-	from src.lib.BitcoinECC import newBitcoinECC # Use new lib to generate WIF compatible addresses, but keep using the old yet for backward compatiblility issues
-	bitcoin = newBitcoinECC.Bitcoin()
-	d = bitcoin.GenerateD()
-	bitcoin.AddressFromD(d, uncompressed) 
-	return bitcoin.PrivFromD(d, uncompressed)
+	privatekey = btctools.encode_privkey(btctools.random_key(), "wif")
+	return privatekey
+
+
+def newSeed():
+	return btctools.random_key()
+
+
+def hdPrivatekey(seed, child):
+	masterkey = btctools.bip32_master_key(seed)
+	childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too-large child ids could cause problems
+	key = btctools.bip32_extract_key(childkey)
+	return btctools.encode_privkey(key, "wif")
 
 
 def privatekeyToAddress(privatekey): # Return address from private key
-	bitcoin = BitcoinECC.Bitcoin()
-	bitcoin.BitcoinAddressFromPrivate(privatekey)
-	return bitcoin.BitcoinAddresFromPublicKey()
+	if privatekey.startswith("23") and len(privatekey) > 52: # Backward compatibility to broken lib
+		bitcoin = BitcoinECC.Bitcoin()
+		bitcoin.BitcoinAddressFromPrivate(privatekey)
+		return bitcoin.BitcoinAddresFromPublicKey()
+	else:
+		try:
+			return btctools.privkey_to_address(privatekey)
+		except Exception, err: # Invalid privatekey
+			return False
 
 
 def sign(data, privatekey): # Return sign to data using private key
+	if privatekey.startswith("23") and len(privatekey) > 52: return None # Old style private key not supported
+	sign = btctools.ecdsa_sign(data, privatekey)
+	return sign
+
+
+def signOld(data, privatekey): # Return sign to data using private key (backward compatible old style)
 	bitcoin = BitcoinECC.Bitcoin()
 	bitcoin.BitcoinAddressFromPrivate(privatekey)
 	sign = bitcoin.SignECDSA(data)
@@ -23,5 +44,10 @@ def sign(data, privatekey): # Return sign to data using private key
 
 
 def verify(data, address, sign): # Verify data using address and sign
-	bitcoin = BitcoinECC.Bitcoin()
-	return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign)
+	if hasattr(sign, "endswith"):
+		pub = btctools.ecdsa_recover(data, sign)
+		sign_address = btctools.pubtoaddr(pub)
+		return sign_address == address
+	else: # Backward compatible old style
+		bitcoin = BitcoinECC.Bitcoin()
+		return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign)
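
The new-style verify recovers the public key from the signature and compares the derived address. A rough round-trip sketch using the pybitcointools calls the module already imports, assuming the library is importable as the top-level bitcoin package (the repo vendors it under src.lib.pybitcointools):

from bitcoin import random_key, encode_privkey, privkey_to_address, ecdsa_sign, ecdsa_recover, pubtoaddr

privatekey = encode_privkey(random_key(), "wif")  # New WIF key, as newPrivatekey() does
address = privkey_to_address(privatekey)

data = "hello"
sign = ecdsa_sign(data, privatekey)  # Compact signature, base64 encoded

recovered_pub = ecdsa_recover(data, sign)                     # Public key recovered from the signature
print(pubtoaddr(recovered_pub) == address)                    # True: this is what verify() checks
print(pubtoaddr(ecdsa_recover("tampered", sign)) == address)  # False: a different key is recovered
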
diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py
index 60229f61..1c54341d 100644
--- a/src/Debug/DebugHook.py
+++ b/src/Debug/DebugHook.py
@@ -1,22 +1,36 @@
 import gevent, sys
+from Config import config
 
 last_error = None
+
+# Store last error, ignore notify, allow manual error logging
 def handleError(*args):
 	global last_error
-	if not args: # Called explicitly
+	if not args: # Called manually
 		args = sys.exc_info()
 		silent = True
 	else:
 		silent = False
 	print "Error catched", args
-	last_error = args
-	if not silent and args[0].__name__ != "Notify": sys.__excepthook__(*args)
+	if args[0].__name__ != "Notify": last_error = args
+	if not silent and args[0].__name__ != "Notify": 
+		sys.__excepthook__(*args)
+
+
+# Ignore notify errors
+def handleErrorNotify(*args):
+	if args[0].__name__ != "Notify": sys.__excepthook__(*args)
+
 
 OriginalGreenlet = gevent.Greenlet
 class ErrorhookedGreenlet(OriginalGreenlet):
 	def _report_error(self, exc_info):
 		handleError(exc_info[0], exc_info[1], exc_info[2])
 
-sys.excepthook = handleError
+if config.debug:
+	sys.excepthook = handleError
+else:
+	sys.excepthook = handleErrorNotify
+
 gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
-reload(gevent)
\ No newline at end of file
+reload(gevent)
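
The non-debug hook above silently drops exceptions whose class is named Notify and hands everything else to the default excepthook. A standalone sketch of that filtering; the Notify class here stands in for the project's own:

import sys

class Notify(Exception):
    # Control-flow exception that the hook deliberately ignores (matched by class name in DebugHook)
    pass

def handle_error_notify(exc_type, exc_value, exc_traceback):
    if exc_type.__name__ != "Notify":  # Everything except Notify still gets a traceback
        sys.__excepthook__(exc_type, exc_value, exc_traceback)

sys.excepthook = handle_error_notify
# An uncaught Notify now exits silently; an uncaught ValueError still prints a traceback.
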
diff --git a/src/Debug/DebugReloader.py b/src/Debug/DebugReloader.py
index dac6975e..951a5950 100644
--- a/src/Debug/DebugReloader.py
+++ b/src/Debug/DebugReloader.py
@@ -11,6 +11,7 @@ except Exception, err:
 
 class DebugReloader:
 	def __init__ (self, callback, directory = "/"):
+		self.last_changed = 0
 		if pyfilesystem:
 			self.directory = directory
 			self.callback = callback
@@ -29,7 +30,8 @@ class DebugReloader:
 
 
 	def changed(self, evt):
-		if not evt.path or evt.path.endswith("pyc"): return False # Ignore *.pyc changes
+		if not evt.path or evt.path.endswith("pyc") or time.time()-self.last_chaged < 1: return False # Ignore *.pyc changes and no reload within 1 sec
 		#logging.debug("Changed: %s" % evt)
 		time.sleep(0.1) # Wait for lock release
 		self.callback()
+		self.last_changed = time.time()
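
The reload debounce added above simply remembers the last reload time and drops events that arrive within a second of it. A standalone sketch of the same idea; class and function names are illustrative:

import time

class Debouncer(object):
    # Call the wrapped callback at most once per min_interval seconds
    def __init__(self, callback, min_interval=1.0):
        self.callback = callback
        self.min_interval = min_interval
        self.last_changed = 0

    def changed(self, path):
        if not path or path.endswith("pyc"):  # Ignore *.pyc changes
            return False
        if time.time() - self.last_changed < self.min_interval:
            return False  # Too soon since the last reload
        self.callback()
        self.last_changed = time.time()
        return True

def on_reload():
    print("reload!")

reloader = Debouncer(on_reload)
reloader.changed("src/Site/Site.py")     # Triggers the callback
reloader.changed("src/Ui/UiRequest.py")  # Suppressed: within 1 second of the previous reload
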
diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py
index f70671b8..050ca240 100644
--- a/src/File/FileRequest.py
+++ b/src/File/FileRequest.py
@@ -1,4 +1,4 @@
-import os, msgpack, shutil
+import os, msgpack, shutil, gevent
 from Site import SiteManager
 from cStringIO import StringIO
 from Debug import Debug
@@ -39,33 +39,34 @@ class FileRequest:
 		if not site or not site.settings["serving"]: # Site unknown or not serving
 			self.send({"error": "Unknown site"})
 			return False
-		if site.settings["own"]:
-			self.log.debug("Someone trying to push a file to own site %s, reload local content.json first" % site.address)
-			site.loadContent()
+		if site.settings["own"] and params["inner_path"].endswith("content.json"):
+			self.log.debug("Someone trying to push a file to own site %s, reload local %s first" % (site.address, params["inner_path"]))
+			site.content_manager.loadContent(params["inner_path"])
 		buff = StringIO(params["body"])
-		valid = site.verifyFile(params["inner_path"], buff)
+		valid = site.content_manager.verifyFile(params["inner_path"], buff)
 		if valid == True: # Valid and changed
 			self.log.debug("Update for %s looks valid, saving..." % params["inner_path"])
 			buff.seek(0)
 			file = open(site.getPath(params["inner_path"]), "wb")
 			shutil.copyfileobj(buff, file) # Write buff to disk
 			file.close()
+			site.onFileDone(params["inner_path"]) # Trigger filedone
 
-			if params["inner_path"] == "content.json": # Download every changed file from peer
-				changed = site.loadContent() # Get changed files
+			if params["inner_path"].endswith("content.json"): # Download every changed file from peer
 				peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer
-				self.log.info("%s changed files: %s" % (site.address_short, changed))
-				for inner_path in changed: # Updated files in content.json
-					site.needFile(inner_path, peer=peer, update=True, blocking=False) # Download file from peer
-				site.onComplete.once(lambda: site.publish()) # On complete publish to other peers
+				site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"])) # On complete publish to other peers
+				gevent.spawn(
+					lambda: site.downloadContent(params["inner_path"], peer=peer)
+				) # Load new content file and download changed files in new thread
 
 			self.send({"ok": "Thanks, file %s updated!" % params["inner_path"]})
 
 		elif valid == None: # Not changed
 			peer = site.addPeer(*params["peer"], return_peer = True) # Add or get peer
-			self.log.debug("Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) )
-			for task in site.worker_manager.tasks: # New peer add to every ongoing task
-				if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from this peer too if its peer locked
+			if peer:
+				self.log.debug("Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) )
+				for task in site.worker_manager.tasks: # New peer add to every ongoing task
+					if task["peers"]: site.needFile(task["inner_path"], peer=peer, update=True, blocking=False) # Download file from this peer too if its peer locked
 
 			self.send({"ok": "File not changed"})
 
diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py
index b801fe18..b13f7b5c 100644
--- a/src/Peer/Peer.py
+++ b/src/Peer/Peer.py
@@ -36,10 +36,10 @@ class Peer:
 		self.socket.setsockopt(zmq.RCVTIMEO, 50000) # Wait for data arrive
 		self.socket.setsockopt(zmq.SNDTIMEO, 5000) # Wait for data send
 		self.socket.setsockopt(zmq.LINGER, 500) # Wait for socket close
-		#self.socket.setsockopt(zmq.TCP_KEEPALIVE, 1) # Enable keepalive
-		#self.socket.setsockopt(zmq.TCP_KEEPALIVE_IDLE, 4*60) # Send after 4 minute idle
-		#self.socket.setsockopt(zmq.TCP_KEEPALIVE_INTVL, 15) # Wait 15 sec to response
-		#self.socket.setsockopt(zmq.TCP_KEEPALIVE_CNT, 4) # 4 Probes
+		# self.socket.setsockopt(zmq.TCP_KEEPALIVE, 1) # Enable keepalive
+		# self.socket.setsockopt(zmq.TCP_KEEPALIVE_IDLE, 4*60) # Send after 4 minute idle
+		# self.socket.setsockopt(zmq.TCP_KEEPALIVE_INTVL, 15) # Wait 15 sec to response
+		# self.socket.setsockopt(zmq.TCP_KEEPALIVE_CNT, 4) # 4 Probes
 		self.socket.connect('tcp://%s:%s' % (self.ip, self.port))
 
 
@@ -55,7 +55,7 @@ class Peer:
 			if not self.ping(): return None
 
 		for retry in range(1,3): # Retry 3 times
-			if config.debug_socket: self.log.debug("sendCmd: %s" % cmd)
+			if config.debug_socket: self.log.debug("sendCmd: %s %s" % (cmd, params.get("inner_path")))
 			try:
 				self.socket.send(msgpack.packb({"cmd": cmd, "params": params}, use_bin_type=True))
 				if config.debug_socket: self.log.debug("Sent command: %s" % cmd)
diff --git a/src/Site/Site.py b/src/Site/Site.py
index 91c04cde..c7f8a20b 100644
--- a/src/Site/Site.py
+++ b/src/Site/Site.py
@@ -7,11 +7,11 @@ from Peer import Peer
 from Worker import WorkerManager
 from Crypt import CryptHash
 from Debug import Debug
+from Content import ContentManager
 import SiteManager
 
 class Site:
 	def __init__(self, address, allow_create=True):
-
 		self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure its correct address
 		self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging
 		self.directory = "data/%s" % self.address # Site data diretory
@@ -33,10 +33,10 @@ class Site:
 		self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout]
 		self.page_requested = False # Page viewed in browser
 
-		self.loadContent(init=True) # Load content.json
+		self.content_manager = ContentManager(self) # Load contents
 		self.loadSettings() # Load settings from sites.json
 
-		if not self.settings.get("auth_key"): # To auth user in site
+		if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed)
 			self.settings["auth_key"] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(24))
 			self.log.debug("New auth key: %s" % self.settings["auth_key"])
 			self.saveSettings()
@@ -52,39 +52,6 @@ class Site:
 		self.addEventListeners()
 
 
-	# Load content.json to self.content
-	def loadContent(self, init=False):
-		old_content = self.content
-		content_path = "%s/content.json" % self.directory
-		if os.path.isfile(content_path): 
-			try:
-				new_content = json.load(open(content_path))
-			except Exception, err:
-				self.log.error("Content.json load error: %s" % Debug.formatException(err))
-				return None
-		else:
-			return None # Content.json not exits
-
-		try:
-			changed = []
-			for inner_path, details in new_content["files"].items():
-				new_sha1 = details["sha1"]
-				if old_content and old_content["files"].get(inner_path):
-					old_sha1 = old_content["files"][inner_path]["sha1"]
-				else:
-					old_sha1 = None
-				if old_sha1 != new_sha1: changed.append(inner_path)
-			self.content = new_content
-		except Exception, err:
-			self.log.error("Content.json parse error: %s" % Debug.formatException(err))
-			return None # Content.json parse error
-		# Add to bad files
-		if not init:
-			for inner_path in changed:
-				self.bad_files[inner_path] = True
-		return changed
-
-
 	# Load site settings from data/sites.json
 	def loadSettings(self):
 		sites_settings = json.load(open("data/sites.json"))
@@ -103,7 +70,7 @@ class Site:
 	def saveSettings(self):
 		sites_settings = json.load(open("data/sites.json"))
 		sites_settings[self.address] = self.settings
-		open("data/sites.json", "w").write(json.dumps(sites_settings, indent=4, sort_keys=True))
+		open("data/sites.json", "w").write(json.dumps(sites_settings, indent=2, sort_keys=True))
 		return
 
 
@@ -118,71 +85,111 @@ class Site:
 		return file_path
 
 
-	# Start downloading site
+	# Download all files listed in a content.json
+	@util.Noparallel(blocking=True)
+	def downloadContent(self, inner_path, download_files=True, peer=None):
+		s = time.time()
+		self.log.debug("Downloading %s..." % inner_path)
+		self.last_downloads.append(inner_path)
+		found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
+		content_inner_dir = self.content_manager.toDir(inner_path)
+		if not found: return False # Could not download content.json
+
+		self.log.debug("Got %s" % inner_path)
+		changed = self.content_manager.loadContent(inner_path, load_includes=False)
+
+		# Start download files
+		evts = []
+		if download_files:
+			for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys():
+				file_inner_path = content_inner_dir+file_relative_path
+				res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer) # No waiting for finish, return the event
+				if res != True: # Need downloading
+					self.last_downloads.append(file_inner_path)
+					evts.append(res) # Append evt
+
+		# Wait for includes download
+		for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys():
+			file_inner_path = content_inner_dir+file_relative_path
+			self.downloadContent(file_inner_path, download_files=download_files, peer=peer)
+
+		self.log.debug("%s: Includes downloaded" % inner_path)
+		self.log.debug("%s: Downloading %s files..." % (inner_path, len(evts)))
+		gevent.joinall(evts)
+		self.log.debug("%s: All file downloaded in %.2fs" % (inner_path, time.time()-s))
+
+		return True
+
+
+	# Download all files of the site
 	@util.Noparallel(blocking=False)
 	def download(self):
 		self.log.debug("Start downloading...%s" % self.bad_files)
 		self.announce()
-		found = self.needFile("content.json", update=self.bad_files.get("content.json"))
-		if not found: return False # Could not download content.json
-		self.loadContent() # Load the content.json
-		self.log.debug("Got content.json")
-		evts = []
-		self.last_downloads = ["content.json"] # Files downloaded in this run
-		for inner_path in self.content["files"].keys():
-			res = self.needFile(inner_path, blocking=False, update=self.bad_files.get(inner_path)) # No waiting for finish, return the event
-			if res != True: # Need downloading
-				self.last_downloads.append(inner_path)
-				evts.append(res) # Append evt
-		self.log.debug("Downloading %s files..." % len(evts))
-		s = time.time()
-		gevent.joinall(evts)
-		self.log.debug("All file downloaded in %.2fs" % (time.time()-s))
+		self.last_downloads = []
+		found = self.downloadContent("content.json")
+
+		return found
 
 
 	# Update content.json from peers and download changed files
 	@util.Noparallel()
 	def update(self):
-		self.loadContent() # Reload content.json
+		self.content_manager.loadContent("content.json") # Reload content.json
 		self.content_updated = None
-		self.needFile("content.json", update=True)
-		changed_files = self.loadContent()
-		if changed_files:
-			for changed_file in changed_files:
+		# Download all content.json again
+		for inner_path in self.content_manager.contents.keys():
+			self.needFile(inner_path, update=True)
+		changed = self.content_manager.loadContent("content.json")
+		if changed:
+			for changed_file in changed:
 				self.bad_files[changed_file] = True
 		if not self.settings["own"]: self.checkFiles(quick_check=True) # Quick check files based on file size
 		if self.bad_files:
 			self.download()
-		return changed_files
+		return changed
 
 
-
-	# Update content.json on peers
-	def publish(self, limit=3):
-		self.log.info( "Publishing to %s/%s peers..." % (limit, len(self.peers)) )
-		published = 0
-		for key, peer in self.peers.items(): # Send update command to each peer
+	def publisher(self, inner_path, peers, published, limit):
+		while 1:
+			if not peers or len(published) >= limit: break # All peers done, or published enough
+			peer = peers.pop(0)
 			result = {"exception": "Timeout"}
 			try:
-				with gevent.Timeout(1, False): # 1 sec timeout
+				with gevent.Timeout(60, False): # 60 sec timeout
 					result = peer.sendCmd("update", {
 						"site": self.address, 
-						"inner_path": "content.json", 
-						"body": open(self.getPath("content.json")).read(),
+						"inner_path": inner_path, 
+						"body": open(self.getPath(inner_path), "rb").read(),
 						"peer": (config.ip_external, config.fileserver_port)
 					})
 			except Exception, err:
 				result = {"exception": Debug.formatException(err)}
 
 			if result and "ok" in result:
-				published += 1
-				self.log.info("[OK] %s: %s" % (key, result["ok"]))
+				published.append(peer)
+				self.log.info("[OK] %s: %s" % (peer.key, result["ok"]))
 			else:
-				self.log.info("[ERROR] %s: %s" % (key, result))
+				self.log.info("[ERROR] %s: %s" % (peer.key, result))
 			
-			if published >= limit: break
-		self.log.info("Successfuly published to %s peers" % published)
-		return published
+
+
+
+
+	# Update content.json on peers
+	def publish(self, limit=3, inner_path="content.json"):
+		self.log.info( "Publishing to %s/%s peers..." % (limit, len(self.peers)) )
+		published = [] # Successfuly published (Peer)
+		publishers = [] # Publisher threads
+		peers = self.peers.values()
+		for i in range(limit):
+			publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit)
+			publishers.append(publisher)
+
+		gevent.joinall(publishers) # Wait for all publishers
+
+		self.log.info("Successfuly published to %s peers" % len(published))
+		return len(published)
 
 
 	# Check and download if file not exits
@@ -192,14 +199,19 @@ class Site:
 		elif self.settings["serving"] == False: # Site not serving
 			return False
 		else: # Wait until file downloaded
-			if not self.content: # No content.json, download it first!
+			if not self.content_manager.contents.get("content.json"): # No content.json, download it first!
 				self.log.debug("Need content.json first")
 				self.announce()
 				if inner_path != "content.json": # Prevent double download
 					task = self.worker_manager.addTask("content.json", peer)
 					task.get()
-					self.loadContent()
-					if not self.content: return False
+					self.content_manager.loadContent()
+					if not self.content_manager.contents.get("content.json"): return False # Content.json download failed
+
+			if not inner_path.endswith("content.json") and not self.content_manager.getFileInfo(inner_path): # No info for file, download all content.json first
+				self.log.debug("No info for %s, waiting for all content.json" % inner_path)
+				success = self.downloadContent("content.json", download_files=False)
+				if not success: return False
 
 			task = self.worker_manager.addTask(inner_path, peer, priority=priority)
 			if blocking:
@@ -210,6 +222,7 @@ class Site:
 
 	# Add or update a peer to site
 	def addPeer(self, ip, port, return_peer = False):
+		if not ip: return False
 		key = "%s:%s" % (ip, port)
 		if key in self.peers: # Already has this ip
 			self.peers[key].found()
@@ -273,8 +286,13 @@ class Site:
 
 	def deleteFiles(self):
 		self.log.debug("Deleting files from content.json...")
-		files = self.content["files"].keys() # Make a copy
-		files.append("content.json")
+		files = [] # Get filenames
+		for content_inner_path, content in self.content_manager.contents.items():
+			files.append(content_inner_path)
+			for file_relative_path in content["files"].keys():
+				file_inner_path = self.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
+				files.append(file_inner_path)
+				
 		for inner_path in files:
 			path = self.getPath(inner_path)
 			if os.path.isfile(path): os.unlink(path)
@@ -351,122 +369,30 @@ class Site:
 		self.updateWebsocket(file_failed=inner_path)
 
 
-	# - Sign and verify -
-
-
-	# Verify fileobj using sha1 in content.json
-	def verifyFile(self, inner_path, file, force=False):
-		if inner_path == "content.json": # Check using sign
-			from Crypt import CryptBitcoin
-
-			try:
-				content = json.load(file)
-				if self.content and not force:
-					if self.content["modified"] == content["modified"]: # Ignore, have the same content.json
-						return None
-					elif self.content["modified"] > content["modified"]: # We have newer
-						self.log.debug("We have newer content.json (Our: %s, Sent: %s)" % (self.content["modified"], content["modified"]))
-						return False
-				if content["modified"] > time.time()+60*60*24: # Content modified in the far future (allow 1 day window)
-					self.log.error("Content.json modify is in the future!")
-					return False
-				# Check sign
-				sign = content["sign"]
-				del(content["sign"]) # The file signed without the sign
-				sign_content = json.dumps(content, sort_keys=True) # Dump the json to string to remove whitepsace
-
-				return CryptBitcoin.verify(sign_content, self.address, sign)
-			except Exception, err:
-				self.log.error("Verify sign error: %s" % Debug.formatException(err))
-				return False
-
-		else: # Check using sha1 hash
-			if self.content and inner_path in self.content["files"]:
-				if "sha512" in self.content["files"][inner_path]: # Use sha512 to verify if possible
-					return CryptHash.sha512sum(file) == self.content["files"][inner_path]["sha512"]
-				else: # Backward compatiblity
-					return CryptHash.sha1sum(file) == self.content["files"][inner_path]["sha1"]
-				
-			else: # File not in content.json
-				self.log.error("File not in content.json: %s" % inner_path)
-				return False
-
-
 	# Verify all files sha512sum using content.json
 	def verifyFiles(self, quick_check=False): # Fast = using file size
 		bad_files = []
-		if not self.content: # No content.json, download it first
+		if not self.content_manager.contents.get("content.json"): # No content.json, download it first
 			self.needFile("content.json", update=True) # Force update to fix corrupt file
-			self.loadContent() # Reload content.json
-		for inner_path in self.content["files"].keys():
-			file_path = self.getPath(inner_path)
-			if not os.path.isfile(file_path):
-				self.log.error("[MISSING] %s" % inner_path)
-				bad_files.append(inner_path)
-				continue
+			self.content_manager.loadContent() # Reload content.json
+		for content_inner_path, content in self.content_manager.contents.items():
+			for file_relative_path in content["files"].keys():
+				file_inner_path = self.content_manager.toDir(content_inner_path)+file_relative_path # Relative to content.json
+				file_inner_path = file_inner_path.strip("/") # Strip leading /
+				file_path = self.getPath(file_inner_path)
+				if not os.path.isfile(file_path):
+					self.log.error("[MISSING] %s" % file_inner_path)
+					bad_files.append(file_inner_path)
+					continue
 
-			if quick_check:
-				ok = os.path.getsize(file_path) == self.content["files"][inner_path]["size"]
-			else:
-				ok = self.verifyFile(inner_path, open(file_path, "rb"))
+				if quick_check:
+					ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
+				else:
+					ok = self.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
 
-			if not ok:
-				self.log.error("[ERROR] %s" % inner_path)
-				bad_files.append(inner_path)
-		self.log.debug("Site verified: %s files, quick_check: %s, bad files: %s" % (len(self.content["files"]), quick_check, bad_files))
+				if not ok:
+					self.log.error("[ERROR] %s" % file_inner_path)
+					bad_files.append(file_inner_path)
+			self.log.debug("%s verified: %s files, quick_check: %s, bad files: %s" % (content_inner_path, len(content["files"]), quick_check, bad_files))
 
 		return bad_files
-
-
-	# Create and sign content.json using private key
-	def signContent(self, privatekey=None):
-		if not self.content: # New site
-			self.log.info("Site not exits yet, loading default content.json values...")
-			self.content = {"files": {}, "title": "%s - ZeroNet_" % self.address, "sign": "", "modified": 0.0, "description": "", "address": self.address, "ignore": "", "zeronet_version": config.version} # Default content.json
-
-		self.log.info("Opening site data directory: %s..." % self.directory)
-
-		hashed_files = {}
-
-		for root, dirs, files in os.walk(self.directory):
-			for file_name in files:
-				file_path = self.getPath("%s/%s" % (root, file_name))
-				
-				if file_name == "content.json" or (self.content["ignore"] and re.match(self.content["ignore"], file_path.replace(self.directory+"/", "") )): # Dont add content.json and ignore regexp pattern definied in content.json
-					self.log.info("- [SKIPPED] %s" % file_path)
-				else:
-					sha1sum = CryptHash.sha1sum(file_path) # Calculate sha1 sum of file
-					sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file
-					inner_path = re.sub("^%s/" % re.escape(self.directory), "", file_path)
-					self.log.info("- %s (SHA512: %s)" % (file_path, sha512sum))
-					hashed_files[inner_path] = {"sha1": sha1sum, "sha512": sha512sum, "size": os.path.getsize(file_path)}
-
-		# Generate new content.json
-		self.log.info("Adding timestamp and sha512sums to new content.json...")
-
-		content = self.content.copy() # Create a copy of current content.json
-		content["address"] = self.address
-		content["files"] = hashed_files # Add files sha512 hash
-		content["modified"] = time.time() # Add timestamp
-		content["zeronet_version"] = config.version # Signer's zeronet version
-		del(content["sign"]) # Delete old sign
-
-		# Signing content
-		from Crypt import CryptBitcoin
-
-		self.log.info("Verifying private key...")
-		privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
-		if self.address != privatekey_address:
-			return self.log.error("Private key invalid! Site address: %s, Private key address: %s" % (self.address, privatekey_address))
-
-		self.log.info("Signing modified content.json...")
-		sign_content = json.dumps(content, sort_keys=True)
-		sign = CryptBitcoin.sign(sign_content, privatekey)
-		content["sign"] = sign
-
-		# Saving modified content.json
-		self.log.info("Saving to %s/content.json..." % self.directory)
-		open("%s/content.json" % self.directory, "w").write(json.dumps(content, indent=4, sort_keys=True))
-
-		self.log.info("Site signed!")
-		return True
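
publish() now fans out to `limit` publisher greenlets that pull from one shared peers list and stop once enough confirmations have arrived, instead of a serial loop with a 1-second timeout. A standalone sketch of that worker-pool pattern; the random delay and failure rate stand in for peer.sendCmd:

import random
import gevent

def publisher(peers, published, limit):
    # Pull peers from the shared list until enough publishes have succeeded
    while peers and len(published) < limit:
        peer = peers.pop(0)
        gevent.sleep(random.random() * 0.1)  # Stand-in for peer.sendCmd("update", ...)
        if random.random() > 0.3:            # Pretend roughly 30% of peers time out
            published.append(peer)

def publish(peers, limit=3):
    published = []  # Shared between the workers, as in Site.publish
    workers = [gevent.spawn(publisher, peers, published, limit) for _ in range(limit)]
    gevent.joinall(workers)  # Wait for every worker to finish
    return len(published)

print(publish(["peer%s" % i for i in range(10)], limit=3))  # Usually prints 3
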
diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py
index 44ffbb03..4692cfce 100644
--- a/src/Site/SiteManager.py
+++ b/src/Site/SiteManager.py
@@ -2,11 +2,14 @@ import json, logging, time, re, os
 import gevent
 
 TRACKERS = [
-	("udp", "sugoi.pomf.se", 2710),
 	("udp", "open.demonii.com", 1337),
+	("udp", "sugoi.pomf.se", 2710),
 	("udp", "tracker.coppersurfer.tk", 80),
 	("udp", "tracker.leechers-paradise.org", 6969),
 	("udp", "9.rarbg.com", 2710),
+	#("udp", "www.eddie4.nl", 6969), Backup trackers
+	#("udp", "trackr.sytes.net", 80),
+	#("udp", "tracker4.piratux.com", 6969)
 ]
 
 # Load all sites from data/sites.json
@@ -41,7 +44,7 @@ def isAddress(address):
 def need(address, all_file=True):
 	from Site import Site
 	if address not in sites: # Site not exits yet
-		if not isAddress(address): raise Exception("Not address: %s" % address)
+		if not isAddress(address): return False # Not a valid address
 		logging.debug("Added new site: %s" % address)
 		sites[address] = Site(address)
 		if not sites[address].settings["serving"]: # Maybe it was deleted before
diff --git a/src/Site/__init__.py b/src/Site/__init__.py
index f0f2f304..24b2e268 100644
--- a/src/Site/__init__.py
+++ b/src/Site/__init__.py
@@ -1 +1 @@
-from Site import Site
\ No newline at end of file
+from Site import Site
diff --git a/src/Test/test.py b/src/Test/test.py
index 23238560..eb4ec0d7 100644
--- a/src/Test/test.py
+++ b/src/Test/test.py
@@ -19,7 +19,7 @@ class TestCase(unittest.TestCase):
 		self.assertIn("Forbidden", urllib.urlopen("http://127.0.0.1:43110/media/1P2rJhkQjYSHdHpWDDwxfRGYXaoWE8u1vV/../../config.py").read())
 
 
-	def testBitcoinSign(self):
+	def testBitcoinSignOld(self):
 		s = time.time()
 		privatekey = "23DKQpDz7bXM7w5KN5Wnmz7bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf"
 		privatekey_bad = "23DKQpDz7bXM7w5KN5Wnmz6bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf"
@@ -30,6 +30,28 @@ class TestCase(unittest.TestCase):
 		address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad)
 		self.assertNotEqual(address_bad, "12vTsjscg4hYPewUL2onma5pgQmWPMs3ez")
 
+		sign = CryptBitcoin.signOld("hello", privatekey)
+
+		self.assertTrue(CryptBitcoin.verify("hello", address, sign))
+		self.assertFalse(CryptBitcoin.verify("not hello", address, sign))
+
+		sign_bad = CryptBitcoin.signOld("hello", privatekey_bad)
+		self.assertFalse(CryptBitcoin.verify("hello", address, sign_bad))
+
+		print "Taken: %.3fs, " % (time.time()-s),
+
+
+	def testBitcoinSign(self):
+		s = time.time()
+		privatekey = "5K9S6dVpufGnroRgFrT6wsKiz2mJRYsC73eWDmajaHserAp3F1C"
+		privatekey_bad = "5Jbm9rrusXyApAoM8YoM4Rja337zMMoBUMRJ1uijiguU2aZRnwC"
+
+		address = CryptBitcoin.privatekeyToAddress(privatekey)
+		self.assertEqual(address, "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz")
+
+		address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad)
+		self.assertNotEqual(address_bad, "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz")
+
 		sign = CryptBitcoin.sign("hello", privatekey)
 
 		self.assertTrue(CryptBitcoin.verify("hello", address, sign))
@@ -41,7 +63,33 @@ class TestCase(unittest.TestCase):
 		print "Taken: %.3fs, " % (time.time()-s),
 
 
+
+	def testBitcoinSignCompressed(self):
+		raise unittest.SkipTest("Not working")
+		s = time.time()
+		privatekey = "Kwg4YXhL5gsNwarFWtzTKuUiwAhKbZAgWdpFo1UETZSKdgHaNN2J"
+		privatekey_bad = "Kwg4YXhL5gsNwarFWtzTKuUiwAhKsZAgWdpFo1UETZSKdgHaNN2J"
+
+		address = CryptBitcoin.privatekeyToAddress(privatekey)
+		self.assertEqual(address, "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f")
+
+		address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad)
+		self.assertNotEqual(address_bad, "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f")
+
+		sign = CryptBitcoin.sign("hello", privatekey)
+		print sign
+
+		self.assertTrue(CryptBitcoin.verify("hello", address, sign))
+		self.assertFalse(CryptBitcoin.verify("not hello", address, sign))
+
+		sign_bad = CryptBitcoin.sign("hello", privatekey_bad)
+		self.assertFalse(CryptBitcoin.verify("hello", address, sign_bad))
+
+		print "Taken: %.3fs, " % (time.time()-s),
+
+
 	def testTrackers(self):
+		raise unittest.SkipTest("Notyet")
 		from Site import SiteManager
 		from lib.subtl.subtl import UdpTrackerClient
 		import hashlib
@@ -64,7 +112,7 @@ class TestCase(unittest.TestCase):
 				if peers != None:
 					ok += 1
 
-		self.assertGreater(ok, 1)
+		self.assertEqual(ok, len(SiteManager.TRACKERS))
 				
 
 
diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py
index 1172a647..05444935 100644
--- a/src/Ui/UiRequest.py
+++ b/src/Ui/UiRequest.py
@@ -1,6 +1,7 @@
 import time, re, os, mimetypes, json
 from Config import config
 from Site import SiteManager
+from User import UserManager
 from Ui.UiWebsocket import UiWebsocket
 
 status_texts = {
@@ -19,6 +20,7 @@ class UiRequest:
 			self.log = server.log
 		self.get = {} # Get parameters
 		self.env = {} # Enviroment settings
+		self.user = UserManager.getCurrent()
 		self.start_response = None # Start response function
 
 
@@ -103,6 +105,7 @@ class UiRequest:
 	# Render a file from media with iframe site wrapper
 	def actionWrapper(self, path):
 		if "." in path and not path.endswith(".html"): return self.actionSiteMedia("/media"+path) # Only serve html files with frame
+		if self.get.get("wrapper") == "False": return self.actionSiteMedia("/media"+path) # Only serve html files with frame
 		if self.env.get("HTTP_X_REQUESTED_WITH"): return self.error403() # No ajax allowed on wrapper
 
 		match = re.match("/(?P<site>[A-Za-z0-9]+)(?P<inner_path>/.*|$)", path)
@@ -111,22 +114,22 @@ class UiRequest:
 			if not inner_path: inner_path = "index.html" # If inner path defaults to index.html
 
 			site = self.server.sites.get(match.group("site"))
-			if site and site.content and (not site.bad_files or site.settings["own"]): # Its downloaded or own
-				title = site.content["title"]
+			if site and site.content_manager.contents.get("content.json") and (not site.bad_files or site.settings["own"]): # Its downloaded or own
+				title = site.content_manager.contents["content.json"]["title"]
 			else:
 				title = "Loading %s..." % match.group("site")
 				site = SiteManager.need(match.group("site")) # Start download site
-				if not site: self.error404()
+				if not site: return self.error404(path)
 
 			self.sendHeader(extra_headers=[("X-Frame-Options", "DENY")])
 
 			# Wrapper variable inits
 			if self.env.get("QUERY_STRING"): 
-				query_string = "?"+self.env["QUERY_STRING"] 
-			else: 
+				query_string = "?"+self.env["QUERY_STRING"]
+			else:
 				query_string = ""
 			body_style = ""
-			if site.content and site.content.get("background-color"): body_style += "background-color: "+site.content["background-color"]+";"
+			if site.content_manager.contents.get("content.json") and site.content_manager.contents["content.json"].get("background-color"): body_style += "background-color: "+site.content_manager.contents["content.json"]["background-color"]+";"
 
 			return self.render("src/Ui/template/wrapper.html", 
 				inner_path=inner_path, 
@@ -146,6 +149,8 @@ class UiRequest:
 
 	# Serve a media for site
 	def actionSiteMedia(self, path):
+		path = path.replace("/index.html/", "/") # Base Backward compatibility fix
+		
 		match = re.match("/media/(?P<site>[A-Za-z0-9]+)/(?P<inner_path>.*)", path)
 
 		referer = self.env.get("HTTP_REFERER")
@@ -228,7 +233,7 @@ class UiRequest:
 				if site_check.settings["wrapper_key"] == wrapper_key: site = site_check
 
 			if site: # Correct wrapper key
-				ui_websocket = UiWebsocket(ws, site, self.server)
+				ui_websocket = UiWebsocket(ws, site, self.server, self.user)
 				site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events
 				ui_websocket.start()
 				for site_check in self.server.sites.values(): # Remove websocket from every site (admin sites allowed to join other sites event channels)
@@ -297,3 +302,5 @@ class UiRequest:
 		import imp
 		global UiWebsocket
 		UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket
+		UserManager.reload()
+		self.user = UserManager.getCurrent()
diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py
index 28880dcb..c63a8978 100644
--- a/src/Ui/UiWebsocket.py
+++ b/src/Ui/UiWebsocket.py
@@ -2,11 +2,14 @@ import json, gevent, time, sys, hashlib
 from Config import config
 from Site import SiteManager
 from Debug import Debug
+from util import QueryJson
+
 
 class UiWebsocket:
-	def __init__(self, ws, site, server):
+	def __init__(self, ws, site, server, user):
 		self.ws = ws
 		self.site = site
+		self.user = user
 		self.log = site.log
 		self.server = server
 		self.next_message_id = 1
@@ -98,6 +101,10 @@ class UiWebsocket:
 			func = self.actionSitePublish
 		elif cmd == "fileWrite":
 			func = self.actionFileWrite
+		elif cmd == "fileGet":
+			func = self.actionFileGet
+		elif cmd == "fileQuery":
+			func = self.actionFileQuery
 		# Admin commands
 		elif cmd == "sitePause" and "ADMIN" in permissions:
 			func = self.actionSitePause
@@ -140,15 +147,18 @@ class UiWebsocket:
 
 	# Format site info
 	def formatSiteInfo(self, site):
-		content = site.content
-		if content and "files" in content: # Remove unnecessary data transfer
-			content = site.content.copy()
-			content["files"] = len(content["files"])
-			del(content["sign"])
+		content = site.content_manager.contents.get("content.json")
+		if content: # Remove unnecessary data transfer
+			content = content.copy()
+			content["files"] = len(content.get("files", {}))
+			content["includes"] = len(content.get("includes", {}))
+			if "sign" in content: del(content["sign"])
+			if "signs" in content: del(content["signs"])
 
 		ret = {
-			"auth_key": self.site.settings["auth_key"],
-			"auth_key_sha512": hashlib.sha512(self.site.settings["auth_key"]).hexdigest()[0:64],
+			"auth_key": self.site.settings["auth_key"], # Obsolete, will be removed
+			"auth_key_sha512": hashlib.sha512(self.site.settings["auth_key"]).hexdigest()[0:64], # Obsolete, will be removed
+			"auth_address": self.user.getAuthAddress(site.address),
 			"address": site.address,
 			"settings": site.settings,
 			"content_updated": site.content_updated,
@@ -158,7 +168,7 @@ class UiWebsocket:
 			"tasks": len([task["inner_path"] for task in site.worker_manager.tasks]),
 			"content": content
 		}
-		if site.settings["serving"] and site.content: ret["peers"] += 1 # Add myself if serving
+		if site.settings["serving"] and content: ret["peers"] += 1 # Add myself if serving
 		return ret
 
 
@@ -189,20 +199,26 @@ class UiWebsocket:
 		self.response(to, ret)
 
 
-	def actionSitePublish(self, to, privatekey):
+	def actionSitePublish(self, to, privatekey=None, inner_path="content.json"):
 		site = self.site
-		if not site.settings["own"]: return self.response(to, "Forbidden, you can only modify your own sites")
+		if not inner_path.endswith("content.json"): # Find the content.json first
+			inner_path = site.content_manager.getFileInfo(inner_path)["content_inner_path"]
+
+		if not site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path): 
+			return self.response(to, "Forbidden, you can only modify your own sites")
+		if not privatekey: # Get privatekey from users.json
+			privatekey = self.user.getAuthPrivatekey(self.site.address)
 
 		# Signing
-		site.loadContent(True) # Reload content.json, ignore errors to make it up-to-date
-		signed = site.signContent(privatekey) # Sign using private key sent by user
+		site.content_manager.loadContent(add_bad_files=False) # Reload content.json, ignore errors to make it up-to-date
+		signed = site.content_manager.sign(inner_path, privatekey) # Sign using private key sent by user
 		if signed:
-			self.cmd("notification", ["done", "Private key correct, site signed!", 5000]) # Display message for 5 sec
+			if inner_path == "content_json": self.cmd("notification", ["done", "Private key correct, content signed!", 5000]) # Display message for 5 sec
 		else:
-			self.cmd("notification", ["error", "Site sign failed: invalid private key."])
+			self.cmd("notification", ["error", "Content sign failed: invalid private key."])
 			self.response(to, "Site sign failed")
 			return
-		site.loadContent(True) # Load new content.json, ignore errors
+		site.content_manager.loadContent(add_bad_files=False) # Load new content.json, ignore errors
 
 		# Publishing
 		if not site.settings["serving"]: # Enable site if paused
@@ -210,27 +226,26 @@ class UiWebsocket:
 			site.saveSettings()
 			site.announce()
 
-		published = site.publish(5) # Publish to 5 peer
+		published = site.publish(5, inner_path) # Publish to 5 peer
 
 		if published>0: # Successfuly published
-			self.cmd("notification", ["done", "Site published to %s peers." % published, 5000])
+			self.cmd("notification", ["done", "Content published to %s peers." % published, 5000])
 			self.response(to, "ok")
 			site.updateWebsocket() # Send updated site data to local websocket clients
 		else:
 			if len(site.peers) == 0:
-				self.cmd("notification", ["info", "No peers found, but your site is ready to access."])
-				self.response(to, "No peers found, but your site is ready to access.")
+				self.cmd("notification", ["info", "No peers found, but your content is ready to access."])
+				self.response(to, "No peers found, but your content is ready to access.")
 			else:
-				self.cmd("notification", ["error", "Site publish failed."])
-				self.response(to, "Site publish failed.")
-
-
-		
+				self.cmd("notification", ["error", "Content publish failed."])
+				self.response(to, "Content publish failed.")
 
 
 	# Write a file to disk
 	def actionFileWrite(self, to, inner_path, content_base64):
-		if not self.site.settings["own"]: return self.response(to, "Forbidden, you can only modify your own sites")
+		if not self.site.settings["own"] and self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path):
+			return self.response(to, "Forbidden, you can only modify your own files")
+
 		try:
 			import base64
 			content = base64.b64decode(content_base64)
@@ -238,12 +253,27 @@ class UiWebsocket:
 		except Exception, err:
 			return self.response(to, "Write error: %s" % err)
 
-		if inner_path == "content.json":
-			self.site.loadContent(True)
+		if inner_path.endswith("content.json"):
+			self.site.content_manager.loadContent(inner_path, add_bad_files=False)
 
 		return self.response(to, "ok")
 
-		
+	
+	# Find data in json files
+	def actionFileQuery(self, to, dir_inner_path, query):
+		dir_path = self.site.getPath(dir_inner_path)
+		rows = list(QueryJson.query(dir_path, query))
+		return self.response(to, rows)
+
+
+	# Return file content
+	def actionFileGet(self, to, inner_path):
+		try:
+			self.site.needFile(inner_path, priority=1)
+			body = open(self.site.getPath(inner_path)).read()
+		except:
+			body = None
+		return self.response(to, body)
 
 
 	# - Admin actions -
@@ -253,7 +283,7 @@ class UiWebsocket:
 		ret = []
 		SiteManager.load() # Reload sites
 		for site in self.server.sites.values():
-			if not site.content: continue # Broken site
+			if not site.content_manager.contents.get("content.json"): continue # Broken site
 			ret.append(self.formatSiteInfo(site))
 		self.response(to, ret)
 
diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee
index 7c559445..4cc8b0fb 100644
--- a/src/Ui/media/Wrapper.coffee
+++ b/src/Ui/media/Wrapper.coffee
@@ -25,8 +25,9 @@ class Wrapper
 		window.onload = @onLoad # On iframe loaded
 		$(window).on "hashchange", => # On hash change
 			@log "Hashchange", window.location.hash
-			src = $("#inner-iframe").attr("src").replace(/#.*/, "")+window.location.hash
-			$("#inner-iframe").attr("src", src)
+			if window.location.hash
+				src = $("#inner-iframe").attr("src").replace(/#.*/, "")+window.location.hash
+				$("#inner-iframe").attr("src", src)
 		@
 
 
@@ -97,7 +98,8 @@ class Wrapper
 		input = $("<input type='#{type}' class='input button-#{type}'/>") # Add input
 		input.on "keyup", (e) => # Send on enter
 			if e.keyCode == 13
-				@sendInner {"cmd": "response", "to": message.id, "result": input.val()} # Response to confirm
+				button.trigger "click" # Response to confirm
+
 		body.append(input)
 
 		button = $("<a href='##{caption}' class='button button-#{caption}'>#{caption}</a>") # Add confirm button
@@ -149,6 +151,9 @@ class Wrapper
 		if window.location.hash then $("#inner-iframe")[0].src += window.location.hash # Hash tag
 		if @ws.ws.readyState == 1 and not @site_info # Ws opened
 			@reloadSiteInfo()
+		else if @site_info
+			window.document.title = @site_info.content.title+" - ZeroNet"
+			@log "Setting title to", window.document.title
 
 
 	# Send message to innerframe
diff --git a/src/Ui/media/Wrapper.css b/src/Ui/media/Wrapper.css
index 24624d53..6249c3dd 100644
--- a/src/Ui/media/Wrapper.css
+++ b/src/Ui/media/Wrapper.css
@@ -47,6 +47,7 @@ a { color: black }
 .notification .close { position: absolute; top: 0px; right: 0px; font-size: 19px; line-height: 13px; color: #DDD; padding: 7px; text-decoration: none }
 .notification .close:hover { color: black }
 .notification .close:active, .notification .close:focus { color: #AF3BFF }
+.body-white .notification { box-shadow: 0px 1px 9px rgba(0,0,0,0.1) }
 
 /* Notification types */
 .notification-ask .notification-icon { background-color: #f39c12; }
diff --git a/src/Ui/media/all.css b/src/Ui/media/all.css
index 4659caa6..c5b559cf 100644
--- a/src/Ui/media/all.css
+++ b/src/Ui/media/all.css
@@ -52,6 +52,7 @@ a { color: black }
 .notification .close { position: absolute; top: 0px; right: 0px; font-size: 19px; line-height: 13px; color: #DDD; padding: 7px; text-decoration: none }
 .notification .close:hover { color: black }
 .notification .close:active, .notification .close:focus { color: #AF3BFF }
+.body-white .notification { -webkit-box-shadow: 0px 1px 9px rgba(0,0,0,0.1) ; -moz-box-shadow: 0px 1px 9px rgba(0,0,0,0.1) ; -o-box-shadow: 0px 1px 9px rgba(0,0,0,0.1) ; -ms-box-shadow: 0px 1px 9px rgba(0,0,0,0.1) ; box-shadow: 0px 1px 9px rgba(0,0,0,0.1)  }
 
 /* Notification types */
 .notification-ask .notification-icon { background-color: #f39c12; }
diff --git a/src/Ui/media/all.js b/src/Ui/media/all.js
index c621dca2..cb5fa543 100644
--- a/src/Ui/media/all.js
+++ b/src/Ui/media/all.js
@@ -748,8 +748,10 @@ jQuery.extend( jQuery.easing,
         return function() {
           var src;
           _this.log("Hashchange", window.location.hash);
-          src = $("#inner-iframe").attr("src").replace(/#.*/, "") + window.location.hash;
-          return $("#inner-iframe").attr("src", src);
+          if (window.location.hash) {
+            src = $("#inner-iframe").attr("src").replace(/#.*/, "") + window.location.hash;
+            return $("#inner-iframe").attr("src", src);
+          }
         };
       })(this));
       this;
@@ -840,11 +842,7 @@ jQuery.extend( jQuery.easing,
       input.on("keyup", (function(_this) {
         return function(e) {
           if (e.keyCode === 13) {
-            return _this.sendInner({
-              "cmd": "response",
-              "to": message.id,
-              "result": input.val()
-            });
+            return button.trigger("click");
           }
         };
       })(this));
@@ -923,6 +921,9 @@ jQuery.extend( jQuery.easing,
       }
       if (this.ws.ws.readyState === 1 && !this.site_info) {
         return this.reloadSiteInfo();
+      } else if (this.site_info) {
+        window.document.title = this.site_info.content.title + " - ZeroNet";
+        return this.log("Setting title to", window.document.title);
       }
     };
 
diff --git a/src/User/User.py b/src/User/User.py
new file mode 100644
index 00000000..46642515
--- /dev/null
+++ b/src/User/User.py
@@ -0,0 +1,53 @@
+import logging, json, time
+from Crypt import CryptBitcoin
+
+class User:
+	def __init__(self, master_address=None):
+		if master_address:
+			self.master_address = master_address
+			self.master_seed = None
+		else:
+			self.master_seed = CryptBitcoin.newSeed()
+			self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed)
+		self.sites = {}
+		self.log = logging.getLogger("User:%s" % self.master_address)
+
+
+	# Save to data/users.json
+	def save(self):
+		users = json.load(open("data/users.json"))
+		if self.master_address not in users: users[self.master_address] = {} # Create if it does not exist
+
+		user_data = users[self.master_address]
+		if self.master_seed: user_data["master_seed"] = self.master_seed
+		user_data["sites"] = self.sites
+		open("data/users.json", "w").write(json.dumps(users, indent=2, sort_keys=True))
+		self.log.debug("Saved")
+
+
+	# Get BIP32 address from site address
+	# Return: BIP32 auth address
+	def getAuthAddress(self, address):
+		if address not in self.sites: # Generate a new BIP32 child key based on the site address
+			s = time.time()
+			address_id = int(address.encode("hex"), 16) # Convert site address to int
+			auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id)
+			self.sites[address] = {
+				"auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey),
+				"auth_privatekey": auth_privatekey
+			}
+			self.save()
+			self.log.debug("Added new site: %s in %.3fs" % (address, time.time()-s))
+
+		return self.sites[address]["auth_address"]
+
+
+	def getAuthPrivatekey(self, address):
+		return self.sites[address]["auth_privatekey"]
+
+
+
+	# Set user attributes from dict
+	def setData(self, data):
+		for key, val in data.items():
+			setattr(self, key, val)
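
getAuthAddress derives the per-site keypair through CryptBitcoin.hdPrivatekey, which is not part of this diff; a minimal sketch of the idea using the bundled pybitcointools, where the import path, the seed, the site address and the reduction to a child index are all assumptions:

    from bitcoin import bip32_master_key, bip32_ckd, bip32_extract_key, privtoaddr

    master_seed = "throwaway example seed"                  # placeholder for CryptBitcoin.newSeed()
    site_address = "1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P"     # example site address
    address_id = int(site_address.encode("hex"), 16)        # same conversion as getAuthAddress (Python 2)
    child_index = address_id % 2**31                        # assumed reduction to a non-hardened BIP32 index
    auth_privatekey = bip32_extract_key(bip32_ckd(bip32_master_key(master_seed), child_index))
    auth_address = privtoaddr(auth_privatekey)              # what ends up in sites[address]["auth_address"]
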
diff --git a/src/User/UserManager.py b/src/User/UserManager.py
new file mode 100644
index 00000000..e4983c45
--- /dev/null
+++ b/src/User/UserManager.py
@@ -0,0 +1,65 @@
+import json, logging, os
+from User import User
+
+users = None
+
+# Load all users from data/users.json
+def load():
+	global users
+	if not users: users = {}
+
+	user_found = []
+	added = 0
+	# Load new users
+	for master_address, data in json.load(open("data/users.json")).items():
+		if master_address not in users:
+			user = User(master_address)
+			user.setData(data)
+			users[master_address] = user
+			added += 1
+		user_found.append(master_address)
+
+	# Remove deleted addresses
+	for master_address in users.keys():
+		if master_address not in user_found: 
+			del(users[master_address])
+			logging.debug("Removed user: %s" % master_address)
+
+	if added: logging.debug("UserManager added %s users" % added)
+
+
+# Create new user
+# Return: User
+def create():
+	user = User()
+	logging.debug("Created user: %s" % user.master_address)
+	users[user.master_address] = user
+	user.save()
+	return user
+
+
+# List all users from data/users.json
+# Return: {"usermasteraddr": User}
+def list():
+	if users == None: # Not loaded yet
+		load()
+	return users
+
+
+# Get current authed user
+# Return: User
+def getCurrent():
+	users = list()
+	if users:
+		return users.values()[0]
+	else:
+		return create()
+
+
+# Debug: Reload User.py
+def reload():
+	import imp
+	global users, User
+	users.clear() # Remove all items
+	User = imp.load_source("User", "src/User/User.py").User # Reload source
+	load()
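
A short usage sketch of the module above; it assumes src/ is on the import path and that data/users.json already exists, since load() and save() read it directly:

    from User import UserManager

    user = UserManager.getCurrent()                       # first user in data/users.json, or a new one
    site_address = "1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P"   # example site address
    auth_address = user.getAuthAddress(site_address)      # derives and saves the key pair on first use
    auth_privatekey = user.getAuthPrivatekey(site_address)
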
diff --git a/src/User/__init__.py b/src/User/__init__.py
new file mode 100644
index 00000000..8d569979
--- /dev/null
+++ b/src/User/__init__.py
@@ -0,0 +1 @@
+from User import User
diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py
index 7bb5ea66..df651e55 100644
--- a/src/Worker/Worker.py
+++ b/src/Worker/Worker.py
@@ -36,7 +36,7 @@ class Worker:
 					self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"]))
 					return None
 				if buff: # Download ok
-					correct = task["site"].verifyFile(task["inner_path"], buff)
+					correct = task["site"].content_manager.verifyFile(task["inner_path"], buff)
 				else: # Download error
 					correct = False
 				if correct == True or correct == None: # Hash ok or same file
diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py
index c11887ed..7c970639 100644
--- a/src/Worker/WorkerManager.py
+++ b/src/Worker/WorkerManager.py
@@ -139,7 +139,7 @@ class WorkerManager:
 				peers = None
 			task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_added": time.time(), "time_started": None, "peers": peers, "priority": priority}
 			self.tasks.append(task)
-			self.log.debug("New task: %s, peer lock: %s" % (task, peers))
+			self.log.debug("New task: %s, peer lock: %s, priority: %s" % (task["inner_path"], peers, priority))
 			self.startWorkers(peers)
 			return evt
 
diff --git a/src/lib/pybitcointools/LICENSE b/src/lib/pybitcointools/LICENSE
new file mode 100644
index 00000000..c47d4ad0
--- /dev/null
+++ b/src/lib/pybitcointools/LICENSE
@@ -0,0 +1,27 @@
+This code is public domain. Everyone has the right to do whatever they want
+with it for any purpose.
+
+In case your jurisdiction does not consider the above disclaimer valid or 
+enforceable, here's an MIT license for you:
+
+The MIT License (MIT)
+
+Copyright (c) 2013 Vitalik Buterin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/src/lib/pybitcointools/MANIFEST.in b/src/lib/pybitcointools/MANIFEST.in
new file mode 100644
index 00000000..1aba38f6
--- /dev/null
+++ b/src/lib/pybitcointools/MANIFEST.in
@@ -0,0 +1 @@
+include LICENSE
diff --git a/src/lib/pybitcointools/README.txt b/src/lib/pybitcointools/README.txt
new file mode 100644
index 00000000..2f2876e7
--- /dev/null
+++ b/src/lib/pybitcointools/README.txt
@@ -0,0 +1,142 @@
+# Pybitcointools, Python library for Bitcoin signatures and transactions
+
+### Advantages:
+
+* Functions have a simple interface, inputting and outputting in standard formats
+* No classes
+* Many functions can be taken out and used individually
+* Supports binary, hex and base58
+* Transaction deserialization format almost compatible with BitcoinJS
+* Electrum and BIP0032 support
+* Make and publish a transaction all in a single command line instruction
+* Includes non-bitcoin-specific conversion and JSON utilities
+
+### Disadvantages:
+
+* Not a full node, has no idea what blocks are
+* Relies on centralized service (blockchain.info) for blockchain operations, although operations do have backups (eligius, blockr.io)
+
+### Example usage (best way to learn :) ):
+
+    > from bitcoin import *
+    > priv = sha256('some big long brainwallet password')
+    > priv
+    '57c617d9b4e1f7af6ec97ca2ff57e94a28279a7eedd4d12a99fa11170e94f5a4'
+    > pub = privtopub(priv)
+    > pub
+    '0420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9'
+    > addr = pubtoaddr(pub)
+    > addr
+    '1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'
+    > h = history(addr)
+    > h
+    [{'output': u'97f7c7d8ac85e40c255f8a763b6cd9a68f3a94d2e93e8bfa08f977b92e55465e:0', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}, {'output': u'4cc806bb04f730c445c60b3e0f4f44b54769a1c196ca37d8d4002135e4abd171:1', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}]
+    > outs = [{'value': 90000, 'address': '16iw1MQ1sy1DtRPYw3ao1bCamoyBJtRB4t'}]
+    > tx = mktx(h,outs)
+    > tx
+    '01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f7970000000000ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c0100000000ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000'
+    > tx2 = sign(tx,0,priv)
+    > tx2
+    '01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f797000000008b483045022100dd29d89a28451febb990fb1dafa21245b105140083ced315ebcdea187572b3990220713f2e554f384d29d7abfedf39f0eb92afba0ef46f374e49d43a728a0ff6046e01410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c0100000000ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000'
+    > tx3 = sign(tx2,1,priv)
+    > tx3
+    '01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f797000000008b483045022100dd29d89a28451febb990fb1dafa21245b105140083ced315ebcdea187572b3990220713f2e554f384d29d7abfedf39f0eb92afba0ef46f374e49d43a728a0ff6046e01410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c010000008c4930460221008bbaaaf172adfefc3a1315dc7312c88645832ff76d52e0029d127e65bbeeabe1022100fdeb89658d503cf2737cedb4049e5070f689c50a9b6c85997d49e0787938f93901410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000'
+    > pushtx(tx3)
+    'Transaction Submitted'
+
+Or using the pybtctool command line interface:
+
+    @vub: pybtctool random_electrum_seed
+    484ccb566edb66c65dd0fd2e4d90ef65
+
+    @vub: pybtctool electrum_privkey 484ccb566edb66c65dd0fd2e4d90ef65 0 0
+    593240c2205e7b7b5d7c13393b7c9553497854b75c7470b76aeca50cd4a894d7
+
+    @vub: pybtctool electrum_mpk 484ccb566edb66c65dd0fd2e4d90ef65
+    484e42865b8e9a6ea8262fd1cde666b557393258ed598d842e563ad9e5e6c70a97e387eefdef123c1b8b4eb21fe210c6216ad7cc1e4186fbbba70f0e2c062c25
+
+    @vub: pybtctool bip32_master_key 21456t243rhgtucyadh3wgyrcubw3grydfbng
+    xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT
+
+    @vub: pybtctool bip32_ckd xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT 0
+    xprv9vfzYrpwo7QHFdtrcvsSCTrBESFPUf1g7NRvayy1QkEfUekpDKLfqvHjgypF5w3nAvnwPjtQUNkyywWNkLbiUS95khfHCzJXFkLEdwRepbw 
+
+    @vub: pybtctool bip32_privtopub xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT
+    xpub661MyMwAqRbcFGfHrgLHpC5R4odnyasAZdefbDkHBcWarJcXh6SzTzbUkWuhnP142ZFdKdAJSuTSaiGDYjvm7bCLmA8DZqksYjJbYmcgrYF
+
+The -s option lets you read arguments from the command line
+
+    @vub: pybtctool sha256 'some big long brainwallet password' | pybtctool -s privtoaddr | pybtctool -s history
+    [{'output': u'97f7c7d8ac85e40c255f8a763b6cd9a68f3a94d2e93e8bfa08f977b92e55465e:0', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}, {'output': u'4cc806bb04f730c445c60b3e0f4f44b54769a1c196ca37d8d4002135e4abd171:1', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}]
+    @vub: pybtctool random_electrum_seed | pybtctool -s electrum_privkey 0 0
+    593240c2205e7b7b5d7c13393b7c9553497854b75c7470b76aeca50cd4a894d7
+
+The -b option lets you read binary data as an argument
+
+    @vub: pybtctool sha256 123 | pybtctool -s changebase 16 256 | pybtctool -b changebase 256 16
+    a665a45920422f9d417e4867efdc4fb8a04a1f3fff1fa07e998e86f7f7a27ae30a
+
+The -j option lets you read json from the command line (-J to split a json list into multiple arguments)
+
+    @vub: pybtctool unspent 1FxkfJQLJTXpW6QmxGT6oF43ZH959ns8Cq | pybtctool -j select 200000001 | pybtctool -j mksend 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P:20000 1FxkfJQLJTXpW6QmxGT6oF43ZH959ns8Cq 1000 | pybtctool -s signall 805cd74ca322633372b9bfb857f3be41db0b8de43a3c44353b238c0acff9d523
+    0100000003d5001aae8358ae98cb02c1b6f9859dc1ac3dbc1e9cc88632afeb7b7e3c510a49000000008b4830450221009e03bb6122437767e2ca785535824f4ed13d2ebbb9fa4f9becc6d6f4e1e217dc022064577353c08d8d974250143d920d3b963b463e43bbb90f3371060645c49266b90141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff1529d655dff6a0f6c9815ee835312fb3ca4df622fde21b6b9097666e9284087d010000008a473044022035dd67d18b575ebd339d05ca6ffa1d27d7549bd993aeaf430985795459fc139402201aaa162cc50181cee493870c9479b1148243a33923cb77be44a73ca554a4e5d60141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff23d5f9cf0a8c233b35443c3ae48d0bdb41bef357b8bfb972336322a34cd75c80010000008b483045022014daa5c5bbe9b3e5f2539a5cd8e22ce55bc84788f946c5b3643ecac85b4591a9022100a4062074a1df3fa0aea5ef67368d0b1f0eaac520bee6e417c682d83cd04330450141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff02204e0000000000001976a914946cb2e08075bcbaf157e47bcb67eb2b2339d24288ac5b3c4411000000001976a914a41d15ae657ad3bfd0846771a34d7584c37d54a288ac00000000
+
+Fun stuff with json:
+
+    @vub: pybtctool history 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P | pybtctool -j multiaccess value | pybtctool -j sum
+    625216206372
+
+    @vub: pybtctool history 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P | pybtctool -j count
+    6198
+
+### Listing of main commands:
+
+* privkey_to_pubkey    : (privkey) -> pubkey
+* privtopub            : (privkey) -> pubkey
+* pubkey_to_address    : (pubkey) -> address
+* pubtoaddr            : (pubkey) -> address
+* privkey_to_address   : (privkey) -> address
+* privtoaddr           : (privkey) -> address
+
+* add                  : (key1, key2) -> key1 + key2 (works on privkeys or pubkeys)
+* multiply             : (pubkey, privkey) -> returns pubkey * privkey
+
+* ecdsa_sign           : (message, privkey) -> sig
+* ecdsa_verify         : (message, sig, pubkey) -> True/False
+* ecdsa_recover        : (message, sig) -> pubkey
+
+* random_key           : () -> privkey
+* random_electrum_seed : () -> electrum seed
+
+* electrum_stretch     : (seed) -> secret exponent
+* electrum_privkey     : (seed or secret exponent, i, type) -> privkey
+* electrum_mpk         : (seed or secret exponent) -> master public key
+* electrum_pubkey      : (seed or secexp or mpk) -> pubkey
+
+* bip32_master_key     : (seed) -> bip32 master key
+* bip32_ckd            : (private or public bip32 key, i) -> child key
+* bip32_privtopub      : (private bip32 key) -> public bip32 key
+* bip32_extract_key    : (private or public bip32_key) -> privkey or pubkey
+
+* deserialize          : (hex or bin transaction) -> JSON tx
+* serialize            : (JSON tx) -> hex or bin tx
+* mktx                 : (inputs, outputs) -> tx
+* mksend               : (inputs, outputs, change_addr, fee) -> tx
+* sign                 : (tx, i, privkey) -> tx with index i signed with privkey
+* multisign            : (tx, i, script, privkey) -> signature
+* apply_multisignatures: (tx, i, script, sigs) -> tx with index i signed with sigs
+* scriptaddr           : (script) -> P2SH address
+* mk_multisig_script   : (pubkeys, k, n) -> k-of-n multisig script from pubkeys
+* verify_tx_input      : (tx, i, script, sig, pub) -> True/False
+* tx_hash              : (hex or bin tx) -> hash
+
+* history              : (address1, address2, etc) -> outputs to those addresses
+* unspent              : (address1, address2, etc) -> unspent outputs to those addresses
+* fetchtx              : (txhash) -> tx if present
+* pushtx               : (hex or bin tx) -> tries to push to blockchain.info/pushtx
+
+* access               : (json list/object, prop) -> desired property of that json object
+* multiaccess          : (json list, prop) -> like access, but mapped across each list element
+* slice                : (json list, start, end) -> given slice of the list
+* count                : (json list) -> number of elements
+* sum                  : (json list) -> sum of all values
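
The BIP32 helpers from the listing can also be chained directly from Python; a small sketch with a throwaway seed (non-hardened public derivation matches the neutered private derivation):

    > from bitcoin import *
    > master = bip32_master_key('some throwaway example seed')
    > child = bip32_ckd(master, 0)
    > bip32_privtopub(child) == bip32_ckd(bip32_privtopub(master), 0)
    True
    > addr = privtoaddr(bip32_extract_key(child))
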
diff --git a/src/lib/pybitcointools/__init__.py b/src/lib/pybitcointools/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/lib/pybitcointools/bitcoin/__init__.py b/src/lib/pybitcointools/bitcoin/__init__.py
new file mode 100644
index 00000000..8b543fee
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/__init__.py
@@ -0,0 +1,9 @@
+from .py2specials import *
+from .py3specials import *
+from .main import *
+from .transaction import *
+from .deterministic import *
+from .bci import *
+from .composite import *
+from .stealth import *
+from .blocks import *
diff --git a/src/lib/pybitcointools/bitcoin/bci.py b/src/lib/pybitcointools/bitcoin/bci.py
new file mode 100644
index 00000000..aa3d0389
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/bci.py
@@ -0,0 +1,269 @@
+#!/usr/bin/python
+import json, re
+import random
+import sys
+try:
+    from urllib.request import build_opener
+except:
+    from urllib2 import build_opener
+
+
+# Makes a request to a given URL (first arg) and optional params (second arg)
+def make_request(*args):
+    opener = build_opener()
+    opener.addheaders = [('User-agent',
+                          'Mozilla/5.0'+str(random.randrange(1000000)))]
+    try:
+        return opener.open(*args).read().strip()
+    except Exception as e:
+        try:
+            p = e.read().strip()
+        except:
+            p = e
+        raise Exception(p)
+
+
+# Gets the unspent outputs of one or more addresses
+def unspent(*args):
+    # Valid input formats: unspent([addr1, addr2,addr3])
+    #                      unspent(addr1, addr2, addr3)
+    if len(args) == 0:
+        return []
+    elif isinstance(args[0], list):
+        addrs = args[0]
+    else:
+        addrs = args
+    u = []
+    for a in addrs:
+        try:
+            data = make_request('https://blockchain.info/unspent?address='+a)
+        except Exception as e:
+            if str(e) == 'No free outputs to spend':
+                continue
+            else:
+                raise Exception(e)
+        try:
+            jsonobj = json.loads(data)
+            for o in jsonobj["unspent_outputs"]:
+                h = o['tx_hash'].decode('hex')[::-1].encode('hex')
+                u.append({
+                    "output": h+':'+str(o['tx_output_n']),
+                    "value": o['value']
+                })
+        except:
+            raise Exception("Failed to decode data: "+data)
+    return u
+
+
+def blockr_unspent(*args):
+    # Valid input formats: blockr_unspent([addr1, addr2,addr3])
+    #                      blockr_unspent(addr1, addr2, addr3)
+    #                      blockr_unspent([addr1, addr2, addr3], network)
+    #                      blockr_unspent(addr1, addr2, addr3, network)
+    # Where network is 'btc' or 'testnet'
+    network = 'btc'
+    addr_args = args
+    if len(args) >= 1 and args[-1] in ('testnet', 'btc'):
+        network = args[-1]
+        addr_args = args[:-1]
+
+    if network == 'testnet':
+        blockr_url = 'https://tbtc.blockr.io/api/v1/address/unspent/'
+    elif network == 'btc':
+        blockr_url = 'https://btc.blockr.io/api/v1/address/unspent/'
+    else:
+        raise Exception(
+            'Unsupported network {0} for blockr_unspent'.format(network))
+
+    if len(addr_args) == 0:
+        return []
+    elif isinstance(addr_args[0], list):
+        addrs = addr_args[0]
+    else:
+        addrs = addr_args
+    res = make_request(blockr_url+','.join(addrs))
+    data = json.loads(res)['data']
+    o = []
+    if 'unspent' in data:
+        data = [data]
+    for dat in data:
+        for u in dat['unspent']:
+            o.append({
+                "output": u['tx']+':'+str(u['n']),
+                "value": int(u['amount'].replace('.', ''))
+            })
+    return o
+
+
+# Gets the transaction output history of a given set of addresses,
+# including whether or not they have been spent
+def history(*args):
+    # Valid input formats: history([addr1, addr2,addr3])
+    #                      history(addr1, addr2, addr3)
+    if len(args) == 0:
+        return []
+    elif isinstance(args[0], list):
+        addrs = args[0]
+    else:
+        addrs = args
+
+    txs = []
+    for addr in addrs:
+        offset = 0
+        while 1:
+            data = make_request(
+                'https://blockchain.info/address/%s?format=json&offset=%s' %
+                (addr, offset))
+            try:
+                jsonobj = json.loads(data)
+            except:
+                raise Exception("Failed to decode data: "+data)
+            txs.extend(jsonobj["txs"])
+            if len(jsonobj["txs"]) < 50:
+                break
+            offset += 50
+            sys.stderr.write("Fetching more transactions... "+str(offset)+'\n')
+    outs = {}
+    for tx in txs:
+        for o in tx["out"]:
+            if o['addr'] in addrs:
+                key = str(tx["tx_index"])+':'+str(o["n"])
+                outs[key] = {
+                    "address": o["addr"],
+                    "value": o["value"],
+                    "output": tx["hash"]+':'+str(o["n"]),
+                    "block_height": tx.get("block_height", None)
+                }
+    for tx in txs:
+        for i, inp in enumerate(tx["inputs"]):
+            if inp["prev_out"]["addr"] in addrs:
+                key = str(inp["prev_out"]["tx_index"]) + \
+                    ':'+str(inp["prev_out"]["n"])
+                if outs.get(key):
+                    outs[key]["spend"] = tx["hash"]+':'+str(i)
+    return [outs[k] for k in outs]
+
+
+# Pushes a transaction to the network using https://blockchain.info/pushtx
+def pushtx(tx):
+    if not re.match('^[0-9a-fA-F]*$', tx):
+        tx = tx.encode('hex')
+    return make_request('https://blockchain.info/pushtx', 'tx='+tx)
+
+
+def eligius_pushtx(tx):
+    if not re.match('^[0-9a-fA-F]*$', tx):
+        tx = tx.encode('hex')
+    s = make_request(
+        'http://eligius.st/~wizkid057/newstats/pushtxn.php',
+        'transaction='+tx+'&send=Push')
+    strings = re.findall('string[^"]*"[^"]*"', s)
+    for string in strings:
+        quote = re.findall('"[^"]*"', string)[0]
+        if len(quote) >= 5:
+            return quote[1:-1]
+
+
+def blockr_pushtx(tx, network='btc'):
+    if network == 'testnet':
+        blockr_url = 'https://tbtc.blockr.io/api/v1/tx/push'
+    elif network == 'btc':
+        blockr_url = 'https://btc.blockr.io/api/v1/tx/push'
+    else:
+        raise Exception(
+            'Unsupported network {0} for blockr_pushtx'.format(network))
+
+    if not re.match('^[0-9a-fA-F]*$', tx):
+        tx = tx.encode('hex')
+    return make_request(blockr_url, '{"hex":"%s"}' % tx)
+
+
+def helloblock_pushtx(tx):
+    if not re.match('^[0-9a-fA-F]*$', tx):
+        tx = tx.encode('hex')
+    return make_request('https://mainnet.helloblock.io/v1/transactions',
+                        'rawTxHex='+tx)
+
+
+def last_block_height():
+    data = make_request('https://blockchain.info/latestblock')
+    jsonobj = json.loads(data)
+    return jsonobj["height"]
+
+
+# Gets a specific transaction
+def bci_fetchtx(txhash):
+    if not re.match('^[0-9a-fA-F]*$', txhash):
+        txhash = txhash.encode('hex')
+    data = make_request('https://blockchain.info/rawtx/'+txhash+'?format=hex')
+    return data
+
+
+def blockr_fetchtx(txhash, network='btc'):
+    if network == 'testnet':
+        blockr_url = 'https://tbtc.blockr.io/api/v1/tx/raw/'
+    elif network == 'btc':
+        blockr_url = 'https://btc.blockr.io/api/v1/tx/raw/'
+    else:
+        raise Exception(
+            'Unsupported network {0} for blockr_fetchtx'.format(network))
+    if not re.match('^[0-9a-fA-F]*$', txhash):
+        txhash = txhash.encode('hex')
+    jsondata = json.loads(make_request(blockr_url+txhash))
+    return jsondata['data']['tx']['hex']
+
+
+def fetchtx(txhash):
+    try:
+        return bci_fetchtx(txhash)
+    except:
+        return blockr_fetchtx(txhash)
+
+
+def firstbits(address):
+    if len(address) >= 25:
+        return make_request('https://blockchain.info/q/getfirstbits/'+address)
+    else:
+        return make_request(
+            'https://blockchain.info/q/resolvefirstbits/'+address)
+
+
+def get_block_at_height(height):
+    j = json.loads(make_request("https://blockchain.info/block-height/" +
+                   str(height)+"?format=json"))
+    for b in j['blocks']:
+        if b['main_chain'] is True:
+            return b
+    raise Exception("Block at this height not found")
+
+
+def _get_block(inp):
+    if len(str(inp)) < 64:
+        return get_block_at_height(inp)
+    else:
+        return json.loads(make_request(
+                          'https://blockchain.info/rawblock/'+inp))
+
+
+def get_block_header_data(inp):
+    j = _get_block(inp)
+    return {
+        'version': j['ver'],
+        'hash': j['hash'],
+        'prevhash': j['prev_block'],
+        'timestamp': j['time'],
+        'merkle_root': j['mrkl_root'],
+        'bits': j['bits'],
+        'nonce': j['nonce'],
+    }
+
+
+def get_txs_in_block(inp):
+    j = _get_block(inp)
+    hashes = [t['hash'] for t in j['tx']]
+    return hashes
+
+
+def get_block_height(txhash):
+    j = json.loads(make_request('https://blockchain.info/rawtx/'+txhash))
+    return j['block_height']
diff --git a/src/lib/pybitcointools/bitcoin/blocks.py b/src/lib/pybitcointools/bitcoin/blocks.py
new file mode 100644
index 00000000..d2e51799
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/blocks.py
@@ -0,0 +1,48 @@
+from .main import *
+
+
+def serialize_header(inp):
+    o = encode(inp['version'], 256, 4)[::-1] + \
+        inp['prevhash'].decode('hex')[::-1] + \
+        inp['merkle_root'].decode('hex')[::-1] + \
+        encode(inp['timestamp'], 256, 4)[::-1] + \
+        encode(inp['bits'], 256, 4)[::-1] + \
+        encode(inp['nonce'], 256, 4)[::-1]
+    h = bin_sha256(bin_sha256(o))[::-1].encode('hex')
+    assert h == inp['hash'], (sha256(o), inp['hash'])
+    return o.encode('hex')
+
+
+def deserialize_header(inp):
+    inp = inp.decode('hex')
+    return {
+        "version": decode(inp[:4][::-1], 256),
+        "prevhash": inp[4:36][::-1].encode('hex'),
+        "merkle_root": inp[36:68][::-1].encode('hex'),
+        "timestamp": decode(inp[68:72][::-1], 256),
+        "bits": decode(inp[72:76][::-1], 256),
+        "nonce": decode(inp[76:80][::-1], 256),
+        "hash": bin_sha256(bin_sha256(inp))[::-1].encode('hex')
+    }
+
+
+def mk_merkle_proof(header, hashes, index):
+    nodes = [h.decode('hex')[::-1] for h in hashes]
+    layers = [nodes]
+    while len(nodes) > 1:
+        newnodes = []
+        for i in range(0, len(nodes) - 1, 2):
+            newnodes.append(bin_sha256(bin_sha256(nodes[i] + nodes[i+1])))
+        if len(nodes) % 2:
+            newnodes.append(bin_sha256(bin_sha256(nodes[-1] + nodes[-1])))
+        nodes = newnodes
+        layers.append(nodes)
+    # Sanity check, make sure merkle root is valid
+    assert nodes[0][::-1].encode('hex') == header['merkle_root']
+    merkle_siblings = \
+        [layers[i][(index >> i) ^ 1] for i in range(len(layers)-1)]
+    return {
+        "hash": hashes[index],
+        "siblings": [x[::-1].encode('hex') for x in merkle_siblings],
+        "header": header
+    }
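
As a companion sketch (not part of the diff), a proof returned by mk_merkle_proof can be checked by folding the siblings back into the root; verify_merkle_proof is a hypothetical helper, and the byte-order reversals mirror the ones used above:

    from bitcoin import bin_sha256

    def verify_merkle_proof(proof, index):
        node = proof["hash"].decode("hex")[::-1]            # display order -> internal order
        for level, sibling_hex in enumerate(proof["siblings"]):
            sibling = sibling_hex.decode("hex")[::-1]
            if (index >> level) & 1:                        # our node is the right child on this level
                node = bin_sha256(bin_sha256(sibling + node))
            else:                                           # our node is the left child on this level
                node = bin_sha256(bin_sha256(node + sibling))
        return node[::-1].encode("hex") == proof["header"]["merkle_root"]
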
diff --git a/src/lib/pybitcointools/bitcoin/composite.py b/src/lib/pybitcointools/bitcoin/composite.py
new file mode 100644
index 00000000..536d8578
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/composite.py
@@ -0,0 +1,128 @@
+from .main import *
+from .transaction import *
+from .bci import *
+from .deterministic import *
+from .blocks import *
+
+
+# Takes privkey, address, value (satoshis), fee (satoshis)
+def send(frm, to, value, fee=10000):
+    return sendmultitx(frm, to + ":" + str(value), fee)
+
+
+# Takes privkey, "address1:value1,address2:value2" (satoshis), fee (satoshis)
+def sendmultitx(frm, tovalues, fee=10000):
+    outs = []
+    outvalue = 0
+    tv = tovalues.split(",")
+    for a in tv:
+        outs.append(a)
+        outvalue += int(a.split(":")[1])
+
+    u = unspent(privtoaddr(frm))
+    u2 = select(u, int(outvalue)+int(fee))
+    argz = u2 + outs + [frm, fee]
+    tx = mksend(*argz)
+    tx2 = signall(tx, frm)
+    return pushtx(tx2)
+
+
+# Takes address, address, value (satoshis), fee(satoshis)
+def preparetx(frm, to, value, fee=10000):
+    tovalues = to + ":" + str(value)
+    return preparemultitx(frm, tovalues, fee)
+
+
+# Takes address, address:value, address:value ... (satoshis), fee(satoshis)
+def preparemultitx(frm, *args):
+    tv, fee = args[:-1], int(args[-1])
+    outs = []
+    outvalue = 0
+    for a in tv:
+        outs.append(a)
+        outvalue += int(a.split(":")[1])
+
+    u = unspent(frm)
+    u2 = select(u, int(outvalue)+int(fee))
+    argz = u2 + outs + [frm, fee]
+    return mksend(*argz)
+
+
+# BIP32 hierarchical deterministic multisig script
+def bip32_hdm_script(*args):
+    if len(args) == 3:
+        keys, req, path = args
+    else:
+        i, keys, path = 0, [], []
+        while len(args[i]) > 40:
+            keys.append(args[i])
+            i += 1
+        req = int(args[i])
+        path = map(int, args[i+1:])
+    pubs = sorted(map(lambda x: bip32_descend(x, path), keys))
+    return mk_multisig_script(pubs, req)
+
+
+# BIP32 hierarchical deterministic multisig address
+def bip32_hdm_addr(*args):
+    return scriptaddr(bip32_hdm_script(*args))
+
+
+# Setup a coinvault transaction
+def setup_coinvault_tx(tx, script):
+    txobj = deserialize(tx)
+    N = deserialize_script(script)[-2]
+    for inp in txobj["ins"]:
+        inp["script"] = serialize_script([None] * (N+1) + [script])
+    return serialize(txobj)
+
+
+# Sign a coinvault transaction
+def sign_coinvault_tx(tx, priv):
+    pub = privtopub(priv)
+    txobj = deserialize(tx)
+    subscript = deserialize_script(txobj['ins'][0]['script'])
+    oscript = deserialize_script(subscript[-1])
+    k, pubs = oscript[0], oscript[1:-2]
+    for j in range(len(txobj['ins'])):
+        scr = deserialize_script(txobj['ins'][j]['script'])
+        for i, p in enumerate(pubs):
+            if p == pub:
+                scr[i+1] = multisign(tx, j, subscript[-1], priv)
+        if len(filter(lambda x: x, scr[1:-1])) >= k:
+            scr = [None] + filter(lambda x: x, scr[1:-1])[:k] + [scr[-1]]
+        txobj['ins'][j]['script'] = serialize_script(scr)
+    return serialize(txobj)
+
+
+# Inspects a transaction
+def inspect(tx):
+    d = deserialize(tx)
+    isum = 0
+    ins = {}
+    for _in in d['ins']:
+        h = _in['outpoint']['hash']
+        i = _in['outpoint']['index']
+        prevout = deserialize(fetchtx(h))['outs'][i]
+        isum += prevout['value']
+        a = script_to_address(prevout['script'])
+        ins[a] = ins.get(a, 0) + prevout['value']
+    outs = []
+    osum = 0
+    for _out in d['outs']:
+        outs.append({'address': script_to_address(_out['script']),
+                     'value': _out['value']})
+        osum += _out['value']
+    return {
+        'fee': isum - osum,
+        'outs': outs,
+        'ins': ins
+    }
+
+
+def merkle_prove(txhash):
+    blocknum = str(get_block_height(txhash))
+    header = get_block_header_data(blocknum)
+    hashes = get_txs_in_block(blocknum)
+    i = hashes.index(txhash)
+    return mk_merkle_proof(header, hashes, i)
diff --git a/src/lib/pybitcointools/bitcoin/deterministic.py b/src/lib/pybitcointools/bitcoin/deterministic.py
new file mode 100644
index 00000000..222e2f93
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/deterministic.py
@@ -0,0 +1,193 @@
+from .main import *
+import hmac
+import hashlib
+from binascii import hexlify
+# Electrum wallets
+
+
+def electrum_stretch(seed):
+    return slowsha(seed)
+
+# Accepts seed or stretched seed, returns master public key
+
+
+def electrum_mpk(seed):
+    if len(seed) == 32:
+        seed = electrum_stretch(seed)
+    return privkey_to_pubkey(seed)[2:]
+
+# Accepts (seed or stretched seed), index and secondary index
+# (conventionally 0 for ordinary addresses, 1 for change) , returns privkey
+
+
+def electrum_privkey(seed, n, for_change=0):
+    if len(seed) == 32:
+        seed = electrum_stretch(seed)
+    mpk = electrum_mpk(seed)
+    offset = dbl_sha256(from_int_representation_to_bytes(n)+b':'+from_int_representation_to_bytes(for_change)+b':'+binascii.unhexlify(mpk))
+    return add_privkeys(seed, offset)
+
+# Accepts (seed or stretched seed or master pubkey), index and secondary index
+# (conventionally 0 for ordinary addresses, 1 for change) , returns pubkey
+
+
+def electrum_pubkey(masterkey, n, for_change=0):
+    if len(masterkey) == 32:
+        mpk = electrum_mpk(electrum_stretch(masterkey))
+    elif len(masterkey) == 64:
+        mpk = electrum_mpk(masterkey)
+    else:
+        mpk = masterkey
+    bin_mpk = encode_pubkey(mpk, 'bin_electrum')
+    offset = bin_dbl_sha256(from_int_representation_to_bytes(n)+b':'+from_int_representation_to_bytes(for_change)+b':'+bin_mpk)
+    return add_pubkeys('04'+mpk, privtopub(offset))
+
+# seed/stretched seed/pubkey -> address (convenience method)
+
+
+def electrum_address(masterkey, n, for_change=0, version=0):
+    return pubkey_to_address(electrum_pubkey(masterkey, n, for_change), version)
+
+# Given a master public key, a private key from that wallet and its index,
+# cracks the secret exponent which can be used to generate all other private
+# keys in the wallet
+
+
+def crack_electrum_wallet(mpk, pk, n, for_change=0):
+    bin_mpk = encode_pubkey(mpk, 'bin_electrum')
+    offset = dbl_sha256(str(n)+':'+str(for_change)+':'+bin_mpk)
+    return subtract_privkeys(pk, offset)
+
+# Below code ASSUMES binary inputs and compressed pubkeys
+PRIVATE = b'\x04\x88\xAD\xE4'
+PUBLIC = b'\x04\x88\xB2\x1E'
+
+# BIP32 child key derivation
+
+
+def raw_bip32_ckd(rawtuple, i):
+    vbytes, depth, fingerprint, oldi, chaincode, key = rawtuple
+    i = int(i)
+
+    if vbytes == PRIVATE:
+        priv = key
+        pub = privtopub(key)
+    else:
+        pub = key
+
+    if i >= 2**31:
+        if vbytes == PUBLIC:
+            raise Exception("Can't do private derivation on public key!")
+        I = hmac.new(chaincode, b'\x00'+priv[:32]+encode(i, 256, 4), hashlib.sha512).digest()
+    else:
+        I = hmac.new(chaincode, pub+encode(i, 256, 4), hashlib.sha512).digest()
+
+    if vbytes == PRIVATE:
+        newkey = add_privkeys(I[:32]+b'\x01', priv)
+        fingerprint = bin_hash160(privtopub(key))[:4]
+    if vbytes == PUBLIC:
+        newkey = add_pubkeys(compress(privtopub(I[:32])), key)
+        fingerprint = bin_hash160(key)[:4]
+
+    return (vbytes, depth + 1, fingerprint, i, I[32:], newkey)
+
+
+def bip32_serialize(rawtuple):
+    vbytes, depth, fingerprint, i, chaincode, key = rawtuple
+    i = encode(i, 256, 4)
+    chaincode = encode(hash_to_int(chaincode), 256, 32)
+    keydata = b'\x00'+key[:-1] if vbytes == PRIVATE else key
+    bindata = vbytes + from_int_to_byte(depth % 256) + fingerprint + i + chaincode + keydata
+    return changebase(bindata+bin_dbl_sha256(bindata)[:4], 256, 58)
+
+
+def bip32_deserialize(data):
+    dbin = changebase(data, 58, 256)
+    if bin_dbl_sha256(dbin[:-4])[:4] != dbin[-4:]:
+        raise Exception("Invalid checksum")
+    vbytes = dbin[0:4]
+    depth = from_byte_to_int(dbin[4])
+    fingerprint = dbin[5:9]
+    i = decode(dbin[9:13], 256)
+    chaincode = dbin[13:45]
+    key = dbin[46:78]+b'\x01' if vbytes == PRIVATE else dbin[45:78]
+    return (vbytes, depth, fingerprint, i, chaincode, key)
+
+
+def raw_bip32_privtopub(rawtuple):
+    vbytes, depth, fingerprint, i, chaincode, key = rawtuple
+    return (PUBLIC, depth, fingerprint, i, chaincode, privtopub(key))
+
+
+def bip32_privtopub(data):
+    return bip32_serialize(raw_bip32_privtopub(bip32_deserialize(data)))
+
+
+def bip32_ckd(data, i):
+    return bip32_serialize(raw_bip32_ckd(bip32_deserialize(data), i))
+
+
+def bip32_master_key(seed):
+    I = hmac.new(from_string_to_bytes("Bitcoin seed"), seed, hashlib.sha512).digest()
+    return bip32_serialize((PRIVATE, 0, b'\x00'*4, 0, I[32:], I[:32]+b'\x01'))
+
+
+def bip32_bin_extract_key(data):
+    return bip32_deserialize(data)[-1]
+
+
+def bip32_extract_key(data):
+    return safe_hexlify(bip32_deserialize(data)[-1])
+
+# Exploits the same vulnerability as above in Electrum wallets
+# Takes a BIP32 pubkey and one of the child privkeys of its corresponding
+# privkey and returns the BIP32 privkey associated with that pubkey
+
+
+def raw_crack_bip32_privkey(parent_pub, priv):
+    vbytes, depth, fingerprint, i, chaincode, key = priv
+    pvbytes, pdepth, pfingerprint, pi, pchaincode, pkey = parent_pub
+    i = int(i)
+
+    if i >= 2**31:
+        raise Exception("Can't crack private derivation!")
+
+    I = hmac.new(pchaincode, pkey+encode(i, 256, 4), hashlib.sha512).digest()
+
+    pprivkey = subtract_privkeys(key, I[:32]+b'\x01')
+
+    return (PRIVATE, pdepth, pfingerprint, pi, pchaincode, pprivkey)
+
+
+def crack_bip32_privkey(parent_pub, priv):
+    dsppub = bip32_deserialize(parent_pub)
+    dspriv = bip32_deserialize(priv)
+    return bip32_serialize(raw_crack_bip32_privkey(dsppub, dspriv))
+
+
+def coinvault_pub_to_bip32(*args):
+    if len(args) == 1:
+        args = args[0].split(' ')
+    vals = map(int, args[34:])
+    I1 = ''.join(map(chr, vals[:33]))
+    I2 = ''.join(map(chr, vals[35:67]))
+    return bip32_serialize((PUBLIC, 0, b'\x00'*4, 0, I2, I1))
+
+
+def coinvault_priv_to_bip32(*args):
+    if len(args) == 1:
+        args = args[0].split(' ')
+    vals = map(int, args[34:])
+    I2 = ''.join(map(chr, vals[35:67]))
+    I3 = ''.join(map(chr, vals[72:104]))
+    return bip32_serialize((PRIVATE, 0, b'\x00'*4, 0, I2, I3+b'\x01'))
+
+
+def bip32_descend(*args):
+    if len(args) == 2:
+        key, path = args
+    else:
+        key, path = args[0], map(int, args[1:])
+    for p in path:
+        key = bip32_ckd(key, p)
+    return bip32_extract_key(key)
diff --git a/src/lib/pybitcointools/bitcoin/main.py b/src/lib/pybitcointools/bitcoin/main.py
new file mode 100644
index 00000000..cd51f85c
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/main.py
@@ -0,0 +1,550 @@
+#!/usr/bin/python
+from .py2specials import *
+from .py3specials import *
+import binascii
+import hashlib
+import re
+import sys
+import os
+import base64
+import time
+import random
+import hmac
+from .ripemd import *
+
+# Elliptic curve parameters (secp256k1)
+
+P = 2**256 - 2**32 - 977
+N = 115792089237316195423570985008687907852837564279074904382605163141518161494337
+A = 0
+B = 7
+Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240
+Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424
+G = (Gx, Gy)
+
+
+def change_curve(p, n, a, b, gx, gy):
+    global P, N, A, B, Gx, Gy, G
+    P, N, A, B, Gx, Gy = p, n, a, b, gx, gy
+    G = (Gx, Gy)
+
+
+def getG():
+    return G
+
+# Extended Euclidean Algorithm
+
+
+def inv(a, n):
+    lm, hm = 1, 0
+    low, high = a % n, n
+    while low > 1:
+        r = high//low
+        nm, new = hm-lm*r, high-low*r
+        lm, low, hm, high = nm, new, lm, low
+    return lm % n
+
+
+
+# JSON access (for pybtctool convenience)
+
+
+def access(obj, prop):
+    if isinstance(obj, dict):
+        if prop in obj:
+            return obj[prop]
+        elif '.' in prop:
+            return obj[float(prop)]
+        else:
+            return obj[int(prop)]
+    else:
+        return obj[int(prop)]
+
+
+def multiaccess(obj, prop):
+    return [access(o, prop) for o in obj]
+
+
+def slice(obj, start=0, end=2**200):
+    return obj[int(start):int(end)]
+
+
+def count(obj):
+    return len(obj)
+
+_sum = sum
+
+
+def sum(obj):
+    return _sum(obj)
+
+
+# Elliptic curve Jordan form functions
+# P = (m, n, p, q) where m/n = x, p/q = y
+
+def isinf(p):
+    return p[0] == 0 and p[1] == 0
+
+
+def jordan_isinf(p):
+    return p[0][0] == 0 and p[1][0] == 0
+
+
+def mulcoords(c1, c2):
+    return (c1[0] * c2[0] % P, c1[1] * c2[1] % P)
+
+
+def mul_by_const(c, v):
+    return (c[0] * v % P, c[1])
+
+
+def addcoords(c1, c2):
+    return ((c1[0] * c2[1] + c2[0] * c1[1]) % P, c1[1] * c2[1] % P)
+
+
+def subcoords(c1, c2):
+    return ((c1[0] * c2[1] - c2[0] * c1[1]) % P, c1[1] * c2[1] % P)
+
+
+def invcoords(c):
+    return (c[1], c[0])
+
+
+def jordan_add(a, b):
+    if jordan_isinf(a):
+        return b
+    if jordan_isinf(b):
+        return a
+
+    if (a[0][0] * b[0][1] - b[0][0] * a[0][1]) % P == 0:
+        if (a[1][0] * b[1][1] - b[1][0] * a[1][1]) % P == 0:
+            return jordan_double(a)
+        else:
+            return ((0, 1), (0, 1))
+    xdiff = subcoords(b[0], a[0])
+    ydiff = subcoords(b[1], a[1])
+    m = mulcoords(ydiff, invcoords(xdiff))
+    x = subcoords(subcoords(mulcoords(m, m), a[0]), b[0])
+    y = subcoords(mulcoords(m, subcoords(a[0], x)), a[1])
+    return (x, y)
+
+
+def jordan_double(a):
+    if jordan_isinf(a):
+        return ((0, 1), (0, 1))
+    num = addcoords(mul_by_const(mulcoords(a[0], a[0]), 3), (A, 1))
+    den = mul_by_const(a[1], 2)
+    m = mulcoords(num, invcoords(den))
+    x = subcoords(mulcoords(m, m), mul_by_const(a[0], 2))
+    y = subcoords(mulcoords(m, subcoords(a[0], x)), a[1])
+    return (x, y)
+
+
+def jordan_multiply(a, n):
+    if jordan_isinf(a) or n == 0:
+        return ((0, 0), (0, 0))
+    if n == 1:
+        return a
+    if n < 0 or n >= N:
+        return jordan_multiply(a, n % N)
+    if (n % 2) == 0:
+        return jordan_double(jordan_multiply(a, n//2))
+    if (n % 2) == 1:
+        return jordan_add(jordan_double(jordan_multiply(a, n//2)), a)
+
+
+def to_jordan(p):
+    return ((p[0], 1), (p[1], 1))
+
+
+def from_jordan(p):
+    return (p[0][0] * inv(p[0][1], P) % P, p[1][0] * inv(p[1][1], P) % P)
+
+
+def fast_multiply(a, n):
+    return from_jordan(jordan_multiply(to_jordan(a), n))
+
+
+def fast_add(a, b):
+    return from_jordan(jordan_add(to_jordan(a), to_jordan(b)))
+
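
A quick sanity sketch for the Jordan-form arithmetic above (not part of the library; it assumes the bundled package imports as bitcoin.main):

    from bitcoin.main import G, N, fast_add, fast_multiply, isinf

    # Scalar multiplication agrees with repeated point addition.
    assert fast_multiply(G, 3) == fast_add(fast_add(G, G), G)
    # G has order N, so (N-1)*G + G is the point at infinity.
    assert isinf(fast_add(fast_multiply(G, N - 1), G))
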
+# Functions for handling pubkey and privkey formats
+
+
+def get_pubkey_format(pub):
+    if is_python2:
+        two = '\x02'
+        three = '\x03'
+        four = '\x04'
+    else:
+        two = 2
+        three = 3
+        four = 4
+    
+    if isinstance(pub, (tuple, list)): return 'decimal'
+    elif len(pub) == 65 and pub[0] == four: return 'bin'
+    elif len(pub) == 130 and pub[0:2] == '04': return 'hex'
+    elif len(pub) == 33 and pub[0] in [two, three]: return 'bin_compressed'
+    elif len(pub) == 66 and pub[0:2] in ['02', '03']: return 'hex_compressed'
+    elif len(pub) == 64: return 'bin_electrum'
+    elif len(pub) == 128: return 'hex_electrum'
+    else: raise Exception("Pubkey not in recognized format")
+
+
+def encode_pubkey(pub, formt):
+    if not isinstance(pub, (tuple, list)):
+        pub = decode_pubkey(pub)
+    if formt == 'decimal': return pub
+    elif formt == 'bin': return b'\x04' + encode(pub[0], 256, 32) + encode(pub[1], 256, 32)
+    elif formt == 'bin_compressed': 
+        return from_int_to_byte(2+(pub[1] % 2)) + encode(pub[0], 256, 32)
+    elif formt == 'hex': return '04' + encode(pub[0], 16, 64) + encode(pub[1], 16, 64)
+    elif formt == 'hex_compressed': 
+        return '0'+str(2+(pub[1] % 2)) + encode(pub[0], 16, 64)
+    elif formt == 'bin_electrum': return encode(pub[0], 256, 32) + encode(pub[1], 256, 32)
+    elif formt == 'hex_electrum': return encode(pub[0], 16, 64) + encode(pub[1], 16, 64)
+    else: raise Exception("Invalid format!")
+
+
+def decode_pubkey(pub, formt=None):
+    if not formt: formt = get_pubkey_format(pub)
+    if formt == 'decimal': return pub
+    elif formt == 'bin': return (decode(pub[1:33], 256), decode(pub[33:65], 256))
+    elif formt == 'bin_compressed':
+        x = decode(pub[1:33], 256)
+        beta = pow(int(x*x*x+A*x+B), int((P+1)//4), int(P))
+        y = (P-beta) if ((beta + from_byte_to_int(pub[0])) % 2) else beta
+        return (x, y)
+    elif formt == 'hex': return (decode(pub[2:66], 16), decode(pub[66:130], 16))
+    elif formt == 'hex_compressed':
+        return decode_pubkey(safe_from_hex(pub), 'bin_compressed')
+    elif formt == 'bin_electrum':
+        return (decode(pub[:32], 256), decode(pub[32:64], 256))
+    elif formt == 'hex_electrum':
+        return (decode(pub[:64], 16), decode(pub[64:128], 16))
+    else: raise Exception("Invalid format!")
+
+def get_privkey_format(priv):
+    if isinstance(priv, int_types): return 'decimal'
+    elif len(priv) == 32: return 'bin'
+    elif len(priv) == 33: return 'bin_compressed'
+    elif len(priv) == 64: return 'hex'
+    elif len(priv) == 66: return 'hex_compressed'
+    else:
+        bin_p = b58check_to_bin(priv)
+        if len(bin_p) == 32: return 'wif'
+        elif len(bin_p) == 33: return 'wif_compressed'
+        else: raise Exception("WIF does not represent privkey")
+
+def encode_privkey(priv, formt, vbyte=0):
+    if not isinstance(priv, int_types):
+        return encode_privkey(decode_privkey(priv), formt, vbyte)
+    if formt == 'decimal': return priv
+    elif formt == 'bin': return encode(priv, 256, 32)
+    elif formt == 'bin_compressed': return encode(priv, 256, 32)+b'\x01'
+    elif formt == 'hex': return encode(priv, 16, 64)
+    elif formt == 'hex_compressed': return encode(priv, 16, 64)+'01'
+    elif formt == 'wif':
+        return bin_to_b58check(encode(priv, 256, 32), 128+int(vbyte))
+    elif formt == 'wif_compressed':
+        return bin_to_b58check(encode(priv, 256, 32)+b'\x01', 128+int(vbyte))
+    else: raise Exception("Invalid format!")
+
+def decode_privkey(priv,formt=None):
+    if not formt: formt = get_privkey_format(priv)
+    if formt == 'decimal': return priv
+    elif formt == 'bin': return decode(priv, 256)
+    elif formt == 'bin_compressed': return decode(priv[:32], 256)
+    elif formt == 'hex': return decode(priv, 16)
+    elif formt == 'hex_compressed': return decode(priv[:64], 16)
+    elif formt == 'wif': return decode(b58check_to_bin(priv),256)
+    elif formt == 'wif_compressed':
+        return decode(b58check_to_bin(priv)[:32],256)
+    else: raise Exception("WIF does not represent privkey")
+
+def add_pubkeys(p1, p2):
+    f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2)
+    return encode_pubkey(fast_add(decode_pubkey(p1, f1), decode_pubkey(p2, f2)), f1)
+
+def add_privkeys(p1, p2):
+    f1, f2 = get_privkey_format(p1), get_privkey_format(p2)
+    return encode_privkey((decode_privkey(p1, f1) + decode_privkey(p2, f2)) % N, f1)
+
+
+def multiply(pubkey, privkey):
+    f1, f2 = get_pubkey_format(pubkey), get_privkey_format(privkey)
+    pubkey, privkey = decode_pubkey(pubkey, f1), decode_privkey(privkey, f2)
+    # http://safecurves.cr.yp.to/twist.html
+    if not isinf(pubkey) and (pubkey[0]**3+B-pubkey[1]*pubkey[1]) % P != 0:
+        raise Exception("Point not on curve")
+    return encode_pubkey(fast_multiply(pubkey, privkey), f1)
+
+
+def divide(pubkey, privkey):
+    factor = inv(decode_privkey(privkey), N)
+    return multiply(pubkey, factor)
+
+
+def compress(pubkey):
+    f = get_pubkey_format(pubkey)
+    if 'compressed' in f: return pubkey
+    elif f == 'bin': return encode_pubkey(decode_pubkey(pubkey, f), 'bin_compressed')
+    elif f == 'hex' or f == 'decimal':
+        return encode_pubkey(decode_pubkey(pubkey, f), 'hex_compressed')
+
+
+def decompress(pubkey):
+    f = get_pubkey_format(pubkey)
+    if 'compressed' not in f: return pubkey
+    elif f == 'bin_compressed': return encode_pubkey(decode_pubkey(pubkey, f), 'bin')
+    elif f == 'hex_compressed' or f == 'decimal':
+        return encode_pubkey(decode_pubkey(pubkey, f), 'hex')
+
+
+def privkey_to_pubkey(privkey):
+    f = get_privkey_format(privkey)
+    privkey = decode_privkey(privkey, f)
+    if privkey >= N:
+        raise Exception("Invalid privkey")
+    if f in ['bin', 'bin_compressed', 'hex', 'hex_compressed', 'decimal']:
+        return encode_pubkey(fast_multiply(G, privkey), f)
+    else:
+        return encode_pubkey(fast_multiply(G, privkey), f.replace('wif', 'hex'))
+
+privtopub = privkey_to_pubkey
+
+
+def privkey_to_address(priv, magicbyte=0):
+    return pubkey_to_address(privkey_to_pubkey(priv), magicbyte)
+privtoaddr = privkey_to_address
+
+
+def neg_pubkey(pubkey):
+    f = get_pubkey_format(pubkey)
+    pubkey = decode_pubkey(pubkey, f)
+    return encode_pubkey((pubkey[0], (P-pubkey[1]) % P), f)
+
+
+def neg_privkey(privkey):
+    f = get_privkey_format(privkey)
+    privkey = decode_privkey(privkey, f)
+    return encode_privkey((N - privkey) % N, f)
+
+def subtract_pubkeys(p1, p2):
+    f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2)
+    k2 = decode_pubkey(p2, f2)
+    return encode_pubkey(fast_add(decode_pubkey(p1, f1), (k2[0], (P - k2[1]) % P)), f1)
+
+
+def subtract_privkeys(p1, p2):
+    f1, f2 = get_privkey_format(p1), get_privkey_format(p2)
+    k2 = decode_privkey(p2, f2)
+    return encode_privkey((decode_privkey(p1, f1) - k2) % N, f1)
+
+# Hashes
+
+
+def bin_hash160(string):
+    intermed = hashlib.sha256(string).digest()
+    digest = ''
+    try:
+        digest = hashlib.new('ripemd160', intermed).digest()
+    except:
+        digest = RIPEMD160(intermed).digest()
+    return digest
+
+
+def hash160(string):
+    return safe_hexlify(bin_hash160(string))
+
+
+def bin_sha256(string):
+    binary_data = string if isinstance(string, bytes) else bytes(string, 'utf-8')
+    return hashlib.sha256(binary_data).digest()
+
+def sha256(string):
+    return bytes_to_hex_string(bin_sha256(string))
+
+
+def bin_ripemd160(string):
+    try:
+        digest = hashlib.new('ripemd160', string).digest()
+    except:
+        digest = RIPEMD160(string).digest()
+    return digest
+
+
+def ripemd160(string):
+    return safe_hexlify(bin_ripemd160(string))
+
+
+def bin_dbl_sha256(s):
+    bytes_to_hash = from_string_to_bytes(s)
+    return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest()
+
+
+def dbl_sha256(string):
+    return safe_hexlify(bin_dbl_sha256(string))
+
+
+def bin_slowsha(string):
+    string = from_string_to_bytes(string)
+    orig_input = string
+    for i in range(100000):
+        string = hashlib.sha256(string + orig_input).digest()
+    return string
+
+
+def slowsha(string):
+    return safe_hexlify(bin_slowsha(string))
+
+
+def hash_to_int(x):
+    if len(x) in [40, 64]:
+        return decode(x, 16)
+    return decode(x, 256)
+
+
+def num_to_var_int(x):
+    x = int(x)
+    if x < 253: return from_int_to_byte(x)
+    elif x < 65536: return from_int_to_byte(253)+encode(x, 256, 2)[::-1]
+    elif x < 4294967296: return from_int_to_byte(254) + encode(x, 256, 4)[::-1]
+    else: return from_int_to_byte(255) + encode(x, 256, 8)[::-1]
+
+
+# Electrum-style message hash: double-SHA256 over the "\x18Bitcoin Signed Message:\n"
+# prefix, a varint of the message length and the message itself.
+def electrum_sig_hash(message):
+    padded = b"\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + from_string_to_bytes(message)
+    return bin_dbl_sha256(padded)
+
+
+def random_key():
+    # Gotta be secure after that java.SecureRandom fiasco...
+    entropy = random_string(32) \
+        + str(random.randrange(2**256)) \
+        + str(int(time.time() * 1000000))
+    return sha256(entropy)
+
+
+def random_electrum_seed():
+    # random_string() keeps the concatenation str-safe on Python 3 (os.urandom() returns bytes)
+    entropy = random_string(32) \
+        + str(random.randrange(2**256)) \
+        + str(int(time.time() * 1000000))
+    return sha256(entropy)[:32]
+
+# Encodings
+
+def b58check_to_bin(inp):
+    leadingzbytes = len(re.match('^1*', inp).group(0))
+    data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
+    assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
+    return data[1:-4]
+
+
+def get_version_byte(inp):
+    leadingzbytes = len(re.match('^1*', inp).group(0))
+    data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
+    assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
+    return from_byte_to_int(data[0])  # ord() on a Python 2 str, identity on Python 3 bytes indexing
+
+
+def hex_to_b58check(inp, magicbyte=0):
+    return bin_to_b58check(binascii.unhexlify(inp), magicbyte)
+
+
+def b58check_to_hex(inp):
+    return safe_hexlify(b58check_to_bin(inp))
+
+
+def pubkey_to_address(pubkey, magicbyte=0):
+    if isinstance(pubkey, (list, tuple)):
+        pubkey = encode_pubkey(pubkey, 'bin')
+    if len(pubkey) in [66, 130]:
+        return bin_to_b58check(
+            bin_hash160(binascii.unhexlify(pubkey)), magicbyte)
+    return bin_to_b58check(bin_hash160(pubkey), magicbyte)
+
+pubtoaddr = pubkey_to_address
+
+# ECDSA
+
+
+def encode_sig(v, r, s):
+    vb, rb, sb = from_int_to_byte(v), encode(r, 256), encode(s, 256)
+    
+    result = base64.b64encode(vb+b'\x00'*(32-len(rb))+rb+b'\x00'*(32-len(sb))+sb)
+    return result if is_python2 else str(result, 'utf-8')
+
+
+def decode_sig(sig):
+    bytez = base64.b64decode(sig)
+    return from_byte_to_int(bytez[0]), decode(bytez[1:33], 256), decode(bytez[33:], 256)
+
+# https://tools.ietf.org/html/rfc6979#section-3.2
+
+
+def deterministic_generate_k(msghash, priv):
+    v = b'\x01' * 32
+    k = b'\x00' * 32
+    priv = encode_privkey(priv, 'bin')
+    msghash = encode(hash_to_int(msghash), 256, 32)
+    k = hmac.new(k, v+b'\x00'+priv+msghash, hashlib.sha256).digest()
+    v = hmac.new(k, v, hashlib.sha256).digest()
+    k = hmac.new(k, v+b'\x01'+priv+msghash, hashlib.sha256).digest()
+    v = hmac.new(k, v, hashlib.sha256).digest()
+    return decode(hmac.new(k, v, hashlib.sha256).digest(), 256)
+
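+# Determinism sketch (illustrative, not upstream code): with RFC 6979 the nonce k is a
+# pure function of the private key and message hash, so re-signing the same message
+# gives a byte-identical signature (see ecdsa_sign below).
+#
+#   priv = sha256('some seed')
+#   assert ecdsa_sign('hello', priv) == ecdsa_sign('hello', priv)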
+
+def ecdsa_raw_sign(msghash, priv):
+
+    z = hash_to_int(msghash)
+    k = deterministic_generate_k(msghash, priv)
+
+    r, y = fast_multiply(G, k)
+    s = inv(k, N) * (z + r*decode_privkey(priv)) % N
+
+    return 27+(y % 2), r, s
+
+
+def ecdsa_sign(msg, priv):
+    return encode_sig(*ecdsa_raw_sign(electrum_sig_hash(msg), priv))
+
+
+def ecdsa_raw_verify(msghash, vrs, pub):
+    v, r, s = vrs
+
+    w = inv(s, N)
+    z = hash_to_int(msghash)
+
+    u1, u2 = z*w % N, r*w % N
+    x, y = fast_add(fast_multiply(G, u1), fast_multiply(decode_pubkey(pub), u2))
+
+    return r == x
+
+
+def ecdsa_verify(msg, sig, pub):
+    return ecdsa_raw_verify(electrum_sig_hash(msg), decode_sig(sig), pub)
+
+
+def ecdsa_raw_recover(msghash, vrs):
+    v, r, s = vrs
+
+    x = r
+    beta = pow(x*x*x+A*x+B, (P+1)//4, P)
+    y = beta if v % 2 ^ beta % 2 else (P - beta)
+    z = hash_to_int(msghash)
+    Gz = jordan_multiply(((Gx, 1), (Gy, 1)), (N - z) % N)
+    XY = jordan_multiply(((x, 1), (y, 1)), s)
+    Qr = jordan_add(Gz, XY)
+    Q = jordan_multiply(Qr, inv(r, N))
+    Q = from_jordan(Q)
+
+    if ecdsa_raw_verify(msghash, vrs, Q):
+        return Q
+    return False
+
+
+def ecdsa_recover(msg, sig):
+    return encode_pubkey(ecdsa_raw_recover(electrum_sig_hash(msg), decode_sig(sig)), 'hex')
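+
+# End-to-end sketch (illustrative only, not part of the upstream module): signing,
+# verification and public key recovery are meant to compose as follows.
+#
+#   priv = random_key()
+#   pub = privtopub(priv)
+#   sig = ecdsa_sign('test message', priv)
+#   assert ecdsa_verify('test message', sig, pub)
+#   assert ecdsa_recover('test message', sig) == pub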
diff --git a/src/lib/pybitcointools/bitcoin/py2specials.py b/src/lib/pybitcointools/bitcoin/py2specials.py
new file mode 100644
index 00000000..9a08bb7e
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/py2specials.py
@@ -0,0 +1,94 @@
+import sys, re
+import binascii
+import os
+import hashlib
+
+
+if sys.version_info.major == 2:
+    string_types = (str, unicode)
+    string_or_bytes_types = string_types
+    int_types = (int, float, long)
+
+    # Base switching
+    code_strings = {
+        2: '01',
+        10: '0123456789',
+        16: '0123456789abcdef',
+        32: 'abcdefghijklmnopqrstuvwxyz234567',
+        58: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz',
+        256: ''.join([chr(x) for x in range(256)])
+    }
+
+    def bin_dbl_sha256(s):
+        bytes_to_hash = from_string_to_bytes(s)
+        return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest()
+
+    def lpad(msg, symbol, length):
+        if len(msg) >= length:
+            return msg
+        return symbol * (length - len(msg)) + msg
+
+    def get_code_string(base):
+        if base in code_strings:
+            return code_strings[base]
+        else:
+            raise ValueError("Invalid base!")
+
+    def changebase(string, frm, to, minlen=0):
+        if frm == to:
+            return lpad(string, get_code_string(frm)[0], minlen)
+        return encode(decode(string, frm), to, minlen)
+
+    def bin_to_b58check(inp, magicbyte=0):
+        inp_fmtd = chr(int(magicbyte)) + inp
+        leadingzbytes = len(re.match('^\x00*', inp_fmtd).group(0))
+        checksum = bin_dbl_sha256(inp_fmtd)[:4]
+        return '1' * leadingzbytes + changebase(inp_fmtd+checksum, 256, 58)
+
+    def bytes_to_hex_string(b):
+        return b.encode('hex')
+
+    def safe_from_hex(s):
+        return s.decode('hex')
+
+    def from_int_representation_to_bytes(a):
+        return str(a)
+
+    def from_int_to_byte(a):
+        return chr(a)
+
+    def from_byte_to_int(a):
+        return ord(a)
+
+    def from_bytes_to_string(s):
+        return s
+
+    def from_string_to_bytes(a):
+        return a
+
+    def safe_hexlify(a):
+        return binascii.hexlify(a)
+
+    def encode(val, base, minlen=0):
+        base, minlen = int(base), int(minlen)
+        code_string = get_code_string(base)
+        result = ""
+        while val > 0:
+            result = code_string[val % base] + result
+            val //= base
+        return code_string[0] * max(minlen - len(result), 0) + result
+
+    def decode(string, base):
+        base = int(base)
+        code_string = get_code_string(base)
+        result = 0
+        if base == 16:
+            string = string.lower()
+        while len(string) > 0:
+            result *= base
+            result += code_string.find(string[0])
+            string = string[1:]
+        return result
+
+    def random_string(x):
+        return os.urandom(x)
diff --git a/src/lib/pybitcointools/bitcoin/py3specials.py b/src/lib/pybitcointools/bitcoin/py3specials.py
new file mode 100644
index 00000000..949e1f7c
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/py3specials.py
@@ -0,0 +1,118 @@
+import sys, os
+import binascii
+import hashlib
+
+
+if sys.version_info.major == 3:
+    string_types = (str)
+    string_or_bytes_types = (str, bytes)
+    int_types = (int, float)
+    # Base switching
+    code_strings = {
+        2: '01',
+        10: '0123456789',
+        16: '0123456789abcdef',
+        32: 'abcdefghijklmnopqrstuvwxyz234567',
+        58: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz',
+        256: ''.join([chr(x) for x in range(256)])
+    }
+
+    def bin_dbl_sha256(s):
+        bytes_to_hash = from_string_to_bytes(s)
+        return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest()
+
+    def lpad(msg, symbol, length):
+        if len(msg) >= length:
+            return msg
+        return symbol * (length - len(msg)) + msg
+
+    def get_code_string(base):
+        if base in code_strings:
+            return code_strings[base]
+        else:
+            raise ValueError("Invalid base!")
+
+    def changebase(string, frm, to, minlen=0):
+        if frm == to:
+            return lpad(string, get_code_string(frm)[0], minlen)
+        return encode(decode(string, frm), to, minlen)
+
+    def bin_to_b58check(inp, magicbyte=0):
+        inp_fmtd = from_int_to_byte(magicbyte)+inp
+
+        leadingzbytes = 0
+        for x in inp_fmtd:
+            if x != 0:
+                break
+            leadingzbytes += 1
+
+        checksum = bin_dbl_sha256(inp_fmtd)[:4]
+        return '1' * leadingzbytes + changebase(inp_fmtd+checksum, 256, 58)
+
+    def bytes_to_hex_string(b):
+        if isinstance(b, str):
+            return b
+
+        return ''.join('{:02x}'.format(y) for y in b)
+
+    def safe_from_hex(s):
+        return bytes.fromhex(s)
+
+    def from_int_representation_to_bytes(a):
+        return bytes(str(a), 'utf-8')
+
+    def from_int_to_byte(a):
+        return bytes([a])
+
+    def from_byte_to_int(a):
+        return a
+
+    def from_string_to_bytes(a):
+        return a if isinstance(a, bytes) else bytes(a, 'utf-8')
+
+    def safe_hexlify(a):
+        return str(binascii.hexlify(a), 'utf-8')
+
+    def encode(val, base, minlen=0):
+        base, minlen = int(base), int(minlen)
+        code_string = get_code_string(base)
+        result_bytes = bytes()
+        while val > 0:
+            curcode = code_string[val % base]
+            result_bytes = bytes([ord(curcode)]) + result_bytes
+            val //= base
+
+        pad_size = minlen - len(result_bytes)
+
+        padding_element = b'\x00' if base == 256 else b'0'
+        if (pad_size > 0):
+            result_bytes = padding_element*pad_size + result_bytes
+
+        result_string = ''.join([chr(y) for y in result_bytes])
+        result = result_bytes if base == 256 else result_string
+
+        return result
+
+    def decode(string, base):
+        if base == 256 and isinstance(string, str):
+            string = bytes(bytearray.fromhex(string))
+        base = int(base)
+        code_string = get_code_string(base)
+        result = 0
+        if base == 256:
+            def extract(d, cs):
+                return d
+        else:
+            def extract(d, cs):
+                return cs.find(d if isinstance(d, str) else chr(d))
+
+        if base == 16:
+            string = string.lower()
+        while len(string) > 0:
+            result *= base
+            result += extract(string[0], code_string)
+            string = string[1:]
+        return result
+
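+    # Illustrative sketch (not upstream code): encode/decode/changebase treat a value as
+    # digits in the given base; base 256 maps to raw bytes, base 16 to lowercase hex text.
+    #
+    #   assert encode(65535, 16) == 'ffff'
+    #   assert decode('ffff', 16) == 65535
+    #   assert changebase('deadbeef', 16, 256) == b'\xde\xad\xbe\xef'
+    #   assert changebase(b'\xde\xad\xbe\xef', 256, 16) == 'deadbeef'
+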
+    def random_string(x):
+        return str(os.urandom(x))
diff --git a/src/lib/pybitcointools/bitcoin/ripemd.py b/src/lib/pybitcointools/bitcoin/ripemd.py
new file mode 100644
index 00000000..4b0c6045
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/ripemd.py
@@ -0,0 +1,414 @@
+## ripemd.py - pure Python implementation of the RIPEMD-160 algorithm.
+## Bjorn Edstrom <be@bjrn.se> 16 december 2007.
+##
+## Copyrights
+## ==========
+##
+## This code is a derived from an implementation by Markus Friedl which is
+## subject to the following license. This Python implementation is not
+## subject to any other license.
+##
+##/*
+## * Copyright (c) 2001 Markus Friedl.  All rights reserved.
+## *
+## * Redistribution and use in source and binary forms, with or without
+## * modification, are permitted provided that the following conditions
+## * are met:
+## * 1. Redistributions of source code must retain the above copyright
+## *    notice, this list of conditions and the following disclaimer.
+## * 2. Redistributions in binary form must reproduce the above copyright
+## *    notice, this list of conditions and the following disclaimer in the
+## *    documentation and/or other materials provided with the distribution.
+## *
+## * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+## * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+## * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+## * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+## * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+## * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+## * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+## * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+## * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+## * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+## */
+##/*
+## * Preneel, Bosselaers, Dobbertin, "The Cryptographic Hash Function RIPEMD-160",
+## * RSA Laboratories, CryptoBytes, Volume 3, Number 2, Autumn 1997,
+## * ftp://ftp.rsasecurity.com/pub/cryptobytes/crypto3n2.pdf
+## */
+
+try:
+    import psyco
+    psyco.full()
+except ImportError:
+    pass
+
+import sys
+
+is_python2 = sys.version_info.major == 2
+#block_size = 1
+digest_size = 20
+digestsize = 20
+
+try:
+    range = xrange
+except:
+    pass
+
+class RIPEMD160:
+    """Return a new RIPEMD160 object. An optional string argument
+    may be provided; if present, this string will be automatically
+    hashed."""
+    
+    def __init__(self, arg=None):
+        self.ctx = RMDContext()
+        if arg:
+            self.update(arg)
+        self.dig = None
+        
+    def update(self, arg):
+        """update(arg)"""        
+        RMD160Update(self.ctx, arg, len(arg))
+        self.dig = None
+        
+    def digest(self):
+        """digest()"""        
+        if self.dig:
+            return self.dig
+        ctx = self.ctx.copy()
+        self.dig = RMD160Final(self.ctx)
+        self.ctx = ctx
+        return self.dig
+    
+    def hexdigest(self):
+        """hexdigest()"""
+        dig = self.digest()
+        hex_digest = ''
+        for d in dig:
+            if (is_python2):
+                hex_digest += '%02x' % ord(d)
+            else:
+                hex_digest += '%02x' % d
+        return hex_digest
+    
+    def copy(self):
+        """copy()"""        
+        import copy
+        return copy.deepcopy(self)
+
+
+
+def new(arg=None):
+    """Return a new RIPEMD160 object. An optional string argument
+    may be provided; if present, this string will be automatically
+    hashed."""    
+    return RIPEMD160(arg)
+
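+# Quick check (illustrative; the module-level asserts at the bottom of this file test
+# the same against the standard RIPEMD-160 vectors):
+#
+#   assert new('abc').hexdigest() == '8eb208f7e05d987a9b044a8e98c6b087f15a0bfc'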
+
+
+#
+# Private.
+#
+
+class RMDContext:
+    def __init__(self):
+        self.state = [0x67452301, 0xEFCDAB89, 0x98BADCFE,
+                      0x10325476, 0xC3D2E1F0] # uint32
+        self.count = 0 # uint64
+        self.buffer = [0]*64 # uchar
+    def copy(self):
+        ctx = RMDContext()
+        ctx.state = self.state[:]
+        ctx.count = self.count
+        ctx.buffer = self.buffer[:]
+        return ctx
+
+K0 = 0x00000000
+K1 = 0x5A827999
+K2 = 0x6ED9EBA1
+K3 = 0x8F1BBCDC
+K4 = 0xA953FD4E
+
+KK0 = 0x50A28BE6
+KK1 = 0x5C4DD124
+KK2 = 0x6D703EF3
+KK3 = 0x7A6D76E9
+KK4 = 0x00000000
+
+def ROL(n, x):
+    return ((x << n) & 0xffffffff) | (x >> (32 - n))
+
+def F0(x, y, z):
+    return x ^ y ^ z
+
+def F1(x, y, z):
+    return (x & y) | (((~x) % 0x100000000) & z)
+
+def F2(x, y, z):
+    return (x | ((~y) % 0x100000000)) ^ z
+
+def F3(x, y, z):
+    return (x & z) | (((~z) % 0x100000000) & y)
+
+def F4(x, y, z):
+    return x ^ (y | ((~z) % 0x100000000))
+
+def R(a, b, c, d, e, Fj, Kj, sj, rj, X):
+    a = ROL(sj, (a + Fj(b, c, d) + X[rj] + Kj) % 0x100000000) + e
+    c = ROL(10, c)
+    return a % 0x100000000, c
+
+PADDING = [0x80] + [0]*63
+
+import sys
+import struct
+
+def RMD160Transform(state, block): #uint32 state[5], uchar block[64]
+    x = [0]*16
+    if sys.byteorder == 'little':
+        if is_python2:
+            x = struct.unpack('<16L', ''.join([chr(x) for x in block[0:64]]))
+        else:
+            x = struct.unpack('<16L', bytes(block[0:64]))
+    else:
+        raise Exception("Only little-endian byte order is supported")
+    a = state[0]
+    b = state[1]
+    c = state[2]
+    d = state[3]
+    e = state[4]
+
+    #/* Round 1 */
+    a, c = R(a, b, c, d, e, F0, K0, 11,  0, x);
+    e, b = R(e, a, b, c, d, F0, K0, 14,  1, x);
+    d, a = R(d, e, a, b, c, F0, K0, 15,  2, x);
+    c, e = R(c, d, e, a, b, F0, K0, 12,  3, x);
+    b, d = R(b, c, d, e, a, F0, K0,  5,  4, x);
+    a, c = R(a, b, c, d, e, F0, K0,  8,  5, x);
+    e, b = R(e, a, b, c, d, F0, K0,  7,  6, x);
+    d, a = R(d, e, a, b, c, F0, K0,  9,  7, x);
+    c, e = R(c, d, e, a, b, F0, K0, 11,  8, x);
+    b, d = R(b, c, d, e, a, F0, K0, 13,  9, x);
+    a, c = R(a, b, c, d, e, F0, K0, 14, 10, x);
+    e, b = R(e, a, b, c, d, F0, K0, 15, 11, x);
+    d, a = R(d, e, a, b, c, F0, K0,  6, 12, x);
+    c, e = R(c, d, e, a, b, F0, K0,  7, 13, x);
+    b, d = R(b, c, d, e, a, F0, K0,  9, 14, x);
+    a, c = R(a, b, c, d, e, F0, K0,  8, 15, x); #/* #15 */
+    #/* Round 2 */
+    e, b = R(e, a, b, c, d, F1, K1,  7,  7, x);
+    d, a = R(d, e, a, b, c, F1, K1,  6,  4, x);
+    c, e = R(c, d, e, a, b, F1, K1,  8, 13, x);
+    b, d = R(b, c, d, e, a, F1, K1, 13,  1, x);
+    a, c = R(a, b, c, d, e, F1, K1, 11, 10, x);
+    e, b = R(e, a, b, c, d, F1, K1,  9,  6, x);
+    d, a = R(d, e, a, b, c, F1, K1,  7, 15, x);
+    c, e = R(c, d, e, a, b, F1, K1, 15,  3, x);
+    b, d = R(b, c, d, e, a, F1, K1,  7, 12, x);
+    a, c = R(a, b, c, d, e, F1, K1, 12,  0, x);
+    e, b = R(e, a, b, c, d, F1, K1, 15,  9, x);
+    d, a = R(d, e, a, b, c, F1, K1,  9,  5, x);
+    c, e = R(c, d, e, a, b, F1, K1, 11,  2, x);
+    b, d = R(b, c, d, e, a, F1, K1,  7, 14, x);
+    a, c = R(a, b, c, d, e, F1, K1, 13, 11, x);
+    e, b = R(e, a, b, c, d, F1, K1, 12,  8, x); #/* #31 */
+    #/* Round 3 */
+    d, a = R(d, e, a, b, c, F2, K2, 11,  3, x);
+    c, e = R(c, d, e, a, b, F2, K2, 13, 10, x);
+    b, d = R(b, c, d, e, a, F2, K2,  6, 14, x);
+    a, c = R(a, b, c, d, e, F2, K2,  7,  4, x);
+    e, b = R(e, a, b, c, d, F2, K2, 14,  9, x);
+    d, a = R(d, e, a, b, c, F2, K2,  9, 15, x);
+    c, e = R(c, d, e, a, b, F2, K2, 13,  8, x);
+    b, d = R(b, c, d, e, a, F2, K2, 15,  1, x);
+    a, c = R(a, b, c, d, e, F2, K2, 14,  2, x);
+    e, b = R(e, a, b, c, d, F2, K2,  8,  7, x);
+    d, a = R(d, e, a, b, c, F2, K2, 13,  0, x);
+    c, e = R(c, d, e, a, b, F2, K2,  6,  6, x);
+    b, d = R(b, c, d, e, a, F2, K2,  5, 13, x);
+    a, c = R(a, b, c, d, e, F2, K2, 12, 11, x);
+    e, b = R(e, a, b, c, d, F2, K2,  7,  5, x);
+    d, a = R(d, e, a, b, c, F2, K2,  5, 12, x); #/* #47 */
+    #/* Round 4 */
+    c, e = R(c, d, e, a, b, F3, K3, 11,  1, x);
+    b, d = R(b, c, d, e, a, F3, K3, 12,  9, x);
+    a, c = R(a, b, c, d, e, F3, K3, 14, 11, x);
+    e, b = R(e, a, b, c, d, F3, K3, 15, 10, x);
+    d, a = R(d, e, a, b, c, F3, K3, 14,  0, x);
+    c, e = R(c, d, e, a, b, F3, K3, 15,  8, x);
+    b, d = R(b, c, d, e, a, F3, K3,  9, 12, x);
+    a, c = R(a, b, c, d, e, F3, K3,  8,  4, x);
+    e, b = R(e, a, b, c, d, F3, K3,  9, 13, x);
+    d, a = R(d, e, a, b, c, F3, K3, 14,  3, x);
+    c, e = R(c, d, e, a, b, F3, K3,  5,  7, x);
+    b, d = R(b, c, d, e, a, F3, K3,  6, 15, x);
+    a, c = R(a, b, c, d, e, F3, K3,  8, 14, x);
+    e, b = R(e, a, b, c, d, F3, K3,  6,  5, x);
+    d, a = R(d, e, a, b, c, F3, K3,  5,  6, x);
+    c, e = R(c, d, e, a, b, F3, K3, 12,  2, x); #/* #63 */
+    #/* Round 5 */
+    b, d = R(b, c, d, e, a, F4, K4,  9,  4, x);
+    a, c = R(a, b, c, d, e, F4, K4, 15,  0, x);
+    e, b = R(e, a, b, c, d, F4, K4,  5,  5, x);
+    d, a = R(d, e, a, b, c, F4, K4, 11,  9, x);
+    c, e = R(c, d, e, a, b, F4, K4,  6,  7, x);
+    b, d = R(b, c, d, e, a, F4, K4,  8, 12, x);
+    a, c = R(a, b, c, d, e, F4, K4, 13,  2, x);
+    e, b = R(e, a, b, c, d, F4, K4, 12, 10, x);
+    d, a = R(d, e, a, b, c, F4, K4,  5, 14, x);
+    c, e = R(c, d, e, a, b, F4, K4, 12,  1, x);
+    b, d = R(b, c, d, e, a, F4, K4, 13,  3, x);
+    a, c = R(a, b, c, d, e, F4, K4, 14,  8, x);
+    e, b = R(e, a, b, c, d, F4, K4, 11, 11, x);
+    d, a = R(d, e, a, b, c, F4, K4,  8,  6, x);
+    c, e = R(c, d, e, a, b, F4, K4,  5, 15, x);
+    b, d = R(b, c, d, e, a, F4, K4,  6, 13, x); #/* #79 */
+
+    aa = a;
+    bb = b;
+    cc = c;
+    dd = d;
+    ee = e;
+
+    a = state[0]
+    b = state[1]
+    c = state[2]
+    d = state[3]
+    e = state[4]    
+
+    #/* Parallel round 1 */
+    a, c = R(a, b, c, d, e, F4, KK0,  8,  5, x)
+    e, b = R(e, a, b, c, d, F4, KK0,  9, 14, x)
+    d, a = R(d, e, a, b, c, F4, KK0,  9,  7, x)
+    c, e = R(c, d, e, a, b, F4, KK0, 11,  0, x)
+    b, d = R(b, c, d, e, a, F4, KK0, 13,  9, x)
+    a, c = R(a, b, c, d, e, F4, KK0, 15,  2, x)
+    e, b = R(e, a, b, c, d, F4, KK0, 15, 11, x)
+    d, a = R(d, e, a, b, c, F4, KK0,  5,  4, x)
+    c, e = R(c, d, e, a, b, F4, KK0,  7, 13, x)
+    b, d = R(b, c, d, e, a, F4, KK0,  7,  6, x)
+    a, c = R(a, b, c, d, e, F4, KK0,  8, 15, x)
+    e, b = R(e, a, b, c, d, F4, KK0, 11,  8, x)
+    d, a = R(d, e, a, b, c, F4, KK0, 14,  1, x)
+    c, e = R(c, d, e, a, b, F4, KK0, 14, 10, x)
+    b, d = R(b, c, d, e, a, F4, KK0, 12,  3, x)
+    a, c = R(a, b, c, d, e, F4, KK0,  6, 12, x) #/* #15 */
+    #/* Parallel round 2 */
+    e, b = R(e, a, b, c, d, F3, KK1,  9,  6, x)
+    d, a = R(d, e, a, b, c, F3, KK1, 13, 11, x)
+    c, e = R(c, d, e, a, b, F3, KK1, 15,  3, x)
+    b, d = R(b, c, d, e, a, F3, KK1,  7,  7, x)
+    a, c = R(a, b, c, d, e, F3, KK1, 12,  0, x)
+    e, b = R(e, a, b, c, d, F3, KK1,  8, 13, x)
+    d, a = R(d, e, a, b, c, F3, KK1,  9,  5, x)
+    c, e = R(c, d, e, a, b, F3, KK1, 11, 10, x)
+    b, d = R(b, c, d, e, a, F3, KK1,  7, 14, x)
+    a, c = R(a, b, c, d, e, F3, KK1,  7, 15, x)
+    e, b = R(e, a, b, c, d, F3, KK1, 12,  8, x)
+    d, a = R(d, e, a, b, c, F3, KK1,  7, 12, x)
+    c, e = R(c, d, e, a, b, F3, KK1,  6,  4, x)
+    b, d = R(b, c, d, e, a, F3, KK1, 15,  9, x)
+    a, c = R(a, b, c, d, e, F3, KK1, 13,  1, x)
+    e, b = R(e, a, b, c, d, F3, KK1, 11,  2, x) #/* #31 */
+    #/* Parallel round 3 */
+    d, a = R(d, e, a, b, c, F2, KK2,  9, 15, x)
+    c, e = R(c, d, e, a, b, F2, KK2,  7,  5, x)
+    b, d = R(b, c, d, e, a, F2, KK2, 15,  1, x)
+    a, c = R(a, b, c, d, e, F2, KK2, 11,  3, x)
+    e, b = R(e, a, b, c, d, F2, KK2,  8,  7, x)
+    d, a = R(d, e, a, b, c, F2, KK2,  6, 14, x)
+    c, e = R(c, d, e, a, b, F2, KK2,  6,  6, x)
+    b, d = R(b, c, d, e, a, F2, KK2, 14,  9, x)
+    a, c = R(a, b, c, d, e, F2, KK2, 12, 11, x)
+    e, b = R(e, a, b, c, d, F2, KK2, 13,  8, x)
+    d, a = R(d, e, a, b, c, F2, KK2,  5, 12, x)
+    c, e = R(c, d, e, a, b, F2, KK2, 14,  2, x)
+    b, d = R(b, c, d, e, a, F2, KK2, 13, 10, x)
+    a, c = R(a, b, c, d, e, F2, KK2, 13,  0, x)
+    e, b = R(e, a, b, c, d, F2, KK2,  7,  4, x)
+    d, a = R(d, e, a, b, c, F2, KK2,  5, 13, x) #/* #47 */
+    #/* Parallel round 4 */
+    c, e = R(c, d, e, a, b, F1, KK3, 15,  8, x)
+    b, d = R(b, c, d, e, a, F1, KK3,  5,  6, x)
+    a, c = R(a, b, c, d, e, F1, KK3,  8,  4, x)
+    e, b = R(e, a, b, c, d, F1, KK3, 11,  1, x)
+    d, a = R(d, e, a, b, c, F1, KK3, 14,  3, x)
+    c, e = R(c, d, e, a, b, F1, KK3, 14, 11, x)
+    b, d = R(b, c, d, e, a, F1, KK3,  6, 15, x)
+    a, c = R(a, b, c, d, e, F1, KK3, 14,  0, x)
+    e, b = R(e, a, b, c, d, F1, KK3,  6,  5, x)
+    d, a = R(d, e, a, b, c, F1, KK3,  9, 12, x)
+    c, e = R(c, d, e, a, b, F1, KK3, 12,  2, x)
+    b, d = R(b, c, d, e, a, F1, KK3,  9, 13, x)
+    a, c = R(a, b, c, d, e, F1, KK3, 12,  9, x)
+    e, b = R(e, a, b, c, d, F1, KK3,  5,  7, x)
+    d, a = R(d, e, a, b, c, F1, KK3, 15, 10, x)
+    c, e = R(c, d, e, a, b, F1, KK3,  8, 14, x) #/* #63 */
+    #/* Parallel round 5 */
+    b, d = R(b, c, d, e, a, F0, KK4,  8, 12, x)
+    a, c = R(a, b, c, d, e, F0, KK4,  5, 15, x)
+    e, b = R(e, a, b, c, d, F0, KK4, 12, 10, x)
+    d, a = R(d, e, a, b, c, F0, KK4,  9,  4, x)
+    c, e = R(c, d, e, a, b, F0, KK4, 12,  1, x)
+    b, d = R(b, c, d, e, a, F0, KK4,  5,  5, x)
+    a, c = R(a, b, c, d, e, F0, KK4, 14,  8, x)
+    e, b = R(e, a, b, c, d, F0, KK4,  6,  7, x)
+    d, a = R(d, e, a, b, c, F0, KK4,  8,  6, x)
+    c, e = R(c, d, e, a, b, F0, KK4, 13,  2, x)
+    b, d = R(b, c, d, e, a, F0, KK4,  6, 13, x)
+    a, c = R(a, b, c, d, e, F0, KK4,  5, 14, x)
+    e, b = R(e, a, b, c, d, F0, KK4, 15,  0, x)
+    d, a = R(d, e, a, b, c, F0, KK4, 13,  3, x)
+    c, e = R(c, d, e, a, b, F0, KK4, 11,  9, x)
+    b, d = R(b, c, d, e, a, F0, KK4, 11, 11, x) #/* #79 */
+
+    t = (state[1] + cc + d) % 0x100000000;
+    state[1] = (state[2] + dd + e) % 0x100000000;
+    state[2] = (state[3] + ee + a) % 0x100000000;
+    state[3] = (state[4] + aa + b) % 0x100000000;
+    state[4] = (state[0] + bb + c) % 0x100000000;
+    state[0] = t % 0x100000000;
+
+    pass
+
+
+def RMD160Update(ctx, inp, inplen):
+    if type(inp) == str:
+        inp = [ord(i)&0xff for i in inp]
+    
+    have = int((ctx.count // 8) % 64)
+    inplen = int(inplen)
+    need = 64 - have
+    ctx.count += 8 * inplen
+    off = 0
+    if inplen >= need:
+        if have:
+            for i in range(need):
+                ctx.buffer[have+i] = inp[i]
+            RMD160Transform(ctx.state, ctx.buffer)
+            off = need
+            have = 0
+        while off + 64 <= inplen:
+            RMD160Transform(ctx.state, inp[off:]) #<---
+            off += 64
+    if off < inplen:
+        # memcpy(ctx->buffer + have, input+off, len-off);
+        for i in range(inplen - off):
+            ctx.buffer[have+i] = inp[off+i]
+
+def RMD160Final(ctx):
+    size = struct.pack("<Q", ctx.count)
+    padlen = 64 - ((ctx.count // 8) % 64)
+    if padlen < 1+8:
+        padlen += 64
+    RMD160Update(ctx, PADDING, padlen-8)
+    RMD160Update(ctx, size, 8)
+    return struct.pack("<5L", *ctx.state)
+
+
+assert '37f332f68db77bd9d7edd4969571ad671cf9dd3b' == \
+       new('The quick brown fox jumps over the lazy dog').hexdigest()
+assert '132072df690933835eb8b6ad0b77e7b6f14acad7' == \
+       new('The quick brown fox jumps over the lazy cog').hexdigest()
+assert '9c1185a5c5e9fc54612808977ee8f548b2258d31' == \
+       new('').hexdigest()
diff --git a/src/lib/pybitcointools/bitcoin/stealth.py b/src/lib/pybitcointools/bitcoin/stealth.py
new file mode 100644
index 00000000..7940f99d
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/stealth.py
@@ -0,0 +1,100 @@
+from . import main
+from . import transaction as tx
+
+# Shared secrets and uncovering pay keys
+
+
+def shared_secret_sender(scan_pubkey, ephem_privkey):
+    shared_point = main.multiply(scan_pubkey, ephem_privkey)
+    shared_secret = main.sha256(main.encode_pubkey(shared_point, 'bin_compressed'))
+    return shared_secret
+
+
+def shared_secret_receiver(ephem_pubkey, scan_privkey):
+    shared_point = main.multiply(ephem_pubkey, scan_privkey)
+    shared_secret = main.sha256(main.encode_pubkey(shared_point, 'bin_compressed'))
+    return shared_secret
+
+
+def uncover_pay_pubkey_sender(scan_pubkey, spend_pubkey, ephem_privkey):
+    shared_secret = shared_secret_sender(scan_pubkey, ephem_privkey)
+    return main.add_pubkeys(spend_pubkey, main.privtopub(shared_secret))
+
+
+def uncover_pay_pubkey_receiver(scan_privkey, spend_pubkey, ephem_pubkey):
+    shared_secret = shared_secret_receiver(ephem_pubkey, scan_privkey)
+    return main.add_pubkeys(spend_pubkey, main.privtopub(shared_secret))
+
+
+def uncover_pay_privkey(scan_privkey, spend_privkey, ephem_pubkey):
+    shared_secret = shared_secret_receiver(ephem_pubkey, scan_privkey)
+    return main.add_privkeys(spend_privkey, shared_secret)
+
+# Address encoding
+
+# Functions for basic stealth addresses,
+# i.e. one scan key, one spend key, no prefix
+
+
+def pubkeys_to_basic_stealth_address(scan_pubkey, spend_pubkey, magic_byte=42):
+    # magic_byte = 42 for mainnet, 43 for testnet.
+    hex_scankey = main.encode_pubkey(scan_pubkey, 'hex_compressed')
+    hex_spendkey = main.encode_pubkey(spend_pubkey, 'hex_compressed')
+    hex_data = '00{0:066x}01{1:066x}0100'.format(int(hex_scankey, 16), int(hex_spendkey, 16))
+    addr = main.hex_to_b58check(hex_data, magic_byte)
+    return addr
+
+
+def basic_stealth_address_to_pubkeys(stealth_address):
+    hex_data = main.b58check_to_hex(stealth_address)
+    if len(hex_data) != 140:
+        raise Exception('Stealth address is not of basic type (one scan key, one spend key, no prefix)')
+
+    scan_pubkey = hex_data[2:68]
+    spend_pubkey = hex_data[70:136]
+    return scan_pubkey, spend_pubkey
+
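+# Round-trip sketch (illustrative, not upstream code): a basic stealth address packs
+# one compressed scan key and one compressed spend key behind a single Base58Check
+# string, and can be unpacked again.
+#
+#   scan_pub = main.privtopub(main.encode_privkey(main.sha256('scan'), 'hex_compressed'))
+#   spend_pub = main.privtopub(main.encode_privkey(main.sha256('spend'), 'hex_compressed'))
+#   addr = pubkeys_to_basic_stealth_address(scan_pub, spend_pub)
+#   assert basic_stealth_address_to_pubkeys(addr) == (scan_pub, spend_pub)
+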
+# Sending stealth payments
+
+
+def mk_stealth_metadata_script(ephem_pubkey, nonce):
+    op_return = '6a'
+    msg_size = '26'
+    version = '06'
+    return op_return + msg_size + version + '{0:08x}'.format(nonce) + main.encode_pubkey(ephem_pubkey, 'hex_compressed')
+
+
+def mk_stealth_tx_outputs(stealth_addr, value, ephem_privkey, nonce, network='btc'):
+
+    scan_pubkey, spend_pubkey = basic_stealth_address_to_pubkeys(stealth_addr)
+
+    if network == 'btc':
+        btc_magic_byte = 42
+        if stealth_addr != pubkeys_to_basic_stealth_address(scan_pubkey, spend_pubkey, btc_magic_byte):
+            raise Exception('Invalid btc mainnet stealth address: ' + stealth_addr)
+        magic_byte_addr = 0
+
+    elif network == 'testnet':
+        testnet_magic_byte = 43
+        if stealth_addr != pubkeys_to_basic_stealth_address(scan_pubkey, spend_pubkey, testnet_magic_byte):
+            raise Exception('Invalid testnet stealth address: ' + stealth_addr)
+        magic_byte_addr = 111
+
+    else:
+        raise Exception('Unsupported network: ' + network)
+
+    ephem_pubkey = main.privkey_to_pubkey(ephem_privkey)
+    output0 = {'script': mk_stealth_metadata_script(ephem_pubkey, nonce),
+               'value': 0}
+
+    pay_pubkey = uncover_pay_pubkey_sender(scan_pubkey, spend_pubkey, ephem_privkey)
+    pay_addr = main.pubkey_to_address(pay_pubkey, magic_byte_addr)
+    output1 = {'address': pay_addr,
+               'value': value}
+
+    return [output0, output1]
+
+# Receiving stealth payments
+
+
+def ephem_pubkey_from_tx_script(stealth_tx_script):
+    if len(stealth_tx_script) != 80:
+        raise Exception('Wrong format for stealth tx output')
+    return stealth_tx_script[14:]
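+
+# End-to-end sketch (illustrative only): sender and receiver should derive the same
+# one-time payment key, the sender from the scan *pubkey* and an ephemeral privkey,
+# the receiver from the scan/spend *privkeys* and the ephemeral pubkey carried in the
+# OP_RETURN metadata output.
+#
+#   scan_priv, spend_priv, ephem_priv = [main.sha256(s) for s in ('a', 'b', 'c')]
+#   scan_pub, spend_pub = main.privtopub(scan_priv), main.privtopub(spend_priv)
+#   ephem_pub = main.privtopub(ephem_priv)
+#   pay_pub = uncover_pay_pubkey_sender(scan_pub, spend_pub, ephem_priv)
+#   pay_priv = uncover_pay_privkey(scan_priv, spend_priv, ephem_pub)
+#   assert main.privtopub(pay_priv) == pay_pub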
diff --git a/src/lib/pybitcointools/bitcoin/transaction.py b/src/lib/pybitcointools/bitcoin/transaction.py
new file mode 100644
index 00000000..90a6b67a
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/transaction.py
@@ -0,0 +1,482 @@
+#!/usr/bin/python
+import binascii, re, json, copy, sys
+from .main import *
+from _functools import reduce
+
+### Hex to bin converter and vice versa for objects
+
+
+def json_is_base(obj, base):
+    if not is_python2 and isinstance(obj, bytes):
+        return False
+    
+    alpha = get_code_string(base)
+    if isinstance(obj, string_types):
+        for i in range(len(obj)):
+            if alpha.find(obj[i]) == -1:
+                return False
+        return True
+    elif isinstance(obj, int_types) or obj is None:
+        return True
+    elif isinstance(obj, list):
+        for i in range(len(obj)):
+            if not json_is_base(obj[i], base):
+                return False
+        return True
+    else:
+        for x in obj:
+            if not json_is_base(obj[x], base):
+                return False
+        return True
+
+
+def json_changebase(obj, changer):
+    if isinstance(obj, string_or_bytes_types):
+        return changer(obj)
+    elif isinstance(obj, int_types) or obj is None:
+        return obj
+    elif isinstance(obj, list):
+        return [json_changebase(x, changer) for x in obj]
+    return dict((x, json_changebase(obj[x], changer)) for x in obj)
+
+# Transaction serialization and deserialization
+
+
+def deserialize(tx):
+    if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
+        #tx = bytes(bytearray.fromhex(tx))
+        return json_changebase(deserialize(binascii.unhexlify(tx)),
+                              lambda x: safe_hexlify(x))
+    # http://stackoverflow.com/questions/4851463/python-closure-write-to-variable-in-parent-scope
+    # Python's scoping rules are demented, requiring me to make pos an object
+    # so that it is call-by-reference
+    pos = [0]
+
+    def read_as_int(bytez):
+        pos[0] += bytez
+        return decode(tx[pos[0]-bytez:pos[0]][::-1], 256)
+
+    def read_var_int():
+        pos[0] += 1
+        
+        val = from_byte_to_int(tx[pos[0]-1])
+        if val < 253:
+            return val
+        return read_as_int(pow(2, val - 252))
+
+    def read_bytes(bytez):
+        pos[0] += bytez
+        return tx[pos[0]-bytez:pos[0]]
+
+    def read_var_string():
+        size = read_var_int()
+        return read_bytes(size)
+
+    obj = {"ins": [], "outs": []}
+    obj["version"] = read_as_int(4)
+    ins = read_var_int()
+    for i in range(ins):
+        obj["ins"].append({
+            "outpoint": {
+                "hash": read_bytes(32)[::-1],
+                "index": read_as_int(4)
+            },
+            "script": read_var_string(),
+            "sequence": read_as_int(4)
+        })
+    outs = read_var_int()
+    for i in range(outs):
+        obj["outs"].append({
+            "value": read_as_int(8),
+            "script": read_var_string()
+        })
+    obj["locktime"] = read_as_int(4)
+    return obj
+
+def serialize(txobj):
+    #if isinstance(txobj, bytes):
+    #    txobj = bytes_to_hex_string(txobj)
+    o = []
+    if json_is_base(txobj, 16):
+        json_changedbase = json_changebase(txobj, lambda x: binascii.unhexlify(x))
+        hexlified = safe_hexlify(serialize(json_changedbase))
+        return hexlified
+    o.append(encode(txobj["version"], 256, 4)[::-1])
+    o.append(num_to_var_int(len(txobj["ins"])))
+    for inp in txobj["ins"]:
+        o.append(inp["outpoint"]["hash"][::-1])
+        o.append(encode(inp["outpoint"]["index"], 256, 4)[::-1])
+        o.append(num_to_var_int(len(inp["script"]))+(inp["script"] if inp["script"] or is_python2 else bytes()))
+        o.append(encode(inp["sequence"], 256, 4)[::-1])
+    o.append(num_to_var_int(len(txobj["outs"])))
+    for out in txobj["outs"]:
+        o.append(encode(out["value"], 256, 8)[::-1])
+        o.append(num_to_var_int(len(out["script"]))+out["script"])
+    o.append(encode(txobj["locktime"], 256, 4)[::-1])
+
+    return ''.join(o) if is_python2 else reduce(lambda x,y: x+y, o, bytes())
+
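+# Round-trip sketch (illustrative; TestSerialize in test.py checks the same property):
+# deserialize() produces a dict with 'version', 'ins', 'outs' and 'locktime', and
+# serialize() turns that dict back into the identical hex string.
+#
+#   raw = '0100000001...'          # any valid raw transaction hex
+#   assert serialize(deserialize(raw)) == raw
+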
+# Hashing transactions for signing
+
+SIGHASH_ALL = 1
+SIGHASH_NONE = 2
+SIGHASH_SINGLE = 3
+# this works like SIGHASH_ANYONECANPAY | SIGHASH_ALL, might as well make it explicit while
+# we fix the constant
+SIGHASH_ANYONECANPAY = 0x81
+
+
+def signature_form(tx, i, script, hashcode=SIGHASH_ALL):
+    i, hashcode = int(i), int(hashcode)
+    if isinstance(tx, string_or_bytes_types):
+        return serialize(signature_form(deserialize(tx), i, script, hashcode))
+    newtx = copy.deepcopy(tx)
+    for inp in newtx["ins"]:
+        inp["script"] = ""
+    newtx["ins"][i]["script"] = script
+    if hashcode == SIGHASH_NONE:
+        newtx["outs"] = []
+    elif hashcode == SIGHASH_SINGLE:
+        newtx["outs"] = newtx["outs"][:len(newtx["ins"])]
+        # Blank the kept outputs before the last one: maximal value, empty script
+        for out in newtx["outs"][:len(newtx["ins"]) - 1]:
+            out["value"] = 2**64 - 1
+            out["script"] = ""
+    elif hashcode == SIGHASH_ANYONECANPAY:
+        newtx["ins"] = [newtx["ins"][i]]
+    else:
+        pass
+    return newtx
+
+# Making the actual signatures
+
+
+def der_encode_sig(v, r, s):
+    b1, b2 = safe_hexlify(encode(r, 256)), safe_hexlify(encode(s, 256))
+    if r >= 2**255:
+        b1 = '00' + b1
+    if s >= 2**255:
+        b2 = '00' + b2
+    left = '02'+encode(len(b1)//2, 16, 2)+b1
+    right = '02'+encode(len(b2)//2, 16, 2)+b2
+    return '30'+encode(len(left+right)//2, 16, 2)+left+right
+
+
+def der_decode_sig(sig):
+    leftlen = decode(sig[6:8], 16)*2
+    left = sig[8:8+leftlen]
+    rightlen = decode(sig[10+leftlen:12+leftlen], 16)*2
+    right = sig[12+leftlen:12+leftlen+rightlen]
+    return (None, decode(left, 16), decode(right, 16))
+
+
+def txhash(tx, hashcode=None):
+    if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
+        tx = changebase(tx, 16, 256)
+    if hashcode:
+        return dbl_sha256(from_string_to_bytes(tx) + encode(int(hashcode), 256, 4)[::-1])
+    else:
+        return safe_hexlify(bin_dbl_sha256(tx)[::-1])
+
+
+def bin_txhash(tx, hashcode=None):
+    return binascii.unhexlify(txhash(tx, hashcode))
+
+
+def ecdsa_tx_sign(tx, priv, hashcode=SIGHASH_ALL):
+    rawsig = ecdsa_raw_sign(bin_txhash(tx, hashcode), priv)
+    return der_encode_sig(*rawsig)+encode(hashcode, 16, 2)
+
+
+def ecdsa_tx_verify(tx, sig, pub, hashcode=SIGHASH_ALL):
+    return ecdsa_raw_verify(bin_txhash(tx, hashcode), der_decode_sig(sig), pub)
+
+
+def ecdsa_tx_recover(tx, sig, hashcode=SIGHASH_ALL):
+    z = bin_txhash(tx, hashcode)
+    _, r, s = der_decode_sig(sig)
+    left = ecdsa_raw_recover(z, (0, r, s))
+    right = ecdsa_raw_recover(z, (1, r, s))
+    return (encode_pubkey(left, 'hex'), encode_pubkey(right, 'hex'))
+
+# Scripts
+
+
+def mk_pubkey_script(addr):
+    # Keep the auxiliary functions around for altcoins' sake
+    return '76a914' + b58check_to_hex(addr) + '88ac'
+
+
+def mk_scripthash_script(addr):
+    return 'a914' + b58check_to_hex(addr) + '87'
+
+# Address representation to output script
+
+
+def address_to_script(addr):
+    if addr[0] == '3' or addr[0] == '2':
+        return mk_scripthash_script(addr)
+    else:
+        return mk_pubkey_script(addr)
+
+# Output script to address representation
+
+
+def script_to_address(script, vbyte=0):
+    if re.match('^[0-9a-fA-F]*$', script):
+        script = binascii.unhexlify(script)
+    if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(script) == 25:
+        return bin_to_b58check(script[3:-2], vbyte)  # pubkey hash addresses
+    else:
+        if vbyte == 111:
+            # Testnet
+            scripthash_byte = 196
+        else:
+            scripthash_byte = 5
+        # BIP0016 scripthash addresses
+        return bin_to_b58check(script[2:-1], scripthash_byte)
+
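+# Sketch (illustrative, not upstream code): for standard pay-to-pubkey-hash outputs the
+# two directions are inverses of each other.
+#
+#   addr = privtoaddr(random_key())
+#   assert script_to_address(address_to_script(addr)) == addr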
+
+def p2sh_scriptaddr(script, magicbyte=5):
+    if re.match('^[0-9a-fA-F]*$', script):
+        script = binascii.unhexlify(script)
+    return hex_to_b58check(hash160(script), magicbyte)
+scriptaddr = p2sh_scriptaddr
+
+
+def deserialize_script(script):
+    if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
+        return json_changebase(deserialize_script(binascii.unhexlify(script)),
+                               lambda x: safe_hexlify(x))
+    out, pos = [], 0
+    while pos < len(script):
+        code = from_byte_to_int(script[pos])
+        if code == 0:
+            out.append(None)
+            pos += 1
+        elif code <= 75:
+            out.append(script[pos+1:pos+1+code])
+            pos += 1 + code
+        elif code <= 78:
+            szsz = pow(2, code - 76)
+            sz = decode(script[pos+szsz: pos:-1], 256)
+            out.append(script[pos + 1 + szsz:pos + 1 + szsz + sz])
+            pos += 1 + szsz + sz
+        elif code <= 96:
+            out.append(code - 80)
+            pos += 1
+        else:
+            out.append(code)
+            pos += 1
+    return out
+
+
+def serialize_script_unit(unit):
+    if isinstance(unit, int):
+        if unit < 16:
+            return from_int_to_byte(unit + 80)
+        else:
+            return bytes([unit])
+    elif unit is None:
+        return b'\x00'
+    else:
+        if len(unit) <= 75:
+            return from_int_to_byte(len(unit))+unit
+        elif len(unit) < 256:
+            return from_int_to_byte(76)+from_int_to_byte(len(unit))+unit
+        elif len(unit) < 65536:
+            return from_int_to_byte(77)+encode(len(unit), 256, 2)[::-1]+unit
+        else:
+            return from_int_to_byte(78)+encode(len(unit), 256, 4)[::-1]+unit
+
+
+if is_python2:
+    def serialize_script(script):
+        if json_is_base(script, 16):
+            return binascii.hexlify(serialize_script(json_changebase(script,
+                                    lambda x: binascii.unhexlify(x))))
+        return ''.join(map(serialize_script_unit, script))
+else:
+    def serialize_script(script):
+        if json_is_base(script, 16):
+            return safe_hexlify(serialize_script(json_changebase(script,
+                                    lambda x: binascii.unhexlify(x))))
+        
+        result = bytes()
+        for b in map(serialize_script_unit, script):
+            result += b if isinstance(b, bytes) else bytes(b, 'utf-8')
+        return result
+
+
+def mk_multisig_script(*args):  # [pubs],k or pub1,pub2...pub[n],k
+    if isinstance(args[0], list):
+        pubs, k = args[0], int(args[1])
+    else:
+        pubs = list(filter(lambda x: len(str(x)) >= 32, args))
+        k = int(args[len(pubs)])
+    return serialize_script([k]+pubs+[len(pubs), 174])
+
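+# Usage sketch (illustrative; the test suite builds redeem scripts the same way):
+# a 2-of-3 multisig redeem script and its P2SH address.
+#
+#   pubs = [privtopub(random_key()) for _ in range(3)]
+#   redeem_script = mk_multisig_script(pubs, 2)    # OP_2 <pub> <pub> <pub> OP_3 OP_CHECKMULTISIG
+#   p2sh_address = p2sh_scriptaddr(redeem_script)  # '3...' address
+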
+# Signing and verifying
+
+
+def verify_tx_input(tx, i, script, sig, pub):
+    if re.match('^[0-9a-fA-F]*$', tx):
+        tx = binascii.unhexlify(tx)
+    if re.match('^[0-9a-fA-F]*$', script):
+        script = binascii.unhexlify(script)
+    if not re.match('^[0-9a-fA-F]*$', sig):
+        sig = safe_hexlify(sig)
+    hashcode = decode(sig[-2:], 16)
+    modtx = signature_form(tx, int(i), script, hashcode)
+    return ecdsa_tx_verify(modtx, sig, pub, hashcode)
+
+
+def sign(tx, i, priv, hashcode=SIGHASH_ALL):
+    i = int(i)
+    if (not is_python2 and isinstance(tx, bytes)) or not re.match('^[0-9a-fA-F]*$', tx):
+        return binascii.unhexlify(sign(safe_hexlify(tx), i, priv, hashcode))
+    if len(priv) <= 33:
+        priv = safe_hexlify(priv)
+    pub = privkey_to_pubkey(priv)
+    address = pubkey_to_address(pub)
+    signing_tx = signature_form(tx, i, mk_pubkey_script(address), hashcode)
+    sig = ecdsa_tx_sign(signing_tx, priv, hashcode)
+    txobj = deserialize(tx)
+    txobj["ins"][i]["script"] = serialize_script([sig, pub])
+    return serialize(txobj)
+
+
+def signall(tx, priv):
+    # if priv is a dictionary, assume format is
+    # { 'txinhash:txinidx' : privkey }
+    if isinstance(priv, dict):
+        for e, i in enumerate(deserialize(tx)["ins"]):
+            k = priv["%s:%d" % (i["outpoint"]["hash"], i["outpoint"]["index"])]
+            tx = sign(tx, e, k)
+    else:
+        for i in range(len(deserialize(tx)["ins"])):
+            tx = sign(tx, i, priv)
+    return tx
+
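+# Usage sketch (illustrative; `raw_tx_hex`, `priv`, `priv_a`, `priv_b` and `txid_hex`
+# are placeholders): sign() fills one input's script with <sig> <pubkey>, signall()
+# does every input, from a single key or a 'txinhash:txinidx' -> privkey mapping.
+#
+#   signed = sign(raw_tx_hex, 0, priv)
+#   signed = signall(raw_tx_hex, priv)
+#   signed = signall(raw_tx_hex, {txid_hex + ':0': priv_a, txid_hex + ':1': priv_b})
+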
+
+def multisign(tx, i, script, pk, hashcode=SIGHASH_ALL):
+    if re.match('^[0-9a-fA-F]*$', tx):
+        tx = binascii.unhexlify(tx)
+    if re.match('^[0-9a-fA-F]*$', script):
+        script = binascii.unhexlify(script)
+    modtx = signature_form(tx, i, script, hashcode)
+    return ecdsa_tx_sign(modtx, pk, hashcode)
+
+
+def apply_multisignatures(*args):
+    # tx,i,script,sigs OR tx,i,script,sig1,sig2...,sig[n]
+    tx, i, script = args[0], int(args[1]), args[2]
+    sigs = args[3] if isinstance(args[3], list) else list(args[3:])
+
+    if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
+        script = binascii.unhexlify(script)
+    sigs = [binascii.unhexlify(x) if x[:2] == '30' else x for x in sigs]
+    if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
+        return safe_hexlify(apply_multisignatures(binascii.unhexlify(tx), i, script, sigs))
+
+    txobj = deserialize(tx)
+    txobj["ins"][i]["script"] = serialize_script([None]+sigs+[script])
+    return serialize(txobj)
+
+
+def is_inp(arg):
+    return len(arg) > 64 or "output" in arg or "outpoint" in arg
+
+
+def mktx(*args):
+    # [in0, in1...],[out0, out1...] or in0, in1 ... out0 out1 ...
+    ins, outs = [], []
+    for arg in args:
+        if isinstance(arg, list):
+            for a in arg: (ins if is_inp(a) else outs).append(a)
+        else:
+            (ins if is_inp(arg) else outs).append(arg)
+
+    txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []}
+    for i in ins:
+        if isinstance(i, dict) and "outpoint" in i:
+            txobj["ins"].append(i)
+        else:
+            if isinstance(i, dict) and "output" in i:
+                i = i["output"]
+            txobj["ins"].append({
+                "outpoint": {"hash": i[:64], "index": int(i[65:])},
+                "script": "",
+                "sequence": 4294967295
+            })
+    for o in outs:
+        if isinstance(o, string_or_bytes_types):
+            addr = o[:o.find(':')]
+            val = int(o[o.find(':')+1:])
+            o = {}
+            if re.match('^[0-9a-fA-F]*$', addr):
+                o["script"] = addr
+            else:
+                o["address"] = addr
+            o["value"] = val
+
+        outobj = {}
+        if "address" in o:
+            outobj["script"] = address_to_script(o["address"])
+        elif "script" in o:
+            outobj["script"] = o["script"]
+        else:
+            raise Exception("Could not find 'address' or 'script' in output.")
+        outobj["value"] = o["value"]
+        txobj["outs"].append(outobj)
+
+    return serialize(txobj)
+
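+# Usage sketch (illustrative; `change_addr` is a placeholder): inputs are 'txid:vout'
+# strings or dicts carrying an 'output'/'outpoint', outputs are 'address:value'
+# strings or {'address'|'script', 'value'} dicts, with values in satoshis.
+#
+#   raw = mktx(['23' * 32 + ':0'],
+#              ['1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa:90000',
+#               {'address': change_addr, 'value': 5000}])
+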
+
+def select(unspent, value):
+    value = int(value)
+    high = [u for u in unspent if u["value"] >= value]
+    high.sort(key=lambda u: u["value"])
+    low = [u for u in unspent if u["value"] < value]
+    low.sort(key=lambda u: -u["value"])
+    if len(high):
+        return [high[0]]
+    i, tv = 0, 0
+    while tv < value and i < len(low):
+        tv += low[i]["value"]
+        i += 1
+    if tv < value:
+        raise Exception("Not enough funds")
+    return low[:i]
+
+# Only takes inputs of the form { "output": blah, "value": foo }
+
+
+def mksend(*args):
+    argz, change, fee = args[:-2], args[-2], int(args[-1])
+    ins, outs = [], []
+    for arg in argz:
+        if isinstance(arg, list):
+            for a in arg:
+                (ins if is_inp(a) else outs).append(a)
+        else:
+            (ins if is_inp(arg) else outs).append(arg)
+
+    isum = sum([i["value"] for i in ins])
+    osum, outputs2 = 0, []
+    for o in outs:
+        if isinstance(o, string_types):
+            o2 = {
+                "address": o[:o.find(':')],
+                "value": int(o[o.find(':')+1:])
+            }
+        else:
+            o2 = o
+        outputs2.append(o2)
+        osum += o2["value"]
+
+    if isum < osum+fee:
+        raise Exception("Not enough money")
+    elif isum > osum+fee+5430:
+        outputs2 += [{"address": change, "value": isum-osum-fee}]
+
+    return mktx(ins, outputs2)
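+
+# Putting it together (illustrative sketch; `unspent`, `dest_addr`, `change_addr` and
+# `priv` are placeholders): pick coins, build a spend with an explicit fee, then sign.
+#
+#   coins = select(unspent, 60000)   # unspent: [{'output': 'txid:vout', 'value': ...}, ...]
+#   raw = mksend(coins, [dest_addr + ':50000'], change_addr, 10000)
+#   signed = signall(raw, priv)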
diff --git a/src/lib/pybitcointools/pybtctool b/src/lib/pybitcointools/pybtctool
new file mode 100644
index 00000000..600f9a9a
--- /dev/null
+++ b/src/lib/pybitcointools/pybtctool
@@ -0,0 +1,24 @@
+#!/usr/bin/python
+import sys, json, re
+from bitcoin import *
+
+if len(sys.argv) == 1:
+    print "pybtctool <command> <arg1> <arg2> ..."
+else:
+    cmd = sys.argv[2] if sys.argv[1][0] == '-' else sys.argv[1]
+    if sys.argv[1] == '-s':
+        args = re.findall(r'\S\S*',sys.stdin.read())+sys.argv[3:]
+    elif sys.argv[1] == '-B':
+        args = [sys.stdin.read()]+sys.argv[3:]
+    elif sys.argv[1] == '-b':
+        args = [sys.stdin.read()[:-1]]+sys.argv[3:] # remove trailing \n
+    elif sys.argv[1] == '-j':
+        args = [json.loads(sys.stdin.read())]+sys.argv[3:]
+    elif sys.argv[1] == '-J':
+        args = json.loads(sys.stdin.read())+sys.argv[3:]
+    else:
+        cmd = sys.argv[1]
+        args = sys.argv[2:]
+    o = vars()[cmd](*args)
+    if isinstance(o,(list,dict)): print json.dumps(o)
+    else: print o
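+
+# Illustrative invocations (assuming the package is installed so `pybtctool` is on the
+# PATH; `-s` splits stdin into extra arguments, as implemented above):
+#
+#   pybtctool random_key
+#   pybtctool random_key | pybtctool -s privtoaddr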
diff --git a/src/lib/pybitcointools/setup.py b/src/lib/pybitcointools/setup.py
new file mode 100644
index 00000000..8473f81c
--- /dev/null
+++ b/src/lib/pybitcointools/setup.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+try:
+    from setuptools import setup
+except ImportError:
+    from distutils.core import setup
+
+setup(name='bitcoin',
+      version='1.1.25',
+      description='Python Bitcoin Tools',
+      author='Vitalik Buterin',
+      author_email='vbuterin@gmail.com',
+      url='http://github.com/vbuterin/pybitcointools',
+      install_requires='six==1.8.0',
+      packages=['bitcoin'],
+      scripts=['pybtctool'],
+      include_package_data=True,
+      data_files=[("", ["LICENSE"])],
+      )
diff --git a/src/lib/pybitcointools/test.py b/src/lib/pybitcointools/test.py
new file mode 100644
index 00000000..20ba77ed
--- /dev/null
+++ b/src/lib/pybitcointools/test.py
@@ -0,0 +1,437 @@
+import json
+import os
+import random
+import unittest
+
+import bitcoin.ripemd as ripemd
+from bitcoin import *
+
+
+class TestECCArithmetic(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        print('Starting ECC arithmetic tests')
+
+    def test_all(self):
+        for i in range(8):
+            print('### Round %d' % (i+1))
+            x, y = random.randrange(2**256), random.randrange(2**256)
+            self.assertEqual(
+                multiply(multiply(G, x), y)[0],
+                multiply(multiply(G, y), x)[0]
+            )
+            self.assertEqual(
+                add_pubkeys(multiply(G, x), multiply(G, y))[0],
+                multiply(G, add_privkeys(x, y))[0]
+            )
+
+            hx, hy = encode(x % N, 16, 64), encode(y % N, 16, 64)
+            self.assertEqual(
+                multiply(multiply(G, hx), hy)[0],
+                multiply(multiply(G, hy), hx)[0]
+            )
+            self.assertEqual(
+                add_pubkeys(multiply(G, hx), multiply(G, hy))[0],
+                multiply(G, add_privkeys(hx, hy))[0]
+            )
+            self.assertEqual(
+                b58check_to_hex(pubtoaddr(privtopub(x))),
+                b58check_to_hex(pubtoaddr(multiply(G, hx), 23))
+            )
+
+            p = privtopub(sha256(str(x)))
+            if i % 2 == 1:
+                p = changebase(p, 16, 256)
+            self.assertEqual(p, decompress(compress(p)))
+            self.assertEqual(G[0], multiply(divide(G, x), x)[0])
+
+
+class TestBases(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        print('Starting base change tests')
+
+    def test_all(self):
+        data = [
+            [10, '65535', 16, 'ffff'],
+            [16, 'deadbeef', 10, '3735928559'],
+            [10, '0', 16, ''],
+            [256, b'34567', 10, '219919234615'],
+            [10, '444', 16, '1bc'],
+            [256, b'\x03\x04\x05\x06\x07', 10, '12952339975'],
+            [16, '3132333435', 256, b'12345']
+        ]
+        for prebase, preval, postbase, postval in data:
+            self.assertEqual(changebase(preval, prebase, postbase), postval)
+
+        for i in range(100):
+            x = random.randrange(1, 9999999999999999)
+            frm = random.choice([2, 10, 16, 58, 256])
+            to = random.choice([2, 10, 16, 58, 256])
+            self.assertEqual(decode(encode(x, to), to), x)
+            self.assertEqual(changebase(encode(x, frm), frm, to), encode(x, to))
+            self.assertEqual(decode(changebase(encode(x, frm), frm, to), to), x)
+
+
+class TestElectrumWalletInternalConsistency(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        print('Starting Electrum wallet internal consistency tests')
+
+    def test_all(self):
+        for i in range(3):
+            seed = sha256(str(random.randrange(2**40)))[:32]
+            mpk = electrum_mpk(seed)
+            for i in range(5):
+                pk = electrum_privkey(seed, i)
+                pub = electrum_pubkey((mpk, seed)[i % 2], i)
+                pub2 = privtopub(pk)
+                self.assertEqual(
+                    pub,
+                    pub2,
+                    'Does not match! Details:\nseed: %s\nmpk: %s\npriv: %s\npub: %s\npub2: %s' % (
+                        seed, mpk, pk, pub, pub2
+                    )
+                )
+
+
+class TestElectrumSignVerify(unittest.TestCase):
+    """Requires Electrum."""
+
+    @classmethod
+    def setUpClass(cls):
+        cls.wallet = "/tmp/tempwallet_" + str(random.randrange(2**40))
+        print("Starting wallet tests with: " + cls.wallet)
+        os.popen('echo "\n\n\n\n\n\n" | electrum -w %s create' % cls.wallet).read()
+        cls.seed = str(json.loads(os.popen("electrum -w %s getseed" % cls.wallet).read())['seed'])
+        cls.addies = json.loads(os.popen("electrum -w %s listaddresses" % cls.wallet).read())
+
+    def test_address(self):
+        for i in range(5):
+            self.assertEqual(
+                self.addies[i],
+                electrum_address(self.seed, i, 0),
+                "Address does not match! Details:\nseed %s, i: %d" % (self.seed, i)
+            )
+
+    def test_sign_verify(self):
+        print("Electrum-style signing and verification tests, against actual Electrum")
+        alphabet = "1234567890qwertyuiopasdfghjklzxcvbnm"
+        for i in range(8):
+            msg = ''.join([random.choice(alphabet) for i in range(random.randrange(20, 200))])
+            addy = random.choice(self.addies)
+            wif = os.popen('electrum -w %s dumpprivkey %s' % (self.wallet, addy)).readlines()[-2].replace('"', '').strip()
+            priv = b58check_to_hex(wif)
+            pub = privtopub(priv)
+
+            sig = os.popen('electrum -w %s signmessage %s %s' % (self.wallet, addy, msg)).readlines()[-1].strip()
+            self.assertTrue(
+                ecdsa_verify(msg, sig, pub),
+                "Verification error. Details:\nmsg: %s\nsig: %s\npriv: %s\naddy: %s\npub: %s" % (
+                    msg, sig, priv, addy, pub
+                )
+            )
+
+            rec = ecdsa_recover(msg, sig)
+            self.assertEqual(
+                pub,
+                rec,
+                "Recovery error. Details:\nmsg: %s\nsig: %s\npriv: %s\naddy: %s\noriginal pub: %s, %s\nrecovered pub: %s" % (
+                    msg, sig, priv, addy, pub, decode_pubkey(pub, 'hex')[1], rec
+                )
+            )
+
+            mysig = ecdsa_sign(msg, priv)
+            self.assertEqual(
+                os.popen('electrum -w %s verifymessage %s %s %s' % (self.wallet, addy, mysig, msg)).read().strip(),
+                "true",
+                "Electrum verify message does not match"
+            )
+
+
+class TestTransactionSignVerify(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        print("Transaction-style signing and verification tests")
+
+    def test_all(self):
+        alphabet = "1234567890qwertyuiopasdfghjklzxcvbnm"
+        for i in range(10):
+            msg = ''.join([random.choice(alphabet) for i in range(random.randrange(20, 200))])
+            priv = sha256(str(random.randrange(2**256)))
+            pub = privtopub(priv)
+            sig = ecdsa_tx_sign(msg, priv)
+            self.assertTrue(
+                ecdsa_tx_verify(msg, sig, pub),
+                "Verification error"
+            )
+
+            self.assertIn(
+                pub,
+                ecdsa_tx_recover(msg, sig),
+                "Recovery failed"
+            )
+
+
+class TestSerialize(unittest.TestCase):
+
+    def test_serialize(self):
+        tx = '0100000001239f932c780e517015842f3b02ff765fba97f9f63f9f1bc718b686a56ed9c73400000000fd5d010047304402200c40fa58d3f6d5537a343cf9c8d13bc7470baf1d13867e0de3e535cd6b4354c802200f2b48f67494835b060d0b2ff85657d2ba2d9ea4e697888c8cb580e8658183a801483045022056f488c59849a4259e7cef70fe5d6d53a4bd1c59a195b0577bd81cb76044beca022100a735b319fa66af7b178fc719b93f905961ef4d4446deca8757a90de2106dd98a014cc95241046c7d87fd72caeab48e937f2feca9e9a4bd77f0eff4ebb2dbbb9855c023e334e188d32aaec4632ea4cbc575c037d8101aec73d029236e7b1c2380f3e4ad7edced41046fd41cddf3bbda33a240b417a825cc46555949917c7ccf64c59f42fd8dfe95f34fae3b09ed279c8c5b3530510e8cca6230791102eef9961d895e8db54af0563c410488d618b988efd2511fc1f9c03f11c210808852b07fe46128c1a6b1155aa22cdf4b6802460ba593db2d11c7e6cbe19cedef76b7bcabd05d26fd97f4c5a59b225053aeffffffff0310270000000000001976a914a89733100315c37d228a529853af341a9d290a4588ac409c00000000000017a9142b56f9a4009d9ff99b8f97bea4455cd71135f5dd87409c00000000000017a9142b56f9a4009d9ff99b8f97bea4455cd71135f5dd8700000000'
+        self.assertEqual(
+            serialize(deserialize(tx)),
+            tx,
+            "Serialize roundtrip failed"
+        )
+
+    def test_serialize_script(self):
+        script = '47304402200c40fa58d3f6d5537a343cf9c8d13bc7470baf1d13867e0de3e535cd6b4354c802200f2b48f67494835b060d0b2ff85657d2ba2d9ea4e697888c8cb580e8658183a801483045022056f488c59849a4259e7cef70fe5d6d53a4bd1c59a195b0577bd81cb76044beca022100a735b319fa66af7b178fc719b93f905961ef4d4446deca8757a90de2106dd98a014cc95241046c7d87fd72caeab48e937f2feca9e9a4bd77f0eff4ebb2dbbb9855c023e334e188d32aaec4632ea4cbc575c037d8101aec73d029236e7b1c2380f3e4ad7edced41046fd41cddf3bbda33a240b417a825cc46555949917c7ccf64c59f42fd8dfe95f34fae3b09ed279c8c5b3530510e8cca6230791102eef9961d895e8db54af0563c410488d618b988efd2511fc1f9c03f11c210808852b07fe46128c1a6b1155aa22cdf4b6802460ba593db2d11c7e6cbe19cedef76b7bcabd05d26fd97f4c5a59b225053ae'
+        self.assertEqual(
+            serialize_script(deserialize_script(script)),
+            script,
+            "Script serialize roundtrip failed"
+        )
+
+
+class TestTransaction(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        print("Attempting transaction creation")
+
+    # FIXME: I don't know how to write this as a unit test.
+    # What should be asserted?
+    def test_all(self):
+        privs = [sha256(str(random.randrange(2**256))) for x in range(4)]
+        pubs = [privtopub(priv) for priv in privs]
+        addresses = [pubtoaddr(pub) for pub in pubs]
+        mscript = mk_multisig_script(pubs[1:], 2, 3)
+        msigaddr = p2sh_scriptaddr(mscript)
+        tx = mktx(['01'*32+':1', '23'*32+':2'], [msigaddr+':20202', addresses[0]+':40404'])
+        tx1 = sign(tx, 1, privs[0])
+
+        sig1 = multisign(tx, 0, mscript, privs[1])
+        self.assertTrue(verify_tx_input(tx1, 0, mscript, sig1, pubs[1]), "Verification Error")
+
+        sig3 = multisign(tx, 0, mscript, privs[3])
+        self.assertTrue(verify_tx_input(tx1, 0, mscript, sig3, pubs[3]), "Verification Error")
+
+        tx2 = apply_multisignatures(tx1, 0, mscript, [sig1, sig3])
+        print("Outputting transaction: ", tx2)
+
+
+class TestDeterministicGenerate(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        print("Beginning RFC6979 deterministic signing tests")
+
+    def test_all(self):
+        # Created with python-ecdsa 0.9
+        # Code to make your own vectors:
+        # class gen:
+        #     def order(self): return 115792089237316195423570985008687907852837564279074904382605163141518161494337
+        # dummy = gen()
+        # for i in range(10): ecdsa.rfc6979.generate_k(dummy, i, hashlib.sha256, hashlib.sha256(str(i)).digest())
+        test_vectors = [
+            32783320859482229023646250050688645858316445811207841524283044428614360139869,
+            109592113955144883013243055602231029997040992035200230706187150761552110229971,
+            65765393578006003630736298397268097590176526363988568884298609868706232621488,
+            85563144787585457107933685459469453513056530050186673491900346620874099325918,
+            99829559501561741463404068005537785834525504175465914981205926165214632019533,
+            7755945018790142325513649272940177083855222863968691658328003977498047013576,
+            81516639518483202269820502976089105897400159721845694286620077204726637043798,
+            52824159213002398817852821148973968315579759063230697131029801896913602807019,
+            44033460667645047622273556650595158811264350043302911918907282441675680538675,
+            32396602643737403620316035551493791485834117358805817054817536312402837398361
+        ]
+
+        for i, ti in enumerate(test_vectors):
+            mine = deterministic_generate_k(bin_sha256(str(i)), encode(i, 256, 32))
+            self.assertEqual(
+                ti,
+                mine,
+                "Test vector does not match. Details:\n%s\n%s" % (
+                    ti,
+                    mine
+                )
+            )
+
+
+class TestBIP0032(unittest.TestCase):
+    """See: https://en.bitcoin.it/wiki/BIP_0032"""
+    @classmethod
+    def setUpClass(cls):
+        print("Beginning BIP0032 tests")
+
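+    # chain is a list of derivation steps: integers are child indices for bip32_ckd, the literal 'pub' switches to the public key via bip32_privtopub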
+    def _full_derive(self, key, chain):
+        if len(chain) == 0:
+            return key
+        elif chain[0] == 'pub':
+            return self._full_derive(bip32_privtopub(key), chain[1:])
+        else:
+            return self._full_derive(bip32_ckd(key, chain[0]), chain[1:])
+
+    def test_all(self):
+        test_vectors = [
+            [[], 'xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi'],
+            [['pub'], 'xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8'],
+            [[2**31], 'xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7'],
+            [[2**31, 1], 'xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs'],
+            [[2**31, 1, 2**31 + 2], 'xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM'],
+            [[2**31, 1, 2**31 + 2, 'pub', 2, 1000000000], 'xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy']
+        ]
+
+        mk = bip32_master_key(safe_from_hex('000102030405060708090a0b0c0d0e0f'))
+
+        for tv in test_vectors:
+            left, right = self._full_derive(mk, tv[0]), tv[1]
+            self.assertEqual(
+                left,
+                right,
+                "Test vector does not match. Details: \n%s\n%s\n\%s" % (
+                    tv[0],
+                    [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(left)],
+                    [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(right)],
+                )
+            )
+
+
+class TestStartingAddressAndScriptGenerationConsistency(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        print("Starting address and script generation consistency tests")
+
+    def test_all(self):
+        for i in range(5):
+            a = privtoaddr(random_key())
+            self.assertEqual(a, script_to_address(address_to_script(a)))
+            b = privtoaddr(random_key(), 5)
+            self.assertEqual(b, script_to_address(address_to_script(b)))
+
+
+class TestRipeMD160PythonBackup(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        print('Testing the pure python backup for ripemd160')
+
+    def test_all(self):
+        strvec = [
+            '',
+            'The quick brown fox jumps over the lazy dog',
+            'The quick brown fox jumps over the lazy cog',
+            'Nobody inspects the spammish repetition'
+        ]
+
+        target = [
+            '9c1185a5c5e9fc54612808977ee8f548b2258d31',
+            '37f332f68db77bd9d7edd4969571ad671cf9dd3b',
+            '132072df690933835eb8b6ad0b77e7b6f14acad7',
+            'cc4a5ce1b3df48aec5d22d1f16b894a0b894eccc'
+        ]
+
+        hash160target = [
+            'b472a266d0bd89c13706a4132ccfb16f7c3b9fcb',
+            '0e3397b4abc7a382b3ea2365883c3c7ca5f07600',
+            '53e0dacac5249e46114f65cb1f30d156b14e0bdc',
+            '1c9b7b48049a8f98699bca22a5856c5ef571cd68'
+        ]
+
+        for i, s in enumerate(strvec):
+            digest = ripemd.RIPEMD160(s).digest()
+            hash160digest = ripemd.RIPEMD160(bin_sha256(s)).digest()
+            self.assertEqual(bytes_to_hex_string(digest), target[i])
+            self.assertEqual(bytes_to_hex_string(hash160digest), hash160target[i])
+            self.assertEqual(bytes_to_hex_string(bin_hash160(from_string_to_bytes(s))), hash160target[i])
+            self.assertEqual(hash160(from_string_to_bytes(s)), hash160target[i])
+
+
+class TestScriptVsAddressOutputs(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        print('Testing script vs address outputs')
+
+    def test_all(self):
+        addr0 = '1Lqgj1ThNfwLgHMp5qJUerYsuUEm8vHmVG'
+        script0 = '76a914d99f84267d1f90f3e870a5e9d2399918140be61d88ac'
+        addr1 = '31oSGBBNrpCiENH3XMZpiP6GTC4tad4bMy'
+        script1 = 'a9140136d001619faba572df2ef3d193a57ad29122d987'
+
+        inputs = [{
+            'output': 'cd6219ea108119dc62fce09698b649efde56eca7ce223a3315e8b431f6280ce7:0',
+            'value': 158000
+        }]
+
+        outputs = [
+            [{'address': addr0, 'value': 1000}, {'address': addr1, 'value': 2000}],
+            [{'script': script0, 'value': 1000}, {'address': addr1, 'value': 2000}],
+            [{'address': addr0, 'value': 1000}, {'script': script1, 'value': 2000}],
+            [{'script': script0, 'value': 1000}, {'script': script1, 'value': 2000}],
+            [addr0 + ':1000', addr1 + ':2000'],
+            [script0 + ':1000', addr1 + ':2000'],
+            [addr0 + ':1000', script1 + ':2000'],
+            [script0 + ':1000', script1 + ':2000']
+        ]
+
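+        # deserialize() returns outputs in {'script', 'value'} form, so every address/script mix is expected to equal the all-script case (outputs[3])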
+        for outs in outputs:
+            tx_struct = deserialize(mktx(inputs, outs))
+            self.assertEqual(tx_struct['outs'], outputs[3])
+
+
+class TestConversions(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        cls.privkey_hex = (
+            "e9873d79c6d87dc0fb6a5778633389f4453213303da61f20bd67fc233aa33262"
+        )
+        cls.privkey_bin = (
+            b"\xe9\x87=y\xc6\xd8}\xc0\xfbjWxc3\x89\xf4E2\x130=\xa6\x1f \xbdg\xfc#:\xa32b"
+        )
+
+        cls.pubkey_hex = (
+            "04588d202afcc1ee4ab5254c7847ec25b9a135bbda0f2bc69ee1a714749fd77dc9f88ff2a00d7e752d44cbe16e1ebcf0890b76ec7c78886109dee76ccfc8445424"
+        )
+        cls.pubkey_bin = (
+            b"\x04X\x8d *\xfc\xc1\xeeJ\xb5%LxG\xec%\xb9\xa15\xbb\xda\x0f+\xc6\x9e\xe1\xa7\x14t\x9f\xd7}\xc9\xf8\x8f\xf2\xa0\r~u-D\xcb\xe1n\x1e\xbc\xf0\x89\x0bv\xec|x\x88a\t\xde\xe7l\xcf\xc8DT$"
+        )
+
+    def test_privkey_to_pubkey(self):
+        pubkey_hex = privkey_to_pubkey(self.privkey_hex)
+        self.assertEqual(pubkey_hex, self.pubkey_hex)
+
+    def test_changebase(self):
+        self.assertEqual(
+            self.pubkey_bin,
+            changebase(
+                self.pubkey_hex, 16, 256, minlen=len(self.pubkey_bin)
+            )
+        )
+
+        self.assertEqual(
+            self.pubkey_hex,
+            changebase(
+                self.pubkey_bin, 256, 16, minlen=len(self.pubkey_hex)
+            )
+        )
+
+        self.assertEqual(
+            self.privkey_bin,
+            changebase(
+                self.privkey_hex, 16, 256, minlen=len(self.privkey_bin)
+            )
+        )
+
+        self.assertEqual(
+            self.privkey_hex,
+            changebase(
+                self.privkey_bin, 256, 16, minlen=len(self.privkey_hex)
+            )
+        )
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/src/lib/pybitcointools/test_stealth.py b/src/lib/pybitcointools/test_stealth.py
new file mode 100644
index 00000000..01a1432d
--- /dev/null
+++ b/src/lib/pybitcointools/test_stealth.py
@@ -0,0 +1,92 @@
+import bitcoin as bc
+import sys
+import unittest
+
+class TestStealth(unittest.TestCase):
+
+    def setUp(self):
+        
+        if sys.getrecursionlimit() < 1000:
+            sys.setrecursionlimit(1000)
+        
+        self.addr = 'vJmtjxSDxNPXL4RNapp9ARdqKz3uJyf1EDGjr1Fgqs9c8mYsVH82h8wvnA4i5rtJ57mr3kor1EVJrd4e5upACJd588xe52yXtzumxj'
+        self.scan_pub = '025e58a31122b38c86abc119b9379fe247410aee87a533f9c07b189aef6c3c1f52'
+        self.scan_priv = '3e49e7257cb31db997edb1cf8299af0f37e2663e2260e4b8033e49d39a6d02f2'
+        self.spend_pub = '03616562c98e7d7b74be409a787cec3a912122f3fb331a9bee9b0b73ce7b9f50af'
+        self.spend_priv = 'aa3db0cfb3edc94de4d10f873f8190843f2a17484f6021a95a7742302c744748'
+        self.ephem_pub = '03403d306ec35238384c7e340393335f9bc9bb4a2e574eb4e419452c4ea19f14b0'
+        self.ephem_priv = '9e63abaf8dcd5ea3919e6de0b6c544e00bf51bf92496113a01d6e369944dc091'
+        self.shared_secret = 'a4047ee231f4121e3a99a3a3378542e34a384b865a9917789920e1f13ffd91c6'
+        self.pay_pub = '02726112ad39cb6bf848b1b1ef30b88e35286bf99f746c2be575f96c0e02a9357c'
+        self.pay_priv = '4e422fb1e5e1db6c1f6ab32a7706d368ceb385e7fab098e633c5c5949c3b97cd'
+        
+        self.testnet_addr = 'waPUuLLykSnY3itzf1AyrQZm42F7KyB7SR5zpfqmnzPXWhx9kXLzV3EcyqzDdpTwngiyCCMUqztS9S1d7XJs3JMt3MsHPDpBCudvx9'
+        
+    def test_address_encoding(self):
+
+        sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.addr)
+        self.assertEqual(sc_pub, self.scan_pub)
+        self.assertEqual(sp_pub, self.spend_pub)
+        
+        stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub)
+        self.assertEqual(stealth_addr2, self.addr)
+        
+        magic_byte_testnet = 43
+        sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.testnet_addr)
+        self.assertEqual(sc_pub, self.scan_pub)
+        self.assertEqual(sp_pub, self.spend_pub)
+        
+        stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub, magic_byte_testnet)
+        self.assertEqual(stealth_addr2, self.testnet_addr)
+        
+    def test_shared_secret(self):
+
+        sh_sec = bc.shared_secret_sender(self.scan_pub, self.ephem_priv)
+        self.assertEqual(sh_sec, self.shared_secret)
+
+        sh_sec2 = bc.shared_secret_receiver(self.ephem_pub, self.scan_priv)
+        self.assertEqual(sh_sec2, self.shared_secret)
+
+    def test_uncover_pay_keys(self):
+
+        pub = bc.uncover_pay_pubkey_sender(self.scan_pub, self.spend_pub, self.ephem_priv)
+        pub2 = bc.uncover_pay_pubkey_receiver(self.scan_priv, self.spend_pub, self.ephem_pub)
+        self.assertEqual(pub, self.pay_pub)
+        self.assertEqual(pub2, self.pay_pub)
+
+        priv = bc.uncover_pay_privkey(self.scan_priv, self.spend_priv, self.ephem_pub)
+        self.assertEqual(priv, self.pay_priv)
+
+    def test_stealth_metadata_script(self):
+
+        nonce = int('deadbeef', 16)
+        script = bc.mk_stealth_metadata_script(self.ephem_pub, nonce)
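+        # assumed hex layout: 6a (OP_RETURN) 26 (38-byte push) 06 (stealth prefix), then the 4-byte nonce and the 33-byte ephemeral pubkey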
+        self.assertEqual(script[6:], 'deadbeef' + self.ephem_pub)
+        
+        eph_pub = bc.ephem_pubkey_from_tx_script(script)
+        self.assertEqual(eph_pub, self.ephem_pub)
+
+    def test_stealth_tx_outputs(self):
+
+        nonce = int('deadbeef', 16)
+        value = 10**8
+        outputs = bc.mk_stealth_tx_outputs(self.addr, value, self.ephem_priv, nonce)
+
+        self.assertEqual(outputs[0]['value'], 0)
+        self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub)
+        self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub))
+        self.assertEqual(outputs[1]['value'], value)
+        
+        outputs = bc.mk_stealth_tx_outputs(self.testnet_addr, value, self.ephem_priv, nonce, 'testnet')
+        
+        self.assertEqual(outputs[0]['value'], 0)
+        self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub)
+        self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub, 111))
+        self.assertEqual(outputs[1]['value'], value)
+
+        self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.testnet_addr, value, self.ephem_priv, nonce, 'btc')
+        
+        self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.addr, value, self.ephem_priv, nonce, 'testnet')
+ 
+if __name__ == '__main__':
+    unittest.main()
diff --git a/src/main.py b/src/main.py
index 46d576a7..03deb90e 100644
--- a/src/main.py
+++ b/src/main.py
@@ -5,6 +5,7 @@ sys.path.insert(0, os.path.dirname(__file__)) # Imports relative to main.py
 if not os.path.isdir("log"): os.mkdir("log")
 if not os.path.isdir("data"): os.mkdir("data")
 if not os.path.isfile("data/sites.json"): open("data/sites.json", "w").write("{}")
+if not os.path.isfile("data/users.json"): open("data/users.json", "w").write("{}")
 
 # Load config
 from Config import config
@@ -31,9 +32,9 @@ logging.getLogger('').addHandler(console_log) # Add console logger
 logging.getLogger('').name = "-" # Remove root prefix
 
 # Debug dependent configuration
+from Debug import DebugHook
 if config.debug:
 	console_log.setLevel(logging.DEBUG)
-	from Debug import DebugHook
 	from gevent import monkey; monkey.patch_all(thread=False) # thread=False because of pyfilesystem
 else:
 	console_log.setLevel(logging.INFO)
@@ -91,14 +92,14 @@ def siteCreate():
 
 	logging.info("Creating content.json...")
 	site = Site(address)
-	site.signContent(privatekey)
+	site.content_manager.sign(privatekey=privatekey)
 	site.settings["own"] = True
 	site.saveSettings()
 
 	logging.info("Site created!")
 
 
-def siteSign(address, privatekey=None):
+def siteSign(address, privatekey=None, inner_path="content.json"):
 	from Site import Site
 	logging.info("Signing site: %s..." % address)
 	site = Site(address, allow_create = False)
@@ -106,7 +107,7 @@ def siteSign(address, privatekey=None):
 	if not privatekey: # If no privatekey in args then ask it now
 		import getpass
 		privatekey = getpass.getpass("Private key (input hidden):")
-	site.signContent(privatekey)
+	site.content_manager.sign(inner_path=inner_path, privatekey=privatekey)
 
 
 def siteVerify(address):
@@ -114,11 +115,12 @@ def siteVerify(address):
 	logging.info("Verifing site: %s..." % address)
 	site = Site(address)
 
-	logging.info("Verifing content.json signature...")
-	if site.verifyFile("content.json", open(site.getPath("content.json"), "rb"), force=True) != False: # Force check the sign
-		logging.info("[OK] content.json signed by address %s!" % address)
-	else:
-		logging.error("[ERROR] Content.json not signed by address %s!" % address)
+	for content_inner_path in site.content_manager.contents:
+		logging.info("Verifying %s signature..." % content_inner_path)
+		if site.content_manager.verifyFile(content_inner_path, open(site.getPath(content_inner_path), "rb"), ignore_same=False) == True:
+			logging.info("[OK] %s signed by address %s!" % (content_inner_path, address))
+		else:
+			logging.error("[ERROR] %s not signed by address %s!" % (content_inner_path, address))
 
 	logging.info("Verifying site files...")
 	bad_files = site.verifyFiles()
@@ -146,7 +148,7 @@ def siteNeedFile(address, inner_path):
 	print site.needFile(inner_path, update=True)
 
 
-def sitePublish(address, peer_ip=None, peer_port=15441):
+def sitePublish(address, peer_ip=None, peer_port=15441, inner_path="content.json"):
 	from Site import Site
 	from File import FileServer # We need fileserver to handle incoming file requests
 	logging.info("Creating FileServer....")
@@ -163,7 +165,7 @@ def sitePublish(address, peer_ip=None, peer_port=15441):
 	else: # Just ask the tracker
 		logging.info("Gathering peers from tracker")
 		site.announce() # Gather peers
-	site.publish(20) # Push to 20 peers
+	site.publish(20, inner_path) # Push to 20 peers
 	logging.info("Serving files....")
 	gevent.joinall([file_server_thread])
 
diff --git a/src/util/QueryJson.py b/src/util/QueryJson.py
new file mode 100644
index 00000000..1db4b047
--- /dev/null
+++ b/src/util/QueryJson.py
@@ -0,0 +1,60 @@
+import json, re, os
+
+
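+# Load a json file, walk down the keys in filter_path and return the rows found there,
+# optionally keeping only rows where row[filter_key] == filter_val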
+def queryFile(file_path, filter_path, filter_key = None, filter_val = None):
+	back = []
+	data = json.load(open(file_path))
+	for key in filter_path: # Get to the point
+		data = data.get(key)
+		if not data: return
+
+	for row in data:
+		if filter_val: # Filter by value
+			if row[filter_key] == filter_val: back.append(row)
+		else:
+			back.append(row)
+
+	return back
+
+
+# Find in json files
+# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6', u'added': 1422740732, u'message_id': 1},...]
+def query(path_pattern, filter):
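+	# filter is either "key1.key2" (no value filtering) or "key1.key2.field=value", where value is parsed as an int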
+	if "=" in filter: # Filter by value
+		filter_path, filter_val = filter.split("=")
+		filter_path = filter_path.split(".")
+		filter_key = filter_path.pop() # Last element is the key
+		filter_val = int(filter_val)
+	else: # No filter
+		filter_path = filter
+		filter_path = filter_path.split(".")
+		filter_key = None
+		filter_val = None
+
+	if "/*/" in path_pattern: # Wildcard search
+		root_dir, file_pattern = path_pattern.replace("\\", "/").split("/*/")
+	else: # No wildcard
+		root_dir, file_pattern = re.match("(.*)/(.*?)$", path_pattern.replace("\\", "/")).groups()
+	for root, dirs, files in os.walk(root_dir, topdown=False):
+		root = root.replace("\\", "/")
+		inner_path = root.replace(root_dir, "").strip("/")
+		for file_name in files:
+			if file_pattern != file_name: continue
+
+			try:
+				res = queryFile(root+"/"+file_name, filter_path, filter_key, filter_val)
+				if not res: continue
+			except Exception, err: # Json load error
+				# print file_name, filter, err
+				continue
+			for row in res:
+				row["inner_path"] = inner_path
+				yield row
+
+
+
+if __name__ == "__main__":
+	for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "topics")):
+		print row
+	for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/data.json", "topics.topic_id=1")):
+		print row
\ No newline at end of file