Remove included win_inet_pton, websocket, rsa, socks, pyelliptic, pybitcointools, pyasn1, opensslVerify, merkletools, geventwebsocket, BitcoinECC, bencode modules

This commit is contained in:
shortcutme 2019-03-16 02:58:49 +01:00
parent ff5004cb8d
commit 3eae349a0a
No known key found for this signature in database
GPG key ID: 5B63BAE6CB9613AE
142 changed files with 0 additions and 27935 deletions

View file

@@ -1,467 +0,0 @@
# By: HurlSly
# Source: https://github.com/HurlSly/Python/blob/master/BitcoinECC.py
# Modified: random number generator in def GeneratePrivateKey(self):
import random
import hashlib
import os
class GaussInt:
#A class for the Gauss integers of the form a + b sqrt(n) where a,b are integers.
#n can be positive or negative.
def __init__(self,x,y,n,p=0):
if p:
self.x=x%p
self.y=y%p
self.n=n%p
else:
self.x=x
self.y=y
self.n=n
self.p=p
def __add__(self,b):
return GaussInt(self.x+b.x,self.y+b.y,self.n,self.p)
def __sub__(self,b):
return GaussInt(self.x-b.x,self.y-b.y,self.n,self.p)
def __mul__(self,b):
return GaussInt(self.x*b.x+self.n*self.y*b.y,self.x*b.y+self.y*b.x,self.n,self.p)
def __div__(self,b):
return GaussInt((self.x*b.x-self.n*self.y*b.y)/(b.x*b.x-self.n*b.y*b.y),(-self.x*b.y+self.y*b.x)/(b.x*b.x-self.n*b.y*b.y),self.n,self.p)
def __eq__(self,b):
return self.x==b.x and self.y==b.y
def __repr__(self):
if self.p:
return "%s+%s (%d,%d)"%(self.x,self.y,self.n,self.p)
else:
return "%s+%s (%d)"%(self.x,self.y,self.n)
def __pow__(self,n):
b=Base(n,2)
t=GaussInt(1,0,self.n)
while b:
t=t*t
if b.pop():
t=self*t
return t
def Inv(self):
return GaussInt(self.x/(self.x*self.x-self.n*self.y*self.y),-self.y/(self.x*self.x-self.n*self.y*self.y),self.n,self.p)
def Cipolla(a,p):
#Find a square root of a modulo p using the algorithm of Cipolla
b=0
while pow((b*b-a)%p,(p-1)/2,p)==1:
b+=1
return (GaussInt(b,1,b**2-a,p)**((p+1)/2)).x
def Base(n,b):
#Decompose n in base b
l=[]
while n:
l.append(n%b)
n/=b
return l
def InvMod(a,n):
#Find the inverse mod n of a.
#Use the extended Euclidean algorithm.
m=[]
s=n
while n:
m.append(a/n)
(a,n)=(n,a%n)
u=1
v=0
while m:
(u,v)=(v,u-m.pop()*v)
return u%s
def b58encode(v):
#Encode a byte string to the Base58
digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
base=len(digit)
val=0
for c in v:
val*=256
val+=ord(c)
result=""
while val:
(val,mod)=divmod(val,base)
result=digit[mod]+result
pad=0
for c in v:
if c=="\0":
pad+=1
else:
break
return (digit[0]*pad)+result
def b58decode(v):
#Decode a Base58 string to byte string
digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
base=len(digit)
val=0
for c in v:
val*=base
val+=digit.find(c)
result=""
while val:
(val,mod)=divmod(val,256)
result=chr(mod)+result
pad=0
for c in v:
if c==digit[0]:
pad+=1
else:
break
result="\0"*pad+result
return result
def Byte2Hex(b):
#Convert a byte string to hex number
out=""
for x in b:
y=hex(ord(x))[2:]
if len(y)==1:
y="0"+y
out+="%2s"%y
return out
def Int2Byte(n,b):
#Convert an integer to a byte string of length b
out=""
for i in range(b):
(n,m)=divmod(n,256)
out=chr(m)+out
return out
class EllipticCurvePoint:
#Main class
#It is an point on an Elliptic Curve
def __init__(self,x,a,b,p,n=0):
#We store the coordinates in x and the elliptic curve parameters.
#x is of length 3. This is the 3 projective coordinates of the point.
self.x=x[:]
self.a=a
self.b=b
self.p=p
self.n=n
def EqualProj(self,y):
#Does y equal self?
#It computes the cross product of self with y and checks whether the result is 0.
return self.x[0]*y.x[1]==self.x[1]*y.x[0] and self.x[1]*y.x[2]==self.x[2]*y.x[1] and self.x[2]*y.x[0]==self.x[0]*y.x[2]
def __add__(self,y):
#The main function to add self and y
#It uses the formulas I derived in projective coordinates.
#Projective coordinates are more efficient than the usual (x,y) coordinates
#because they avoid computing inverses mod p, which makes point addition faster.
z=EllipticCurvePoint([0,0,0],self.a,self.b,self.p)
if self.EqualProj(y):
d=(2*self.x[1]*self.x[2])%self.p
d3=pow(d,3,self.p)
n=(3*pow(self.x[0],2,self.p)+self.a*pow(self.x[2],2,self.p))%self.p
z.x[0]=(pow(n,2,self.p)*d*self.x[2]-2*d3*self.x[0])%self.p
z.x[1]=(3*self.x[0]*n*pow(d,2,self.p)-pow(n,3,self.p)*self.x[2]-self.x[1]*d3)%self.p
z.x[2]=(self.x[2]*d3)%self.p
else:
d=(y.x[0]*self.x[2]-y.x[2]*self.x[0])%self.p
d3=pow(d,3,self.p)
n=(y.x[1]*self.x[2]-self.x[1]*y.x[2])%self.p
z.x[0]=(y.x[2]*self.x[2]*pow(n,2,self.p)*d-d3*(y.x[2]*self.x[0]+y.x[0]*self.x[2]))%self.p
z.x[1]=(pow(d,2,self.p)*n*(2*self.x[0]*y.x[2]+y.x[0]*self.x[2])-pow(n,3,self.p)*self.x[2]*y.x[2]-self.x[1]*d3*y.x[2])%self.p
z.x[2]=(self.x[2]*d3*y.x[2])%self.p
return z
def __mul__(self,n):
#Fast scalar multiplication: add the point to itself n times using double-and-add.
b=Base(n,2)
t=EllipticCurvePoint(self.x,self.a,self.b,self.p)
b.pop()
while b:
t+=t
if b.pop():
t+=self
return t
def __repr__(self):
#print a point in (x,y) coordinate.
return "x=%d\ny=%d\n"%((self.x[0]*InvMod(self.x[2],self.p))%self.p,(self.x[1]*InvMod(self.x[2],self.p))%self.p)
def __eq__(self,x):
#Does self==x ?
return self.x==x.x and self.a==x.a and self.b==x.b and self.p==x.p
def __ne__(self,x):
#Does self!=x ?
return self.x!=x.x or self.a!=x.a or self.b!=x.b or self.p!=x.p
def Check(self):
#Is self on the curve ?
return (self.x[0]**3+self.a*self.x[0]*self.x[2]**2+self.b*self.x[2]**3-self.x[1]**2*self.x[2])%self.p==0
def GeneratePrivateKey(self):
#Generate a private key. It's just a random number between 1 and n-1.
#Of course, this function isn't cryptographically secure.
#Don't use it to generate your key. Use a cryptographically secure source of randomness instead.
#self.d = random.randint(1,self.n-1)
self.d = random.SystemRandom().randint(1,self.n-1) # Better random fix
def SignECDSA(self,m):
#Sign a message. The private key is self.d .
h=hashlib.new("SHA256")
h.update(m)
z=int(h.hexdigest(),16)
r=0
s=0
while not r or not s:
#k=random.randint(1,self.n-1)
k=random.SystemRandom().randint(1,self.n-1) # Better random fix
R=self*k
R.Normalize()
r=R.x[0]%self.n
s=(InvMod(k,self.n)*(z+r*self.d))%self.n
return (r,s)
def CheckECDSA(self,sig,m):
#Check a signature (r,s) of the message m using the public key self.Q
# and the generator which is self.
#This is not the one used by Bitcoin because the public key isn't known;
# only a hash of the public key is known. See the next function.
(r,s)=sig
h=hashlib.new("SHA256")
h.update(m)
z=int(h.hexdigest(),16)
if self.Q.x[2]==0:
return False
if not self.Q.Check():
return False
if (self.Q*self.n).x[2]!=0:
return False
if r<1 or r>self.n-1 or s<1 or s>self.n-1:
return False
w=InvMod(s,self.n)
u1=(z*w)%self.n
u2=(r*w)%self.n
R=self*u1+self.Q*u2
R.Normalize()
return (R.x[0]-r)%self.n==0
def VerifyMessageFromBitcoinAddress(self,adresse,m,sig):
#Check a signature (r,s) for the message m signed by the Bitcoin
# address "adresse".
h=hashlib.new("SHA256")
h.update(m)
z=int(h.hexdigest(),16)
(r,s)=sig
x=r
y2=(pow(x,3,self.p)+self.a*x+self.b)%self.p
y=Cipolla(y2,self.p)
for i in range(2):
kG=EllipticCurvePoint([x,y,1],self.a,self.b,self.p,self.n)
mzG=self*((-z)%self.n)
self.Q=(kG*s+mzG)*InvMod(r,self.n)
adr=self.BitcoinAddresFromPublicKey()
if adr==adresse:
break
y=(-y)%self.p
if adr!=adresse:
return False
return True
def BitcoinAddressFromPrivate(self,pri=None):
#Transform a private key in base58 encoding to a bitcoin address.
#normal means "uncompressed".
if not pri:
print "Private Key :",
pri=raw_input()
normal=(len(pri)==51)
pri=b58decode(pri)
if normal:
pri=pri[1:-4]
else:
pri=pri[1:-5]
self.d=int(Byte2Hex(pri),16)
return self.BitcoinAddress(normal)
def PrivateEncoding(self,normal=True):
#Encode a private key self.d to base58 encoding.
p=Int2Byte(self.d,32)
p="\80"+p
if not normal:
p+=chr(1)
h=hashlib.new("SHA256")
h.update(p)
s=h.digest()
h=hashlib.new("SHA256")
h.update(s)
s=h.digest()
cs=s[:4]
p+=cs
p=b58encode(p)
return p
def BitcoinAddresFromPublicKey(self,normal=True):
#Find the bitcoin address from the public key self.Q
#We do normalization to go from the projective coordinates to the usual
# (x,y) coordinates.
self.Q.Normalize()
if normal:
pk=chr(4)+Int2Byte(self.Q.x[0],32)+Int2Byte((self.Q.x[1])%self.p,32)
else:
if self.Q.x[1]%2==0:
pk=chr(2)+Int2Byte(self.Q.x[0],32)
else:
pk=chr(3)+Int2Byte(self.Q.x[0],32)
version=chr(0)
h=hashlib.new("SHA256")
h.update(pk)
s=h.digest()
h=hashlib.new("RIPEMD160")
h.update(s)
kh=version+h.digest()
h=hashlib.new("SHA256")
h.update(kh)
cs=h.digest()
h=hashlib.new("SHA256")
h.update(cs)
cs=h.digest()[:4]
adr=b58encode(kh+cs)
return adr
def BitcoinAddress(self,normal=True):
#Computes a bitcoin address given the private key self.d.
self.Q=self*self.d
return self.BitcoinAddresFromPublicKey(normal)
def BitcoinAddressGenerator(self,k,filename):
#Generate k Bitcoin addresses and write them to the file in the MultiBit format.
#Change the date as you like.
f=open(filename,"w")
for i in range(k):
self.GeneratePrivateKey()
adr=self.BitcoinAddress()
p=self.PrivateEncoding()
f.write("#%s\n%s 2014-01-30T12:00:00Z\n"%(adr,p))
#print hex(self.d)
print adr,p
f.close()
def TestSign(self):
#Test signature
self.GeneratePrivateKey()
self.Q=self*self.d
m="Hello World"
adresse=self.BitcoinAddresFromPublicKey()
(r,s)=self.SignECDSA(m)
m="Hello World"
print self.VerifyMessageFromBitcoinAddress(adresse,m,(r,s))
def Normalize(self):
#Transform projective coordinates of self to the usual (x,y) coordinates.
if self.x[2]:
self.x[0]=(self.x[0]*InvMod(self.x[2],self.p))%self.p
self.x[1]=(self.x[1]*InvMod(self.x[2],self.p))%self.p
self.x[2]=1
elif self.x[1]:
self.x[0]=(self.x[0]*InvMod(self.x[1],self.p))%self.p
self.x[1]=1
elif self.x[0]:
self.x[0]=1
else:
raise Exception
def Bitcoin():
#Create the Bitcoin elliptic curve (secp256k1)
a=0
b=7
p=2**256-2**32-2**9-2**8-2**7-2**6-2**4-1
#Create the generator G of the Bitcoin elliptic curve, with its order n.
Gx=int("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",16)
Gy=int("483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8",16)
n =int("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",16)
#Create the generator
return EllipticCurvePoint([Gx,Gy,1],a,b,p,n)
if __name__ == "__main__":
bitcoin=Bitcoin()
#Generate the public key from the private one
print bitcoin.BitcoinAddressFromPrivate("23DKRBLkeDbcSaddsMYLAHXhanPmGwkWAhSPVGbspAkc72Hw9BdrDF")
print bitcoin.BitcoinAddress()
#Print the bitcoin address of the public key generated at the previous line
adr=bitcoin.BitcoinAddresFromPublicKey()
print adr
#Sign a message with the current address
m="Hello World"
sig=bitcoin.SignECDSA("Hello World")
#Verify the message using only the bitcoin address, the signature and the message.
#Not using the public key as it is not needed.
print bitcoin.VerifyMessageFromBitcoinAddress(adr,m,sig)

View file

@@ -1,461 +0,0 @@
import random
import hashlib
import base64
import math
import twofish  # assumed third-party module; referenced by CryptAddr() below but not imported in the original
class GaussInt:
def __init__(self,x,y,n,p=0):
if p:
self.x=x%p
self.y=y%p
self.n=n%p
else:
self.x=x
self.y=y
self.n=n
self.p=p
def __add__(self,b):
return GaussInt(self.x+b.x,self.y+b.y,self.n,self.p)
def __sub__(self,b):
return GaussInt(self.x-b.x,self.y-b.y,self.n,self.p)
def __mul__(self,b):
return GaussInt(self.x*b.x+self.n*self.y*b.y,self.x*b.y+self.y*b.x,self.n,self.p)
def __div__(self,b):
return GaussInt((self.x*b.x-self.n*self.y*b.y)/(b.x*b.x-self.n*b.y*b.y),(-self.x*b.y+self.y*b.x)/(b.x*b.x-self.n*b.y*b.y),self.n,self.p)
def __eq__(self,b):
return self.x==b.x and self.y==b.y
def __repr__(self):
if self.p:
return "%s+%s (%d,%d)"%(self.x,self.y,self.n,self.p)
else:
return "%s+%s (%d)"%(self.x,self.y,self.n)
def __pow__(self,n):
b=Base(n,2)
t=GaussInt(1,0,self.n)
while b:
t=t*t
if b.pop():
t=self*t
return t
def Inv(self):
return GaussInt(self.x/(self.x*self.x-self.n*self.y*self.y),-self.y/(self.x*self.x-self.n*self.y*self.y),self.n,self.p)
def Eval(self):
return self.x.Eval()+self.y.Eval()*math.sqrt(self.n)
def Cipolla(a,p):
b=0
while pow((b*b-a)%p,(p-1)/2,p)==1:
b+=1
return (GaussInt(b,1,b**2-a,p)**((p+1)/2)).x
def InvMod(a,n):
m=[]
s=n
while n:
m.append(a/n)
(a,n)=(n,a%n)
u=1
v=0
while m:
(u,v)=(v,u-m.pop()*v)
return u%s
def Base(n,b):
l=[]
while n:
l.append(n%b)
n/=b
return l
def MsgMagic(message):
return "\x18Bitcoin Signed Message:\n"+chr(len(message))+message
def Hash(m,method):
h=hashlib.new(method)
h.update(m)
return h.digest()
def b58encode(v):
#Encode a byte string to the Base58
digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
base=len(digit)
val=0
for c in v:
val*=256
val+=ord(c)
result=""
while val:
(val,mod)=divmod(val,base)
result=digit[mod]+result
pad=0
for c in v:
if c=="\x00":
pad+=1
else:
break
return (digit[0]*pad)+result
def b58decode(v):
#Decode a Base58 string to byte string
digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
base=len(digit)
val=0
for c in v:
val*=base
val+=digit.find(c)
result=""
while val:
(val,mod)=divmod(val,256)
result=chr(mod)+result
pad=0
for c in v:
if c==digit[0]:
pad+=1
else:
break
return "\x00"*pad+result
def Byte2Int(b):
n=0
for x in b:
n*=256
n+=ord(x)
return n
def Byte2Hex(b):
#Convert a byte string to hex number
out=""
for x in b:
y=hex(ord(x))[2:]
if len(y)==1:
y="0"+y
out+="%2s"%y
return out
def Int2Byte(n,b):
#Convert an integer to a byte string of length b
out=""
for _ in range(b):
(n,m)=divmod(n,256)
out=chr(m)+out
return out
class EllipticCurvePoint:
#Main class
#It's a point on an Elliptic Curve
def __init__(self,x,a,b,p,n=0):
#We store the coordinate in x and the elliptic curve parameter.
#x is of length 3. This is the 3 projective coordinates of the point.
self.x=x[:]
self.a=a
self.b=b
self.p=p
self.n=n
def __add__(self,y):
#The main function to add self and y
#It uses the formulas I derived in projective coordinates.
#Projective coordinates are more efficient than the usual (x,y) coordinates
#because they avoid computing inverses mod p, which makes point addition faster.
z=EllipticCurvePoint([0,0,0],self.a,self.b,self.p)
if self==y:
d=(2*self.x[1]*self.x[2])%self.p
d3=pow(d,3,self.p)
n=(3*pow(self.x[0],2,self.p)+self.a*pow(self.x[2],2,self.p))%self.p
z.x[0]=(pow(n,2,self.p)*d*self.x[2]-2*d3*self.x[0])%self.p
z.x[1]=(3*self.x[0]*n*pow(d,2,self.p)-pow(n,3,self.p)*self.x[2]-self.x[1]*d3)%self.p
z.x[2]=(self.x[2]*d3)%self.p
else:
d=(y.x[0]*self.x[2]-y.x[2]*self.x[0])%self.p
d3=pow(d,3,self.p)
n=(y.x[1]*self.x[2]-self.x[1]*y.x[2])%self.p
z.x[0]=(y.x[2]*self.x[2]*pow(n,2,self.p)*d-d3*(y.x[2]*self.x[0]+y.x[0]*self.x[2]))%self.p
z.x[1]=(pow(d,2,self.p)*n*(2*self.x[0]*y.x[2]+y.x[0]*self.x[2])-pow(n,3,self.p)*self.x[2]*y.x[2]-self.x[1]*d3*y.x[2])%self.p
z.x[2]=(self.x[2]*d3*y.x[2])%self.p
return z
def __mul__(self,n):
#Fast scalar multiplication: add the point to itself n times using double-and-add.
b=Base(n,2)
t=EllipticCurvePoint(self.x,self.a,self.b,self.p)
b.pop()
while b:
t+=t
if b.pop():
t+=self
return t
def __repr__(self):
#print a point in (x,y) coordinate.
return "x=%d\ny=%d\n"%((self.x[0]*InvMod(self.x[2],self.p))%self.p,(self.x[1]*InvMod(self.x[2],self.p))%self.p)
def __eq__(self,y):
#Does self==y ?
#It computes the cross product of self with y and checks whether the result is 0.
return self.x[0]*y.x[1]==self.x[1]*y.x[0] and self.x[1]*y.x[2]==self.x[2]*y.x[1] and self.x[2]*y.x[0]==self.x[0]*y.x[2] and self.a==y.a and self.b==y.b and self.p==y.p
def __ne__(self,y):
#Does self!=y ?
return not (self == y)
def Normalize(self):
#Transform projective coordinates of self to the usual (x,y) coordinates.
if self.x[2]:
self.x[0]=(self.x[0]*InvMod(self.x[2],self.p))%self.p
self.x[1]=(self.x[1]*InvMod(self.x[2],self.p))%self.p
self.x[2]=1
elif self.x[1]:
self.x[0]=(self.x[0]*InvMod(self.x[1],self.p))%self.p
self.x[1]=1
elif self.x[0]:
self.x[0]=1
else:
raise Exception
def Check(self):
#Is self on the curve ?
return (self.x[0]**3+self.a*self.x[0]*self.x[2]**2+self.b*self.x[2]**3-self.x[1]**2*self.x[2])%self.p==0
def CryptAddr(self,filename,password,Address):
txt=""
for tag in Address:
(addr,priv)=Address[tag]
if priv:
txt+="%s\t%s\t%s\n"%(tag,addr,priv)
else:
txt+="%s\t%s\t\n"%(tag,addr)
txt+="\x00"*(15-(len(txt)-1)%16)
password+="\x00"*(15-(len(password)-1)%16)
crypt=twofish.Twofish(password).encrypt(txt)
f=open(filename,"wb")
f.write(crypt)
f.close()
def GenerateD(self):
#Generate a private key. It's just a random number between 1 and n-1.
#Of course, this function isn't cryptographically secure.
#Don't use it to generate your key. Use a cryptographically secure source of randomness instead.
#return random.randint(1,self.n-1)
return random.SystemRandom().randint(1,self.n-1) # Better random fix
def CheckECDSA(self,sig,message,Q):
#Check a signature (r,s) of the message m using the public key self.Q
# and the generator which is self.
#This is not the one used by Bitcoin because the public key isn't known;
# only a hash of the public key is known. See the function VerifyMessageFromAddress.
(r,s)=sig
if Q.x[2]==0:
return False
if not Q.Check():
return False
if (Q*self.n).x[2]!=0:
return False
if r<1 or r>self.n-1 or s<1 or s>self.n-1:
return False
z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256"))
w=InvMod(s,self.n)
u1=(z*w)%self.n
u2=(r*w)%self.n
R=self*u1+Q*u2
R.Normalize()
return (R.x[0]-r)%self.n==0
def SignMessage(self,message,priv):
#Sign a message with the private key decoded from priv.
(d,uncompressed)=self.DFromPriv(priv)
z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256"))
r=0
s=0
while not r or not s:
#k=random.randint(1,self.n-1)
k=random.SystemRandom().randint(1,self.n-1) # Better random fix
R=self*k
R.Normalize()
r=R.x[0]%self.n
s=(InvMod(k,self.n)*(z+r*d))%self.n
val=27
if not uncompressed:
val+=4
return base64.standard_b64encode(chr(val)+Int2Byte(r,32)+Int2Byte(s,32))
def VerifyMessageFromAddress(self,addr,message,sig):
#Check a signature (r,s) for the message m signed by the Bitcoin
# address "addr".
sign=base64.standard_b64decode(sig)
(r,s)=(Byte2Int(sign[1:33]),Byte2Int(sign[33:65]))
z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256"))
val=ord(sign[0])
if val<27 or val>=35:
return False
if val>=31:
uncompressed=False
val-=4
else:
uncompressed=True
x=r
y2=(pow(x,3,self.p) + self.a*x + self.b) % self.p
y=Cipolla(y2,self.p)
for _ in range(2):
kG=EllipticCurvePoint([x,y,1],self.a,self.b,self.p,self.n)
mzG=self*((-z)%self.n)
Q=(kG*s+mzG)*InvMod(r,self.n)
if self.AddressFromPublicKey(Q,uncompressed)==addr:
return True
y=self.p-y
return False
def AddressFromPrivate(self,priv):
#Transform a private key to a bitcoin address.
(d,uncompressed)=self.DFromPriv(priv)
return self.AddressFromD(d,uncompressed)
def PrivFromD(self,d,uncompressed):
#Encode a private key d to base58 encoding.
p=Int2Byte(d,32)
p="\x80"+p
if not uncompressed:
p+=chr(1)
cs=Hash(Hash(p,"SHA256"),"SHA256")[:4]
return b58encode(p+cs)
def DFromPriv(self,priv):
uncompressed=(len(priv)==51)
priv=b58decode(priv)
if uncompressed:
priv=priv[:-4]
else:
priv=priv[:-5]
return (Byte2Int(priv[1:]),uncompressed)
def AddressFromPublicKey(self,Q,uncompressed):
#Find the bitcoin address from the public key Q
#We do normalization to go from the projective coordinates to the usual
# (x,y) coordinates.
Q.Normalize()
if uncompressed:
pk=chr(4)+Int2Byte(Q.x[0],32)+Int2Byte(Q.x[1],32)
else:
pk=chr(2+Q.x[1]%2)+Int2Byte(Q.x[0],32)
kh=chr(0)+Hash(Hash(pk,"SHA256"),"RIPEMD160")
cs=Hash(Hash(kh,"SHA256"),"SHA256")[:4]
return b58encode(kh+cs)
def AddressFromD(self,d,uncompressed):
#Compute a bitcoin address given the private key d.
return self.AddressFromPublicKey(self*d,uncompressed)
def IsValid(self,addr):
adr=b58decode(addr)
kh=adr[:-4]
cs=adr[-4:]
verif=Hash(Hash(kh,"SHA256"),"SHA256")[:4]
return cs==verif
def AddressGenerator(self,k,uncompressed=True):
#Generate k Bitcoin addresses and return them together with their private keys.
liste={}
for i in range(k):
d=self.GenerateD()
addr=self.AddressFromD(d,uncompressed)
priv=self.PrivFromD(d,uncompressed)
liste[i]=[addr,priv]
print "%s %s"%(addr, priv)
return liste
def Bitcoin():
a=0
b=7
p=2**256-2**32-2**9-2**8-2**7-2**6-2**4-1
Gx=int("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",16)
Gy=int("483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8",16)
n=int("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",16)
return EllipticCurvePoint([Gx,Gy,1],a,b,p,n)
def main():
bitcoin=Bitcoin()
#Generate an address from the private key
privkey = "PrivatekeyinBase58"
adr = bitcoin.AddressFromPrivate(privkey)
print "Address : ", adr
#Sign a message with the current address
m="Hello World"
sig=bitcoin.SignMessage("Hello World", privkey)
#Verify the message using only the bitcoin address, the signature and the message.
#Not using the public key as it is not needed.
if bitcoin.VerifyMessageFromAddress(adr,m,sig):
print "Message verified"
#Generate some addresses
print "Here are some adresses and associated private keys"
bitcoin.AddressGenerator(10)
if __name__ == "__main__": main()

View file

@@ -1,22 +0,0 @@
Copyright 2006 Dan-Haim. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of Dan Haim nor the names of his contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.

View file

@@ -1,299 +0,0 @@
PySocks
=======
Updated version of SocksiPy. Many old bugs fixed, and overall code cleanup.
Acts as a drop-in replacement to the socket module.
----------------
Features
========
* Fully supports Python 2.6 - 3.4
* UDP support
* SocksiPyHandler, courtesy of e000, was also added as an example of how this module can be used with urllib2; see the example code in sockshandler.py and the short sketch after this list. `pip install` and `setup.py install` will automatically install the `sockshandler` module.
* Bugs in the original SocksiPy were fixed, including two that could lead to infinite hanging when communicating with bad proxy servers.
* urllib3, which powers the requests module, is working on integrating SOCKS proxy support based on this branch
* `SOCKS5`, `SOCKS4`, and `HTTP` are now aliases for `PROXY_TYPE_SOCKS5`, `PROXY_TYPE_SOCKS4`, and `PROXY_TYPE_HTTP`
* Tests added
* Various style and performance improvements; codebase simplified
* Actively maintained
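For instance, the SocksiPyHandler mentioned above plugs into urllib2 in a few lines. A minimal sketch, adapted from the sockshandler.py example included later in this commit (the localhost:9050 SOCKS5 proxy is only a placeholder):
import socks
import urllib2  # urllib.request on Python 3
from sockshandler import SocksiPyHandler
# Build an opener whose HTTP(S) connections are tunneled through the SOCKS5 proxy.
opener = urllib2.build_opener(SocksiPyHandler(socks.SOCKS5, "localhost", 9050))
print opener.open("http://httpbin.org/ip").read()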
Installation
============
pip install PySocks
Or download the tarball / `git clone` and...
python setup.py install
These will install both the `socks` and `sockshandler` modules.
Alternatively, include just `socks.py` in your project.
--------------------------------------------
*Warning:* PySocks/SocksiPy only supports HTTP proxies that use CONNECT tunneling. Certain HTTP proxies may not work with this library. If you wish to use HTTP proxies (and not SOCKS proxies), it is recommended that you rely on your HTTP client's native proxy support (`proxies` dict for `requests`, or `urllib2.ProxyHandler` for `urllib2`) instead.
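If you follow that advice for plain HTTP proxying, the client's own support is usually a one-liner. A minimal sketch using the `proxies` dict of the third-party `requests` library (the proxy URL is a placeholder):
import requests
# Route both plain and TLS requests through a CONNECT-capable HTTP proxy.
proxies = {"http": "http://127.0.0.1:8888", "https": "http://127.0.0.1:8888"}
print requests.get("http://httpbin.org/ip", proxies=proxies).text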
--------------------------------------------
Usage
=====
## Example ##
import socks
s = socks.socksocket()
s.set_proxy(socks.SOCKS5, "localhost") # SOCKS4 and SOCKS5 use port 1080 by default
# Or
s.set_proxy(socks.SOCKS4, "localhost", 4444)
# Or
s.set_proxy(socks.HTTP, "5.5.5.5", 8888)
# Can be treated identically to a regular socket object
s.connect(("www.test.com", 80))
s.sendall("GET / ...")
print s.recv(4096)
To monkeypatch the entire standard library with a single default proxy:
import socket
import socks
import urllib2
socks.set_default_proxy(socks.SOCKS5, "localhost")
socket.socket = socks.socksocket
urllib2.urlopen("http://...") # All requests will pass through the SOCKS proxy
Note that monkeypatching may not work for all standard modules or for all third party modules, and generally isn't recommended.
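If some patching is still needed, `socks.py` (included later in this commit) also provides a `wrap_module()` helper for this pattern once a default proxy has been set. A minimal sketch (`ftplib` is just an arbitrary module that imports `socket`):
import ftplib
import socks
socks.set_default_proxy(socks.SOCKS5, "localhost")
socks.wrap_module(ftplib)  # points ftplib's socket class at socksocket
# ftplib connections made after this will go through the default SOCKS5 proxy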
--------------------------------------------
Original SocksiPy README attached below, amended to reflect API changes.
--------------------------------------------
SocksiPy - version 1.5.0
A Python SOCKS module.
(C) 2006 Dan-Haim. All rights reserved.
See LICENSE file for details.
*WHAT IS A SOCKS PROXY?*
A SOCKS proxy is a proxy server at the TCP level. In other words, it acts as
a tunnel, relaying all traffic going through it without modifying it.
SOCKS proxies can be used to relay traffic using any network protocol that
uses TCP.
*WHAT IS SOCKSIPY?*
This Python module allows you to create TCP connections through a SOCKS
proxy without any special effort.
It also supports relaying UDP packets with a SOCKS5 proxy.
*PROXY COMPATIBILITY*
SocksiPy is compatible with three different types of proxies:
1. SOCKS Version 4 (SOCKS4), including the SOCKS4a extension.
2. SOCKS Version 5 (SOCKS5).
3. HTTP Proxies which support tunneling using the CONNECT method.
*SYSTEM REQUIREMENTS*
Being written in Python, SocksiPy can run on any platform that has a Python
interpreter and TCP/IP support.
This module has been tested with Python 2.3 and should work with newer versions as well.
INSTALLATION
-------------
Simply copy the file "socks.py" to your Python's `lib/site-packages` directory,
and you're ready to go. [Editor's note: it is better to use `python setup.py install` for PySocks]
USAGE
------
First load the socks module with the command:
>>> import socks
>>>
The socks module provides a class called `socksocket`, which is the base to all of the module's functionality.
The `socksocket` object has the same initialization parameters as the normal socket
object to ensure maximal compatibility; note, however, that `socksocket` will only work
with the family set to `AF_INET` and the type set to either `SOCK_STREAM` or `SOCK_DGRAM`.
Generally, it is best to initialize the `socksocket` object with no parameters:
>>> s = socks.socksocket()
>>>
The `socksocket` object has an interface which is very similar to socket's (in fact
the `socksocket` class is derived from socket) with a few extra methods.
To select the proxy server you would like to use, use the `set_proxy` method, whose
syntax is:
set_proxy(proxy_type, addr[, port[, rdns[, username[, password]]]])
Explanation of the parameters:
`proxy_type` - The type of the proxy server. This can be one of three possible
choices: `PROXY_TYPE_SOCKS4`, `PROXY_TYPE_SOCKS5` and `PROXY_TYPE_HTTP` for SOCKS4,
SOCKS5 and HTTP servers respectively. `SOCKS4`, `SOCKS5`, and `HTTP` are aliases for these constants, respectively.
`addr` - The IP address or DNS name of the proxy server.
`port` - The port of the proxy server. Defaults to 1080 for SOCKS and 8080 for HTTP.
`rdns` - This is a boolean flag that modifies the behavior regarding DNS resolving.
If it is set to True, DNS resolving will be performed remotely, on the server.
If it is set to False, DNS resolving will be performed locally. Please note that
setting this to True with SOCKS4 servers actually uses an extension to the protocol,
called SOCKS4a, which may not be supported on all servers (SOCKS5 and HTTP servers
always support DNS). The default is True.
`username` - For SOCKS5 servers, this allows simple username / password authentication
with the server. For SOCKS4 servers, this parameter will be sent as the userid.
This parameter is ignored if an HTTP server is being used. If it is not provided,
authentication will not be used (servers may accept unauthenticated requests).
`password` - This parameter is valid only for SOCKS5 servers and specifies the
respective password for the username provided.
Example of usage:
>>> s.set_proxy(socks.SOCKS5, "socks.example.com") # uses default port 1080
>>> s.set_proxy(socks.SOCKS4, "socks.test.com", 1081)
After the set_proxy method has been called, simply call the connect method with the
traditional parameters to establish a connection through the proxy:
>>> s.connect(("www.sourceforge.net", 80))
>>>
Connection will take a bit longer to allow negotiation with the proxy server.
Please note that calling connect without calling `set_proxy` earlier will connect
without a proxy (just like a regular socket).
Errors: Any errors in the connection process will trigger exceptions. The exception
may either be generated by the underlying socket layer or may be custom module
exceptions, whose details follow:
class `ProxyError` - This is a base exception class. It is not raised directly but
rather all other exception classes raised by this module are derived from it.
This allows an easy way to catch all proxy-related errors. It descends from `IOError`.
All `ProxyError` exceptions have an attribute `socket_err`, which will contain either a
caught `socket.error` exception, or `None` if there wasn't any.
class `GeneralProxyError` - When thrown, it indicates a problem which does not fall
into another category.
* `Sent invalid data` - This error means that unexpected data has been received from
the server. The most common reason is that the server specified as the proxy is
not really a SOCKS4/SOCKS5/HTTP proxy, or maybe the proxy type specified is wrong.
* `Connection closed unexpectedly` - The proxy server unexpectedly closed the connection.
This may indicate that the proxy server is experiencing network or software problems.
* `Bad proxy type` - This will be raised if the type of the proxy supplied to the
set_proxy function was not one of `SOCKS4`/`SOCKS5`/`HTTP`.
* `Bad input` - This will be raised if the `connect()` method is called with bad input
parameters.
class `SOCKS5AuthError` - This indicates that the connection through a SOCKS5 server
failed due to an authentication problem.
* `Authentication is required` - This will happen if you use a SOCKS5 server which
requires authentication without providing a username / password at all.
* `All offered authentication methods were rejected` - This will happen if the proxy
requires a special authentication method which is not supported by this module.
* `Unknown username or invalid password` - Self descriptive.
class `SOCKS5Error` - This will be raised for SOCKS5 errors which are not related to
authentication.
The parameter is a tuple containing a code, as given by the server, and a
description of the error. The possible errors, according to the RFC, are:
* `0x01` - General SOCKS server failure - If for any reason the proxy server is unable to
fulfill your request (internal server error).
* `0x02` - connection not allowed by ruleset - If the address you're trying to connect to
is blacklisted on the server or requires authentication.
* `0x03` - Network unreachable - The target could not be contacted. A router on the network
had replied with a destination net unreachable error.
* `0x04` - Host unreachable - The target could not be contacted. A router on the network
had replied with a destination host unreachable error.
* `0x05` - Connection refused - The target server has actively refused the connection
(the requested port is closed).
* `0x06` - TTL expired - The TTL value of the SYN packet from the proxy to the target server
has expired. This usually means that there are network problems causing the packet
to be caught in a router-to-router "ping-pong".
* `0x07` - Command not supported - For instance if the server does not support UDP.
* `0x08` - Address type not supported - The client has provided an invalid address type.
When using this module, this error should not occur.
class `SOCKS4Error` - This will be raised for SOCKS4 errors. The parameter is a tuple
containing a code and a description of the error, as given by the server. The
possible errors, according to the specification, are:
* `0x5B` - Request rejected or failed - Will be raised in the event of a failure for any
reason other than the two mentioned next.
* `0x5C` - request rejected because SOCKS server cannot connect to identd on the client -
The SOCKS server tried an ident lookup on your computer and it failed. In this
case you should run an identd server and/or configure your firewall to allow incoming
connections to local port 113 from the remote server.
* `0x5D` - request rejected because the client program and identd report different user-ids -
The SOCKS server performed an ident lookup on your computer and received a
different userid than the one you provided. Change your userid (through the
username parameter of the set_proxy method) to match and try again.
class `HTTPError` - This will be raised for HTTP errors. The message will contain
the HTTP status code and provided error message.
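Because the exception classes above all derive from `ProxyError`, a single except clause can catch any proxy failure while more specific handlers remain possible. A minimal sketch in the Python 2 style of the earlier examples (the proxy address is a placeholder):
import socks

s = socks.socksocket()
s.set_proxy(socks.SOCKS5, "127.0.0.1", 1080)  # placeholder proxy
try:
    s.connect(("www.example.com", 80))
except socks.SOCKS5AuthError as e:
    print "Authentication problem:", e
except socks.GeneralProxyError as e:
    print "General proxy failure:", e
except socks.ProxyError as e:
    print "Other proxy-related error:", e, e.socket_err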
After establishing the connection, the object behaves like a standard socket.
Methods like `makefile()` and `settimeout()` should behave just like regular sockets.
Call the `close()` method to close the connection.
In addition to the `socksocket` class, an additional function worth mentioning is the
`set_default_proxy` function. The parameters are the same as the `set_proxy` method.
This function will set default proxy settings for newly created `socksocket` objects,
in which the proxy settings haven't been changed via the `set_proxy` method.
This is quite useful if you wish to force 3rd party modules to use a SOCKS proxy,
by overriding the socket object.
For example:
>>> socks.set_default_proxy(socks.SOCKS5, "socks.example.com")
>>> socket.socket = socks.socksocket
>>> urllib.urlopen("http://www.sourceforge.net/")
PROBLEMS
---------
Please open a GitHub issue at https://github.com/Anorov/PySocks

View file

@@ -1,17 +0,0 @@
#!/usr/bin/env python
from distutils.core import setup
VERSION = "1.5.3"
setup(
name = "PySocks",
version = VERSION,
description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information.",
url = "https://github.com/Anorov/PySocks",
license = "BSD",
author = "Anorov",
author_email = "anorov.vorona@gmail.com",
keywords = ["socks", "proxy"],
py_modules=["socks", "sockshandler"]
)

View file

@@ -1,699 +0,0 @@
"""
SocksiPy - Python SOCKS module.
Version 1.5.3
Copyright 2006 Dan-Haim. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of Dan Haim nor the names of his contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
This module provides a standard socket-like interface for Python
for tunneling connections through SOCKS proxies.
===============================================================================
Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
for use in PyLoris (http://pyloris.sourceforge.net/)
Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
mainly to merge bug fixes found in Sourceforge
Modifications made by Anorov (https://github.com/Anorov)
-Forked and renamed to PySocks
-Fixed issue with HTTP proxy failure checking (same bug that was in the old ___recvall() method)
-Included SocksiPyHandler (sockshandler.py), to be used as a urllib2 handler,
courtesy of e000 (https://github.com/e000): https://gist.github.com/869791#file_socksipyhandler.py
-Re-styled code to make it readable
-Aliased PROXY_TYPE_SOCKS5 -> SOCKS5 etc.
-Improved exception handling and output
-Removed irritating use of sequence indexes, replaced with tuple unpacked variables
-Fixed up Python 3 bytestring handling - chr(0x03).encode() -> b"\x03"
-Other general fixes
-Added clarification that the HTTP proxy connection method only supports CONNECT-style tunneling HTTP proxies
-Various small bug fixes
"""
__version__ = "1.5.3"
import socket
import struct
from errno import EOPNOTSUPP, EINVAL, EAGAIN
from io import BytesIO
from os import SEEK_CUR
from collections import Callable
PROXY_TYPE_SOCKS4 = SOCKS4 = 1
PROXY_TYPE_SOCKS5 = SOCKS5 = 2
PROXY_TYPE_HTTP = HTTP = 3
PROXY_TYPES = {"SOCKS4": SOCKS4, "SOCKS5": SOCKS5, "HTTP": HTTP}
PRINTABLE_PROXY_TYPES = dict(zip(PROXY_TYPES.values(), PROXY_TYPES.keys()))
_orgsocket = _orig_socket = socket.socket
class ProxyError(IOError):
"""
socket_err contains original socket.error exception.
"""
def __init__(self, msg, socket_err=None):
self.msg = msg
self.socket_err = socket_err
if socket_err:
self.msg += ": {0}".format(socket_err)
def __str__(self):
return self.msg
class GeneralProxyError(ProxyError): pass
class ProxyConnectionError(ProxyError): pass
class SOCKS5AuthError(ProxyError): pass
class SOCKS5Error(ProxyError): pass
class SOCKS4Error(ProxyError): pass
class HTTPError(ProxyError): pass
SOCKS4_ERRORS = { 0x5B: "Request rejected or failed",
0x5C: "Request rejected because SOCKS server cannot connect to identd on the client",
0x5D: "Request rejected because the client program and identd report different user-ids"
}
SOCKS5_ERRORS = { 0x01: "General SOCKS server failure",
0x02: "Connection not allowed by ruleset",
0x03: "Network unreachable",
0x04: "Host unreachable",
0x05: "Connection refused",
0x06: "TTL expired",
0x07: "Command not supported, or protocol error",
0x08: "Address type not supported"
}
DEFAULT_PORTS = { SOCKS4: 1080,
SOCKS5: 1080,
HTTP: 8080
}
def set_default_proxy(proxy_type=None, addr=None, port=None, rdns=True, username=None, password=None):
"""
set_default_proxy(proxy_type, addr[, port[, rdns[, username, password]]])
Sets a default proxy which all further socksocket objects will use,
unless explicitly changed. All parameters are as for socksocket.set_proxy().
"""
socksocket.default_proxy = (proxy_type, addr, port, rdns,
username.encode() if username else None,
password.encode() if password else None)
setdefaultproxy = set_default_proxy
def get_default_proxy():
"""
Returns the default proxy, set by set_default_proxy.
"""
return socksocket.default_proxy
getdefaultproxy = get_default_proxy
def wrap_module(module):
"""
Attempts to replace a module's socket library with a SOCKS socket. Must set
a default proxy using set_default_proxy(...) first.
This will only work on modules that import socket directly into the namespace;
most of the Python Standard Library falls into this category.
"""
if socksocket.default_proxy:
module.socket.socket = socksocket
else:
raise GeneralProxyError("No default proxy specified")
wrapmodule = wrap_module
def create_connection(dest_pair, proxy_type=None, proxy_addr=None,
proxy_port=None, proxy_username=None,
proxy_password=None, timeout=None,
source_address=None):
"""create_connection(dest_pair, *[, timeout], **proxy_args) -> socket object
Like socket.create_connection(), but connects to proxy
before returning the socket object.
dest_pair - 2-tuple of (IP/hostname, port).
**proxy_args - Same args passed to socksocket.set_proxy() if present.
timeout - Optional socket timeout value, in seconds.
source_address - tuple (host, port) for the socket to bind to as its source
address before connecting (only for compatibility)
"""
sock = socksocket()
if isinstance(timeout, (int, float)):
sock.settimeout(timeout)
if proxy_type is not None:
sock.set_proxy(proxy_type, proxy_addr, proxy_port,
proxy_username, proxy_password)
sock.connect(dest_pair)
return sock
class _BaseSocket(socket.socket):
"""Allows Python 2's "delegated" methods such as send() to be overridden
"""
def __init__(self, *pos, **kw):
_orig_socket.__init__(self, *pos, **kw)
self._savedmethods = dict()
for name in self._savenames:
self._savedmethods[name] = getattr(self, name)
delattr(self, name) # Allows normal overriding mechanism to work
_savenames = list()
def _makemethod(name):
return lambda self, *pos, **kw: self._savedmethods[name](*pos, **kw)
for name in ("sendto", "send", "recvfrom", "recv"):
method = getattr(_BaseSocket, name, None)
# Determine if the method is not defined the usual way
# as a function in the class.
# Python 2 uses __slots__, so there are descriptors for each method,
# but they are not functions.
if not isinstance(method, Callable):
_BaseSocket._savenames.append(name)
setattr(_BaseSocket, name, _makemethod(name))
class socksocket(_BaseSocket):
"""socksocket([family[, type[, proto]]]) -> socket object
Open a SOCKS enabled socket. The parameters are the same as
those of the standard socket init. In order for SOCKS to work,
you must specify family=AF_INET and proto=0.
The "type" argument must be either SOCK_STREAM or SOCK_DGRAM.
"""
default_proxy = None
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
if type not in (socket.SOCK_STREAM, socket.SOCK_DGRAM):
msg = "Socket type must be stream or datagram, not {!r}"
raise ValueError(msg.format(type))
_BaseSocket.__init__(self, family, type, proto, _sock)
self._proxyconn = None # TCP connection to keep UDP relay alive
if self.default_proxy:
self.proxy = self.default_proxy
else:
self.proxy = (None, None, None, None, None, None)
self.proxy_sockname = None
self.proxy_peername = None
def _readall(self, file, count):
"""
Receive EXACTLY the number of bytes requested from the file object.
Blocks until the required number of bytes have been received.
"""
data = b""
while len(data) < count:
d = file.read(count - len(data))
if not d:
raise GeneralProxyError("Connection closed unexpectedly")
data += d
return data
def set_proxy(self, proxy_type=None, addr=None, port=None, rdns=True, username=None, password=None):
"""set_proxy(proxy_type, addr[, port[, rdns[, username[, password]]]])
Sets the proxy to be used.
proxy_type - The type of the proxy to be used. Three types
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
addr - The address of the server (IP or DNS).
port - The port of the server. Defaults to 1080 for SOCKS
servers and 8080 for HTTP proxy servers.
rdns - Should DNS queries be performed on the remote side
(rather than the local side). The default is True.
Note: This has no effect with SOCKS4 servers.
username - Username to authenticate with to the server.
The default is no authentication.
password - Password to authenticate with to the server.
Only relevant when username is also provided.
"""
self.proxy = (proxy_type, addr, port, rdns,
username.encode() if username else None,
password.encode() if password else None)
setproxy = set_proxy
def bind(self, *pos, **kw):
"""
Implements proxy connection for UDP sockets,
which happens during the bind() phase.
"""
proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy
if not proxy_type or self.type != socket.SOCK_DGRAM:
return _orig_socket.bind(self, *pos, **kw)
if self._proxyconn:
raise socket.error(EINVAL, "Socket already bound to an address")
if proxy_type != SOCKS5:
msg = "UDP only supported by SOCKS5 proxy type"
raise socket.error(EOPNOTSUPP, msg)
_BaseSocket.bind(self, *pos, **kw)
# Need to specify actual local port because
# some relays drop packets if a port of zero is specified.
# Avoid specifying host address in case of NAT though.
_, port = self.getsockname()
dst = ("0", port)
self._proxyconn = _orig_socket()
proxy = self._proxy_addr()
self._proxyconn.connect(proxy)
UDP_ASSOCIATE = b"\x03"
_, relay = self._SOCKS5_request(self._proxyconn, UDP_ASSOCIATE, dst)
# The relay is most likely on the same host as the SOCKS proxy,
# but some proxies return a private IP address (10.x.y.z)
host, _ = proxy
_, port = relay
_BaseSocket.connect(self, (host, port))
self.proxy_sockname = ("0.0.0.0", 0) # Unknown
def sendto(self, bytes, *args, **kwargs):
if self.type != socket.SOCK_DGRAM:
return _BaseSocket.sendto(self, bytes, *args, **kwargs)
if not self._proxyconn:
self.bind(("", 0))
address = args[-1]
flags = args[:-1]
header = BytesIO()
RSV = b"\x00\x00"
header.write(RSV)
STANDALONE = b"\x00"
header.write(STANDALONE)
self._write_SOCKS5_address(address, header)
sent = _BaseSocket.send(self, header.getvalue() + bytes, *flags, **kwargs)
return sent - header.tell()
def send(self, bytes, flags=0, **kwargs):
if self.type == socket.SOCK_DGRAM:
return self.sendto(bytes, flags, self.proxy_peername, **kwargs)
else:
return _BaseSocket.send(self, bytes, flags, **kwargs)
def recvfrom(self, bufsize, flags=0):
if self.type != socket.SOCK_DGRAM:
return _BaseSocket.recvfrom(self, bufsize, flags)
if not self._proxyconn:
self.bind(("", 0))
buf = BytesIO(_BaseSocket.recv(self, bufsize, flags))
buf.seek(+2, SEEK_CUR)
frag = buf.read(1)
if ord(frag):
raise NotImplementedError("Received UDP packet fragment")
fromhost, fromport = self._read_SOCKS5_address(buf)
if self.proxy_peername:
peerhost, peerport = self.proxy_peername
if fromhost != peerhost or peerport not in (0, fromport):
raise socket.error(EAGAIN, "Packet filtered")
return (buf.read(), (fromhost, fromport))
def recv(self, *pos, **kw):
bytes, _ = self.recvfrom(*pos, **kw)
return bytes
def close(self):
if self._proxyconn:
self._proxyconn.close()
return _BaseSocket.close(self)
def get_proxy_sockname(self):
"""
Returns the bound IP address and port number at the proxy.
"""
return self.proxy_sockname
getproxysockname = get_proxy_sockname
def get_proxy_peername(self):
"""
Returns the IP and port number of the proxy.
"""
return _BaseSocket.getpeername(self)
getproxypeername = get_proxy_peername
def get_peername(self):
"""
Returns the IP address and port number of the destination
machine (note: get_proxy_peername returns the proxy)
"""
return self.proxy_peername
getpeername = get_peername
def _negotiate_SOCKS5(self, *dest_addr):
"""
Negotiates a stream connection through a SOCKS5 server.
"""
CONNECT = b"\x01"
self.proxy_peername, self.proxy_sockname = self._SOCKS5_request(self,
CONNECT, dest_addr)
def _SOCKS5_request(self, conn, cmd, dst):
"""
Send SOCKS5 request with given command (CMD field) and
address (DST field). Returns resolved DST address that was used.
"""
proxy_type, addr, port, rdns, username, password = self.proxy
writer = conn.makefile("wb")
reader = conn.makefile("rb", 0) # buffering=0 renamed in Python 3
try:
# First we'll send the authentication packages we support.
if username and password:
# The username/password details were supplied to the
# set_proxy method so we support the USERNAME/PASSWORD
# authentication (in addition to the standard none).
writer.write(b"\x05\x02\x00\x02")
else:
# No username/password were entered, therefore we
# only support connections with no authentication.
writer.write(b"\x05\x01\x00")
# We'll receive the server's response to determine which
# method was selected
writer.flush()
chosen_auth = self._readall(reader, 2)
if chosen_auth[0:1] != b"\x05":
# Note: string[i:i+1] is used because indexing of a bytestring
# via bytestring[i] yields an integer in Python 3
raise GeneralProxyError("SOCKS5 proxy server sent invalid data")
# Check the chosen authentication method
if chosen_auth[1:2] == b"\x02":
# Okay, we need to perform a basic username/password
# authentication.
writer.write(b"\x01" + chr(len(username)).encode()
+ username
+ chr(len(password)).encode()
+ password)
writer.flush()
auth_status = self._readall(reader, 2)
if auth_status[0:1] != b"\x01":
# Bad response
raise GeneralProxyError("SOCKS5 proxy server sent invalid data")
if auth_status[1:2] != b"\x00":
# Authentication failed
raise SOCKS5AuthError("SOCKS5 authentication failed")
# Otherwise, authentication succeeded
# No authentication is required if 0x00
elif chosen_auth[1:2] != b"\x00":
# Reaching here is always bad
if chosen_auth[1:2] == b"\xFF":
raise SOCKS5AuthError("All offered SOCKS5 authentication methods were rejected")
else:
raise GeneralProxyError("SOCKS5 proxy server sent invalid data")
# Now we can request the actual connection
writer.write(b"\x05" + cmd + b"\x00")
resolved = self._write_SOCKS5_address(dst, writer)
writer.flush()
# Get the response
resp = self._readall(reader, 3)
if resp[0:1] != b"\x05":
raise GeneralProxyError("SOCKS5 proxy server sent invalid data")
status = ord(resp[1:2])
if status != 0x00:
# Connection failed: server returned an error
error = SOCKS5_ERRORS.get(status, "Unknown error")
raise SOCKS5Error("{0:#04x}: {1}".format(status, error))
# Get the bound address/port
bnd = self._read_SOCKS5_address(reader)
return (resolved, bnd)
finally:
reader.close()
writer.close()
def _write_SOCKS5_address(self, addr, file):
"""
Write the host and port, packed for the SOCKS5 protocol, to the given file object,
and return the resolved address as a tuple.
"""
host, port = addr
proxy_type, _, _, rdns, username, password = self.proxy
# If the given destination address is an IP address, we'll
# use the IPv4 address request even if remote resolving was specified.
try:
addr_bytes = socket.inet_aton(host)
file.write(b"\x01" + addr_bytes)
host = socket.inet_ntoa(addr_bytes)
except socket.error:
# Well it's not an IP number, so it's probably a DNS name.
if rdns:
# Resolve remotely
host_bytes = host.encode('idna')
file.write(b"\x03" + chr(len(host_bytes)).encode() + host_bytes)
else:
# Resolve locally
addr_bytes = socket.inet_aton(socket.gethostbyname(host))
file.write(b"\x01" + addr_bytes)
host = socket.inet_ntoa(addr_bytes)
file.write(struct.pack(">H", port))
return host, port
def _read_SOCKS5_address(self, file):
atyp = self._readall(file, 1)
if atyp == b"\x01":
addr = socket.inet_ntoa(self._readall(file, 4))
elif atyp == b"\x03":
length = self._readall(file, 1)
addr = self._readall(file, ord(length))
else:
raise GeneralProxyError("SOCKS5 proxy server sent invalid data")
port = struct.unpack(">H", self._readall(file, 2))[0]
return addr, port
def _negotiate_SOCKS4(self, dest_addr, dest_port):
"""
Negotiates a connection through a SOCKS4 server.
"""
proxy_type, addr, port, rdns, username, password = self.proxy
writer = self.makefile("wb")
reader = self.makefile("rb", 0) # buffering=0 renamed in Python 3
try:
# Check if the destination address provided is an IP address
remote_resolve = False
try:
addr_bytes = socket.inet_aton(dest_addr)
except socket.error:
# It's a DNS name. Check where it should be resolved.
if rdns:
addr_bytes = b"\x00\x00\x00\x01"
remote_resolve = True
else:
addr_bytes = socket.inet_aton(socket.gethostbyname(dest_addr))
# Construct the request packet
writer.write(struct.pack(">BBH", 0x04, 0x01, dest_port))
writer.write(addr_bytes)
# The username parameter is considered userid for SOCKS4
if username:
writer.write(username)
writer.write(b"\x00")
# DNS name if remote resolving is required
# NOTE: This is actually an extension to the SOCKS4 protocol
# called SOCKS4A and may not be supported in all cases.
if remote_resolve:
writer.write(dest_addr.encode('idna') + b"\x00")
writer.flush()
# Get the response from the server
resp = self._readall(reader, 8)
if resp[0:1] != b"\x00":
# Bad data
raise GeneralProxyError("SOCKS4 proxy server sent invalid data")
status = ord(resp[1:2])
if status != 0x5A:
# Connection failed: server returned an error
error = SOCKS4_ERRORS.get(status, "Unknown error")
raise SOCKS4Error("{0:#04x}: {1}".format(status, error))
# Get the bound address/port
self.proxy_sockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
if remote_resolve:
self.proxy_peername = socket.inet_ntoa(addr_bytes), dest_port
else:
self.proxy_peername = dest_addr, dest_port
finally:
reader.close()
writer.close()
def _negotiate_HTTP(self, dest_addr, dest_port):
"""
Negotiates a connection through an HTTP server.
NOTE: This currently only supports HTTP CONNECT-style proxies.
"""
proxy_type, addr, port, rdns, username, password = self.proxy
# If we need to resolve locally, we do this now
addr = dest_addr if rdns else socket.gethostbyname(dest_addr)
self.sendall(b"CONNECT " + addr.encode('idna') + b":" + str(dest_port).encode() +
b" HTTP/1.1\r\n" + b"Host: " + dest_addr.encode('idna') + b"\r\n\r\n")
# We just need the first line to check if the connection was successful
fobj = self.makefile()
status_line = fobj.readline()
fobj.close()
if not status_line:
raise GeneralProxyError("Connection closed unexpectedly")
try:
proto, status_code, status_msg = status_line.split(" ", 2)
except ValueError:
raise GeneralProxyError("HTTP proxy server sent invalid response")
if not proto.startswith("HTTP/"):
raise GeneralProxyError("Proxy server does not appear to be an HTTP proxy")
try:
status_code = int(status_code)
except ValueError:
raise HTTPError("HTTP proxy server did not return a valid HTTP status")
if status_code != 200:
error = "{0}: {1}".format(status_code, status_msg)
if status_code in (400, 403, 405):
# It's likely that the HTTP proxy server does not support the CONNECT tunneling method
error += ("\n[*] Note: The HTTP proxy server may not be supported by PySocks"
" (must be a CONNECT tunnel proxy)")
raise HTTPError(error)
self.proxy_sockname = (b"0.0.0.0", 0)
self.proxy_peername = addr, dest_port
_proxy_negotiators = {
SOCKS4: _negotiate_SOCKS4,
SOCKS5: _negotiate_SOCKS5,
HTTP: _negotiate_HTTP
}
def connect(self, dest_pair):
"""
Connects to the specified destination through a proxy.
Uses the same API as socket's connect().
To select the proxy server, use set_proxy().
dest_pair - 2-tuple of (IP/hostname, port).
"""
dest_addr, dest_port = dest_pair
if self.type == socket.SOCK_DGRAM:
if not self._proxyconn:
self.bind(("", 0))
dest_addr = socket.gethostbyname(dest_addr)
# If the host address is INADDR_ANY or similar, reset the peer
# address so that packets are received from any peer
if dest_addr == "0.0.0.0" and not dest_port:
self.proxy_peername = None
else:
self.proxy_peername = (dest_addr, dest_port)
return
proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy
# Do a minimal input check first
if (not isinstance(dest_pair, (list, tuple))
or len(dest_pair) != 2
or not dest_addr
or not isinstance(dest_port, int)):
raise GeneralProxyError("Invalid destination-connection (host, port) pair")
if proxy_type is None:
# Treat like regular socket object
self.proxy_peername = dest_pair
_BaseSocket.connect(self, (dest_addr, dest_port))
return
proxy_addr = self._proxy_addr()
try:
# Initial connection to proxy server
_BaseSocket.connect(self, proxy_addr)
except socket.error as error:
# Error while connecting to proxy
self.close()
proxy_addr, proxy_port = proxy_addr
proxy_server = "{0}:{1}".format(proxy_addr, proxy_port)
printable_type = PRINTABLE_PROXY_TYPES[proxy_type]
msg = "Error connecting to {0} proxy {1}".format(printable_type,
proxy_server)
raise ProxyConnectionError(msg, error)
else:
# Connected to proxy server, now negotiate
try:
# Calls negotiate_{SOCKS4, SOCKS5, HTTP}
negotiate = self._proxy_negotiators[proxy_type]
negotiate(self, dest_addr, dest_port)
except socket.error as error:
# Wrap socket errors
self.close()
raise GeneralProxyError("Socket error", error)
except ProxyError:
# Protocol error while negotiating with proxy
self.close()
raise
def _proxy_addr(self):
"""
Return proxy address to connect to as tuple object
"""
proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy
proxy_port = proxy_port or DEFAULT_PORTS.get(proxy_type)
if not proxy_port:
raise GeneralProxyError("Invalid proxy type")
return proxy_addr, proxy_port

View file

@@ -1,79 +0,0 @@
#!/usr/bin/env python
"""
SocksiPy + urllib2 handler
version: 0.3
author: e<e@tr0ll.in>
This module provides a Handler which you can use with urllib2 to tunnel your connection through a socks.socksocket socket, without monkeypatching the original socket module...
"""
import ssl
try:
import urllib2
import httplib
except ImportError: # Python 3
import urllib.request as urllib2
import http.client as httplib
import socks # $ pip install PySocks
def merge_dict(a, b):
d = a.copy()
d.update(b)
return d
class SocksiPyConnection(httplib.HTTPConnection):
def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs):
self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password)
httplib.HTTPConnection.__init__(self, *args, **kwargs)
def connect(self):
self.sock = socks.socksocket()
self.sock.setproxy(*self.proxyargs)
if type(self.timeout) in (int, float):
self.sock.settimeout(self.timeout)
self.sock.connect((self.host, self.port))
class SocksiPyConnectionS(httplib.HTTPSConnection):
def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs):
self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password)
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
def connect(self):
sock = socks.socksocket()
sock.setproxy(*self.proxyargs)
if type(self.timeout) in (int, float):
sock.settimeout(self.timeout)
sock.connect((self.host, self.port))
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file)
class SocksiPyHandler(urllib2.HTTPHandler, urllib2.HTTPSHandler):
def __init__(self, *args, **kwargs):
self.args = args
self.kw = kwargs
urllib2.HTTPHandler.__init__(self)
def http_open(self, req):
def build(host, port=None, timeout=0, **kwargs):
kw = merge_dict(self.kw, kwargs)
conn = SocksiPyConnection(*self.args, host=host, port=port, timeout=timeout, **kw)
return conn
return self.do_open(build, req)
def https_open(self, req):
def build(host, port=None, timeout=0, **kwargs):
kw = merge_dict(self.kw, kwargs)
conn = SocksiPyConnectionS(*self.args, host=host, port=port, timeout=timeout, **kw)
return conn
return self.do_open(build, req)
if __name__ == "__main__":
import sys
try:
port = int(sys.argv[1])
except (ValueError, IndexError):
port = 9050
opener = urllib2.build_opener(SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "localhost", port))
print("HTTP: " + opener.open("http://httpbin.org/ip").read().decode())
print("HTTPS: " + opener.open("https://httpbin.org/ip").read().decode())

View file

@@ -1,7 +0,0 @@
try:
string_type = basestring
except NameError:
string_type = str
from .encode import encode
from .decode import decode

View file

@@ -1,141 +0,0 @@
import itertools
import collections
from . import string_type
try:
range = xrange
except NameError:
pass
def decode(data):
'''
Bdecodes data into Python built-in types.
'''
return consume(LookaheadIterator(data))
class LookaheadIterator(collections.Iterator):
'''
An iterator that lets you peek at the next item.
'''
def __init__(self, iterator):
self.iterator, self.next_iterator = itertools.tee(iter(iterator))
# Be one step ahead
self._advance()
def _advance(self):
self.next_item = next(self.next_iterator, None)
def __next__(self):
self._advance()
return next(self.iterator)
# Python 2 compatibility
next = __next__
def consume(stream):
item = stream.next_item
if item is None:
        raise ValueError('Decoding empty data is undefined')
elif item == 'i':
return consume_int(stream)
elif item == 'l':
return consume_list(stream)
elif item == 'd':
return consume_dict(stream)
elif item is not None and item[0].isdigit():
return consume_str(stream)
else:
raise ValueError('Invalid bencode object type: ', item)
def consume_number(stream):
result = ''
while True:
chunk = stream.next_item
if not chunk.isdigit():
return result
elif result.startswith('0'):
raise ValueError('Invalid number')
next(stream)
result += chunk
def consume_int(stream):
if next(stream) != 'i':
raise ValueError()
negative = stream.next_item == '-'
if negative:
next(stream)
result = int(consume_number(stream))
    if negative:
        if result == 0:
            # 'i-0e' is invalid bencode; a plain 'i0e' stays allowed
            raise ValueError('Negative zero is not allowed')
        result *= -1
if next(stream) != 'e':
raise ValueError('Unterminated integer')
return result
def consume_str(stream):
length = int(consume_number(stream))
if next(stream) != ':':
raise ValueError('Malformed string')
result = ''
for i in range(length):
try:
result += next(stream)
except StopIteration:
raise ValueError('Invalid string length')
return result
def consume_list(stream):
if next(stream) != 'l':
raise ValueError()
l = []
while stream.next_item != 'e':
l.append(consume(stream))
if next(stream) != 'e':
raise ValueError('Unterminated list')
return l
def consume_dict(stream):
if next(stream) != 'd':
raise ValueError()
d = {}
while stream.next_item != 'e':
key = consume(stream)
if not isinstance(key, string_type):
raise ValueError('Dictionary keys must be strings')
value = consume(stream)
d[key] = value
if next(stream) != 'e':
raise ValueError('Unterminated dictionary')
return d

View file

@@ -1,22 +0,0 @@
from . import string_type
def encode(obj):
'''
Bencodes the object. The object must be an instance of: str, int, list, or dict.
'''
if isinstance(obj, string_type):
return '{0}:{1}'.format(len(obj), obj)
elif isinstance(obj, int):
return 'i{0}e'.format(obj)
elif isinstance(obj, list):
values = ''.join([encode(o) for o in obj])
return 'l{0}e'.format(values)
elif isinstance(obj, dict):
items = sorted(obj.items())
values = ''.join([encode(str(key)) + encode(value) for key, value in items])
return 'd{0}e'.format(values)
else:
raise TypeError('Unsupported type: {0}. Must be one of: str, int, list, dict.'.format(type(obj)))
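
A short round-trip sketch of the `encode`/`decode` pair above; the flat `bencode` import path is an assumption and should be adjusted to wherever this package is actually vendored:

```python
from bencode import encode, decode  # import path is an assumption

data = {'announce': 'http://tracker.example/announce', 'info': {'length': 3, 'name': 'abc'}}
assert decode(encode(data)) == data   # dicts, lists, ints and strings round-trip
print(encode(['spam', 42]))           # l4:spami42ee
```
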

View file

@@ -1,9 +0,0 @@
This Websocket library for Gevent is written and maintained by
Jeffrey Gelens <jeffrey at noppo.pro>
Contributors:
Denis Bilenko
Lon Ingram

View file

@@ -1,13 +0,0 @@
Copyright 2011-2017 Jeffrey Gelens <jeffrey@noppo.pro>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@@ -1,21 +0,0 @@
VERSION = (0, 10, 1, 'final', 0)
__all__ = [
'WebSocketApplication',
'Resource',
'WebSocketServer',
'WebSocketError',
'get_version'
]
def get_version(*args, **kwargs):
from .utils import get_version
return get_version(*args, **kwargs)
try:
from .resource import WebSocketApplication, Resource
from .server import WebSocketServer
from .exceptions import WebSocketError
except ImportError:
pass
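
The names re-exported here are usually wired together as below; a minimal echo-server sketch, assuming the package is importable as `geventwebsocket` (host, port and URL pattern are placeholders, and only websocket requests are routed):

```python
from collections import OrderedDict

from geventwebsocket import WebSocketServer, WebSocketApplication, Resource

class EchoApplication(WebSocketApplication):
    def on_message(self, message, *args, **kwargs):
        if message is not None:       # receive() yields None once the socket dies
            self.ws.send(message)

server = WebSocketServer(
    ("127.0.0.1", 8000),
    Resource(OrderedDict([("^/echo$", EchoApplication)]))
)
server.serve_forever()
```
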

View file

@@ -1,23 +0,0 @@
from __future__ import absolute_import, division, print_function
import sys
import codecs
PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2
if PY2:
bytes = str
text_type = unicode
string_types = basestring
range_type = xrange
iteritems = lambda x: x.iteritems()
# b = lambda x: x
else:
text_type = str
string_types = str,
range_type = range
iteritems = lambda x: iter(x.items())
# b = lambda x: codecs.latin_1_encode(x)[0]

View file

@@ -1,19 +0,0 @@
from socket import error as socket_error
class WebSocketError(socket_error):
"""
Base class for all websocket errors.
"""
class ProtocolError(WebSocketError):
"""
Raised if an error occurs when de/encoding the websocket protocol.
"""
class FrameTooLargeException(ProtocolError):
"""
Raised if a frame is received that is too large.
"""

View file

@@ -1,6 +0,0 @@
from geventwebsocket.handler import WebSocketHandler
from gunicorn.workers.ggevent import GeventPyWSGIWorker
class GeventWebSocketWorker(GeventPyWSGIWorker):
wsgi_handler = WebSocketHandler

View file

@@ -1,283 +0,0 @@
import base64
import hashlib
from gevent.pywsgi import WSGIHandler
from ._compat import PY3
from .websocket import WebSocket, Stream
from .logging import create_logger
class Client(object):
def __init__(self, address, ws):
self.address = address
self.ws = ws
class WebSocketHandler(WSGIHandler):
"""
Automatically upgrades the connection to a websocket.
    To prevent the WebSocketHandler from calling the underlying WSGI application,
    but only set up the WebSocket negotiations, do:
        mywebsockethandler.prevent_wsgi_call = True
    before calling run_application(). This is useful if you want to do more
    things before calling the app, and want to off-load the WebSocket
    negotiations to this library. Socket.IO needs this, for example, to send
    the 'ack' before yielding control to your WSGI app.
"""
SUPPORTED_VERSIONS = ('13', '8', '7')
GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
def run_websocket(self):
"""
Called when a websocket has been created successfully.
"""
if getattr(self, 'prevent_wsgi_call', False):
return
# In case WebSocketServer is not used
if not hasattr(self.server, 'clients'):
self.server.clients = {}
# Since we're now a websocket connection, we don't care what the
# application actually responds with for the http response
try:
self.server.clients[self.client_address] = Client(
self.client_address, self.websocket)
list(self.application(self.environ, lambda s, h, e=None: []))
finally:
del self.server.clients[self.client_address]
if not self.websocket.closed:
self.websocket.close()
self.environ.update({
'wsgi.websocket': None
})
self.websocket = None
def run_application(self):
if (hasattr(self.server, 'pre_start_hook') and self.server.pre_start_hook):
self.logger.debug("Calling pre-start hook")
if self.server.pre_start_hook(self):
return super(WebSocketHandler, self).run_application()
self.logger.debug("Initializing WebSocket")
self.result = self.upgrade_websocket()
if hasattr(self, 'websocket'):
if self.status and not self.headers_sent:
self.write('')
self.run_websocket()
else:
if self.status:
# A status was set, likely an error so just send the response
if not self.result:
self.result = []
self.process_result()
return
# This handler did not handle the request, so defer it to the
# underlying application object
return super(WebSocketHandler, self).run_application()
def upgrade_websocket(self):
"""
Attempt to upgrade the current environ into a websocket enabled
connection. If successful, the environ dict with be updated with two
new entries, `wsgi.websocket` and `wsgi.websocket_version`.
:returns: Whether the upgrade was successful.
"""
# Some basic sanity checks first
self.logger.debug("Validating WebSocket request")
if self.environ.get('REQUEST_METHOD', '') != 'GET':
# This is not a websocket request, so we must not handle it
self.logger.debug('Can only upgrade connection if using GET method.')
return
upgrade = self.environ.get('HTTP_UPGRADE', '').lower()
if upgrade == 'websocket':
connection = self.environ.get('HTTP_CONNECTION', '').lower()
if 'upgrade' not in connection:
# This is not a websocket request, so we must not handle it
self.logger.warning("Client didn't ask for a connection "
"upgrade")
return
else:
# This is not a websocket request, so we must not handle it
return
if self.request_version != 'HTTP/1.1':
            self.start_response('400 Bad Request', [])
self.logger.warning("Bad server protocol in headers")
return ['Bad protocol version']
if self.environ.get('HTTP_SEC_WEBSOCKET_VERSION'):
return self.upgrade_connection()
else:
self.logger.warning("No protocol defined")
self.start_response('426 Upgrade Required', [
('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))])
return ['No Websocket protocol version defined']
def upgrade_connection(self):
"""
Validate and 'upgrade' the HTTP request to a WebSocket request.
        If the upgrade succeeds, the handler will have called `start_response`
        with a status of `101` and the environ will have been updated with the
        `wsgi.websocket` and `wsgi.websocket_version` keys.
:param environ: The WSGI environ dict.
:param start_response: The callable used to start the response.
:param stream: File like object that will be read from/written to by
the underlying WebSocket object, if created.
        :return: The WSGI response iterator if something went awry.
"""
self.logger.debug("Attempting to upgrade connection")
version = self.environ.get("HTTP_SEC_WEBSOCKET_VERSION")
if version not in self.SUPPORTED_VERSIONS:
msg = "Unsupported WebSocket Version: {0}".format(version)
self.logger.warning(msg)
self.start_response('400 Bad Request', [
('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))
])
return [msg]
key = self.environ.get("HTTP_SEC_WEBSOCKET_KEY", '').strip()
if not key:
# 5.2.1 (3)
msg = "Sec-WebSocket-Key header is missing/empty"
self.logger.warning(msg)
self.start_response('400 Bad Request', [])
return [msg]
try:
key_len = len(base64.b64decode(key))
except TypeError:
msg = "Invalid key: {0}".format(key)
self.logger.warning(msg)
self.start_response('400 Bad Request', [])
return [msg]
if key_len != 16:
# 5.2.1 (3)
msg = "Invalid key: {0}".format(key)
self.logger.warning(msg)
self.start_response('400 Bad Request', [])
return [msg]
# Check for WebSocket Protocols
requested_protocols = self.environ.get(
'HTTP_SEC_WEBSOCKET_PROTOCOL', '')
protocol = None
if hasattr(self.application, 'app_protocol'):
allowed_protocol = self.application.app_protocol(
self.environ['PATH_INFO'])
if allowed_protocol and allowed_protocol in requested_protocols:
protocol = allowed_protocol
self.logger.debug("Protocol allowed: {0}".format(protocol))
self.websocket = WebSocket(self.environ, Stream(self), self)
self.environ.update({
'wsgi.websocket_version': version,
'wsgi.websocket': self.websocket
})
if PY3:
accept = base64.b64encode(
hashlib.sha1((key + self.GUID).encode("latin-1")).digest()
).decode("latin-1")
else:
accept = base64.b64encode(hashlib.sha1(key + self.GUID).digest())
headers = [
("Upgrade", "websocket"),
("Connection", "Upgrade"),
("Sec-WebSocket-Accept", accept)
]
if protocol:
headers.append(("Sec-WebSocket-Protocol", protocol))
self.logger.debug("WebSocket request accepted, switching protocols")
self.start_response("101 Switching Protocols", headers)
@property
def logger(self):
if not hasattr(self.server, 'logger'):
self.server.logger = create_logger(__name__)
return self.server.logger
def log_request(self):
if '101' not in str(self.status):
self.logger.info(self.format_request())
@property
def active_client(self):
return self.server.clients[self.client_address]
def start_response(self, status, headers, exc_info=None):
"""
Called when the handler is ready to send a response back to the remote
        endpoint. A websocket connection may not have been created.
"""
writer = super(WebSocketHandler, self).start_response(
status, headers, exc_info=exc_info)
self._prepare_response()
return writer
def _prepare_response(self):
"""
Sets up the ``pywsgi.Handler`` to work with a websocket response.
This is used by other projects that need to support WebSocket
connections as part of a larger effort.
"""
assert not self.headers_sent
if not self.environ.get('wsgi.websocket'):
# a WebSocket connection is not established, do nothing
return
# So that `finalize_headers` doesn't write a Content-Length header
self.provided_content_length = False
# The websocket is now controlling the response
self.response_use_chunked = False
# Once the request is over, the connection must be closed
self.close_connection = True
# Prevents the Date header from being written
self.provided_date = True

View file

@@ -1,32 +0,0 @@
from __future__ import absolute_import
from logging import getLogger, StreamHandler, getLoggerClass, Formatter, DEBUG, INFO
def create_logger(name, debug=False, format=None):
Logger = getLoggerClass()
class DebugLogger(Logger):
def getEffectiveLevel(x):
if x.level == 0 and debug:
return DEBUG
else:
return Logger.getEffectiveLevel(x)
class DebugHandler(StreamHandler):
def emit(x, record):
StreamHandler.emit(x, record) if debug else None
handler = DebugHandler()
handler.setLevel(DEBUG)
if format:
handler.setFormatter(Formatter(format))
logger = getLogger(name)
del logger.handlers[:]
logger.__class__ = DebugLogger
logger.addHandler(handler)
logger.setLevel(INFO)
return logger

View file

@@ -1,35 +0,0 @@
class BaseProtocol(object):
PROTOCOL_NAME = ''
def __init__(self, app):
self._app = app
def on_open(self):
self.app.on_open()
def on_message(self, message):
self.app.on_message(message)
def on_close(self, reason=None):
self.app.on_close(reason)
@property
def app(self):
if self._app:
return self._app
else:
raise Exception("No application coupled")
@property
def server(self):
if not hasattr(self.app, 'ws'):
return None
return self.app.ws.handler.server
@property
def handler(self):
if not hasattr(self.app, 'ws'):
return None
return self.app.ws.handler

View file

@@ -1,235 +0,0 @@
import inspect
import random
import string
import types
try:
import ujson as json
except ImportError:
try:
import simplejson as json
except ImportError:
import json
from .._compat import range_type, string_types
from ..exceptions import WebSocketError
from .base import BaseProtocol
def export_rpc(arg=None):
if isinstance(arg, types.FunctionType):
arg._rpc = arg.__name__
return arg
def serialize(data):
return json.dumps(data)
class Prefixes(object):
def __init__(self):
self.prefixes = {}
def add(self, prefix, uri):
self.prefixes[prefix] = uri
def resolve(self, curie_or_uri):
if "http://" in curie_or_uri:
return curie_or_uri
elif ':' in curie_or_uri:
prefix, proc = curie_or_uri.split(':', 1)
return self.prefixes[prefix] + proc
else:
raise Exception(curie_or_uri)
class RemoteProcedures(object):
def __init__(self):
self.calls = {}
def register_procedure(self, uri, proc):
self.calls[uri] = proc
def register_object(self, uri, obj):
for k in inspect.getmembers(obj, inspect.ismethod):
if '_rpc' in k[1].__dict__:
proc_uri = uri + k[1]._rpc
self.calls[proc_uri] = (obj, k[1])
def call(self, uri, args):
if uri in self.calls:
proc = self.calls[uri]
# Do the correct call whether it's a function or instance method.
if isinstance(proc, tuple):
if proc[1].__self__ is None:
# Create instance of object and call method
return proc[1](proc[0](), *args)
else:
# Call bound method on instance
return proc[1](*args)
else:
return self.calls[uri](*args)
else:
raise Exception("no such uri '{}'".format(uri))
class Channels(object):
def __init__(self):
self.channels = {}
def create(self, uri, prefix_matching=False):
if uri not in self.channels:
self.channels[uri] = []
# TODO: implement prefix matching
def subscribe(self, uri, client):
if uri in self.channels:
self.channels[uri].append(client)
def unsubscribe(self, uri, client):
if uri not in self.channels:
return
client_index = self.channels[uri].index(client)
self.channels[uri].pop(client_index)
if len(self.channels[uri]) == 0:
del self.channels[uri]
def publish(self, uri, event, exclude=None, eligible=None):
if uri not in self.channels:
return
# TODO: exclude & eligible
msg = [WampProtocol.MSG_EVENT, uri, event]
for client in self.channels[uri]:
try:
client.ws.send(serialize(msg))
except WebSocketError:
# Seems someone didn't unsubscribe before disconnecting
self.channels[uri].remove(client)
class WampProtocol(BaseProtocol):
MSG_WELCOME = 0
MSG_PREFIX = 1
MSG_CALL = 2
MSG_CALL_RESULT = 3
MSG_CALL_ERROR = 4
MSG_SUBSCRIBE = 5
MSG_UNSUBSCRIBE = 6
MSG_PUBLISH = 7
MSG_EVENT = 8
PROTOCOL_NAME = "wamp"
def __init__(self, *args, **kwargs):
self.procedures = RemoteProcedures()
self.prefixes = Prefixes()
self.session_id = ''.join(
            [random.choice(string.digits + string.ascii_letters)
for i in range_type(16)])
super(WampProtocol, self).__init__(*args, **kwargs)
def register_procedure(self, *args, **kwargs):
self.procedures.register_procedure(*args, **kwargs)
def register_object(self, *args, **kwargs):
self.procedures.register_object(*args, **kwargs)
def register_pubsub(self, *args, **kwargs):
if not hasattr(self.server, 'channels'):
self.server.channels = Channels()
self.server.channels.create(*args, **kwargs)
def do_handshake(self):
from geventwebsocket import get_version
welcome = [
self.MSG_WELCOME,
self.session_id,
1,
'gevent-websocket/' + get_version()
]
self.app.ws.send(serialize(welcome))
def _get_exception_info(self, e):
uri = 'http://TODO#generic'
desc = str(type(e))
details = str(e)
return [uri, desc, details]
def rpc_call(self, data):
call_id, curie_or_uri = data[1:3]
args = data[3:]
if not isinstance(call_id, string_types):
raise Exception()
if not isinstance(curie_or_uri, string_types):
raise Exception()
uri = self.prefixes.resolve(curie_or_uri)
try:
result = self.procedures.call(uri, args)
result_msg = [self.MSG_CALL_RESULT, call_id, result]
except Exception as e:
result_msg = [self.MSG_CALL_ERROR,
call_id] + self._get_exception_info(e)
self.app.on_message(serialize(result_msg))
def pubsub_action(self, data):
action = data[0]
curie_or_uri = data[1]
if not isinstance(action, int):
raise Exception()
if not isinstance(curie_or_uri, string_types):
raise Exception()
uri = self.prefixes.resolve(curie_or_uri)
if action == self.MSG_SUBSCRIBE and len(data) == 2:
self.server.channels.subscribe(data[1], self.handler.active_client)
elif action == self.MSG_UNSUBSCRIBE and len(data) == 2:
self.server.channels.unsubscribe(
data[1], self.handler.active_client)
elif action == self.MSG_PUBLISH and len(data) >= 3:
payload = data[2] if len(data) >= 3 else None
exclude = data[3] if len(data) >= 4 else None
eligible = data[4] if len(data) >= 5 else None
self.server.channels.publish(uri, payload, exclude, eligible)
def on_open(self):
self.app.on_open()
self.do_handshake()
def on_message(self, message):
data = json.loads(message)
if not isinstance(data, list):
            raise Exception('incoming data is not a list')
if data[0] == self.MSG_PREFIX and len(data) == 3:
prefix, uri = data[1:3]
self.prefixes.add(prefix, uri)
elif data[0] == self.MSG_CALL and len(data) >= 3:
return self.rpc_call(data)
elif data[0] in (self.MSG_SUBSCRIBE, self.MSG_UNSUBSCRIBE,
self.MSG_PUBLISH):
return self.pubsub_action(data)
else:
raise Exception("Unknown call")

View file

@@ -1,100 +0,0 @@
import re
import warnings
from .protocols.base import BaseProtocol
from .exceptions import WebSocketError
try:
from collections import OrderedDict
except ImportError:
class OrderedDict:
pass
class WebSocketApplication(object):
protocol_class = BaseProtocol
def __init__(self, ws):
self.protocol = self.protocol_class(self)
self.ws = ws
def handle(self):
self.protocol.on_open()
while True:
try:
message = self.ws.receive()
except WebSocketError:
self.protocol.on_close()
break
self.protocol.on_message(message)
def on_open(self, *args, **kwargs):
pass
def on_close(self, *args, **kwargs):
pass
def on_message(self, message, *args, **kwargs):
self.ws.send(message, **kwargs)
@classmethod
def protocol_name(cls):
return cls.protocol_class.PROTOCOL_NAME
class Resource(object):
def __init__(self, apps=None):
self.apps = apps if apps else []
if isinstance(apps, dict):
if not isinstance(apps, OrderedDict):
warnings.warn("Using an unordered dictionary for the "
"app list is discouraged and may lead to "
"undefined behavior.", UserWarning)
self.apps = apps.items()
# An app can either be a standard WSGI application (an object we call with
# __call__(self, environ, start_response)) or a class we instantiate
# (and which can handle websockets). This function tells them apart.
# Override this if you have apps that can handle websockets but don't
# fulfill these criteria.
def _is_websocket_app(self, app):
return isinstance(app, type) and issubclass(app, WebSocketApplication)
def _app_by_path(self, environ_path, is_websocket_request):
# Which app matched the current path?
for path, app in self.apps:
if re.match(path, environ_path):
if is_websocket_request == self._is_websocket_app(app):
return app
return None
def app_protocol(self, path):
# app_protocol will only be called for websocket apps
app = self._app_by_path(path, True)
if hasattr(app, 'protocol_name'):
return app.protocol_name()
else:
return ''
def __call__(self, environ, start_response):
environ = environ
is_websocket_call = 'wsgi.websocket' in environ
current_app = self._app_by_path(environ['PATH_INFO'], is_websocket_call)
if current_app is None:
raise Exception("No apps defined")
if is_websocket_call:
ws = environ['wsgi.websocket']
current_app = current_app(ws)
current_app.ws = ws # TODO: needed?
current_app.handle()
# Always return something, calling WSGI middleware may rely on it
return []
else:
return current_app(environ, start_response)

View file

@@ -1,34 +0,0 @@
from gevent.pywsgi import WSGIServer
from .handler import WebSocketHandler
from .logging import create_logger
class WebSocketServer(WSGIServer):
handler_class = WebSocketHandler
debug_log_format = (
'-' * 80 + '\n' +
'%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' +
'%(message)s\n' +
'-' * 80
)
def __init__(self, *args, **kwargs):
self.debug = kwargs.pop('debug', False)
self.pre_start_hook = kwargs.pop('pre_start_hook', None)
self._logger = None
self.clients = {}
super(WebSocketServer, self).__init__(*args, **kwargs)
def handle(self, socket, address):
handler = self.handler_class(socket, address, self)
handler.handle()
@property
def logger(self):
if not self._logger:
self._logger = create_logger(
__name__, self.debug, self.debug_log_format)
return self._logger

View file

@@ -1,224 +0,0 @@
from ._compat import PY3
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
# Note: This code is a Python implementation of the algorithm
# "Flexible and Economical UTF-8 Decoder" by Bjoern Hoehrmann
# bjoern@hoehrmann.de, http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
__all__ = ("Utf8Validator",)
# DFA transitions
UTF8VALIDATOR_DFA = (
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 00..1f
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 20..3f
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 40..5f
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 60..7f
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, # 80..9f
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, # a0..bf
8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, # c0..df
0xa, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, # e0..ef
0xb, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, # f0..ff
0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, # s0..s0
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, # s1..s2
1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, # s3..s4
1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, # s5..s6
1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, # s7..s8
)
UTF8_ACCEPT = 0
UTF8_REJECT = 1
# use Cython implementation of UTF8 validator if available
#
try:
from wsaccel.utf8validator import Utf8Validator
except ImportError:
#
# Fallback to pure Python implementation - also for PyPy.
#
# Do NOT touch this code unless you know what you are doing!
# https://github.com/oberstet/scratchbox/tree/master/python/utf8
#
if PY3:
# Python 3 and above
# convert DFA table to bytes (performance)
UTF8VALIDATOR_DFA_S = bytes(UTF8VALIDATOR_DFA)
class Utf8Validator(object):
"""
Incremental UTF-8 validator with constant memory consumption (minimal state).
Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/).
"""
def __init__(self):
self.reset()
def decode(self, b):
"""
Eat one UTF-8 octet, and validate on the fly.
Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case
``self.codepoint`` contains the decoded Unicode code point.
Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered.
Returns some other positive integer when more octets need to be eaten.
"""
tt = UTF8VALIDATOR_DFA_S[b]
if self.state != UTF8_ACCEPT:
self.codepoint = (b & 0x3f) | (self.codepoint << 6)
else:
self.codepoint = (0xff >> tt) & b
self.state = UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt]
return self.state
def reset(self):
"""
Reset validator to start new incremental UTF-8 decode/validation.
"""
self.state = UTF8_ACCEPT # the empty string is valid UTF8
self.codepoint = 0
self.i = 0
def validate(self, ba):
"""
Incrementally validate a chunk of bytes provided as string.
Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``.
As soon as an octet is encountered which renders the octet sequence
invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns
the index within the currently consumed chunk, and ``totalIndex`` the
index within the total consumed sequence that was the point of bail out.
When ``valid? == True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the
total amount of consumed bytes.
"""
#
# The code here is written for optimal JITting in PyPy, not for best
# readability by your grandma or particular elegance. Do NOT touch!
#
l = len(ba)
i = 0
state = self.state
while i < l:
# optimized version of decode(), since we are not interested in actual code points
state = UTF8VALIDATOR_DFA_S[256 + (state << 4) + UTF8VALIDATOR_DFA_S[ba[i]]]
if state == UTF8_REJECT:
self.state = state
self.i += i
return False, False, i, self.i
i += 1
self.state = state
self.i += l
return True, state == UTF8_ACCEPT, l, self.i
else:
# convert DFA table to string (performance)
UTF8VALIDATOR_DFA_S = ''.join([chr(c) for c in UTF8VALIDATOR_DFA])
class Utf8Validator(object):
"""
Incremental UTF-8 validator with constant memory consumption (minimal state).
Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/).
"""
def __init__(self):
self.reset()
def decode(self, b):
"""
Eat one UTF-8 octet, and validate on the fly.
Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case
``self.codepoint`` contains the decoded Unicode code point.
Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered.
Returns some other positive integer when more octets need to be eaten.
"""
tt = ord(UTF8VALIDATOR_DFA_S[b])
if self.state != UTF8_ACCEPT:
self.codepoint = (b & 0x3f) | (self.codepoint << 6)
else:
self.codepoint = (0xff >> tt) & b
self.state = ord(UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt])
return self.state
def reset(self):
"""
Reset validator to start new incremental UTF-8 decode/validation.
"""
self.state = UTF8_ACCEPT # the empty string is valid UTF8
self.codepoint = 0
self.i = 0
def validate(self, ba):
"""
Incrementally validate a chunk of bytes provided as string.
Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``.
As soon as an octet is encountered which renders the octet sequence
invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns
the index within the currently consumed chunk, and ``totalIndex`` the
index within the total consumed sequence that was the point of bail out.
When ``valid? == True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the
total amount of consumed bytes.
"""
#
# The code here is written for optimal JITting in PyPy, not for best
# readability by your grandma or particular elegance. Do NOT touch!
#
l = len(ba)
i = 0
state = self.state
while i < l:
# optimized version of decode(), since we are not interested in actual code points
                    state = ord(UTF8VALIDATOR_DFA_S[256 + (state << 4) + ord(UTF8VALIDATOR_DFA_S[ba[i]])])
if state == UTF8_REJECT:
self.state = state
self.i += i
return False, False, i, self.i
i += 1
self.state = state
self.i += l
return True, state == UTF8_ACCEPT, l, self.i
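
A small sketch of the `validate()` contract described in the docstrings above, assuming the module is importable as `geventwebsocket.utf8validator`:

```python
from geventwebsocket.utf8validator import Utf8Validator  # import path is an assumption

v = Utf8Validator()
ok, ends_on_code_point, idx, total = v.validate(bytearray(b"hello"))
assert ok and ends_on_code_point      # plain ASCII is valid UTF-8

v.reset()
ok, _, idx, _ = v.validate(bytearray(b"\xff"))
assert not ok and idx == 0            # 0xff can never start a UTF-8 sequence
```
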

View file

@@ -1,45 +0,0 @@
import subprocess
def get_version(version=None):
"Returns a PEP 386-compliant version number from VERSION."
if version is None:
from geventwebsocket import VERSION as version
else:
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] == 'alpha' and version[4] == 0:
hg_changeset = get_hg_changeset()
if hg_changeset:
sub = '.dev{0}'.format(hg_changeset)
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return str(main + sub)
def get_hg_changeset():
rev, err = subprocess.Popen(
'hg id -i',
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
).communicate()
if err:
return None
else:
return rev.strip().replace('+', '')
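
Two worked values for the scheme described above (the import path is an assumption):

```python
from geventwebsocket.utils import get_version  # import path is an assumption

assert get_version((0, 10, 1, 'final', 0)) == '0.10.1'  # X.Y.Z, no suffix for final releases
assert get_version((1, 0, 0, 'beta', 2)) == '1.0b2'     # trailing zero dropped, 'beta' -> 'b'
```
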

View file

@@ -1,570 +0,0 @@
import struct
import socket
from ._compat import string_types, range_type, text_type
from .exceptions import ProtocolError
from .exceptions import WebSocketError
from .exceptions import FrameTooLargeException
from .utf8validator import Utf8Validator
MSG_SOCKET_DEAD = "Socket is dead"
MSG_ALREADY_CLOSED = "Connection is already closed"
MSG_CLOSED = "Connection closed"
class WebSocket(object):
"""
Base class for supporting websocket operations.
:ivar environ: The http environment referenced by this connection.
:ivar closed: Whether this connection is closed/closing.
:ivar stream: The underlying file like object that will be read from /
written to by this WebSocket object.
"""
__slots__ = ('utf8validator', 'utf8validate_last', 'environ', 'closed',
'stream', 'raw_write', 'raw_read', 'handler')
OPCODE_CONTINUATION = 0x00
OPCODE_TEXT = 0x01
OPCODE_BINARY = 0x02
OPCODE_CLOSE = 0x08
OPCODE_PING = 0x09
OPCODE_PONG = 0x0a
def __init__(self, environ, stream, handler):
self.environ = environ
self.closed = False
self.stream = stream
self.raw_write = stream.write
self.raw_read = stream.read
self.utf8validator = Utf8Validator()
self.handler = handler
def __del__(self):
try:
self.close()
except:
# close() may fail if __init__ didn't complete
pass
def _decode_bytes(self, bytestring):
"""
Internal method used to convert the utf-8 encoded bytestring into
unicode.
If the conversion fails, the socket will be closed.
"""
if not bytestring:
return ''
try:
return bytestring.decode('utf-8')
except UnicodeDecodeError:
self.close(1007)
raise
def _encode_bytes(self, text):
"""
:returns: The utf-8 byte string equivalent of `text`.
"""
if not isinstance(text, str):
text = text_type(text or '')
return text.encode("utf-8")
def _is_valid_close_code(self, code):
"""
:returns: Whether the returned close code is a valid hybi return code.
"""
if code < 1000:
return False
if 1004 <= code <= 1006:
return False
if 1012 <= code <= 1016:
return False
if code == 1100:
# not sure about this one but the autobahn fuzzer requires it.
return False
if 2000 <= code <= 2999:
return False
return True
@property
def current_app(self):
if hasattr(self.handler.server.application, 'current_app'):
return self.handler.server.application.current_app
else:
# For backwards compatibility reasons
class MockApp():
def on_close(self, *args):
pass
return MockApp()
@property
def origin(self):
if not self.environ:
return
return self.environ.get('HTTP_ORIGIN')
@property
def protocol(self):
if not self.environ:
return
return self.environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL')
@property
def version(self):
if not self.environ:
return
return self.environ.get('HTTP_SEC_WEBSOCKET_VERSION')
@property
def path(self):
if not self.environ:
return
return self.environ.get('PATH_INFO')
@property
def logger(self):
return self.handler.logger
def handle_close(self, header, payload):
"""
Called when a close frame has been decoded from the stream.
:param header: The decoded `Header`.
:param payload: The bytestring payload associated with the close frame.
"""
if not payload:
self.close(1000, None)
return
if len(payload) < 2:
raise ProtocolError('Invalid close frame: {0} {1}'.format(
header, payload))
code = struct.unpack('!H', payload[:2])[0]
payload = payload[2:]
if payload:
validator = Utf8Validator()
val = validator.validate(payload)
if not val[0]:
raise UnicodeError
if not self._is_valid_close_code(code):
raise ProtocolError('Invalid close code {0}'.format(code))
self.close(code, payload)
def handle_ping(self, header, payload):
self.send_frame(payload, self.OPCODE_PONG)
def handle_pong(self, header, payload):
pass
def read_frame(self):
"""
Block until a full frame has been read from the socket.
        This is an internal method, as calling it will not clean up correctly
        if an exception is raised. Use `receive` instead.
:return: The header and payload as a tuple.
"""
header = Header.decode_header(self.stream)
if header.flags:
raise ProtocolError
if not header.length:
return header, b''
try:
payload = self.raw_read(header.length)
except socket.error:
payload = b''
except Exception:
# TODO log out this exception
payload = b''
if len(payload) != header.length:
raise WebSocketError('Unexpected EOF reading frame payload')
if header.mask:
payload = header.unmask_payload(payload)
return header, payload
def validate_utf8(self, payload):
# Make sure the frames are decodable independently
self.utf8validate_last = self.utf8validator.validate(payload)
if not self.utf8validate_last[0]:
raise UnicodeError("Encountered invalid UTF-8 while processing "
"text message at payload octet index "
"{0:d}".format(self.utf8validate_last[3]))
def read_message(self):
"""
Return the next text or binary message from the socket.
        This is an internal method, as calling it will not clean up correctly
        if an exception is raised. Use `receive` instead.
"""
opcode = None
message = bytearray()
while True:
header, payload = self.read_frame()
f_opcode = header.opcode
if f_opcode in (self.OPCODE_TEXT, self.OPCODE_BINARY):
# a new frame
if opcode:
raise ProtocolError("The opcode in non-fin frame is "
"expected to be zero, got "
"{0!r}".format(f_opcode))
# Start reading a new message, reset the validator
self.utf8validator.reset()
self.utf8validate_last = (True, True, 0, 0)
opcode = f_opcode
elif f_opcode == self.OPCODE_CONTINUATION:
if not opcode:
raise ProtocolError("Unexpected frame with opcode=0")
elif f_opcode == self.OPCODE_PING:
self.handle_ping(header, payload)
continue
elif f_opcode == self.OPCODE_PONG:
self.handle_pong(header, payload)
continue
elif f_opcode == self.OPCODE_CLOSE:
self.handle_close(header, payload)
return
else:
raise ProtocolError("Unexpected opcode={0!r}".format(f_opcode))
if opcode == self.OPCODE_TEXT:
self.validate_utf8(payload)
message += payload
if header.fin:
break
if opcode == self.OPCODE_TEXT:
self.validate_utf8(message)
return self._decode_bytes(message)
else:
return message
def receive(self):
"""
Read and return a message from the stream. If `None` is returned, then
the socket is considered closed/errored.
"""
if self.closed:
self.current_app.on_close(MSG_ALREADY_CLOSED)
raise WebSocketError(MSG_ALREADY_CLOSED)
try:
return self.read_message()
except UnicodeError:
self.close(1007)
except ProtocolError:
self.close(1002)
except socket.timeout:
self.close()
self.current_app.on_close(MSG_CLOSED)
except socket.error:
self.close()
self.current_app.on_close(MSG_CLOSED)
return None
def send_frame(self, message, opcode):
"""
Send a frame over the websocket with message as its payload
"""
if self.closed:
self.current_app.on_close(MSG_ALREADY_CLOSED)
raise WebSocketError(MSG_ALREADY_CLOSED)
if not message:
return
if opcode in (self.OPCODE_TEXT, self.OPCODE_PING):
message = self._encode_bytes(message)
elif opcode == self.OPCODE_BINARY:
message = bytes(message)
header = Header.encode_header(True, opcode, b'', len(message), 0)
try:
self.raw_write(header + message)
except socket.error:
raise WebSocketError(MSG_SOCKET_DEAD)
except:
raise
def send(self, message, binary=None):
"""
Send a frame over the websocket with message as its payload
"""
if binary is None:
binary = not isinstance(message, string_types)
opcode = self.OPCODE_BINARY if binary else self.OPCODE_TEXT
try:
self.send_frame(message, opcode)
except WebSocketError:
self.current_app.on_close(MSG_SOCKET_DEAD)
raise WebSocketError(MSG_SOCKET_DEAD)
def close(self, code=1000, message=b''):
"""
Close the websocket and connection, sending the specified code and
message. The underlying socket object is _not_ closed, that is the
responsibility of the initiator.
"""
if self.closed:
self.current_app.on_close(MSG_ALREADY_CLOSED)
try:
message = self._encode_bytes(message)
self.send_frame(message, opcode=self.OPCODE_CLOSE)
except WebSocketError:
# Failed to write the closing frame but it's ok because we're
# closing the socket anyway.
self.logger.debug("Failed to write closing frame -> closing socket")
finally:
self.logger.debug("Closed WebSocket")
self.closed = True
self.stream = None
self.raw_write = None
self.raw_read = None
self.environ = None
#self.current_app.on_close(MSG_ALREADY_CLOSED)
class Stream(object):
"""
    Wraps the handler's socket/rfile attributes and makes them into a file-like
    object that can be read from/written to by the lower level websocket api.
"""
__slots__ = ('handler', 'read', 'write')
def __init__(self, handler):
self.handler = handler
self.read = handler.rfile.read
self.write = handler.socket.sendall
class Header(object):
__slots__ = ('fin', 'mask', 'opcode', 'flags', 'length')
FIN_MASK = 0x80
OPCODE_MASK = 0x0f
MASK_MASK = 0x80
LENGTH_MASK = 0x7f
RSV0_MASK = 0x40
RSV1_MASK = 0x20
RSV2_MASK = 0x10
# bitwise mask that will determine the reserved bits for a frame header
HEADER_FLAG_MASK = RSV0_MASK | RSV1_MASK | RSV2_MASK
def __init__(self, fin=0, opcode=0, flags=0, length=0):
self.mask = ''
self.fin = fin
self.opcode = opcode
self.flags = flags
self.length = length
def mask_payload(self, payload):
payload = bytearray(payload)
mask = bytearray(self.mask)
for i in range_type(self.length):
payload[i] ^= mask[i % 4]
return payload
# it's the same operation
unmask_payload = mask_payload
def __repr__(self):
opcodes = {
0: 'continuation(0)',
1: 'text(1)',
2: 'binary(2)',
8: 'close(8)',
9: 'ping(9)',
10: 'pong(10)'
}
flags = {
0x40: 'RSV1 MASK',
0x20: 'RSV2 MASK',
0x10: 'RSV3 MASK'
}
return ("<Header fin={0} opcode={1} length={2} flags={3} mask={4} at "
"0x{5:x}>").format(
self.fin,
opcodes.get(self.opcode, 'reserved({})'.format(self.opcode)),
self.length,
flags.get(self.flags, 'reserved({})'.format(self.flags)),
self.mask, id(self)
)
@classmethod
def decode_header(cls, stream):
"""
Decode a WebSocket header.
:param stream: A file like object that can be 'read' from.
:returns: A `Header` instance.
"""
read = stream.read
data = read(2)
if len(data) != 2:
raise WebSocketError("Unexpected EOF while decoding header")
first_byte, second_byte = struct.unpack('!BB', data)
header = cls(
fin=first_byte & cls.FIN_MASK == cls.FIN_MASK,
opcode=first_byte & cls.OPCODE_MASK,
flags=first_byte & cls.HEADER_FLAG_MASK,
length=second_byte & cls.LENGTH_MASK)
has_mask = second_byte & cls.MASK_MASK == cls.MASK_MASK
if header.opcode > 0x07:
if not header.fin:
raise ProtocolError(
"Received fragmented control frame: {0!r}".format(data))
# Control frames MUST have a payload length of 125 bytes or less
if header.length > 125:
raise FrameTooLargeException(
"Control frame cannot be larger than 125 bytes: "
"{0!r}".format(data))
if header.length == 126:
# 16 bit length
data = read(2)
if len(data) != 2:
raise WebSocketError('Unexpected EOF while decoding header')
header.length = struct.unpack('!H', data)[0]
elif header.length == 127:
# 64 bit length
data = read(8)
if len(data) != 8:
raise WebSocketError('Unexpected EOF while decoding header')
header.length = struct.unpack('!Q', data)[0]
if has_mask:
mask = read(4)
if len(mask) != 4:
raise WebSocketError('Unexpected EOF while decoding header')
header.mask = mask
return header
@classmethod
def encode_header(cls, fin, opcode, mask, length, flags):
"""
Encodes a WebSocket header.
:param fin: Whether this is the final frame for this opcode.
:param opcode: The opcode of the payload, see `OPCODE_*`
:param mask: Whether the payload is masked.
:param length: The length of the frame.
:param flags: The RSV* flags.
:return: A bytestring encoded header.
"""
first_byte = opcode
second_byte = 0
extra = b""
result = bytearray()
if fin:
first_byte |= cls.FIN_MASK
if flags & cls.RSV0_MASK:
first_byte |= cls.RSV0_MASK
if flags & cls.RSV1_MASK:
first_byte |= cls.RSV1_MASK
if flags & cls.RSV2_MASK:
first_byte |= cls.RSV2_MASK
# now deal with length complexities
if length < 126:
second_byte += length
elif length <= 0xffff:
second_byte += 126
extra = struct.pack('!H', length)
elif length <= 0xffffffffffffffff:
second_byte += 127
extra = struct.pack('!Q', length)
else:
raise FrameTooLargeException
if mask:
second_byte |= cls.MASK_MASK
result.append(first_byte)
result.append(second_byte)
result.extend(extra)
if mask:
result.extend(mask)
return result
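
A worked example of what `encode_header` produces for small payloads, assuming the module is importable as `geventwebsocket.websocket`; the byte values follow RFC 6455:

```python
from geventwebsocket.websocket import Header, WebSocket  # import path is an assumption

# FIN + text opcode -> 0x81; unmasked 5-byte payload -> 0x05
assert bytes(Header.encode_header(True, WebSocket.OPCODE_TEXT, b"", 5, 0)) == b"\x81\x05"

# payloads of 126..65535 bytes switch to the 16-bit extended length field
assert bytes(Header.encode_header(True, WebSocket.OPCODE_BINARY, b"", 300, 0)) == b"\x82\x7e\x01\x2c"
```
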

View file

@@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2016 Tierion
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@@ -1,178 +0,0 @@
# pymerkletools
[![PyPI version](https://badge.fury.io/py/merkletools.svg)](https://badge.fury.io/py/merkletools) [![Build Status](https://travis-ci.org/Tierion/pymerkletools.svg?branch=master)](https://travis-ci.org/Tierion/pymerkletools)
This is a Python port of [merkle-tools](https://github.com/tierion/merkle-tools).
Tools for creating Merkle trees, generating merkle proofs, and verification of merkle proofs.
## Installation
```
pip install merkletools
```
### Create MerkleTools Object
```python
import merkletools
mt = MerkleTools(hash_type="md5") # default is sha256
# valid hashTypes include all crypto hash algorithms
# such as 'MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512'
# as well as the SHA3 family of algorithms
# including 'SHA3-224', 'SHA3-256', 'SHA3-384', and 'SHA3-512'
```
To use `sha3`, this module depends on [pysha3](https://pypi.python.org/pypi/pysha3). It will be installed as part of this module, or you can install it manually with:
```bash
pip install pysha3==1.0b1
```
## Methods
### add_leaf(value, do_hash)
Adds a value as a leaf, or a list of leaves, to the tree. The value must be a hex string; otherwise set the optional `do_hash` to `True` to have your value hashed prior to being added to the tree.
```python
hex_data = '05ae04314577b2783b4be98211d1b72476c59e9c413cfb2afa2f0c68e0d93911'
list_data = ['Some text data', 'perhaps']
mt.add_leaf(hex_data)
mt.add_leaf(list_data, True)
```
### get_leaf_count()
Returns the number of leaves that are currently added to the tree.
```python
leaf_count = mt.get_leaf_count()
```
### get_leaf(index)
Returns the value of the leaf at the given index as a hex string.
```python
leaf_value = mt.get_leaf(1)
```
### reset_tree()
Removes all the leaves from the tree, preparing to begin creating a new tree.
```python
mt.reset_tree()
```
### make_tree()
Generates the merkle tree using the leaves that have been added.
```python
mt.make_tree()
```
### is_ready
`.is_ready` is a boolean property indicating if the tree is built and ready to supply its root and proofs. The `is_ready` state is `True` only after calling `make_tree()`. Adding leaves or resetting the tree will change the ready state to `False`.
```python
is_ready = mt.is_ready
```
### get_merkle_root()
Returns the merkle root of the tree as a hex string. If the tree is not ready, `None` is returned.
```python
root_value = mt.get_merkle_root()
```
### get_proof(index)
Returns the proof as an array of hash objects for the leaf at the given index. If the tree is not ready or no leaf exists at the given index, `None` is returned.
```python
proof = mt.get_proof(1)
```
The proof array contains a set of merkle sibling objects. Each object contains the sibling hash, with the key value of either right or left. The right or left value tells you where that sibling was in relation to the current hash being evaluated. This information is needed for proof validation, as explained in the following section.
### validate_proof(proof, target_hash, merkle_root)
Returns a boolean indicating whether or not the proof is valid and correctly connects the `target_hash` to the `merkle_root`. `proof` is a proof array as supplied by the `get_proof` method. The `target_hash` and `merkle_root` parameters must be hex strings.
```python
proof = [
    {'right': '09096dbc49b7909917e13b795ebf289ace50b870440f10424af8845fb7761ea5'},
    {'right': 'ed2456914e48c1e17b7bd922177291ef8b7f553edf1b1f66b6fc1a076524b22f'},
    {'left': 'eac53dde9661daf47a428efea28c81a021c06d64f98eeabbdcff442d992153a8'},
]
target_hash = '36e0fd847d927d68475f32a94efff30812ee3ce87c7752973f4dd7476aa2e97e'
merkle_root = 'b8b1f39aa2e3fc2dde37f3df04e829f514fb98369b522bfb35c663befa896766'
is_valid = mt.validate_proof(proof, target_hash, merkle_root)
```
The proof process uses all the proof objects in the array to attempt to prove a relationship between the `target_hash` and the `merkle_root` values. The steps to validate a proof are as follows (a minimal sketch of this loop appears right after the list):
1. Concatenate `target_hash` and the first hash in the proof array. The right or left designation specifies which side of the concatenation the proof hash value should be on.
2. Hash the resulting value.
3. Concatenate the resulting hash with the next hash in the proof array, using the same left and right rules.
4. Hash that value and continue the process until you've gone through each item in the proof array.
5. The final hash value should equal the `merkle_root` value if the proof is valid, otherwise the proof is invalid.
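
A minimal sketch of that validation loop, assuming SHA-256 and the hex-string proof format shown above (`validate_proof_sketch` is an illustrative name, not part of the library):

```python
import hashlib

def validate_proof_sketch(proof, target_hash, merkle_root):
    computed = bytearray.fromhex(target_hash)
    for entry in proof:
        if 'left' in entry:   # sibling sits on the left of the concatenation
            data = bytearray.fromhex(entry['left']) + computed
        else:                 # sibling sits on the right
            data = computed + bytearray.fromhex(entry['right'])
        computed = bytearray(hashlib.sha256(data).digest())
    return computed == bytearray.fromhex(merkle_root)
```
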
## Common Usage
### Creating a tree and generating the proofs
```python
mt = MerkleTools()
mt.add_leaf("tierion", True)
mt.add_leaf(["bitcoin", "blockchain"], True)
mt.make_tree()
print "root:", mt.get_merkle_root() # root: '765f15d171871b00034ee55e48ffdf76afbc44ed0bcff5c82f31351d333c2ed1'
print mt.get_proof(1) # [{left: '2da7240f6c88536be72abe9f04e454c6478ee29709fc3729ddfb942f804fbf08'},
# {right: 'ef7797e13d3a75526946a3bcf00daec9fc9c9c4d51ddc7cc5df888f74dd434d1'}]
print mt.validate_proof(mt.get_proof(1), mt.get_leaf(1), mt.get_merkle_root()) # True
```
## Notes
### About tree generation
1. Internally, leaves are stored as `bytearray`. When the tree is built, it is generated by hashing together the `bytearray` values.
2. Lonely leaf nodes are promoted to the next level up, as depicted below.
ROOT=Hash(H+E)
/ \
/ \
H=Hash(F+G) E
/ \ \
/ \ \
F=Hash(A+B) G=Hash(C+D) E
/ \ / \ \
/ \ / \ \
A B C D E
### Development
This module uses Python's `hashlib` for hashing. Inside a `MerkleTools` object all
hashes are stored as Python `bytearray`. This way hashes can be concatenated simply with `+` and the result
used as input for the hash function. But for simplicity and ease of use,
`MerkleTools` methods expect both inputs and outputs to be hex strings. We can
convert from one type to the other using standard Python methods.
For example:
```python
hash = hashlib.sha256('a').digest() # '\xca\x97\x81\x12\xca\x1b\xbd\xca\xfa\xc21\xb3\x9a#\xdcM\xa7\x86\xef\xf8\x14|Nr\xb9\x80w\x85\xaf\xeeH\xbb'
hex_string = hash.encode('hex') # 'ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb'
back_to_hash = hex_string.decode('hex') # '\xca\x97\x81\x12\xca\x1b\xbd\xca\xfa\xc21\xb3\x9a#\xdcM\xa7\x86\xef\xf8\x14|Nr\xb9\x80w\x85\xaf\xeeH\xbb'
```

View file

@@ -1,138 +0,0 @@
import hashlib
import binascii
class MerkleTools(object):
def __init__(self, hash_type="sha256"):
hash_type = hash_type.lower()
if hash_type == 'sha256':
self.hash_function = hashlib.sha256
elif hash_type == 'md5':
self.hash_function = hashlib.md5
elif hash_type == 'sha224':
self.hash_function = hashlib.sha224
elif hash_type == 'sha384':
self.hash_function = hashlib.sha384
elif hash_type == 'sha512':
self.hash_function = hashlib.sha512
elif hash_type == 'sha3_256':
self.hash_function = hashlib.sha3_256
elif hash_type == 'sha3_224':
self.hash_function = hashlib.sha3_224
elif hash_type == 'sha3_384':
self.hash_function = hashlib.sha3_384
elif hash_type == 'sha3_512':
self.hash_function = hashlib.sha3_512
else:
            raise Exception('`hash_type` {} not supported'.format(hash_type))
self.reset_tree()
def _to_hex(self, x):
try: # python3
return x.hex()
except: # python2
return binascii.hexlify(x)
def reset_tree(self):
self.leaves = list()
self.levels = None
self.is_ready = False
def add_leaf(self, values, do_hash=False):
self.is_ready = False
# check if single leaf
if isinstance(values, tuple) or isinstance(values, list):
for v in values:
if do_hash:
v = v.encode('utf-8')
v = self.hash_function(v).hexdigest()
v = bytearray.fromhex(v)
else:
v = bytearray.fromhex(v)
self.leaves.append(v)
else:
if do_hash:
v = values.encode("utf-8")
v = self.hash_function(v).hexdigest()
v = bytearray.fromhex(v)
else:
v = bytearray.fromhex(values)
self.leaves.append(v)
def get_leaf(self, index):
return self._to_hex(self.leaves[index])
def get_leaf_count(self):
return len(self.leaves)
def get_tree_ready_state(self):
return self.is_ready
def _calculate_next_level(self):
solo_leave = None
N = len(self.levels[0]) # number of leaves on the level
if N % 2 == 1: # if odd number of leaves on the level
solo_leave = self.levels[0][-1]
N -= 1
new_level = []
for l, r in zip(self.levels[0][0:N:2], self.levels[0][1:N:2]):
new_level.append(self.hash_function(l+r).digest())
if solo_leave is not None:
new_level.append(solo_leave)
self.levels = [new_level, ] + self.levels # prepend new level
def make_tree(self):
self.is_ready = False
if self.get_leaf_count() > 0:
self.levels = [self.leaves, ]
while len(self.levels[0]) > 1:
self._calculate_next_level()
self.is_ready = True
def get_merkle_root(self):
if self.is_ready:
if self.levels is not None:
return self._to_hex(self.levels[0][0])
else:
return None
else:
return None
def get_proof(self, index):
if self.levels is None:
return None
elif not self.is_ready or index > len(self.leaves)-1 or index < 0:
return None
else:
proof = []
for x in range(len(self.levels) - 1, 0, -1):
level_len = len(self.levels[x])
if (index == level_len - 1) and (level_len % 2 == 1): # skip if this is an odd end node
index = int(index / 2.)
continue
is_right_node = index % 2
sibling_index = index - 1 if is_right_node else index + 1
sibling_pos = "left" if is_right_node else "right"
sibling_value = self._to_hex(self.levels[x][sibling_index])
proof.append({sibling_pos: sibling_value})
index = int(index / 2.)
return proof
def validate_proof(self, proof, target_hash, merkle_root):
merkle_root = bytearray.fromhex(merkle_root)
target_hash = bytearray.fromhex(target_hash)
if len(proof) == 0:
return target_hash == merkle_root
else:
proof_hash = target_hash
for p in proof:
try:
# the sibling is a left node
sibling = bytearray.fromhex(p['left'])
proof_hash = self.hash_function(sibling + proof_hash).digest()
except:
# the sibling is a right node
sibling = bytearray.fromhex(p['right'])
proof_hash = self.hash_function(proof_hash + sibling).digest()
return proof_hash == merkle_root

View file

@@ -1,29 +0,0 @@
import os
from setuptools import find_packages
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
install_requires = [
"pysha3==1.0b1"
]
setup(
name='merkletools',
version='1.0.2',
description='Merkle Tools',
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
],
url='https://github.com/',
author='Eder Santana',
keywords='merkle tree, blockchain, tierion',
license="MIT",
packages=find_packages(),
include_package_data=False,
zip_safe=False,
install_requires=install_requires
)

Binary file not shown.

View file

@ -1,126 +0,0 @@
LICENSE ISSUES
==============
The OpenSSL toolkit stays under a dual license, i.e. both the conditions of
the OpenSSL License and the original SSLeay license apply to the toolkit.
See below for the actual license texts. Actually both licenses are BSD-style
Open Source licenses. In case of any license issues related to OpenSSL
please contact openssl-core@openssl.org.
OpenSSL License
---------------
/* ====================================================================
* Copyright (c) 1998-2016 The OpenSSL Project. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. All advertising materials mentioning features or use of this
* software must display the following acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
*
* 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
* endorse or promote products derived from this software without
* prior written permission. For written permission, please contact
* openssl-core@openssl.org.
*
* 5. Products derived from this software may not be called "OpenSSL"
* nor may "OpenSSL" appear in their names without prior written
* permission of the OpenSSL Project.
*
* 6. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit (http://www.openssl.org/)"
*
* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* ====================================================================
*
* This product includes cryptographic software written by Eric Young
* (eay@cryptsoft.com). This product includes software written by Tim
* Hudson (tjh@cryptsoft.com).
*
*/
Original SSLeay License
-----------------------
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
* All rights reserved.
*
* This package is an SSL implementation written
* by Eric Young (eay@cryptsoft.com).
* The implementation was written so as to conform with Netscapes SSL.
*
* This library is free for commercial and non-commercial use as long as
* the following conditions are aheared to. The following conditions
* apply to all code found in this distribution, be it the RC4, RSA,
* lhash, DES, etc., code; not just the SSL code. The SSL documentation
* included with this distribution is covered by the same copyright terms
* except that the holder is Tim Hudson (tjh@cryptsoft.com).
*
* Copyright remains Eric Young's, and as such any Copyright notices in
* the code are not to be removed.
* If this package is used in a product, Eric Young should be given attribution
* as the author of the parts of the library used.
* This can be in the form of a textual message at program startup or
* in documentation (online or textual) provided with the package.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* "This product includes cryptographic software written by
* Eric Young (eay@cryptsoft.com)"
* The word 'cryptographic' can be left out if the rouines from the library
* being used are not cryptographic related :-).
* 4. If you include any Windows specific code (or a derivative thereof) from
* the apps directory (application code) you must include an acknowledgement:
* "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
*
* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* The licence and distribution terms for any publically available version or
* derivative of this code cannot be changed. i.e. this code cannot simply be
* copied and put under another distribution licence
* [including the GNU Public Licence.]
*/

View file

@ -1,59 +0,0 @@
=============================================================================
OpenSSL v1.0.2l Precompiled Binaries for Win32
-----------------------------------------------------------------------------
*** Release Information ***
Release Date: May 29, 2017
Author: Frederik A. Winkelsdorf (opendec.wordpress.com)
for the Indy Project (www.indyproject.org)
Requirements: Indy 10.5.5+ (SVN Version or Delphi 2009 and newer)
Dependencies: The libraries have no noteworthy dependencies
Installation: Copy both DLL files into your application directory
Supported OS: Windows 2000 up to Windows 10
-----------------------------------------------------------------------------
*** Legal Disclaimer ***
THIS SOFTWARE IS PROVIDED BY ITS AUTHOR AND THE INDY PROJECT "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
OpenSSL license terms are provided in the file "OpenSSL License.txt".
PLEASE CHECK IF YOU NEED TO COMPLY WITH EXPORT RESTRICTIONS FOR CRYPTOGRAPHIC
SOFTWARE AND/OR PATENTS.
-----------------------------------------------------------------------------
*** Build Information Win32 ***
Built with: Microsoft Visual C++ 2008 Express Edition
The Netwide Assembler (NASM) v2.11.08 Win32
Strawberry Perl v5.22.0.1 Win32 Portable
Windows PowerShell
FinalBuilder 7
Commands: perl configure VC-WIN32
ms\do_nasm
adjusted ms\ntdll.mak (replaced "/MD" with "/MT")
adjusted ms\version32.rc (Indy Information inserted)
nmake -f ms\ntdll.mak
nmake -f ms\ntdll.mak test
editbin.exe /rebase:base=0x11000000 libeay32.dll
editbin.exe /rebase:base=0x12000000 ssleay32.dll
=============================================================================

View file

@ -1,10 +0,0 @@
openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -nodes -config openssl.cnf
REM openssl ecparam -name secp521r1 -genkey -param_enc explicit -out key-ecc.pem -config openssl.cnf
openssl ecparam -name secp256r1 -genkey -out key-ecc.pem
openssl req -new -key key-ecc.pem -x509 -nodes -out cert-ecc.pem -config openssl.cnf
@echo off
REM openssl ecparam -genkey -name prime256v1 -out key.pem
REM openssl req -new -key key.pem -out csr.pem
REM openssl req -x509 -days 365 -key key.pem -in csr.pem -out certificate.pem

Binary file not shown.

View file

@ -1,127 +0,0 @@
LICENSE ISSUES
==============
The OpenSSL toolkit stays under a dual license, i.e. both the conditions of
the OpenSSL License and the original SSLeay license apply to the toolkit.
See below for the actual license texts. Actually both licenses are BSD-style
Open Source licenses. In case of any license issues related to OpenSSL
please contact openssl-core@openssl.org.
OpenSSL License
---------------
/* ====================================================================
* Copyright (c) 1998-2016 The OpenSSL Project. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. All advertising materials mentioning features or use of this
* software must display the following acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
*
* 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
* endorse or promote products derived from this software without
* prior written permission. For written permission, please contact
* openssl-core@openssl.org.
*
* 5. Products derived from this software may not be called "OpenSSL"
* nor may "OpenSSL" appear in their names without prior written
* permission of the OpenSSL Project.
*
* 6. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit (http://www.openssl.org/)"
*
* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* ====================================================================
*
* This product includes cryptographic software written by Eric Young
* (eay@cryptsoft.com). This product includes software written by Tim
* Hudson (tjh@cryptsoft.com).
*
*/
Original SSLeay License
-----------------------
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
* All rights reserved.
*
* This package is an SSL implementation written
* by Eric Young (eay@cryptsoft.com).
* The implementation was written so as to conform with Netscapes SSL.
*
* This library is free for commercial and non-commercial use as long as
* the following conditions are aheared to. The following conditions
* apply to all code found in this distribution, be it the RC4, RSA,
* lhash, DES, etc., code; not just the SSL code. The SSL documentation
* included with this distribution is covered by the same copyright terms
* except that the holder is Tim Hudson (tjh@cryptsoft.com).
*
* Copyright remains Eric Young's, and as such any Copyright notices in
* the code are not to be removed.
* If this package is used in a product, Eric Young should be given attribution
* as the author of the parts of the library used.
* This can be in the form of a textual message at program startup or
* in documentation (online or textual) provided with the package.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* "This product includes cryptographic software written by
* Eric Young (eay@cryptsoft.com)"
* The word 'cryptographic' can be left out if the rouines from the library
* being used are not cryptographic related :-).
* 4. If you include any Windows specific code (or a derivative thereof) from
* the apps directory (application code) you must include an acknowledgement:
* "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
*
* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* The licence and distribution terms for any publically available version or
* derivative of this code cannot be changed. i.e. this code cannot simply be
* copied and put under another distribution licence
* [including the GNU Public Licence.]
*/

View file

@ -1,70 +0,0 @@
[ req ]
prompt = no
default_bits = 2048
default_keyfile = server-key.pem
distinguished_name = subject
req_extensions = req_ext
x509_extensions = x509_ext
string_mask = utf8only
# The Subject DN can be formed using X501 or RFC 4514 (see RFC 4519 for a description).
# It's sort of a mashup. For example, RFC 4514 does not provide emailAddress.
[ subject ]
countryName = US
stateOrProvinceName = NY
localityName = New York
organizationName = Example, LLC
# Use a friendly name here because it's presented to the user. The server's DNS
# names are placed in Subject Alternate Names. Plus, DNS names here are deprecated
# by both IETF and CA/Browser Forums. If you place a DNS name here, then you
# must include the DNS name in the SAN too (otherwise, Chrome and others that
# strictly follow the CA/Browser Baseline Requirements will fail).
commonName = Example Company
emailAddress = test@example.com
# Section x509_ext is used when generating a self-signed certificate. I.e., openssl req -x509 ...
[ x509_ext ]
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer
basicConstraints = CA:FALSE
keyUsage = digitalSignature, keyEncipherment
subjectAltName = @alternate_names
nsComment = "OpenSSL Generated Certificate"
# RFC 5280, Section 4.2.1.12 makes EKU optional
# CA/Browser Baseline Requirements, Appendix (B)(3)(G) is unclear on this point
# extendedKeyUsage = serverAuth, clientAuth
# Section req_ext is used when generating a certificate signing request. I.e., openssl req ...
[ req_ext ]
subjectKeyIdentifier = hash
basicConstraints = CA:FALSE
keyUsage = digitalSignature, keyEncipherment
subjectAltName = @alternate_names
nsComment = "OpenSSL Generated Certificate"
# RFC 5280, Section 4.2.1.12 makes EKU optional
# CA/Browser Baseline Requirements, Appendix (B)(3)(G) is unclear on this point
# extendedKeyUsage = serverAuth, clientAuth
[ alternate_names ]
DNS.1 = example.com
DNS.2 = www.example.com
DNS.3 = mail.example.com
DNS.4 = ftp.example.com
# Add these if you need them. But usually you don't want them or
# need them in production. You may need them for development.
# DNS.5 = localhost
# DNS.6 = localhost.localdomain
# DNS.7 = 127.0.0.1
# IPv6 localhost
# DNS.8 = ::1

Binary file not shown.

View file

@ -1,455 +0,0 @@
# via http://pastebin.com/H1XikJFd
# -*- Mode: Python -*-
# This is a combination of http://pastebin.com/bQtdDzHx and
# https://github.com/Bitmessage/PyBitmessage/blob/master/src/pyelliptic/openssl.py
# that doesn't crash on OSX.
# Long message bug fixed by ZeroNet
import ctypes
import ctypes.util
import _ctypes
import hashlib
import base64
import time
import logging
import sys
import os
addrtype = 0
class _OpenSSL:
"""
Wrapper for OpenSSL using ctypes
"""
def __init__(self, library):
self.time_opened = time.time()
"""
Build the wrapper
"""
try:
self._lib = ctypes.CDLL(library)
except:
self._lib = ctypes.cdll.LoadLibrary(library)
self.pointer = ctypes.pointer
self.c_int = ctypes.c_int
self.byref = ctypes.byref
self.create_string_buffer = ctypes.create_string_buffer
self.BN_new = self._lib.BN_new
self.BN_new.restype = ctypes.c_void_p
self.BN_new.argtypes = []
self.BN_copy = self._lib.BN_copy
self.BN_copy.restype = ctypes.c_void_p
self.BN_copy.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self.BN_mul_word = self._lib.BN_mul_word
self.BN_mul_word.restype = ctypes.c_int
self.BN_mul_word.argtypes = [ctypes.c_void_p, ctypes.c_int]
self.BN_set_word = self._lib.BN_set_word
self.BN_set_word.restype = ctypes.c_int
self.BN_set_word.argtypes = [ctypes.c_void_p, ctypes.c_int]
self.BN_add = self._lib.BN_add
self.BN_add.restype = ctypes.c_void_p
self.BN_add.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p]
self.BN_mod_sub = self._lib.BN_mod_sub
self.BN_mod_sub.restype = ctypes.c_int
self.BN_mod_sub.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_mod_mul = self._lib.BN_mod_mul
self.BN_mod_mul.restype = ctypes.c_int
self.BN_mod_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_mod_inverse = self._lib.BN_mod_inverse
self.BN_mod_inverse.restype = ctypes.c_void_p
self.BN_mod_inverse.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.BN_cmp = self._lib.BN_cmp
self.BN_cmp.restype = ctypes.c_int
self.BN_cmp.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self.BN_bn2bin = self._lib.BN_bn2bin
self.BN_bn2bin.restype = ctypes.c_int
self.BN_bn2bin.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self.BN_bin2bn = self._lib.BN_bin2bn
self.BN_bin2bn.restype = ctypes.c_void_p
self.BN_bin2bn.argtypes = [ctypes.c_void_p, ctypes.c_int,
ctypes.c_void_p]
self.EC_KEY_new_by_curve_name = self._lib.EC_KEY_new_by_curve_name
self.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
self.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int]
self.EC_KEY_get0_group = self._lib.EC_KEY_get0_group
self.EC_KEY_get0_group.restype = ctypes.c_void_p
self.EC_KEY_get0_group.argtypes = [ctypes.c_void_p]
self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key
self.EC_KEY_set_private_key.restype = ctypes.c_int
self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.EC_KEY_set_public_key = self._lib.EC_KEY_set_public_key
self.EC_KEY_set_public_key.restype = ctypes.c_int
self.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.EC_POINT_set_compressed_coordinates_GFp = self._lib.EC_POINT_set_compressed_coordinates_GFp
self.EC_POINT_set_compressed_coordinates_GFp.restype = ctypes.c_int
self.EC_POINT_set_compressed_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
self.EC_POINT_new = self._lib.EC_POINT_new
self.EC_POINT_new.restype = ctypes.c_void_p
self.EC_POINT_new.argtypes = [ctypes.c_void_p]
self.EC_POINT_free = self._lib.EC_POINT_free
self.EC_POINT_free.restype = None
self.EC_POINT_free.argtypes = [ctypes.c_void_p]
self.EC_GROUP_get_order = self._lib.EC_GROUP_get_order
self.EC_GROUP_get_order.restype = ctypes.c_void_p
self.EC_GROUP_get_order.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
self.EC_GROUP_get_degree = self._lib.EC_GROUP_get_degree
self.EC_GROUP_get_degree.restype = ctypes.c_void_p
self.EC_GROUP_get_degree.argtypes = [ctypes.c_void_p]
self.EC_GROUP_get_curve_GFp = self._lib.EC_GROUP_get_curve_GFp
self.EC_GROUP_get_curve_GFp.restype = ctypes.c_void_p
self.EC_GROUP_get_curve_GFp.argtypes = [ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p,
ctypes.c_void_p]
self.EC_POINT_mul = self._lib.EC_POINT_mul
self.EC_POINT_mul.restype = ctypes.c_int
self.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p, ctypes.c_void_p,
ctypes.c_void_p, ctypes.c_void_p]
self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key
self.EC_KEY_set_private_key.restype = ctypes.c_int
self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p,
ctypes.c_void_p]
self.EC_KEY_set_conv_form = self._lib.EC_KEY_set_conv_form
self.EC_KEY_set_conv_form.restype = None
self.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p,
ctypes.c_int]
self.BN_CTX_new = self._lib.BN_CTX_new
self._lib.BN_CTX_new.restype = ctypes.c_void_p
self._lib.BN_CTX_new.argtypes = []
self.BN_CTX_start = self._lib.BN_CTX_start
self._lib.BN_CTX_start.restype = ctypes.c_void_p
self._lib.BN_CTX_start.argtypes = [ctypes.c_void_p]
self.BN_CTX_get = self._lib.BN_CTX_get
self._lib.BN_CTX_get.restype = ctypes.c_void_p
self._lib.BN_CTX_get.argtypes = [ctypes.c_void_p]
self.ECDSA_sign = self._lib.ECDSA_sign
self.ECDSA_sign.restype = ctypes.c_int
self.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p,
ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
self.ECDSA_verify = self._lib.ECDSA_verify
self.ECDSA_verify.restype = ctypes.c_int
self.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p,
ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
self.i2o_ECPublicKey = self._lib.i2o_ECPublicKey
self.i2o_ECPublicKey.restype = ctypes.c_void_p
self.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
self.BN_CTX_free = self._lib.BN_CTX_free
self.BN_CTX_free.restype = None
self.BN_CTX_free.argtypes = [ctypes.c_void_p]
self.EC_POINT_free = self._lib.EC_POINT_free
self.EC_POINT_free.restype = None
self.EC_POINT_free.argtypes = [ctypes.c_void_p]
ssl = None
def openLibrary():
global ssl
import util.SslPatch
ssl = _OpenSSL(util.SslPatch.getLibraryPath())
logging.debug("opensslVerify loaded: %s", ssl._lib)
if __name__ == "__main__":
ssl = _OpenSSL(sys.argv[1])
else:
openLibrary()
openssl_version = "%.9X" % ssl._lib.SSLeay()
NID_secp256k1 = 714
def check_result(val, func, args):
if val == 0:
raise ValueError
else:
return ctypes.c_void_p(val)
ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
ssl.EC_KEY_new_by_curve_name.errcheck = check_result
POINT_CONVERSION_COMPRESSED = 2
POINT_CONVERSION_UNCOMPRESSED = 4
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
def b58encode(v):
""" encode v, which is a string of bytes, to base58.
"""
long_value = 0L
for (i, c) in enumerate(v[::-1]):
long_value += (256 ** i) * ord(c)
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# Bitcoin does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
nPad = 0
for c in v:
if c == '\0':
nPad += 1
else:
break
return (__b58chars[0] * nPad) + result
def hash_160(public_key):
md = hashlib.new('ripemd160')
md.update(hashlib.sha256(public_key).digest())
return md.digest()
def hash_160_to_bc_address(h160):
vh160 = chr(addrtype) + h160
h = Hash(vh160)
addr = vh160 + h[0:4]
return b58encode(addr)
def public_key_to_bc_address(public_key):
h160 = hash_160(public_key)
return hash_160_to_bc_address(h160)
def encode(val, base, minlen=0):
base, minlen = int(base), int(minlen)
code_string = ''.join([chr(x) for x in range(256)])
result = ""
while val > 0:
result = code_string[val % base] + result
val //= base
return code_string[0] * max(minlen - len(result), 0) + result
def num_to_var_int(x):
x = int(x)
if x < 253:
return chr(x)
elif x < 65536:
return chr(253) + encode(x, 256, 2)[::-1]
elif x < 4294967296:
return chr(254) + encode(x, 256, 4)[::-1]
else:
return chr(255) + encode(x, 256, 8)[::-1]
def msg_magic(message):
return "\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + message
def get_address(eckey):
size = ssl.i2o_ECPublicKey(eckey, 0)
mb = ctypes.create_string_buffer(size)
ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb)))
return public_key_to_bc_address(mb.raw)
def Hash(data):
return hashlib.sha256(hashlib.sha256(data).digest()).digest()
def bx(bn, size=32):
b = ctypes.create_string_buffer(size)
ssl.BN_bn2bin(bn, b)
return b.raw.encode('hex')
def verify_message(address, signature, message):
pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), signature)
addr = get_address(eckey)
return (address == addr)
def SetCompactSignature(pkey, hash, signature):
sig = base64.b64decode(signature)
if len(sig) != 65:
raise Exception("Wrong encoding")
nV = ord(sig[0])
if nV < 27 or nV >= 35:
return False
if nV >= 31:
ssl.EC_KEY_set_conv_form(pkey, POINT_CONVERSION_COMPRESSED)
nV -= 4
r = ssl.BN_bin2bn(sig[1:33], 32, None)
s = ssl.BN_bin2bn(sig[33:], 32, None)
eckey = ECDSA_SIG_recover_key_GFp(pkey, r, s, hash, len(hash), nV - 27,
False)
return eckey
def ECDSA_SIG_recover_key_GFp(eckey, r, s, msg, msglen, recid, check):
n = 0
i = recid / 2
ctx = R = O = Q = None
try:
group = ssl.EC_KEY_get0_group(eckey)
ctx = ssl.BN_CTX_new()
ssl.BN_CTX_start(ctx)
order = ssl.BN_CTX_get(ctx)
ssl.EC_GROUP_get_order(group, order, ctx)
x = ssl.BN_CTX_get(ctx)
ssl.BN_copy(x, order)
ssl.BN_mul_word(x, i)
ssl.BN_add(x, x, r)
field = ssl.BN_CTX_get(ctx)
ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx)
if (ssl.BN_cmp(x, field) >= 0):
return False
R = ssl.EC_POINT_new(group)
ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx)
if check:
O = ssl.EC_POINT_new(group)
ssl.EC_POINT_mul(group, O, None, R, order, ctx)
if ssl.EC_POINT_is_at_infinity(group, O):
return False
Q = ssl.EC_POINT_new(group)
n = ssl.EC_GROUP_get_degree(group)
e = ssl.BN_CTX_get(ctx)
ssl.BN_bin2bn(msg, msglen, e)
if 8 * msglen > n:
ssl.BN_rshift(e, e, 8 - (n & 7))
zero = ssl.BN_CTX_get(ctx)
ssl.BN_set_word(zero, 0)
ssl.BN_mod_sub(e, zero, e, order, ctx)
rr = ssl.BN_CTX_get(ctx)
ssl.BN_mod_inverse(rr, r, order, ctx)
sor = ssl.BN_CTX_get(ctx)
ssl.BN_mod_mul(sor, s, rr, order, ctx)
eor = ssl.BN_CTX_get(ctx)
ssl.BN_mod_mul(eor, e, rr, order, ctx)
ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx)
ssl.EC_KEY_set_public_key(eckey, Q)
return eckey
finally:
if ctx:
ssl.BN_CTX_free(ctx)
if R:
ssl.EC_POINT_free(R)
if O:
ssl.EC_POINT_free(O)
if Q:
ssl.EC_POINT_free(Q)
def closeLibrary():
handle = ssl._lib._handle
if "FreeLibrary" in dir(_ctypes):
_ctypes.FreeLibrary(handle)
_ctypes.FreeLibrary(handle)
print "OpenSSL closed, handle:", handle
else:
_ctypes.dlclose(handle)
_ctypes.dlclose(handle)
print "OpenSSL dlclosed, handle:", handle
def getMessagePubkey(message, sig):
pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
if type(pkey) is not int and not pkey.value:
raise Exception(
"OpenSSL %s (%s) EC_KEY_new_by_curve_name failed: %s, probably your OpenSSL lib does not support secp256k1 elliptic curve. Please check: https://github.com/HelloZeroNet/ZeroNet/issues/132" %
(openssl_version, ssl._lib._name, pkey.value)
)
eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), sig)
size = ssl.i2o_ECPublicKey(eckey, 0)
mb = ctypes.create_string_buffer(size)
ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb)))
pub = mb.raw
"""
if time.time() - ssl.time_opened > 60 * 5: # Reopen every 5 min
logging.debug("Reopening OpenSSL...")
closeLibrary()
openLibrary()
"""
return pub
def test():
sign = "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ="
pubkey = "044827c756561b8ef6b28b5e53a000805adbf4938ab82e1c2b7f7ea16a0d6face9a509a0a13e794d742210b00581f3e249ebcc705240af2540ea19591091ac1d41"
assert getMessagePubkey("hello", sign).encode("hex") == pubkey
test() # Make sure it's working right
if __name__ == "__main__":
import time
import os
import sys
sys.path.append("../pybitcointools")
import bitcoin as btctools
print "OpenSSL version %s" % openssl_version
print ssl._lib
priv = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
address = "1N2XWu5soeppX2qUjvrf81rpdbShKJrjTr"
sign = btctools.ecdsa_sign("hello", priv) # HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=
s = time.time()
for i in range(1000):
pubkey = getMessagePubkey("hello", sign)
verified = btctools.pubkey_to_address(pubkey) == address
print "1000x Verified", verified, time.time() - s

Binary file not shown.

View file

@ -1,24 +0,0 @@
Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

View file

@ -1,8 +0,0 @@
import sys
# http://www.python.org/dev/peps/pep-0396/
__version__ = '0.2.4'
if sys.version_info[:2] < (2, 4):
raise RuntimeError('PyASN1 requires Python 2.4 or later')

View file

@ -1 +0,0 @@
# This file is necessary to make this directory a package.

View file

@ -1 +0,0 @@
# This file is necessary to make this directory a package.

File diff suppressed because it is too large

View file

@ -1,506 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.type import base, tag, univ, char, useful
from pyasn1.codec.ber import eoo
from pyasn1.compat.octets import int2oct, oct2int, ints2octs, null, str2octs
from pyasn1.compat.integer import to_bytes
from pyasn1 import debug, error
__all__ = ['encode']
class AbstractItemEncoder(object):
supportIndefLenMode = 1
# noinspection PyMethodMayBeStatic
def encodeTag(self, singleTag, isConstructed):
tagClass, tagFormat, tagId = singleTag
encodedTag = tagClass | tagFormat
if isConstructed:
encodedTag |= tag.tagFormatConstructed
if tagId < 31:
return (encodedTag | tagId,)
else:
substrate = (tagId & 0x7f,)
tagId >>= 7
while tagId:
substrate = (0x80 | (tagId & 0x7f),) + substrate
tagId >>= 7
return (encodedTag | 0x1F,) + substrate
def encodeLength(self, length, defMode):
if not defMode and self.supportIndefLenMode:
return (0x80,)
if length < 0x80:
return (length,)
else:
substrate = ()
while length:
substrate = (length & 0xff,) + substrate
length >>= 8
substrateLen = len(substrate)
if substrateLen > 126:
raise error.PyAsn1Error('Length octets overflow (%d)' % substrateLen)
return (0x80 | substrateLen,) + substrate
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
raise error.PyAsn1Error('Not implemented')
def _encodeEndOfOctets(self, encodeFun, defMode):
if defMode or not self.supportIndefLenMode:
return null
else:
return encodeFun(eoo.endOfOctets, defMode)
def encode(self, encodeFun, value, defMode, maxChunkSize):
substrate, isConstructed, isOctets = self.encodeValue(
encodeFun, value, defMode, maxChunkSize
)
tagSet = value.tagSet
# tagged value?
if tagSet:
if not isConstructed: # primitive form implies definite mode
defMode = True
header = self.encodeTag(tagSet[-1], isConstructed)
header += self.encodeLength(len(substrate), defMode)
if isOctets:
substrate = ints2octs(header) + substrate
else:
substrate = ints2octs(header + substrate)
eoo = self._encodeEndOfOctets(encodeFun, defMode)
if eoo:
substrate += eoo
return substrate
class EndOfOctetsEncoder(AbstractItemEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return null, False, True
class ExplicitlyTaggedItemEncoder(AbstractItemEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
if isinstance(value, base.AbstractConstructedAsn1Item):
value = value.clone(tagSet=value.tagSet[:-1], cloneValueFlag=1)
else:
value = value.clone(tagSet=value.tagSet[:-1])
return encodeFun(value, defMode, maxChunkSize), True, True
explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder()
class BooleanEncoder(AbstractItemEncoder):
supportIndefLenMode = False
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return value and (1,) or (0,), False, False
class IntegerEncoder(AbstractItemEncoder):
supportIndefLenMode = False
supportCompactZero = False
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
if value == 0:
# de-facto way to encode zero
if self.supportCompactZero:
return (), False, False
else:
return (0,), False, False
return to_bytes(int(value), signed=True), False, True
class BitStringEncoder(AbstractItemEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
valueLength = len(value)
if valueLength % 8:
alignedValue = value << (8 - valueLength % 8)
else:
alignedValue = value
if not maxChunkSize or len(alignedValue) <= maxChunkSize * 8:
substrate = alignedValue.asOctets()
return int2oct(len(substrate) * 8 - valueLength) + substrate, False, True
stop = 0
substrate = null
while stop < valueLength:
start = stop
stop = min(start + maxChunkSize * 8, valueLength)
substrate += encodeFun(alignedValue[start:stop], defMode, maxChunkSize)
return substrate, True, True
class OctetStringEncoder(AbstractItemEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
if not maxChunkSize or len(value) <= maxChunkSize:
return value.asOctets(), False, True
else:
pos = 0
substrate = null
while True:
v = value.clone(value[pos:pos + maxChunkSize])
if not v:
break
substrate += encodeFun(v, defMode, maxChunkSize)
pos += maxChunkSize
return substrate, True, True
class NullEncoder(AbstractItemEncoder):
supportIndefLenMode = False
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return null, False, True
class ObjectIdentifierEncoder(AbstractItemEncoder):
supportIndefLenMode = False
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
oid = value.asTuple()
# Build the first pair
try:
first = oid[0]
second = oid[1]
except IndexError:
raise error.PyAsn1Error('Short OID %s' % (value,))
if 0 <= second <= 39:
if first == 1:
oid = (second + 40,) + oid[2:]
elif first == 0:
oid = (second,) + oid[2:]
elif first == 2:
oid = (second + 80,) + oid[2:]
else:
raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))
elif first == 2:
oid = (second + 80,) + oid[2:]
else:
raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))
octets = ()
# Cycle through subIds
for subOid in oid:
if 0 <= subOid <= 127:
# Optimize for the common case
octets += (subOid,)
elif subOid > 127:
# Pack large Sub-Object IDs
res = (subOid & 0x7f,)
subOid >>= 7
while subOid:
res = (0x80 | (subOid & 0x7f),) + res
subOid >>= 7
# Add packed Sub-Object ID to resulted Object ID
octets += res
else:
raise error.PyAsn1Error('Negative OID arc %s at %s' % (subOid, value))
return octets, False, False
class RealEncoder(AbstractItemEncoder):
supportIndefLenMode = 0
binEncBase = 2 # set to None to choose encoding base automatically
@staticmethod
def _dropFloatingPoint(m, encbase, e):
ms, es = 1, 1
if m < 0:
ms = -1 # mantissa sign
if e < 0:
es = -1 # exponenta sign
m *= ms
if encbase == 8:
m *= 2 ** (abs(e) % 3 * es)
e = abs(e) // 3 * es
elif encbase == 16:
m *= 2 ** (abs(e) % 4 * es)
e = abs(e) // 4 * es
while True:
if int(m) != m:
m *= encbase
e -= 1
continue
break
return ms, int(m), encbase, e
def _chooseEncBase(self, value):
m, b, e = value
encBase = [2, 8, 16]
if value.binEncBase in encBase:
return self._dropFloatingPoint(m, value.binEncBase, e)
elif self.binEncBase in encBase:
return self._dropFloatingPoint(m, self.binEncBase, e)
# auto choosing base 2/8/16
mantissa = [m, m, m]
exponenta = [e, e, e]
sign = 1
encbase = 2
e = float('inf')
for i in range(3):
(sign,
mantissa[i],
encBase[i],
exponenta[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponenta[i])
if abs(exponenta[i]) < abs(e) or (abs(exponenta[i]) == abs(e) and mantissa[i] < m):
e = exponenta[i]
m = int(mantissa[i])
encbase = encBase[i]
return sign, m, encbase, e
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
if value.isPlusInfinity():
return (0x40,), False, False
if value.isMinusInfinity():
return (0x41,), False, False
m, b, e = value
if not m:
return null, False, True
if b == 10:
return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), False, True
elif b == 2:
fo = 0x80 # binary encoding
ms, m, encbase, e = self._chooseEncBase(value)
if ms < 0: # mantissa sign
fo |= 0x40 # sign bit
# exponenta & mantissa normalization
if encbase == 2:
while m & 0x1 == 0:
m >>= 1
e += 1
elif encbase == 8:
while m & 0x7 == 0:
m >>= 3
e += 1
fo |= 0x10
else: # encbase = 16
while m & 0xf == 0:
m >>= 4
e += 1
fo |= 0x20
sf = 0 # scale factor
while m & 0x1 == 0:
m >>= 1
sf += 1
if sf > 3:
raise error.PyAsn1Error('Scale factor overflow') # bug if raised
fo |= sf << 2
eo = null
if e == 0 or e == -1:
eo = int2oct(e & 0xff)
else:
while e not in (0, -1):
eo = int2oct(e & 0xff) + eo
e >>= 8
if e == 0 and eo and oct2int(eo[0]) & 0x80:
eo = int2oct(0) + eo
if e == -1 and eo and not (oct2int(eo[0]) & 0x80):
eo = int2oct(0xff) + eo
n = len(eo)
if n > 0xff:
raise error.PyAsn1Error('Real exponent overflow')
if n == 1:
pass
elif n == 2:
fo |= 1
elif n == 3:
fo |= 2
else:
fo |= 3
eo = int2oct(n & 0xff) + eo
po = null
while m:
po = int2oct(m & 0xff) + po
m >>= 8
substrate = int2oct(fo) + eo + po
return substrate, False, True
else:
raise error.PyAsn1Error('Prohibited Real base %s' % b)
class SequenceEncoder(AbstractItemEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
value.verifySizeSpec()
namedTypes = value.getComponentType()
substrate = null
idx = len(value)
while idx > 0:
idx -= 1
if namedTypes:
if namedTypes[idx].isOptional and not value[idx].isValue:
continue
if namedTypes[idx].isDefaulted and value[idx] == namedTypes[idx].asn1Object:
continue
substrate = encodeFun(value[idx], defMode, maxChunkSize) + substrate
return substrate, True, True
class SequenceOfEncoder(AbstractItemEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
value.verifySizeSpec()
substrate = null
idx = len(value)
while idx > 0:
idx -= 1
substrate = encodeFun(value[idx], defMode, maxChunkSize) + substrate
return substrate, True, True
class ChoiceEncoder(AbstractItemEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return encodeFun(value.getComponent(), defMode, maxChunkSize), True, True
class AnyEncoder(OctetStringEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return value.asOctets(), defMode == False, True
tagMap = {
eoo.endOfOctets.tagSet: EndOfOctetsEncoder(),
univ.Boolean.tagSet: BooleanEncoder(),
univ.Integer.tagSet: IntegerEncoder(),
univ.BitString.tagSet: BitStringEncoder(),
univ.OctetString.tagSet: OctetStringEncoder(),
univ.Null.tagSet: NullEncoder(),
univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
univ.Enumerated.tagSet: IntegerEncoder(),
univ.Real.tagSet: RealEncoder(),
# Sequence & Set have same tags as SequenceOf & SetOf
univ.SequenceOf.tagSet: SequenceOfEncoder(),
univ.SetOf.tagSet: SequenceOfEncoder(),
univ.Choice.tagSet: ChoiceEncoder(),
# character string types
char.UTF8String.tagSet: OctetStringEncoder(),
char.NumericString.tagSet: OctetStringEncoder(),
char.PrintableString.tagSet: OctetStringEncoder(),
char.TeletexString.tagSet: OctetStringEncoder(),
char.VideotexString.tagSet: OctetStringEncoder(),
char.IA5String.tagSet: OctetStringEncoder(),
char.GraphicString.tagSet: OctetStringEncoder(),
char.VisibleString.tagSet: OctetStringEncoder(),
char.GeneralString.tagSet: OctetStringEncoder(),
char.UniversalString.tagSet: OctetStringEncoder(),
char.BMPString.tagSet: OctetStringEncoder(),
# useful types
useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
useful.GeneralizedTime.tagSet: OctetStringEncoder(),
useful.UTCTime.tagSet: OctetStringEncoder()
}
# Put in ambiguous & non-ambiguous types for faster codec lookup
typeMap = {
univ.Boolean.typeId: BooleanEncoder(),
univ.Integer.typeId: IntegerEncoder(),
univ.BitString.typeId: BitStringEncoder(),
univ.OctetString.typeId: OctetStringEncoder(),
univ.Null.typeId: NullEncoder(),
univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(),
univ.Enumerated.typeId: IntegerEncoder(),
univ.Real.typeId: RealEncoder(),
# Sequence & Set have same tags as SequenceOf & SetOf
univ.Set.typeId: SequenceEncoder(),
univ.SetOf.typeId: SequenceOfEncoder(),
univ.Sequence.typeId: SequenceEncoder(),
univ.SequenceOf.typeId: SequenceOfEncoder(),
univ.Choice.typeId: ChoiceEncoder(),
univ.Any.typeId: AnyEncoder(),
# character string types
char.UTF8String.typeId: OctetStringEncoder(),
char.NumericString.typeId: OctetStringEncoder(),
char.PrintableString.typeId: OctetStringEncoder(),
char.TeletexString.typeId: OctetStringEncoder(),
char.VideotexString.typeId: OctetStringEncoder(),
char.IA5String.typeId: OctetStringEncoder(),
char.GraphicString.typeId: OctetStringEncoder(),
char.VisibleString.typeId: OctetStringEncoder(),
char.GeneralString.typeId: OctetStringEncoder(),
char.UniversalString.typeId: OctetStringEncoder(),
char.BMPString.typeId: OctetStringEncoder(),
# useful types
useful.ObjectDescriptor.typeId: OctetStringEncoder(),
useful.GeneralizedTime.typeId: OctetStringEncoder(),
useful.UTCTime.typeId: OctetStringEncoder()
}
class Encoder(object):
supportIndefLength = True
# noinspection PyDefaultArgument
def __init__(self, tagMap, typeMap={}):
self.__tagMap = tagMap
self.__typeMap = typeMap
def __call__(self, value, defMode=True, maxChunkSize=0):
if not defMode and not self.supportIndefLength:
raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
debug.logger & debug.flagEncoder and debug.logger(
'encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (
not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint()))
tagSet = value.tagSet
if len(tagSet) > 1:
concreteEncoder = explicitlyTaggedItemEncoder
else:
try:
concreteEncoder = self.__typeMap[value.typeId]
except KeyError:
# use base type for codec lookup to recover untagged types
baseTagSet = tag.TagSet(value.tagSet.baseTag, value.tagSet.baseTag)
try:
concreteEncoder = self.__tagMap[baseTagSet]
except KeyError:
raise error.PyAsn1Error('No encoder for %s' % (value,))
debug.logger & debug.flagEncoder and debug.logger(
'using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet))
substrate = concreteEncoder.encode(
self, value, defMode, maxChunkSize
)
debug.logger & debug.flagEncoder and debug.logger(
'built %s octets of substrate: %s\nencoder completed' % (len(substrate), debug.hexdump(substrate)))
return substrate
#: Turns ASN.1 object into BER octet stream.
#:
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: walks all its components recursively and produces a BER octet stream.
#:
#: Parameters
#: ----------
#: value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: A pyasn1 object to encode
#:
#: defMode: :py:class:`bool`
#: If `False`, produces indefinite length encoding
#:
#: maxChunkSize: :py:class:`int`
#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size)
#:
#: Returns
#: -------
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
#: Given ASN.1 object encoded into BER octetstream
#:
#: Raises
#: ------
#: : :py:class:`pyasn1.error.PyAsn1Error`
#: On encoding errors
encode = Encoder(tagMap, typeMap)
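# A minimal usage sketch of the `encode` callable defined above, assuming the
# companion BER decoder sits next to this module as in upstream pyasn1:
from pyasn1.codec.ber import decoder

substrate = encode(univ.Integer(12))  # b'\x02\x01\x0c'
value, rest = decoder.decode(substrate)
assert value == 12 and not rest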

View file

@ -1,25 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.type import base, tag
class EndOfOctets(base.AbstractSimpleAsn1Item):
defaultValue = 0
tagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00)
)
_instance = None
def __new__(cls, *args):
if cls._instance is None:
cls._instance = object.__new__(cls, *args)
return cls._instance
endOfOctets = EndOfOctets()

View file

@ -1 +0,0 @@
# This file is necessary to make this directory a package.

View file

@ -1,87 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.type import univ
from pyasn1.codec.ber import decoder
from pyasn1.compat.octets import oct2int
from pyasn1 import error
__all__ = ['decode']
class BooleanDecoder(decoder.AbstractSimpleDecoder):
protoComponent = univ.Boolean(0)
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
state, decodeFun, substrateFun):
head, tail = substrate[:length], substrate[length:]
if not head or length != 1:
raise error.PyAsn1Error('Not single-octet Boolean payload')
byte = oct2int(head[0])
# CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while
# BER allows any non-zero value as TRUE; cf. sections 8.2.2. and 11.1
# in http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
if byte == 0xff:
value = 1
elif byte == 0x00:
value = 0
else:
raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte)
return self._createComponent(asn1Spec, tagSet, value), tail
# TODO: prohibit non-canonical encoding
BitStringDecoder = decoder.BitStringDecoder
OctetStringDecoder = decoder.OctetStringDecoder
RealDecoder = decoder.RealDecoder
tagMap = decoder.tagMap.copy()
tagMap.update(
{univ.Boolean.tagSet: BooleanDecoder(),
univ.BitString.tagSet: BitStringDecoder(),
univ.OctetString.tagSet: OctetStringDecoder(),
univ.Real.tagSet: RealDecoder()}
)
typeMap = decoder.typeMap.copy()
# Put in non-ambiguous types for faster codec lookup
for typeDecoder in tagMap.values():
typeId = typeDecoder.protoComponent.__class__.typeId
if typeId is not None and typeId not in typeMap:
typeMap[typeId] = typeDecoder
class Decoder(decoder.Decoder):
pass
#: Turns CER octet stream into an ASN.1 object.
#:
#: Takes CER octetstream and decodes it into an ASN.1 object
#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
#: may be a scalar or an arbitrary nested structure.
#:
#: Parameters
#: ----------
#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
#: CER octetstream
#:
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
#: being decoded, *asn1Spec* may or may not be required. The most common reason
#: for it to be required is that the ASN.1 structure is encoded in *IMPLICIT* tagging mode.
#:
#: Returns
#: -------
#: : :py:class:`tuple`
#: A tuple of pyasn1 object recovered from CER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: and the unprocessed trailing portion of the *substrate* (may be empty)
#:
#: Raises
#: ------
#: : :py:class:`pyasn1.error.PyAsn1Error`
#: On decoding errors
decode = Decoder(tagMap, decoder.typeMap)

View file

@ -1,179 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.type import univ
from pyasn1.type import useful
from pyasn1.codec.ber import encoder
from pyasn1.compat.octets import int2oct, str2octs, null
from pyasn1 import error
__all__ = ['encode']
class BooleanEncoder(encoder.IntegerEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
if client == 0:
substrate = (0,)
else:
substrate = (255,)
return substrate, False, False
class BitStringEncoder(encoder.BitStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.BitStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class OctetStringEncoder(encoder.OctetStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class RealEncoder(encoder.RealEncoder):
def _chooseEncBase(self, value):
m, b, e = value
return self._dropFloatingPoint(m, b, e)
# specialized GeneralStringEncoder here
class GeneralizedTimeEncoder(OctetStringEncoder):
zchar = str2octs('Z')
pluschar = str2octs('+')
minuschar = str2octs('-')
zero = str2octs('0')
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
octets = client.asOctets()
# This breaks too many existing data items
# if '.' not in octets:
# raise error.PyAsn1Error('Format must include fraction of second: %r' % octets)
if len(octets) < 15:
raise error.PyAsn1Error('Bad UTC time length: %r' % octets)
if self.pluschar in octets or self.minuschar in octets:
raise error.PyAsn1Error('Must be UTC time: %r' % octets)
if octets[-1] != self.zchar[0]:
raise error.PyAsn1Error('Missing timezone specifier: %r' % octets)
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class UTCTimeEncoder(encoder.OctetStringEncoder):
zchar = str2octs('Z')
pluschar = str2octs('+')
minuschar = str2octs('-')
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
octets = client.asOctets()
if self.pluschar in octets or self.minuschar in octets:
raise error.PyAsn1Error('Must be UTC time: %r' % octets)
if octets and octets[-1] != self.zchar[0]:
client = client.clone(octets + self.zchar)
if len(client) != 13:
raise error.PyAsn1Error('Bad UTC time length: %r' % client)
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class SetOfEncoder(encoder.SequenceOfEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
client.verifySizeSpec()
substrate = null
idx = len(client)
# This is certainly a hack but how else do I distinguish SetOf
# from Set if they have the same tags&constraints?
if isinstance(client, univ.SequenceAndSetBase):
# Set
namedTypes = client.getComponentType()
comps = []
while idx > 0:
idx -= 1
if namedTypes[idx].isOptional and not client[idx].isValue:
continue
if namedTypes[idx].isDefaulted and client[idx] == namedTypes[idx].asn1Object:
continue
comps.append(client[idx])
comps.sort(key=lambda x: isinstance(x, univ.Choice) and x.getMinTagSet() or x.tagSet)
for c in comps:
substrate += encodeFun(c, defMode, maxChunkSize)
else:
# SetOf
compSubs = []
while idx > 0:
idx -= 1
compSubs.append(
encodeFun(client[idx], defMode, maxChunkSize)
)
compSubs.sort() # perhaps padding's not needed
substrate = null
for compSub in compSubs:
substrate += compSub
return substrate, True, True
tagMap = encoder.tagMap.copy()
tagMap.update({
univ.Boolean.tagSet: BooleanEncoder(),
univ.BitString.tagSet: BitStringEncoder(),
univ.OctetString.tagSet: OctetStringEncoder(),
univ.Real.tagSet: RealEncoder(),
useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
useful.UTCTime.tagSet: UTCTimeEncoder(),
univ.SetOf().tagSet: SetOfEncoder() # conflicts with Set
})
typeMap = encoder.typeMap.copy()
typeMap.update({
univ.Boolean.typeId: BooleanEncoder(),
univ.BitString.typeId: BitStringEncoder(),
univ.OctetString.typeId: OctetStringEncoder(),
univ.Real.typeId: RealEncoder(),
useful.GeneralizedTime.typeId: GeneralizedTimeEncoder(),
useful.UTCTime.typeId: UTCTimeEncoder(),
univ.Set.typeId: SetOfEncoder(),
univ.SetOf.typeId: SetOfEncoder()
})
class Encoder(encoder.Encoder):
def __call__(self, client, defMode=False, maxChunkSize=0):
return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
#: Turns ASN.1 object into CER octet stream.
#:
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: walks all its components recursively and produces a CER octet stream.
#:
#: Parameters
#: ----------
#: value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: A pyasn1 object to encode
#:
#: defMode: :py:class:`bool`
#: If `False`, produces indefinite length encoding
#:
#: maxChunkSize: :py:class:`int`
#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size)
#:
#: Returns
#: -------
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
#: Given ASN.1 object encoded into CER octetstream
#:
#: Raises
#: ------
#: : :py:class:`pyasn1.error.PyAsn1Error`
#: On encoding errors
encode = Encoder(tagMap, typeMap)
# EncoderFactory queries class instance and builds a map of tags -> encoders
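# A minimal usage sketch of the CER `encode` callable above: it takes the same
# arguments as its BER counterpart but defaults to indefinite-length, chunked
# encodings for long strings and to the canonical 0xFF encoding of TRUE.
assert encode(univ.Boolean(True)) == b'\x01\x01\xff'
assert encode(univ.OctetString('x' * 2000))[:2] == b'\x24\x80'  # constructed, indefinite length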

View file

@ -1 +0,0 @@
# This file is necessary to make this directory a package.

View file

@ -1,69 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.type import univ
from pyasn1.codec.cer import decoder
__all__ = ['decode']
class BitStringDecoder(decoder.BitStringDecoder):
supportConstructedForm = False
class OctetStringDecoder(decoder.OctetStringDecoder):
supportConstructedForm = False
# TODO: prohibit non-canonical encoding
RealDecoder = decoder.RealDecoder
tagMap = decoder.tagMap.copy()
tagMap.update(
{univ.BitString.tagSet: BitStringDecoder(),
univ.OctetString.tagSet: OctetStringDecoder(),
univ.Real.tagSet: RealDecoder()}
)
typeMap = decoder.typeMap.copy()
# Put in non-ambiguous types for faster codec lookup
for typeDecoder in tagMap.values():
typeId = typeDecoder.protoComponent.__class__.typeId
if typeId is not None and typeId not in typeMap:
typeMap[typeId] = typeDecoder
class Decoder(decoder.Decoder):
supportIndefLength = False
#: Turns DER octet stream into an ASN.1 object.
#:
#: Takes DER octetstream and decodes it into an ASN.1 object
#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
#: may be a scalar or an arbitrary nested structure.
#:
#: Parameters
#: ----------
#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
#: DER octetstream
#:
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
#: being decoded, *asn1Spec* may or may not be required. The most common reason
#: for it to be required is that the ASN.1 structure is encoded in *IMPLICIT* tagging mode.
#:
#: Returns
#: -------
#: : :py:class:`tuple`
#: A tuple of pyasn1 object recovered from DER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: and the unprocessed trailing portion of the *substrate* (may be empty)
#:
#: Raises
#: ------
#: : :py:class:`pyasn1.error.PyAsn1Error`
#: On decoding errors
decode = Decoder(tagMap, typeMap)
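A usage sketch of the DER decoder above; the substrate is the DER encoding of INTEGER 12 (02 01 0C):

from pyasn1.type import univ
from pyasn1.codec.der import decoder

value, rest = decoder.decode(b'\x02\x01\x0c', asn1Spec=univ.Integer())
assert value == 12 and rest == b''   # nothing left over after the first TLV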

View file

@ -1,67 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.type import univ
from pyasn1.codec.cer import encoder
from pyasn1 import error
__all__ = ['encode']
class SetOfEncoder(encoder.SetOfEncoder):
@staticmethod
def _cmpSetComponents(c1, c2):
tagSet1 = isinstance(c1, univ.Choice) and c1.effectiveTagSet or c1.tagSet
tagSet2 = isinstance(c2, univ.Choice) and c2.effectiveTagSet or c2.tagSet
return cmp(tagSet1, tagSet2)
tagMap = encoder.tagMap.copy()
tagMap.update({
# Overload CER encoders with BER ones (a bit hackerish XXX)
univ.BitString.tagSet: encoder.encoder.BitStringEncoder(),
univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(),
# Set & SetOf have same tags
univ.SetOf().tagSet: SetOfEncoder()
})
typeMap = encoder.typeMap.copy()
class Encoder(encoder.Encoder):
supportIndefLength = False
def __call__(self, client, defMode=True, maxChunkSize=0):
if not defMode or maxChunkSize:
raise error.PyAsn1Error('DER forbids indefinite length mode')
return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
#: Turns ASN.1 object into DER octet stream.
#:
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: walks all its components recursively and produces a DER octet stream.
#:
#: Parameters
#: ----------
#: value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: A pyasn1 object to encode
#:
#: defMode: :py:class:`bool`
#: If `False`, produces indefinite length encoding
#:
#: maxChunkSize: :py:class:`int`
#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size)
#:
#: Returns
#: -------
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
#: Given ASN.1 object encoded into DER octet stream
#:
#: Raises
#: ------
#: : :py:class:`pyasn1.error.PyAsn1Error`
#: On encoding errors
encode = Encoder(tagMap, typeMap)
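A sketch of the DER encoder above, also showing the defMode guard in Encoder.__call__:

from pyasn1.type import univ
from pyasn1.codec.der import encoder
from pyasn1 import error

assert encoder.encode(univ.Integer(12)) == b'\x02\x01\x0c'
try:
    encoder.encode(univ.Integer(12), defMode=False)   # DER forbids indefinite length
except error.PyAsn1Error:
    pass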

View file

@ -1 +0,0 @@
# This file is necessary to make this directory a package.

View file

@ -1,188 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.type import base, univ, char, useful, tag
from pyasn1 import debug, error
__all__ = ['decode']
class AbstractScalarDecoder(object):
def __call__(self, pyObject, asn1Spec, decoderFunc=None):
return asn1Spec.clone(pyObject)
class BitStringDecoder(AbstractScalarDecoder):
def __call__(self, pyObject, asn1Spec, decoderFunc=None):
return asn1Spec.clone(univ.BitString.fromBinaryString(pyObject))
class SequenceOrSetDecoder(object):
def __call__(self, pyObject, asn1Spec, decoderFunc):
asn1Value = asn1Spec.clone()
componentsTypes = asn1Spec.getComponentType()
for field in asn1Value:
if field in pyObject:
asn1Value[field] = decoderFunc(pyObject[field], componentsTypes[field].asn1Object)
return asn1Value
class SequenceOfOrSetOfDecoder(object):
def __call__(self, pyObject, asn1Spec, decoderFunc):
asn1Value = asn1Spec.clone()
for pyValue in pyObject:
asn1Value.append(decoderFunc(pyValue, asn1Spec.getComponentType()))
return asn1Value
class ChoiceDecoder(object):
def __call__(self, pyObject, asn1Spec, decoderFunc):
asn1Value = asn1Spec.clone()
componentsTypes = asn1Spec.getComponentType()
for field in pyObject:
if field in componentsTypes:
asn1Value[field] = decoderFunc(pyObject[field], componentsTypes[field].asn1Object)
break
return asn1Value
tagMap = {
univ.Integer.tagSet: AbstractScalarDecoder(),
univ.Boolean.tagSet: AbstractScalarDecoder(),
univ.BitString.tagSet: BitStringDecoder(),
univ.OctetString.tagSet: AbstractScalarDecoder(),
univ.Null.tagSet: AbstractScalarDecoder(),
univ.ObjectIdentifier.tagSet: AbstractScalarDecoder(),
univ.Enumerated.tagSet: AbstractScalarDecoder(),
univ.Real.tagSet: AbstractScalarDecoder(),
univ.Sequence.tagSet: SequenceOrSetDecoder(), # conflicts with SequenceOf
univ.Set.tagSet: SequenceOrSetDecoder(), # conflicts with SetOf
univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any
# character string types
char.UTF8String.tagSet: AbstractScalarDecoder(),
char.NumericString.tagSet: AbstractScalarDecoder(),
char.PrintableString.tagSet: AbstractScalarDecoder(),
char.TeletexString.tagSet: AbstractScalarDecoder(),
char.VideotexString.tagSet: AbstractScalarDecoder(),
char.IA5String.tagSet: AbstractScalarDecoder(),
char.GraphicString.tagSet: AbstractScalarDecoder(),
char.VisibleString.tagSet: AbstractScalarDecoder(),
char.GeneralString.tagSet: AbstractScalarDecoder(),
char.UniversalString.tagSet: AbstractScalarDecoder(),
char.BMPString.tagSet: AbstractScalarDecoder(),
# useful types
useful.ObjectDescriptor.tagSet: AbstractScalarDecoder(),
useful.GeneralizedTime.tagSet: AbstractScalarDecoder(),
useful.UTCTime.tagSet: AbstractScalarDecoder()
}
# Put in ambiguous & non-ambiguous types for faster codec lookup
typeMap = {
univ.Integer.typeId: AbstractScalarDecoder(),
univ.Boolean.typeId: AbstractScalarDecoder(),
univ.BitString.typeId: BitStringDecoder(),
univ.OctetString.typeId: AbstractScalarDecoder(),
univ.Null.typeId: AbstractScalarDecoder(),
univ.ObjectIdentifier.typeId: AbstractScalarDecoder(),
univ.Enumerated.typeId: AbstractScalarDecoder(),
univ.Real.typeId: AbstractScalarDecoder(),
# ambiguous base types
univ.Set.typeId: SequenceOrSetDecoder(),
univ.SetOf.typeId: SequenceOfOrSetOfDecoder(),
univ.Sequence.typeId: SequenceOrSetDecoder(),
univ.SequenceOf.typeId: SequenceOfOrSetOfDecoder(),
univ.Choice.typeId: ChoiceDecoder(),
univ.Any.typeId: AbstractScalarDecoder(),
# character string types
char.UTF8String.typeId: AbstractScalarDecoder(),
char.NumericString.typeId: AbstractScalarDecoder(),
char.PrintableString.typeId: AbstractScalarDecoder(),
char.TeletexString.typeId: AbstractScalarDecoder(),
char.VideotexString.typeId: AbstractScalarDecoder(),
char.IA5String.typeId: AbstractScalarDecoder(),
char.GraphicString.typeId: AbstractScalarDecoder(),
char.VisibleString.typeId: AbstractScalarDecoder(),
char.GeneralString.typeId: AbstractScalarDecoder(),
char.UniversalString.typeId: AbstractScalarDecoder(),
char.BMPString.typeId: AbstractScalarDecoder(),
# useful types
useful.ObjectDescriptor.typeId: AbstractScalarDecoder(),
useful.GeneralizedTime.typeId: AbstractScalarDecoder(),
useful.UTCTime.typeId: AbstractScalarDecoder()
}
class Decoder(object):
# noinspection PyDefaultArgument
def __init__(self, tagMap, typeMap):
self.__tagMap = tagMap
self.__typeMap = typeMap
def __call__(self, pyObject, asn1Spec):
if debug.logger & debug.flagDecoder:
debug.scope.push(type(pyObject).__name__)
debug.logger('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__))
if asn1Spec is None or not isinstance(asn1Spec, base.Asn1Item):
raise error.PyAsn1Error('asn1Spec is not valid (should be an instance of an ASN.1 Item, not %s)' % asn1Spec.__class__.__name__)
try:
valueDecoder = self.__typeMap[asn1Spec.typeId]
except KeyError:
# use base type for codec lookup to recover untagged types
baseTagSet = tag.TagSet(asn1Spec.tagSet.baseTag, asn1Spec.tagSet.baseTag)
try:
valueDecoder = self.__tagMap[baseTagSet]
except KeyError:
raise error.PyAsn1Error('Unknown ASN.1 tag %s' % asn1Spec.tagSet)
if debug.logger & debug.flagDecoder:
debug.logger('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject)))
value = valueDecoder(pyObject, asn1Spec, self)
if debug.logger & debug.flagDecoder:
debug.logger('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value)))
debug.scope.pop()
return value
#: Turns Python objects of built-in types into ASN.1 objects.
#:
#: Takes Python objects of built-in types and turns them into a tree of
#: ASN.1 objects (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
#: may be a scalar or an arbitrary nested structure.
#:
#: Parameters
#: ----------
#: pyObject: :py:class:`object`
#: A scalar or nested Python objects
#:
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
#: A pyasn1 type object to act as a template guiding the decoder. It is required
#: for successfully mapping the Python objects onto their ASN.1
#: representations.
#:
#: Returns
#: -------
#: : :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
#: A scalar or constructed pyasn1 object
#:
#: Raises
#: ------
#: : :py:class:`pyasn1.error.PyAsn1Error`
#: On decoding errors
decode = Decoder(tagMap, typeMap)
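A sketch of the scalar path through this decoder; the package path is assumed to be pyasn1.codec.native (as in upstream pyasn1), since the diff does not show filenames:

from pyasn1.type import univ
from pyasn1.codec.native import decoder

# AbstractScalarDecoder simply clones the spec with the plain Python value.
asn1_value = decoder.decode(42, asn1Spec=univ.Integer())
assert asn1_value == 42 and isinstance(asn1_value, univ.Integer)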

View file

@ -1,215 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
try:
from collections import OrderedDict
except ImportError:
OrderedDict = dict
from pyasn1.type import base, univ, char, useful
from pyasn1 import debug, error
__all__ = ['encode']
class AbstractItemEncoder(object):
def encode(self, encodeFun, value):
raise error.PyAsn1Error('Not implemented')
class ExplicitlyTaggedItemEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
if isinstance(value, base.AbstractConstructedAsn1Item):
value = value.clone(tagSet=value.tagSet[:-1],
cloneValueFlag=1)
else:
value = value.clone(tagSet=value.tagSet[:-1])
return encodeFun(value)
explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder()
class BooleanEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
return bool(value)
class IntegerEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
return int(value)
class BitStringEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
return str(value)
class OctetStringEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
return value.asOctets()
class TextStringEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
return value.prettyPrint()
class NullEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
return None
class ObjectIdentifierEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
return str(value)
class RealEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
return float(value)
class SetEncoder(AbstractItemEncoder):
protoDict = dict
def encode(self, encodeFun, value):
value.verifySizeSpec()
namedTypes = value.getComponentType()
substrate = self.protoDict()
for idx, (key, subValue) in enumerate(value.items()):
if namedTypes[idx].isOptional and not value[idx].isValue:
continue
substrate[key] = encodeFun(subValue)
return substrate
class SequenceEncoder(SetEncoder):
protoDict = OrderedDict
class SequenceOfEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
value.verifySizeSpec()
return [encodeFun(x) for x in value]
class ChoiceEncoder(SequenceEncoder):
pass
class AnyEncoder(AbstractItemEncoder):
def encode(self, encodeFun, value):
return value.asOctets()
tagMap = {
univ.Boolean.tagSet: BooleanEncoder(),
univ.Integer.tagSet: IntegerEncoder(),
univ.BitString.tagSet: BitStringEncoder(),
univ.OctetString.tagSet: OctetStringEncoder(),
univ.Null.tagSet: NullEncoder(),
univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
univ.Enumerated.tagSet: IntegerEncoder(),
univ.Real.tagSet: RealEncoder(),
# Sequence & Set have same tags as SequenceOf & SetOf
univ.SequenceOf.tagSet: SequenceOfEncoder(),
univ.SetOf.tagSet: SequenceOfEncoder(),
univ.Choice.tagSet: ChoiceEncoder(),
# character string types
char.UTF8String.tagSet: TextStringEncoder(),
char.NumericString.tagSet: TextStringEncoder(),
char.PrintableString.tagSet: TextStringEncoder(),
char.TeletexString.tagSet: TextStringEncoder(),
char.VideotexString.tagSet: TextStringEncoder(),
char.IA5String.tagSet: TextStringEncoder(),
char.GraphicString.tagSet: TextStringEncoder(),
char.VisibleString.tagSet: TextStringEncoder(),
char.GeneralString.tagSet: TextStringEncoder(),
char.UniversalString.tagSet: TextStringEncoder(),
char.BMPString.tagSet: TextStringEncoder(),
# useful types
useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
useful.GeneralizedTime.tagSet: OctetStringEncoder(),
useful.UTCTime.tagSet: OctetStringEncoder()
}
# Type-to-codec map for ambiguous ASN.1 types
typeMap = {
univ.Set.typeId: SetEncoder(),
univ.SetOf.typeId: SequenceOfEncoder(),
univ.Sequence.typeId: SequenceEncoder(),
univ.SequenceOf.typeId: SequenceOfEncoder(),
univ.Choice.typeId: ChoiceEncoder(),
univ.Any.typeId: AnyEncoder()
}
class Encoder(object):
# noinspection PyDefaultArgument
def __init__(self, tagMap, typeMap={}):
self.__tagMap = tagMap
self.__typeMap = typeMap
def __call__(self, asn1Value):
if not isinstance(asn1Value, base.Asn1Item):
raise error.PyAsn1Error('value is not valid (should be an instance of an ASN.1 Item)')
if debug.logger & debug.flagEncoder:
debug.scope.push(type(asn1Value).__name__)
debug.logger('encoder called for type %s <%s>' % (type(asn1Value).__name__, asn1Value.prettyPrint()))
tagSet = asn1Value.tagSet
if len(tagSet) > 1:
concreteEncoder = explicitlyTaggedItemEncoder
else:
if asn1Value.typeId is not None and asn1Value.typeId in self.__typeMap:
concreteEncoder = self.__typeMap[asn1Value.typeId]
elif tagSet in self.__tagMap:
concreteEncoder = self.__tagMap[tagSet]
else:
tagSet = asn1Value.baseTagSet
if tagSet in self.__tagMap:
concreteEncoder = self.__tagMap[tagSet]
else:
raise error.PyAsn1Error('No encoder for %s' % (asn1Value,))
debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (type(concreteEncoder).__name__, tagSet))
pyObject = concreteEncoder.encode(self, asn1Value)
if debug.logger & debug.flagEncoder:
debug.logger('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject)))
debug.scope.pop()
return pyObject
#: Turns ASN.1 object into a Python built-in type object(s).
#:
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: walks all its components recursively and produces a Python built-in type or a tree
#: of those.
#:
#: One exception is that instead of :py:class:`dict`, the :py:class:`OrderedDict`
#: can be produced (whenever available) to preserve ordering of the components
#: in ASN.1 SEQUENCE.
#:
#: Parameters
#: ----------
#: asn1Value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: pyasn1 object to encode (or a tree of them)
#:
#: Returns
#: -------
#: : :py:class:`object`
#: Python built-in type instance (or a tree of them)
#:
#: Raises
#: ------
#: : :py:class:`pyasn1.error.PyAsn1Error`
#: On encoding errors
encode = Encoder(tagMap, typeMap)
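A sketch of the matching native encoder above, again assuming the upstream pyasn1.codec.native package path:

from pyasn1.type import univ
from pyasn1.codec.native import encoder

assert encoder.encode(univ.Integer(42)) == 42              # IntegerEncoder -> int()
assert encoder.encode(univ.OctetString('abc')) == b'abc'   # OctetStringEncoder -> asOctets()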

View file

@ -1 +0,0 @@
# This file is necessary to make this directory a package.

View file

@ -1,25 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from sys import version_info
if version_info[0:2] < (2, 6):
def bin(value):
bitstring = []
while value:
if value & 1 == 1:
bitstring.append('1')
else:
bitstring.append('0')
value >>= 1
bitstring.reverse()
return '0b' + ''.join(bitstring)
else:
bin = bin
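A quick check of the shim above, assuming it lives at pyasn1/compat/binary.py as in upstream pyasn1; only the non-negative integers pyasn1 actually passes in are covered:

from pyasn1.compat.binary import bin as asn1_bin

# Matches the builtin on Python >= 2.6, and the fallback for older interpreters.
assert asn1_bin(10) == '0b1010'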

View file

@ -1,96 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
import sys
if sys.version_info[0:2] < (3, 2):
from binascii import a2b_hex, b2a_hex
from pyasn1.compat.octets import oct2int, null
if sys.version_info[0:2] < (3, 2):
def from_bytes(octets, signed=False):
value = long(b2a_hex(str(octets)), 16)
if signed and oct2int(octets[0]) & 0x80:
return value - (1 << len(octets) * 8)
return value
def to_bytes(value, signed=False, length=0):
if value < 0:
if signed:
bits = bitLength(value)
# two's complement form
maxValue = 1 << bits
valueToEncode = (value + maxValue) % maxValue
else:
raise OverflowError('can\'t convert negative int to unsigned')
elif value == 0 and length == 0:
return null
else:
bits = 0
valueToEncode = value
hexValue = hex(valueToEncode)[2:]
if hexValue.endswith('L'):
hexValue = hexValue[:-1]
if len(hexValue) & 1:
hexValue = '0' + hexValue
# padding may be needed for two's complement encoding
if value != valueToEncode or length:
hexLength = len(hexValue) * 4
padLength = max(length, bits)
if padLength > hexLength:
hexValue = '00' * ((padLength - hexLength - 1) // 8 + 1) + hexValue
elif length and hexLength - length > 7:
raise OverflowError('int too big to convert')
firstOctet = int(hexValue[:2], 16)
if signed:
if firstOctet & 0x80:
if value >= 0:
hexValue = '00' + hexValue
elif value < 0:
hexValue = 'ff' + hexValue
octets_value = a2b_hex(hexValue)
return octets_value
def bitLength(number):
# bits in unsigned number
hexValue = hex(abs(number))
bits = len(hexValue) - 2
if hexValue.endswith('L'):
bits -= 1
if bits & 1:
bits += 1
bits *= 4
# TODO: strip lhs zeros
return bits
else:
def from_bytes(octets, signed=False):
return int.from_bytes(bytes(octets), 'big', signed=signed)
def to_bytes(value, signed=False, length=0):
length = max(value.bit_length(), length)
if signed and length % 8 == 0:
length += 1
return value.to_bytes(length // 8 + (length % 8 and 1 or 0), 'big', signed=signed)
def bitLength(number):
return int(number).bit_length()
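A round-trip sketch of the helpers above (module path assumed to be pyasn1.compat.integer, as upstream):

from pyasn1.compat.integer import to_bytes, from_bytes, bitLength

octets = to_bytes(258)                 # 258 == 0x0102
assert octets == b'\x01\x02'
assert from_bytes(octets) == 258
print(bitLength(258))                  # 9 on Python 3; the 2.x fallback rounds up to whole octets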

View file

@ -1,46 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from sys import version_info
if version_info[0] <= 2:
int2oct = chr
# noinspection PyPep8
ints2octs = lambda s: ''.join([int2oct(x) for x in s])
null = ''
oct2int = ord
# noinspection PyPep8
octs2ints = lambda s: [oct2int(x) for x in s]
# noinspection PyPep8
str2octs = lambda x: x
# noinspection PyPep8
octs2str = lambda x: x
# noinspection PyPep8
isOctetsType = lambda s: isinstance(s, str)
# noinspection PyPep8
isStringType = lambda s: isinstance(s, (str, unicode))
# noinspection PyPep8
ensureString = str
else:
ints2octs = bytes
# noinspection PyPep8
int2oct = lambda x: ints2octs((x,))
null = ints2octs()
# noinspection PyPep8
oct2int = lambda x: x
# noinspection PyPep8
octs2ints = lambda x: x
# noinspection PyPep8
str2octs = lambda x: x.encode('iso-8859-1')
# noinspection PyPep8
octs2str = lambda x: x.decode('iso-8859-1')
# noinspection PyPep8
isOctetsType = lambda s: isinstance(s, bytes)
# noinspection PyPep8
isStringType = lambda s: isinstance(s, str)
# noinspection PyPep8
ensureString = bytes
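A few sanity checks for the byte/text shims above; the pyasn1.compat.octets path matches the imports used elsewhere in this package:

from pyasn1.compat.octets import str2octs, octs2str, int2oct, oct2int

assert str2octs('abc') == b'abc'       # latin-1 encode on Python 3, identity on Python 2
assert octs2str(b'abc') == 'abc'
assert int2oct(65) == b'A' and oct2int(b'A'[0]) == 65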

View file

@ -1,130 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
import logging
from pyasn1.compat.octets import octs2ints
from pyasn1 import error
from pyasn1 import __version__
__all__ = ['Debug', 'setLogger', 'hexdump']
flagNone = 0x0000
flagEncoder = 0x0001
flagDecoder = 0x0002
flagAll = 0xffff
flagMap = {
'encoder': flagEncoder,
'decoder': flagDecoder,
'all': flagAll
}
class Printer(object):
# noinspection PyShadowingNames
def __init__(self, logger=None, handler=None, formatter=None):
if logger is None:
logger = logging.getLogger('pyasn1')
logger.setLevel(logging.DEBUG)
if handler is None:
handler = logging.StreamHandler()
if formatter is None:
formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
self.__logger = logger
def __call__(self, msg):
self.__logger.debug(msg)
def __str__(self):
return '<python built-in logging>'
if hasattr(logging, 'NullHandler'):
NullHandler = logging.NullHandler
else:
# Python 2.6 and older
class NullHandler(logging.Handler):
def emit(self, record):
pass
class Debug(object):
defaultPrinter = None
def __init__(self, *flags, **options):
self._flags = flagNone
if options.get('printer') is not None:
self._printer = options.get('printer')
elif self.defaultPrinter is not None:
self._printer = self.defaultPrinter
if 'loggerName' in options:
# route our logs to parent logger
self._printer = Printer(
logger=logging.getLogger(options['loggerName']),
handler=NullHandler()
)
else:
self._printer = Printer()
self('running pyasn1 version %s' % __version__)
for f in flags:
inverse = f and f[0] in ('!', '~')
if inverse:
f = f[1:]
try:
if inverse:
self._flags &= ~flagMap[f]
else:
self._flags |= flagMap[f]
except KeyError:
raise error.PyAsn1Error('bad debug flag %s' % f)
self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled'))
def __str__(self):
return 'logger %s, flags %x' % (self._printer, self._flags)
def __call__(self, msg):
self._printer(msg)
def __and__(self, flag):
return self._flags & flag
def __rand__(self, flag):
return flag & self._flags
logger = 0
def setLogger(l):
global logger
logger = l
def hexdump(octets):
return ' '.join(
['%s%.2X' % (n % 16 == 0 and ('\n%.5d: ' % n) or '', x)
for n, x in zip(range(len(octets)), octs2ints(octets))]
)
class Scope(object):
def __init__(self):
self._list = []
def __str__(self): return '.'.join(self._list)
def push(self, token):
self._list.append(token)
def pop(self):
return self._list.pop()
scope = Scope()
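A sketch of switching the tracing hooks above on and off at run time:

from pyasn1 import debug
from pyasn1.codec.der import encoder
from pyasn1.type import univ

debug.setLogger(debug.Debug('encoder'))   # route encoder traces through the logging module
encoder.encode(univ.Integer(1))           # emits 'encoder called ...' messages
debug.setLogger(0)                        # tracing off again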

View file

@ -1,18 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
class PyAsn1Error(Exception):
pass
class ValueConstraintError(PyAsn1Error):
pass
class SubstrateUnderrunError(PyAsn1Error):
pass

View file

@ -1 +0,0 @@
# This file is necessary to make this directory a package.

View file

@ -1,617 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
import sys
from pyasn1.type import constraint, tagmap, tag
from pyasn1 import error
__all__ = ['Asn1Item', 'Asn1ItemBase', 'AbstractSimpleAsn1Item', 'AbstractConstructedAsn1Item']
class Asn1Item(object):
@classmethod
def getTypeId(cls, increment=1):
try:
Asn1Item._typeCounter += increment
except AttributeError:
Asn1Item._typeCounter = increment
return Asn1Item._typeCounter
class Asn1ItemBase(Asn1Item):
#: Set or return a :py:class:`~pyasn1.type.tag.TagSet` object representing
#: ASN.1 tag(s) associated with |ASN.1| type.
tagSet = tag.TagSet()
#: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
#: object imposing constraints on initialization values.
subtypeSpec = constraint.ConstraintsIntersection()
# Disambiguation ASN.1 types identification
typeId = None
def __init__(self, tagSet=None, subtypeSpec=None):
if tagSet is None:
self._tagSet = self.__class__.tagSet
else:
self._tagSet = tagSet
if subtypeSpec is None:
self._subtypeSpec = self.__class__.subtypeSpec
else:
self._subtypeSpec = subtypeSpec
@property
def effectiveTagSet(self):
"""For |ASN.1| type is equivalent to *tagSet*
"""
return self._tagSet # used by untagged types
@property
def tagMap(self):
"""Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping ASN.1 tags to ASN.1 objects within callee object.
"""
try:
return self._tagMap
except AttributeError:
self._tagMap = tagmap.TagMap({self._tagSet: self})
return self._tagMap
def isSameTypeWith(self, other, matchTags=True, matchConstraints=True):
"""Examine |ASN.1| type for equality with other ASN.1 type.
ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
(:py:mod:`~pyasn1.type.constraint`) are examined when carrying
out ASN.1 types comparison.
No Python inheritance relationship between PyASN1 objects is considered.
Parameters
----------
other: a pyasn1 type object
Class instance representing ASN.1 type.
Returns
-------
: :class:`bool`
:class:`True` if *other* is |ASN.1| type,
:class:`False` otherwise.
"""
return self is other or \
(not matchTags or
self._tagSet == other.tagSet) and \
(not matchConstraints or
self._subtypeSpec == other.subtypeSpec)
def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True):
"""Examine |ASN.1| type for subtype relationship with other ASN.1 type.
ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
(:py:mod:`~pyasn1.type.constraint`) are examined when carrying
out ASN.1 types comparison.
No Python inheritance relationship between PyASN1 objects is considered.
Parameters
----------
other: a pyasn1 type object
Class instance representing ASN.1 type.
Returns
-------
: :class:`bool`
:class:`True` if *other* is a subtype of |ASN.1| type,
:class:`False` otherwise.
"""
return (not matchTags or
self._tagSet.isSuperTagSetOf(other.tagSet)) and \
(not matchConstraints or
(self._subtypeSpec.isSuperTypeOf(other.subtypeSpec)))
@staticmethod
def isNoValue(*values):
for value in values:
if value is not None and value is not noValue:
return False
return True
# backward compatibility
def getTagSet(self):
return self.tagSet
def getEffectiveTagSet(self):
return self.effectiveTagSet
def getTagMap(self):
return self.tagMap
def getSubtypeSpec(self):
return self.subtypeSpec
class NoValue(object):
"""Create a singleton instance of NoValue class.
NoValue object can be used as an initializer on PyASN1 type class
instantiation to represent ASN.1 type rather than ASN.1 data value.
No operations other than type comparison can be performed on
a PyASN1 type object.
"""
skipMethods = ('__getattribute__', '__getattr__', '__setattr__', '__delattr__',
'__class__', '__init__', '__del__', '__new__', '__repr__',
'__qualname__', '__objclass__', 'im_class', '__sizeof__')
_instance = None
def __new__(cls):
if cls._instance is None:
def getPlug(name):
def plug(self, *args, **kw):
raise error.PyAsn1Error('Uninitialized ASN.1 value ("%s" attribute looked up)' % name)
return plug
op_names = [name
for typ in (str, int, list, dict)
for name in dir(typ)
if name not in cls.skipMethods and name.startswith('__') and name.endswith('__') and callable(getattr(typ, name))]
for name in set(op_names):
setattr(cls, name, getPlug(name))
cls._instance = object.__new__(cls)
return cls._instance
def __getattr__(self, attr):
if attr in self.skipMethods:
raise AttributeError('attribute %s not present' % attr)
raise error.PyAsn1Error('No value for "%s"' % attr)
def __repr__(self):
return '%s()' % self.__class__.__name__
noValue = NoValue()
# Base class for "simple" ASN.1 objects. These are immutable.
class AbstractSimpleAsn1Item(Asn1ItemBase):
#: Default payload value
defaultValue = noValue
def __init__(self, value=noValue, tagSet=None, subtypeSpec=None):
Asn1ItemBase.__init__(self, tagSet, subtypeSpec)
if value is None or value is noValue:
value = self.defaultValue
else:
value = self.prettyIn(value)
try:
self._subtypeSpec(value)
except error.PyAsn1Error:
exType, exValue, exTb = sys.exc_info()
raise exType('%s at %s' % (exValue, self.__class__.__name__))
self.__hashedValue = None
self._value = value
self._len = None
def __repr__(self):
representation = []
if self._value is not self.defaultValue:
representation.append(self.prettyOut(self._value))
if self._tagSet is not self.__class__.tagSet:
representation.append('tagSet=%r' % (self._tagSet,))
if self._subtypeSpec is not self.subtypeSpec:
representation.append('subtypeSpec=%r' % (self._subtypeSpec,))
return '%s(%s)' % (self.__class__.__name__, ', '.join(representation))
def __str__(self):
return str(self._value)
def __eq__(self, other):
return self is other and True or self._value == other
def __ne__(self, other):
return self._value != other
def __lt__(self, other):
return self._value < other
def __le__(self, other):
return self._value <= other
def __gt__(self, other):
return self._value > other
def __ge__(self, other):
return self._value >= other
if sys.version_info[0] <= 2:
def __nonzero__(self):
return self._value and True or False
else:
def __bool__(self):
return self._value and True or False
def __hash__(self):
if self.__hashedValue is None:
self.__hashedValue = hash(self._value)
return self.__hashedValue
@property
def isValue(self):
"""Indicate if |ASN.1| object represents ASN.1 type or ASN.1 value.
The PyASN1 type objects can only participate in types comparison
and serve as a blueprint for serialization codecs to resolve
ambiguous types.
The PyASN1 value objects can additionally participate in most
of built-in Python operations.
Returns
-------
: :class:`bool`
:class:`True` if object represents ASN.1 value and type,
:class:`False` if object represents just ASN.1 type.
"""
return self._value is not noValue
def clone(self, value=noValue, tagSet=None, subtypeSpec=None):
"""Create a copy of a |ASN.1| type or object.
Any parameters to the *clone()* method will replace corresponding
properties of the |ASN.1| object.
Parameters
----------
value: :class:`tuple`, :class:`str` or |ASN.1| object
Initialization value to pass to new ASN.1 object instead of
inheriting one from the caller.
tagSet: :py:class:`~pyasn1.type.tag.TagSet`
Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller
Returns
-------
:
new instance of |ASN.1| type/value
"""
isModified = False
if value is None or value is noValue:
value = self._value
else:
isModified = True
if tagSet is None or tagSet is noValue:
tagSet = self._tagSet
else:
isModified = True
if subtypeSpec is None or subtypeSpec is noValue:
subtypeSpec = self._subtypeSpec
else:
isModified = True
if isModified:
return self.__class__(value, tagSet, subtypeSpec)
else:
return self
def subtype(self, value=noValue, implicitTag=None, explicitTag=None,
subtypeSpec=None):
"""Create a copy of a |ASN.1| type or object.
Any parameters to the *subtype()* method will be added to the corresponding
properties of the |ASN.1| object.
Parameters
----------
value: :class:`tuple`, :class:`str` or |ASN.1| object
Initialization value to pass to new ASN.1 object instead of
inheriting one from the caller.
implicitTag: :py:class:`~pyasn1.type.tag.Tag`
Implicitly apply given ASN.1 tag object to caller's
:py:class:`~pyasn1.type.tag.TagSet`, then use the result as
new object's ASN.1 tag(s).
explicitTag: :py:class:`~pyasn1.type.tag.Tag`
Explicitly apply given ASN.1 tag object to caller's
:py:class:`~pyasn1.type.tag.TagSet`, then use the result as
new object's ASN.1 tag(s).
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Add ASN.1 constraints object to one of the caller, then
use the result as new object's ASN.1 constraints.
Returns
-------
:
new instance of |ASN.1| type/value
"""
isModified = False
if value is None or value is noValue:
value = self._value
else:
isModified = True
if implicitTag is not None and implicitTag is not noValue:
tagSet = self._tagSet.tagImplicitly(implicitTag)
isModified = True
elif explicitTag is not None and explicitTag is not noValue:
tagSet = self._tagSet.tagExplicitly(explicitTag)
isModified = True
else:
tagSet = self._tagSet
if subtypeSpec is None or subtypeSpec is noValue:
subtypeSpec = self._subtypeSpec
else:
subtypeSpec = self._subtypeSpec + subtypeSpec
isModified = True
if isModified:
return self.__class__(value, tagSet, subtypeSpec)
else:
return self
def prettyIn(self, value):
return value
def prettyOut(self, value):
return str(value)
def prettyPrint(self, scope=0):
"""Provide human-friendly printable object representation.
Returns
-------
: :class:`str`
human-friendly type and/or value representation.
"""
if self.isValue:
return self.prettyOut(self._value)
else:
return '<no value>'
# XXX Compatibility stub
def prettyPrinter(self, scope=0):
return self.prettyPrint(scope)
# noinspection PyUnusedLocal
def prettyPrintType(self, scope=0):
return '%s -> %s' % (self.tagSet, self.__class__.__name__)
# backward compatibility
def hasValue(self):
return self.isValue
#
# Constructed types:
# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
# * ASN.1 types and values are represented by Python class instances
# * Value initialization is made for defaulted components only
# * Primary method of component addressing is by-position. Data model for base
# type is Python sequence. Additional type-specific addressing methods
# may be implemented for particular types.
# * SequenceOf and SetOf types do not implement any additional methods
# * Sequence, Set and Choice types also implement by-identifier addressing
# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing
# * Sequence and Set types may include optional and defaulted
# components
# * Constructed types hold a reference to component types used for value
# verification and ordering.
# * Component type is a scalar type for SequenceOf/SetOf types and a list
# of types for Sequence/Set/Choice.
#
def setupComponent():
"""Returns a sentinel value.
Indicates to a constructed type to set up its inner component so that it
can be referred to. This is useful in situations where you want to populate
descendants of a constructed type, which requires being able to refer to
their parent types along the way.
Example
-------
>>> constructed['record'] = setupComponent()
>>> constructed['record']['scalar'] = 42
"""
return noValue
class AbstractConstructedAsn1Item(Asn1ItemBase):
#: If `True`, requires exact component type matching,
#: otherwise subtype relation is only enforced
strictConstraints = False
def __init__(self, componentType=None, tagSet=None,
subtypeSpec=None, sizeSpec=None):
Asn1ItemBase.__init__(self, tagSet, subtypeSpec)
if componentType is None:
self._componentType = self.componentType
else:
self._componentType = componentType
if sizeSpec is None:
self._sizeSpec = self.sizeSpec
else:
self._sizeSpec = sizeSpec
self._componentValues = []
def __repr__(self):
representation = []
if self._componentType is not self.componentType:
representation.append('componentType=%r' % (self._componentType,))
if self._tagSet is not self.__class__.tagSet:
representation.append('tagSet=%r' % (self._tagSet,))
if self._subtypeSpec is not self.subtypeSpec:
representation.append('subtypeSpec=%r' % (self._subtypeSpec,))
representation = '%s(%s)' % (self.__class__.__name__, ', '.join(representation))
if self._componentValues:
for idx, component in enumerate(self._componentValues):
if component is None or component is noValue:
continue
representation += '.setComponentByPosition(%d, %s)' % (idx, repr(component))
return representation
def __eq__(self, other):
return self is other and True or self._componentValues == other
def __ne__(self, other):
return self._componentValues != other
def __lt__(self, other):
return self._componentValues < other
def __le__(self, other):
return self._componentValues <= other
def __gt__(self, other):
return self._componentValues > other
def __ge__(self, other):
return self._componentValues >= other
if sys.version_info[0] <= 2:
def __nonzero__(self):
return self._componentValues and True or False
else:
def __bool__(self):
return self._componentValues and True or False
def _cloneComponentValues(self, myClone, cloneValueFlag):
pass
def clone(self, tagSet=None, subtypeSpec=None, sizeSpec=None, cloneValueFlag=None):
"""Create a copy of a |ASN.1| type or object.
Any parameters to the *clone()* method will replace corresponding
properties of the |ASN.1| object.
Parameters
----------
tagSet: :py:class:`~pyasn1.type.tag.TagSet`
Object representing non-default ASN.1 tag(s)
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Object representing non-default ASN.1 subtype constraint(s)
sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Object representing non-default ASN.1 size constraint(s)
Returns
-------
:
new instance of |ASN.1| type/value
"""
if tagSet is None:
tagSet = self._tagSet
if subtypeSpec is None:
subtypeSpec = self._subtypeSpec
if sizeSpec is None:
sizeSpec = self._sizeSpec
clone = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec)
if cloneValueFlag:
self._cloneComponentValues(clone, cloneValueFlag)
return clone
def subtype(self, implicitTag=None, explicitTag=None, subtypeSpec=None,
sizeSpec=None, cloneValueFlag=None):
"""Create a copy of a |ASN.1| type or object.
Any parameters to the *subtype()* method will be added to the corresponding
properties of the |ASN.1| object.
Parameters
----------
tagSet: :py:class:`~pyasn1.type.tag.TagSet`
Object representing non-default ASN.1 tag(s)
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Object representing non-default ASN.1 subtype constraint(s)
sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Object representing non-default ASN.1 size constraint(s)
Returns
-------
:
new instance of |ASN.1| type/value
"""
if implicitTag is not None and implicitTag is not noValue:
tagSet = self._tagSet.tagImplicitly(implicitTag)
elif explicitTag is not None and explicitTag is not noValue:
tagSet = self._tagSet.tagExplicitly(explicitTag)
else:
tagSet = self._tagSet
if subtypeSpec is None or subtypeSpec is noValue:
subtypeSpec = self._subtypeSpec
else:
subtypeSpec = self._subtypeSpec + subtypeSpec
if sizeSpec is None or sizeSpec is noValue:
sizeSpec = self._sizeSpec
else:
sizeSpec += self._sizeSpec
clone = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec)
if cloneValueFlag:
self._cloneComponentValues(clone, cloneValueFlag)
return clone
def verifySizeSpec(self):
self._sizeSpec(self)
def getComponentByPosition(self, idx):
raise error.PyAsn1Error('Method not implemented')
def setComponentByPosition(self, idx, value, verifyConstraints=True):
raise error.PyAsn1Error('Method not implemented')
def setComponents(self, *args, **kwargs):
for idx, value in enumerate(args):
self[idx] = value
for k in kwargs:
self[k] = kwargs[k]
return self
def getComponentType(self):
return self._componentType
# backward compatibility -- no-op
def setDefaultComponents(self):
pass
@property
def componentTagMap(self):
raise error.PyAsn1Error('Method not implemented')
def __getitem__(self, idx):
return self.getComponentByPosition(idx)
def __setitem__(self, idx, value):
self.setComponentByPosition(idx, value)
def __len__(self):
return len(self._componentValues)
def clear(self):
self._componentValues = []
# backward compatibility
def getComponentTagMap(self):
return self.componentTagMap
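A sketch of the clone()/subtype() machinery above, exercised through a concrete scalar type such as univ.Integer:

from pyasn1.type import univ, tag

# subtype() layers an implicit context tag [0] over INTEGER's universal tag;
# clone() swaps the value while keeping tags and constraints.
tagged = univ.Integer(1).subtype(
    implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))
assert tagged.clone(2) == 2
assert not tagged.isSameTypeWith(univ.Integer())   # tag sets differ
assert not univ.Integer().isValue                  # schema object, no value yet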

View file

@ -1,378 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
import sys
from pyasn1.type import univ, tag
from pyasn1 import error
__all__ = ['NumericString', 'PrintableString', 'TeletexString', 'T61String', 'VideotexString',
'IA5String', 'GraphicString', 'VisibleString', 'ISO646String',
'GeneralString', 'UniversalString', 'BMPString', 'UTF8String']
NoValue = univ.NoValue
noValue = univ.noValue
class AbstractCharacterString(univ.OctetString):
"""Creates |ASN.1| type or object.
|ASN.1| objects are immutable and duck-type Python 2 :class:`unicode` or Python 3 :class:`str`.
When used in octet-stream context, |ASN.1| type assumes "|encoding|" encoding.
Parameters
----------
value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
unicode object (Python 2) or string (Python 3), alternatively string
(Python 2) or bytes (Python 3) representing octet-stream of serialized
unicode string (note `encoding` parameter) or |ASN.1| class instance.
tagSet: :py:class:`~pyasn1.type.tag.TagSet`
Object representing non-default ASN.1 tag(s)
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Object representing non-default ASN.1 subtype constraint(s)
encoding: :py:class:`str`
Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
:class:`str` (Python 3) the payload when |ASN.1| object is used
in octet-stream context.
Raises
------
: :py:class:`pyasn1.error.PyAsn1Error`
On constraint violation or bad initializer.
"""
if sys.version_info[0] <= 2:
def __str__(self):
try:
return self._value.encode(self._encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (self._value, self._encoding)
)
def __unicode__(self):
return unicode(self._value)
def prettyIn(self, value):
if isinstance(value, unicode):
return value
elif isinstance(value, str):
try:
return value.decode(self._encoding)
except (LookupError, UnicodeDecodeError):
raise error.PyAsn1Error(
'Can\'t decode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
elif isinstance(value, (tuple, list)):
try:
return self.prettyIn(''.join([chr(x) for x in value]))
except ValueError:
raise error.PyAsn1Error(
'Bad %s initializer \'%s\'' % (self.__class__.__name__, value)
)
else:
try:
return unicode(value)
except UnicodeDecodeError:
raise error.PyAsn1Error(
'Can\'t turn object \'%s\' into unicode' % (value,)
)
def asOctets(self, padding=True):
return str(self)
def asNumbers(self, padding=True):
return tuple([ord(x) for x in str(self)])
else:
def __str__(self):
return str(self._value)
def __bytes__(self):
try:
return self._value.encode(self._encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (self._value, self._encoding)
)
def prettyIn(self, value):
if isinstance(value, str):
return value
elif isinstance(value, bytes):
try:
return value.decode(self._encoding)
except UnicodeDecodeError:
raise error.PyAsn1Error(
'Can\'t decode string \'%s\' with \'%s\' codec' % (value, self._encoding)
)
elif isinstance(value, (tuple, list)):
return self.prettyIn(bytes(value))
else:
try:
return str(value)
except (UnicodeDecodeError, ValueError):
raise error.PyAsn1Error(
'Can\'t turn object \'%s\' into unicode' % (value,)
)
def asOctets(self, padding=True):
return bytes(self)
def asNumbers(self, padding=True):
return tuple(bytes(self))
def prettyOut(self, value):
return value
def __reversed__(self):
return reversed(self._value)
def clone(self, value=noValue, tagSet=None, subtypeSpec=None,
encoding=None, binValue=noValue, hexValue=noValue):
"""Creates a copy of a |ASN.1| type or object.
Any parameters to the *clone()* method will replace corresponding
properties of the |ASN.1| object.
Parameters
----------
value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
unicode object (Python 2) or string (Python 3), alternatively string
(Python 2) or bytes (Python 3) representing octet-stream of serialized
unicode string (note `encoding` parameter) or |ASN.1| class instance.
tagSet: :py:class:`~pyasn1.type.tag.TagSet`
Object representing non-default ASN.1 tag(s)
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Object representing non-default ASN.1 subtype constraint(s)
encoding: :py:class:`str`
Unicode codec ID to encode/decode :py:class:`unicode` (Python 2) or
:py:class:`str` (Python 3) the payload when |ASN.1| object is used
in octet-stream context.
Returns
-------
:
new instance of |ASN.1| type/value
"""
return univ.OctetString.clone(self, value, tagSet, subtypeSpec, encoding, binValue, hexValue)
def subtype(self, value=noValue, implicitTag=None, explicitTag=None,
subtypeSpec=None, encoding=None, binValue=noValue, hexValue=noValue):
"""Creates a copy of a |ASN.1| type or object.
Any parameters to the *subtype()* method will be added to the corresponding
properties of the |ASN.1| object.
Parameters
----------
value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
unicode object (Python 2) or string (Python 3), alternatively string
(Python 2) or bytes (Python 3) representing octet-stream of serialized
unicode string (note `encoding` parameter) or |ASN.1| class instance.
implicitTag: :py:class:`~pyasn1.type.tag.Tag`
Implicitly apply given ASN.1 tag object to caller's
:py:class:`~pyasn1.type.tag.TagSet`, then use the result as
new object's ASN.1 tag(s).
explicitTag: :py:class:`~pyasn1.type.tag.Tag`
Explicitly apply given ASN.1 tag object to caller's
:py:class:`~pyasn1.type.tag.TagSet`, then use the result as
new object's ASN.1 tag(s).
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Object representing non-default ASN.1 subtype constraint(s)
encoding: :py:class:`str`
Unicode codec ID to encode/decode :py:class:`unicode` (Python 2) or
:py:class:`str` (Python 3) the payload when |ASN.1| object is used
in octet-stream context.
Returns
-------
:
new instance of |ASN.1| type/value
"""
return univ.OctetString.subtype(self, value, implicitTag, explicitTag, subtypeSpec, encoding, binValue, hexValue)
class NumericString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)
)
encoding = 'us-ascii'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class PrintableString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19)
)
encoding = 'us-ascii'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class TeletexString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)
)
encoding = 'iso-8859-1'
class T61String(TeletexString):
__doc__ = TeletexString.__doc__
class VideotexString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21)
)
encoding = 'iso-8859-1'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class IA5String(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22)
)
encoding = 'us-ascii'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class GraphicString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25)
)
encoding = 'iso-8859-1'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class VisibleString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)
)
encoding = 'us-ascii'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class ISO646String(VisibleString):
__doc__ = VisibleString.__doc__
class GeneralString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
)
encoding = 'iso-8859-1'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class UniversalString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28)
)
encoding = "utf-32-be"
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class BMPString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
)
encoding = "utf-16-be"
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class UTF8String(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (class attribute) or return (class or instance attribute) a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
)
encoding = "utf-8"
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
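A short sketch of the character string types above in octet-stream context (Python 3 branch shown):

from pyasn1.type import char

s = char.UTF8String('über')
assert s.asOctets() == 'über'.encode('utf-8')   # payload re-encoded with the type's codec
assert char.PrintableString('abc') == 'abc'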

View file

@ -1,283 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
# Original concept and code by Mike C. Fletcher.
#
import sys
from pyasn1.type import error
__all__ = ['SingleValueConstraint', 'ContainedSubtypeConstraint', 'ValueRangeConstraint',
'ValueSizeConstraint', 'PermittedAlphabetConstraint', 'InnerTypeConstraint',
'ConstraintsExclusion', 'ConstraintsIntersection', 'ConstraintsUnion']
class AbstractConstraint(object):
"""Abstract base-class for constraint objects
Constraints should be stored in a simple sequence in the
namespace of their client Asn1Item sub-classes in cases
when an ASN.1 constraint is defined.
"""
def __init__(self, *values):
self._valueMap = set()
self._setValues(values)
self.__hashedValues = None
def __call__(self, value, idx=None):
if not self._values:
return
try:
self._testValue(value, idx)
except error.ValueConstraintError:
raise error.ValueConstraintError(
'%s failed at: %r' % (self, sys.exc_info()[1])
)
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join([repr(x) for x in self._values])
)
def __eq__(self, other):
return self is other and True or self._values == other
def __ne__(self, other):
return self._values != other
def __lt__(self, other):
return self._values < other
def __le__(self, other):
return self._values <= other
def __gt__(self, other):
return self._values > other
def __ge__(self, other):
return self._values >= other
if sys.version_info[0] <= 2:
def __nonzero__(self):
return self._values and True or False
else:
def __bool__(self):
return self._values and True or False
def __hash__(self):
if self.__hashedValues is None:
self.__hashedValues = hash((self.__class__.__name__, self._values))
return self.__hashedValues
# descriptor protocol
def __get__(self, instance, owner):
if instance is None:
return self
# This is a bit of hack: look up instance attribute first,
# then try class attribute if instance attribute with that
# name is not available.
# The rationale is to have `.subtypeSpec`/`.sizeSpec` readable and writeable
# as a class attribute and read-only as an instance attribute.
try:
return instance._subtypeSpec
except AttributeError:
try:
return instance._sizeSpec
except AttributeError:
return self
def __set__(self, instance, value):
raise AttributeError('attribute is read-only')
def _setValues(self, values):
self._values = values
def _testValue(self, value, idx):
raise error.ValueConstraintError(value)
# Constraints derivation logic
def getValueMap(self):
return self._valueMap
def isSuperTypeOf(self, otherConstraint):
return (otherConstraint is self or
not self._values or
otherConstraint == self or
self in otherConstraint.getValueMap())
def isSubTypeOf(self, otherConstraint):
return (otherConstraint is self or
not self or
otherConstraint == self or
otherConstraint in self._valueMap)
class SingleValueConstraint(AbstractConstraint):
"""Value must be part of defined values constraint"""
def _setValues(self, values):
self._values = values
self._set = set(values)
def _testValue(self, value, idx):
if value not in self._set:
raise error.ValueConstraintError(value)
class ContainedSubtypeConstraint(AbstractConstraint):
"""Value must satisfy all of defined set of constraints"""
def _testValue(self, value, idx):
for c in self._values:
c(value, idx)
class ValueRangeConstraint(AbstractConstraint):
"""Value must be within start and stop values (inclusive)"""
def _testValue(self, value, idx):
if value < self.start or value > self.stop:
raise error.ValueConstraintError(value)
def _setValues(self, values):
if len(values) != 2:
raise error.PyAsn1Error(
'%s: bad constraint values' % (self.__class__.__name__,)
)
self.start, self.stop = values
if self.start > self.stop:
raise error.PyAsn1Error(
'%s: screwed constraint values (start > stop): %s > %s' % (
self.__class__.__name__,
self.start, self.stop
)
)
AbstractConstraint._setValues(self, values)
class ValueSizeConstraint(ValueRangeConstraint):
"""len(value) must be within start and stop values (inclusive)"""
def _testValue(self, value, idx):
valueSize = len(value)
if valueSize < self.start or valueSize > self.stop:
raise error.ValueConstraintError(value)
class PermittedAlphabetConstraint(SingleValueConstraint):
def _setValues(self, values):
self._values = values
self._set = set(values)
def _testValue(self, value, idx):
if not self._set.issuperset(value):
raise error.ValueConstraintError(value)
# This is a bit kludgy: it supports two operating modes within a single constraint
class InnerTypeConstraint(AbstractConstraint):
"""Value must satisfy type and presense constraints"""
def _testValue(self, value, idx):
if self.__singleTypeConstraint:
self.__singleTypeConstraint(value)
elif self.__multipleTypeConstraint:
if idx not in self.__multipleTypeConstraint:
raise error.ValueConstraintError(value)
constraint, status = self.__multipleTypeConstraint[idx]
if status == 'ABSENT': # XXX presence is not checked!
raise error.ValueConstraintError(value)
constraint(value)
def _setValues(self, values):
self.__multipleTypeConstraint = {}
self.__singleTypeConstraint = None
for v in values:
if isinstance(v, tuple):
self.__multipleTypeConstraint[v[0]] = v[1], v[2]
else:
self.__singleTypeConstraint = v
AbstractConstraint._setValues(self, values)
# Boolean ops on constraints
class ConstraintsExclusion(AbstractConstraint):
"""Value must not fit the single constraint"""
def _testValue(self, value, idx):
try:
self._values[0](value, idx)
except error.ValueConstraintError:
return
else:
raise error.ValueConstraintError(value)
def _setValues(self, values):
if len(values) != 1:
raise error.PyAsn1Error('Single constraint expected')
AbstractConstraint._setValues(self, values)
class AbstractConstraintSet(AbstractConstraint):
"""Value must not satisfy the single constraint"""
def __getitem__(self, idx):
return self._values[idx]
def __iter__(self):
return iter(self._values)
def __add__(self, value):
return self.__class__(*(self._values + (value,)))
def __radd__(self, value):
return self.__class__(*((value,) + self._values))
def __len__(self):
return len(self._values)
# Constraints inclusion in sets
def _setValues(self, values):
self._values = values
for constraint in values:
if constraint:
self._valueMap.add(constraint)
self._valueMap.update(constraint.getValueMap())
class ConstraintsIntersection(AbstractConstraintSet):
"""Value must satisfy all constraints"""
def _testValue(self, value, idx):
for constraint in self._values:
constraint(value, idx)
class ConstraintsUnion(AbstractConstraintSet):
"""Value must satisfy at least one constraint"""
def _testValue(self, value, idx):
for constraint in self._values:
try:
constraint(value, idx)
except error.ValueConstraintError:
pass
else:
return
raise error.ValueConstraintError(
'all of %s failed for \"%s\"' % (self._values, value)
)
# XXX
# add tests for type check
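A sketch of the constraint machinery above applied to a subtyped INTEGER:

from pyasn1.type import univ, constraint
from pyasn1 import error

class Port(univ.Integer):
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, 65535)

Port(8080)            # within range, accepted
try:
    Port(70000)       # outside the range, rejected at construction time
except error.PyAsn1Error:
    pass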

View file

@ -1,11 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.error import PyAsn1Error
class ValueConstraintError(PyAsn1Error):
pass

View file

@@ -1,475 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
import sys
from pyasn1.type import tagmap
from pyasn1 import error
__all__ = ['NamedType', 'OptionalNamedType', 'DefaultedNamedType', 'NamedTypes']
class NamedType(object):
"""Create named field object for a constructed ASN.1 type.
The |NamedType| object represents a single name and ASN.1 type of a constructed ASN.1 type.
|NamedType| objects are immutable and duck-type Python :class:`tuple` objects
holding *name* and *asn1Object* components.
Parameters
----------
name: :py:class:`str`
Field name
asn1Object:
ASN.1 type object
"""
isOptional = False
isDefaulted = False
def __init__(self, name, asn1Object):
self.__name = name
self.__type = asn1Object
self.__nameAndType = name, asn1Object
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.__name, self.__type)
def __eq__(self, other):
return self.__nameAndType == other
def __ne__(self, other):
return self.__nameAndType != other
def __lt__(self, other):
return self.__nameAndType < other
def __le__(self, other):
return self.__nameAndType <= other
def __gt__(self, other):
return self.__nameAndType > other
def __ge__(self, other):
return self.__nameAndType >= other
def __hash__(self):
return hash(self.__nameAndType)
def __getitem__(self, idx):
return self.__nameAndType[idx]
def __iter__(self):
return iter(self.__nameAndType)
@property
def name(self):
return self.__name
@property
def asn1Object(self):
return self.__type
# Backward compatibility
def getName(self):
return self.name
def getType(self):
return self.asn1Object
class OptionalNamedType(NamedType):
__doc__ = NamedType.__doc__
isOptional = True
class DefaultedNamedType(NamedType):
__doc__ = NamedType.__doc__
isDefaulted = True
class NamedTypes(object):
"""Create a collection of named fields for a constructed ASN.1 type.
The NamedTypes object represents a collection of named fields of a constructed ASN.1 type.
*NamedTypes* objects are immutable and duck-type Python :class:`dict` objects
holding *name* as keys and ASN.1 type object as values.
Parameters
----------
*namedTypes: :class:`~pyasn1.type.namedtype.NamedType`
"""
def __init__(self, *namedTypes):
self.__namedTypes = namedTypes
self.__namedTypesLen = len(self.__namedTypes)
self.__minTagSet = None
self.__tagToPosMapImpl = None
self.__nameToPosMapImpl = None
self.__ambigiousTypesImpl = None
self.__tagMap = {}
self.__hasOptionalOrDefault = None
self.__requiredComponents = None
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__, ', '.join([repr(x) for x in self.__namedTypes])
)
def __eq__(self, other):
return self.__namedTypes == other
def __ne__(self, other):
return self.__namedTypes != other
def __lt__(self, other):
return self.__namedTypes < other
def __le__(self, other):
return self.__namedTypes <= other
def __gt__(self, other):
return self.__namedTypes > other
def __ge__(self, other):
return self.__namedTypes >= other
def __hash__(self):
return hash(self.__namedTypes)
def __getitem__(self, idx):
try:
return self.__namedTypes[idx]
except TypeError:
return self.__namedTypes[self.__nameToPosMap[idx]]
def __contains__(self, key):
return key in self.__nameToPosMap
def __iter__(self):
return (x[0] for x in self.__namedTypes)
if sys.version_info[0] <= 2:
def __nonzero__(self):
return self.__namedTypesLen > 0
else:
def __bool__(self):
return self.__namedTypesLen > 0
def __len__(self):
return self.__namedTypesLen
# Python dict protocol
def values(self):
return (namedType.asn1Object for namedType in self.__namedTypes)
def keys(self):
return (namedType.name for namedType in self.__namedTypes)
def items(self):
return ((namedType.name, namedType.asn1Object) for namedType in self.__namedTypes)
def clone(self):
return self.__class__(*self.__namedTypes)
@property
def __tagToPosMap(self):
if self.__tagToPosMapImpl is None:
self.__tagToPosMapImpl = {}
for idx, namedType in enumerate(self.__namedTypes):
tagMap = namedType.asn1Object.tagMap
if not tagMap:
continue
for _tagSet in tagMap.presentTypes:
if _tagSet in self.__tagToPosMapImpl:
raise error.PyAsn1Error('Duplicate type %s in %s' % (_tagSet, namedType))
self.__tagToPosMapImpl[_tagSet] = idx
return self.__tagToPosMapImpl
@property
def __nameToPosMap(self):
if self.__nameToPosMapImpl is None:
self.__nameToPosMapImpl = {}
for idx, namedType in enumerate(self.__namedTypes):
if namedType.name in self.__nameToPosMapImpl:
raise error.PyAsn1Error('Duplicate name %s in %s' % (namedType.name, namedType))
self.__nameToPosMapImpl[namedType.name] = idx
return self.__nameToPosMapImpl
@property
def __ambigiousTypes(self):
if self.__ambigiousTypesImpl is None:
self.__ambigiousTypesImpl = {}
ambigiousTypes = ()
for idx, namedType in reversed(tuple(enumerate(self.__namedTypes))):
if namedType.isOptional or namedType.isDefaulted:
ambigiousTypes = (namedType,) + ambigiousTypes
else:
ambigiousTypes = (namedType,)
self.__ambigiousTypesImpl[idx] = NamedTypes(*ambigiousTypes)
return self.__ambigiousTypesImpl
def getTypeByPosition(self, idx):
"""Return ASN.1 type object by its position in fields set.
Parameters
----------
idx: :py:class:`int`
Field index
Returns
-------
:
ASN.1 type
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If given position is out of fields range
"""
try:
return self.__namedTypes[idx].asn1Object
except IndexError:
raise error.PyAsn1Error('Type position out of range')
def getPositionByType(self, tagSet):
"""Return field position by its ASN.1 type.
Parameters
----------
tagSet: :class:`~pysnmp.type.tag.TagSet`
ASN.1 tag set distinguishing one ASN.1 type from others.
Returns
-------
: :py:class:`int`
ASN.1 type position in fields set
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If *tagSet* is not present or ASN.1 types are not unique within callee *NamedTypes*
"""
try:
return self.__tagToPosMap[tagSet]
except KeyError:
raise error.PyAsn1Error('Type %s not found' % (tagSet,))
def getNameByPosition(self, idx):
"""Return field name by its position in fields set.
Parameters
----------
idx: :py:class:`int`
Field index
Returns
-------
: :py:class:`str`
Field name
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If given position is out of fields range
"""
try:
return self.__namedTypes[idx].name
except IndexError:
raise error.PyAsn1Error('Type position out of range')
def getPositionByName(self, name):
"""Return field position by filed name.
Parameters
----------
name: :py:class:`str`
Field name
Returns
-------
: :py:class:`int`
Field position in fields set
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If *name* is not present or not unique within callee *NamedTypes*
"""
try:
return self.__nameToPosMap[name]
except KeyError:
raise error.PyAsn1Error('Name %s not found' % (name,))
def getTagMapNearPosition(self, idx):
"""Return ASN.1 types that are allowed at or past given field position.
Some ASN.1 serializations allow for skipping optional and defaulted fields.
Some constructed ASN.1 types allow reordering of the fields. When recovering
such objects it may be important to know which types can possibly be
present at any given position in the field sets.
Parameters
----------
idx: :py:class:`int`
Field index
Returns
-------
: :class:`~pyasn1.type.tagmap.TagMap`
Map of ASN.1 types allowed at given field position
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If given position is out of fields range
"""
try:
return self.__ambigiousTypes[idx].getTagMap()
except KeyError:
raise error.PyAsn1Error('Type position out of range')
def getPositionNearType(self, tagSet, idx):
"""Return the closest field position where given ASN.1 type is allowed.
Some ASN.1 serializations allow for skipping optional and defaulted fields.
Some constructed ASN.1 types allow reordering of the fields. When recovering
such objects it may be important to know at which field position, in field set,
given *tagSet* is allowed at or past *idx* position.
Parameters
----------
tagSet: :class:`~pyasn1.type.tag.TagSet`
ASN.1 type which field position to look up
idx: :py:class:`int`
Field position at or past which to perform ASN.1 type look up
Returns
-------
: :py:class:`int`
Field position in fields set
Raises
------
: :class:`~pyasn1.error.PyAsn1Error`
If *tagSet* is not present or not unique within callee *NamedTypes*
or *idx* is out of fields range
"""
try:
return idx + self.__ambigiousTypes[idx].getPositionByType(tagSet)
except KeyError:
raise error.PyAsn1Error('Type position out of range')
@property
def minTagSet(self):
"""Return the minimal TagSet among ASN.1 type in callee *NamedTypes*.
Some ASN.1 types/serialization protocols require ASN.1 types to be
arranged based on their numerical tag value. The *minTagSet* property
returns that.
Returns
-------
: :class:`~pyasn1.type.tagset.TagSet`
Minimal TagSet among ASN.1 types in callee *NamedTypes*
"""
if self.__minTagSet is None:
for namedType in self.__namedTypes:
asn1Object = namedType.asn1Object
try:
tagSet = asn1Object.getMinTagSet()
except AttributeError:
tagSet = asn1Object.tagSet
if self.__minTagSet is None or tagSet < self.__minTagSet:
self.__minTagSet = tagSet
return self.__minTagSet
def getTagMap(self, unique=False):
"""Create a *TagMap* object from tags and types recursively.
Create a new :class:`~pyasn1.type.tagmap.TagMap` object by
combining tags from *TagMap* objects of children types and
associating them with their immediate child type.
Example
-------
.. code-block:: python
OuterType ::= CHOICE {
innerType INTEGER
}
Calling *.getTagMap()* on *OuterType* will yield a map like this:
.. code-block:: python
Integer.tagSet -> Choice
Parameters
----------
unique: :py:class:`bool`
If `True`, duplicate *TagSet* objects occurring while building
new *TagMap* would cause error.
Returns
-------
: :class:`~pyasn1.type.tagmap.TagMap`
New *TagMap* holding *TagSet* objects gathered from children types.
"""
if unique not in self.__tagMap:
presentTypes = {}
skipTypes = {}
defaultType = None
for namedType in self.__namedTypes:
tagMap = namedType.asn1Object.tagMap
for tagSet in tagMap:
if unique and tagSet in presentTypes:
raise error.PyAsn1Error('Non-unique tagSet %s' % (tagSet,))
presentTypes[tagSet] = namedType.asn1Object
skipTypes.update(tagMap.skipTypes)
if defaultType is None:
defaultType = tagMap.defaultType
elif tagMap.defaultType is not None:
raise error.PyAsn1Error('Duplicate default ASN.1 type at %s' % (self,))
self.__tagMap[unique] = tagmap.TagMap(presentTypes, skipTypes, defaultType)
return self.__tagMap[unique]
@property
def hasOptionalOrDefault(self):
if self.__hasOptionalOrDefault is None:
self.__hasOptionalOrDefault = bool([True for namedType in self.__namedTypes if namedType.isDefaulted or namedType.isOptional])
return self.__hasOptionalOrDefault
@property
def namedTypes(self):
return iter(self.__namedTypes)
@property
def requiredComponents(self):
if self.__requiredComponents is None:
self.__requiredComponents = frozenset(
[idx for idx, nt in enumerate(self.__namedTypes) if not nt.isOptional and not nt.isDefaulted]
)
return self.__requiredComponents
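# Editor's sketch, not part of the removed module: declaring the fields of a
# constructed ASN.1 type with NamedType/NamedTypes, assuming the upstream pyasn1
# API that this vendored copy mirrored (pyasn1.type.namedtype, pyasn1.type.univ).
from pyasn1.type import namedtype, univ

fields = namedtype.NamedTypes(
    namedtype.NamedType('id', univ.Integer()),
    namedtype.OptionalNamedType('note', univ.OctetString()),
)
assert fields.getPositionByName('id') == 0
assert fields.getNameByPosition(1) == 'note'
assert fields.requiredComponents == frozenset([0])   # 'note' is OPTIONAL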

View file

@@ -1,94 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
# ASN.1 named integers
#
from pyasn1 import error
__all__ = ['NamedValues']
class NamedValues(object):
def __init__(self, *namedValues):
self.nameToValIdx = {}
self.valToNameIdx = {}
self.namedValues = ()
automaticVal = 1
for namedValue in namedValues:
if isinstance(namedValue, tuple):
name, val = namedValue
else:
name = namedValue
val = automaticVal
if name in self.nameToValIdx:
raise error.PyAsn1Error('Duplicate name %s' % (name,))
self.nameToValIdx[name] = val
if val in self.valToNameIdx:
raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val))
self.valToNameIdx[val] = name
self.namedValues = self.namedValues + ((name, val),)
automaticVal += 1
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues]))
def __str__(self):
return str(self.namedValues)
def __eq__(self, other):
return tuple(self) == tuple(other)
def __ne__(self, other):
return tuple(self) != tuple(other)
def __lt__(self, other):
return tuple(self) < tuple(other)
def __le__(self, other):
return tuple(self) <= tuple(other)
def __gt__(self, other):
return tuple(self) > tuple(other)
def __ge__(self, other):
return tuple(self) >= tuple(other)
def __hash__(self):
return hash(tuple(self))
def getName(self, value):
if value in self.valToNameIdx:
return self.valToNameIdx[value]
def getValue(self, name):
if name in self.nameToValIdx:
return self.nameToValIdx[name]
def getValues(self, *names):
try:
return [self.nameToValIdx[name] for name in names]
except KeyError:
raise error.PyAsn1Error(
'Unknown bit identifier(s): %s' % (set(names).difference(self.nameToValIdx),)
)
def __getitem__(self, i):
return self.namedValues[i]
def __len__(self):
return len(self.namedValues)
def __add__(self, namedValues):
return self.__class__(*self.namedValues + namedValues)
def __radd__(self, namedValues):
return self.__class__(*namedValues + tuple(self))
def clone(self, *namedValues):
return self.__class__(*tuple(self) + namedValues)
# XXX clone/subtype?
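# Editor's sketch, not part of the removed module: name/value lookups with
# NamedValues, using only the methods defined above.
from pyasn1.type.namedval import NamedValues

states = NamedValues(('up', 1), ('down', 2), ('testing', 3))
assert states.getValue('down') == 2
assert states.getName(3) == 'testing'
assert len(states) == 3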

View file

@@ -1,342 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1 import error
__all__ = ['tagClassUniversal', 'tagClassApplication', 'tagClassContext',
'tagClassPrivate', 'tagFormatSimple', 'tagFormatConstructed',
'tagCategoryImplicit', 'tagCategoryExplicit', 'tagCategoryUntagged',
'Tag', 'TagSet']
#: Identifier for ASN.1 class UNIVERSAL
tagClassUniversal = 0x00
#: Identifier for ASN.1 class APPLICATION
tagClassApplication = 0x40
#: Identifier for ASN.1 class context-specific
tagClassContext = 0x80
#: Identifier for ASN.1 class private
tagClassPrivate = 0xC0
#: Identifier for "simple" ASN.1 structure (e.g. scalar)
tagFormatSimple = 0x00
#: Identifier for "constructed" ASN.1 structure (e.g. may have inner components)
tagFormatConstructed = 0x20
tagCategoryImplicit = 0x01
tagCategoryExplicit = 0x02
tagCategoryUntagged = 0x04
class Tag(object):
"""Create ASN.1 tag
Represents an ASN.1 tag that can be attached to an ASN.1 type to make
types distinguishable from each other.
*Tag* objects are immutable and duck-type Python :class:`tuple` objects
holding three integer components of a tag.
Parameters
----------
tagClass: :py:class:`int`
Tag *class* value
tagFormat: :py:class:`int`
Tag *format* value
tagId: :py:class:`int`
Tag ID value
"""
def __init__(self, tagClass, tagFormat, tagId):
if tagId < 0:
raise error.PyAsn1Error('Negative tag ID (%s) not allowed' % tagId)
self.__tagClass = tagClass
self.__tagFormat = tagFormat
self.__tagId = tagId
self.__tagClassId = tagClass, tagId
self.__lazyHash = None
def __str__(self):
return '[%s:%s:%s]' % (self.__tagClass, self.__tagFormat, self.__tagId)
def __repr__(self):
return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % (
(self.__class__.__name__, self.__tagClass, self.__tagFormat, self.__tagId)
)
def __eq__(self, other):
return self.__tagClassId == other
def __ne__(self, other):
return self.__tagClassId != other
def __lt__(self, other):
return self.__tagClassId < other
def __le__(self, other):
return self.__tagClassId <= other
def __gt__(self, other):
return self.__tagClassId > other
def __ge__(self, other):
return self.__tagClassId >= other
def __hash__(self):
if self.__lazyHash is None:
self.__lazyHash = hash(self.__tagClassId)
return self.__lazyHash
def __getitem__(self, idx):
if idx == 0:
return self.__tagClass
elif idx == 1:
return self.__tagFormat
elif idx == 2:
return self.__tagId
else:
raise IndexError()
def __iter__(self):
yield self.__tagClass
yield self.__tagFormat
yield self.__tagId
def __and__(self, otherTag):
return self.__class__(self.__tagClass & otherTag.tagClass,
self.__tagFormat & otherTag.tagFormat,
self.__tagId & otherTag.tagId)
def __or__(self, otherTag):
return self.__class__(self.__tagClass | otherTag.tagClass,
self.__tagFormat | otherTag.tagFormat,
self.__tagId | otherTag.tagId)
@property
def tagClass(self):
"""ASN.1 tag class
Returns
-------
: :py:class:`int`
Tag class
"""
return self.__tagClass
@property
def tagFormat(self):
"""ASN.1 tag format
Returns
-------
: :py:class:`int`
Tag format
"""
return self.__tagFormat
@property
def tagId(self):
"""ASN.1 tag ID
Returns
-------
: :py:class:`int`
Tag ID
"""
return self.__tagId
class TagSet(object):
"""Create a collection of ASN.1 tags
Represents a combination of :class:`~pyasn1.type.tag.Tag` objects
that can be attached to an ASN.1 type to make types distinguishable
from each other.
*TagSet* objects are immutable and duck-type Python :class:`tuple` objects
holding arbitrary number of :class:`~pyasn1.type.tag.Tag` objects.
Parameters
----------
baseTag: :class:`~pyasn1.type.tag.Tag`
Base *Tag* object. This tag survives IMPLICIT tagging.
*superTags: :class:`~pyasn1.type.tag.Tag`
Additional *Tag* objects taking part in subtyping.
"""
def __init__(self, baseTag=(), *superTags):
self.__baseTag = baseTag
self.__superTags = superTags
self.__superTagsSignature = tuple(
[(superTag.tagClass, superTag.tagId) for superTag in superTags]
)
self.__lenOfSuperTags = len(superTags)
self.__lazyHash = None
def __str__(self):
return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]'
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__, '(), ' + ', '.join([repr(x) for x in self.__superTags])
)
def __add__(self, superTag):
return self.__class__(self.__baseTag, *self.__superTags + (superTag,))
def __radd__(self, superTag):
return self.__class__(self.__baseTag, *(superTag,) + self.__superTags)
def __getitem__(self, i):
if i.__class__ is slice:
return self.__class__(self.__baseTag, *self.__superTags[i])
else:
return self.__superTags[i]
def __eq__(self, other):
return self.__superTagsSignature == other
def __ne__(self, other):
return self.__superTagsSignature != other
def __lt__(self, other):
return self.__superTagsSignature < other
def __le__(self, other):
return self.__superTagsSignature <= other
def __gt__(self, other):
return self.__superTagsSignature > other
def __ge__(self, other):
return self.__superTagsSignature >= other
def __hash__(self):
if self.__lazyHash is None:
self.__lazyHash = hash(self.__superTags)
return self.__lazyHash
def __len__(self):
return self.__lenOfSuperTags
# descriptor protocol
def __get__(self, instance, owner):
if instance is None:
return self
# This is a bit of hack: look up instance attribute first,
# then try class attribute if instance attribute with that
# name is not available.
# The rationale is to have `.tagSet` readable-writeable
# as a class attribute and read-only as instance attribute.
try:
return instance._tagSet
except AttributeError:
return self
def __set__(self, instance, value):
raise AttributeError('attribute is read-only')
@property
def baseTag(self):
"""Return base ASN.1 tag
Returns
-------
: :class:`~pyasn1.type.tag.Tag`
Base tag of this *TagSet*
"""
return self.__baseTag
@property
def superTags(self):
"""Return ASN.1 tags
Returns
-------
: :py:class:`tuple`
Tuple of :class:`~pyasn1.type.tag.Tag` objects that this *TagSet* contains
"""
return self.__superTags
def tagExplicitly(self, superTag):
"""Return explicitly tagged *TagSet*
Create a new *TagSet* representing callee *TagSet* explicitly tagged
with passed tag(s). With explicit tagging mode, new tags are appended
to existing tag(s).
Parameters
----------
superTag: :class:`~pyasn1.type.tag.Tag`
*Tag* object to tag this *TagSet*
Returns
-------
: :class:`~pyasn1.type.tag.TagSet`
New *TagSet* object
"""
if superTag.tagClass == tagClassUniversal:
raise error.PyAsn1Error('Can\'t tag with UNIVERSAL class tag')
if superTag.tagFormat != tagFormatConstructed:
superTag = Tag(superTag.tagClass, tagFormatConstructed, superTag.tagId)
return self + superTag
def tagImplicitly(self, superTag):
"""Return implicitly tagged *TagSet*
Create a new *TagSet* representing callee *TagSet* implicitly tagged
with passed tag(s). With implicit tagging mode, new tag(s) replace the
last existing tag.
Parameters
----------
superTag: :class:`~pyasn1.type.tag.Tag`
*Tag* object to tag this *TagSet*
Returns
-------
: :class:`~pyasn1.type.tag.TagSet`
New *TagSet* object
"""
if self.__superTags:
superTag = Tag(superTag.tagClass, self.__superTags[-1].tagFormat, superTag.tagId)
return self[:-1] + superTag
def isSuperTagSetOf(self, tagSet):
"""Test type relationship against given *TagSet*
The callee is considered to be a supertype of given *TagSet*
tag-wise if all tags in *TagSet* are present in the callee and
they are in the same order.
Parameters
----------
tagSet: :class:`~pyasn1.type.tag.TagSet`
*TagSet* object to evaluate against the callee
Returns
-------
: :py:class:`bool`
`True` if callee is a supertype of *tagSet*
"""
if len(tagSet) < self.__lenOfSuperTags:
return False
return self.__superTags == tagSet[:self.__lenOfSuperTags]
# Backward compatibility
def getBaseTag(self):
return self.__baseTag
def initTagSet(tag):
return TagSet(tag, tag)
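# Editor's sketch, not part of the removed module: explicit vs. implicit tagging
# with the Tag/TagSet API defined above (0x02 is the UNIVERSAL tag for INTEGER).
from pyasn1.type import tag

base = tag.initTagSet(tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02))
ctx = tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)

explicit = base.tagExplicitly(ctx)   # appends a constructed context tag
implicit = base.tagImplicitly(ctx)   # replaces the last tag
assert len(explicit) == 2 and len(implicit) == 1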

View file

@@ -1,102 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1 import error
__all__ = ['TagMap']
class TagMap(object):
"""Map *TagSet* objects to ASN.1 types
Create an object mapping *TagSet* object to ASN.1 type.
*TagMap* objects are immutable and duck-type read-only Python
:class:`dict` objects holding *TagSet* objects as keys and ASN.1
type objects as values.
Parameters
----------
presentTypes: :py:class:`dict`
Map of :class:`~pyasn1.type.tag.TagSet` to ASN.1 objects considered
as being unconditionally present in the *TagMap*.
skipTypes: :py:class:`dict`
A collection of :class:`~pyasn1.type.tag.TagSet` objects considered
as absent in the *TagMap* even when *defaultType* is present.
defaultType: ASN.1 type object
An ASN.1 type object callee *TagMap* returns for any *TagSet* key not present
in *presentTypes* (unless given key is present in *skipTypes*).
"""
def __init__(self, presentTypes=None, skipTypes=None, defaultType=None):
self.__presentTypes = presentTypes or {}
self.__skipTypes = skipTypes or {}
self.__defaultType = defaultType
def __contains__(self, tagSet):
return (tagSet in self.__presentTypes or
self.__defaultType is not None and tagSet not in self.__skipTypes)
def __getitem__(self, tagSet):
try:
return self.__presentTypes[tagSet]
except KeyError:
if self.__defaultType is None:
raise KeyError()
elif tagSet in self.__skipTypes:
raise error.PyAsn1Error('Key in negative map')
else:
return self.__defaultType
def __iter__(self):
return iter(self.__presentTypes)
def __repr__(self):
s = self.__class__.__name__ + '('
if self.__presentTypes:
s += 'presentTypes=%r, ' % (self.__presentTypes,)
if self.__skipTypes:
s += 'skipTypes=%r, ' % (self.__skipTypes,)
if self.__defaultType is not None:
s += 'defaultType=%r' % (self.__defaultType,)
return s + ')'
def __str__(self):
s = self.__class__.__name__ + ': '
if self.__presentTypes:
s += 'presentTypes: %s, ' % ', '.join([x.prettyPrintType() for x in self.__presentTypes.values()])
if self.__skipTypes:
s += 'skipTypes: %s, ' % ', '.join([x.prettyPrintType() for x in self.__skipTypes.values()])
if self.__defaultType is not None:
s += 'defaultType: %s, ' % self.__defaultType.prettyPrintType()
return s
@property
def presentTypes(self):
"""Return *TagSet* to ASN.1 type map present in callee *TagMap*"""
return self.__presentTypes
@property
def skipTypes(self):
"""Return *TagSet* collection unconditionally absent in callee *TagMap*"""
return self.__skipTypes
@property
def defaultType(self):
"""Return default ASN.1 type being returned for any missing *TagSet*"""
return self.__defaultType
# Backward compatibility
def getPosMap(self):
return self.presentTypes
def getNegMap(self):
return self.skipTypes
def getDef(self):
return self.defaultType
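# Editor's sketch, not part of the removed module: TagMap lookup semantics,
# assuming the upstream pyasn1 univ types (pyasn1.type.univ, not shown in this diff).
from pyasn1.type import tagmap, univ

tm = tagmap.TagMap(presentTypes={univ.Integer.tagSet: univ.Integer()})
assert univ.Integer.tagSet in tm            # explicitly present
assert univ.OctetString.tagSet not in tm    # absent and no defaultType configured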

File diff suppressed because it is too large

View file

@@ -1,39 +0,0 @@
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.type import univ, char, tag
__all__ = ['ObjectDescriptor', 'GeneralizedTime', 'UTCTime']
NoValue = univ.NoValue
noValue = univ.noValue
class ObjectDescriptor(char.GraphicString):
__doc__ = char.GraphicString.__doc__
#: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
tagSet = char.GraphicString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7)
)
class GeneralizedTime(char.VisibleString):
__doc__ = char.GraphicString.__doc__
#: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
tagSet = char.VisibleString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)
)
class UTCTime(char.VisibleString):
__doc__ = char.GraphicString.__doc__
#: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
tagSet = char.VisibleString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23)
)

View file

@@ -1,27 +0,0 @@
This code is public domain. Everyone has the right to do whatever they want
with it for any purpose.
In case your jurisdiction does not consider the above disclaimer valid or
enforceable, here's an MIT license for you:
The MIT License (MIT)
Copyright (c) 2013 Vitalik Buterin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View file

@@ -1 +0,0 @@
include bitcoin/english.txt

View file

@@ -1,142 +0,0 @@
# Pybitcointools, Python library for Bitcoin signatures and transactions
### Advantages:
* Functions have a simple interface, inputting and outputting in standard formats
* No classes
* Many functions can be taken out and used individually
* Supports binary, hex and base58
* Transaction deserialization format almost compatible with BitcoinJS
* Electrum and BIP0032 support
* Make and publish a transaction all in a single command line instruction
* Includes non-bitcoin-specific conversion and JSON utilities
### Disadvantages:
* Not a full node, has no idea what blocks are
* Relies on centralized service (blockchain.info) for blockchain operations, although operations do have backups (eligius, blockr.io)
### Example usage (best way to learn :) ):
> from bitcoin import *
> priv = sha256('some big long brainwallet password')
> priv
'57c617d9b4e1f7af6ec97ca2ff57e94a28279a7eedd4d12a99fa11170e94f5a4'
> pub = privtopub(priv)
> pub
'0420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9'
> addr = pubtoaddr(pub)
> addr
'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'
> h = history(addr)
> h
[{'output': u'97f7c7d8ac85e40c255f8a763b6cd9a68f3a94d2e93e8bfa08f977b92e55465e:0', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}, {'output': u'4cc806bb04f730c445c60b3e0f4f44b54769a1c196ca37d8d4002135e4abd171:1', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}]
> outs = [{'value': 90000, 'address': '16iw1MQ1sy1DtRPYw3ao1bCamoyBJtRB4t'}]
> tx = mktx(h,outs)
> tx
'01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f7970000000000ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c0100000000ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000'
> tx2 = sign(tx,0,priv)
> tx2
'01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f797000000008b483045022100dd29d89a28451febb990fb1dafa21245b105140083ced315ebcdea187572b3990220713f2e554f384d29d7abfedf39f0eb92afba0ef46f374e49d43a728a0ff6046e01410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c0100000000ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000'
> tx3 = sign(tx2,1,priv)
> tx3
'01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f797000000008b483045022100dd29d89a28451febb990fb1dafa21245b105140083ced315ebcdea187572b3990220713f2e554f384d29d7abfedf39f0eb92afba0ef46f374e49d43a728a0ff6046e01410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c010000008c4930460221008bbaaaf172adfefc3a1315dc7312c88645832ff76d52e0029d127e65bbeeabe1022100fdeb89658d503cf2737cedb4049e5070f689c50a9b6c85997d49e0787938f93901410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000'
> pushtx(tx3)
'Transaction Submitted'
Or using the pybtctool command line interface:
@vub: pybtctool random_electrum_seed
484ccb566edb66c65dd0fd2e4d90ef65
@vub: pybtctool electrum_privkey 484ccb566edb66c65dd0fd2e4d90ef65 0 0
593240c2205e7b7b5d7c13393b7c9553497854b75c7470b76aeca50cd4a894d7
@vub: pybtctool electrum_mpk 484ccb566edb66c65dd0fd2e4d90ef65
484e42865b8e9a6ea8262fd1cde666b557393258ed598d842e563ad9e5e6c70a97e387eefdef123c1b8b4eb21fe210c6216ad7cc1e4186fbbba70f0e2c062c25
@vub: pybtctool bip32_master_key 21456t243rhgtucyadh3wgyrcubw3grydfbng
xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT
@vub: pybtctool bip32_ckd xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT 0
xprv9vfzYrpwo7QHFdtrcvsSCTrBESFPUf1g7NRvayy1QkEfUekpDKLfqvHjgypF5w3nAvnwPjtQUNkyywWNkLbiUS95khfHCzJXFkLEdwRepbw
@vub: pybtctool bip32_privtopub xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT
xpub661MyMwAqRbcFGfHrgLHpC5R4odnyasAZdefbDkHBcWarJcXh6SzTzbUkWuhnP142ZFdKdAJSuTSaiGDYjvm7bCLmA8DZqksYjJbYmcgrYF
The -s option lets you read arguments from the command line
@vub: pybtctool sha256 'some big long brainwallet password' | pybtctool -s privtoaddr | pybtctool -s history
[{'output': u'97f7c7d8ac85e40c255f8a763b6cd9a68f3a94d2e93e8bfa08f977b92e55465e:0', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}, {'output': u'4cc806bb04f730c445c60b3e0f4f44b54769a1c196ca37d8d4002135e4abd171:1', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}]
@vub: pybtctool random_electrum_seed | pybtctool -s electrum_privkey 0 0
593240c2205e7b7b5d7c13393b7c9553497854b75c7470b76aeca50cd4a894d7
The -b option lets you read binary data as an argument
@vub: pybtctool sha256 123 | pybtctool -s changebase 16 256 | pybtctool -b changebase 256 16
a665a45920422f9d417e4867efdc4fb8a04a1f3fff1fa07e998e86f7f7a27ae30a
The -j option lets you read json from the command line (-J to split a json list into multiple arguments)
@vub: pybtctool unspent 1FxkfJQLJTXpW6QmxGT6oF43ZH959ns8Cq | pybtctool -j select 200000001 | pybtctool -j mksend 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P:20000 1FxkfJQLJTXpW6QmxGT6oF43ZH959ns8Cq 1000 | pybtctool -s signall 805cd74ca322633372b9bfb857f3be41db0b8de43a3c44353b238c0acff9d523
0100000003d5001aae8358ae98cb02c1b6f9859dc1ac3dbc1e9cc88632afeb7b7e3c510a49000000008b4830450221009e03bb6122437767e2ca785535824f4ed13d2ebbb9fa4f9becc6d6f4e1e217dc022064577353c08d8d974250143d920d3b963b463e43bbb90f3371060645c49266b90141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff1529d655dff6a0f6c9815ee835312fb3ca4df622fde21b6b9097666e9284087d010000008a473044022035dd67d18b575ebd339d05ca6ffa1d27d7549bd993aeaf430985795459fc139402201aaa162cc50181cee493870c9479b1148243a33923cb77be44a73ca554a4e5d60141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff23d5f9cf0a8c233b35443c3ae48d0bdb41bef357b8bfb972336322a34cd75c80010000008b483045022014daa5c5bbe9b3e5f2539a5cd8e22ce55bc84788f946c5b3643ecac85b4591a9022100a4062074a1df3fa0aea5ef67368d0b1f0eaac520bee6e417c682d83cd04330450141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff02204e0000000000001976a914946cb2e08075bcbaf157e47bcb67eb2b2339d24288ac5b3c4411000000001976a914a41d15ae657ad3bfd0846771a34d7584c37d54a288ac00000000
Fun stuff with json:
@vub: pybtctool history 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P | pybtctool -j multiaccess value | pybtctool -j sum
625216206372
@vub: pybtctool history 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P | pybtctool -j count
6198
### Listing of main commands:
* privkey_to_pubkey : (privkey) -> pubkey
* privtopub : (privkey) -> pubkey
* pubkey_to_address : (pubkey) -> address
* pubtoaddr : (pubkey) -> address
* privkey_to_address : (privkey) -> address
* privtoaddr : (privkey) -> address
* add : (key1, key2) -> key1 + key2 (works on privkeys or pubkeys)
* multiply : (pubkey, privkey) -> returns pubkey * privkey
* ecdsa_sign : (message, privkey) -> sig
* ecdsa_verify : (message, sig, pubkey) -> True/False
* ecdsa_recover : (message, sig) -> pubkey
* random_key : () -> privkey
* random_electrum_seed : () -> electrum seed
* electrum_stretch : (seed) -> secret exponent
* electrum_privkey : (seed or secret exponent, i, type) -> privkey
* electrum_mpk : (seed or secret exponent) -> master public key
* electrum_pubkey : (seed or secexp or mpk) -> pubkey
* bip32_master_key : (seed) -> bip32 master key
* bip32_ckd : (private or public bip32 key, i) -> child key
* bip32_privtopub : (private bip32 key) -> public bip32 key
* bip32_extract_key : (private or public bip32_key) -> privkey or pubkey
* deserialize : (hex or bin transaction) -> JSON tx
* serialize : (JSON tx) -> hex or bin tx
* mktx : (inputs, outputs) -> tx
* mksend : (inputs, outputs, change_addr, fee) -> tx
* sign : (tx, i, privkey) -> tx with index i signed with privkey
* multisign : (tx, i, script, privkey) -> signature
* apply_multisignatures: (tx, i, script, sigs) -> tx with index i signed with sigs
* scriptaddr : (script) -> P2SH address
* mk_multisig_script : (pubkeys, k, n) -> k-of-n multisig script from pubkeys
* verify_tx_input : (tx, i, script, sig, pub) -> True/False
* tx_hash : (hex or bin tx) -> hash
* history : (address1, address2, etc) -> outputs to those addresses
* unspent : (address1, address2, etc) -> unspent outputs to those addresses
* fetchtx : (txhash) -> tx if present
* pushtx : (hex or bin tx) -> tries to push to blockchain.info/pushtx
* access : (json list/object, prop) -> desired property of that json object
* multiaccess : (json list, prop) -> like access, but mapped across each list element
* slice : (json list, start, end) -> given slice of the list
* count : (json list) -> number of elements
* sum : (json list) -> sum of all values

View file

@@ -1,10 +0,0 @@
from .py2specials import *
from .py3specials import *
from .main import *
from .transaction import *
from .deterministic import *
from .bci import *
from .composite import *
from .stealth import *
from .blocks import *
from .mnemonic import *

View file

@@ -1,528 +0,0 @@
#!/usr/bin/python
import json, re
import random
import sys
try:
from urllib.request import build_opener
except:
from urllib2 import build_opener
# Makes a request to a given URL (first arg) and optional params (second arg)
def make_request(*args):
opener = build_opener()
opener.addheaders = [('User-agent',
'Mozilla/5.0'+str(random.randrange(1000000)))]
try:
return opener.open(*args).read().strip()
except Exception as e:
try:
p = e.read().strip()
except:
p = e
raise Exception(p)
def is_testnet(inp):
'''Checks if inp is a testnet address or if UTXO is a known testnet TxID'''
if isinstance(inp, (list, tuple)) and len(inp) >= 1:
return any([is_testnet(x) for x in inp])
elif not isinstance(inp, basestring): # sanity check
raise TypeError("Input must be str/unicode, not type %s" % str(type(inp)))
if not inp or (inp.lower() in ("btc", "testnet")):
pass
## ADDRESSES
if inp[0] in "123mn":
if re.match("^[2mn][a-km-zA-HJ-NP-Z0-9]{26,33}$", inp):
return True
elif re.match("^[13][a-km-zA-HJ-NP-Z0-9]{26,33}$", inp):
return False
else:
#sys.stderr.write("Bad address format %s")
return None
## TXID
elif re.match('^[0-9a-fA-F]{64}$', inp):
base_url = "http://api.blockcypher.com/v1/btc/{network}/txs/{txid}?includesHex=false"
try:
# try testnet fetchtx
make_request(base_url.format(network="test3", txid=inp.lower()))
return True
except:
# try mainnet fetchtx
make_request(base_url.format(network="main", txid=inp.lower()))
return False
sys.stderr.write("TxID %s has no match for testnet or mainnet (Bad TxID)")
return None
else:
raise TypeError("{0} is unknown input".format(inp))
def set_network(*args):
'''Decides if args for unspent/fetchtx/pushtx are mainnet or testnet'''
r = []
for arg in args:
if not arg:
pass
if isinstance(arg, basestring):
r.append(is_testnet(arg))
elif isinstance(arg, (list, tuple)):
return set_network(*arg)
if any(r) and not all(r):
raise Exception("Mixed Testnet/Mainnet queries")
return "testnet" if any(r) else "btc"
def parse_addr_args(*args):
# Valid input formats: unspent([addr1, addr2, addr3])
# unspent([addr1, addr2, addr3], network)
# unspent(addr1, addr2, addr3)
# unspent(addr1, addr2, addr3, network)
addr_args = args
network = "btc"
if len(args) == 0:
return [], 'btc'
if len(args) >= 1 and args[-1] in ('testnet', 'btc'):
network = args[-1]
addr_args = args[:-1]
if len(addr_args) == 1 and isinstance(addr_args, list):
network = set_network(*addr_args[0])
addr_args = addr_args[0]
if addr_args and isinstance(addr_args, tuple) and isinstance(addr_args[0], list):
addr_args = addr_args[0]
network = set_network(addr_args)
return network, addr_args
# Gets the unspent outputs of one or more addresses
def bci_unspent(*args):
network, addrs = parse_addr_args(*args)
u = []
for a in addrs:
try:
data = make_request('https://blockchain.info/unspent?active='+a)
except Exception as e:
if str(e) == 'No free outputs to spend':
continue
else:
raise Exception(e)
try:
jsonobj = json.loads(data.decode("utf-8"))
for o in jsonobj["unspent_outputs"]:
h = o['tx_hash'].decode('hex')[::-1].encode('hex')
u.append({
"output": h+':'+str(o['tx_output_n']),
"value": o['value']
})
except:
raise Exception("Failed to decode data: "+data)
return u
def blockr_unspent(*args):
# Valid input formats: blockr_unspent([addr1, addr2,addr3])
# blockr_unspent(addr1, addr2, addr3)
# blockr_unspent([addr1, addr2, addr3], network)
# blockr_unspent(addr1, addr2, addr3, network)
# Where network is 'btc' or 'testnet'
network, addr_args = parse_addr_args(*args)
if network == 'testnet':
blockr_url = 'http://tbtc.blockr.io/api/v1/address/unspent/'
elif network == 'btc':
blockr_url = 'http://btc.blockr.io/api/v1/address/unspent/'
else:
raise Exception(
'Unsupported network {0} for blockr_unspent'.format(network))
if len(addr_args) == 0:
return []
elif isinstance(addr_args[0], list):
addrs = addr_args[0]
else:
addrs = addr_args
res = make_request(blockr_url+','.join(addrs))
data = json.loads(res.decode("utf-8"))['data']
o = []
if 'unspent' in data:
data = [data]
for dat in data:
for u in dat['unspent']:
o.append({
"output": u['tx']+':'+str(u['n']),
"value": int(u['amount'].replace('.', ''))
})
return o
def helloblock_unspent(*args):
network, addrs = parse_addr_args(*args)  # parse_addr_args returns (network, addrs)
if network == 'testnet':
url = 'https://testnet.helloblock.io/v1/addresses/%s/unspents?limit=500&offset=%s'
elif network == 'btc':
url = 'https://mainnet.helloblock.io/v1/addresses/%s/unspents?limit=500&offset=%s'
o = []
for addr in addrs:
for offset in xrange(0, 10**9, 500):
res = make_request(url % (addr, offset))
data = json.loads(res.decode("utf-8"))["data"]
if not len(data["unspents"]):
break
elif offset:
sys.stderr.write("Getting more unspents: %d\n" % offset)
for dat in data["unspents"]:
o.append({
"output": dat["txHash"]+':'+str(dat["index"]),
"value": dat["value"],
})
return o
unspent_getters = {
'bci': bci_unspent,
'blockr': blockr_unspent,
'helloblock': helloblock_unspent
}
def unspent(*args, **kwargs):
f = unspent_getters.get(kwargs.get('source', ''), bci_unspent)
return f(*args)
# Gets the transaction output history of a given set of addresses,
# including whether or not they have been spent
def history(*args):
# Valid input formats: history([addr1, addr2,addr3])
# history(addr1, addr2, addr3)
if len(args) == 0:
return []
elif isinstance(args[0], list):
addrs = args[0]
else:
addrs = args
txs = []
for addr in addrs:
offset = 0
while 1:
gathered = False
while not gathered:
try:
data = make_request(
'https://blockchain.info/address/%s?format=json&offset=%s' %
(addr, offset))
gathered = True
except Exception as e:
try:
sys.stderr.write(e.read().strip())
except:
sys.stderr.write(str(e))
gathered = False
try:
jsonobj = json.loads(data.decode("utf-8"))
except:
raise Exception("Failed to decode data: "+data)
txs.extend(jsonobj["txs"])
if len(jsonobj["txs"]) < 50:
break
offset += 50
sys.stderr.write("Fetching more transactions... "+str(offset)+'\n')
outs = {}
for tx in txs:
for o in tx["out"]:
if o.get('addr', None) in addrs:
key = str(tx["tx_index"])+':'+str(o["n"])
outs[key] = {
"address": o["addr"],
"value": o["value"],
"output": tx["hash"]+':'+str(o["n"]),
"block_height": tx.get("block_height", None)
}
for tx in txs:
for i, inp in enumerate(tx["inputs"]):
if "prev_out" in inp:
if inp["prev_out"].get("addr", None) in addrs:
key = str(inp["prev_out"]["tx_index"]) + \
':'+str(inp["prev_out"]["n"])
if outs.get(key):
outs[key]["spend"] = tx["hash"]+':'+str(i)
return [outs[k] for k in outs]
# Pushes a transaction to the network using https://blockchain.info/pushtx
def bci_pushtx(tx):
if not re.match('^[0-9a-fA-F]*$', tx):
tx = tx.encode('hex')
return make_request('https://blockchain.info/pushtx', 'tx='+tx)
def eligius_pushtx(tx):
if not re.match('^[0-9a-fA-F]*$', tx):
tx = tx.encode('hex')
s = make_request(
'http://eligius.st/~wizkid057/newstats/pushtxn.php',
'transaction='+tx+'&send=Push')
strings = re.findall('string[^"]*"[^"]*"', s)
for string in strings:
quote = re.findall('"[^"]*"', string)[0]
if len(quote) >= 5:
return quote[1:-1]
def blockr_pushtx(tx, network='btc'):
if network == 'testnet':
blockr_url = 'http://tbtc.blockr.io/api/v1/tx/push'
elif network == 'btc':
blockr_url = 'http://btc.blockr.io/api/v1/tx/push'
else:
raise Exception(
'Unsupported network {0} for blockr_pushtx'.format(network))
if not re.match('^[0-9a-fA-F]*$', tx):
tx = tx.encode('hex')
return make_request(blockr_url, '{"hex":"%s"}' % tx)
def helloblock_pushtx(tx):
if not re.match('^[0-9a-fA-F]*$', tx):
tx = tx.encode('hex')
return make_request('https://mainnet.helloblock.io/v1/transactions',
'rawTxHex='+tx)
pushtx_getters = {
'bci': bci_pushtx,
'blockr': blockr_pushtx,
'helloblock': helloblock_pushtx
}
def pushtx(*args, **kwargs):
f = pushtx_getters.get(kwargs.get('source', ''), bci_pushtx)
return f(*args)
def last_block_height(network='btc'):
if network == 'testnet':
data = make_request('http://tbtc.blockr.io/api/v1/block/info/last')
jsonobj = json.loads(data.decode("utf-8"))
return jsonobj["data"]["nb"]
data = make_request('https://blockchain.info/latestblock')
jsonobj = json.loads(data.decode("utf-8"))
return jsonobj["height"]
# Gets a specific transaction
def bci_fetchtx(txhash):
if isinstance(txhash, list):
return [bci_fetchtx(h) for h in txhash]
if not re.match('^[0-9a-fA-F]*$', txhash):
txhash = txhash.encode('hex')
data = make_request('https://blockchain.info/rawtx/'+txhash+'?format=hex')
return data
def blockr_fetchtx(txhash, network='btc'):
if network == 'testnet':
blockr_url = 'http://tbtc.blockr.io/api/v1/tx/raw/'
elif network == 'btc':
blockr_url = 'http://btc.blockr.io/api/v1/tx/raw/'
else:
raise Exception(
'Unsupported network {0} for blockr_fetchtx'.format(network))
if isinstance(txhash, list):
txhash = ','.join([x.encode('hex') if not re.match('^[0-9a-fA-F]*$', x)
else x for x in txhash])
jsondata = json.loads(make_request(blockr_url+txhash).decode("utf-8"))
return [d['tx']['hex'] for d in jsondata['data']]
else:
if not re.match('^[0-9a-fA-F]*$', txhash):
txhash = txhash.encode('hex')
jsondata = json.loads(make_request(blockr_url+txhash).decode("utf-8"))
return jsondata['data']['tx']['hex']
def helloblock_fetchtx(txhash, network='btc'):
if isinstance(txhash, list):
return [helloblock_fetchtx(h) for h in txhash]
if not re.match('^[0-9a-fA-F]*$', txhash):
txhash = txhash.encode('hex')
if network == 'testnet':
url = 'https://testnet.helloblock.io/v1/transactions/'
elif network == 'btc':
url = 'https://mainnet.helloblock.io/v1/transactions/'
else:
raise Exception(
'Unsupported network {0} for helloblock_fetchtx'.format(network))
data = json.loads(make_request(url + txhash).decode("utf-8"))["data"]["transaction"]
o = {
"locktime": data["locktime"],
"version": data["version"],
"ins": [],
"outs": []
}
for inp in data["inputs"]:
o["ins"].append({
"script": inp["scriptSig"],
"outpoint": {
"index": inp["prevTxoutIndex"],
"hash": inp["prevTxHash"],
},
"sequence": 4294967295
})
for outp in data["outputs"]:
o["outs"].append({
"value": outp["value"],
"script": outp["scriptPubKey"]
})
from .transaction import serialize
from .transaction import txhash as TXHASH
tx = serialize(o)
assert TXHASH(tx) == txhash
return tx
fetchtx_getters = {
'bci': bci_fetchtx,
'blockr': blockr_fetchtx,
'helloblock': helloblock_fetchtx
}
def fetchtx(*args, **kwargs):
f = fetchtx_getters.get(kwargs.get('source', ''), bci_fetchtx)
return f(*args)
def firstbits(address):
if len(address) >= 25:
return make_request('https://blockchain.info/q/getfirstbits/'+address)
else:
return make_request(
'https://blockchain.info/q/resolvefirstbits/'+address)
def get_block_at_height(height):
j = json.loads(make_request("https://blockchain.info/block-height/" +
str(height)+"?format=json").decode("utf-8"))
for b in j['blocks']:
if b['main_chain'] is True:
return b
raise Exception("Block at this height not found")
def _get_block(inp):
if len(str(inp)) < 64:
return get_block_at_height(inp)
else:
return json.loads(make_request(
'https://blockchain.info/rawblock/'+inp).decode("utf-8"))
def bci_get_block_header_data(inp):
j = _get_block(inp)
return {
'version': j['ver'],
'hash': j['hash'],
'prevhash': j['prev_block'],
'timestamp': j['time'],
'merkle_root': j['mrkl_root'],
'bits': j['bits'],
'nonce': j['nonce'],
}
def blockr_get_block_header_data(height, network='btc'):
if network == 'testnet':
blockr_url = "http://tbtc.blockr.io/api/v1/block/raw/"
elif network == 'btc':
blockr_url = "http://btc.blockr.io/api/v1/block/raw/"
else:
raise Exception(
'Unsupported network {0} for blockr_get_block_header_data'.format(network))
k = json.loads(make_request(blockr_url + str(height)).decode("utf-8"))
j = k['data']
return {
'version': j['version'],
'hash': j['hash'],
'prevhash': j['previousblockhash'],
'timestamp': j['time'],
'merkle_root': j['merkleroot'],
'bits': int(j['bits'], 16),
'nonce': j['nonce'],
}
def get_block_timestamp(height, network='btc'):
if network == 'testnet':
blockr_url = "http://tbtc.blockr.io/api/v1/block/info/"
elif network == 'btc':
blockr_url = "http://btc.blockr.io/api/v1/block/info/"
else:
raise Exception(
'Unsupported network {0} for get_block_timestamp'.format(network))
import time, calendar
if isinstance(height, list):
k = json.loads(make_request(blockr_url + ','.join([str(x) for x in height])).decode("utf-8"))
o = {x['nb']: calendar.timegm(time.strptime(x['time_utc'],
"%Y-%m-%dT%H:%M:%SZ")) for x in k['data']}
return [o[x] for x in height]
else:
k = json.loads(make_request(blockr_url + str(height)).decode("utf-8"))
j = k['data']['time_utc']
return calendar.timegm(time.strptime(j, "%Y-%m-%dT%H:%M:%SZ"))
block_header_data_getters = {
'bci': bci_get_block_header_data,
'blockr': blockr_get_block_header_data
}
def get_block_header_data(inp, **kwargs):
f = block_header_data_getters.get(kwargs.get('source', ''),
bci_get_block_header_data)
return f(inp, **kwargs)
def get_txs_in_block(inp):
j = _get_block(inp)
hashes = [t['hash'] for t in j['tx']]
return hashes
def get_block_height(txhash):
j = json.loads(make_request('https://blockchain.info/rawtx/'+txhash).decode("utf-8"))
return j['block_height']
# fromAddr, toAddr, 12345, changeAddress
def get_tx_composite(inputs, outputs, output_value, change_address=None, network=None):
"""mktx using blockcypher API"""
inputs = [inputs] if not isinstance(inputs, list) else inputs
outputs = [outputs] if not isinstance(outputs, list) else outputs
network = set_network(change_address or inputs) if not network else network.lower()
url = "http://api.blockcypher.com/v1/btc/{network}/txs/new?includeToSignTx=true".format(
network=('test3' if network=='testnet' else 'main'))
is_address = lambda a: bool(re.match("^[123mn][a-km-zA-HJ-NP-Z0-9]{26,33}$", a))
if any([is_address(x) for x in inputs]):
inputs_type = 'addresses' # also accepts UTXOs, only addresses supported presently
if any([is_address(x) for x in outputs]):
outputs_type = 'addresses' # TODO: add UTXO support
data = {
'inputs': [{inputs_type: inputs}],
'confirmations': 0,
'preference': 'high',
'outputs': [{outputs_type: outputs, "value": output_value}]
}
if change_address:
data["change_address"] = change_address #
jdata = json.loads(make_request(url, data))
hash, txh = jdata.get("tosign")[0], jdata.get("tosign_tx")[0]
assert bin_dbl_sha256(txh.decode('hex')).encode('hex') == hash, "checksum mismatch %s" % hash
return txh.encode("utf-8")
blockcypher_mktx = get_tx_composite
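# Editor's sketch, not part of the removed module: the *_getters dictionaries above
# dispatch on a `source` keyword. This performs live HTTP requests, and several of
# the third-party endpoints referenced above (blockr.io, helloblock.io) are long
# defunct, so treat it purely as an illustration of the dispatch pattern.
def demo_unspent(addr):
    return unspent(addr, source='bci')   # unknown sources also fall back to bci_unspent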

View file

@@ -1,50 +0,0 @@
from .main import *
def serialize_header(inp):
o = encode(inp['version'], 256, 4)[::-1] + \
inp['prevhash'].decode('hex')[::-1] + \
inp['merkle_root'].decode('hex')[::-1] + \
encode(inp['timestamp'], 256, 4)[::-1] + \
encode(inp['bits'], 256, 4)[::-1] + \
encode(inp['nonce'], 256, 4)[::-1]
h = bin_sha256(bin_sha256(o))[::-1].encode('hex')
assert h == inp['hash'], (sha256(o), inp['hash'])
return o.encode('hex')
def deserialize_header(inp):
inp = inp.decode('hex')
return {
"version": decode(inp[:4][::-1], 256),
"prevhash": inp[4:36][::-1].encode('hex'),
"merkle_root": inp[36:68][::-1].encode('hex'),
"timestamp": decode(inp[68:72][::-1], 256),
"bits": decode(inp[72:76][::-1], 256),
"nonce": decode(inp[76:80][::-1], 256),
"hash": bin_sha256(bin_sha256(inp))[::-1].encode('hex')
}
def mk_merkle_proof(header, hashes, index):
nodes = [h.decode('hex')[::-1] for h in hashes]
if len(nodes) % 2 and len(nodes) > 2:
nodes.append(nodes[-1])
layers = [nodes]
while len(nodes) > 1:
newnodes = []
for i in range(0, len(nodes) - 1, 2):
newnodes.append(bin_sha256(bin_sha256(nodes[i] + nodes[i+1])))
if len(newnodes) % 2 and len(newnodes) > 2:
newnodes.append(newnodes[-1])
nodes = newnodes
layers.append(nodes)
# Sanity check, make sure merkle root is valid
assert nodes[0][::-1].encode('hex') == header['merkle_root']
merkle_siblings = \
[layers[i][(index >> i) ^ 1] for i in range(len(layers)-1)]
return {
"hash": hashes[index],
"siblings": [x[::-1].encode('hex') for x in merkle_siblings],
"header": header
}
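# Editor's sketch, not part of the removed module: round-tripping a header through
# the helpers above. Python 2 only, like the surrounding code (str.decode('hex'));
# the all-zero header is a made-up placeholder, not a real Bitcoin block.
hdr = deserialize_header('00' * 160)             # 80 zero bytes
assert hdr['version'] == 0 and hdr['nonce'] == 0
assert serialize_header(hdr) == '00' * 160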

View file

@@ -1,128 +0,0 @@
from .main import *
from .transaction import *
from .bci import *
from .deterministic import *
from .blocks import *
# Takes privkey, address, value (satoshis), fee (satoshis)
def send(frm, to, value, fee=10000, **kwargs):
return sendmultitx(frm, to + ":" + str(value), fee, **kwargs)
# Takes privkey, "address1:value1,address2:value2" (satoshis), fee (satoshis)
def sendmultitx(frm, *args, **kwargs):
tv, fee = args[:-1], int(args[-1])
outs = []
outvalue = 0
for a in tv:
outs.append(a)
outvalue += int(a.split(":")[1])
u = unspent(privtoaddr(frm), **kwargs)
u2 = select(u, int(outvalue)+int(fee))
argz = u2 + outs + [privtoaddr(frm), fee]
tx = mksend(*argz)
tx2 = signall(tx, frm)
return pushtx(tx2, **kwargs)
# Takes address, address, value (satoshis), fee(satoshis)
def preparetx(frm, to, value, fee=10000, **kwargs):
tovalues = to + ":" + str(value)
return preparemultitx(frm, tovalues, fee, **kwargs)
# Takes address, address:value, address:value ... (satoshis), fee(satoshis)
def preparemultitx(frm, *args, **kwargs):
tv, fee = args[:-1], int(args[-1])
outs = []
outvalue = 0
for a in tv:
outs.append(a)
outvalue += int(a.split(":")[1])
u = unspent(frm, **kwargs)
u2 = select(u, int(outvalue)+int(fee))
argz = u2 + outs + [frm, fee]
return mksend(*argz)
# BIP32 hierarchical deterministic multisig script
def bip32_hdm_script(*args):
if len(args) == 3:
keys, req, path = args
else:
i, keys, path = 0, [], []
while len(args[i]) > 40:
keys.append(args[i])
i += 1
req = int(args[i])
path = map(int, args[i+1:])
pubs = sorted(map(lambda x: bip32_descend(x, path), keys))
return mk_multisig_script(pubs, req)
# BIP32 hierarchical deterministic multisig address
def bip32_hdm_addr(*args):
return scriptaddr(bip32_hdm_script(*args))
# Setup a coinvault transaction
def setup_coinvault_tx(tx, script):
txobj = deserialize(tx)
N = deserialize_script(script)[-2]
for inp in txobj["ins"]:
inp["script"] = serialize_script([None] * (N+1) + [script])
return serialize(txobj)
# Sign a coinvault transaction
def sign_coinvault_tx(tx, priv):
pub = privtopub(priv)
txobj = deserialize(tx)
subscript = deserialize_script(txobj['ins'][0]['script'])
oscript = deserialize_script(subscript[-1])
k, pubs = oscript[0], oscript[1:-2]
for j in range(len(txobj['ins'])):
scr = deserialize_script(txobj['ins'][j]['script'])
for i, p in enumerate(pubs):
if p == pub:
scr[i+1] = multisign(tx, j, subscript[-1], priv)
if len(filter(lambda x: x, scr[1:-1])) >= k:
scr = [None] + filter(lambda x: x, scr[1:-1])[:k] + [scr[-1]]
txobj['ins'][j]['script'] = serialize_script(scr)
return serialize(txobj)
# Inspects a transaction
def inspect(tx, **kwargs):
d = deserialize(tx)
isum = 0
ins = {}
for _in in d['ins']:
h = _in['outpoint']['hash']
i = _in['outpoint']['index']
prevout = deserialize(fetchtx(h, **kwargs))['outs'][i]
isum += prevout['value']
a = script_to_address(prevout['script'])
ins[a] = ins.get(a, 0) + prevout['value']
outs = []
osum = 0
for _out in d['outs']:
outs.append({'address': script_to_address(_out['script']),
'value': _out['value']})
osum += _out['value']
return {
'fee': isum - osum,
'outs': outs,
'ins': ins
}
def merkle_prove(txhash):
blocknum = str(get_block_height(txhash))
header = get_block_header_data(blocknum)
hashes = get_txs_in_block(blocknum)
i = hashes.index(txhash)
return mk_merkle_proof(header, hashes, i)
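# Editor's sketch, not part of the removed module: the flow these helpers wrap.
# The passphrase and address are placeholders; the commented calls need network
# access and the web APIs wrapped by bci.py are largely defunct today.
priv = sha256('illustration-only passphrase, never reuse')
frm = privtoaddr(priv)
# tx = preparetx(frm, '16iw1MQ1sy1DtRPYw3ao1bCamoyBJtRB4t', 90000, 10000)  # network
# signed = signall(tx, priv)
# pushtx(signed)                                                           # network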

View file

@@ -1,199 +0,0 @@
from .main import *
import hmac
import hashlib
from binascii import hexlify
# Electrum wallets
def electrum_stretch(seed):
return slowsha(seed)
# Accepts seed or stretched seed, returns master public key
def electrum_mpk(seed):
if len(seed) == 32:
seed = electrum_stretch(seed)
return privkey_to_pubkey(seed)[2:]
# Accepts (seed or stretched seed), index and secondary index
# (conventionally 0 for ordinary addresses, 1 for change) , returns privkey
def electrum_privkey(seed, n, for_change=0):
if len(seed) == 32:
seed = electrum_stretch(seed)
mpk = electrum_mpk(seed)
offset = dbl_sha256(from_int_representation_to_bytes(n)+b':'+from_int_representation_to_bytes(for_change)+b':'+binascii.unhexlify(mpk))
return add_privkeys(seed, offset)
# Accepts (seed or stretched seed or master pubkey), index and secondary index
# (conventionally 0 for ordinary addresses, 1 for change) , returns pubkey
def electrum_pubkey(masterkey, n, for_change=0):
if len(masterkey) == 32:
mpk = electrum_mpk(electrum_stretch(masterkey))
elif len(masterkey) == 64:
mpk = electrum_mpk(masterkey)
else:
mpk = masterkey
bin_mpk = encode_pubkey(mpk, 'bin_electrum')
offset = bin_dbl_sha256(from_int_representation_to_bytes(n)+b':'+from_int_representation_to_bytes(for_change)+b':'+bin_mpk)
return add_pubkeys('04'+mpk, privtopub(offset))
# seed/stretched seed/pubkey -> address (convenience method)
def electrum_address(masterkey, n, for_change=0, version=0):
return pubkey_to_address(electrum_pubkey(masterkey, n, for_change), version)
# Given a master public key, a private key from that wallet and its index,
# cracks the secret exponent which can be used to generate all other private
# keys in the wallet
def crack_electrum_wallet(mpk, pk, n, for_change=0):
bin_mpk = encode_pubkey(mpk, 'bin_electrum')
offset = dbl_sha256(str(n)+':'+str(for_change)+':'+bin_mpk)
return subtract_privkeys(pk, offset)
# Below code ASSUMES binary inputs and compressed pubkeys
MAINNET_PRIVATE = b'\x04\x88\xAD\xE4'
MAINNET_PUBLIC = b'\x04\x88\xB2\x1E'
TESTNET_PRIVATE = b'\x04\x35\x83\x94'
TESTNET_PUBLIC = b'\x04\x35\x87\xCF'
PRIVATE = [MAINNET_PRIVATE, TESTNET_PRIVATE]
PUBLIC = [MAINNET_PUBLIC, TESTNET_PUBLIC]
# BIP32 child key derivation
def raw_bip32_ckd(rawtuple, i):
vbytes, depth, fingerprint, oldi, chaincode, key = rawtuple
i = int(i)
if vbytes in PRIVATE:
priv = key
pub = privtopub(key)
else:
pub = key
if i >= 2**31:
if vbytes in PUBLIC:
raise Exception("Can't do private derivation on public key!")
I = hmac.new(chaincode, b'\x00'+priv[:32]+encode(i, 256, 4), hashlib.sha512).digest()
else:
I = hmac.new(chaincode, pub+encode(i, 256, 4), hashlib.sha512).digest()
if vbytes in PRIVATE:
        newkey = add_privkeys(I[:32]+b'\x01', priv)
fingerprint = bin_hash160(privtopub(key))[:4]
if vbytes in PUBLIC:
newkey = add_pubkeys(compress(privtopub(I[:32])), key)
fingerprint = bin_hash160(key)[:4]
return (vbytes, depth + 1, fingerprint, i, I[32:], newkey)
def bip32_serialize(rawtuple):
vbytes, depth, fingerprint, i, chaincode, key = rawtuple
i = encode(i, 256, 4)
chaincode = encode(hash_to_int(chaincode), 256, 32)
keydata = b'\x00'+key[:-1] if vbytes in PRIVATE else key
bindata = vbytes + from_int_to_byte(depth % 256) + fingerprint + i + chaincode + keydata
return changebase(bindata+bin_dbl_sha256(bindata)[:4], 256, 58)
def bip32_deserialize(data):
dbin = changebase(data, 58, 256)
if bin_dbl_sha256(dbin[:-4])[:4] != dbin[-4:]:
raise Exception("Invalid checksum")
vbytes = dbin[0:4]
depth = from_byte_to_int(dbin[4])
fingerprint = dbin[5:9]
i = decode(dbin[9:13], 256)
chaincode = dbin[13:45]
key = dbin[46:78]+b'\x01' if vbytes in PRIVATE else dbin[45:78]
return (vbytes, depth, fingerprint, i, chaincode, key)
def raw_bip32_privtopub(rawtuple):
vbytes, depth, fingerprint, i, chaincode, key = rawtuple
newvbytes = MAINNET_PUBLIC if vbytes == MAINNET_PRIVATE else TESTNET_PUBLIC
return (newvbytes, depth, fingerprint, i, chaincode, privtopub(key))
def bip32_privtopub(data):
return bip32_serialize(raw_bip32_privtopub(bip32_deserialize(data)))
def bip32_ckd(data, i):
return bip32_serialize(raw_bip32_ckd(bip32_deserialize(data), i))
def bip32_master_key(seed, vbytes=MAINNET_PRIVATE):
I = hmac.new(from_string_to_bytes("Bitcoin seed"), seed, hashlib.sha512).digest()
return bip32_serialize((vbytes, 0, b'\x00'*4, 0, I[32:], I[:32]+b'\x01'))
def bip32_bin_extract_key(data):
return bip32_deserialize(data)[-1]
def bip32_extract_key(data):
return safe_hexlify(bip32_deserialize(data)[-1])
# Exploits the same vulnerability as above in Electrum wallets
# Takes a BIP32 pubkey and one of the child privkeys of its corresponding
# privkey and returns the BIP32 privkey associated with that pubkey
def raw_crack_bip32_privkey(parent_pub, priv):
vbytes, depth, fingerprint, i, chaincode, key = priv
pvbytes, pdepth, pfingerprint, pi, pchaincode, pkey = parent_pub
i = int(i)
if i >= 2**31:
raise Exception("Can't crack private derivation!")
I = hmac.new(pchaincode, pkey+encode(i, 256, 4), hashlib.sha512).digest()
pprivkey = subtract_privkeys(key, I[:32]+b'\x01')
newvbytes = MAINNET_PRIVATE if vbytes == MAINNET_PUBLIC else TESTNET_PRIVATE
return (newvbytes, pdepth, pfingerprint, pi, pchaincode, pprivkey)
def crack_bip32_privkey(parent_pub, priv):
dsppub = bip32_deserialize(parent_pub)
dspriv = bip32_deserialize(priv)
return bip32_serialize(raw_crack_bip32_privkey(dsppub, dspriv))
def coinvault_pub_to_bip32(*args):
if len(args) == 1:
args = args[0].split(' ')
    vals = list(map(int, args[34:]))
I1 = ''.join(map(chr, vals[:33]))
I2 = ''.join(map(chr, vals[35:67]))
return bip32_serialize((MAINNET_PUBLIC, 0, b'\x00'*4, 0, I2, I1))
def coinvault_priv_to_bip32(*args):
if len(args) == 1:
args = args[0].split(' ')
    vals = list(map(int, args[34:]))
I2 = ''.join(map(chr, vals[35:67]))
I3 = ''.join(map(chr, vals[72:104]))
return bip32_serialize((MAINNET_PRIVATE, 0, b'\x00'*4, 0, I2, I3+b'\x01'))
def bip32_descend(*args):
if len(args) == 2 and isinstance(args[1], list):
key, path = args
else:
key, path = args[0], map(int, args[1:])
for p in path:
key = bip32_ckd(key, p)
return bip32_extract_key(key)
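# Illustrative sketch, not part of the original module: exercises the BIP32
# helpers above with a placeholder seed. Public and private derivation agree
# for non-hardened children; hardened children require the private key.
def _example_bip32_roundtrip():
    m = bip32_master_key(b'\x01' * 32)     # xprv... from a placeholder seed
    M = bip32_privtopub(m)                 # matching xpub...
    assert bip32_privtopub(bip32_ckd(m, 0)) == bip32_ckd(M, 0)
    child = bip32_ckd(m, 2**31 + 7)        # hardened child m/7'
    return bip32_extract_key(child)        # hex key (with 0x01 compression suffix)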

File diff suppressed because it is too large

View file

@ -1,581 +0,0 @@
#!/usr/bin/python
from .py2specials import *
from .py3specials import *
import binascii
import hashlib
import re
import sys
import os
import base64
import time
import random
import hmac
from .ripemd import *
# Elliptic curve parameters (secp256k1)
P = 2**256 - 2**32 - 977
N = 115792089237316195423570985008687907852837564279074904382605163141518161494337
A = 0
B = 7
Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240
Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424
G = (Gx, Gy)
def change_curve(p, n, a, b, gx, gy):
global P, N, A, B, Gx, Gy, G
P, N, A, B, Gx, Gy = p, n, a, b, gx, gy
G = (Gx, Gy)
def getG():
return G
# Extended Euclidean Algorithm
def inv(a, n):
if a == 0:
return 0
lm, hm = 1, 0
low, high = a % n, n
while low > 1:
r = high//low
nm, new = hm-lm*r, high-low*r
lm, low, hm, high = nm, new, lm, low
return lm % n
# JSON access (for pybtctool convenience)
def access(obj, prop):
if isinstance(obj, dict):
if prop in obj:
return obj[prop]
elif '.' in prop:
return obj[float(prop)]
else:
return obj[int(prop)]
else:
return obj[int(prop)]
def multiaccess(obj, prop):
return [access(o, prop) for o in obj]
def slice(obj, start=0, end=2**200):
return obj[int(start):int(end)]
def count(obj):
return len(obj)
_sum = sum
def sum(obj):
return _sum(obj)
def isinf(p):
return p[0] == 0 and p[1] == 0
def to_jacobian(p):
o = (p[0], p[1], 1)
return o
def jacobian_double(p):
if not p[1]:
return (0, 0, 0)
ysq = (p[1] ** 2) % P
S = (4 * p[0] * ysq) % P
M = (3 * p[0] ** 2 + A * p[2] ** 4) % P
nx = (M**2 - 2 * S) % P
ny = (M * (S - nx) - 8 * ysq ** 2) % P
nz = (2 * p[1] * p[2]) % P
return (nx, ny, nz)
def jacobian_add(p, q):
if not p[1]:
return q
if not q[1]:
return p
U1 = (p[0] * q[2] ** 2) % P
U2 = (q[0] * p[2] ** 2) % P
S1 = (p[1] * q[2] ** 3) % P
S2 = (q[1] * p[2] ** 3) % P
if U1 == U2:
if S1 != S2:
return (0, 0, 1)
return jacobian_double(p)
H = U2 - U1
R = S2 - S1
H2 = (H * H) % P
H3 = (H * H2) % P
U1H2 = (U1 * H2) % P
nx = (R ** 2 - H3 - 2 * U1H2) % P
ny = (R * (U1H2 - nx) - S1 * H3) % P
nz = (H * p[2] * q[2]) % P
return (nx, ny, nz)
def from_jacobian(p):
z = inv(p[2], P)
return ((p[0] * z**2) % P, (p[1] * z**3) % P)
def jacobian_multiply(a, n):
if a[1] == 0 or n == 0:
return (0, 0, 1)
if n == 1:
return a
if n < 0 or n >= N:
return jacobian_multiply(a, n % N)
if (n % 2) == 0:
return jacobian_double(jacobian_multiply(a, n//2))
if (n % 2) == 1:
return jacobian_add(jacobian_double(jacobian_multiply(a, n//2)), a)
def fast_multiply(a, n):
return from_jacobian(jacobian_multiply(to_jacobian(a), n))
def fast_add(a, b):
return from_jacobian(jacobian_add(to_jacobian(a), to_jacobian(b)))
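# Illustrative sketch, not part of the original module: the Jacobian helpers
# above implement ordinary secp256k1 group arithmetic, so doubling G equals
# adding G to itself, and a public key is simply the scalar product priv*G.
def _example_secp256k1_point_math():
    assert fast_multiply(G, 2) == fast_add(G, G)
    priv = 0xc0ffee                        # arbitrary placeholder scalar
    return fast_multiply(G, priv)          # the matching public point (x, y)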
# Functions for handling pubkey and privkey formats
def get_pubkey_format(pub):
if is_python2:
two = '\x02'
three = '\x03'
four = '\x04'
else:
two = 2
three = 3
four = 4
if isinstance(pub, (tuple, list)): return 'decimal'
elif len(pub) == 65 and pub[0] == four: return 'bin'
elif len(pub) == 130 and pub[0:2] == '04': return 'hex'
elif len(pub) == 33 and pub[0] in [two, three]: return 'bin_compressed'
elif len(pub) == 66 and pub[0:2] in ['02', '03']: return 'hex_compressed'
elif len(pub) == 64: return 'bin_electrum'
elif len(pub) == 128: return 'hex_electrum'
else: raise Exception("Pubkey not in recognized format")
def encode_pubkey(pub, formt):
if not isinstance(pub, (tuple, list)):
pub = decode_pubkey(pub)
if formt == 'decimal': return pub
elif formt == 'bin': return b'\x04' + encode(pub[0], 256, 32) + encode(pub[1], 256, 32)
elif formt == 'bin_compressed':
return from_int_to_byte(2+(pub[1] % 2)) + encode(pub[0], 256, 32)
elif formt == 'hex': return '04' + encode(pub[0], 16, 64) + encode(pub[1], 16, 64)
elif formt == 'hex_compressed':
return '0'+str(2+(pub[1] % 2)) + encode(pub[0], 16, 64)
elif formt == 'bin_electrum': return encode(pub[0], 256, 32) + encode(pub[1], 256, 32)
elif formt == 'hex_electrum': return encode(pub[0], 16, 64) + encode(pub[1], 16, 64)
else: raise Exception("Invalid format!")
def decode_pubkey(pub, formt=None):
if not formt: formt = get_pubkey_format(pub)
if formt == 'decimal': return pub
elif formt == 'bin': return (decode(pub[1:33], 256), decode(pub[33:65], 256))
elif formt == 'bin_compressed':
x = decode(pub[1:33], 256)
beta = pow(int(x*x*x+A*x+B), int((P+1)//4), int(P))
y = (P-beta) if ((beta + from_byte_to_int(pub[0])) % 2) else beta
return (x, y)
elif formt == 'hex': return (decode(pub[2:66], 16), decode(pub[66:130], 16))
elif formt == 'hex_compressed':
return decode_pubkey(safe_from_hex(pub), 'bin_compressed')
elif formt == 'bin_electrum':
return (decode(pub[:32], 256), decode(pub[32:64], 256))
elif formt == 'hex_electrum':
return (decode(pub[:64], 16), decode(pub[64:128], 16))
else: raise Exception("Invalid format!")
def get_privkey_format(priv):
if isinstance(priv, int_types): return 'decimal'
elif len(priv) == 32: return 'bin'
elif len(priv) == 33: return 'bin_compressed'
elif len(priv) == 64: return 'hex'
elif len(priv) == 66: return 'hex_compressed'
else:
bin_p = b58check_to_bin(priv)
if len(bin_p) == 32: return 'wif'
elif len(bin_p) == 33: return 'wif_compressed'
else: raise Exception("WIF does not represent privkey")
def encode_privkey(priv, formt, vbyte=0):
if not isinstance(priv, int_types):
return encode_privkey(decode_privkey(priv), formt, vbyte)
if formt == 'decimal': return priv
elif formt == 'bin': return encode(priv, 256, 32)
elif formt == 'bin_compressed': return encode(priv, 256, 32)+b'\x01'
elif formt == 'hex': return encode(priv, 16, 64)
elif formt == 'hex_compressed': return encode(priv, 16, 64)+'01'
elif formt == 'wif':
return bin_to_b58check(encode(priv, 256, 32), 128+int(vbyte))
elif formt == 'wif_compressed':
return bin_to_b58check(encode(priv, 256, 32)+b'\x01', 128+int(vbyte))
else: raise Exception("Invalid format!")
def decode_privkey(priv,formt=None):
if not formt: formt = get_privkey_format(priv)
if formt == 'decimal': return priv
elif formt == 'bin': return decode(priv, 256)
elif formt == 'bin_compressed': return decode(priv[:32], 256)
elif formt == 'hex': return decode(priv, 16)
elif formt == 'hex_compressed': return decode(priv[:64], 16)
elif formt == 'wif': return decode(b58check_to_bin(priv),256)
elif formt == 'wif_compressed':
return decode(b58check_to_bin(priv)[:32],256)
else: raise Exception("WIF does not represent privkey")
def add_pubkeys(p1, p2):
f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2)
return encode_pubkey(fast_add(decode_pubkey(p1, f1), decode_pubkey(p2, f2)), f1)
def add_privkeys(p1, p2):
f1, f2 = get_privkey_format(p1), get_privkey_format(p2)
return encode_privkey((decode_privkey(p1, f1) + decode_privkey(p2, f2)) % N, f1)
def mul_privkeys(p1, p2):
f1, f2 = get_privkey_format(p1), get_privkey_format(p2)
return encode_privkey((decode_privkey(p1, f1) * decode_privkey(p2, f2)) % N, f1)
def multiply(pubkey, privkey):
f1, f2 = get_pubkey_format(pubkey), get_privkey_format(privkey)
pubkey, privkey = decode_pubkey(pubkey, f1), decode_privkey(privkey, f2)
# http://safecurves.cr.yp.to/twist.html
if not isinf(pubkey) and (pubkey[0]**3+B-pubkey[1]*pubkey[1]) % P != 0:
raise Exception("Point not on curve")
return encode_pubkey(fast_multiply(pubkey, privkey), f1)
def divide(pubkey, privkey):
factor = inv(decode_privkey(privkey), N)
return multiply(pubkey, factor)
def compress(pubkey):
f = get_pubkey_format(pubkey)
if 'compressed' in f: return pubkey
elif f == 'bin': return encode_pubkey(decode_pubkey(pubkey, f), 'bin_compressed')
elif f == 'hex' or f == 'decimal':
return encode_pubkey(decode_pubkey(pubkey, f), 'hex_compressed')
def decompress(pubkey):
f = get_pubkey_format(pubkey)
if 'compressed' not in f: return pubkey
elif f == 'bin_compressed': return encode_pubkey(decode_pubkey(pubkey, f), 'bin')
elif f == 'hex_compressed' or f == 'decimal':
return encode_pubkey(decode_pubkey(pubkey, f), 'hex')
def privkey_to_pubkey(privkey):
f = get_privkey_format(privkey)
privkey = decode_privkey(privkey, f)
if privkey >= N:
raise Exception("Invalid privkey")
if f in ['bin', 'bin_compressed', 'hex', 'hex_compressed', 'decimal']:
return encode_pubkey(fast_multiply(G, privkey), f)
else:
return encode_pubkey(fast_multiply(G, privkey), f.replace('wif', 'hex'))
privtopub = privkey_to_pubkey
def privkey_to_address(priv, magicbyte=0):
return pubkey_to_address(privkey_to_pubkey(priv), magicbyte)
privtoaddr = privkey_to_address
def neg_pubkey(pubkey):
f = get_pubkey_format(pubkey)
pubkey = decode_pubkey(pubkey, f)
return encode_pubkey((pubkey[0], (P-pubkey[1]) % P), f)
def neg_privkey(privkey):
f = get_privkey_format(privkey)
privkey = decode_privkey(privkey, f)
return encode_privkey((N - privkey) % N, f)
def subtract_pubkeys(p1, p2):
f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2)
k2 = decode_pubkey(p2, f2)
return encode_pubkey(fast_add(decode_pubkey(p1, f1), (k2[0], (P - k2[1]) % P)), f1)
def subtract_privkeys(p1, p2):
f1, f2 = get_privkey_format(p1), get_privkey_format(p2)
k2 = decode_privkey(p2, f2)
return encode_privkey((decode_privkey(p1, f1) - k2) % N, f1)
# Hashes
def bin_hash160(string):
intermed = hashlib.sha256(string).digest()
digest = ''
try:
digest = hashlib.new('ripemd160', intermed).digest()
except:
digest = RIPEMD160(intermed).digest()
return digest
def hash160(string):
return safe_hexlify(bin_hash160(string))
def bin_sha256(string):
binary_data = string if isinstance(string, bytes) else bytes(string, 'utf-8')
return hashlib.sha256(binary_data).digest()
def sha256(string):
return bytes_to_hex_string(bin_sha256(string))
def bin_ripemd160(string):
try:
digest = hashlib.new('ripemd160', string).digest()
except:
digest = RIPEMD160(string).digest()
return digest
def ripemd160(string):
return safe_hexlify(bin_ripemd160(string))
def bin_dbl_sha256(s):
bytes_to_hash = from_string_to_bytes(s)
return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest()
def dbl_sha256(string):
return safe_hexlify(bin_dbl_sha256(string))
def bin_slowsha(string):
string = from_string_to_bytes(string)
orig_input = string
for i in range(100000):
string = hashlib.sha256(string + orig_input).digest()
return string
def slowsha(string):
return safe_hexlify(bin_slowsha(string))
def hash_to_int(x):
if len(x) in [40, 64]:
return decode(x, 16)
return decode(x, 256)
def num_to_var_int(x):
x = int(x)
if x < 253: return from_int_to_byte(x)
elif x < 65536: return from_int_to_byte(253)+encode(x, 256, 2)[::-1]
elif x < 4294967296: return from_int_to_byte(254) + encode(x, 256, 4)[::-1]
else: return from_int_to_byte(255) + encode(x, 256, 8)[::-1]
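# Illustrative sketch, not part of the original module: Bitcoin's varint
# encoding as implemented above uses a single byte below 253, and a
# 0xfd/0xfe/0xff marker followed by a 2-, 4- or 8-byte little-endian value
# otherwise.
def _example_varint():
    assert safe_hexlify(num_to_var_int(250)) == 'fa'
    assert safe_hexlify(num_to_var_int(515)) == 'fd0302'
    return safe_hexlify(num_to_var_int(2**32))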
# WTF, Electrum?
def electrum_sig_hash(message):
padded = b"\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + from_string_to_bytes(message)
return bin_dbl_sha256(padded)
def random_key():
# Gotta be secure after that java.SecureRandom fiasco...
entropy = random_string(32) \
+ str(random.randrange(2**256)) \
+ str(int(time.time() * 1000000))
return sha256(entropy)
def random_electrum_seed():
entropy = os.urandom(32) \
+ str(random.randrange(2**256)) \
+ str(int(time.time() * 1000000))
return sha256(entropy)[:32]
# Encodings
def b58check_to_bin(inp):
leadingzbytes = len(re.match('^1*', inp).group(0))
data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
return data[1:-4]
def get_version_byte(inp):
leadingzbytes = len(re.match('^1*', inp).group(0))
data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
return ord(data[0])
def hex_to_b58check(inp, magicbyte=0):
return bin_to_b58check(binascii.unhexlify(inp), magicbyte)
def b58check_to_hex(inp):
return safe_hexlify(b58check_to_bin(inp))
def pubkey_to_address(pubkey, magicbyte=0):
if isinstance(pubkey, (list, tuple)):
pubkey = encode_pubkey(pubkey, 'bin')
if len(pubkey) in [66, 130]:
return bin_to_b58check(
bin_hash160(binascii.unhexlify(pubkey)), magicbyte)
return bin_to_b58check(bin_hash160(pubkey), magicbyte)
pubtoaddr = pubkey_to_address
def is_privkey(priv):
try:
get_privkey_format(priv)
return True
except:
return False
def is_pubkey(pubkey):
try:
get_pubkey_format(pubkey)
return True
except:
return False
def is_address(addr):
ADDR_RE = re.compile("^[123mn][a-km-zA-HJ-NP-Z0-9]{26,33}$")
return bool(ADDR_RE.match(addr))
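# Illustrative sketch, not part of the original module: round-trips a private
# key through the format and encoding helpers above. The key comes from a
# placeholder string and must never be used for real funds.
def _example_key_formats():
    priv_hex = sha256('placeholder key material, do not use')
    wif = encode_privkey(priv_hex, 'wif_compressed')    # Base58Check WIF
    assert decode_privkey(wif) == decode_privkey(priv_hex)
    pub = privtopub(encode_privkey(priv_hex, 'hex_compressed'))
    return pubtoaddr(pub)                               # compressed-key address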
# ECDSA
def encode_sig(v, r, s):
vb, rb, sb = from_int_to_byte(v), encode(r, 256), encode(s, 256)
result = base64.b64encode(vb+b'\x00'*(32-len(rb))+rb+b'\x00'*(32-len(sb))+sb)
return result if is_python2 else str(result, 'utf-8')
def decode_sig(sig):
bytez = base64.b64decode(sig)
return from_byte_to_int(bytez[0]), decode(bytez[1:33], 256), decode(bytez[33:], 256)
# https://tools.ietf.org/html/rfc6979#section-3.2
def deterministic_generate_k(msghash, priv):
v = b'\x01' * 32
k = b'\x00' * 32
priv = encode_privkey(priv, 'bin')
msghash = encode(hash_to_int(msghash), 256, 32)
k = hmac.new(k, v+b'\x00'+priv+msghash, hashlib.sha256).digest()
v = hmac.new(k, v, hashlib.sha256).digest()
k = hmac.new(k, v+b'\x01'+priv+msghash, hashlib.sha256).digest()
v = hmac.new(k, v, hashlib.sha256).digest()
return decode(hmac.new(k, v, hashlib.sha256).digest(), 256)
def ecdsa_raw_sign(msghash, priv):
z = hash_to_int(msghash)
k = deterministic_generate_k(msghash, priv)
r, y = fast_multiply(G, k)
s = inv(k, N) * (z + r*decode_privkey(priv)) % N
v, r, s = 27+((y % 2) ^ (0 if s * 2 < N else 1)), r, s if s * 2 < N else N - s
if 'compressed' in get_privkey_format(priv):
v += 4
return v, r, s
def ecdsa_sign(msg, priv):
v, r, s = ecdsa_raw_sign(electrum_sig_hash(msg), priv)
sig = encode_sig(v, r, s)
assert ecdsa_verify(msg, sig,
privtopub(priv)), "Bad Sig!\t %s\nv = %d\n,r = %d\ns = %d" % (sig, v, r, s)
return sig
def ecdsa_raw_verify(msghash, vrs, pub):
v, r, s = vrs
    # v (the recovery id) is only meaningful for compact/message signatures;
    # DER-decoded signatures arrive here with v=None and must not be rejected.
    if v is not None and not (27 <= v <= 34):
        return False
w = inv(s, N)
z = hash_to_int(msghash)
u1, u2 = z*w % N, r*w % N
x, y = fast_add(fast_multiply(G, u1), fast_multiply(decode_pubkey(pub), u2))
return bool(r == x and (r % N) and (s % N))
# For Bitcoin Core, msg = addr or msg = "" by default
def ecdsa_verify_addr(msg, sig, addr):
assert is_address(addr)
Q = ecdsa_recover(msg, sig)
magic = get_version_byte(addr)
return (addr == pubtoaddr(Q, int(magic))) or (addr == pubtoaddr(compress(Q), int(magic)))
def ecdsa_verify(msg, sig, pub):
if is_address(pub):
return ecdsa_verify_addr(msg, sig, pub)
return ecdsa_raw_verify(electrum_sig_hash(msg), decode_sig(sig), pub)
def ecdsa_raw_recover(msghash, vrs):
v, r, s = vrs
if not (27 <= v <= 34):
raise ValueError("%d must in range 27-31" % v)
x = r
xcubedaxb = (x*x*x+A*x+B) % P
beta = pow(xcubedaxb, (P+1)//4, P)
y = beta if v % 2 ^ beta % 2 else (P - beta)
# If xcubedaxb is not a quadratic residue, then r cannot be the x coord
# for a point on the curve, and so the sig is invalid
if (xcubedaxb - y*y) % P != 0 or not (r % N) or not (s % N):
return False
z = hash_to_int(msghash)
Gz = jacobian_multiply((Gx, Gy, 1), (N - z) % N)
XY = jacobian_multiply((x, y, 1), s)
Qr = jacobian_add(Gz, XY)
Q = jacobian_multiply(Qr, inv(r, N))
Q = from_jacobian(Q)
# if ecdsa_raw_verify(msghash, vrs, Q):
return Q
# return False
def ecdsa_recover(msg, sig):
v,r,s = decode_sig(sig)
Q = ecdsa_raw_recover(electrum_sig_hash(msg), (v,r,s))
return encode_pubkey(Q, 'hex_compressed') if v >= 31 else encode_pubkey(Q, 'hex')
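# Illustrative sketch, not part of the original module: signs a message with
# the Electrum-style helpers above, verifies it, and recovers the public key
# from the signature alone. The key is a placeholder, not a real one.
def _example_message_signing():
    priv = sha256('placeholder signing key')
    pub = privtopub(priv)
    sig = ecdsa_sign("hello", priv)
    assert ecdsa_verify("hello", sig, pub)
    assert ecdsa_recover("hello", sig) == pub
    return sig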

View file

@ -1,127 +0,0 @@
import hashlib
import os.path
import binascii
import random
from bisect import bisect_left
wordlist_english=list(open(os.path.join(os.path.dirname(os.path.realpath(__file__)),'english.txt'),'r'))
def eint_to_bytes(entint,entbits):
    a=hex(entint)[2:].rstrip('L').zfill(entbits//4)
    return binascii.unhexlify(a)
def mnemonic_int_to_words(mint,mint_num_words,wordlist=wordlist_english):
backwords=[wordlist[(mint >> (11*x)) & 0x7FF].strip() for x in range(mint_num_words)]
return backwords[::-1]
def entropy_cs(entbytes):
entropy_size=8*len(entbytes)
checksum_size=entropy_size//32
hd=hashlib.sha256(entbytes).hexdigest()
csint=int(hd,16) >> (256-checksum_size)
return csint,checksum_size
def entropy_to_words(entbytes,wordlist=wordlist_english):
if(len(entbytes) < 4 or len(entbytes) % 4 != 0):
raise ValueError("The size of the entropy must be a multiple of 4 bytes (multiple of 32 bits)")
entropy_size=8*len(entbytes)
csint,checksum_size = entropy_cs(entbytes)
entint=int(binascii.hexlify(entbytes),16)
mint=(entint << checksum_size) | csint
mint_num_words=(entropy_size+checksum_size)//11
return mnemonic_int_to_words(mint,mint_num_words,wordlist)
def words_bisect(word,wordlist=wordlist_english):
lo=bisect_left(wordlist,word)
hi=len(wordlist)-bisect_left(wordlist[:lo:-1],word)
return lo,hi
def words_split(wordstr,wordlist=wordlist_english):
def popword(wordstr,wordlist):
for fwl in range(1,9):
w=wordstr[:fwl].strip()
lo,hi=words_bisect(w,wordlist)
if(hi-lo == 1):
return w,wordstr[fwl:].lstrip()
wordlist=wordlist[lo:hi]
raise Exception("Wordstr %s not found in list" %(w))
words=[]
tail=wordstr
while(len(tail)):
head,tail=popword(tail,wordlist)
words.append(head)
return words
def words_to_mnemonic_int(words,wordlist=wordlist_english):
if(isinstance(words,str)):
words=words_split(words,wordlist)
return sum([wordlist.index(w) << (11*x) for x,w in enumerate(words[::-1])])
def words_verify(words,wordlist=wordlist_english):
if(isinstance(words,str)):
words=words_split(words,wordlist)
mint = words_to_mnemonic_int(words,wordlist)
mint_bits=len(words)*11
cs_bits=mint_bits//32
entropy_bits=mint_bits-cs_bits
eint=mint >> cs_bits
csint=mint & ((1 << cs_bits)-1)
    ebytes=eint_to_bytes(eint,entropy_bits)
    return csint == entropy_cs(ebytes)[0]
def mnemonic_to_seed(mnemonic_phrase,passphrase=b''):
try:
from hashlib import pbkdf2_hmac
def pbkdf2_hmac_sha256(password,salt,iters=2048):
return pbkdf2_hmac(hash_name='sha512',password=password,salt=salt,iterations=iters)
except:
try:
from Crypto.Protocol.KDF import PBKDF2
from Crypto.Hash import SHA512,HMAC
def pbkdf2_hmac_sha256(password,salt,iters=2048):
return PBKDF2(password=password,salt=salt,dkLen=64,count=iters,prf=lambda p,s: HMAC.new(p,s,SHA512).digest())
except:
try:
from pbkdf2 import PBKDF2
import hmac
def pbkdf2_hmac_sha256(password,salt,iters=2048):
return PBKDF2(password,salt, iterations=iters, macmodule=hmac, digestmodule=hashlib.sha512).read(64)
except:
raise RuntimeError("No implementation of pbkdf2 was found!")
return pbkdf2_hmac_sha256(password=mnemonic_phrase,salt=b'mnemonic'+passphrase)
def words_mine(prefix,entbits,satisfunction,wordlist=wordlist_english,randombits=random.getrandbits):
prefix_bits=len(prefix)*11
mine_bits=entbits-prefix_bits
pint=words_to_mnemonic_int(prefix,wordlist)
pint<<=mine_bits
dint=randombits(mine_bits)
count=0
while(not satisfunction(entropy_to_words(eint_to_bytes(pint+dint,entbits)))):
dint=randombits(mine_bits)
if((count & 0xFFFF) == 0):
print("Searched %f percent of the space" % (float(count)/float(1 << mine_bits)))
return entropy_to_words(eint_to_bytes(pint+dint,entbits))
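# Illustrative sketch, not part of the original module: 16 bytes of entropy
# map to a 12-word mnemonic, which is stretched into a 64-byte seed. The
# all-zero entropy and the TREZOR passphrase mirror the test vectors used in
# the __main__ block below.
def _example_bip39_mnemonic():
    entropy = b'\x00' * 16
    words = entropy_to_words(entropy)
    assert len(words) == 12 and words_verify(words)
    return mnemonic_to_seed(' '.join(words).encode(), passphrase=b'TREZOR')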
if __name__=="__main__":
import json
testvectors=json.load(open('vectors.json','r'))
passed=True
for v in testvectors['english']:
ebytes=binascii.unhexlify(v[0])
w=' '.join(entropy_to_words(ebytes))
        seed=mnemonic_to_seed(w.encode(),passphrase=b'TREZOR')
        passed = passed and w==v[1]
        passed = passed and binascii.hexlify(seed).decode()==v[2]
print("Tests %s." % ("Passed" if passed else "Failed"))

View file

@ -1,98 +0,0 @@
import sys, re
import binascii
import os
import hashlib
if sys.version_info.major == 2:
string_types = (str, unicode)
string_or_bytes_types = string_types
int_types = (int, float, long)
# Base switching
code_strings = {
2: '01',
10: '0123456789',
16: '0123456789abcdef',
32: 'abcdefghijklmnopqrstuvwxyz234567',
58: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz',
256: ''.join([chr(x) for x in range(256)])
}
def bin_dbl_sha256(s):
bytes_to_hash = from_string_to_bytes(s)
return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest()
def lpad(msg, symbol, length):
if len(msg) >= length:
return msg
return symbol * (length - len(msg)) + msg
def get_code_string(base):
if base in code_strings:
return code_strings[base]
else:
raise ValueError("Invalid base!")
def changebase(string, frm, to, minlen=0):
if frm == to:
return lpad(string, get_code_string(frm)[0], minlen)
return encode(decode(string, frm), to, minlen)
def bin_to_b58check(inp, magicbyte=0):
if magicbyte == 0:
inp = '\x00' + inp
while magicbyte > 0:
inp = chr(int(magicbyte % 256)) + inp
magicbyte //= 256
leadingzbytes = len(re.match('^\x00*', inp).group(0))
checksum = bin_dbl_sha256(inp)[:4]
return '1' * leadingzbytes + changebase(inp+checksum, 256, 58)
def bytes_to_hex_string(b):
return b.encode('hex')
def safe_from_hex(s):
return s.decode('hex')
def from_int_representation_to_bytes(a):
return str(a)
def from_int_to_byte(a):
return chr(a)
def from_byte_to_int(a):
return ord(a)
def from_bytes_to_string(s):
return s
def from_string_to_bytes(a):
return a
def safe_hexlify(a):
return binascii.hexlify(a)
def encode(val, base, minlen=0):
base, minlen = int(base), int(minlen)
code_string = get_code_string(base)
result = ""
while val > 0:
result = code_string[val % base] + result
val //= base
return code_string[0] * max(minlen - len(result), 0) + result
def decode(string, base):
base = int(base)
code_string = get_code_string(base)
result = 0
if base == 16:
string = string.lower()
while len(string) > 0:
result *= base
result += code_string.find(string[0])
string = string[1:]
return result
def random_string(x):
return os.urandom(x)

View file

@ -1,123 +0,0 @@
import sys, os
import binascii
import hashlib
if sys.version_info.major == 3:
string_types = (str)
string_or_bytes_types = (str, bytes)
int_types = (int, float)
# Base switching
code_strings = {
2: '01',
10: '0123456789',
16: '0123456789abcdef',
32: 'abcdefghijklmnopqrstuvwxyz234567',
58: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz',
256: ''.join([chr(x) for x in range(256)])
}
def bin_dbl_sha256(s):
bytes_to_hash = from_string_to_bytes(s)
return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest()
def lpad(msg, symbol, length):
if len(msg) >= length:
return msg
return symbol * (length - len(msg)) + msg
def get_code_string(base):
if base in code_strings:
return code_strings[base]
else:
raise ValueError("Invalid base!")
def changebase(string, frm, to, minlen=0):
if frm == to:
return lpad(string, get_code_string(frm)[0], minlen)
return encode(decode(string, frm), to, minlen)
def bin_to_b58check(inp, magicbyte=0):
if magicbyte == 0:
inp = from_int_to_byte(0) + inp
while magicbyte > 0:
inp = from_int_to_byte(magicbyte % 256) + inp
magicbyte //= 256
leadingzbytes = 0
for x in inp:
if x != 0:
break
leadingzbytes += 1
checksum = bin_dbl_sha256(inp)[:4]
return '1' * leadingzbytes + changebase(inp+checksum, 256, 58)
def bytes_to_hex_string(b):
if isinstance(b, str):
return b
return ''.join('{:02x}'.format(y) for y in b)
def safe_from_hex(s):
return bytes.fromhex(s)
def from_int_representation_to_bytes(a):
return bytes(str(a), 'utf-8')
def from_int_to_byte(a):
return bytes([a])
def from_byte_to_int(a):
return a
def from_string_to_bytes(a):
return a if isinstance(a, bytes) else bytes(a, 'utf-8')
def safe_hexlify(a):
return str(binascii.hexlify(a), 'utf-8')
def encode(val, base, minlen=0):
base, minlen = int(base), int(minlen)
code_string = get_code_string(base)
result_bytes = bytes()
while val > 0:
curcode = code_string[val % base]
result_bytes = bytes([ord(curcode)]) + result_bytes
val //= base
pad_size = minlen - len(result_bytes)
padding_element = b'\x00' if base == 256 else b'1' \
if base == 58 else b'0'
if (pad_size > 0):
result_bytes = padding_element*pad_size + result_bytes
result_string = ''.join([chr(y) for y in result_bytes])
result = result_bytes if base == 256 else result_string
return result
def decode(string, base):
if base == 256 and isinstance(string, str):
string = bytes(bytearray.fromhex(string))
base = int(base)
code_string = get_code_string(base)
result = 0
if base == 256:
def extract(d, cs):
return d
else:
def extract(d, cs):
return cs.find(d if isinstance(d, str) else chr(d))
if base == 16:
string = string.lower()
while len(string) > 0:
result *= base
result += extract(string[0], code_string)
string = string[1:]
return result
def random_string(x):
return str(os.urandom(x))
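# Illustrative sketch, not part of the original module: a hex -> Base58 -> hex
# round trip through the base-switching helpers above.
def _example_changebase():
    h = 'deadbeef'
    b58 = changebase(h, 16, 58)
    assert changebase(b58, 58, 16, len(h)) == h
    return b58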

View file

@ -1,414 +0,0 @@
## ripemd.py - pure Python implementation of the RIPEMD-160 algorithm.
## Bjorn Edstrom <be@bjrn.se> 16 december 2007.
##
## Copyrights
## ==========
##
## This code is a derived from an implementation by Markus Friedl which is
## subject to the following license. This Python implementation is not
## subject to any other license.
##
##/*
## * Copyright (c) 2001 Markus Friedl. All rights reserved.
## *
## * Redistribution and use in source and binary forms, with or without
## * modification, are permitted provided that the following conditions
## * are met:
## * 1. Redistributions of source code must retain the above copyright
## * notice, this list of conditions and the following disclaimer.
## * 2. Redistributions in binary form must reproduce the above copyright
## * notice, this list of conditions and the following disclaimer in the
## * documentation and/or other materials provided with the distribution.
## *
## * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
## * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
## * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
## * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
## * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
## * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
## * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
## */
##/*
## * Preneel, Bosselaers, Dobbertin, "The Cryptographic Hash Function RIPEMD-160",
## * RSA Laboratories, CryptoBytes, Volume 3, Number 2, Autumn 1997,
## * ftp://ftp.rsasecurity.com/pub/cryptobytes/crypto3n2.pdf
## */
try:
import psyco
psyco.full()
except ImportError:
pass
import sys
is_python2 = sys.version_info.major == 2
#block_size = 1
digest_size = 20
digestsize = 20
try:
range = xrange
except:
pass
class RIPEMD160:
"""Return a new RIPEMD160 object. An optional string argument
may be provided; if present, this string will be automatically
hashed."""
def __init__(self, arg=None):
self.ctx = RMDContext()
if arg:
self.update(arg)
self.dig = None
def update(self, arg):
"""update(arg)"""
RMD160Update(self.ctx, arg, len(arg))
self.dig = None
def digest(self):
"""digest()"""
if self.dig:
return self.dig
ctx = self.ctx.copy()
self.dig = RMD160Final(self.ctx)
self.ctx = ctx
return self.dig
def hexdigest(self):
"""hexdigest()"""
dig = self.digest()
hex_digest = ''
for d in dig:
if (is_python2):
hex_digest += '%02x' % ord(d)
else:
hex_digest += '%02x' % d
return hex_digest
def copy(self):
"""copy()"""
import copy
return copy.deepcopy(self)
def new(arg=None):
"""Return a new RIPEMD160 object. An optional string argument
may be provided; if present, this string will be automatically
hashed."""
return RIPEMD160(arg)
#
# Private.
#
class RMDContext:
def __init__(self):
self.state = [0x67452301, 0xEFCDAB89, 0x98BADCFE,
0x10325476, 0xC3D2E1F0] # uint32
self.count = 0 # uint64
self.buffer = [0]*64 # uchar
def copy(self):
ctx = RMDContext()
ctx.state = self.state[:]
ctx.count = self.count
ctx.buffer = self.buffer[:]
return ctx
K0 = 0x00000000
K1 = 0x5A827999
K2 = 0x6ED9EBA1
K3 = 0x8F1BBCDC
K4 = 0xA953FD4E
KK0 = 0x50A28BE6
KK1 = 0x5C4DD124
KK2 = 0x6D703EF3
KK3 = 0x7A6D76E9
KK4 = 0x00000000
def ROL(n, x):
return ((x << n) & 0xffffffff) | (x >> (32 - n))
def F0(x, y, z):
return x ^ y ^ z
def F1(x, y, z):
return (x & y) | (((~x) % 0x100000000) & z)
def F2(x, y, z):
return (x | ((~y) % 0x100000000)) ^ z
def F3(x, y, z):
return (x & z) | (((~z) % 0x100000000) & y)
def F4(x, y, z):
return x ^ (y | ((~z) % 0x100000000))
def R(a, b, c, d, e, Fj, Kj, sj, rj, X):
a = ROL(sj, (a + Fj(b, c, d) + X[rj] + Kj) % 0x100000000) + e
c = ROL(10, c)
return a % 0x100000000, c
PADDING = [0x80] + [0]*63
import sys
import struct
def RMD160Transform(state, block): #uint32 state[5], uchar block[64]
x = [0]*16
if sys.byteorder == 'little':
if is_python2:
x = struct.unpack('<16L', ''.join([chr(x) for x in block[0:64]]))
else:
x = struct.unpack('<16L', bytes(block[0:64]))
else:
raise "Error!!"
a = state[0]
b = state[1]
c = state[2]
d = state[3]
e = state[4]
#/* Round 1 */
a, c = R(a, b, c, d, e, F0, K0, 11, 0, x);
e, b = R(e, a, b, c, d, F0, K0, 14, 1, x);
d, a = R(d, e, a, b, c, F0, K0, 15, 2, x);
c, e = R(c, d, e, a, b, F0, K0, 12, 3, x);
b, d = R(b, c, d, e, a, F0, K0, 5, 4, x);
a, c = R(a, b, c, d, e, F0, K0, 8, 5, x);
e, b = R(e, a, b, c, d, F0, K0, 7, 6, x);
d, a = R(d, e, a, b, c, F0, K0, 9, 7, x);
c, e = R(c, d, e, a, b, F0, K0, 11, 8, x);
b, d = R(b, c, d, e, a, F0, K0, 13, 9, x);
a, c = R(a, b, c, d, e, F0, K0, 14, 10, x);
e, b = R(e, a, b, c, d, F0, K0, 15, 11, x);
d, a = R(d, e, a, b, c, F0, K0, 6, 12, x);
c, e = R(c, d, e, a, b, F0, K0, 7, 13, x);
b, d = R(b, c, d, e, a, F0, K0, 9, 14, x);
a, c = R(a, b, c, d, e, F0, K0, 8, 15, x); #/* #15 */
#/* Round 2 */
e, b = R(e, a, b, c, d, F1, K1, 7, 7, x);
d, a = R(d, e, a, b, c, F1, K1, 6, 4, x);
c, e = R(c, d, e, a, b, F1, K1, 8, 13, x);
b, d = R(b, c, d, e, a, F1, K1, 13, 1, x);
a, c = R(a, b, c, d, e, F1, K1, 11, 10, x);
e, b = R(e, a, b, c, d, F1, K1, 9, 6, x);
d, a = R(d, e, a, b, c, F1, K1, 7, 15, x);
c, e = R(c, d, e, a, b, F1, K1, 15, 3, x);
b, d = R(b, c, d, e, a, F1, K1, 7, 12, x);
a, c = R(a, b, c, d, e, F1, K1, 12, 0, x);
e, b = R(e, a, b, c, d, F1, K1, 15, 9, x);
d, a = R(d, e, a, b, c, F1, K1, 9, 5, x);
c, e = R(c, d, e, a, b, F1, K1, 11, 2, x);
b, d = R(b, c, d, e, a, F1, K1, 7, 14, x);
a, c = R(a, b, c, d, e, F1, K1, 13, 11, x);
e, b = R(e, a, b, c, d, F1, K1, 12, 8, x); #/* #31 */
#/* Round 3 */
d, a = R(d, e, a, b, c, F2, K2, 11, 3, x);
c, e = R(c, d, e, a, b, F2, K2, 13, 10, x);
b, d = R(b, c, d, e, a, F2, K2, 6, 14, x);
a, c = R(a, b, c, d, e, F2, K2, 7, 4, x);
e, b = R(e, a, b, c, d, F2, K2, 14, 9, x);
d, a = R(d, e, a, b, c, F2, K2, 9, 15, x);
c, e = R(c, d, e, a, b, F2, K2, 13, 8, x);
b, d = R(b, c, d, e, a, F2, K2, 15, 1, x);
a, c = R(a, b, c, d, e, F2, K2, 14, 2, x);
e, b = R(e, a, b, c, d, F2, K2, 8, 7, x);
d, a = R(d, e, a, b, c, F2, K2, 13, 0, x);
c, e = R(c, d, e, a, b, F2, K2, 6, 6, x);
b, d = R(b, c, d, e, a, F2, K2, 5, 13, x);
a, c = R(a, b, c, d, e, F2, K2, 12, 11, x);
e, b = R(e, a, b, c, d, F2, K2, 7, 5, x);
d, a = R(d, e, a, b, c, F2, K2, 5, 12, x); #/* #47 */
#/* Round 4 */
c, e = R(c, d, e, a, b, F3, K3, 11, 1, x);
b, d = R(b, c, d, e, a, F3, K3, 12, 9, x);
a, c = R(a, b, c, d, e, F3, K3, 14, 11, x);
e, b = R(e, a, b, c, d, F3, K3, 15, 10, x);
d, a = R(d, e, a, b, c, F3, K3, 14, 0, x);
c, e = R(c, d, e, a, b, F3, K3, 15, 8, x);
b, d = R(b, c, d, e, a, F3, K3, 9, 12, x);
a, c = R(a, b, c, d, e, F3, K3, 8, 4, x);
e, b = R(e, a, b, c, d, F3, K3, 9, 13, x);
d, a = R(d, e, a, b, c, F3, K3, 14, 3, x);
c, e = R(c, d, e, a, b, F3, K3, 5, 7, x);
b, d = R(b, c, d, e, a, F3, K3, 6, 15, x);
a, c = R(a, b, c, d, e, F3, K3, 8, 14, x);
e, b = R(e, a, b, c, d, F3, K3, 6, 5, x);
d, a = R(d, e, a, b, c, F3, K3, 5, 6, x);
c, e = R(c, d, e, a, b, F3, K3, 12, 2, x); #/* #63 */
#/* Round 5 */
b, d = R(b, c, d, e, a, F4, K4, 9, 4, x);
a, c = R(a, b, c, d, e, F4, K4, 15, 0, x);
e, b = R(e, a, b, c, d, F4, K4, 5, 5, x);
d, a = R(d, e, a, b, c, F4, K4, 11, 9, x);
c, e = R(c, d, e, a, b, F4, K4, 6, 7, x);
b, d = R(b, c, d, e, a, F4, K4, 8, 12, x);
a, c = R(a, b, c, d, e, F4, K4, 13, 2, x);
e, b = R(e, a, b, c, d, F4, K4, 12, 10, x);
d, a = R(d, e, a, b, c, F4, K4, 5, 14, x);
c, e = R(c, d, e, a, b, F4, K4, 12, 1, x);
b, d = R(b, c, d, e, a, F4, K4, 13, 3, x);
a, c = R(a, b, c, d, e, F4, K4, 14, 8, x);
e, b = R(e, a, b, c, d, F4, K4, 11, 11, x);
d, a = R(d, e, a, b, c, F4, K4, 8, 6, x);
c, e = R(c, d, e, a, b, F4, K4, 5, 15, x);
b, d = R(b, c, d, e, a, F4, K4, 6, 13, x); #/* #79 */
aa = a;
bb = b;
cc = c;
dd = d;
ee = e;
a = state[0]
b = state[1]
c = state[2]
d = state[3]
e = state[4]
#/* Parallel round 1 */
a, c = R(a, b, c, d, e, F4, KK0, 8, 5, x)
e, b = R(e, a, b, c, d, F4, KK0, 9, 14, x)
d, a = R(d, e, a, b, c, F4, KK0, 9, 7, x)
c, e = R(c, d, e, a, b, F4, KK0, 11, 0, x)
b, d = R(b, c, d, e, a, F4, KK0, 13, 9, x)
a, c = R(a, b, c, d, e, F4, KK0, 15, 2, x)
e, b = R(e, a, b, c, d, F4, KK0, 15, 11, x)
d, a = R(d, e, a, b, c, F4, KK0, 5, 4, x)
c, e = R(c, d, e, a, b, F4, KK0, 7, 13, x)
b, d = R(b, c, d, e, a, F4, KK0, 7, 6, x)
a, c = R(a, b, c, d, e, F4, KK0, 8, 15, x)
e, b = R(e, a, b, c, d, F4, KK0, 11, 8, x)
d, a = R(d, e, a, b, c, F4, KK0, 14, 1, x)
c, e = R(c, d, e, a, b, F4, KK0, 14, 10, x)
b, d = R(b, c, d, e, a, F4, KK0, 12, 3, x)
a, c = R(a, b, c, d, e, F4, KK0, 6, 12, x) #/* #15 */
#/* Parallel round 2 */
e, b = R(e, a, b, c, d, F3, KK1, 9, 6, x)
d, a = R(d, e, a, b, c, F3, KK1, 13, 11, x)
c, e = R(c, d, e, a, b, F3, KK1, 15, 3, x)
b, d = R(b, c, d, e, a, F3, KK1, 7, 7, x)
a, c = R(a, b, c, d, e, F3, KK1, 12, 0, x)
e, b = R(e, a, b, c, d, F3, KK1, 8, 13, x)
d, a = R(d, e, a, b, c, F3, KK1, 9, 5, x)
c, e = R(c, d, e, a, b, F3, KK1, 11, 10, x)
b, d = R(b, c, d, e, a, F3, KK1, 7, 14, x)
a, c = R(a, b, c, d, e, F3, KK1, 7, 15, x)
e, b = R(e, a, b, c, d, F3, KK1, 12, 8, x)
d, a = R(d, e, a, b, c, F3, KK1, 7, 12, x)
c, e = R(c, d, e, a, b, F3, KK1, 6, 4, x)
b, d = R(b, c, d, e, a, F3, KK1, 15, 9, x)
a, c = R(a, b, c, d, e, F3, KK1, 13, 1, x)
e, b = R(e, a, b, c, d, F3, KK1, 11, 2, x) #/* #31 */
#/* Parallel round 3 */
d, a = R(d, e, a, b, c, F2, KK2, 9, 15, x)
c, e = R(c, d, e, a, b, F2, KK2, 7, 5, x)
b, d = R(b, c, d, e, a, F2, KK2, 15, 1, x)
a, c = R(a, b, c, d, e, F2, KK2, 11, 3, x)
e, b = R(e, a, b, c, d, F2, KK2, 8, 7, x)
d, a = R(d, e, a, b, c, F2, KK2, 6, 14, x)
c, e = R(c, d, e, a, b, F2, KK2, 6, 6, x)
b, d = R(b, c, d, e, a, F2, KK2, 14, 9, x)
a, c = R(a, b, c, d, e, F2, KK2, 12, 11, x)
e, b = R(e, a, b, c, d, F2, KK2, 13, 8, x)
d, a = R(d, e, a, b, c, F2, KK2, 5, 12, x)
c, e = R(c, d, e, a, b, F2, KK2, 14, 2, x)
b, d = R(b, c, d, e, a, F2, KK2, 13, 10, x)
a, c = R(a, b, c, d, e, F2, KK2, 13, 0, x)
e, b = R(e, a, b, c, d, F2, KK2, 7, 4, x)
d, a = R(d, e, a, b, c, F2, KK2, 5, 13, x) #/* #47 */
#/* Parallel round 4 */
c, e = R(c, d, e, a, b, F1, KK3, 15, 8, x)
b, d = R(b, c, d, e, a, F1, KK3, 5, 6, x)
a, c = R(a, b, c, d, e, F1, KK3, 8, 4, x)
e, b = R(e, a, b, c, d, F1, KK3, 11, 1, x)
d, a = R(d, e, a, b, c, F1, KK3, 14, 3, x)
c, e = R(c, d, e, a, b, F1, KK3, 14, 11, x)
b, d = R(b, c, d, e, a, F1, KK3, 6, 15, x)
a, c = R(a, b, c, d, e, F1, KK3, 14, 0, x)
e, b = R(e, a, b, c, d, F1, KK3, 6, 5, x)
d, a = R(d, e, a, b, c, F1, KK3, 9, 12, x)
c, e = R(c, d, e, a, b, F1, KK3, 12, 2, x)
b, d = R(b, c, d, e, a, F1, KK3, 9, 13, x)
a, c = R(a, b, c, d, e, F1, KK3, 12, 9, x)
e, b = R(e, a, b, c, d, F1, KK3, 5, 7, x)
d, a = R(d, e, a, b, c, F1, KK3, 15, 10, x)
c, e = R(c, d, e, a, b, F1, KK3, 8, 14, x) #/* #63 */
#/* Parallel round 5 */
b, d = R(b, c, d, e, a, F0, KK4, 8, 12, x)
a, c = R(a, b, c, d, e, F0, KK4, 5, 15, x)
e, b = R(e, a, b, c, d, F0, KK4, 12, 10, x)
d, a = R(d, e, a, b, c, F0, KK4, 9, 4, x)
c, e = R(c, d, e, a, b, F0, KK4, 12, 1, x)
b, d = R(b, c, d, e, a, F0, KK4, 5, 5, x)
a, c = R(a, b, c, d, e, F0, KK4, 14, 8, x)
e, b = R(e, a, b, c, d, F0, KK4, 6, 7, x)
d, a = R(d, e, a, b, c, F0, KK4, 8, 6, x)
c, e = R(c, d, e, a, b, F0, KK4, 13, 2, x)
b, d = R(b, c, d, e, a, F0, KK4, 6, 13, x)
a, c = R(a, b, c, d, e, F0, KK4, 5, 14, x)
e, b = R(e, a, b, c, d, F0, KK4, 15, 0, x)
d, a = R(d, e, a, b, c, F0, KK4, 13, 3, x)
c, e = R(c, d, e, a, b, F0, KK4, 11, 9, x)
b, d = R(b, c, d, e, a, F0, KK4, 11, 11, x) #/* #79 */
t = (state[1] + cc + d) % 0x100000000;
state[1] = (state[2] + dd + e) % 0x100000000;
state[2] = (state[3] + ee + a) % 0x100000000;
state[3] = (state[4] + aa + b) % 0x100000000;
state[4] = (state[0] + bb + c) % 0x100000000;
state[0] = t % 0x100000000;
pass
def RMD160Update(ctx, inp, inplen):
if type(inp) == str:
inp = [ord(i)&0xff for i in inp]
have = int((ctx.count // 8) % 64)
inplen = int(inplen)
need = 64 - have
ctx.count += 8 * inplen
off = 0
if inplen >= need:
if have:
for i in range(need):
ctx.buffer[have+i] = inp[i]
RMD160Transform(ctx.state, ctx.buffer)
off = need
have = 0
while off + 64 <= inplen:
RMD160Transform(ctx.state, inp[off:]) #<---
off += 64
if off < inplen:
# memcpy(ctx->buffer + have, input+off, len-off);
for i in range(inplen - off):
ctx.buffer[have+i] = inp[off+i]
def RMD160Final(ctx):
size = struct.pack("<Q", ctx.count)
padlen = 64 - ((ctx.count // 8) % 64)
if padlen < 1+8:
padlen += 64
RMD160Update(ctx, PADDING, padlen-8)
RMD160Update(ctx, size, 8)
return struct.pack("<5L", *ctx.state)
assert '37f332f68db77bd9d7edd4969571ad671cf9dd3b' == \
new('The quick brown fox jumps over the lazy dog').hexdigest()
assert '132072df690933835eb8b6ad0b77e7b6f14acad7' == \
new('The quick brown fox jumps over the lazy cog').hexdigest()
assert '9c1185a5c5e9fc54612808977ee8f548b2258d31' == \
new('').hexdigest()

View file

@ -1,100 +0,0 @@
from . import main
from . import transaction as tx
# Shared secrets and uncovering pay keys
def shared_secret_sender(scan_pubkey, ephem_privkey):
shared_point = main.multiply(scan_pubkey, ephem_privkey)
shared_secret = main.sha256(main.encode_pubkey(shared_point, 'bin_compressed'))
return shared_secret
def shared_secret_receiver(ephem_pubkey, scan_privkey):
shared_point = main.multiply(ephem_pubkey, scan_privkey)
shared_secret = main.sha256(main.encode_pubkey(shared_point, 'bin_compressed'))
return shared_secret
def uncover_pay_pubkey_sender(scan_pubkey, spend_pubkey, ephem_privkey):
shared_secret = shared_secret_sender(scan_pubkey, ephem_privkey)
return main.add_pubkeys(spend_pubkey, main.privtopub(shared_secret))
def uncover_pay_pubkey_receiver(scan_privkey, spend_pubkey, ephem_pubkey):
shared_secret = shared_secret_receiver(ephem_pubkey, scan_privkey)
return main.add_pubkeys(spend_pubkey, main.privtopub(shared_secret))
def uncover_pay_privkey(scan_privkey, spend_privkey, ephem_pubkey):
shared_secret = shared_secret_receiver(ephem_pubkey, scan_privkey)
return main.add_privkeys(spend_privkey, shared_secret)
# Address encoding
# Functions for basic stealth addresses,
# i.e. one scan key, one spend key, no prefix
def pubkeys_to_basic_stealth_address(scan_pubkey, spend_pubkey, magic_byte=42):
# magic_byte = 42 for mainnet, 43 for testnet.
hex_scankey = main.encode_pubkey(scan_pubkey, 'hex_compressed')
hex_spendkey = main.encode_pubkey(spend_pubkey, 'hex_compressed')
hex_data = '00{0:066x}01{1:066x}0100'.format(int(hex_scankey, 16), int(hex_spendkey, 16))
addr = main.hex_to_b58check(hex_data, magic_byte)
return addr
def basic_stealth_address_to_pubkeys(stealth_address):
hex_data = main.b58check_to_hex(stealth_address)
if len(hex_data) != 140:
raise Exception('Stealth address is not of basic type (one scan key, one spend key, no prefix)')
scan_pubkey = hex_data[2:68]
spend_pubkey = hex_data[70:136]
return scan_pubkey, spend_pubkey
# Sending stealth payments
def mk_stealth_metadata_script(ephem_pubkey, nonce):
op_return = '6a'
msg_size = '26'
version = '06'
return op_return + msg_size + version + '{0:08x}'.format(nonce) + main.encode_pubkey(ephem_pubkey, 'hex_compressed')
def mk_stealth_tx_outputs(stealth_addr, value, ephem_privkey, nonce, network='btc'):
scan_pubkey, spend_pubkey = basic_stealth_address_to_pubkeys(stealth_addr)
if network == 'btc':
btc_magic_byte = 42
if stealth_addr != pubkeys_to_basic_stealth_address(scan_pubkey, spend_pubkey, btc_magic_byte):
raise Exception('Invalid btc mainnet stealth address: ' + stealth_addr)
magic_byte_addr = 0
elif network == 'testnet':
testnet_magic_byte = 43
if stealth_addr != pubkeys_to_basic_stealth_address(scan_pubkey, spend_pubkey, testnet_magic_byte):
raise Exception('Invalid testnet stealth address: ' + stealth_addr)
magic_byte_addr = 111
ephem_pubkey = main.privkey_to_pubkey(ephem_privkey)
output0 = {'script': mk_stealth_metadata_script(ephem_pubkey, nonce),
'value': 0}
pay_pubkey = uncover_pay_pubkey_sender(scan_pubkey, spend_pubkey, ephem_privkey)
pay_addr = main.pubkey_to_address(pay_pubkey, magic_byte_addr)
output1 = {'address': pay_addr,
'value': value}
return [output0, output1]
# Receiving stealth payments
def ephem_pubkey_from_tx_script(stealth_tx_script):
if len(stealth_tx_script) != 80:
raise Exception('Wrong format for stealth tx output')
return stealth_tx_script[14:]
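# Illustrative sketch, not part of the original module: sender and receiver
# independently derive the same one-time payment key via the ECDH helpers
# above. All three keys here come from placeholder strings.
def _example_stealth_roundtrip():
    scan_priv, spend_priv = main.sha256('scan (demo)'), main.sha256('spend (demo)')
    scan_pub, spend_pub = main.privtopub(scan_priv), main.privtopub(spend_priv)
    ephem_priv = main.sha256('ephemeral (demo)')
    ephem_pub = main.privtopub(ephem_priv)
    pay_pub = uncover_pay_pubkey_sender(scan_pub, spend_pub, ephem_priv)
    pay_priv = uncover_pay_privkey(scan_priv, spend_priv, ephem_pub)
    assert main.privtopub(pay_priv) == pay_pub
    return main.pubkey_to_address(pay_pub)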

View file

@ -1,514 +0,0 @@
#!/usr/bin/python
import binascii, re, json, copy, sys
from .main import *
from _functools import reduce
### Hex to bin converter and vice versa for objects
def json_is_base(obj, base):
if not is_python2 and isinstance(obj, bytes):
return False
alpha = get_code_string(base)
if isinstance(obj, string_types):
for i in range(len(obj)):
if alpha.find(obj[i]) == -1:
return False
return True
elif isinstance(obj, int_types) or obj is None:
return True
elif isinstance(obj, list):
for i in range(len(obj)):
if not json_is_base(obj[i], base):
return False
return True
else:
for x in obj:
if not json_is_base(obj[x], base):
return False
return True
def json_changebase(obj, changer):
if isinstance(obj, string_or_bytes_types):
return changer(obj)
elif isinstance(obj, int_types) or obj is None:
return obj
elif isinstance(obj, list):
return [json_changebase(x, changer) for x in obj]
return dict((x, json_changebase(obj[x], changer)) for x in obj)
# Transaction serialization and deserialization
def deserialize(tx):
if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
#tx = bytes(bytearray.fromhex(tx))
return json_changebase(deserialize(binascii.unhexlify(tx)),
lambda x: safe_hexlify(x))
# http://stackoverflow.com/questions/4851463/python-closure-write-to-variable-in-parent-scope
# Python's scoping rules are demented, requiring me to make pos an object
# so that it is call-by-reference
pos = [0]
def read_as_int(bytez):
pos[0] += bytez
return decode(tx[pos[0]-bytez:pos[0]][::-1], 256)
def read_var_int():
pos[0] += 1
val = from_byte_to_int(tx[pos[0]-1])
if val < 253:
return val
return read_as_int(pow(2, val - 252))
def read_bytes(bytez):
pos[0] += bytez
return tx[pos[0]-bytez:pos[0]]
def read_var_string():
size = read_var_int()
return read_bytes(size)
obj = {"ins": [], "outs": []}
obj["version"] = read_as_int(4)
ins = read_var_int()
for i in range(ins):
obj["ins"].append({
"outpoint": {
"hash": read_bytes(32)[::-1],
"index": read_as_int(4)
},
"script": read_var_string(),
"sequence": read_as_int(4)
})
outs = read_var_int()
for i in range(outs):
obj["outs"].append({
"value": read_as_int(8),
"script": read_var_string()
})
obj["locktime"] = read_as_int(4)
return obj
def serialize(txobj):
#if isinstance(txobj, bytes):
# txobj = bytes_to_hex_string(txobj)
o = []
if json_is_base(txobj, 16):
json_changedbase = json_changebase(txobj, lambda x: binascii.unhexlify(x))
hexlified = safe_hexlify(serialize(json_changedbase))
return hexlified
o.append(encode(txobj["version"], 256, 4)[::-1])
o.append(num_to_var_int(len(txobj["ins"])))
for inp in txobj["ins"]:
o.append(inp["outpoint"]["hash"][::-1])
o.append(encode(inp["outpoint"]["index"], 256, 4)[::-1])
o.append(num_to_var_int(len(inp["script"]))+(inp["script"] if inp["script"] or is_python2 else bytes()))
o.append(encode(inp["sequence"], 256, 4)[::-1])
o.append(num_to_var_int(len(txobj["outs"])))
for out in txobj["outs"]:
o.append(encode(out["value"], 256, 8)[::-1])
o.append(num_to_var_int(len(out["script"]))+out["script"])
o.append(encode(txobj["locktime"], 256, 4)[::-1])
return ''.join(o) if is_python2 else reduce(lambda x,y: x+y, o, bytes())
# Hashing transactions for signing
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
# this works like SIGHASH_ANYONECANPAY | SIGHASH_ALL, might as well make it explicit while
# we fix the constant
SIGHASH_ANYONECANPAY = 0x81
def signature_form(tx, i, script, hashcode=SIGHASH_ALL):
i, hashcode = int(i), int(hashcode)
if isinstance(tx, string_or_bytes_types):
return serialize(signature_form(deserialize(tx), i, script, hashcode))
newtx = copy.deepcopy(tx)
for inp in newtx["ins"]:
inp["script"] = ""
newtx["ins"][i]["script"] = script
if hashcode == SIGHASH_NONE:
newtx["outs"] = []
elif hashcode == SIGHASH_SINGLE:
newtx["outs"] = newtx["outs"][:len(newtx["ins"])]
for out in newtx["outs"][:len(newtx["ins"]) - 1]:
out['value'] = 2**64 - 1
out['script'] = ""
elif hashcode == SIGHASH_ANYONECANPAY:
newtx["ins"] = [newtx["ins"][i]]
else:
pass
return newtx
# Making the actual signatures
def der_encode_sig(v, r, s):
b1, b2 = safe_hexlify(encode(r, 256)), safe_hexlify(encode(s, 256))
if len(b1) and b1[0] in '89abcdef':
b1 = '00' + b1
if len(b2) and b2[0] in '89abcdef':
b2 = '00' + b2
left = '02'+encode(len(b1)//2, 16, 2)+b1
right = '02'+encode(len(b2)//2, 16, 2)+b2
return '30'+encode(len(left+right)//2, 16, 2)+left+right
def der_decode_sig(sig):
leftlen = decode(sig[6:8], 16)*2
left = sig[8:8+leftlen]
rightlen = decode(sig[10+leftlen:12+leftlen], 16)*2
right = sig[12+leftlen:12+leftlen+rightlen]
return (None, decode(left, 16), decode(right, 16))
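# Illustrative sketch, not part of the original module: a DER encode/decode
# round trip. The recovery id v is not stored in DER, so decoding returns None
# in its place.
def _example_der_roundtrip():
    v, r, s = 27, 12345, 67890
    der = der_encode_sig(v, r, s)
    assert der_decode_sig(der) == (None, r, s)
    return der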
def is_bip66(sig):
"""Checks hex DER sig for BIP66 consistency"""
#https://raw.githubusercontent.com/bitcoin/bips/master/bip-0066.mediawiki
#0x30 [total-len] 0x02 [R-len] [R] 0x02 [S-len] [S] [sighash]
sig = bytearray.fromhex(sig) if re.match('^[0-9a-fA-F]*$', sig) else bytearray(sig)
if (sig[0] == 0x30) and (sig[1] == len(sig)-2): # check if sighash is missing
sig.extend(b"\1") # add SIGHASH_ALL for testing
#assert (sig[-1] & 124 == 0) and (not not sig[-1]), "Bad SIGHASH value"
if len(sig) < 9 or len(sig) > 73: return False
if (sig[0] != 0x30): return False
if (sig[1] != len(sig)-3): return False
rlen = sig[3]
if (5+rlen >= len(sig)): return False
slen = sig[5+rlen]
if (rlen + slen + 7 != len(sig)): return False
if (sig[2] != 0x02): return False
if (rlen == 0): return False
if (sig[4] & 0x80): return False
if (rlen > 1 and (sig[4] == 0x00) and not (sig[5] & 0x80)): return False
if (sig[4+rlen] != 0x02): return False
if (slen == 0): return False
if (sig[rlen+6] & 0x80): return False
if (slen > 1 and (sig[6+rlen] == 0x00) and not (sig[7+rlen] & 0x80)):
return False
return True
def txhash(tx, hashcode=None):
if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
tx = changebase(tx, 16, 256)
if hashcode:
return dbl_sha256(from_string_to_bytes(tx) + encode(int(hashcode), 256, 4)[::-1])
else:
return safe_hexlify(bin_dbl_sha256(tx)[::-1])
def bin_txhash(tx, hashcode=None):
return binascii.unhexlify(txhash(tx, hashcode))
def ecdsa_tx_sign(tx, priv, hashcode=SIGHASH_ALL):
rawsig = ecdsa_raw_sign(bin_txhash(tx, hashcode), priv)
return der_encode_sig(*rawsig)+encode(hashcode, 16, 2)
def ecdsa_tx_verify(tx, sig, pub, hashcode=SIGHASH_ALL):
return ecdsa_raw_verify(bin_txhash(tx, hashcode), der_decode_sig(sig), pub)
def ecdsa_tx_recover(tx, sig, hashcode=SIGHASH_ALL):
z = bin_txhash(tx, hashcode)
_, r, s = der_decode_sig(sig)
left = ecdsa_raw_recover(z, (0, r, s))
right = ecdsa_raw_recover(z, (1, r, s))
return (encode_pubkey(left, 'hex'), encode_pubkey(right, 'hex'))
# Scripts
def mk_pubkey_script(addr):
# Keep the auxiliary functions around for altcoins' sake
return '76a914' + b58check_to_hex(addr) + '88ac'
def mk_scripthash_script(addr):
return 'a914' + b58check_to_hex(addr) + '87'
# Address representation to output script
def address_to_script(addr):
if addr[0] == '3' or addr[0] == '2':
return mk_scripthash_script(addr)
else:
return mk_pubkey_script(addr)
# Output script to address representation
def script_to_address(script, vbyte=0):
if re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(script) == 25:
return bin_to_b58check(script[3:-2], vbyte) # pubkey hash addresses
else:
if vbyte in [111, 196]:
# Testnet
scripthash_byte = 196
elif vbyte == 0:
# Mainnet
scripthash_byte = 5
else:
scripthash_byte = vbyte
# BIP0016 scripthash addresses
return bin_to_b58check(script[2:-1], scripthash_byte)
def p2sh_scriptaddr(script, magicbyte=5):
if re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
return hex_to_b58check(hash160(script), magicbyte)
scriptaddr = p2sh_scriptaddr
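# Illustrative sketch, not part of the original module: a '1...' address maps
# to the standard pay-to-pubkey-hash script and back. The key is a placeholder.
def _example_address_scripts():
    addr = privtoaddr(sha256('placeholder key'))
    spk = address_to_script(addr)          # 76a914{hash160}88ac
    assert script_to_address(spk) == addr
    return spk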
def deserialize_script(script):
if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
return json_changebase(deserialize_script(binascii.unhexlify(script)),
lambda x: safe_hexlify(x))
out, pos = [], 0
while pos < len(script):
code = from_byte_to_int(script[pos])
if code == 0:
out.append(None)
pos += 1
elif code <= 75:
out.append(script[pos+1:pos+1+code])
pos += 1 + code
elif code <= 78:
szsz = pow(2, code - 76)
sz = decode(script[pos+szsz: pos:-1], 256)
out.append(script[pos + 1 + szsz:pos + 1 + szsz + sz])
pos += 1 + szsz + sz
elif code <= 96:
out.append(code - 80)
pos += 1
else:
out.append(code)
pos += 1
return out
def serialize_script_unit(unit):
if isinstance(unit, int):
if unit < 16:
return from_int_to_byte(unit + 80)
else:
return from_int_to_byte(unit)
elif unit is None:
return b'\x00'
else:
if len(unit) <= 75:
return from_int_to_byte(len(unit))+unit
elif len(unit) < 256:
return from_int_to_byte(76)+from_int_to_byte(len(unit))+unit
elif len(unit) < 65536:
return from_int_to_byte(77)+encode(len(unit), 256, 2)[::-1]+unit
else:
return from_int_to_byte(78)+encode(len(unit), 256, 4)[::-1]+unit
if is_python2:
def serialize_script(script):
if json_is_base(script, 16):
return binascii.hexlify(serialize_script(json_changebase(script,
lambda x: binascii.unhexlify(x))))
return ''.join(map(serialize_script_unit, script))
else:
def serialize_script(script):
if json_is_base(script, 16):
return safe_hexlify(serialize_script(json_changebase(script,
lambda x: binascii.unhexlify(x))))
result = bytes()
for b in map(serialize_script_unit, script):
result += b if isinstance(b, bytes) else bytes(b, 'utf-8')
return result
def mk_multisig_script(*args): # [pubs],k or pub1,pub2...pub[n],k
if isinstance(args[0], list):
pubs, k = args[0], int(args[1])
else:
pubs = list(filter(lambda x: len(str(x)) >= 32, args))
k = int(args[len(pubs)])
return serialize_script([k]+pubs+[len(pubs)]+[0xae])
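# Illustrative sketch, not part of the original module: a 2-of-3 multisig
# redeem script and its P2SH address, built from three placeholder keys.
def _example_p2sh_multisig_addr():
    pubs = [privtopub(sha256('placeholder key %d' % i)) for i in range(3)]
    script = mk_multisig_script(pubs, 2)   # 2-of-3 redeem script (hex)
    return p2sh_scriptaddr(script)         # corresponding '3...' address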
# Signing and verifying
def verify_tx_input(tx, i, script, sig, pub):
if re.match('^[0-9a-fA-F]*$', tx):
tx = binascii.unhexlify(tx)
if re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
if not re.match('^[0-9a-fA-F]*$', sig):
sig = safe_hexlify(sig)
hashcode = decode(sig[-2:], 16)
modtx = signature_form(tx, int(i), script, hashcode)
return ecdsa_tx_verify(modtx, sig, pub, hashcode)
def sign(tx, i, priv, hashcode=SIGHASH_ALL):
i = int(i)
    if (not is_python2 and isinstance(tx, bytes)) or not re.match('^[0-9a-fA-F]*$', tx):
return binascii.unhexlify(sign(safe_hexlify(tx), i, priv))
if len(priv) <= 33:
priv = safe_hexlify(priv)
pub = privkey_to_pubkey(priv)
address = pubkey_to_address(pub)
signing_tx = signature_form(tx, i, mk_pubkey_script(address), hashcode)
sig = ecdsa_tx_sign(signing_tx, priv, hashcode)
txobj = deserialize(tx)
txobj["ins"][i]["script"] = serialize_script([sig, pub])
return serialize(txobj)
def signall(tx, priv):
# if priv is a dictionary, assume format is
# { 'txinhash:txinidx' : privkey }
if isinstance(priv, dict):
for e, i in enumerate(deserialize(tx)["ins"]):
k = priv["%s:%d" % (i["outpoint"]["hash"], i["outpoint"]["index"])]
tx = sign(tx, e, k)
else:
for i in range(len(deserialize(tx)["ins"])):
tx = sign(tx, i, priv)
return tx
def multisign(tx, i, script, pk, hashcode=SIGHASH_ALL):
if re.match('^[0-9a-fA-F]*$', tx):
tx = binascii.unhexlify(tx)
if re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
modtx = signature_form(tx, i, script, hashcode)
return ecdsa_tx_sign(modtx, pk, hashcode)
def apply_multisignatures(*args):
# tx,i,script,sigs OR tx,i,script,sig1,sig2...,sig[n]
tx, i, script = args[0], int(args[1]), args[2]
sigs = args[3] if isinstance(args[3], list) else list(args[3:])
if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
script = binascii.unhexlify(script)
sigs = [binascii.unhexlify(x) if x[:2] == '30' else x for x in sigs]
if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
return safe_hexlify(apply_multisignatures(binascii.unhexlify(tx), i, script, sigs))
# Not pushing empty elements on the top of the stack if passing no
# script (in case of bare multisig inputs there is no script)
script_blob = [] if script.__len__() == 0 else [script]
txobj = deserialize(tx)
txobj["ins"][i]["script"] = serialize_script([None]+sigs+script_blob)
return serialize(txobj)
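# Illustrative 2-of-3 flow (not part of the original file), mirroring the
# transaction test below: collect detached signatures with multisign() and merge
# them into the input script with apply_multisignatures(). pub1..pub3 and
# priv1/priv3 are placeholder keys:
#
#   mscript = mk_multisig_script([pub1, pub2, pub3], 2)
#   sig1 = multisign(tx, 0, mscript, priv1)
#   sig3 = multisign(tx, 0, mscript, priv3)
#   tx2 = apply_multisignatures(tx, 0, mscript, [sig1, sig3])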
def is_inp(arg):
return len(arg) > 64 or "output" in arg or "outpoint" in arg
def mktx(*args):
# [in0, in1...],[out0, out1...] or in0, in1 ... out0 out1 ...
ins, outs = [], []
for arg in args:
if isinstance(arg, list):
for a in arg: (ins if is_inp(a) else outs).append(a)
else:
(ins if is_inp(arg) else outs).append(arg)
txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []}
for i in ins:
if isinstance(i, dict) and "outpoint" in i:
txobj["ins"].append(i)
else:
if isinstance(i, dict) and "output" in i:
i = i["output"]
txobj["ins"].append({
"outpoint": {"hash": i[:64], "index": int(i[65:])},
"script": "",
"sequence": 4294967295
})
for o in outs:
if isinstance(o, string_or_bytes_types):
addr = o[:o.find(':')]
val = int(o[o.find(':')+1:])
o = {}
if re.match('^[0-9a-fA-F]*$', addr):
o["script"] = addr
else:
o["address"] = addr
o["value"] = val
outobj = {}
if "address" in o:
outobj["script"] = address_to_script(o["address"])
elif "script" in o:
outobj["script"] = o["script"]
else:
raise Exception("Could not find 'address' or 'script' in output.")
outobj["value"] = o["value"]
txobj["outs"].append(outobj)
return serialize(txobj)
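# Illustrative call (not part of the original file): inputs may be given as
# "txid:vout" strings or dicts carrying an "outpoint"; outputs may be given as
# "address:value" / "scripthex:value" strings or {"address"|"script", "value"}
# dicts. addr and scripthex are placeholders:
#
#   tx = mktx(["01" * 32 + ":1", "23" * 32 + ":2"],
#             [addr + ":40404", {"script": scripthex, "value": 1000}])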
def select(unspent, value):
value = int(value)
high = [u for u in unspent if u["value"] >= value]
high.sort(key=lambda u: u["value"])
low = [u for u in unspent if u["value"] < value]
low.sort(key=lambda u: -u["value"])
if len(high):
return [high[0]]
i, tv = 0, 0
while tv < value and i < len(low):
tv += low[i]["value"]
i += 1
if tv < value:
raise Exception("Not enough funds")
return low[:i]
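# Behaviour sketch (not part of the original file): select() returns the smallest
# single unspent that covers the target on its own; otherwise it accumulates the
# largest-value unspents until the target is reached.
#
#   unspent = [{"output": "01" * 32 + ":0", "value": 5000},
#              {"output": "23" * 32 + ":0", "value": 3000}]
#   select(unspent, 4000)   # -> [the 5000 entry]
#   select(unspent, 7000)   # -> both entries (5000 + 3000 >= 7000)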
# Only takes inputs of the form { "output": blah, "value": foo }
def mksend(*args):
argz, change, fee = args[:-2], args[-2], int(args[-1])
ins, outs = [], []
for arg in argz:
if isinstance(arg, list):
for a in arg:
(ins if is_inp(a) else outs).append(a)
else:
(ins if is_inp(arg) else outs).append(arg)
isum = sum([i["value"] for i in ins])
osum, outputs2 = 0, []
for o in outs:
if isinstance(o, string_types):
o2 = {
"address": o[:o.find(':')],
"value": int(o[o.find(':')+1:])
}
else:
o2 = o
outputs2.append(o2)
osum += o2["value"]
if isum < osum+fee:
raise Exception("Not enough money")
elif isum > osum+fee+5430:
outputs2 += [{"address": change, "value": isum-osum-fee}]
return mktx(ins, outputs2)
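# Illustrative call (not part of the original file): mksend() takes the same
# input/output forms as mktx() plus a change address and a fee; any surplus above
# outputs + fee (beyond the 5430-satoshi threshold used above) goes back to the
# change address. dest_addr and change_addr are placeholders:
#
#   ins = [{"output": "01" * 32 + ":0", "value": 100000}]
#   tx = mksend(ins, [dest_addr + ":30000"], change_addr, 10000)
#   # outputs: 30000 to dest_addr and a 60000 change output to change_addr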

View file

@@ -1,36 +0,0 @@
#!/usr/bin/python
import sys, json, re
from bitcoin import *
if len(sys.argv) == 1:
print "pybtctool <command> <arg1> <arg2> ..."
else:
cmdargs, preargs, kwargs = [], [], {}
i = 2
# Process first arg tag
if sys.argv[1] == '-s':
preargs.extend(re.findall(r'\S\S*', sys.stdin.read()))
elif sys.argv[1] == '-B':
preargs.extend([sys.stdin.read()])
elif sys.argv[1] == '-b':
preargs.extend([sys.stdin.read()[:-1]])
elif sys.argv[1] == '-j':
preargs.extend([json.loads(sys.stdin.read())])
elif sys.argv[1] == '-J':
preargs.extend(json.loads(sys.stdin.read()))
else:
i = 1
while i < len(sys.argv):
if sys.argv[i][:2] == '--':
kwargs[sys.argv[i][2:]] = sys.argv[i+1]
i += 2
else:
cmdargs.append(sys.argv[i])
i += 1
cmd = cmdargs[0]
args = preargs + cmdargs[1:]
o = vars()[cmd](*args, **kwargs)
if isinstance(o, (list, dict)):
print json.dumps(o)
else:
print o
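# Illustrative invocations (not part of the original script; <...> values are
# placeholders). The first positional argument names any function exported by the
# bitcoin package, "-s" splits stdin into extra whitespace-separated arguments,
# and "--name value" pairs are passed as keyword arguments:
#
#   pybtctool random_key
#   pybtctool privtopub <privkey_hex>
#   echo <pubkey_hex> | pybtctool -s pubtoaddr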

View file

@@ -1,17 +0,0 @@
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='bitcoin',
version='1.1.42',
description='Python Bitcoin Tools',
author='Vitalik Buterin',
author_email='vbuterin@gmail.com',
url='http://github.com/vbuterin/pybitcointools',
packages=['bitcoin'],
scripts=['pybtctool'],
include_package_data=True,
data_files=[("", ["LICENSE"]), ("bitcoin", ["bitcoin/english.txt"])],
)

View file

@@ -1,496 +0,0 @@
import json
import os
import random
import unittest
import bitcoin.ripemd as ripemd
from bitcoin import *
class TestECCArithmetic(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('Starting ECC arithmetic tests')
def test_all(self):
for i in range(8):
print('### Round %d' % (i+1))
x, y = random.randrange(2**256), random.randrange(2**256)
self.assertEqual(
multiply(multiply(G, x), y)[0],
multiply(multiply(G, y), x)[0]
)
self.assertEqual(
add_pubkeys(multiply(G, x), multiply(G, y))[0],
multiply(G, add_privkeys(x, y))[0]
)
hx, hy = encode(x % N, 16, 64), encode(y % N, 16, 64)
self.assertEqual(
multiply(multiply(G, hx), hy)[0],
multiply(multiply(G, hy), hx)[0]
)
self.assertEqual(
add_pubkeys(multiply(G, hx), multiply(G, hy))[0],
multiply(G, add_privkeys(hx, hy))[0]
)
self.assertEqual(
b58check_to_hex(pubtoaddr(privtopub(x))),
b58check_to_hex(pubtoaddr(multiply(G, hx), 23))
)
p = privtopub(sha256(str(x)))
if i % 2 == 1:
p = changebase(p, 16, 256)
self.assertEqual(p, decompress(compress(p)))
self.assertEqual(G[0], multiply(divide(G, x), x)[0])
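# (Descriptive note, not in the original file) The assertions above exercise the
# basic secp256k1 group identities: scalar multiplication commutes, adding two
# public keys matches multiplying G by the sum of the private keys, the address
# hash160 is the same regardless of the version byte, compress/decompress
# round-trips, and divide() is the inverse of multiply().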
class TestBases(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('Starting base change tests')
def test_all(self):
data = [
[10, '65535', 16, 'ffff'],
[16, 'deadbeef', 10, '3735928559'],
[10, '0', 16, ''],
[256, b'34567', 10, '219919234615'],
[10, '444', 16, '1bc'],
[256, b'\x03\x04\x05\x06\x07', 10, '12952339975'],
[16, '3132333435', 256, b'12345']
]
for prebase, preval, postbase, postval in data:
self.assertEqual(changebase(preval, prebase, postbase), postval)
for i in range(100):
x = random.randrange(1, 9999999999999999)
frm = random.choice([2, 10, 16, 58, 256])
to = random.choice([2, 10, 16, 58, 256])
self.assertEqual(decode(encode(x, to), to), x)
self.assertEqual(changebase(encode(x, frm), frm, to), encode(x, to))
self.assertEqual(decode(changebase(encode(x, frm), frm, to), to), x)
class TestElectrumWalletInternalConsistency(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('Starting Electrum wallet internal consistency tests')
def test_all(self):
for i in range(3):
seed = sha256(str(random.randrange(2**40)))[:32]
mpk = electrum_mpk(seed)
for i in range(5):
pk = electrum_privkey(seed, i)
pub = electrum_pubkey((mpk, seed)[i % 2], i)
pub2 = privtopub(pk)
self.assertEqual(
pub,
pub2,
'Does not match! Details:\nseed: %s\nmpk: %s\npriv: %s\npub: %s\npub2: %s' % (
seed, mpk, pk, pub, pub2
)
)
class TestRawSignRecover(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("Basic signing and recovery tests")
def test_all(self):
for i in range(20):
k = sha256(str(i))
s = ecdsa_raw_sign('35' * 32, k)
self.assertEqual(
ecdsa_raw_recover('35' * 32, s),
decode_pubkey(privtopub(k))
)
class TestTransactionSignVerify(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("Transaction-style signing and verification tests")
def test_all(self):
alphabet = "1234567890qwertyuiopasdfghjklzxcvbnm"
for i in range(10):
msg = ''.join([random.choice(alphabet) for i in range(random.randrange(20, 200))])
priv = sha256(str(random.randrange(2**256)))
pub = privtopub(priv)
sig = ecdsa_tx_sign(msg, priv)
self.assertTrue(
ecdsa_tx_verify(msg, sig, pub),
"Verification error"
)
self.assertIn(
pub,
ecdsa_tx_recover(msg, sig),
"Recovery failed"
)
class TestSerialize(unittest.TestCase):
def test_serialize(self):
tx = '0100000001239f932c780e517015842f3b02ff765fba97f9f63f9f1bc718b686a56ed9c73400000000fd5d010047304402200c40fa58d3f6d5537a343cf9c8d13bc7470baf1d13867e0de3e535cd6b4354c802200f2b48f67494835b060d0b2ff85657d2ba2d9ea4e697888c8cb580e8658183a801483045022056f488c59849a4259e7cef70fe5d6d53a4bd1c59a195b0577bd81cb76044beca022100a735b319fa66af7b178fc719b93f905961ef4d4446deca8757a90de2106dd98a014cc95241046c7d87fd72caeab48e937f2feca9e9a4bd77f0eff4ebb2dbbb9855c023e334e188d32aaec4632ea4cbc575c037d8101aec73d029236e7b1c2380f3e4ad7edced41046fd41cddf3bbda33a240b417a825cc46555949917c7ccf64c59f42fd8dfe95f34fae3b09ed279c8c5b3530510e8cca6230791102eef9961d895e8db54af0563c410488d618b988efd2511fc1f9c03f11c210808852b07fe46128c1a6b1155aa22cdf4b6802460ba593db2d11c7e6cbe19cedef76b7bcabd05d26fd97f4c5a59b225053aeffffffff0310270000000000001976a914a89733100315c37d228a529853af341a9d290a4588ac409c00000000000017a9142b56f9a4009d9ff99b8f97bea4455cd71135f5dd87409c00000000000017a9142b56f9a4009d9ff99b8f97bea4455cd71135f5dd8700000000'
self.assertEqual(
serialize(deserialize(tx)),
tx,
"Serialize roundtrip failed"
)
def test_serialize_script(self):
script = '47304402200c40fa58d3f6d5537a343cf9c8d13bc7470baf1d13867e0de3e535cd6b4354c802200f2b48f67494835b060d0b2ff85657d2ba2d9ea4e697888c8cb580e8658183a801483045022056f488c59849a4259e7cef70fe5d6d53a4bd1c59a195b0577bd81cb76044beca022100a735b319fa66af7b178fc719b93f905961ef4d4446deca8757a90de2106dd98a014cc95241046c7d87fd72caeab48e937f2feca9e9a4bd77f0eff4ebb2dbbb9855c023e334e188d32aaec4632ea4cbc575c037d8101aec73d029236e7b1c2380f3e4ad7edced41046fd41cddf3bbda33a240b417a825cc46555949917c7ccf64c59f42fd8dfe95f34fae3b09ed279c8c5b3530510e8cca6230791102eef9961d895e8db54af0563c410488d618b988efd2511fc1f9c03f11c210808852b07fe46128c1a6b1155aa22cdf4b6802460ba593db2d11c7e6cbe19cedef76b7bcabd05d26fd97f4c5a59b225053ae'
self.assertEqual(
serialize_script(deserialize_script(script)),
script,
"Script serialize roundtrip failed"
)
class TestTransaction(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("Attempting transaction creation")
# FIXME: I don't know how to write this as a unit test.
# What should be asserted?
def test_all(self):
privs = [sha256(str(random.randrange(2**256))) for x in range(4)]
pubs = [privtopub(priv) for priv in privs]
addresses = [pubtoaddr(pub) for pub in pubs]
mscript = mk_multisig_script(pubs[1:], 2, 3)
msigaddr = p2sh_scriptaddr(mscript)
tx = mktx(['01'*32+':1', '23'*32+':2'], [msigaddr+':20202', addresses[0]+':40404'])
tx1 = sign(tx, 1, privs[0])
sig1 = multisign(tx, 0, mscript, privs[1])
self.assertTrue(verify_tx_input(tx1, 0, mscript, sig1, pubs[1]), "Verification Error")
sig3 = multisign(tx, 0, mscript, privs[3])
self.assertTrue(verify_tx_input(tx1, 0, mscript, sig3, pubs[3]), "Verification Error")
tx2 = apply_multisignatures(tx1, 0, mscript, [sig1, sig3])
print("Outputting transaction: ", tx2)
# https://github.com/vbuterin/pybitcointools/issues/71
def test_multisig(self):
script = mk_multisig_script(["0254236f7d1124fc07600ad3eec5ac47393bf963fbf0608bcce255e685580d16d9",
"03560cad89031c412ad8619398bd43b3d673cb5bdcdac1afc46449382c6a8e0b2b"],
2)
self.assertEqual(p2sh_scriptaddr(script), "33byJBaS5N45RHFcatTSt9ZjiGb6nK4iV3")
self.assertEqual(p2sh_scriptaddr(script, 0x05), "33byJBaS5N45RHFcatTSt9ZjiGb6nK4iV3")
self.assertEqual(p2sh_scriptaddr(script, 5), "33byJBaS5N45RHFcatTSt9ZjiGb6nK4iV3")
self.assertEqual(p2sh_scriptaddr(script, 0xc4), "2MuABMvWTgpZRd4tAG25KW6YzvcoGVZDZYP")
self.assertEqual(p2sh_scriptaddr(script, 196), "2MuABMvWTgpZRd4tAG25KW6YzvcoGVZDZYP")
class TestDeterministicGenerate(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("Beginning RFC6979 deterministic signing tests")
def test_all(self):
# Created with python-ecdsa 0.9
# Code to make your own vectors:
# class gen:
# def order(self): return 115792089237316195423570985008687907852837564279074904382605163141518161494337
# dummy = gen()
# for i in range(10): ecdsa.rfc6979.generate_k(dummy, i, hashlib.sha256, hashlib.sha256(str(i)).digest())
test_vectors = [
32783320859482229023646250050688645858316445811207841524283044428614360139869,
109592113955144883013243055602231029997040992035200230706187150761552110229971,
65765393578006003630736298397268097590176526363988568884298609868706232621488,
85563144787585457107933685459469453513056530050186673491900346620874099325918,
99829559501561741463404068005537785834525504175465914981205926165214632019533,
7755945018790142325513649272940177083855222863968691658328003977498047013576,
81516639518483202269820502976089105897400159721845694286620077204726637043798,
52824159213002398817852821148973968315579759063230697131029801896913602807019,
44033460667645047622273556650595158811264350043302911918907282441675680538675,
32396602643737403620316035551493791485834117358805817054817536312402837398361
]
for i, ti in enumerate(test_vectors):
mine = deterministic_generate_k(bin_sha256(str(i)), encode(i, 256, 32))
self.assertEqual(
ti,
mine,
"Test vector does not match. Details:\n%s\n%s" % (
ti,
mine
)
)
class TestBIP0032(unittest.TestCase):
"""See: https://en.bitcoin.it/wiki/BIP_0032"""
@classmethod
def setUpClass(cls):
print("Beginning BIP0032 tests")
def _full_derive(self, key, chain):
if len(chain) == 0:
return key
elif chain[0] == 'pub':
return self._full_derive(bip32_privtopub(key), chain[1:])
else:
return self._full_derive(bip32_ckd(key, chain[0]), chain[1:])
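# (Descriptive note, not in the original file) A chain like [2**31, 1, 'pub', 2]
# therefore expands to roughly:
#   bip32_ckd(bip32_privtopub(bip32_ckd(bip32_ckd(mk, 2**31), 1)), 2)
# i.e. hardened child 0', child 1, conversion to the extended public key, then
# public child 2.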
def test_all(self):
test_vectors = [
[[], 'xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi'],
[['pub'], 'xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8'],
[[2**31], 'xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7'],
[[2**31, 1], 'xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs'],
[[2**31, 1, 2**31 + 2], 'xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM'],
[[2**31, 1, 2**31 + 2, 'pub', 2, 1000000000], 'xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy']
]
mk = bip32_master_key(safe_from_hex('000102030405060708090a0b0c0d0e0f'))
for tv in test_vectors:
left, right = self._full_derive(mk, tv[0]), tv[1]
self.assertEqual(
left,
right,
r"Test vector does not match. Details: \n%s\n%s\n\%s" % (
tv[0],
[x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(left)],
[x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(right)],
)
)
def test_all_testnet(self):
test_vectors = [
[[], 'tprv8ZgxMBicQKsPeDgjzdC36fs6bMjGApWDNLR9erAXMs5skhMv36j9MV5ecvfavji5khqjWaWSFhN3YcCUUdiKH6isR4Pwy3U5y5egddBr16m'],
[['pub'], 'tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp'],
[[2**31], 'tprv8bxNLu25VazNnppTCP4fyhyCvBHcYtzE3wr3cwYeL4HA7yf6TLGEUdS4QC1vLT63TkjRssqJe4CvGNEC8DzW5AoPUw56D1Ayg6HY4oy8QZ9'],
[[2**31, 1], 'tprv8e8VYgZxtHsSdGrtvdxYaSrryZGiYviWzGWtDDKTGh5NMXAEB8gYSCLHpFCywNs5uqV7ghRjimALQJkRFZnUrLHpzi2pGkwqLtbubgWuQ8q'],
[[2**31, 1, 2**31 + 2], 'tprv8gjmbDPpbAirVSezBEMuwSu1Ci9EpUJWKokZTYccSZSomNMLytWyLdtDNHRbucNaRJWWHANf9AzEdWVAqahfyRjVMKbNRhBmxAM8EJr7R15'],
[[2**31, 1, 2**31 + 2, 'pub', 2, 1000000000], 'tpubDHNy3kAG39ThyiwwsgoKY4iRenXDRtce8qdCFJZXPMCJg5dsCUHayp84raLTpvyiNA9sXPob5rgqkKvkN8S7MMyXbnEhGJMW64Cf4vFAoaF']
]
mk = bip32_master_key(safe_from_hex('000102030405060708090a0b0c0d0e0f'), TESTNET_PRIVATE)
for tv in test_vectors:
left, right = self._full_derive(mk, tv[0]), tv[1]
self.assertEqual(
left,
right,
r"Test vector does not match. Details:\n%s\n%s\n%s\n\%s" % (
left,
tv[0],
[x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(left)],
[x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(right)],
)
)
def test_extra(self):
master = bip32_master_key(safe_from_hex("000102030405060708090a0b0c0d0e0f"))
# m/0
assert bip32_ckd(master, "0") == "xprv9uHRZZhbkedL37eZEnyrNsQPFZYRAvjy5rt6M1nbEkLSo378x1CQQLo2xxBvREwiK6kqf7GRNvsNEchwibzXaV6i5GcsgyjBeRguXhKsi4R"
assert bip32_privtopub(bip32_ckd(master, "0")) == "xpub68Gmy5EVb2BdFbj2LpWrk1M7obNuaPTpT5oh9QCCo5sRfqSHVYWex97WpDZzszdzHzxXDAzPLVSwybe4uPYkSk4G3gnrPqqkV9RyNzAcNJ1"
# m/1
assert bip32_ckd(master, "1") == "xprv9uHRZZhbkedL4yTpidDvuVfrdUkTbhDHviERRBkbzbNDZeMjWzqzKAdxWhzftGDSxDmBdakjqHiZJbkwiaTEXJdjZAaAjMZEE3PMbMrPJih"
assert bip32_privtopub(bip32_ckd(master, "1")) == "xpub68Gmy5EVb2BdHTYHpekwGdcbBWax19w9HwA2DaADYvuCSSgt4YAErxxSN1KWSnmyqkwRNbnTj3XiUBKmHeC8rTjLRPjSULcDKQQgfgJDppq"
# m/0/0
assert bip32_ckd(bip32_ckd(master, "0"), "0") == "xprv9ww7sMFLzJMzur2oEQDB642fbsMS4q6JRraMVTrM9bTWBq7NDS8ZpmsKVB4YF3mZecqax1fjnsPF19xnsJNfRp4RSyexacULXMKowSACTRc"
assert bip32_privtopub(bip32_ckd(bip32_ckd(master, "0"), "0")) == "xpub6AvUGrnEpfvJ8L7GLRkBTByQ9uBvUHp9o5VxHrFxhvzV4dSWkySpNaBoLR9FpbnwRmTa69yLHF3QfcaxbWT7gWdwws5k4dpmJvqpEuMWwnj"
# m/0'
assert bip32_ckd(master, 2**31) == "xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7"
assert bip32_privtopub(bip32_ckd(master, 2**31)) == "xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw"
# m/1'
assert bip32_ckd(master, 2**31 + 1) == "xprv9uHRZZhk6KAJFszJGW6LoUFq92uL7FvkBhmYiMurCWPHLJZkX2aGvNdRUBNnJu7nv36WnwCN59uNy6sxLDZvvNSgFz3TCCcKo7iutQzpg78"
assert bip32_privtopub(bip32_ckd(master, 2**31 + 1)) == "xpub68Gmy5EdvgibUN4mNXdMAcCZh4jpWiebYvh9WkKTkqvGD6tu4ZtXUAwuKSyF5DFZVmotf9UHFTGqSXo9qyDBSn47RkaN6Aedt9JbL7zcgSL"
# m/1'
assert bip32_ckd(master, 1 + 2**31) == "xprv9uHRZZhk6KAJFszJGW6LoUFq92uL7FvkBhmYiMurCWPHLJZkX2aGvNdRUBNnJu7nv36WnwCN59uNy6sxLDZvvNSgFz3TCCcKo7iutQzpg78"
assert bip32_privtopub(bip32_ckd(master, 1 + 2**31)) == "xpub68Gmy5EdvgibUN4mNXdMAcCZh4jpWiebYvh9WkKTkqvGD6tu4ZtXUAwuKSyF5DFZVmotf9UHFTGqSXo9qyDBSn47RkaN6Aedt9JbL7zcgSL"
# m/0'/0
assert bip32_ckd(bip32_ckd(master, 2**31), "0") == "xprv9wTYmMFdV23N21MM6dLNavSQV7Sj7meSPXx6AV5eTdqqGLjycVjb115Ec5LgRAXscPZgy5G4jQ9csyyZLN3PZLxoM1h3BoPuEJzsgeypdKj"
assert bip32_privtopub(bip32_ckd(bip32_ckd(master, 2**31), "0")) == "xpub6ASuArnXKPbfEVRpCesNx4P939HDXENHkksgxsVG1yNp9958A33qYoPiTN9QrJmWFa2jNLdK84bWmyqTSPGtApP8P7nHUYwxHPhqmzUyeFG"
# m/0'/0'
assert bip32_ckd(bip32_ckd(master, 2**31), 2**31) == "xprv9wTYmMFmpgaLB5Hge4YtaGqCKpsYPTD9vXWSsmdZrNU3Y2i4WoBykm6ZteeCLCCZpGxdHQuqEhM6Gdo2X6CVrQiTw6AAneF9WSkA9ewaxtS"
assert bip32_privtopub(bip32_ckd(bip32_ckd(master, 2**31), 2**31)) == "xpub6ASuArnff48dPZN9k65twQmvsri2nuw1HkS3gA3BQi12Qq3D4LWEJZR3jwCAr1NhsFMcQcBkmevmub6SLP37bNq91SEShXtEGUbX3GhNaGk"
# m/44'/0'/0'/0/0
assert bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(master, 44 + 2**31), 2**31), 2**31), 0), 0) == "xprvA4A9CuBXhdBtCaLxwrw64Jaran4n1rgzeS5mjH47Ds8V67uZS8tTkG8jV3BZi83QqYXPcN4v8EjK2Aof4YcEeqLt688mV57gF4j6QZWdP9U"
assert bip32_privtopub(bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(master, 44 + 2**31), 2**31), 2**31), 0), 0)) == "xpub6H9VcQiRXzkBR4RS3tU6RSXb8ouGRKQr1f1NXfTinCfTxvEhygCiJ4TDLHz1dyQ6d2Vz8Ne7eezkrViwaPo2ZMsNjVtFwvzsQXCDV6HJ3cV"
class TestStartingAddressAndScriptGenerationConsistency(unittest.TestCase):
@classmethod
def setUpClass(cls):
print("Starting address and script generation consistency tests")
def test_all(self):
for i in range(5):
a = privtoaddr(random_key())
self.assertEqual(a, script_to_address(address_to_script(a)))
self.assertEqual(a, script_to_address(address_to_script(a), 0))
self.assertEqual(a, script_to_address(address_to_script(a), 0x00))
b = privtoaddr(random_key(), 5)
self.assertEqual(b, script_to_address(address_to_script(b)))
self.assertEqual(b, script_to_address(address_to_script(b), 0))
self.assertEqual(b, script_to_address(address_to_script(b), 0x00))
self.assertEqual(b, script_to_address(address_to_script(b), 5))
self.assertEqual(b, script_to_address(address_to_script(b), 0x05))
for i in range(5):
a = privtoaddr(random_key(), 0x6f)
self.assertEqual(a, script_to_address(address_to_script(a), 111))
self.assertEqual(a, script_to_address(address_to_script(a), 0x6f))
b = privtoaddr(random_key(), 0xc4)
self.assertEqual(b, script_to_address(address_to_script(b), 111))
self.assertEqual(b, script_to_address(address_to_script(b), 0x6f))
self.assertEqual(b, script_to_address(address_to_script(b), 196))
self.assertEqual(b, script_to_address(address_to_script(b), 0xc4))
class TestRipeMD160PythonBackup(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('Testing the pure python backup for ripemd160')
def test_all(self):
strvec = [
'',
'The quick brown fox jumps over the lazy dog',
'The quick brown fox jumps over the lazy cog',
'Nobody inspects the spammish repetition'
]
target = [
'9c1185a5c5e9fc54612808977ee8f548b2258d31',
'37f332f68db77bd9d7edd4969571ad671cf9dd3b',
'132072df690933835eb8b6ad0b77e7b6f14acad7',
'cc4a5ce1b3df48aec5d22d1f16b894a0b894eccc'
]
hash160target = [
'b472a266d0bd89c13706a4132ccfb16f7c3b9fcb',
'0e3397b4abc7a382b3ea2365883c3c7ca5f07600',
'53e0dacac5249e46114f65cb1f30d156b14e0bdc',
'1c9b7b48049a8f98699bca22a5856c5ef571cd68'
]
for i, s in enumerate(strvec):
digest = ripemd.RIPEMD160(s).digest()
hash160digest = ripemd.RIPEMD160(bin_sha256(s)).digest()
self.assertEqual(bytes_to_hex_string(digest), target[i])
self.assertEqual(bytes_to_hex_string(hash160digest), hash160target[i])
self.assertEqual(bytes_to_hex_string(bin_hash160(from_string_to_bytes(s))), hash160target[i])
self.assertEqual(hash160(from_string_to_bytes(s)), hash160target[i])
class TestScriptVsAddressOutputs(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('Testing script vs address outputs')
def test_all(self):
addr0 = '1Lqgj1ThNfwLgHMp5qJUerYsuUEm8vHmVG'
script0 = '76a914d99f84267d1f90f3e870a5e9d2399918140be61d88ac'
addr1 = '31oSGBBNrpCiENH3XMZpiP6GTC4tad4bMy'
script1 = 'a9140136d001619faba572df2ef3d193a57ad29122d987'
inputs = [{
'output': 'cd6219ea108119dc62fce09698b649efde56eca7ce223a3315e8b431f6280ce7:0',
'value': 158000
}]
outputs = [
[{'address': addr0, 'value': 1000}, {'address': addr1, 'value': 2000}],
[{'script': script0, 'value': 1000}, {'address': addr1, 'value': 2000}],
[{'address': addr0, 'value': 1000}, {'script': script1, 'value': 2000}],
[{'script': script0, 'value': 1000}, {'script': script1, 'value': 2000}],
[addr0 + ':1000', addr1 + ':2000'],
[script0 + ':1000', addr1 + ':2000'],
[addr0 + ':1000', script1 + ':2000'],
[script0 + ':1000', script1 + ':2000']
]
for outs in outputs:
tx_struct = deserialize(mktx(inputs, outs))
self.assertEqual(tx_struct['outs'], outputs[3])
class TestConversions(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.privkey_hex = (
"e9873d79c6d87dc0fb6a5778633389f4453213303da61f20bd67fc233aa33262"
)
cls.privkey_bin = (
b"\xe9\x87=y\xc6\xd8}\xc0\xfbjWxc3\x89\xf4E2\x130=\xa6\x1f \xbdg\xfc#:\xa32b"
)
cls.pubkey_hex = (
"04588d202afcc1ee4ab5254c7847ec25b9a135bbda0f2bc69ee1a714749fd77dc9f88ff2a00d7e752d44cbe16e1ebcf0890b76ec7c78886109dee76ccfc8445424"
)
cls.pubkey_bin = (
b"\x04X\x8d *\xfc\xc1\xeeJ\xb5%LxG\xec%\xb9\xa15\xbb\xda\x0f+\xc6\x9e\xe1\xa7\x14t\x9f\xd7}\xc9\xf8\x8f\xf2\xa0\r~u-D\xcb\xe1n\x1e\xbc\xf0\x89\x0bv\xec|x\x88a\t\xde\xe7l\xcf\xc8DT$"
)
def test_privkey_to_pubkey(self):
pubkey_hex = privkey_to_pubkey(self.privkey_hex)
self.assertEqual(pubkey_hex, self.pubkey_hex)
def test_changebase(self):
self.assertEqual(
self.pubkey_bin,
changebase(
self.pubkey_hex, 16, 256, minlen=len(self.pubkey_bin)
)
)
self.assertEqual(
self.pubkey_hex,
changebase(
self.pubkey_bin, 256, 16, minlen=len(self.pubkey_hex)
)
)
self.assertEqual(
self.privkey_bin,
changebase(
self.privkey_hex, 16, 256, minlen=len(self.privkey_bin)
)
)
self.assertEqual(
self.privkey_hex,
changebase(
self.privkey_bin, 256, 16, minlen=len(self.privkey_hex)
)
)
if __name__ == '__main__':
unittest.main()

View file

@@ -1,92 +0,0 @@
import bitcoin as bc
import sys
import unittest
class TestStealth(unittest.TestCase):
def setUp(self):
if sys.getrecursionlimit() < 1000:
sys.setrecursionlimit(1000)
self.addr = 'vJmtjxSDxNPXL4RNapp9ARdqKz3uJyf1EDGjr1Fgqs9c8mYsVH82h8wvnA4i5rtJ57mr3kor1EVJrd4e5upACJd588xe52yXtzumxj'
self.scan_pub = '025e58a31122b38c86abc119b9379fe247410aee87a533f9c07b189aef6c3c1f52'
self.scan_priv = '3e49e7257cb31db997edb1cf8299af0f37e2663e2260e4b8033e49d39a6d02f2'
self.spend_pub = '03616562c98e7d7b74be409a787cec3a912122f3fb331a9bee9b0b73ce7b9f50af'
self.spend_priv = 'aa3db0cfb3edc94de4d10f873f8190843f2a17484f6021a95a7742302c744748'
self.ephem_pub = '03403d306ec35238384c7e340393335f9bc9bb4a2e574eb4e419452c4ea19f14b0'
self.ephem_priv = '9e63abaf8dcd5ea3919e6de0b6c544e00bf51bf92496113a01d6e369944dc091'
self.shared_secret = 'a4047ee231f4121e3a99a3a3378542e34a384b865a9917789920e1f13ffd91c6'
self.pay_pub = '02726112ad39cb6bf848b1b1ef30b88e35286bf99f746c2be575f96c0e02a9357c'
self.pay_priv = '4e422fb1e5e1db6c1f6ab32a7706d368ceb385e7fab098e633c5c5949c3b97cd'
self.testnet_addr = 'waPUuLLykSnY3itzf1AyrQZm42F7KyB7SR5zpfqmnzPXWhx9kXLzV3EcyqzDdpTwngiyCCMUqztS9S1d7XJs3JMt3MsHPDpBCudvx9'
def test_address_encoding(self):
sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.addr)
self.assertEqual(sc_pub, self.scan_pub)
self.assertEqual(sp_pub, self.spend_pub)
stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub)
self.assertEqual(stealth_addr2, self.addr)
magic_byte_testnet = 43
sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.testnet_addr)
self.assertEqual(sc_pub, self.scan_pub)
self.assertEqual(sp_pub, self.spend_pub)
stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub, magic_byte_testnet)
self.assertEqual(stealth_addr2, self.testnet_addr)
def test_shared_secret(self):
sh_sec = bc.shared_secret_sender(self.scan_pub, self.ephem_priv)
self.assertEqual(sh_sec, self.shared_secret)
sh_sec2 = bc.shared_secret_receiver(self.ephem_pub, self.scan_priv)
self.assertEqual(sh_sec2, self.shared_secret)
def test_uncover_pay_keys(self):
pub = bc.uncover_pay_pubkey_sender(self.scan_pub, self.spend_pub, self.ephem_priv)
pub2 = bc.uncover_pay_pubkey_receiver(self.scan_priv, self.spend_pub, self.ephem_pub)
self.assertEqual(pub, self.pay_pub)
self.assertEqual(pub2, self.pay_pub)
priv = bc.uncover_pay_privkey(self.scan_priv, self.spend_priv, self.ephem_pub)
self.assertEqual(priv, self.pay_priv)
def test_stealth_metadata_script(self):
nonce = int('deadbeef', 16)
script = bc.mk_stealth_metadata_script(self.ephem_pub, nonce)
self.assertEqual(script[6:], 'deadbeef' + self.ephem_pub)
eph_pub = bc.ephem_pubkey_from_tx_script(script)
self.assertEqual(eph_pub, self.ephem_pub)
def test_stealth_tx_outputs(self):
nonce = int('deadbeef', 16)
value = 10**8
outputs = bc.mk_stealth_tx_outputs(self.addr, value, self.ephem_priv, nonce)
self.assertEqual(outputs[0]['value'], 0)
self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub)
self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub))
self.assertEqual(outputs[1]['value'], value)
outputs = bc.mk_stealth_tx_outputs(self.testnet_addr, value, self.ephem_priv, nonce, 'testnet')
self.assertEqual(outputs[0]['value'], 0)
self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub)
self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub, 111))
self.assertEqual(outputs[1]['value'], value)
self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.testnet_addr, value, self.ephem_priv, nonce, 'btc')
self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.addr, value, self.ephem_priv, nonce, 'testnet')
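# (Descriptive note, not in the original file) As the assertions above show,
# mk_stealth_tx_outputs() produces two outputs: a zero-value metadata output whose
# script embeds the nonce and the ephemeral public key, and the actual payment
# output addressed to the uncovered pay public key.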
if __name__ == '__main__':
unittest.main()

View file

@@ -1,674 +0,0 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
View file
@@ -1,67 +0,0 @@
# PyElliptic
PyElliptic is a high-level Python wrapper for the OpenSSL cryptographic library.
Released under the GNU General Public License.
Python 3 compatible. Works on GNU/Linux and Windows.
Requires OpenSSL.
## Features
### Asymmetric cryptography using Elliptic Curve Cryptography (ECC)
* Key agreement: ECDH
* Digital signatures: ECDSA
* Hybrid encryption: ECIES (like RSA)
### Symmetric cryptography
* AES-128 (CBC, OFB, CFB)
* AES-256 (CBC, OFB, CFB)
* Blowfish (CFB and CBC)
* RC4
### Other
* CSPRNG
* HMAC (using SHA512)
* PBKDF2 (SHA256 and SHA512)
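The example below exercises the cipher and ECC classes but not these utility helpers. Here is a minimal sketch of them, assuming the upstream PyElliptic layout in which `hmac_sha512` and the `OpenSSL` binding (whose `rand()` wraps the library CSPRNG) are exported at the package level; names may differ in a bundled copy.
```python
#!/usr/bin/python
# Sketch only: assumes pyelliptic exports OpenSSL and hmac_sha512 as upstream does.
import pyelliptic

# CSPRNG: 32 random bytes taken directly from OpenSSL's RAND_bytes
nonce = pyelliptic.OpenSSL.rand(32)

# HMAC-SHA512 of a message under a secret key
tag = pyelliptic.hmac_sha512(b"secretkey", b"message to authenticate")

# PBKDF2 is exposed as pyelliptic.pbkdf2() upstream; its exact signature is omitted here.
print(len(nonce), len(tag))  # 32 and 64 bytes respectively
```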
## Example
```python
#!/usr/bin/python
import binascii

import pyelliptic

# Symmetric encryption
iv = pyelliptic.Cipher.gen_IV('aes-256-cfb')
ctx = pyelliptic.Cipher(b"secretkey", iv, 1, ciphername='aes-256-cfb')
ciphertext = ctx.update(b'test1')
ciphertext += ctx.update(b'test2')
ciphertext += ctx.final()

ctx2 = pyelliptic.Cipher(b"secretkey", iv, 0, ciphername='aes-256-cfb')
print(ctx2.ciphering(ciphertext))

# Asymmetric encryption
alice = pyelliptic.ECC()  # default curve: sect283r1
bob = pyelliptic.ECC(curve='sect571r1')

ciphertext = alice.encrypt(b"Hello Bob", bob.get_pubkey())
print(bob.decrypt(ciphertext))

signature = bob.sign(b"Hello Alice")
# Alice's job:
print(pyelliptic.ECC(pubkey=bob.get_pubkey()).verify(signature, b"Hello Alice"))

# Error: ECDH key agreement requires both keys to be on the same curve
try:
    key = alice.get_ecdh_key(bob.get_pubkey())
except Exception:
    print("For ECDH key agreement, the keys must be defined on the same curve!")

alice = pyelliptic.ECC(curve='sect571r1')
print(binascii.hexlify(alice.get_ecdh_key(bob.get_pubkey())))
print(binascii.hexlify(bob.get_ecdh_key(alice.get_pubkey())))
```
Some files were not shown because too many files have changed in this diff.