initial commit
This commit is contained in:
Binary file not shown.
BIN
Lambdas/HPIoTConnect-d3e57d2b-a312-42c9-a50c-730acabd0f17.zip
Normal file
BIN
Lambdas/HPIoTConnect-d3e57d2b-a312-42c9-a50c-730acabd0f17.zip
Normal file
Binary file not shown.
BIN
Lambdas/HPIoTDisconnect-d8964309-3c8f-49e8-ab4a-bf87b36b4263.zip
Normal file
BIN
Lambdas/HPIoTDisconnect-d8964309-3c8f-49e8-ab4a-bf87b36b4263.zip
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
Lambdas/HPIoTgetData-35e8720b-7195-449e-aa91-16a0c0597cbd.zip
Normal file
BIN
Lambdas/HPIoTgetData-35e8720b-7195-449e-aa91-16a0c0597cbd.zip
Normal file
Binary file not shown.
17
Lambdas/Websocket Authorizer/ecdsa/__init__.py
Normal file
17
Lambdas/Websocket Authorizer/ecdsa/__init__.py
Normal file
@@ -0,0 +1,17 @@
|
||||
__all__ = ["curves", "der", "ecdsa", "ellipticcurve", "keys", "numbertheory",
|
||||
"test_pyecdsa", "util", "six"]
|
||||
from .keys import SigningKey, VerifyingKey, BadSignatureError, BadDigestError,\
|
||||
MalformedPointError
|
||||
from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1
|
||||
from .der import UnexpectedDER
|
||||
|
||||
_hush_pyflakes = [SigningKey, VerifyingKey, BadSignatureError, BadDigestError,
|
||||
MalformedPointError, UnexpectedDER,
|
||||
NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]
|
||||
del _hush_pyflakes
|
||||
|
||||
# This code comes from http://github.com/warner/python-ecdsa
|
||||
|
||||
from ._version import get_versions
|
||||
__version__ = get_versions()['version']
|
||||
del get_versions
|
||||
11
Lambdas/Websocket Authorizer/ecdsa/_version.py
Normal file
11
Lambdas/Websocket Authorizer/ecdsa/_version.py
Normal file
@@ -0,0 +1,11 @@
|
||||
|
||||
# This file was generated by 'versioneer.py' (0.12) from
|
||||
# revision-control system data, or from the parent directory name of an
|
||||
# unpacked source archive. Distribution tarballs contain a pre-generated copy
|
||||
# of this file.
|
||||
|
||||
version_version = '0.13.3'
|
||||
version_full = '7add2213c992f51267eed8288b560f3f4108a28d'
|
||||
def get_versions(default={}, verbose=False):
|
||||
return {'version': version_version, 'full': version_full}
|
||||
|
||||
46
Lambdas/Websocket Authorizer/ecdsa/curves.py
Normal file
46
Lambdas/Websocket Authorizer/ecdsa/curves.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from __future__ import division
|
||||
|
||||
from . import der, ecdsa
|
||||
|
||||
class UnknownCurveError(Exception):
|
||||
pass
|
||||
|
||||
def orderlen(order):
|
||||
return (1+len("%x"%order))//2 # bytes
|
||||
|
||||
# the NIST curves
|
||||
class Curve:
|
||||
def __init__(self, name, curve, generator, oid, openssl_name=None):
|
||||
self.name = name
|
||||
self.openssl_name = openssl_name # maybe None
|
||||
self.curve = curve
|
||||
self.generator = generator
|
||||
self.order = generator.order()
|
||||
self.baselen = orderlen(self.order)
|
||||
self.verifying_key_length = 2*self.baselen
|
||||
self.signature_length = 2*self.baselen
|
||||
self.oid = oid
|
||||
self.encoded_oid = der.encode_oid(*oid)
|
||||
|
||||
NIST192p = Curve("NIST192p", ecdsa.curve_192, ecdsa.generator_192,
|
||||
(1, 2, 840, 10045, 3, 1, 1), "prime192v1")
|
||||
NIST224p = Curve("NIST224p", ecdsa.curve_224, ecdsa.generator_224,
|
||||
(1, 3, 132, 0, 33), "secp224r1")
|
||||
NIST256p = Curve("NIST256p", ecdsa.curve_256, ecdsa.generator_256,
|
||||
(1, 2, 840, 10045, 3, 1, 7), "prime256v1")
|
||||
NIST384p = Curve("NIST384p", ecdsa.curve_384, ecdsa.generator_384,
|
||||
(1, 3, 132, 0, 34), "secp384r1")
|
||||
NIST521p = Curve("NIST521p", ecdsa.curve_521, ecdsa.generator_521,
|
||||
(1, 3, 132, 0, 35), "secp521r1")
|
||||
SECP256k1 = Curve("SECP256k1", ecdsa.curve_secp256k1, ecdsa.generator_secp256k1,
|
||||
(1, 3, 132, 0, 10), "secp256k1")
|
||||
|
||||
curves = [NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]
|
||||
|
||||
def find_curve(oid_curve):
|
||||
for c in curves:
|
||||
if c.oid == oid_curve:
|
||||
return c
|
||||
raise UnknownCurveError("I don't know about the curve with oid %s."
|
||||
"I only know about these: %s" %
|
||||
(oid_curve, [c.name for c in curves]))
|
||||
231
Lambdas/Websocket Authorizer/ecdsa/der.py
Normal file
231
Lambdas/Websocket Authorizer/ecdsa/der.py
Normal file
@@ -0,0 +1,231 @@
|
||||
from __future__ import division
|
||||
|
||||
import binascii
|
||||
import base64
|
||||
from .six import int2byte, b, integer_types, text_type
|
||||
|
||||
class UnexpectedDER(Exception):
|
||||
pass
|
||||
|
||||
def encode_constructed(tag, value):
|
||||
return int2byte(0xa0+tag) + encode_length(len(value)) + value
|
||||
def encode_integer(r):
|
||||
assert r >= 0 # can't support negative numbers yet
|
||||
h = ("%x" % r).encode()
|
||||
if len(h) % 2:
|
||||
h = b("0") + h
|
||||
s = binascii.unhexlify(h)
|
||||
num = s[0] if isinstance(s[0], integer_types) else ord(s[0])
|
||||
if num <= 0x7f:
|
||||
return b("\x02") + int2byte(len(s)) + s
|
||||
else:
|
||||
# DER integers are two's complement, so if the first byte is
|
||||
# 0x80-0xff then we need an extra 0x00 byte to prevent it from
|
||||
# looking negative.
|
||||
return b("\x02") + int2byte(len(s)+1) + b("\x00") + s
|
||||
|
||||
def encode_bitstring(s):
|
||||
return b("\x03") + encode_length(len(s)) + s
|
||||
def encode_octet_string(s):
|
||||
return b("\x04") + encode_length(len(s)) + s
|
||||
def encode_oid(first, second, *pieces):
|
||||
assert first <= 2
|
||||
assert second <= 39
|
||||
encoded_pieces = [int2byte(40*first+second)] + [encode_number(p)
|
||||
for p in pieces]
|
||||
body = b('').join(encoded_pieces)
|
||||
return b('\x06') + encode_length(len(body)) + body
|
||||
def encode_sequence(*encoded_pieces):
|
||||
total_len = sum([len(p) for p in encoded_pieces])
|
||||
return b('\x30') + encode_length(total_len) + b('').join(encoded_pieces)
|
||||
def encode_number(n):
|
||||
b128_digits = []
|
||||
while n:
|
||||
b128_digits.insert(0, (n & 0x7f) | 0x80)
|
||||
n = n >> 7
|
||||
if not b128_digits:
|
||||
b128_digits.append(0)
|
||||
b128_digits[-1] &= 0x7f
|
||||
return b('').join([int2byte(d) for d in b128_digits])
|
||||
|
||||
def remove_constructed(string):
|
||||
s0 = string[0] if isinstance(string[0], integer_types) else ord(string[0])
|
||||
if (s0 & 0xe0) != 0xa0:
|
||||
raise UnexpectedDER("wanted constructed tag (0xa0-0xbf), got 0x%02x"
|
||||
% s0)
|
||||
tag = s0 & 0x1f
|
||||
length, llen = read_length(string[1:])
|
||||
body = string[1+llen:1+llen+length]
|
||||
rest = string[1+llen+length:]
|
||||
return tag, body, rest
|
||||
|
||||
def remove_sequence(string):
|
||||
if not string:
|
||||
raise UnexpectedDER("Empty string does not encode a sequence")
|
||||
if not string.startswith(b("\x30")):
|
||||
n = string[0] if isinstance(string[0], integer_types) else \
|
||||
ord(string[0])
|
||||
raise UnexpectedDER("wanted type 'sequence' (0x30), got 0x%02x" % n)
|
||||
length, lengthlength = read_length(string[1:])
|
||||
if length > len(string) - 1 - lengthlength:
|
||||
raise UnexpectedDER("Length longer than the provided buffer")
|
||||
endseq = 1+lengthlength+length
|
||||
return string[1+lengthlength:endseq], string[endseq:]
|
||||
|
||||
def remove_octet_string(string):
|
||||
if not string.startswith(b("\x04")):
|
||||
n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
|
||||
raise UnexpectedDER("wanted octetstring (0x04), got 0x%02x" % n)
|
||||
length, llen = read_length(string[1:])
|
||||
body = string[1+llen:1+llen+length]
|
||||
rest = string[1+llen+length:]
|
||||
return body, rest
|
||||
|
||||
def remove_object(string):
|
||||
if not string.startswith(b("\x06")):
|
||||
n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
|
||||
raise UnexpectedDER("wanted object (0x06), got 0x%02x" % n)
|
||||
length, lengthlength = read_length(string[1:])
|
||||
body = string[1+lengthlength:1+lengthlength+length]
|
||||
rest = string[1+lengthlength+length:]
|
||||
numbers = []
|
||||
while body:
|
||||
n, ll = read_number(body)
|
||||
numbers.append(n)
|
||||
body = body[ll:]
|
||||
n0 = numbers.pop(0)
|
||||
first = n0//40
|
||||
second = n0-(40*first)
|
||||
numbers.insert(0, first)
|
||||
numbers.insert(1, second)
|
||||
return tuple(numbers), rest
|
||||
|
||||
def remove_integer(string):
|
||||
if not string:
|
||||
raise UnexpectedDER("Empty string is an invalid encoding of an "
|
||||
"integer")
|
||||
if not string.startswith(b("\x02")):
|
||||
n = string[0] if isinstance(string[0], integer_types) \
|
||||
else ord(string[0])
|
||||
raise UnexpectedDER("wanted type 'integer' (0x02), got 0x%02x" % n)
|
||||
length, llen = read_length(string[1:])
|
||||
if length > len(string) - 1 - llen:
|
||||
raise UnexpectedDER("Length longer than provided buffer")
|
||||
if length == 0:
|
||||
raise UnexpectedDER("0-byte long encoding of integer")
|
||||
numberbytes = string[1+llen:1+llen+length]
|
||||
rest = string[1+llen+length:]
|
||||
msb = numberbytes[0] if isinstance(numberbytes[0], integer_types) \
|
||||
else ord(numberbytes[0])
|
||||
if not msb < 0x80:
|
||||
raise UnexpectedDER("Negative integers are not supported")
|
||||
# check if the encoding is the minimal one (DER requirement)
|
||||
if length > 1 and not msb:
|
||||
# leading zero byte is allowed if the integer would have been
|
||||
# considered a negative number otherwise
|
||||
smsb = numberbytes[1] if isinstance(numberbytes[1], integer_types) \
|
||||
else ord(numberbytes[1])
|
||||
if smsb < 0x80:
|
||||
raise UnexpectedDER("Invalid encoding of integer, unnecessary "
|
||||
"zero padding bytes")
|
||||
return int(binascii.hexlify(numberbytes), 16), rest
|
||||
|
||||
def read_number(string):
|
||||
number = 0
|
||||
llen = 0
|
||||
# base-128 big endian, with b7 set in all but the last byte
|
||||
while True:
|
||||
if llen > len(string):
|
||||
raise UnexpectedDER("ran out of length bytes")
|
||||
number = number << 7
|
||||
d = string[llen] if isinstance(string[llen], integer_types) else ord(string[llen])
|
||||
number += (d & 0x7f)
|
||||
llen += 1
|
||||
if not d & 0x80:
|
||||
break
|
||||
return number, llen
|
||||
|
||||
def encode_length(l):
|
||||
assert l >= 0
|
||||
if l < 0x80:
|
||||
return int2byte(l)
|
||||
s = ("%x" % l).encode()
|
||||
if len(s)%2:
|
||||
s = b("0")+s
|
||||
s = binascii.unhexlify(s)
|
||||
llen = len(s)
|
||||
return int2byte(0x80|llen) + s
|
||||
|
||||
def read_length(string):
|
||||
if not string:
|
||||
raise UnexpectedDER("Empty string can't encode valid length value")
|
||||
num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
|
||||
if not (num & 0x80):
|
||||
# short form
|
||||
return (num & 0x7f), 1
|
||||
# else long-form: b0&0x7f is number of additional base256 length bytes,
|
||||
# big-endian
|
||||
llen = num & 0x7f
|
||||
if not llen:
|
||||
raise UnexpectedDER("Invalid length encoding, length of length is 0")
|
||||
if llen > len(string)-1:
|
||||
raise UnexpectedDER("Length of length longer than provided buffer")
|
||||
# verify that the encoding is minimal possible (DER requirement)
|
||||
msb = string[1] if isinstance(string[1], integer_types) else ord(string[1])
|
||||
if not msb or llen == 1 and msb < 0x80:
|
||||
raise UnexpectedDER("Not minimal encoding of length")
|
||||
return int(binascii.hexlify(string[1:1+llen]), 16), 1+llen
|
||||
|
||||
def remove_bitstring(string):
|
||||
num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
|
||||
if not string.startswith(b("\x03")):
|
||||
raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num)
|
||||
length, llen = read_length(string[1:])
|
||||
body = string[1+llen:1+llen+length]
|
||||
rest = string[1+llen+length:]
|
||||
return body, rest
|
||||
|
||||
# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BINTSTRING)
|
||||
|
||||
|
||||
# signatures: (from RFC3279)
|
||||
# ansi-X9-62 OBJECT IDENTIFIER ::= {
|
||||
# iso(1) member-body(2) us(840) 10045 }
|
||||
#
|
||||
# id-ecSigType OBJECT IDENTIFIER ::= {
|
||||
# ansi-X9-62 signatures(4) }
|
||||
# ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
|
||||
# id-ecSigType 1 }
|
||||
## so 1,2,840,10045,4,1
|
||||
## so 0x42, .. ..
|
||||
|
||||
# Ecdsa-Sig-Value ::= SEQUENCE {
|
||||
# r INTEGER,
|
||||
# s INTEGER }
|
||||
|
||||
# id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 }
|
||||
#
|
||||
# id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 }
|
||||
|
||||
# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021)
|
||||
# secp224r1 OBJECT IDENTIFIER ::= {
|
||||
# iso(1) identified-organization(3) certicom(132) curve(0) 33 }
|
||||
# and the secp384r1 is (t=06,l=05,v=2b81040022)
|
||||
# secp384r1 OBJECT IDENTIFIER ::= {
|
||||
# iso(1) identified-organization(3) certicom(132) curve(0) 34 }
|
||||
|
||||
def unpem(pem):
|
||||
if isinstance(pem, text_type):
|
||||
pem = pem.encode()
|
||||
|
||||
d = b("").join([l.strip() for l in pem.split(b("\n"))
|
||||
if l and not l.startswith(b("-----"))])
|
||||
return base64.b64decode(d)
|
||||
def topem(der, name):
|
||||
b64 = base64.b64encode(der)
|
||||
lines = [("-----BEGIN %s-----\n" % name).encode()]
|
||||
lines.extend([b64[start:start+64]+b("\n")
|
||||
for start in range(0, len(b64), 64)])
|
||||
lines.append(("-----END %s-----\n" % name).encode())
|
||||
return b("").join(lines)
|
||||
|
||||
576
Lambdas/Websocket Authorizer/ecdsa/ecdsa.py
Normal file
576
Lambdas/Websocket Authorizer/ecdsa/ecdsa.py
Normal file
@@ -0,0 +1,576 @@
|
||||
#! /usr/bin/env python
|
||||
|
||||
"""
|
||||
Implementation of Elliptic-Curve Digital Signatures.
|
||||
|
||||
Classes and methods for elliptic-curve signatures:
|
||||
private keys, public keys, signatures,
|
||||
NIST prime-modulus curves with modulus lengths of
|
||||
192, 224, 256, 384, and 521 bits.
|
||||
|
||||
Example:
|
||||
|
||||
# (In real-life applications, you would probably want to
|
||||
# protect against defects in SystemRandom.)
|
||||
from random import SystemRandom
|
||||
randrange = SystemRandom().randrange
|
||||
|
||||
# Generate a public/private key pair using the NIST Curve P-192:
|
||||
|
||||
g = generator_192
|
||||
n = g.order()
|
||||
secret = randrange( 1, n )
|
||||
pubkey = Public_key( g, g * secret )
|
||||
privkey = Private_key( pubkey, secret )
|
||||
|
||||
# Signing a hash value:
|
||||
|
||||
hash = randrange( 1, n )
|
||||
signature = privkey.sign( hash, randrange( 1, n ) )
|
||||
|
||||
# Verifying a signature for a hash value:
|
||||
|
||||
if pubkey.verifies( hash, signature ):
|
||||
print_("Demo verification succeeded.")
|
||||
else:
|
||||
print_("*** Demo verification failed.")
|
||||
|
||||
# Verification fails if the hash value is modified:
|
||||
|
||||
if pubkey.verifies( hash-1, signature ):
|
||||
print_("**** Demo verification failed to reject tampered hash.")
|
||||
else:
|
||||
print_("Demo verification correctly rejected tampered hash.")
|
||||
|
||||
Version of 2009.05.16.
|
||||
|
||||
Revision history:
|
||||
2005.12.31 - Initial version.
|
||||
2008.11.25 - Substantial revisions introducing new classes.
|
||||
2009.05.16 - Warn against using random.randrange in real applications.
|
||||
2009.05.17 - Use random.SystemRandom by default.
|
||||
|
||||
Written in 2005 by Peter Pearson and placed in the public domain.
|
||||
"""
|
||||
|
||||
from .six import int2byte, b, print_
|
||||
from . import ellipticcurve
|
||||
from . import numbertheory
|
||||
import random
|
||||
|
||||
|
||||
|
||||
class Signature( object ):
|
||||
"""ECDSA signature.
|
||||
"""
|
||||
def __init__( self, r, s ):
|
||||
self.r = r
|
||||
self.s = s
|
||||
|
||||
|
||||
|
||||
class Public_key( object ):
|
||||
"""Public key for ECDSA.
|
||||
"""
|
||||
|
||||
def __init__( self, generator, point ):
|
||||
"""generator is the Point that generates the group,
|
||||
point is the Point that defines the public key.
|
||||
"""
|
||||
|
||||
self.curve = generator.curve()
|
||||
self.generator = generator
|
||||
self.point = point
|
||||
n = generator.order()
|
||||
if not n:
|
||||
raise RuntimeError("Generator point must have order.")
|
||||
if not n * point == ellipticcurve.INFINITY:
|
||||
raise RuntimeError("Generator point order is bad.")
|
||||
if point.x() < 0 or n <= point.x() or point.y() < 0 or n <= point.y():
|
||||
raise RuntimeError("Generator point has x or y out of range.")
|
||||
|
||||
|
||||
def verifies( self, hash, signature ):
|
||||
"""Verify that signature is a valid signature of hash.
|
||||
Return True if the signature is valid.
|
||||
"""
|
||||
|
||||
# From X9.62 J.3.1.
|
||||
|
||||
G = self.generator
|
||||
n = G.order()
|
||||
r = signature.r
|
||||
s = signature.s
|
||||
if r < 1 or r > n-1: return False
|
||||
if s < 1 or s > n-1: return False
|
||||
c = numbertheory.inverse_mod( s, n )
|
||||
u1 = ( hash * c ) % n
|
||||
u2 = ( r * c ) % n
|
||||
xy = u1 * G + u2 * self.point
|
||||
v = xy.x() % n
|
||||
return v == r
|
||||
|
||||
|
||||
|
||||
class Private_key( object ):
|
||||
"""Private key for ECDSA.
|
||||
"""
|
||||
|
||||
def __init__( self, public_key, secret_multiplier ):
|
||||
"""public_key is of class Public_key;
|
||||
secret_multiplier is a large integer.
|
||||
"""
|
||||
|
||||
self.public_key = public_key
|
||||
self.secret_multiplier = secret_multiplier
|
||||
|
||||
def sign( self, hash, random_k ):
|
||||
"""Return a signature for the provided hash, using the provided
|
||||
random nonce. It is absolutely vital that random_k be an unpredictable
|
||||
number in the range [1, self.public_key.point.order()-1]. If
|
||||
an attacker can guess random_k, he can compute our private key from a
|
||||
single signature. Also, if an attacker knows a few high-order
|
||||
bits (or a few low-order bits) of random_k, he can compute our private
|
||||
key from many signatures. The generation of nonces with adequate
|
||||
cryptographic strength is very difficult and far beyond the scope
|
||||
of this comment.
|
||||
|
||||
May raise RuntimeError, in which case retrying with a new
|
||||
random value k is in order.
|
||||
"""
|
||||
|
||||
G = self.public_key.generator
|
||||
n = G.order()
|
||||
k = random_k % n
|
||||
p1 = k * G
|
||||
r = p1.x()
|
||||
if r == 0: raise RuntimeError("amazingly unlucky random number r")
|
||||
s = ( numbertheory.inverse_mod( k, n ) * \
|
||||
( hash + ( self.secret_multiplier * r ) % n ) ) % n
|
||||
if s == 0: raise RuntimeError("amazingly unlucky random number s")
|
||||
return Signature( r, s )
|
||||
|
||||
|
||||
|
||||
def int_to_string( x ):
|
||||
"""Convert integer x into a string of bytes, as per X9.62."""
|
||||
assert x >= 0
|
||||
if x == 0: return b('\0')
|
||||
result = []
|
||||
while x:
|
||||
ordinal = x & 0xFF
|
||||
result.append(int2byte(ordinal))
|
||||
x >>= 8
|
||||
|
||||
result.reverse()
|
||||
return b('').join(result)
|
||||
|
||||
|
||||
def string_to_int( s ):
|
||||
"""Convert a string of bytes into an integer, as per X9.62."""
|
||||
result = 0
|
||||
for c in s:
|
||||
if not isinstance(c, int): c = ord( c )
|
||||
result = 256 * result + c
|
||||
return result
|
||||
|
||||
|
||||
def digest_integer( m ):
|
||||
"""Convert an integer into a string of bytes, compute
|
||||
its SHA-1 hash, and convert the result to an integer."""
|
||||
#
|
||||
# I don't expect this function to be used much. I wrote
|
||||
# it in order to be able to duplicate the examples
|
||||
# in ECDSAVS.
|
||||
#
|
||||
from hashlib import sha1
|
||||
return string_to_int( sha1( int_to_string( m ) ).digest() )
|
||||
|
||||
|
||||
def point_is_valid( generator, x, y ):
|
||||
"""Is (x,y) a valid public key based on the specified generator?"""
|
||||
|
||||
# These are the tests specified in X9.62.
|
||||
|
||||
n = generator.order()
|
||||
curve = generator.curve()
|
||||
if x < 0 or n <= x or y < 0 or n <= y:
|
||||
return False
|
||||
if not curve.contains_point( x, y ):
|
||||
return False
|
||||
if not n*ellipticcurve.Point( curve, x, y ) == \
|
||||
ellipticcurve.INFINITY:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
|
||||
# NIST Curve P-192:
|
||||
_p = 6277101735386680763835789423207666416083908700390324961279
|
||||
_r = 6277101735386680763835789423176059013767194773182842284081
|
||||
# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
|
||||
# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L
|
||||
_b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
|
||||
_Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
|
||||
_Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811
|
||||
|
||||
curve_192 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_192 = ellipticcurve.Point( curve_192, _Gx, _Gy, _r )
|
||||
|
||||
|
||||
# NIST Curve P-224:
|
||||
_p = 26959946667150639794667015087019630673557916260026308143510066298881
|
||||
_r = 26959946667150639794667015087019625940457807714424391721682722368061
|
||||
# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L
|
||||
# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL
|
||||
_b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4
|
||||
_Gx =0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21
|
||||
_Gy = 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34
|
||||
|
||||
curve_224 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_224 = ellipticcurve.Point( curve_224, _Gx, _Gy, _r )
|
||||
|
||||
# NIST Curve P-256:
|
||||
_p = 115792089210356248762697446949407573530086143415290314195533631308867097853951
|
||||
_r = 115792089210356248762697446949407573529996955224135760342422259061068512044369
|
||||
# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L
|
||||
# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL
|
||||
_b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
|
||||
_Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296
|
||||
_Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5
|
||||
|
||||
curve_256 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_256 = ellipticcurve.Point( curve_256, _Gx, _Gy, _r )
|
||||
|
||||
# NIST Curve P-384:
|
||||
_p = 39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319
|
||||
_r = 39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643
|
||||
# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L
|
||||
# c = 0x79d1e655f868f02fff48dcdee14151ddb80643c1406d0ca10dfe6fc52009540a495e8042ea5f744f6e184667cc722483L
|
||||
_b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef
|
||||
_Gx = 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7
|
||||
_Gy = 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f
|
||||
|
||||
curve_384 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_384 = ellipticcurve.Point( curve_384, _Gx, _Gy, _r )
|
||||
|
||||
# NIST Curve P-521:
|
||||
_p = 6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151
|
||||
_r = 6864797660130609714981900799081393217269435300143305409394463459185543183397655394245057746333217197532963996371363321113864768612440380340372808892707005449
|
||||
# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL
|
||||
# c = 0x0b48bfa5f420a34949539d2bdfc264eeeeb077688e44fbf0ad8f6d0edb37bd6b533281000518e19f1b9ffbe0fe9ed8a3c2200b8f875e523868c70c1e5bf55bad637L
|
||||
_b = 0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00
|
||||
_Gx = 0xc6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66
|
||||
_Gy = 0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650
|
||||
|
||||
curve_521 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_521 = ellipticcurve.Point( curve_521, _Gx, _Gy, _r )
|
||||
|
||||
# Certicom secp256-k1
|
||||
_a = 0x0000000000000000000000000000000000000000000000000000000000000000
|
||||
_b = 0x0000000000000000000000000000000000000000000000000000000000000007
|
||||
_p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f
|
||||
_Gx = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
|
||||
_Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
|
||||
_r = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
|
||||
|
||||
curve_secp256k1 = ellipticcurve.CurveFp( _p, _a, _b)
|
||||
generator_secp256k1 = ellipticcurve.Point( curve_secp256k1, _Gx, _Gy, _r)
|
||||
|
||||
|
||||
|
||||
def __main__():
|
||||
class TestFailure(Exception): pass
|
||||
|
||||
def test_point_validity( generator, x, y, expected ):
|
||||
"""generator defines the curve; is (x,y) a point on
|
||||
this curve? "expected" is True if the right answer is Yes."""
|
||||
if point_is_valid( generator, x, y ) == expected:
|
||||
print_("Point validity tested as expected.")
|
||||
else:
|
||||
raise TestFailure("*** Point validity test gave wrong result.")
|
||||
|
||||
def test_signature_validity( Msg, Qx, Qy, R, S, expected ):
|
||||
"""Msg = message, Qx and Qy represent the base point on
|
||||
elliptic curve c192, R and S are the signature, and
|
||||
"expected" is True iff the signature is expected to be valid."""
|
||||
pubk = Public_key( generator_192,
|
||||
ellipticcurve.Point( curve_192, Qx, Qy ) )
|
||||
got = pubk.verifies( digest_integer( Msg ), Signature( R, S ) )
|
||||
if got == expected:
|
||||
print_("Signature tested as expected: got %s, expected %s." % \
|
||||
( got, expected ))
|
||||
else:
|
||||
raise TestFailure("*** Signature test failed: got %s, expected %s." % \
|
||||
( got, expected ))
|
||||
|
||||
print_("NIST Curve P-192:")
|
||||
|
||||
p192 = generator_192
|
||||
|
||||
# From X9.62:
|
||||
|
||||
d = 651056770906015076056810763456358567190100156695615665659
|
||||
Q = d * p192
|
||||
if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
|
||||
raise TestFailure("*** p192 * d came out wrong.")
|
||||
else:
|
||||
print_("p192 * d came out right.")
|
||||
|
||||
k = 6140507067065001063065065565667405560006161556565665656654
|
||||
R = k * p192
|
||||
if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
|
||||
or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
|
||||
raise TestFailure("*** k * p192 came out wrong.")
|
||||
else:
|
||||
print_("k * p192 came out right.")
|
||||
|
||||
u1 = 2563697409189434185194736134579731015366492496392189760599
|
||||
u2 = 6266643813348617967186477710235785849136406323338782220568
|
||||
temp = u1 * p192 + u2 * Q
|
||||
if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
|
||||
or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
|
||||
raise TestFailure("*** u1 * p192 + u2 * Q came out wrong.")
|
||||
else:
|
||||
print_("u1 * p192 + u2 * Q came out right.")
|
||||
|
||||
e = 968236873715988614170569073515315707566766479517
|
||||
pubk = Public_key( generator_192, generator_192 * d )
|
||||
privk = Private_key( pubk, d )
|
||||
sig = privk.sign( e, k )
|
||||
r, s = sig.r, sig.s
|
||||
if r != 3342403536405981729393488334694600415596881826869351677613 \
|
||||
or s != 5735822328888155254683894997897571951568553642892029982342:
|
||||
raise TestFailure("*** r or s came out wrong.")
|
||||
else:
|
||||
print_("r and s came out right.")
|
||||
|
||||
valid = pubk.verifies( e, sig )
|
||||
if valid: print_("Signature verified OK.")
|
||||
else: raise TestFailure("*** Signature failed verification.")
|
||||
|
||||
valid = pubk.verifies( e-1, sig )
|
||||
if not valid: print_("Forgery was correctly rejected.")
|
||||
else: raise TestFailure("*** Forgery was erroneously accepted.")
|
||||
|
||||
print_("Testing point validity, as per ECDSAVS.pdf B.2.2:")
|
||||
|
||||
test_point_validity( \
|
||||
p192, \
|
||||
0xcd6d0f029a023e9aaca429615b8f577abee685d8257cc83a, \
|
||||
0x00019c410987680e9fb6c0b6ecc01d9a2647c8bae27721bacdfc, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x00017f2fce203639e9eaf9fb50b81fc32776b30e3b02af16c73b, \
|
||||
0x95da95c5e72dd48e229d4748d4eee658a9a54111b23b2adb, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x4f77f8bc7fccbadd5760f4938746d5f253ee2168c1cf2792, \
|
||||
0x000147156ff824d131629739817edb197717c41aab5c2a70f0f6, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6, \
|
||||
0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f, \
|
||||
True )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0xcdf56c1aa3d8afc53c521adf3ffb96734a6a630a4a5b5a70, \
|
||||
0x97c1c44a5fb229007b5ec5d25f7413d170068ffd023caa4e, \
|
||||
True )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x89009c0dc361c81e99280c8e91df578df88cdf4b0cdedced, \
|
||||
0x27be44a529b7513e727251f128b34262a0fd4d8ec82377b9, \
|
||||
True )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x6a223d00bd22c52833409a163e057e5b5da1def2a197dd15, \
|
||||
0x7b482604199367f1f303f9ef627f922f97023e90eae08abf, \
|
||||
True )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x6dccbde75c0948c98dab32ea0bc59fe125cf0fb1a3798eda, \
|
||||
0x0001171a3e0fa60cf3096f4e116b556198de430e1fbd330c8835, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0xd266b39e1f491fc4acbbbc7d098430931cfa66d55015af12, \
|
||||
0x193782eb909e391a3148b7764e6b234aa94e48d30a16dbb2, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x9d6ddbcd439baa0c6b80a654091680e462a7d1d3f1ffeb43, \
|
||||
0x6ad8efc4d133ccf167c44eb4691c80abffb9f82b932b8caa, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x146479d944e6bda87e5b35818aa666a4c998a71f4e95edbc, \
|
||||
0xa86d6fe62bc8fbd88139693f842635f687f132255858e7f6, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0xe594d4a598046f3598243f50fd2c7bd7d380edb055802253, \
|
||||
0x509014c0c4d6b536e3ca750ec09066af39b4c8616a53a923, \
|
||||
False )
|
||||
|
||||
print_("Trying signature-verification tests from ECDSAVS.pdf B.2.4:")
|
||||
print_("P-192:")
|
||||
Msg = 0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff798cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d158
|
||||
Qx = 0xd9dbfb332aa8e5ff091e8ce535857c37c73f6250ffb2e7ac
|
||||
Qy = 0x282102e364feded3ad15ddf968f88d8321aa268dd483ebc4
|
||||
R = 0x64dca58a20787c488d11d6dd96313f1b766f2d8efe122916
|
||||
S = 0x1ecba28141e84ab4ecad92f56720e2cc83eb3d22dec72479
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, True )
|
||||
|
||||
Msg = 0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db12e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db326ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63f4
|
||||
Qx = 0x3e53ef8d3112af3285c0e74842090712cd324832d4277ae7
|
||||
Qy = 0xcc75f8952d30aec2cbb719fc6aa9934590b5d0ff5a83adb7
|
||||
R = 0x8285261607283ba18f335026130bab31840dcfd9c3e555af
|
||||
S = 0x356d89e1b04541afc9704a45e9c535ce4a50929e33d7e06c
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, True )
|
||||
|
||||
Msg = 0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cdd41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d303f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42dd
|
||||
Qx = 0x16335dbe95f8e8254a4e04575d736befb258b8657f773cb7
|
||||
Qy = 0x421b13379c59bc9dce38a1099ca79bbd06d647c7f6242336
|
||||
R = 0x4141bd5d64ea36c5b0bd21ef28c02da216ed9d04522b1e91
|
||||
S = 0x159a6aa852bcc579e821b7bb0994c0861fb08280c38daa09
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b563097ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd8a
|
||||
Qx = 0xfd14cdf1607f5efb7b1793037b15bdf4baa6f7c16341ab0b
|
||||
Qy = 0x83fa0795cc6c4795b9016dac928fd6bac32f3229a96312c4
|
||||
R = 0x8dfdb832951e0167c5d762a473c0416c5c15bc1195667dc1
|
||||
S = 0x1720288a2dc13fa1ec78f763f8fe2ff7354a7e6fdde44520
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d39192e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bceae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072fb
|
||||
Qx = 0x674f941dc1a1f8b763c9334d726172d527b90ca324db8828
|
||||
Qy = 0x65adfa32e8b236cb33a3e84cf59bfb9417ae7e8ede57a7ff
|
||||
R = 0x9508b9fdd7daf0d8126f9e2bc5a35e4c6d800b5b804d7796
|
||||
S = 0x36f2bf6b21b987c77b53bb801b3435a577e3d493744bfab0
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397ce15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aae98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca6d
|
||||
Qx = 0x10ecca1aad7220b56a62008b35170bfd5e35885c4014a19f
|
||||
Qy = 0x04eb61984c6c12ade3bc47f3c629ece7aa0a033b9948d686
|
||||
R = 0x82bfa4e82c0dfe9274169b86694e76ce993fd83b5c60f325
|
||||
S = 0xa97685676c59a65dbde002fe9d613431fb183e8006d05633
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a278461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76e1
|
||||
Qx = 0x6636653cb5b894ca65c448277b29da3ad101c4c2300f7c04
|
||||
Qy = 0xfdf1cbb3fc3fd6a4f890b59e554544175fa77dbdbeb656c1
|
||||
R = 0xeac2ddecddfb79931a9c3d49c08de0645c783a24cb365e1c
|
||||
S = 0x3549fee3cfa7e5f93bc47d92d8ba100e881a2a93c22f8d50
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6ba2
|
||||
Qx = 0xa82bd718d01d354001148cd5f69b9ebf38ff6f21898f8aaa
|
||||
Qy = 0xe67ceede07fc2ebfafd62462a51e4b6c6b3d5b537b7caf3e
|
||||
R = 0x4d292486c620c3de20856e57d3bb72fcde4a73ad26376955
|
||||
S = 0xa85289591a6081d5728825520e62ff1c64f94235c04c7f95
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc910250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb658
|
||||
Qx = 0x7d3b016b57758b160c4fca73d48df07ae3b6b30225126c2f
|
||||
Qy = 0x4af3790d9775742bde46f8da876711be1b65244b2b39e7ec
|
||||
R = 0x95f778f5f656511a5ab49a5d69ddd0929563c29cbc3a9e62
|
||||
S = 0x75c87fc358c251b4c83d2dd979faad496b539f9f2ee7a289
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e10288acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c90a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9ea387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c97a
|
||||
Qx = 0x9362f28c4ef96453d8a2f849f21e881cd7566887da8beb4a
|
||||
Qy = 0xe64d26d8d74c48a024ae85d982ee74cd16046f4ee5333905
|
||||
R = 0xf3923476a296c88287e8de914b0b324ad5a963319a4fe73b
|
||||
S = 0xf0baeed7624ed00d15244d8ba2aede085517dbdec8ac65f5
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, True )
|
||||
|
||||
Msg = 0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f6450d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d9064e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8ce1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd0456d
|
||||
Qx = 0xcc6fc032a846aaac25533eb033522824f94e670fa997ecef
|
||||
Qy = 0xe25463ef77a029eccda8b294fd63dd694e38d223d30862f1
|
||||
R = 0x066b1d07f3a40e679b620eda7f550842a35c18b80c5ebe06
|
||||
S = 0xa0b0fb201e8f2df65e2c4508ef303bdc90d934016f16b2dc
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214eed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c440341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839d7
|
||||
Qx = 0x955c908fe900a996f7e2089bee2f6376830f76a19135e753
|
||||
Qy = 0xba0c42a91d3847de4a592a46dc3fdaf45a7cc709b90de520
|
||||
R = 0x1f58ad77fc04c782815a1405b0925e72095d906cbf52a668
|
||||
S = 0xf2e93758b3af75edf784f05a6761c9b9a6043c66b845b599
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf9986670a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b41269bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160cef3
|
||||
Qx = 0x31f7fa05576d78a949b24812d4383107a9a45bb5fccdd835
|
||||
Qy = 0x8dc0eb65994a90f02b5e19bd18b32d61150746c09107e76b
|
||||
R = 0xbe26d59e4e883dde7c286614a767b31e49ad88789d3a78ff
|
||||
S = 0x8762ca831c1ce42df77893c9b03119428e7a9b819b619068
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add5023572720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1
|
||||
Qx = 0x66aa8edbbdb5cf8e28ceb51b5bda891cae2df84819fe25c0
|
||||
Qy = 0x0c6bc2f69030a7ce58d4a00e3b3349844784a13b8936f8da
|
||||
R = 0xa4661e69b1734f4a71b788410a464b71e7ffe42334484f23
|
||||
S = 0x738421cf5e049159d69c57a915143e226cac8355e149afe9
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5afa261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6db377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb
|
||||
Qx = 0xbcfacf45139b6f5f690a4c35a5fffa498794136a2353fc77
|
||||
Qy = 0x6f4a6c906316a6afc6d98fe1f0399d056f128fe0270b0f22
|
||||
R = 0x9db679a3dafe48f7ccad122933acfe9da0970b71c94c21c1
|
||||
S = 0x984c2db99827576c0a41a5da41e07d8cc768bc82f18c9da9
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
|
||||
|
||||
print_("Testing the example code:")
|
||||
|
||||
# Building a public/private key pair from the NIST Curve P-192:
|
||||
|
||||
g = generator_192
|
||||
n = g.order()
|
||||
|
||||
# (random.SystemRandom is supposed to provide
|
||||
# crypto-quality random numbers, but as Debian recently
|
||||
# illustrated, a systems programmer can accidentally
|
||||
# demolish this security, so in serious applications
|
||||
# further precautions are appropriate.)
|
||||
|
||||
randrange = random.SystemRandom().randrange
|
||||
|
||||
secret = randrange( 1, n )
|
||||
pubkey = Public_key( g, g * secret )
|
||||
privkey = Private_key( pubkey, secret )
|
||||
|
||||
# Signing a hash value:
|
||||
|
||||
hash = randrange( 1, n )
|
||||
signature = privkey.sign( hash, randrange( 1, n ) )
|
||||
|
||||
# Verifying a signature for a hash value:
|
||||
|
||||
if pubkey.verifies( hash, signature ):
|
||||
print_("Demo verification succeeded.")
|
||||
else:
|
||||
raise TestFailure("*** Demo verification failed.")
|
||||
|
||||
if pubkey.verifies( hash-1, signature ):
|
||||
raise TestFailure( "**** Demo verification failed to reject tampered hash.")
|
||||
else:
|
||||
print_("Demo verification correctly rejected tampered hash.")
|
||||
|
||||
# Run the module's self-test driver when executed as a script.
if __name__ == "__main__":
  __main__()
|
||||
293
Lambdas/Websocket Authorizer/ecdsa/ellipticcurve.py
Normal file
293
Lambdas/Websocket Authorizer/ecdsa/ellipticcurve.py
Normal file
@@ -0,0 +1,293 @@
|
||||
#! /usr/bin/env python
|
||||
#
|
||||
# Implementation of elliptic curves, for cryptographic applications.
|
||||
#
|
||||
# This module doesn't provide any way to choose a random elliptic
|
||||
# curve, nor to verify that an elliptic curve was chosen randomly,
|
||||
# because one can simply use NIST's standard curves.
|
||||
#
|
||||
# Notes from X9.62-1998 (draft):
|
||||
# Nomenclature:
|
||||
# - Q is a public key.
|
||||
# The "Elliptic Curve Domain Parameters" include:
|
||||
# - q is the "field size", which in our case equals p.
|
||||
# - p is a big prime.
|
||||
# - G is a point of prime order (5.1.1.1).
|
||||
# - n is the order of G (5.1.1.1).
|
||||
# Public-key validation (5.2.2):
|
||||
# - Verify that Q is not the point at infinity.
|
||||
# - Verify that X_Q and Y_Q are in [0,p-1].
|
||||
# - Verify that Q is on the curve.
|
||||
# - Verify that nQ is the point at infinity.
|
||||
# Signature generation (5.3):
|
||||
# - Pick random k from [1,n-1].
|
||||
# Signature checking (5.4.2):
|
||||
# - Verify that r and s are in [1,n-1].
|
||||
#
|
||||
# Version of 2008.11.25.
|
||||
#
|
||||
# Revision history:
|
||||
# 2005.12.31 - Initial version.
|
||||
# 2008.11.25 - Change CurveFp.is_on to contains_point.
|
||||
#
|
||||
# Written in 2005 by Peter Pearson and placed in the public domain.
|
||||
|
||||
from __future__ import division
|
||||
|
||||
from .six import print_
|
||||
from . import numbertheory
|
||||
|
||||
class CurveFp(object):
    """A short-Weierstrass elliptic curve over GF(p).

    The curve is the set of points (x, y) satisfying
    y^2 = x^3 + a*x + b (mod p).
    """

    def __init__(self, p, a, b):
        """Store the prime modulus *p* and curve coefficients *a* and *b*."""
        self.__p = p
        self.__a = a
        self.__b = b

    def p(self):
        """Return the prime modulus of the underlying field."""
        return self.__p

    def a(self):
        """Return the curve coefficient a."""
        return self.__a

    def b(self):
        """Return the curve coefficient b."""
        return self.__b

    def contains_point(self, x, y):
        """Return True when (x, y) satisfies the curve equation mod p."""
        lhs = y * y
        rhs = x * x * x + self.__a * x + self.__b
        return (lhs - rhs) % self.__p == 0
|
||||
|
||||
|
||||
|
||||
class Point( object ):
  """A point on an elliptic curve.

  Altering x and y after construction is forbidden, but they can be
  read back by the x() and y() methods.  The module-level constant
  INFINITY (curve, x and y all None) serves as the group identity.
  """
  def __init__( self, curve, x, y, order = None ):
    """curve, x, y, order; order (optional) is the order of this point."""
    self.__curve = curve
    self.__x = x
    self.__y = y
    self.__order = order
    # self.curve is allowed to be None only for INFINITY:
    if self.__curve: assert self.__curve.contains_point( x, y )
    # If an order was supplied, sanity-check it: order * point must be
    # the identity.  (The multiplication below reduces order mod order
    # to 0 before recursing, so this does not loop forever.)
    if order: assert self * order == INFINITY

  def __eq__( self, other ):
    """Return True if the points are identical, False otherwise."""
    if self.__curve == other.__curve \
       and self.__x == other.__x \
       and self.__y == other.__y:
      return True
    else:
      return False

  def __add__( self, other ):
    """Add one point to another point (group law)."""

    # X9.62 B.3:

    # The identity is the neutral element of addition.
    if other == INFINITY: return self
    if self == INFINITY: return other
    assert self.__curve == other.__curve
    if self.__x == other.__x:
      # Same x: either the points are inverses of each other (sum is
      # the identity) or they are equal (use the doubling formula).
      if ( self.__y + other.__y ) % self.__curve.p() == 0:
        return INFINITY
      else:
        return self.double()

    p = self.__curve.p()

    # Slope of the chord through the two points, computed mod p.
    l = ( ( other.__y - self.__y ) * \
          numbertheory.inverse_mod( other.__x - self.__x, p ) ) % p

    x3 = ( l * l - self.__x - other.__x ) % p
    y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

    return Point( self.__curve, x3, y3 )

  def __mul__( self, other ):
    """Multiply a point by an integer (repeated addition)."""

    def leftmost_bit( x ):
      # Highest power of two not exceeding x.
      assert x > 0
      result = 1
      while result <= x: result = 2 * result
      return result // 2

    e = other
    if self.__order: e = e % self.__order
    if e == 0: return INFINITY
    if self == INFINITY: return INFINITY
    assert e > 0

    # From X9.62 D.3.2:
    # Non-adjacent-form style multiplication: scan the bits of 3*e and
    # e together, adding either the point or its negative.

    e3 = 3 * e
    negative_self = Point( self.__curve, self.__x, -self.__y, self.__order )
    i = leftmost_bit( e3 ) // 2
    result = self
    # print_("Multiplying %s by %d (e3 = %d):" % ( self, other, e3 ))
    while i > 1:
      result = result.double()
      if ( e3 & i ) != 0 and ( e & i ) == 0: result = result + self
      if ( e3 & i ) == 0 and ( e & i ) != 0: result = result + negative_self
      # print_(". . . i = %d, result = %s" % ( i, result ))
      i = i // 2

    return result

  def __rmul__( self, other ):
    """Multiply a point by an integer (supports int * Point)."""

    return self * other

  def __str__( self ):
    # Human-readable form, used by the self-test output.
    if self == INFINITY: return "infinity"
    return "(%d,%d)" % ( self.__x, self.__y )

  def double( self ):
    """Return a new point that is twice the old."""

    if self == INFINITY:
      return INFINITY

    # X9.62 B.3:

    p = self.__curve.p()
    a = self.__curve.a()

    # Slope of the tangent line at this point, computed mod p.
    l = ( ( 3 * self.__x * self.__x + a ) * \
          numbertheory.inverse_mod( 2 * self.__y, p ) ) % p

    x3 = ( l * l - 2 * self.__x ) % p
    y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

    return Point( self.__curve, x3, y3 )

  def x( self ):
    """Return this point's x coordinate."""
    return self.__x

  def y( self ):
    """Return this point's y coordinate."""
    return self.__y

  def curve( self ):
    """Return the CurveFp this point lies on (None for INFINITY)."""
    return self.__curve

  def order( self ):
    """Return the order supplied at construction time, or None."""
    return self.__order
|
||||
|
||||
|
||||
# This one point is the Point At Infinity for all purposes: it is the
# identity element of the group, recognised by curve/x/y all being None.
INFINITY = Point( None, None, None )
|
||||
|
||||
def __main__():
  """Self-test: exercise CurveFp/Point arithmetic against X9.62 vectors."""

  class FailedTest(Exception): pass
  def test_add( c, x1, y1, x2, y2, x3, y3 ):
    """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3)."""
    p1 = Point( c, x1, y1 )
    p2 = Point( c, x2, y2 )
    p3 = p1 + p2
    print_("%s + %s = %s" % ( p1, p2, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")

  def test_double( c, x1, y1, x3, y3 ):
    """We expect that on curve c, 2*(x1,y1) = (x3, y3)."""
    p1 = Point( c, x1, y1 )
    p3 = p1.double()
    print_("%s doubled = %s" % ( p1, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")

  def test_double_infinity( c ):
    """We expect that on curve c, 2*INFINITY = INFINITY."""
    p1 = INFINITY
    p3 = p1.double()
    print_("%s doubled = %s" % ( p1, p3 ), end=' ')
    if p3.x() != INFINITY.x() or p3.y() != INFINITY.y():
      raise FailedTest("Failure: should give (%d,%d)." % ( INFINITY.x(), INFINITY.y() ))
    else:
      print_(" Good.")

  def test_multiply( c, x1, y1, m, x3, y3 ):
    """We expect that on curve c, m*(x1,y1) = (x3,y3)."""
    p1 = Point( c, x1, y1 )
    p3 = p1 * m
    print_("%s * %d = %s" % ( p1, m, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")


  # A few tests from X9.62 B.3:

  c = CurveFp( 23, 1, 1 )
  test_add( c, 3, 10, 9, 7, 17, 20 )
  test_double( c, 3, 10, 7, 12 )
  test_add( c, 3, 10, 3, 10, 7, 12 )  # (Should just invoke double.)
  test_multiply( c, 3, 10, 2, 7, 12 )

  test_double_infinity(c)

  # From X9.62 I.1 (p. 96): walk the cyclic subgroup generated by g
  # and check each multiple against an incrementally built sum.

  g = Point( c, 13, 7, 7 )

  check = INFINITY
  for i in range( 7 + 1 ):
    p = ( i % 7 ) * g
    print_("%s * %d = %s, expected %s . . ." % ( g, i, p, check ), end=' ')
    if p == check:
      print_(" Good.")
    else:
      raise FailedTest("Bad.")
    check = check + g

  # NIST Curve P-192:
  p = 6277101735386680763835789423207666416083908700390324961279
  r = 6277101735386680763835789423176059013767194773182842284081
  #s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
  c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
  b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
  Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
  Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811

  c192 = CurveFp( p, -3, b )
  p192 = Point( c192, Gx, Gy, r )

  # Checking against some sample computations presented
  # in X9.62:

  d = 651056770906015076056810763456358567190100156695615665659
  Q = d * p192
  if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
    raise FailedTest("p192 * d came out wrong.")
  else:
    print_("p192 * d came out right.")

  k = 6140507067065001063065065565667405560006161556565665656654
  R = k * p192
  if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise FailedTest("k * p192 came out wrong.")
  else:
    print_("k * p192 came out right.")

  u1 = 2563697409189434185194736134579731015366492496392189760599
  u2 = 6266643813348617967186477710235785849136406323338782220568
  temp = u1 * p192 + u2 * Q
  if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise FailedTest("u1 * p192 + u2 * Q came out wrong.")
  else:
    print_("u1 * p192 + u2 * Q came out right.")

# Run the self-tests when executed as a script.
if __name__ == "__main__":
  __main__()
|
||||
307
Lambdas/Websocket Authorizer/ecdsa/keys.py
Normal file
307
Lambdas/Websocket Authorizer/ecdsa/keys.py
Normal file
@@ -0,0 +1,307 @@
|
||||
import binascii
|
||||
|
||||
from . import ecdsa
|
||||
from . import der
|
||||
from . import rfc6979
|
||||
from . import ellipticcurve
|
||||
from .curves import NIST192p, find_curve
|
||||
from .util import string_to_number, number_to_string, randrange
|
||||
from .util import sigencode_string, sigdecode_string
|
||||
from .util import oid_ecPublicKey, encoded_oid_ecPublicKey, MalformedSignature
|
||||
from .six import PY3, b
|
||||
from hashlib import sha1
|
||||
|
||||
class BadSignatureError(Exception):
    """Raised when a signature is malformed or fails verification."""
    pass
class BadDigestError(Exception):
    """Raised when a digest is longer than the curve order allows."""
    pass


class MalformedPointError(AssertionError):
    """Raised for invalid encodings of public points or private keys."""
    pass
|
||||
|
||||
|
||||
class VerifyingKey:
    """An ECDSA public (verifying) key.

    Construct instances via the from_* classmethods, not the
    constructor directly.
    """
    def __init__(self, _error__please_use_generate=None):
        # Direct construction is blocked; the from_* classmethods pass
        # the sentinel explicitly.
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def from_public_point(klass, point, curve=NIST192p, hashfunc=sha1):
        """Build a VerifyingKey from an ellipticcurve.Point on *curve*."""
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.pubkey = ecdsa.Public_key(curve.generator, point)
        self.pubkey.order = curve.order
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1,
                    validate_point=True):
        """Decode a raw fixed-length x||y byte string into a key.

        Raises MalformedPointError on bad length or (when
        validate_point is true) a point not on the curve.
        """
        order = curve.order
        if len(string) != curve.verifying_key_length:
            raise MalformedPointError(
                "Malformed encoding of public point. Expected string {0} bytes"
                " long, received {1} bytes long string".format(
                    curve.verifying_key_length, len(string)))
        # The encoding is the x coordinate followed by the y coordinate,
        # each exactly curve.baselen bytes.
        xs = string[:curve.baselen]
        ys = string[curve.baselen:]
        if len(xs) != curve.baselen:
            raise MalformedPointError("Unexpected length of encoded x")
        if len(ys) != curve.baselen:
            raise MalformedPointError("Unexpected length of encoded y")
        x = string_to_number(xs)
        y = string_to_number(ys)
        if validate_point and not ecdsa.point_is_valid(curve.generator, x, y):
            raise MalformedPointError("Point does not lie on the curve")
        point = ellipticcurve.Point(curve.curve, x, y, order)
        return klass.from_public_point(point, curve, hashfunc)

    @classmethod
    def from_pem(klass, string):
        """Decode a PEM "PUBLIC KEY" block."""
        return klass.from_der(der.unpem(string))

    @classmethod
    def from_der(klass, string):
        """Decode a DER SubjectPublicKeyInfo structure into a key."""
        # [[oid_ecPublicKey,oid_curve], point_str_bitstring]
        s1,empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey: %s" %
                                    binascii.hexlify(empty))
        s2,point_str_bitstring = der.remove_sequence(s1)
        # s2 = oid_ecPublicKey,oid_curve
        oid_pk, rest = der.remove_object(s2)
        oid_curve, empty = der.remove_object(rest)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey objects: %s" %
                                    binascii.hexlify(empty))
        if oid_pk != oid_ecPublicKey:
            raise der.UnexpectedDER(
                "Unexpected OID in encoding, received {0}, expected {1}"
                .format(oid_pk, oid_ecPublicKey))
        curve = find_curve(oid_curve)
        point_str, empty = der.remove_bitstring(point_str_bitstring)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after pubkey pointstring: %s" %
                                    binascii.hexlify(empty))
        # \x00 = no unused bits in the BIT STRING, \x04 = uncompressed point.
        if not point_str.startswith(b("\x00\x04")):
            # NOTE(review): "pubcli" is a typo for "public" in this
            # runtime error message; left unchanged here.
            raise der.UnexpectedDER(
                "Unsupported or invalid encoding of pubcli key")
        return klass.from_string(point_str[2:], curve)

    def to_string(self):
        """Return the raw x||y encoding of the public point."""
        # VerifyingKey.from_string(vk.to_string()) == vk as long as the
        # curves are the same: the curve itself is not included in the
        # serialized form
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        return x_str + y_str

    def to_pem(self):
        """Return the key as a PEM "PUBLIC KEY" block."""
        return der.topem(self.to_der(), "PUBLIC KEY")

    def to_der(self):
        """Return the key as a DER SubjectPublicKeyInfo structure."""
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        point_str = b("\x00\x04") + x_str + y_str
        return der.encode_sequence(der.encode_sequence(encoded_oid_ecPublicKey,
                                                       self.curve.encoded_oid),
                                   der.encode_bitstring(point_str))

    def verify(self, signature, data, hashfunc=None, sigdecode=sigdecode_string):
        """Hash *data* and verify *signature* over the digest.

        Returns True on success, raises BadSignatureError otherwise.
        """
        hashfunc = hashfunc or self.default_hashfunc
        digest = hashfunc(data).digest()
        return self.verify_digest(signature, digest, sigdecode)

    def verify_digest(self, signature, digest, sigdecode=sigdecode_string):
        """Verify *signature* over a precomputed *digest*.

        Raises BadDigestError if the digest is too long for the curve,
        BadSignatureError if the signature is malformed or invalid.
        """
        if len(digest) > self.curve.baselen:
            raise BadDigestError("this curve (%s) is too short "
                                 "for your digest (%d)" % (self.curve.name,
                                                           8*len(digest)))
        number = string_to_number(digest)
        try:
            r, s = sigdecode(signature, self.pubkey.order)
        except (der.UnexpectedDER, MalformedSignature) as e:
            raise BadSignatureError("Malformed formatting of signature", e)
        sig = ecdsa.Signature(r, s)
        if self.pubkey.verifies(number, sig):
            return True
        raise BadSignatureError("Signature verification failed")
|
||||
|
||||
class SigningKey:
    """An ECDSA private (signing) key.

    Construct instances via generate() or the from_* classmethods, not
    the constructor directly.
    """
    def __init__(self, _error__please_use_generate=None):
        # Direct construction is blocked; the classmethods pass the
        # sentinel explicitly.
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def generate(klass, curve=NIST192p, entropy=None, hashfunc=sha1):
        """Create a new random key on *curve* using *entropy* (or the default)."""
        secexp = randrange(curve.order, entropy)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    # to create a signing key from a short (arbitrary-length) seed, convert
    # that seed into an integer with something like
    # secexp=util.randrange_from_seed__X(seed, curve.order), and then pass
    # that integer into SigningKey.from_secret_exponent(secexp, curve)

    @classmethod
    def from_secret_exponent(klass, secexp, curve=NIST192p, hashfunc=sha1):
        """Build a key from the integer secret exponent *secexp*.

        Raises MalformedPointError when secexp is outside [1, order).
        """
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.baselen = curve.baselen
        n = curve.order
        if not 1 <= secexp < n:
            raise MalformedPointError(
                "Invalid value for secexp, expected integer between 1 and {0}"
                .format(n))
        # The public point is secexp * G.
        pubkey_point = curve.generator*secexp
        pubkey = ecdsa.Public_key(curve.generator, pubkey_point)
        pubkey.order = n
        self.verifying_key = VerifyingKey.from_public_point(pubkey_point, curve,
                                                            hashfunc)
        self.privkey = ecdsa.Private_key(pubkey, secexp)
        self.privkey.order = n
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1):
        """Decode a fixed-length big-endian byte string into a key."""
        if len(string) != curve.baselen:
            raise MalformedPointError(
                "Invalid length of private key, received {0}, expected {1}"
                .format(len(string), curve.baselen))
        secexp = string_to_number(string)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    @classmethod
    def from_pem(klass, string, hashfunc=sha1):
        """Decode a PEM "EC PRIVATE KEY" block."""
        # the privkey pem file has two sections: "EC PARAMETERS" and "EC
        # PRIVATE KEY". The first is redundant.
        if PY3 and isinstance(string, str):
            string = string.encode()
        privkey_pem = string[string.index(b("-----BEGIN EC PRIVATE KEY-----")):]
        return klass.from_der(der.unpem(privkey_pem), hashfunc)

    @classmethod
    def from_der(klass, string, hashfunc=sha1):
        """Decode a DER ECPrivateKey (RFC 5915) structure into a key."""
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        s, empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey: %s" %
                                    binascii.hexlify(empty))
        one, s = der.remove_integer(s)
        if one != 1:
            raise der.UnexpectedDER("expected '1' at start of DER privkey,"
                                    " got %d" % one)
        privkey_str, s = der.remove_octet_string(s)
        tag, curve_oid_str, s = der.remove_constructed(s)
        if tag != 0:
            raise der.UnexpectedDER("expected tag 0 in DER privkey,"
                                    " got %d" % tag)
        curve_oid, empty = der.remove_object(curve_oid_str)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey "
                                    "curve_oid: %s" % binascii.hexlify(empty))
        curve = find_curve(curve_oid)

        # we don't actually care about the following fields
        #
        #tag, pubkey_bitstring, s = der.remove_constructed(s)
        #if tag != 1:
        #    raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d"
        #                            % tag)
        #pubkey_str = der.remove_bitstring(pubkey_bitstring)
        #if empty != "":
        #    raise der.UnexpectedDER("trailing junk after DER privkey "
        #                            "pubkeystr: %s" % binascii.hexlify(empty))

        # our from_string method likes fixed-length privkey strings:
        # left-pad with zero bytes to curve.baselen.
        if len(privkey_str) < curve.baselen:
            privkey_str = b("\x00")*(curve.baselen-len(privkey_str)) + privkey_str
        return klass.from_string(privkey_str, curve, hashfunc)

    def to_string(self):
        """Return the secret exponent as a fixed-length byte string."""
        secexp = self.privkey.secret_multiplier
        s = number_to_string(secexp, self.privkey.order)
        return s

    def to_pem(self):
        """Return the key as a PEM "EC PRIVATE KEY" block."""
        # TODO: "BEGIN ECPARAMETERS"
        return der.topem(self.to_der(), "EC PRIVATE KEY")

    def to_der(self):
        """Return the key as a DER ECPrivateKey (RFC 5915) structure."""
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        encoded_vk = b("\x00\x04") + self.get_verifying_key().to_string()
        return der.encode_sequence(der.encode_integer(1),
                                   der.encode_octet_string(self.to_string()),
                                   der.encode_constructed(0, self.curve.encoded_oid),
                                   der.encode_constructed(1, der.encode_bitstring(encoded_vk)),
                                   )

    def get_verifying_key(self):
        """Return the matching VerifyingKey (public half of this key)."""
        return self.verifying_key

    def sign_deterministic(self, data, hashfunc=None, sigencode=sigencode_string):
        """Hash *data* and sign the digest with a deterministic RFC 6979 'k'."""
        hashfunc = hashfunc or self.default_hashfunc
        digest = hashfunc(data).digest()

        return self.sign_digest_deterministic(digest, hashfunc=hashfunc, sigencode=sigencode)

    def sign_digest_deterministic(self, digest, hashfunc=None, sigencode=sigencode_string):
        """
        Calculates 'k' from data itself, removing the need for strong
        random generator and producing deterministic (reproducible) signatures.
        See RFC 6979 for more details.
        """
        secexp = self.privkey.secret_multiplier
        k = rfc6979.generate_k(
            self.curve.generator.order(), secexp, hashfunc, digest)

        return self.sign_digest(digest, sigencode=sigencode, k=k)

    def sign(self, data, entropy=None, hashfunc=None, sigencode=sigencode_string, k=None):
        """
        hashfunc= should behave like hashlib.sha1 . The output length of the
        hash (in bytes) must not be longer than the length of the curve order
        (rounded up to the nearest byte), so using SHA256 with nist256p is
        ok, but SHA256 with nist192p is not. (In the 2**-96ish unlikely event
        of a hash output larger than the curve order, the hash will
        effectively be wrapped mod n).

        Use hashfunc=hashlib.sha1 to match openssl's -ecdsa-with-SHA1 mode,
        or hashfunc=hashlib.sha256 for openssl-1.0.0's -ecdsa-with-SHA256.
        """

        hashfunc = hashfunc or self.default_hashfunc
        h = hashfunc(data).digest()
        return self.sign_digest(h, entropy, sigencode, k)

    def sign_digest(self, digest, entropy=None, sigencode=sigencode_string, k=None):
        """Sign a precomputed *digest*; raises BadDigestError when it is too long."""
        if len(digest) > self.curve.baselen:
            raise BadDigestError("this curve (%s) is too short "
                                 "for your digest (%d)" % (self.curve.name,
                                                           8*len(digest)))
        number = string_to_number(digest)
        r, s = self.sign_number(number, entropy, k)
        return sigencode(r, s, self.privkey.order)

    def sign_number(self, number, entropy=None, k=None):
        """Sign the integer *number*; returns the (r, s) pair."""
        # returns a pair of numbers
        order = self.privkey.order
        # privkey.sign() may raise RuntimeError in the amazingly unlikely
        # (2**-192) event that r=0 or s=0, because that would leak the key.
        # We could re-try with a different 'k', but we couldn't test that
        # code, so I choose to allow the signature to fail instead.

        # If k is set, it is used directly. In other cases
        # it is generated using entropy function
        if k is not None:
            _k = k
        else:
            _k = randrange(order, entropy)

        assert 1 <= _k < order
        sig = self.privkey.sign(number, _k)
        return sig.r, sig.s
|
||||
613
Lambdas/Websocket Authorizer/ecdsa/numbertheory.py
Normal file
613
Lambdas/Websocket Authorizer/ecdsa/numbertheory.py
Normal file
@@ -0,0 +1,613 @@
|
||||
#! /usr/bin/env python
|
||||
#
|
||||
# Provide some simple capabilities from number theory.
|
||||
#
|
||||
# Version of 2008.11.14.
|
||||
#
|
||||
# Written in 2005 and 2006 by Peter Pearson and placed in the public domain.
|
||||
# Revision history:
|
||||
# 2008.11.14: Use pow( base, exponent, modulus ) for modular_exp.
|
||||
# Make gcd and lcm accept arbitrarly many arguments.
|
||||
|
||||
from __future__ import division
|
||||
|
||||
from .six import print_, integer_types
|
||||
from .six.moves import reduce
|
||||
|
||||
import math
|
||||
|
||||
|
||||
class Error( Exception ):
|
||||
"""Base class for exceptions in this module."""
|
||||
pass
|
||||
|
||||
class SquareRootError( Error ):
    """Raised when a number has no square root modulo the given prime."""
    pass
|
||||
|
||||
class NegativeExponentError( Error ):
    """Raised when modular_exp() is given a negative exponent."""
    pass
|
||||
|
||||
|
||||
def modular_exp(base, exponent, modulus):
    """Return (base ** exponent) % modulus.

    Thin wrapper around the built-in three-argument pow(); kept for
    backward compatibility with callers of this module.

    Raises NegativeExponentError for exponent < 0 (modular inverses are
    handled separately by inverse_mod()).
    """
    if exponent < 0:
        raise NegativeExponentError("Negative exponents (%d) not allowed"
                                    % exponent)
    # pow(base, exponent, modulus) runs in C and is the canonical way to
    # do modular exponentiation; the old hand-rolled square-and-multiply
    # loop that used to live here (commented out) has been removed.
    return pow(base, exponent, modulus)
|
||||
|
||||
|
||||
def polynomial_reduce_mod(poly, polymod, p):
    """Reduce `poly` modulo `polymod`, with integer coefficients mod p.

    Polynomials are lists of coefficients, lowest power of x first.
    `polymod` must be monic (leading coefficient 1) of degree >= 1.
    Note: the input list `poly` may be mutated in place.
    """
    # Monic modulus of degree at least one keeps the division trivial.
    assert polymod[-1] == 1
    assert len(polymod) > 1

    while len(poly) >= len(polymod):
        lead = poly[-1]
        if lead != 0:
            # Cancel the leading term: subtract lead * x^k * polymod.
            for idx in range(2, len(polymod) + 1):
                poly[-idx] = (poly[-idx] - lead * polymod[-idx]) % p
        # Drop the now-zero leading coefficient.
        poly = poly[0:-1]

    return poly
|
||||
|
||||
|
||||
|
||||
def polynomial_multiply_mod(m1, m2, polymod, p):
    """Multiply two polynomials modulo `polymod`, coefficients mod p.

    Polynomials are lists of coefficients, lowest power of x first.
    """
    # Schoolbook multiplication: degree of the product is deg(m1)+deg(m2).
    prod = [0] * (len(m1) + len(m2) - 1)

    for i, c1 in enumerate(m1):
        for j, c2 in enumerate(m2):
            prod[i + j] = (prod[i + j] + c1 * c2) % p

    # Bring the product back below the degree of the modulus.
    return polynomial_reduce_mod(prod, polymod, p)
|
||||
|
||||
|
||||
def polynomial_exp_mod( base, exponent, polymod, p ):
    """Polynomial exponentiation modulo a polynomial over ints mod p.

    Polynomials are represented as lists of coefficients
    of increasing powers of x."""

    # Based on the Handbook of Applied Cryptography, algorithm 2.227.

    # This module has been tested only by extensive use
    # in calculating modular square roots.

    assert exponent < p

    if exponent == 0: return [ 1 ]

    # Right-to-left square-and-multiply over the polynomial ring:
    # G holds successive squarings of the base, s the accumulated result.
    G = base
    k = exponent
    if k%2 == 1: s = G
    else: s = [ 1 ]

    while k > 1:
        k = k // 2
        G = polynomial_multiply_mod( G, G, polymod, p )
        # Multiply into the result when the current exponent bit is set.
        if k%2 == 1: s = polynomial_multiply_mod( G, s, polymod, p )

    return s
|
||||
|
||||
|
||||
|
||||
def jacobi(a, n):
    """Compute the Jacobi symbol (a/n) for odd n >= 3.

    Returns 0, 1 or -1.  Based on the Handbook of Applied Cryptography
    (HAC), algorithm 2.149.
    """
    assert n >= 3
    assert n % 2 == 1

    a %= n
    if a == 0:
        return 0
    if a == 1:
        return 1

    # Pull out the even part of a: a = a1 * 2**e with a1 odd.
    a1, e = a, 0
    while a1 % 2 == 0:
        a1 //= 2
        e += 1

    # (2/n) depends on n mod 8; an even power of two contributes +1.
    if e % 2 == 0 or n % 8 in (1, 7):
        sign = 1
    else:
        sign = -1

    if a1 == 1:
        return sign
    # Quadratic reciprocity flip.
    if n % 4 == 3 and a1 % 4 == 3:
        sign = -sign
    return sign * jacobi(n % a1, a1)
|
||||
|
||||
|
||||
|
||||
def square_root_mod_prime( a, p ):
    """Modular square root of a, mod p, p prime.

    Raises SquareRootError if a is a quadratic non-residue mod p.
    """

    # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39.

    # This module has been tested for all values in [0,p-1] for
    # every prime p from 3 to 1229.

    assert 0 <= a < p
    assert 1 < p

    if a == 0: return 0
    if p == 2: return a

    # Jacobi symbol -1 means a has no square root modulo p.
    jac = jacobi( a, p )
    if jac == -1: raise SquareRootError( "%d has no square root modulo %d" \
                                         % ( a, p ) )

    # p = 3 (mod 4): the root is simply a^((p+1)/4)  (HAC 3.36).
    if p % 4 == 3: return modular_exp( a, (p+1)//4, p )

    # p = 5 (mod 8): Atkin-style special case (HAC 3.37).
    if p % 8 == 5:
        d = modular_exp( a, (p-1)//4, p )
        if d == 1: return modular_exp( a, (p+3)//8, p )
        if d == p-1: return ( 2 * a * modular_exp( 4*a, (p-5)//8, p ) ) % p
        # d must be 1 or p-1 when jacobi(a,p) == 1, so this is unreachable.
        raise RuntimeError("Shouldn't get here.")

    # General case, p = 1 (mod 8): exponentiate x in the quadratic
    # extension GF(p)[x]/(x^2 - b*x + a), where b*b - 4*a is a
    # non-residue (HAC 3.39); the constant term of the result is a root.
    for b in range( 2, p ):
        if jacobi( b*b-4*a, p ) == -1:
            f = ( a, -b, 1 )
            ff = polynomial_exp_mod( ( 0, 1 ), (p+1)//2, f, p )
            assert ff[1] == 0
            return ff[0]
    # Half of all b values work, so some b is always found for prime p.
    raise RuntimeError("No b found.")
|
||||
|
||||
|
||||
|
||||
def inverse_mod(a, m):
    """Return the multiplicative inverse of a modulo m.

    Requires gcd(a, m) == 1; the result r satisfies (a * r) % m == 1
    and 0 < r < m.
    """
    a = a % m

    # Extended Euclidean algorithm; we only need the coefficient of a.
    c, d = a, m
    coeff_c, coeff_d = 1, 0
    while c != 0:
        q = d // c
        c, d = d - q * c, c
        coeff_c, coeff_d = coeff_d - q * coeff_c, coeff_c

    # d is now gcd(a, m) and coeff_d * a == d (mod m); the inverse exists
    # only when that gcd is 1.
    assert d == 1
    return coeff_d % m
|
||||
|
||||
|
||||
def gcd2(a, b):
    """Greatest common divisor of two integers, by Euclid's algorithm."""
    while a != 0:
        b, a = a, b % a
    return b
|
||||
|
||||
|
||||
def gcd(*a):
    """Greatest common divisor of arbitrarily many integers.

    Accepts either a single iterable -- gcd([2, 4, 6]) -- or the
    integers directly -- gcd(2, 4, 6).  A lone integer is returned
    unchanged.
    """
    if len(a) > 1:
        return reduce(gcd2, a)
    only = a[0]
    if hasattr(only, "__iter__"):
        return reduce(gcd2, only)
    return only
|
||||
|
||||
|
||||
def lcm2(a, b):
    """Least common multiple of two integers."""
    # lcm(a, b) * gcd(a, b) == a * b
    return a * b // gcd(a, b)
|
||||
|
||||
|
||||
def lcm(*a):
    """Least common multiple of arbitrarily many integers.

    Accepts either a single iterable -- lcm([3, 4, 5]) -- or the
    integers directly -- lcm(3, 4, 5).  A lone integer is returned
    unchanged.
    """
    if len(a) > 1:
        return reduce(lcm2, a)
    only = a[0]
    if hasattr(only, "__iter__"):
        return reduce(lcm2, only)
    return only
|
||||
|
||||
|
||||
|
||||
def factorization( n ):
    """Decompose n into a list of (prime,exponent) pairs.

    Pairs are returned in increasing prime order; n < 2 yields [].
    """

    assert isinstance( n, integer_types )

    if n < 2: return []

    result = []
    d = 2

    # Test the small primes:

    for d in smallprimes:
        if d > n: break
        q, r = divmod( n, d )
        if r == 0:
            # d divides n; count how many times, reducing n as we go.
            count = 1
            while d <= n:
                n = q
                q, r = divmod( n, d )
                if r != 0: break
                count = count + 1
            result.append( ( d, count ) )

    # If n is still greater than the last of our small primes,
    # it may require further work:

    if n > smallprimes[-1]:
        if is_prime( n ):   # If what's left is prime, it's easy:
            result.append( ( n, 1 ) )
        else: # Ugh. Search stupidly for a divisor:
            # Trial division by successive odd numbers past the table.
            d = smallprimes[-1]
            while 1:
                d = d + 2            # Try the next divisor.
                q, r = divmod( n, d )
                if q < d: break      # n < d*d means we're done, n = 1 or prime.
                if r == 0:           # d divides n. How many times?
                    count = 1
                    n = q                     # d divides n. Reduce n, count this divisor.
                    while d <= n:             # As long as d might still divide n,
                        q, r = divmod( n, d ) # see if it does.
                        if r != 0: break
                        n = q                 # It does. Reduce n, increase count.
                        count = count + 1
                    result.append( ( d, count ) )
            # Whatever survived trial division is a single prime factor.
            if n > 1: result.append( ( n, 1 ) )

    return result
|
||||
|
||||
|
||||
|
||||
def phi(n):
    """Return the Euler totient function of n.

    Computed from the prime factorization: for each prime power p**e
    dividing n, multiply in (p - 1) * p**(e - 1).
    """
    assert isinstance(n, integer_types)

    if n < 3:
        return 1

    result = 1
    for prime, exponent in factorization(n):
        result *= (prime - 1) * prime ** (exponent - 1)
    return result
|
||||
|
||||
|
||||
def carmichael(n):
    """Return the Carmichael function of n.

    Carmichael(n) is the smallest integer x such that m**x == 1 (mod n)
    for every m relatively prime to n.
    """
    return carmichael_of_factorized(factorization(n))
|
||||
|
||||
|
||||
def carmichael_of_factorized(f_list):
    """Carmichael function of a number given as (prime, exponent) pairs.

    The result is the lcm of the Carmichael values of each prime power.
    """
    if not f_list:
        return 1

    result = carmichael_of_ppower(f_list[0])
    for pair in f_list[1:]:
        result = lcm(result, carmichael_of_ppower(pair))
    return result
|
||||
|
||||
def carmichael_of_ppower(pp):
    """Carmichael function of p**a for prime p, given as the pair (p, a)."""
    p, a = pp
    # Powers of two at or above 8 are the only exceptional case.
    if p == 2 and a > 2:
        return 2 ** (a - 2)
    return (p - 1) * p ** (a - 1)
|
||||
|
||||
|
||||
|
||||
def order_mod(x, m):
    """Return the order of x in the multiplicative group mod m.

    Requires gcd(x, m) == 1; returns 0 for m <= 1.  Brute-force: this
    will take a long time if m is very large.
    """
    if m <= 1:
        return 0

    assert gcd(x, m) == 1

    # Multiply by x until we cycle back to 1, counting the steps.
    acc = x
    order = 1
    while acc != 1:
        acc = (acc * x) % m
        order += 1
    return order
|
||||
|
||||
|
||||
def largest_factor_relatively_prime(a, b):
    """Return the largest factor of a relatively prime to b.

    Repeatedly divides out of a every prime it shares with b.
    """
    while True:
        d = gcd(a, b)
        if d <= 1:
            break
        b = d
        # Remove d from a as many times as it divides evenly.
        while True:
            q, r = divmod(a, d)
            if r > 0:
                break
            a = q
    return a
|
||||
|
||||
|
||||
def kinda_order_mod(x, m):
    """Order of x in the multiplicative group mod m', where m' is the
    largest factor of m relatively prime to x."""
    return order_mod(x, largest_factor_relatively_prime(m, x))
|
||||
|
||||
|
||||
def is_prime( n ):
    """Return True if x is prime, False otherwise.

    We use the Miller-Rabin test, as given in Menezes et al. p. 138.
    This test is not exact: there are composite values n for which
    it returns True.

    In testing the odd numbers from 10000001 to 19999999,
    about 66 composites got past the first test,
    5 got past the second test, and none got past the third.
    Since factors of 2, 3, 5, 7, and 11 were detected during
    preliminary screening, the number of numbers tested by
    Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7)
    = 4.57 million.
    """

    # (This is used to study the risk of false positives:)
    global miller_rabin_test_count

    miller_rabin_test_count = 0

    # Small n: answer by direct table lookup.
    if n <= smallprimes[-1]:
        if n in smallprimes: return True
        else: return False

    # Cheap pre-screen: reject multiples of 2, 3, 5, 7 and 11.
    if gcd( n, 2*3*5*7*11 ) != 1: return False

    # Choose a number of iterations sufficient to reduce the
    # probability of accepting a composite below 2**-80
    # (from Menezes et al. Table 4.4):

    t = 40
    n_bits = 1 + int( math.log( n, 2 ) )
    # Larger numbers need fewer rounds for the same error bound.
    for k, tt in ( ( 100, 27 ),
                   ( 150, 18 ),
                   ( 200, 15 ),
                   ( 250, 12 ),
                   ( 300, 9 ),
                   ( 350, 8 ),
                   ( 400, 7 ),
                   ( 450, 6 ),
                   ( 550, 5 ),
                   ( 650, 4 ),
                   ( 850, 3 ),
                   ( 1300, 2 ),
                   ):
        if n_bits < k: break
        t = tt

    # Run the test t times:

    # Write n - 1 as 2**s * r with r odd.
    s = 0
    r = n - 1
    while ( r % 2 ) == 0:
        s = s + 1
        r = r // 2
    for i in range( t ):
        # Bases are taken from the small-prime table rather than at random.
        a = smallprimes[ i ]
        y = modular_exp( a, r, n )
        if y != 1 and y != n-1:
            j = 1
            while j <= s - 1 and y != n - 1:
                y = modular_exp( y, 2, n )
                if y == 1:
                    # Nontrivial square root of 1 found: n is composite.
                    miller_rabin_test_count = i + 1
                    return False
                j = j + 1
            if y != n-1:
                miller_rabin_test_count = i + 1
                return False
    return True
|
||||
|
||||
|
||||
def next_prime(starting_value):
    """Return the smallest prime strictly larger than the starting value."""
    if starting_value < 2:
        return 2
    # Begin at the first odd number past starting_value and step by 2.
    candidate = (starting_value + 1) | 1
    while not is_prime(candidate):
        candidate += 2
    return candidate
|
||||
|
||||
|
||||
# All primes below 1230, used for trial division in factorization() and
# is_prime(), and as the fixed Miller-Rabin bases.
smallprimes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41,
               43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97,
               101, 103, 107, 109, 113, 127, 131, 137, 139, 149,
               151, 157, 163, 167, 173, 179, 181, 191, 193, 197,
               199, 211, 223, 227, 229, 233, 239, 241, 251, 257,
               263, 269, 271, 277, 281, 283, 293, 307, 311, 313,
               317, 331, 337, 347, 349, 353, 359, 367, 373, 379,
               383, 389, 397, 401, 409, 419, 421, 431, 433, 439,
               443, 449, 457, 461, 463, 467, 479, 487, 491, 499,
               503, 509, 521, 523, 541, 547, 557, 563, 569, 571,
               577, 587, 593, 599, 601, 607, 613, 617, 619, 631,
               641, 643, 647, 653, 659, 661, 673, 677, 683, 691,
               701, 709, 719, 727, 733, 739, 743, 751, 757, 761,
               769, 773, 787, 797, 809, 811, 821, 823, 827, 829,
               839, 853, 857, 859, 863, 877, 881, 883, 887, 907,
               911, 919, 929, 937, 941, 947, 953, 967, 971, 977,
               983, 991, 997, 1009, 1013, 1019, 1021, 1031, 1033,
               1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093,
               1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163,
               1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229]

# Set by is_prime(): one-based index of the Miller-Rabin round that
# rejected the last composite (0 when no Miller-Rabin round fired).
# Used only to study false-positive risk.
miller_rabin_test_count = 0
|
||||
|
||||
def __main__():
    """Self-test driver: exercises the functions above and raises
    FailedTest if any check fails."""

    # Making sure locally defined exceptions work:
    # p = modular_exp( 2, -2, 3 )
    # p = square_root_mod_prime( 2, 3 )

    print_("Testing gcd...")
    assert gcd( 3*5*7, 3*5*11, 3*5*13 ) == 3*5
    assert gcd( [ 3*5*7, 3*5*11, 3*5*13 ] ) == 3*5
    assert gcd( 3 ) == 3

    print_("Testing lcm...")
    assert lcm( 3, 5*3, 7*3 ) == 3*5*7
    assert lcm( [ 3, 5*3, 7*3 ] ) == 3*5*7
    assert lcm( 3 ) == 3

    print_("Testing next_prime...")
    # Consecutive primes just below one million; next_prime must walk
    # from each one exactly to its successor.
    bigprimes = ( 999671,
                  999683,
                  999721,
                  999727,
                  999749,
                  999763,
                  999769,
                  999773,
                  999809,
                  999853,
                  999863,
                  999883,
                  999907,
                  999917,
                  999931,
                  999953,
                  999959,
                  999961,
                  999979,
                  999983 )

    for i in range( len( bigprimes ) - 1 ):
        assert next_prime( bigprimes[i] ) == bigprimes[ i+1 ]

    error_tally = 0

    # Test the square_root_mod_prime function:

    for p in smallprimes:
        print_("Testing square_root_mod_prime for modulus p = %d." % p)
        squares = []

        # Every quadratic residue must round-trip through the root finder.
        for root in range( 0, 1+p//2 ):
            sq = ( root * root ) % p
            squares.append( sq )
            calculated = square_root_mod_prime( sq, p )
            if ( calculated * calculated ) % p != sq:
                error_tally = error_tally + 1
                print_("Failed to find %d as sqrt( %d ) mod %d. Said %d." % \
                      ( root, sq, p, calculated ))

        # Every non-residue must raise SquareRootError.
        for nonsquare in range( 0, p ):
            if nonsquare not in squares:
                try:
                    calculated = square_root_mod_prime( nonsquare, p )
                except SquareRootError:
                    pass
                else:
                    error_tally = error_tally + 1
                    print_("Failed to report no root for sqrt( %d ) mod %d." % \
                          ( nonsquare, p ))

    # Test the jacobi function:
    for m in range( 3, 400, 2 ):
        print_("Testing jacobi for modulus m = %d." % m)
        if is_prime( m ):
            # For prime m, jacobi is the Legendre symbol: +1 exactly on
            # the nonzero squares, -1 on everything else.
            squares = []
            for root in range( 1, m ):
                if jacobi( root * root, m ) != 1:
                    error_tally = error_tally + 1
                    print_("jacobi( %d * %d, %d ) != 1" % ( root, root, m ))
                squares.append( root * root % m )
            for i in range( 1, m ):
                if not i in squares:
                    if jacobi( i, m ) != -1:
                        error_tally = error_tally + 1
                        print_("jacobi( %d, %d ) != -1" % ( i, m ))
        else: # m is not prime.
            # Composite m: check multiplicativity over the factorization.
            f = factorization( m )
            for a in range( 1, m ):
                c = 1
                for i in f:
                    c = c * jacobi( a, i[0] ) ** i[1]
                if c != jacobi( a, m ):
                    error_tally = error_tally + 1
                    print_("%d != jacobi( %d, %d )" % ( c, a, m ))

    # Test the inverse_mod function:
    print_("Testing inverse_mod . . .")
    import random
    n_tests = 0
    for i in range( 100 ):
        m = random.randint( 20, 10000 )
        for j in range( 100 ):
            a = random.randint( 1, m-1 )
            if gcd( a, m ) == 1:
                n_tests = n_tests + 1
                inv = inverse_mod( a, m )
                if inv <= 0 or inv >= m or ( a * inv ) % m != 1:
                    error_tally = error_tally + 1
                    print_("%d = inverse_mod( %d, %d ) is wrong." % ( inv, a, m ))
    assert n_tests > 1000
    print_(n_tests, " tests of inverse_mod completed.")

    class FailedTest(Exception): pass
    print_(error_tally, "errors detected.")
    if error_tally != 0:
        raise FailedTest("%d errors detected" % error_tally)

if __name__ == '__main__':
    __main__()
|
||||
103
Lambdas/Websocket Authorizer/ecdsa/rfc6979.py
Normal file
103
Lambdas/Websocket Authorizer/ecdsa/rfc6979.py
Normal file
@@ -0,0 +1,103 @@
|
||||
'''
|
||||
RFC 6979:
|
||||
Deterministic Usage of the Digital Signature Algorithm (DSA) and
|
||||
Elliptic Curve Digital Signature Algorithm (ECDSA)
|
||||
|
||||
http://tools.ietf.org/html/rfc6979
|
||||
|
||||
Many thanks to Coda Hale for his implementation in Go language:
|
||||
https://github.com/codahale/rfc6979
|
||||
'''
|
||||
|
||||
import hmac
|
||||
from binascii import hexlify
|
||||
from .util import number_to_string, number_to_string_crop
|
||||
from .six import b
|
||||
|
||||
# Python 2.5 lacks the bin() builtin needed by bit_length(); provide a
# replacement built from a hex-digit lookup table.
try:
    bin(0)
except NameError:
    binmap = {"0": "0000", "1": "0001", "2": "0010", "3": "0011",
              "4": "0100", "5": "0101", "6": "0110", "7": "0111",
              "8": "1000", "9": "1001", "a": "1010", "b": "1011",
              "c": "1100", "d": "1101", "e": "1110", "f": "1111"}
    def bin(value): # for python2.5
        # Expand each hex digit of |value| to four bits, drop leading
        # zeros, and add the builtin-compatible "0b"/"-0b" prefix.
        v = "".join(binmap[x] for x in "%x"%abs(value)).lstrip("0")
        if value < 0:
            return "-0b" + v
        return "0b" + v
|
||||
|
||||
def bit_length(num):
    """Number of bits needed to represent abs(num).

    Mirrors int.bit_length(); see
    http://docs.python.org/dev/library/stdtypes.html#int.bit_length
    """
    # bin(-37) -> '-0b100101'; strip sign and prefix, count what remains.
    return len(bin(num).lstrip('-0b'))
|
||||
|
||||
def bits2int(data, qlen):
    """Return the leftmost qlen bits of the octet string `data` as an
    integer (RFC 6979, section 2.3.2)."""
    value = int(hexlify(data), 16)
    nbits = len(data) * 8

    # When the input carries more bits than qlen, keep only the most
    # significant qlen of them.
    if nbits > qlen:
        value >>= nbits - qlen
    return value
|
||||
|
||||
def bits2octets(data, order):
    """Convert a bit string to octets, reduced toward `order`
    (RFC 6979, section 2.3.4)."""
    reduced = bits2int(data, bit_length(order))
    # A single conditional subtraction brings the value below order.
    if reduced >= order:
        reduced -= order
    return number_to_string_crop(reduced, order)
|
||||
|
||||
# https://tools.ietf.org/html/rfc6979#section-3.2
|
||||
def generate_k(order, secexp, hash_func, data):
    '''
    Deterministically derive the DSA/ECDSA nonce k per RFC 6979, section 3.2.

    order - order of the DSA generator used in the signature
    secexp - secure exponent (private key) in numeric form
    hash_func - reference to the same hash function used for generating hash
    data - hash in binary form of the signing data

    Returns an integer k with 1 <= k < order.
    '''

    qlen = bit_length(order)
    holen = hash_func().digest_size
    # Octet length of the order. Must use floor division: this file does
    # not import __future__.division, so "/" would be true division under
    # Python 3 and make rolen a float.
    rolen = (qlen + 7) // 8
    bx = number_to_string(secexp, order) + bits2octets(data, order)

    # Step B
    v = b('\x01') * holen

    # Step C
    k = b('\x00') * holen

    # Step D
    k = hmac.new(k, v + b('\x00') + bx, hash_func).digest()

    # Step E
    v = hmac.new(k, v, hash_func).digest()

    # Step F
    k = hmac.new(k, v + b('\x01') + bx, hash_func).digest()

    # Step G
    v = hmac.new(k, v, hash_func).digest()

    # Step H: produce candidates until one falls in [1, order-1].
    while True:
        # Step H1
        t = b('')

        # Step H2
        while len(t) < rolen:
            v = hmac.new(k, v, hash_func).digest()
            t += v

        # Step H3
        secret = bits2int(t, qlen)

        if secret >= 1 and secret < order:
            return secret

        # Candidate out of range: re-key and try again.
        k = hmac.new(k, v + b('\x00'), hash_func).digest()
        v = hmac.new(k, v, hash_func).digest()
|
||||
394
Lambdas/Websocket Authorizer/ecdsa/six.py
Normal file
394
Lambdas/Websocket Authorizer/ecdsa/six.py
Normal file
@@ -0,0 +1,394 @@
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
# Copyright (c) 2010-2012 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
# this software and associated documentation files (the "Software"), to deal in
|
||||
# the Software without restriction, including without limitation the rights to
|
||||
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
# the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.2.0"
|
||||
|
||||
|
||||
# True if we are running on Python 3.
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
if sys.platform.startswith("java"):
|
||||
# Jython always uses 32 bits.
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
len(X())
|
||||
except OverflowError:
|
||||
# 32-bit
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# 64-bit
|
||||
MAXSIZE = int((1 << 63) - 1)
|
||||
del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result)
|
||||
# This is a bit ugly, but it avoids running this again.
|
||||
delattr(tp, self.name)
|
||||
return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old, new=None):
|
||||
super(MovedModule, self).__init__(name)
|
||||
if PY3:
|
||||
if new is None:
|
||||
new = name
|
||||
self.mod = new
|
||||
else:
|
||||
self.mod = old
|
||||
|
||||
def _resolve(self):
|
||||
return _import_module(self.mod)
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||
super(MovedAttribute, self).__init__(name)
|
||||
if PY3:
|
||||
if new_mod is None:
|
||||
new_mod = name
|
||||
self.mod = new_mod
|
||||
if new_attr is None:
|
||||
if old_attr is None:
|
||||
new_attr = name
|
||||
else:
|
||||
new_attr = old_attr
|
||||
self.attr = new_attr
|
||||
else:
|
||||
self.mod = old_mod
|
||||
if old_attr is None:
|
||||
old_attr = name
|
||||
self.attr = old_attr
|
||||
|
||||
def _resolve(self):
|
||||
module = _import_module(self.mod)
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
|
||||
class _MovedItems(types.ModuleType):
|
||||
"""Lazy loading of moved objects"""
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
MovedModule("cPickle", "cPickle", "pickle"),
|
||||
MovedModule("queue", "Queue"),
|
||||
MovedModule("reprlib", "repr"),
|
||||
MovedModule("socketserver", "SocketServer"),
|
||||
MovedModule("tkinter", "Tkinter"),
|
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||
"tkinter.colorchooser"),
|
||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||
"tkinter.commondialog"),
|
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
del attr
|
||||
|
||||
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
"""Add an item to six.moves."""
|
||||
setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
|
||||
"""Remove item from six.moves."""
|
||||
try:
|
||||
delattr(_MovedItems, name)
|
||||
except AttributeError:
|
||||
try:
|
||||
del moves.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
if PY3:
|
||||
_meth_func = "__func__"
|
||||
_meth_self = "__self__"
|
||||
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
|
||||
_iterkeys = "keys"
|
||||
_itervalues = "values"
|
||||
_iteritems = "items"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
|
||||
_func_code = "func_code"
|
||||
_func_defaults = "func_defaults"
|
||||
|
||||
_iterkeys = "iterkeys"
|
||||
_itervalues = "itervalues"
|
||||
_iteritems = "iteritems"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
except NameError:
|
||||
def advance_iterator(it):
|
||||
return it.next()
|
||||
next = advance_iterator
|
||||
|
||||
|
||||
try:
|
||||
callable = callable
|
||||
except NameError:
|
||||
def callable(obj):
|
||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||
|
||||
|
||||
if PY3:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound
|
||||
|
||||
Iterator = object
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound.im_func
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
return type(self).__next__(self)
|
||||
|
||||
callable = callable
|
||||
_add_doc(get_unbound_function,
|
||||
"""Get the function out of a possibly unbound function""")
|
||||
|
||||
|
||||
get_method_function = operator.attrgetter(_meth_func)
|
||||
get_method_self = operator.attrgetter(_meth_self)
|
||||
get_function_code = operator.attrgetter(_func_code)
|
||||
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
|
||||
|
||||
def iterkeys(d):
|
||||
"""Return an iterator over the keys of a dictionary."""
|
||||
return iter(getattr(d, _iterkeys)())
|
||||
|
||||
def itervalues(d):
|
||||
"""Return an iterator over the values of a dictionary."""
|
||||
return iter(getattr(d, _itervalues)())
|
||||
|
||||
def iteritems(d):
|
||||
"""Return an iterator over the (key, value) pairs of a dictionary."""
|
||||
return iter(getattr(d, _iteritems)())
|
||||
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
def u(s):
|
||||
return s
|
||||
if sys.version_info[1] <= 1:
|
||||
def int2byte(i):
|
||||
return bytes((i,))
|
||||
else:
|
||||
# This is about 2x faster than the implementation above on 3.2+
|
||||
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
else:
|
||||
def b(s):
|
||||
return s
|
||||
def u(s):
|
||||
if isinstance(s, unicode):
|
||||
return s
|
||||
return unicode(s, "unicode_escape")
|
||||
int2byte = chr
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
if PY3:
    # Python 3: exec and print are ordinary builtins; alias them directly.
    import builtins
    exec_ = getattr(builtins, "exec")

    def reraise(tp, value, tb=None):
        # Attach the given traceback (if it is not already the active one)
        # before re-raising, mirroring Python 2's three-argument raise.
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    print_ = getattr(builtins, "print")
    del builtins

else:
    # Python 2: exec is a statement, so wrap it in a function.  The inner
    # exec of a string keeps this file importable on Python 3, where the
    # statement form would be a syntax error.
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals, like the statement.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    # Define reraise via exec_ because "raise tp, value, tb" is
    # Python-2-only syntax and must not be parsed on Python 3.
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")

    def print_(*args, **kwargs):
        """The new-style print function."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return
        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            fp.write(data)
        # Emit unicode only when any argument/separator/end is unicode,
        # matching the behaviour of Python 3's print().
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)

_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
|
||||
def with_metaclass(meta, base=object):
    """Create a base class with a metaclass."""
    # Invoking the metaclass directly yields a class that Python 2 and
    # Python 3 subclasses can both inherit from, sidestepping the
    # incompatible __metaclass__ / "metaclass=" syntaxes.
    name = "NewBase"
    bases = (base,)
    return meta(name, bases, {})
|
||||
88
Lambdas/Websocket Authorizer/ecdsa/test_der.py
Normal file
88
Lambdas/Websocket Authorizer/ecdsa/test_der.py
Normal file
@@ -0,0 +1,88 @@
|
||||
|
||||
# compatibility with Python 2.6, for that we need unittest2 package,
|
||||
# which is not available on 3.3 or 3.4
|
||||
try:
|
||||
import unittest2 as unittest
|
||||
except ImportError:
|
||||
import unittest
|
||||
from .der import remove_integer, UnexpectedDER, read_length
|
||||
from .six import b
|
||||
|
||||
class TestRemoveInteger(unittest.TestCase):
    """Decoding of DER INTEGERs, including rejection of malformed input.

    DER requires the integers to be 0-padded only if they would be
    interpreted as negative; check that violations are detected.
    """

    def _assert_rejected(self, encoding):
        # The given DER bytes must be refused with UnexpectedDER.
        self.assertRaises(UnexpectedDER, remove_integer, encoding)

    def test_non_minimal_encoding(self):
        self._assert_rejected(b('\x02\x02\x00\x01'))

    def test_negative_with_high_bit_set(self):
        self._assert_rejected(b('\x02\x01\x80'))

    def test_two_zero_bytes_with_high_bit_set(self):
        self._assert_rejected(b('\x02\x03\x00\x00\xff'))

    def test_zero_length_integer(self):
        self._assert_rejected(b('\x02\x00'))

    def test_empty_string(self):
        self._assert_rejected(b(''))

    def test_encoding_of_zero(self):
        value, remainder = remove_integer(b('\x02\x01\x00'))
        self.assertEqual(value, 0)
        self.assertFalse(remainder)

    def test_encoding_of_127(self):
        value, remainder = remove_integer(b('\x02\x01\x7f'))
        self.assertEqual(value, 127)
        self.assertFalse(remainder)

    def test_encoding_of_128(self):
        value, remainder = remove_integer(b('\x02\x02\x00\x80'))
        self.assertEqual(value, 128)
        self.assertFalse(remainder)
|
||||
|
||||
|
||||
class TestReadLength(unittest.TestCase):
    """Decoding of DER length octets.

    DER requires lengths between 0 and 127 to use the short form and
    larger lengths to be encoded with the minimal number of bytes
    necessary; check that non-canonical encodings are rejected.
    """

    def _assert_rejected(self, encoding):
        # The given length octets must be refused with UnexpectedDER.
        self.assertRaises(UnexpectedDER, read_length, encoding)

    def test_zero_length(self):
        self.assertEqual((0, 1), read_length(b('\x00')))

    def test_two_byte_zero_length(self):
        self._assert_rejected(b('\x81\x00'))

    def test_two_byte_small_length(self):
        self._assert_rejected(b('\x81\x7f'))

    def test_long_form_with_zero_length(self):
        self._assert_rejected(b('\x80'))

    def test_smallest_two_byte_length(self):
        self.assertEqual((128, 2), read_length(b('\x81\x80')))

    def test_zero_padded_length(self):
        self._assert_rejected(b('\x82\x00\x80'))

    def test_two_three_byte_length(self):
        self.assertEqual((256, 3), read_length(b'\x82\x01\x00'))

    def test_empty_string(self):
        self._assert_rejected(b(''))

    def test_length_overflow(self):
        self._assert_rejected(b('\x83\x01\x00'))
|
||||
87
Lambdas/Websocket Authorizer/ecdsa/test_malformed_sigs.py
Normal file
87
Lambdas/Websocket Authorizer/ecdsa/test_malformed_sigs.py
Normal file
@@ -0,0 +1,87 @@
|
||||
from __future__ import with_statement, division
|
||||
|
||||
import pytest
|
||||
import hashlib
|
||||
|
||||
from .six import b, binary_type
|
||||
from .keys import SigningKey, VerifyingKey
|
||||
from .keys import BadSignatureError
|
||||
from .util import sigencode_der, sigencode_string
|
||||
from .util import sigdecode_der, sigdecode_string
|
||||
from .curves import curves, NIST256p, NIST521p
|
||||
|
||||
# Build the parametrization list for the DER fuzzing test: one case per
# (byte position, single-bit flip) over a valid DER signature.
der_sigs = []
example_data = b("some data to sign")

# Just NIST256p with SHA256 is 560 test cases, all curves with all hashes is
# few thousand slow test cases; execute the most interesting only

#for curve in curves:
for curve in [NIST521p]:
    #for hash_alg in ["md5", "sha1", "sha224", "sha256", "sha384", "sha512"]:
    for hash_alg in ["sha256"]:
        key = SigningKey.generate(curve)
        signature = key.sign(example_data, hashfunc=getattr(hashlib, hash_alg),
                             sigencode=sigencode_der)
        # every byte position ...
        for pos in range(len(signature)):
            # ... with every possible single-bit XOR mask
            for xor in (1<<i for i in range(8)):
                der_sigs.append(pytest.param(
                    key.verifying_key, hash_alg,
                    signature, pos, xor,
                    id="{0}-{1}-pos-{2}-xor-{3}".format(
                        curve.name, hash_alg, pos, xor)))
|
||||
|
||||
|
||||
@pytest.mark.parametrize("verifying_key,hash_alg,signature,pos,xor", der_sigs)
def test_fuzzed_der_signatures(verifying_key, hash_alg, signature, pos, xor):
    """A bit-flipped DER signature must always raise BadSignatureError.

    Checks that a malformed DER encoded signature causes the same
    exception to be raised irrespective of the type of error.
    """
    # corrupt one bit of the otherwise-valid signature
    sig = bytearray(signature)
    sig[pos] ^= xor
    sig = binary_type(sig)

    # pytest.raises fails the test with a clear message if no exception
    # (or a different exception type) is raised, unlike the original
    # try / assert False / except idiom whose failure was an opaque
    # bare "assert False".
    with pytest.raises(BadSignatureError):
        verifying_key.verify(sig, example_data, getattr(hashlib, hash_alg),
                             sigdecode_der)
|
||||
|
||||
|
||||
####
#
# For string encoded signatures, only the length of string is important
#
####

# Build the parametrization list for the truncation test: one case per
# possible truncation point of a valid string-encoded signature.
str_sigs = []

#for curve in curves:
for curve in [NIST256p]:
    #for hash_alg in ["md5", "sha1", "sha224", "sha256", "sha384", "sha512"]:
    for hash_alg in ["sha256"]:
        key = SigningKey.generate(curve)
        signature = key.sign(example_data, hashfunc=getattr(hashlib, hash_alg),
                             sigencode=sigencode_string)
        for trunc in range(len(signature)):
            str_sigs.append(pytest.param(
                key.verifying_key, hash_alg,
                signature, trunc,
                id="{0}-{1}-trunc-{2}".format(
                    curve.name, hash_alg, trunc)))
|
||||
|
||||
|
||||
@pytest.mark.parametrize("verifying_key,hash_alg,signature,trunc", str_sigs)
def test_truncated_string_signatures(verifying_key, hash_alg, signature, trunc):
    """A truncated string-encoded signature must raise BadSignatureError.

    Checks that a malformed string encoded signature causes the same
    exception to be raised irrespective of where it was truncated.
    """
    sig = bytearray(signature)
    sig = sig[:trunc]
    sig = binary_type(sig)

    # pytest.raises fails the test with a clear message if no exception
    # (or a different exception type) is raised, unlike the original
    # try / assert False / except idiom whose failure was an opaque
    # bare "assert False".
    with pytest.raises(BadSignatureError):
        verifying_key.verify(sig, example_data, getattr(hashlib, hash_alg),
                             sigdecode_string)
|
||||
832
Lambdas/Websocket Authorizer/ecdsa/test_pyecdsa.py
Normal file
832
Lambdas/Websocket Authorizer/ecdsa/test_pyecdsa.py
Normal file
@@ -0,0 +1,832 @@
|
||||
from __future__ import with_statement, division
|
||||
|
||||
try:
|
||||
import unittest2 as unittest
|
||||
except ImportError:
|
||||
import unittest
|
||||
import os
|
||||
import time
|
||||
import shutil
|
||||
import subprocess
|
||||
from binascii import hexlify, unhexlify
|
||||
from hashlib import sha1, sha256, sha512
|
||||
|
||||
from .six import b, print_, binary_type
|
||||
from .keys import SigningKey, VerifyingKey
|
||||
from .keys import BadSignatureError, MalformedPointError, BadDigestError
|
||||
from . import util
|
||||
from .util import sigencode_der, sigencode_strings
|
||||
from .util import sigdecode_der, sigdecode_strings
|
||||
from .util import encoded_oid_ecPublicKey, MalformedSignature
|
||||
from .curves import Curve, UnknownCurveError
|
||||
from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1
|
||||
from .ellipticcurve import Point
|
||||
from . import der
|
||||
from . import rfc6979
|
||||
from . import ecdsa
|
||||
|
||||
class SubprocessError(Exception):
    """Raised by run_openssl when an openssl command exits non-zero."""
|
||||
|
||||
def run_openssl(cmd):
    """Run an openssl subcommand and return its combined stdout/stderr.

    `cmd` is the argument string passed to the `openssl` binary (it is
    split on whitespace).  Raises SubprocessError when the process exits
    with a non-zero status; otherwise returns the decoded output text.
    """
    OPENSSL = "openssl"
    argv = [OPENSSL] + cmd.split()
    # merge stderr into stdout so error text is captured in one stream
    proc = subprocess.Popen(argv,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    output, _ = proc.communicate()
    if proc.returncode != 0:
        raise SubprocessError("cmd '%s %s' failed: rc=%s, stdout/err was %s" %
                              (OPENSSL, cmd, proc.returncode, output))
    return output.decode()
|
||||
|
||||
BENCH = False
|
||||
|
||||
class ECDSA(unittest.TestCase):
    """End-to-end tests of key generation, serialization, signing and
    verification, plus rejection of malformed keys, points and DER input.
    """

    def test_basic(self):
        priv = SigningKey.generate()
        pub = priv.get_verifying_key()

        data = b("blahblah")
        sig = priv.sign(data)

        self.assertTrue(pub.verify(sig, data))
        self.assertRaises(BadSignatureError, pub.verify, sig, data+b("bad"))

        pub2 = VerifyingKey.from_string(pub.to_string())
        self.assertTrue(pub2.verify(sig, data))

    def test_deterministic(self):
        # RFC 6979 deterministic signing: the same key/data/hash must
        # produce the same signature, whether k is supplied explicitly
        # or derived by sign_deterministic.
        data = b("blahblah")
        secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16)

        priv = SigningKey.from_secret_exponent(secexp, SECP256k1, sha256)
        pub = priv.get_verifying_key()

        k = rfc6979.generate_k(
            SECP256k1.generator.order(), secexp, sha256, sha256(data).digest())

        sig1 = priv.sign(data, k=k)
        self.assertTrue(pub.verify(sig1, data))

        sig2 = priv.sign(data, k=k)
        self.assertTrue(pub.verify(sig2, data))

        sig3 = priv.sign_deterministic(data, sha256)
        self.assertTrue(pub.verify(sig3, data))

        self.assertEqual(sig1, sig2)
        self.assertEqual(sig1, sig3)

    def test_bad_usage(self):
        # sk=SigningKey() is wrong
        self.assertRaises(TypeError, SigningKey)
        self.assertRaises(TypeError, VerifyingKey)

    def test_lengths(self):
        # key and signature byte lengths must match the curve's declared
        # sizes; with BENCH set, also print rough timing figures
        default = NIST192p
        priv = SigningKey.generate()
        pub = priv.get_verifying_key()
        self.assertEqual(len(pub.to_string()), default.verifying_key_length)
        sig = priv.sign(b("data"))
        self.assertEqual(len(sig), default.signature_length)
        if BENCH:
            print_()
        for curve in (NIST192p, NIST224p, NIST256p, NIST384p, NIST521p):
            start = time.time()
            priv = SigningKey.generate(curve=curve)
            pub1 = priv.get_verifying_key()
            keygen_time = time.time() - start
            pub2 = VerifyingKey.from_string(pub1.to_string(), curve)
            self.assertEqual(pub1.to_string(), pub2.to_string())
            self.assertEqual(len(pub1.to_string()),
                             curve.verifying_key_length)
            start = time.time()
            sig = priv.sign(b("data"))
            sign_time = time.time() - start
            self.assertEqual(len(sig), curve.signature_length)
            if BENCH:
                start = time.time()
                pub1.verify(sig, b("data"))
                verify_time = time.time() - start
                print_("%s: siglen=%d, keygen=%0.3fs, sign=%0.3f, verify=%0.3f" \
                       % (curve.name, curve.signature_length,
                          keygen_time, sign_time, verify_time))

    def test_serialize(self):
        # the same seed must deterministically yield the same key and
        # interchangeable signatures across the two derived keys
        seed = b("secret")
        curve = NIST192p
        secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order)
        secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order)
        self.assertEqual(secexp1, secexp2)
        priv1 = SigningKey.from_secret_exponent(secexp1, curve)
        priv2 = SigningKey.from_secret_exponent(secexp2, curve)
        self.assertEqual(hexlify(priv1.to_string()),
                         hexlify(priv2.to_string()))
        self.assertEqual(priv1.to_pem(), priv2.to_pem())
        pub1 = priv1.get_verifying_key()
        pub2 = priv2.get_verifying_key()
        data = b("data")
        sig1 = priv1.sign(data)
        sig2 = priv2.sign(data)
        self.assertTrue(pub1.verify(sig1, data))
        self.assertTrue(pub2.verify(sig1, data))
        self.assertTrue(pub1.verify(sig2, data))
        self.assertTrue(pub2.verify(sig2, data))
        self.assertEqual(hexlify(pub1.to_string()),
                         hexlify(pub2.to_string()))

    def test_nonrandom(self):
        s = b("all the entropy in the entire world, compressed into one line")
        def not_much_entropy(numbytes):
            return s[:numbytes]
        # we control the entropy source, these two keys should be identical:
        priv1 = SigningKey.generate(entropy=not_much_entropy)
        priv2 = SigningKey.generate(entropy=not_much_entropy)
        self.assertEqual(hexlify(priv1.get_verifying_key().to_string()),
                         hexlify(priv2.get_verifying_key().to_string()))
        # likewise, signatures should be identical. Obviously you'd never
        # want to do this with keys you care about, because the secrecy of
        # the private key depends upon using different random numbers for
        # each signature
        sig1 = priv1.sign(b("data"), entropy=not_much_entropy)
        sig2 = priv2.sign(b("data"), entropy=not_much_entropy)
        self.assertEqual(hexlify(sig1), hexlify(sig2))

    def assertTruePrivkeysEqual(self, priv1, priv2):
        # custom assertion: two signing keys are equal when both the
        # secret multiplier and the generator match
        self.assertEqual(priv1.privkey.secret_multiplier,
                         priv2.privkey.secret_multiplier)
        self.assertEqual(priv1.privkey.public_key.generator,
                         priv2.privkey.public_key.generator)

    def failIfPrivkeysEqual(self, priv1, priv2):
        # NOTE: failIfEqual is the legacy unittest alias of assertNotEqual
        self.failIfEqual(priv1.privkey.secret_multiplier,
                         priv2.privkey.secret_multiplier)

    def test_privkey_creation(self):
        s = b("all the entropy in the entire world, compressed into one line")
        def not_much_entropy(numbytes):
            return s[:numbytes]
        priv1 = SigningKey.generate()
        self.assertEqual(priv1.baselen, NIST192p.baselen)

        priv1 = SigningKey.generate(curve=NIST224p)
        self.assertEqual(priv1.baselen, NIST224p.baselen)

        priv1 = SigningKey.generate(entropy=not_much_entropy)
        self.assertEqual(priv1.baselen, NIST192p.baselen)
        priv2 = SigningKey.generate(entropy=not_much_entropy)
        self.assertEqual(priv2.baselen, NIST192p.baselen)
        self.assertTruePrivkeysEqual(priv1, priv2)

        priv1 = SigningKey.from_secret_exponent(secexp=3)
        self.assertEqual(priv1.baselen, NIST192p.baselen)
        priv2 = SigningKey.from_secret_exponent(secexp=3)
        self.assertTruePrivkeysEqual(priv1, priv2)

        priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p)
        self.assertEqual(priv1.baselen, NIST224p.baselen)

    def test_privkey_strings(self):
        # round-trip the private key through raw-string, PEM and DER forms
        priv1 = SigningKey.generate()
        s1 = priv1.to_string()
        self.assertEqual(type(s1), binary_type)
        self.assertEqual(len(s1), NIST192p.baselen)
        priv2 = SigningKey.from_string(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        s1 = priv1.to_pem()
        self.assertEqual(type(s1), binary_type)
        self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
        self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
        priv2 = SigningKey.from_pem(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        s1 = priv1.to_der()
        self.assertEqual(type(s1), binary_type)
        priv2 = SigningKey.from_der(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        priv1 = SigningKey.generate(curve=NIST256p)
        s1 = priv1.to_pem()
        self.assertEqual(type(s1), binary_type)
        self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
        self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
        priv2 = SigningKey.from_pem(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        s1 = priv1.to_der()
        self.assertEqual(type(s1), binary_type)
        priv2 = SigningKey.from_der(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

    def assertTruePubkeysEqual(self, pub1, pub2):
        # custom assertion: verifying keys match on point, generator, curve
        self.assertEqual(pub1.pubkey.point, pub2.pubkey.point)
        self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator)
        self.assertEqual(pub1.curve, pub2.curve)

    def test_pubkey_strings(self):
        # round-trip the public key through raw-string, DER and PEM forms,
        # and reject trailing junk / unknown curves on decode
        priv1 = SigningKey.generate()
        pub1 = priv1.get_verifying_key()
        s1 = pub1.to_string()
        self.assertEqual(type(s1), binary_type)
        self.assertEqual(len(s1), NIST192p.verifying_key_length)
        pub2 = VerifyingKey.from_string(s1)
        self.assertTruePubkeysEqual(pub1, pub2)

        priv1 = SigningKey.generate(curve=NIST256p)
        pub1 = priv1.get_verifying_key()
        s1 = pub1.to_string()
        self.assertEqual(type(s1), binary_type)
        self.assertEqual(len(s1), NIST256p.verifying_key_length)
        pub2 = VerifyingKey.from_string(s1, curve=NIST256p)
        self.assertTruePubkeysEqual(pub1, pub2)

        pub1_der = pub1.to_der()
        self.assertEqual(type(pub1_der), binary_type)
        pub2 = VerifyingKey.from_der(pub1_der)
        self.assertTruePubkeysEqual(pub1, pub2)

        self.assertRaises(der.UnexpectedDER,
                          VerifyingKey.from_der, pub1_der+b("junk"))
        badpub = VerifyingKey.from_der(pub1_der)
        class FakeGenerator:
            def order(self): return 123456789
        badcurve = Curve("unknown", None, FakeGenerator(), (1,2,3,4,5,6), None)
        badpub.curve = badcurve
        badder = badpub.to_der()
        self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder)

        pem = pub1.to_pem()
        self.assertEqual(type(pem), binary_type)
        self.assertTrue(pem.startswith(b("-----BEGIN PUBLIC KEY-----")), pem)
        self.assertTrue(pem.strip().endswith(b("-----END PUBLIC KEY-----")), pem)
        pub2 = VerifyingKey.from_pem(pem)
        self.assertTruePubkeysEqual(pub1, pub2)

    def test_vk_from_der_garbage_after_curve_oid(self):
        type_oid_der = encoded_oid_ecPublicKey
        curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1)) + \
                        b('garbage')
        enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
        point_der = der.encode_bitstring(b'\x00\xff')
        to_decode = der.encode_sequence(enc_type_der, point_der)

        with self.assertRaises(der.UnexpectedDER):
            VerifyingKey.from_der(to_decode)

    def test_vk_from_der_invalid_key_type(self):
        # OID (1, 2, 3) is not id-ecPublicKey, so decoding must fail
        type_oid_der = der.encode_oid(*(1, 2, 3))
        curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
        enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
        point_der = der.encode_bitstring(b'\x00\xff')
        to_decode = der.encode_sequence(enc_type_der, point_der)

        with self.assertRaises(der.UnexpectedDER):
            VerifyingKey.from_der(to_decode)

    def test_vk_from_der_garbage_after_point_string(self):
        type_oid_der = encoded_oid_ecPublicKey
        curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
        enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
        point_der = der.encode_bitstring(b'\x00\xff') + b('garbage')
        to_decode = der.encode_sequence(enc_type_der, point_der)

        with self.assertRaises(der.UnexpectedDER):
            VerifyingKey.from_der(to_decode)

    def test_vk_from_der_invalid_bitstring(self):
        # 0x08 unused-bits prefix is out of range (must be 0-7)
        type_oid_der = encoded_oid_ecPublicKey
        curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
        enc_type_der = der.encode_sequence(type_oid_der, curve_oid_der)
        point_der = der.encode_bitstring(b'\x08\xff')
        to_decode = der.encode_sequence(enc_type_der, point_der)

        with self.assertRaises(der.UnexpectedDER):
            VerifyingKey.from_der(to_decode)

    def test_signature_strings(self):
        # exercise all three signature encodings: raw string, two-string
        # tuple, and DER
        priv1 = SigningKey.generate()
        pub1 = priv1.get_verifying_key()
        data = b("data")

        sig = priv1.sign(data)
        self.assertEqual(type(sig), binary_type)
        self.assertEqual(len(sig), NIST192p.signature_length)
        self.assertTrue(pub1.verify(sig, data))

        sig = priv1.sign(data, sigencode=sigencode_strings)
        self.assertEqual(type(sig), tuple)
        self.assertEqual(len(sig), 2)
        self.assertEqual(type(sig[0]), binary_type)
        self.assertEqual(type(sig[1]), binary_type)
        self.assertEqual(len(sig[0]), NIST192p.baselen)
        self.assertEqual(len(sig[1]), NIST192p.baselen)
        self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings))

        sig_der = priv1.sign(data, sigencode=sigencode_der)
        self.assertEqual(type(sig_der), binary_type)
        self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der))

    def test_sig_decode_strings_with_invalid_count(self):
        with self.assertRaises(MalformedSignature):
            sigdecode_strings([b('one'), b('two'), b('three')], 0xff)

    def test_sig_decode_strings_with_wrong_r_len(self):
        with self.assertRaises(MalformedSignature):
            sigdecode_strings([b('one'), b('two')], 0xff)

    def test_sig_decode_strings_with_wrong_s_len(self):
        with self.assertRaises(MalformedSignature):
            sigdecode_strings([b('\xa0'), b('\xb0\xff')], 0xff)

    def test_verify_with_too_long_input(self):
        sk = SigningKey.generate()
        vk = sk.verifying_key

        with self.assertRaises(BadDigestError):
            vk.verify_digest(None, b('\x00') * 128)

    def test_sk_from_secret_exponent_with_wrong_sec_exponent(self):
        # zero is outside the valid secret-exponent range
        with self.assertRaises(MalformedPointError):
            SigningKey.from_secret_exponent(0)

    def test_sk_from_string_with_wrong_len_string(self):
        with self.assertRaises(MalformedPointError):
            SigningKey.from_string(b('\x01'))

    def test_sk_from_der_with_junk_after_sequence(self):
        ver_der = der.encode_integer(1)
        to_decode = der.encode_sequence(ver_der) + b('garbage')

        with self.assertRaises(der.UnexpectedDER):
            SigningKey.from_der(to_decode)

    def test_sk_from_der_with_wrong_version(self):
        # EC private key structures use version 1; version 0 must be refused
        ver_der = der.encode_integer(0)
        to_decode = der.encode_sequence(ver_der)

        with self.assertRaises(der.UnexpectedDER):
            SigningKey.from_der(to_decode)

    def test_sk_from_der_invalid_const_tag(self):
        # constructed tag 1 where tag 0 (curve parameters) is expected
        ver_der = der.encode_integer(1)
        privkey_der = der.encode_octet_string(b('\x00\xff'))
        curve_oid_der = der.encode_oid(*(1, 2, 3))
        const_der = der.encode_constructed(1, curve_oid_der)
        to_decode = der.encode_sequence(ver_der, privkey_der, const_der,
                                        curve_oid_der)

        with self.assertRaises(der.UnexpectedDER):
            SigningKey.from_der(to_decode)

    def test_sk_from_der_garbage_after_privkey_oid(self):
        ver_der = der.encode_integer(1)
        privkey_der = der.encode_octet_string(b('\x00\xff'))
        curve_oid_der = der.encode_oid(*(1, 2, 3)) + b('garbage')
        const_der = der.encode_constructed(0, curve_oid_der)
        to_decode = der.encode_sequence(ver_der, privkey_der, const_der,
                                        curve_oid_der)

        with self.assertRaises(der.UnexpectedDER):
            SigningKey.from_der(to_decode)

    def test_sk_from_der_with_short_privkey(self):
        # a short octet string is acceptable: 0x00ff decodes to 255
        ver_der = der.encode_integer(1)
        privkey_der = der.encode_octet_string(b('\x00\xff'))
        curve_oid_der = der.encode_oid(*(1, 2, 840, 10045, 3, 1, 1))
        const_der = der.encode_constructed(0, curve_oid_der)
        to_decode = der.encode_sequence(ver_der, privkey_der, const_der,
                                        curve_oid_der)

        sk = SigningKey.from_der(to_decode)
        self.assertEqual(sk.privkey.secret_multiplier, 255)

    def test_sign_with_too_long_hash(self):
        sk = SigningKey.from_secret_exponent(12)

        with self.assertRaises(BadDigestError):
            sk.sign_digest(b('\xff') * 64)

    def test_hashfunc(self):
        # hashfunc can be bound at key creation or supplied per-call;
        # both must interoperate
        sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256)
        data = b("security level is 128 bits")
        sig = sk.sign(data)
        vk = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
                                      curve=NIST256p, hashfunc=sha256)
        self.assertTrue(vk.verify(sig, data))

        sk2 = SigningKey.generate(curve=NIST256p)
        sig2 = sk2.sign(data, hashfunc=sha256)
        vk2 = VerifyingKey.from_string(sk2.get_verifying_key().to_string(),
                                       curve=NIST256p, hashfunc=sha256)
        self.assertTrue(vk2.verify(sig2, data))

        vk3 = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
                                       curve=NIST256p)
        self.assertTrue(vk3.verify(sig, data, hashfunc=sha256))

    def test_decoding_with_malformed_uncompressed(self):
        enc = b('\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3'
                '\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4'
                'z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*')

        with self.assertRaises(MalformedPointError):
            VerifyingKey.from_string(b('\x02') + enc)

    def test_decoding_with_point_not_on_curve(self):
        enc = b('\x0c\xe0\x1d\xe0d\x1c\x8eS\x8a\xc0\x9eK\xa8x !\xd5\xc2\xc3'
                '\xfd\xc8\xa0c\xff\xfb\x02\xb9\xc4\x84)\x1a\x0f\x8b\x87\xa4'
                'z\x8a#\xb5\x97\xecO\xb6\xa0HQ\x89*')

        with self.assertRaises(MalformedPointError):
            VerifyingKey.from_string(enc[:47] + b('\x00'))

    def test_decoding_with_point_at_infinity(self):
        # decoding it is unsupported, as it's not necessary to encode it
        with self.assertRaises(MalformedPointError):
            VerifyingKey.from_string(b('\x00'))

    def test_from_string_with_invalid_curve_too_short_ver_key_len(self):
        # both verifying_key_length and baselen are calculated internally
        # by the Curve constructor, but since we depend on them verify
        # that inconsistent values are detected
        curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2))
        curve.verifying_key_length = 16
        curve.baselen = 32

        with self.assertRaises(MalformedPointError):
            VerifyingKey.from_string(b('\x00')*16, curve)

    def test_from_string_with_invalid_curve_too_long_ver_key_len(self):
        # both verifying_key_length and baselen are calculated internally
        # by the Curve constructor, but since we depend on them verify
        # that inconsistent values are detected
        curve = Curve("test", ecdsa.curve_192, ecdsa.generator_192, (1, 2))
        curve.verifying_key_length = 16
        curve.baselen = 16

        with self.assertRaises(MalformedPointError):
            VerifyingKey.from_string(b('\x00')*16, curve)
|
||||
|
||||
|
||||
class OpenSSL(unittest.TestCase):
|
||||
# test interoperability with OpenSSL tools. Note that openssl's ECDSA
|
||||
# sign/verify arguments changed between 0.9.8 and 1.0.0: the early
|
||||
# versions require "-ecdsa-with-SHA1", the later versions want just
|
||||
# "-SHA1" (or to leave out that argument entirely, which means the
|
||||
# signature will use some default digest algorithm, probably determined
|
||||
# by the key, probably always SHA1).
|
||||
#
|
||||
# openssl ecparam -name secp224r1 -genkey -out privkey.pem
|
||||
# openssl ec -in privkey.pem -text -noout # get the priv/pub keys
|
||||
# openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt
|
||||
# openssl asn1parse -in data.sig -inform DER
|
||||
# data.sig is 64 bytes, probably 56b plus ASN1 overhead
|
||||
# openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $?
|
||||
# openssl ec -in privkey.pem -pubout -out pubkey.pem
|
||||
# openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der
|
||||
|
||||
def get_openssl_messagedigest_arg(self):
|
||||
v = run_openssl("version")
|
||||
# e.g. "OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010",
|
||||
# or "OpenSSL 0.9.8o 01 Jun 2010"
|
||||
vs = v.split()[1].split(".")
|
||||
if vs >= ["1","0","0"]:
|
||||
return "-SHA1"
|
||||
else:
|
||||
return "-ecdsa-with-SHA1"
|
||||
|
||||
# sk: 1:OpenSSL->python 2:python->OpenSSL
|
||||
# vk: 3:OpenSSL->python 4:python->OpenSSL
|
||||
# sig: 5:OpenSSL->python 6:python->OpenSSL
|
||||
|
||||
def test_from_openssl_nist192p(self):
|
||||
return self.do_test_from_openssl(NIST192p)
|
||||
def test_from_openssl_nist224p(self):
|
||||
return self.do_test_from_openssl(NIST224p)
|
||||
def test_from_openssl_nist256p(self):
|
||||
return self.do_test_from_openssl(NIST256p)
|
||||
def test_from_openssl_nist384p(self):
|
||||
return self.do_test_from_openssl(NIST384p)
|
||||
def test_from_openssl_nist521p(self):
|
||||
return self.do_test_from_openssl(NIST521p)
|
||||
def test_from_openssl_secp256k1(self):
|
||||
return self.do_test_from_openssl(SECP256k1)
|
||||
|
||||
def do_test_from_openssl(self, curve):
|
||||
curvename = curve.openssl_name
|
||||
assert curvename
|
||||
# OpenSSL: create sk, vk, sign.
|
||||
# Python: read vk(3), checksig(5), read sk(1), sign, check
|
||||
mdarg = self.get_openssl_messagedigest_arg()
|
||||
if os.path.isdir("t"):
|
||||
shutil.rmtree("t")
|
||||
os.mkdir("t")
|
||||
run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename)
|
||||
run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem")
|
||||
data = b("data")
|
||||
with open("t/data.txt","wb") as e: e.write(data)
|
||||
run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg)
|
||||
run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt" % mdarg)
|
||||
with open("t/pubkey.pem","rb") as e: pubkey_pem = e.read()
|
||||
vk = VerifyingKey.from_pem(pubkey_pem) # 3
|
||||
with open("t/data.sig","rb") as e: sig_der = e.read()
|
||||
self.assertTrue(vk.verify(sig_der, data, # 5
|
||||
hashfunc=sha1, sigdecode=sigdecode_der))
|
||||
|
||||
with open("t/privkey.pem") as e: fp = e.read()
|
||||
sk = SigningKey.from_pem(fp) # 1
|
||||
sig = sk.sign(data)
|
||||
self.assertTrue(vk.verify(sig, data))
|
||||
|
||||
def test_to_openssl_nist192p(self):
|
||||
self.do_test_to_openssl(NIST192p)
|
||||
def test_to_openssl_nist224p(self):
|
||||
self.do_test_to_openssl(NIST224p)
|
||||
def test_to_openssl_nist256p(self):
|
||||
self.do_test_to_openssl(NIST256p)
|
||||
def test_to_openssl_nist384p(self):
|
||||
self.do_test_to_openssl(NIST384p)
|
||||
def test_to_openssl_nist521p(self):
|
||||
self.do_test_to_openssl(NIST521p)
|
||||
def test_to_openssl_secp256k1(self):
|
||||
self.do_test_to_openssl(SECP256k1)
|
||||
|
||||
def do_test_to_openssl(self, curve):
    """Round-trip interop check, this library -> OpenSSL.

    Generates a keypair and a DER signature with this library in a scratch
    directory ``t/``, then checks OpenSSL can verify the signature (and
    rejects corrupted data), and can sign with our exported private key.
    """
    curvename = curve.openssl_name
    assert curvename
    # Python: create sk, vk, sign.
    # OpenSSL: read vk(4), checksig(6), read sk(2), sign, check
    mdarg = self.get_openssl_messagedigest_arg()
    # start with a clean scratch directory each run
    if os.path.isdir("t"):
        shutil.rmtree("t")
    os.mkdir("t")
    sk = SigningKey.generate(curve=curve)
    vk = sk.get_verifying_key()
    data = b("data")
    with open("t/pubkey.der","wb") as e: e.write(vk.to_der()) # 4
    with open("t/pubkey.pem","wb") as e: e.write(vk.to_pem()) # 4
    sig_der = sk.sign(data, hashfunc=sha1, sigencode=sigencode_der)

    with open("t/data.sig","wb") as e: e.write(sig_der) # 6
    with open("t/data.txt","wb") as e: e.write(data)
    with open("t/baddata.txt","wb") as e: e.write(data+b("corrupt"))

    # OpenSSL must reject the signature over corrupted data...
    self.assertRaises(SubprocessError, run_openssl,
                      "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt" % mdarg)
    # ...and accept it over the original data
    run_openssl("dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt" % mdarg)

    with open("t/privkey.pem","wb") as e: e.write(sk.to_pem()) # 2
    run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg)
    run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" % mdarg)
|
||||
|
||||
class DER(unittest.TestCase):
    """Unit tests for the low-level DER encode/decode helpers in ``der``."""

    def test_oids(self):
        """OID encoding matches known byte strings and round-trips."""
        # id-ecPublicKey OID (1.2.840.10045.2.1)
        oid_ecPublicKey = der.encode_oid(1, 2, 840, 10045, 2, 1)
        self.assertEqual(hexlify(oid_ecPublicKey), b("06072a8648ce3d0201"))
        self.assertEqual(hexlify(NIST224p.encoded_oid), b("06052b81040021"))
        self.assertEqual(hexlify(NIST256p.encoded_oid),
                         b("06082a8648ce3d030107"))
        # remove_object must return the decoded OID plus unconsumed bytes
        x = oid_ecPublicKey + b("more")
        x1, rest = der.remove_object(x)
        self.assertEqual(x1, (1, 2, 840, 10045, 2, 1))
        self.assertEqual(rest, b("more"))

    def test_integer(self):
        """DER INTEGER encoding for small and multi-byte values."""
        self.assertEqual(der.encode_integer(0), b("\x02\x01\x00"))
        self.assertEqual(der.encode_integer(1), b("\x02\x01\x01"))
        self.assertEqual(der.encode_integer(127), b("\x02\x01\x7f"))
        # 128 gains a leading zero byte so the value stays non-negative
        self.assertEqual(der.encode_integer(128), b("\x02\x02\x00\x80"))
        self.assertEqual(der.encode_integer(256), b("\x02\x02\x01\x00"))
        #self.assertEqual(der.encode_integer(-1), b("\x02\x01\xff"))

        # round-trip helper: encode, append junk, decode, expect junk back
        def s(n): return der.remove_integer(der.encode_integer(n) + b("junk"))
        self.assertEqual(s(0), (0, b("junk")))
        self.assertEqual(s(1), (1, b("junk")))
        self.assertEqual(s(127), (127, b("junk")))
        self.assertEqual(s(128), (128, b("junk")))
        self.assertEqual(s(256), (256, b("junk")))
        self.assertEqual(s(1234567890123456789012345678901234567890),
                         (1234567890123456789012345678901234567890,b("junk")))

    def test_number(self):
        """Base-128 "number" encoding as used inside OID components."""
        self.assertEqual(der.encode_number(0), b("\x00"))
        self.assertEqual(der.encode_number(127), b("\x7f"))
        self.assertEqual(der.encode_number(128), b("\x81\x00"))
        self.assertEqual(der.encode_number(3*128+7), b("\x83\x07"))
        #self.assertEqual(der.read_number("\x81\x9b"+"more"), (155, 2))
        #self.assertEqual(der.encode_number(155), b("\x81\x9b"))
        for n in (0, 1, 2, 127, 128, 3*128+7, 840, 10045): #, 155):
            x = der.encode_number(n) + b("more")
            n1, llen = der.read_number(x)
            self.assertEqual(n1, n)
            self.assertEqual(x[llen:], b("more"))

    def test_length(self):
        """Definite-length length octets, short form and long form."""
        self.assertEqual(der.encode_length(0), b("\x00"))
        self.assertEqual(der.encode_length(127), b("\x7f"))
        # >= 128 switches to long form: 0x81/0x82 prefix plus length bytes
        self.assertEqual(der.encode_length(128), b("\x81\x80"))
        self.assertEqual(der.encode_length(255), b("\x81\xff"))
        self.assertEqual(der.encode_length(256), b("\x82\x01\x00"))
        self.assertEqual(der.encode_length(3*256+7), b("\x82\x03\x07"))
        self.assertEqual(der.read_length(b("\x81\x9b")+b("more")), (155, 2))
        self.assertEqual(der.encode_length(155), b("\x81\x9b"))
        for n in (0, 1, 2, 127, 128, 255, 256, 3*256+7, 155):
            x = der.encode_length(n) + b("more")
            n1, llen = der.read_length(x)
            self.assertEqual(n1, n)
            self.assertEqual(x[llen:], b("more"))

    def test_sequence(self):
        """SEQUENCE wrap/unwrap leaves trailing bytes untouched."""
        x = der.encode_sequence(b("ABC"), b("DEF")) + b("GHI")
        self.assertEqual(x, b("\x30\x06ABCDEFGHI"))
        x1, rest = der.remove_sequence(x)
        self.assertEqual(x1, b("ABCDEF"))
        self.assertEqual(rest, b("GHI"))

    def test_constructed(self):
        """Context-specific constructed tags [0] (0xa0) and [1] (0xa1)."""
        x = der.encode_constructed(0, NIST224p.encoded_oid)
        self.assertEqual(hexlify(x), b("a007") + b("06052b81040021"))
        x = der.encode_constructed(1, unhexlify(b("0102030a0b0c")))
        self.assertEqual(hexlify(x), b("a106") + b("0102030a0b0c"))
|
||||
|
||||
class Util(unittest.TestCase):
    """Tests for the random-range helpers in ecdsa.util."""

    def test_trytryagain(self):
        """randrange_from_seed__trytryagain stays in range and is stable."""
        tta = util.randrange_from_seed__trytryagain
        # orders straddling byte boundaries exercise the extra-bits path
        for i in range(1000):
            seed = "seed-%d" % i
            for order in (2**8-2, 2**8-1, 2**8, 2**8+1, 2**8+2,
                          2**16-1, 2**16+1):
                n = tta(seed, order)
                self.assertTrue(1 <= n < order, (1, n, order))
        # this trytryagain *does* provide long-term stability
        self.assertEqual(("%x"%(tta("seed", NIST224p.order))).encode(),
                         b("6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc"))

    def test_randrange(self):
        """util.randrange output is always within [1, order)."""
        # util.randrange does not provide long-term stability: we might
        # change the algorithm in the future.
        for i in range(1000):
            entropy = util.PRNG("seed-%d" % i)
            for order in (2**8-2, 2**8-1, 2**8,
                          2**16-1, 2**16+1,
                          ):
                # that oddball 2**16+1 takes half our runtime
                n = util.randrange(order, entropy=entropy)
                self.assertTrue(1 <= n < order, (1, n, order))

    def OFF_test_prove_uniformity(self):
        """Disabled (rename to test_* to run): slow statistical uniformity check."""
        order = 2**8-2
        counts = dict([(i, 0) for i in range(1, order)])
        assert 0 not in counts
        assert order not in counts
        for i in range(1000000):
            seed = "seed-%d" % i
            n = util.randrange_from_seed__trytryagain(seed, order)
            counts[n] += 1
        # this technique should use the full range
        self.assertTrue(counts[order-1])
        for i in range(1, order):
            print_("%3d: %s" % (i, "*"*(counts[i]//100)))
|
||||
|
||||
class RFC6979(unittest.TestCase):
    """Deterministic nonce (k) generation test vectors.

    NIST-curve vectors come from RFC 6979 appendix A; the SECP256k1 vectors
    were computed with external reference implementations (see docstrings).
    """
    # https://tools.ietf.org/html/rfc6979#appendix-A.1
    def _do(self, generator, secexp, hsh, hash_func, expected):
        # generate_k must be fully deterministic given (order, key, hash)
        actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh)
        self.assertEqual(expected, actual)

    def test_SECP256k1(self):
        '''RFC doesn't contain test vectors for SECP256k1 used in bitcoin.
        This vector has been computed by Golang reference implementation instead.'''
        self._do(
            generator = SECP256k1.generator,
            secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16),
            hsh = sha256(b("sample")).digest(),
            hash_func = sha256,
            expected = int("8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", 16))

    def test_SECP256k1_2(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=int("cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", 16),
            hsh=sha256(b("sample")).digest(),
            hash_func=sha256,
            expected=int("2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", 16))

    def test_SECP256k1_3(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0x1,
            hsh=sha256(b("Satoshi Nakamoto")).digest(),
            hash_func=sha256,
            expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15)

    def test_SECP256k1_4(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0x1,
            hsh=sha256(b("All those moments will be lost in time, like tears in rain. Time to die...")).digest(),
            hash_func=sha256,
            expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3)

    def test_SECP256k1_5(self):
        # secexp is order - 1, the largest valid private key
        self._do(
            generator=SECP256k1.generator,
            secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140,
            hsh=sha256(b("Satoshi Nakamoto")).digest(),
            hash_func=sha256,
            expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90)

    def test_SECP256k1_6(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0xf8b8af8ce3c7cca5e300d33939540c10d45ce001b8f252bfbc57ba0342904181,
            hsh=sha256(b("Alan Turing")).digest(),
            hash_func=sha256,
            expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1)

    def test_1(self):
        # Basic example of the RFC, it also tests 'try-try-again' from Step H of rfc6979
        self._do(
            generator = Point(None, 0, 0, int("4000000000000000000020108A2E0CC0D99F8A5EF", 16)),
            secexp = int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16),
            hsh = unhexlify(b("AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF")),
            hash_func = sha256,
            expected = int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16))

    def test_2(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha1(b("sample")).digest(),
            hash_func = sha1,
            expected = int("37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16))

    def test_3(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha256(b("sample")).digest(),
            hash_func = sha256,
            expected = int("32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16))

    def test_4(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha512(b("sample")).digest(),
            hash_func = sha512,
            expected = int("A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16))

    def test_5(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha1(b("test")).digest(),
            hash_func = sha1,
            expected = int("D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16))

    def test_6(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha256(b("test")).digest(),
            hash_func = sha256,
            expected = int("5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16))

    def test_7(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha512(b("test")).digest(),
            hash_func = sha512,
            expected = int("0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16))

    def test_8(self):
        self._do(
            generator=NIST521p.generator,
            secexp = int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh = sha1(b("sample")).digest(),
            hash_func = sha1,
            expected = int("089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", 16))

    def test_9(self):
        self._do(
            generator=NIST521p.generator,
            secexp = int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh = sha256(b("sample")).digest(),
            hash_func = sha256,
            expected = int("0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", 16))

    def test_10(self):
        self._do(
            generator=NIST521p.generator,
            secexp = int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh = sha512(b("test")).digest(),
            hash_func = sha512,
            expected = int("16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", 16))
|
||||
|
||||
def __main__():
    # run all TestCase classes in this module via unittest's CLI runner
    unittest.main()

if __name__ == "__main__":
    __main__()
|
||||
267
Lambdas/Websocket Authorizer/ecdsa/util.py
Normal file
267
Lambdas/Websocket Authorizer/ecdsa/util.py
Normal file
@@ -0,0 +1,267 @@
|
||||
from __future__ import division
|
||||
|
||||
import os
|
||||
import math
|
||||
import binascii
|
||||
from hashlib import sha256
|
||||
from . import der
|
||||
from .curves import orderlen
|
||||
from .six import PY3, int2byte, b, next
|
||||
|
||||
# RFC5480:
|
||||
# The "unrestricted" algorithm identifier is:
|
||||
# id-ecPublicKey OBJECT IDENTIFIER ::= {
|
||||
# iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 }
|
||||
|
||||
oid_ecPublicKey = (1, 2, 840, 10045, 2, 1)
|
||||
encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey)
|
||||
|
||||
def randrange(order, entropy=None):
    """Return a random integer k such that 1 <= k < order, uniformly
    distributed across that range. For simplicity, this only behaves well if
    'order' is fairly close (but below) a power of 256. The try-try-again
    algorithm we use takes longer and longer time (on average) to complete as
    'order' falls, rising to a maximum of avg=512 loops for the worst-case
    (256**k)+1 . All of the standard curves behave well. There is a cutoff at
    10k loops (which raises RuntimeError) to prevent an infinite loop when
    something is really broken like the entropy function not working.

    Note that this function is not declared to be forwards-compatible: we may
    change the behavior in future releases. The entropy= argument (which
    should get a callable that behaves like os.urandom) can be used to
    achieve stability within a given release (for repeatable unit tests), but
    should not be used as a long-term-compatible key generation algorithm.
    """
    # we could handle arbitrary orders (even 256**k+1) better if we created
    # candidates bit-wise instead of byte-wise, which would reduce the
    # worst-case behavior to avg=2 loops, but that would be more complex. The
    # change would be to round the order up to a power of 256, subtract one
    # (to get 0xffff..), use that to get a byte-long mask for the top byte,
    # generate the len-1 entropy bytes, generate one extra byte and mask off
    # the top bits, then combine it with the rest. Requires jumping back and
    # forth between strings and integers a lot.

    if entropy is None:
        entropy = os.urandom
    assert order > 1
    # renamed from 'bytes' to avoid shadowing the builtin
    num_bytes = orderlen(order)
    dont_try_forever = 10000 # gives about 2**-60 failures for worst case
    while dont_try_forever > 0:
        dont_try_forever -= 1
        # +1 shifts the candidate range to [1, 256**num_bytes]
        candidate = string_to_number(entropy(num_bytes)) + 1
        if 1 <= candidate < order:
            return candidate
    raise RuntimeError("randrange() tried hard but gave up, either something"
                       " is very wrong or you got realllly unlucky. Order was"
                       " %x" % order)
|
||||
|
||||
class PRNG:
    # this returns a callable which, when invoked with an integer N, will
    # return N pseudorandom bytes. Note: this is a short-term PRNG, meant
    # primarily for the needs of randrange_from_seed__trytryagain(), which
    # only needs to run it a few times per seed. It does not provide
    # protection against state compromise (forward security).
    def __init__(self, seed):
        # endless stream of digest "bytes" (ints on py3, 1-char strs on py2)
        self.generator = self.block_generator(seed)

    def __call__(self, numbytes):
        """Return *numbytes* pseudorandom bytes derived from the seed."""
        a = [next(self.generator) for i in range(numbytes)]

        if PY3:
            # iterating a py3 digest yields ints; bytes() reassembles them
            return bytes(a)
        else:
            # on py2 the digest yields 1-char strings; join them back up
            return "".join(a)

    def block_generator(self, seed):
        """Yield sha256("prng-%d-%s" % (counter, seed)) digests byte by byte."""
        counter = 0
        while True:
            for byte in sha256(("prng-%d-%s" % (counter, seed)).encode()).digest():
                yield byte
            counter += 1
|
||||
|
||||
def randrange_from_seed__overshoot_modulo(seed, order):
    # hash the data, then turn the digest into a number in [1,order).
    #
    # We use David-Sarah Hopwood's suggestion: turn it into a number that's
    # sufficiently larger than the group order, then modulo it down to fit.
    # This should give adequate (but not perfect) uniformity, and simple
    # code. There are other choices: try-try-again is the main one.
    # 2*orderlen bytes of PRNG output => value far larger than order,
    # so the modulo bias is negligible
    base = PRNG(seed)(2*orderlen(order))
    number = (int(binascii.hexlify(base), 16) % (order-1)) + 1
    assert 1 <= number < order, (1, number, order)
    return number
|
||||
|
||||
def lsb_of_ones(numbits):
    """Return an integer whose low *numbits* bits are all set (a bitmask)."""
    mask = 1 << numbits
    return mask - 1
|
||||
def bits_and_bytes(order):
    """Return (bit_count, whole_bytes, leftover_bits) needed to represent
    values below *order*."""
    bit_count = int(math.log(order - 1, 2) + 1)
    whole_bytes, leftover_bits = divmod(bit_count, 8)
    return bit_count, whole_bytes, leftover_bits
|
||||
|
||||
# the following randrange_from_seed__METHOD() functions take an
|
||||
# arbitrarily-sized secret seed and turn it into a number that obeys the same
|
||||
# range limits as randrange() above. They are meant for deriving consistent
|
||||
# signing keys from a secret rather than generating them randomly, for
|
||||
# example a protocol in which three signing keys are derived from a master
|
||||
# secret. You should use a uniformly-distributed unguessable seed with about
|
||||
# curve.baselen bytes of entropy. To use one, do this:
|
||||
# seed = os.urandom(curve.baselen) # or other starting point
|
||||
# secexp = ecdsa.util.randrange_from_seed__trytryagain(sed, curve.order)
|
||||
# sk = SigningKey.from_secret_exponent(secexp, curve)
|
||||
|
||||
def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256):
    # hash the seed, then turn the digest into a number in [1,order), but
    # don't worry about trying to uniformly fill the range. This will lose,
    # on average, four bits of entropy.
    bits, num_bytes, extrabits = bits_and_bytes(order)
    if extrabits:
        num_bytes += 1
    base = hashmod(seed).digest()[:num_bytes]
    # pad with zero *bytes*, not a str literal: on Python 3 concatenating
    # "\x00" (str) with the bytes digest raises TypeError
    base = b("\x00")*(num_bytes-len(base)) + base
    number = 1+int(binascii.hexlify(base), 16)
    # NOTE(review): hashmod(seed) requires bytes input on Python 3 --
    # callers passing str seeds must encode first; verify against callers
    assert 1 <= number < order
    return number
|
||||
|
||||
def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256):
    # like string_to_randrange_truncate_bytes, but only lose an average of
    # half a bit
    # NOTE(review): this function appears Python-2 only as written:
    # "\x00"*... + base mixes str and bytes, and ord(base[0]) fails on py3
    # where base[0] is already an int -- confirm before using under py3
    bits = int(math.log(order-1, 2)+1)
    maxbytes = (bits+7) // 8
    base = hashmod(seed).digest()[:maxbytes]
    base = "\x00"*(maxbytes-len(base)) + base
    # mask off high-order bits beyond the bit length of order-1
    topbits = 8*maxbytes - bits
    if topbits:
        base = int2byte(ord(base[0]) & lsb_of_ones(topbits)) + base[1:]
    number = 1+int(binascii.hexlify(base), 16)
    assert 1 <= number < order
    return number
|
||||
|
||||
def randrange_from_seed__trytryagain(seed, order):
    # figure out exactly how many bits we need (rounded up to the nearest
    # bit), so we can reduce the chance of looping to less than 0.5 . This is
    # specified to feed from a byte-oriented PRNG, and discards the
    # high-order bits of the first byte as necessary to get the right number
    # of bits. The average number of loops will range from 1.0 (when
    # order=2**k-1) to 2.0 (when order=2**k+1).
    assert order > 1
    bits, bytes, extrabits = bits_and_bytes(order)
    generate = PRNG(seed)
    while True:
        extrabyte = b("")
        if extrabits:
            # mask the first byte down to just the extra bits we need
            extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits))
        # +1 shifts the candidate range so 0 maps to 1
        guess = string_to_number(extrabyte + generate(bytes)) + 1
        if 1 <= guess < order:
            return guess
|
||||
|
||||
|
||||
def number_to_string(num, order):
    """Encode *num* as a fixed-length big-endian byte string, sized to hold
    any value below *order*."""
    length = orderlen(order)
    padded_hex = ("%0" + str(2 * length) + "x") % num
    encoded = binascii.unhexlify(padded_hex.encode())
    assert len(encoded) == length, (len(encoded), length)
    return encoded
|
||||
|
||||
def number_to_string_crop(num, order):
    """Like number_to_string, but silently truncate (rather than assert) if
    *num* encodes to more bytes than *order* requires."""
    length = orderlen(order)
    padded_hex = ("%0" + str(2 * length) + "x") % num
    encoded = binascii.unhexlify(padded_hex.encode())
    return encoded[:length]
|
||||
|
||||
def string_to_number(string):
    """Interpret *string* as an unsigned big-endian integer."""
    hex_digits = binascii.hexlify(string)
    return int(hex_digits, 16)
|
||||
|
||||
def string_to_number_fixedlen(string, order):
    """Like string_to_number, but assert *string* has exactly the byte
    length implied by *order*."""
    expected_len = orderlen(order)
    assert len(string) == expected_len, (len(string), expected_len)
    return int(binascii.hexlify(string), 16)
|
||||
|
||||
# these methods are useful for the sigencode= argument to SK.sign() and the
|
||||
# sigdecode= argument to VK.verify(), and control how the signature is packed
|
||||
# or unpacked.
|
||||
|
||||
def sigencode_strings(r, s, order):
    """Encode signature (r, s) as a pair of fixed-length big-endian byte
    strings sized for *order*."""
    return (number_to_string(r, order), number_to_string(s, order))
|
||||
|
||||
def sigencode_string(r, s, order):
    """Encode signature (r, s) as one concatenated r||s string.

    For any given curve the component sizes are fixed, so simple
    concatenation is unambiguous.
    """
    return b("").join(sigencode_strings(r, s, order))
|
||||
|
||||
def sigencode_der(r, s, order):
    # DER SEQUENCE of two INTEGERs; *order* is unused but kept so all
    # sigencode_* helpers share the same call signature
    return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))
|
||||
|
||||
# canonical versions of sigencode methods
|
||||
# these enforce low S values, by negating the value (modulo the order) if above order/2
|
||||
# see CECKey::Sign() https://github.com/bitcoin/bitcoin/blob/master/src/key.cpp#L214
|
||||
def sigencode_strings_canonize(r, s, order):
    """Like sigencode_strings, but canonicalize to the "low-S" form.

    If s is above order/2 it is replaced by order - s (an equally valid
    signature).  Integer floor division keeps the comparison exact: with
    ``from __future__ import division`` in effect, ``order / 2`` is a float,
    which loses precision for cryptographic-size orders.
    """
    if s > order // 2:
        s = order - s
    return sigencode_strings(r, s, order)
|
||||
|
||||
def sigencode_string_canonize(r, s, order):
    """Like sigencode_string, but canonicalize to the "low-S" form.

    Integer floor division keeps the comparison exact; ``order / 2`` would
    be a float under ``from __future__ import division`` and can round for
    cryptographic-size orders.
    """
    if s > order // 2:
        s = order - s
    return sigencode_string(r, s, order)
|
||||
|
||||
def sigencode_der_canonize(r, s, order):
    """Like sigencode_der, but canonicalize to the "low-S" form.

    Integer floor division keeps the comparison exact; ``order / 2`` would
    be a float under ``from __future__ import division`` and can round for
    cryptographic-size orders.
    """
    if s > order // 2:
        s = order - s
    return sigencode_der(r, s, order)
|
||||
|
||||
|
||||
class MalformedSignature(Exception):
    """Raised by the sigdecode_* helpers when a signature blob has the
    wrong number of parts or the wrong length for the curve order."""
    pass
|
||||
|
||||
|
||||
def sigdecode_string(signature, order):
    """Decode a fixed-length concatenated r||s signature string into the
    integer pair (r, s).

    Raises MalformedSignature if the length does not match *order*.
    """
    component_len = orderlen(order)
    if len(signature) != 2 * component_len:
        raise MalformedSignature(
            "Invalid length of signature, expected {0} bytes long, "
            "provided string is {1} bytes long"
            .format(2 * component_len, len(signature)))
    r_part = signature[:component_len]
    s_part = signature[component_len:]
    return (string_to_number_fixedlen(r_part, order),
            string_to_number_fixedlen(s_part, order))
|
||||
|
||||
def sigdecode_strings(rs_strings, order):
    """Decode a (r_string, s_string) pair into the integer pair (r, s).

    Raises MalformedSignature on a wrong pair count or wrong component
    length for *order*.
    """
    if len(rs_strings) != 2:
        raise MalformedSignature(
            "Invalid number of strings provided: {0}, expected 2"
            .format(len(rs_strings)))
    (r_str, s_str) = rs_strings
    component_len = orderlen(order)
    if len(r_str) != component_len:
        raise MalformedSignature(
            "Invalid length of first string ('r' parameter), "
            "expected {0} bytes long, provided string is {1} bytes long"
            .format(component_len, len(r_str)))
    if len(s_str) != component_len:
        raise MalformedSignature(
            "Invalid length of second string ('s' parameter), "
            "expected {0} bytes long, provided string is {1} bytes long"
            .format(component_len, len(s_str)))
    return (string_to_number_fixedlen(r_str, order),
            string_to_number_fixedlen(s_str, order))
|
||||
|
||||
def sigdecode_der(sig_der, order):
    # inverse of sigencode_der; *order* is unused but kept so all
    # sigdecode_* helpers share the same call signature
    #return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))
    rs_strings, empty = der.remove_sequence(sig_der)
    # strict DER: no bytes may follow the outer SEQUENCE
    if empty != b(""):
        raise der.UnexpectedDER("trailing junk after DER sig: %s" %
                                binascii.hexlify(empty))
    r, rest = der.remove_integer(rs_strings)
    s, empty = der.remove_integer(rest)
    # ...and nothing may follow the two INTEGERs inside it
    if empty != b(""):
        raise der.UnexpectedDER("trailing junk after DER numbers: %s" %
                                binascii.hexlify(empty))
    return r, s
|
||||
|
||||
11
Lambdas/Websocket Authorizer/jose/__init__.py
Normal file
11
Lambdas/Websocket Authorizer/jose/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
|
||||
__version__ = "3.0.1"
|
||||
__author__ = 'Michael Davis'
|
||||
__license__ = 'MIT'
|
||||
__copyright__ = 'Copyright 2016 Michael Davis'
|
||||
|
||||
|
||||
from .exceptions import JOSEError # noqa: F401
|
||||
from .exceptions import JWSError # noqa: F401
|
||||
from .exceptions import ExpiredSignatureError # noqa: F401
|
||||
from .exceptions import JWTError # noqa: F401
|
||||
13
Lambdas/Websocket Authorizer/jose/backends/__init__.py
Normal file
13
Lambdas/Websocket Authorizer/jose/backends/__init__.py
Normal file
@@ -0,0 +1,13 @@
|
||||
|
||||
try:
|
||||
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey # noqa: F401
|
||||
except ImportError:
|
||||
try:
|
||||
from jose.backends.pycrypto_backend import RSAKey # noqa: F401
|
||||
except ImportError:
|
||||
from jose.backends.rsa_backend import RSAKey # noqa: F401
|
||||
|
||||
try:
|
||||
from jose.backends.cryptography_backend import CryptographyECKey as ECKey # noqa: F401
|
||||
except ImportError:
|
||||
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey # noqa: F401
|
||||
82
Lambdas/Websocket Authorizer/jose/backends/_asn1.py
Normal file
82
Lambdas/Websocket Authorizer/jose/backends/_asn1.py
Normal file
@@ -0,0 +1,82 @@
|
||||
"""ASN1 encoding helpers for converting between PKCS1 and PKCS8.
|
||||
|
||||
Required by rsa_backend and pycrypto_backend but not cryptography_backend.
|
||||
"""
|
||||
from pyasn1.codec.der import decoder, encoder
|
||||
from pyasn1.type import namedtype, univ
|
||||
|
||||
RSA_ENCRYPTION_ASN1_OID = "1.2.840.113549.1.1.1"
|
||||
|
||||
|
||||
class RsaAlgorithmIdentifier(univ.Sequence):
    """ASN1 structure for recording RSA PrivateKeyAlgorithm identifiers."""
    # AlgorithmIdentifier: rsaEncryption OID plus the mandatory NULL params
    componentType = namedtype.NamedTypes(
        namedtype.NamedType("rsaEncryption", univ.ObjectIdentifier()),
        namedtype.NamedType("parameters", univ.Null())
    )
|
||||
|
||||
|
||||
class PKCS8PrivateKey(univ.Sequence):
    """ASN1 structure for recording PKCS8 private keys."""
    # PrivateKeyInfo: version, algorithm, then the PKCS1 key wrapped in an
    # OCTET STRING
    componentType = namedtype.NamedTypes(
        namedtype.NamedType("version", univ.Integer()),
        namedtype.NamedType("privateKeyAlgorithm", RsaAlgorithmIdentifier()),
        namedtype.NamedType("privateKey", univ.OctetString())
    )
|
||||
|
||||
|
||||
class PublicKeyInfo(univ.Sequence):
    """ASN1 structure for recording PKCS8 public keys."""
    # SubjectPublicKeyInfo: algorithm, then the PKCS1 key as a BIT STRING
    componentType = namedtype.NamedTypes(
        namedtype.NamedType("algorithm", RsaAlgorithmIdentifier()),
        namedtype.NamedType("publicKey", univ.BitString())
    )
|
||||
|
||||
|
||||
def rsa_private_key_pkcs8_to_pkcs1(pkcs8_key):
    """Convert a PKCS8-encoded RSA private key to PKCS1."""
    # decoder.decode returns (decoded_structure, remaining_bytes)
    decoded_values = decoder.decode(pkcs8_key, asn1Spec=PKCS8PrivateKey())

    try:
        decoded_key = decoded_values[0]
    except IndexError:
        raise ValueError("Invalid private key encoding")

    # the PKCS1 key is carried verbatim in the privateKey OCTET STRING
    return decoded_key["privateKey"]
|
||||
|
||||
|
||||
def rsa_private_key_pkcs1_to_pkcs8(pkcs1_key):
    """Convert a PKCS1-encoded RSA private key to PKCS8."""
    algorithm = RsaAlgorithmIdentifier()
    algorithm["rsaEncryption"] = RSA_ENCRYPTION_ASN1_OID

    # wrap the PKCS1 key in a PrivateKeyInfo structure (version always 0)
    pkcs8_key = PKCS8PrivateKey()
    pkcs8_key["version"] = 0
    pkcs8_key["privateKeyAlgorithm"] = algorithm
    pkcs8_key["privateKey"] = pkcs1_key

    return encoder.encode(pkcs8_key)
|
||||
|
||||
|
||||
def rsa_public_key_pkcs1_to_pkcs8(pkcs1_key):
    """Convert a PKCS1-encoded RSA public key to PKCS8."""
    algorithm = RsaAlgorithmIdentifier()
    algorithm["rsaEncryption"] = RSA_ENCRYPTION_ASN1_OID

    # wrap the PKCS1 key bytes as the BIT STRING of a SubjectPublicKeyInfo
    pkcs8_key = PublicKeyInfo()
    pkcs8_key["algorithm"] = algorithm
    pkcs8_key["publicKey"] = univ.BitString.fromOctetString(pkcs1_key)

    return encoder.encode(pkcs8_key)
|
||||
|
||||
|
||||
def rsa_public_key_pkcs8_to_pkcs1(pkcs8_key):
    """Convert a PKCS8-encoded RSA public key to PKCS1."""
    # decoder.decode returns (decoded_structure, remaining_bytes)
    decoded_values = decoder.decode(pkcs8_key, asn1Spec=PublicKeyInfo())

    try:
        decoded_key = decoded_values[0]
    except IndexError:
        raise ValueError("Invalid public key encoding.")

    # unwrap the BIT STRING back into raw PKCS1 key bytes
    return decoded_key["publicKey"].asOctets()
|
||||
21
Lambdas/Websocket Authorizer/jose/backends/base.py
Normal file
21
Lambdas/Websocket Authorizer/jose/backends/base.py
Normal file
@@ -0,0 +1,21 @@
|
||||
class Key(object):
    """
    A simple interface for implementing JWK keys.
    """
    def __init__(self, key, algorithm):
        # concrete backends parse and validate the key material here
        pass

    def sign(self, msg):
        """Sign *msg*; concrete backends return the raw signature."""
        raise NotImplementedError()

    def verify(self, msg, sig):
        """Check *sig* against *msg*; concrete backends return a boolean."""
        raise NotImplementedError()

    def public_key(self):
        """Return a Key holding only the public part of this key."""
        raise NotImplementedError()

    def to_pem(self):
        """Serialize this key to PEM."""
        raise NotImplementedError()

    def to_dict(self):
        """Serialize this key to a JWK-style dict."""
        raise NotImplementedError()
|
||||
@@ -0,0 +1,371 @@
|
||||
from __future__ import division
|
||||
|
||||
import math
|
||||
|
||||
import six
|
||||
|
||||
try:
|
||||
from ecdsa import SigningKey as EcdsaSigningKey, VerifyingKey as EcdsaVerifyingKey
|
||||
except ImportError:
|
||||
EcdsaSigningKey = EcdsaVerifyingKey = None
|
||||
|
||||
from jose.backends.base import Key
|
||||
from jose.utils import base64_to_long, long_to_base64
|
||||
from jose.constants import ALGORITHMS
|
||||
from jose.exceptions import JWKError
|
||||
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import ec, rsa, padding
|
||||
from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature, encode_dss_signature
|
||||
from cryptography.hazmat.primitives.serialization import load_pem_private_key, load_pem_public_key
|
||||
from cryptography.utils import int_from_bytes, int_to_bytes
|
||||
from cryptography.x509 import load_pem_x509_certificate
|
||||
|
||||
|
||||
class CryptographyECKey(Key):
|
||||
SHA256 = hashes.SHA256
|
||||
SHA384 = hashes.SHA384
|
||||
SHA512 = hashes.SHA512
|
||||
|
||||
def __init__(self, key, algorithm, cryptography_backend=default_backend):
|
||||
if algorithm not in ALGORITHMS.EC:
|
||||
raise JWKError('hash_alg: %s is not a valid hash algorithm' % algorithm)
|
||||
|
||||
self.hash_alg = {
|
||||
ALGORITHMS.ES256: self.SHA256,
|
||||
ALGORITHMS.ES384: self.SHA384,
|
||||
ALGORITHMS.ES512: self.SHA512
|
||||
}.get(algorithm)
|
||||
self._algorithm = algorithm
|
||||
|
||||
self.cryptography_backend = cryptography_backend
|
||||
|
||||
if hasattr(key, 'public_bytes') or hasattr(key, 'private_bytes'):
|
||||
self.prepared_key = key
|
||||
return
|
||||
|
||||
if None not in (EcdsaSigningKey, EcdsaVerifyingKey) and isinstance(key, (EcdsaSigningKey, EcdsaVerifyingKey)):
|
||||
# convert to PEM and let cryptography below load it as PEM
|
||||
key = key.to_pem().decode('utf-8')
|
||||
|
||||
if isinstance(key, dict):
|
||||
self.prepared_key = self._process_jwk(key)
|
||||
return
|
||||
|
||||
if isinstance(key, six.string_types):
|
||||
key = key.encode('utf-8')
|
||||
|
||||
if isinstance(key, six.binary_type):
|
||||
# Attempt to load key. We don't know if it's
|
||||
# a Public Key or a Private Key, so we try
|
||||
# the Public Key first.
|
||||
try:
|
||||
try:
|
||||
key = load_pem_public_key(key, self.cryptography_backend())
|
||||
except ValueError:
|
||||
key = load_pem_private_key(key, password=None, backend=self.cryptography_backend())
|
||||
except Exception as e:
|
||||
raise JWKError(e)
|
||||
|
||||
self.prepared_key = key
|
||||
return
|
||||
|
||||
raise JWKError('Unable to parse an ECKey from key: %s' % key)
|
||||
|
||||
def _process_jwk(self, jwk_dict):
|
||||
if not jwk_dict.get('kty') == 'EC':
|
||||
raise JWKError("Incorrect key type. Expected: 'EC', Received: %s" % jwk_dict.get('kty'))
|
||||
|
||||
if not all(k in jwk_dict for k in ['x', 'y', 'crv']):
|
||||
raise JWKError('Mandatory parameters are missing')
|
||||
|
||||
x = base64_to_long(jwk_dict.get('x'))
|
||||
y = base64_to_long(jwk_dict.get('y'))
|
||||
curve = {
|
||||
'P-256': ec.SECP256R1,
|
||||
'P-384': ec.SECP384R1,
|
||||
'P-521': ec.SECP521R1,
|
||||
}[jwk_dict['crv']]
|
||||
|
||||
public = ec.EllipticCurvePublicNumbers(x, y, curve())
|
||||
|
||||
if 'd' in jwk_dict:
|
||||
d = base64_to_long(jwk_dict.get('d'))
|
||||
private = ec.EllipticCurvePrivateNumbers(d, public)
|
||||
|
||||
return private.private_key(self.cryptography_backend())
|
||||
else:
|
||||
return public.public_key(self.cryptography_backend())
|
||||
|
||||
def _sig_component_length(self):
|
||||
"""Determine the correct serialization length for an encoded signature component.
|
||||
|
||||
This is the number of bytes required to encode the maximum key value.
|
||||
"""
|
||||
return int(math.ceil(self.prepared_key.key_size / 8.0))
|
||||
|
||||
def _der_to_raw(self, der_signature):
|
||||
"""Convert signature from DER encoding to RAW encoding."""
|
||||
r, s = decode_dss_signature(der_signature)
|
||||
component_length = self._sig_component_length()
|
||||
return int_to_bytes(r, component_length) + int_to_bytes(s, component_length)
|
||||
|
||||
def _raw_to_der(self, raw_signature):
|
||||
"""Convert signature from RAW encoding to DER encoding."""
|
||||
component_length = self._sig_component_length()
|
||||
if len(raw_signature) != int(2 * component_length):
|
||||
raise ValueError("Invalid signature")
|
||||
|
||||
r_bytes = raw_signature[:component_length]
|
||||
s_bytes = raw_signature[component_length:]
|
||||
r = int_from_bytes(r_bytes, "big")
|
||||
s = int_from_bytes(s_bytes, "big")
|
||||
return encode_dss_signature(r, s)
|
||||
|
||||
def sign(self, msg):
|
||||
if self.hash_alg.digest_size * 8 > self.prepared_key.curve.key_size:
|
||||
raise TypeError("this curve (%s) is too short "
|
||||
"for your digest (%d)" % (self.prepared_key.curve.name,
|
||||
8 * self.hash_alg.digest_size))
|
||||
signature = self.prepared_key.sign(msg, ec.ECDSA(self.hash_alg()))
|
||||
return self._der_to_raw(signature)
|
||||
|
||||
def verify(self, msg, sig):
|
||||
try:
|
||||
signature = self._raw_to_der(sig)
|
||||
self.prepared_key.verify(signature, msg, ec.ECDSA(self.hash_alg()))
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def is_public(self):
|
||||
return hasattr(self.prepared_key, 'public_bytes')
|
||||
|
||||
def public_key(self):
|
||||
if self.is_public():
|
||||
return self
|
||||
return self.__class__(self.prepared_key.public_key(), self._algorithm)
|
||||
|
||||
def to_pem(self):
|
||||
if self.is_public():
|
||||
pem = self.prepared_key.public_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PublicFormat.SubjectPublicKeyInfo
|
||||
)
|
||||
return pem
|
||||
pem = self.prepared_key.private_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PrivateFormat.TraditionalOpenSSL,
|
||||
encryption_algorithm=serialization.NoEncryption()
|
||||
)
|
||||
return pem
|
||||
|
||||
def to_dict(self):
|
||||
if not self.is_public():
|
||||
public_key = self.prepared_key.public_key()
|
||||
else:
|
||||
public_key = self.prepared_key
|
||||
|
||||
crv = {
|
||||
'secp256r1': 'P-256',
|
||||
'secp384r1': 'P-384',
|
||||
'secp521r1': 'P-521',
|
||||
}[self.prepared_key.curve.name]
|
||||
|
||||
# Calculate the key size in bytes. Section 6.2.1.2 and 6.2.1.3 of
|
||||
# RFC7518 prescribes that the 'x', 'y' and 'd' parameters of the curve
|
||||
# points must be encoded as octed-strings of this length.
|
||||
key_size = (self.prepared_key.curve.key_size + 7) // 8
|
||||
|
||||
data = {
|
||||
'alg': self._algorithm,
|
||||
'kty': 'EC',
|
||||
'crv': crv,
|
||||
'x': long_to_base64(public_key.public_numbers().x, size=key_size),
|
||||
'y': long_to_base64(public_key.public_numbers().y, size=key_size),
|
||||
}
|
||||
|
||||
if not self.is_public():
|
||||
data['d'] = long_to_base64(
|
||||
self.prepared_key.private_numbers().private_value,
|
||||
size=key_size
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class CryptographyRSAKey(Key):
|
||||
SHA256 = hashes.SHA256
|
||||
SHA384 = hashes.SHA384
|
||||
SHA512 = hashes.SHA512
|
||||
|
||||
def __init__(self, key, algorithm, cryptography_backend=default_backend):
|
||||
if algorithm not in ALGORITHMS.RSA:
|
||||
raise JWKError('hash_alg: %s is not a valid hash algorithm' % algorithm)
|
||||
|
||||
self.hash_alg = {
|
||||
ALGORITHMS.RS256: self.SHA256,
|
||||
ALGORITHMS.RS384: self.SHA384,
|
||||
ALGORITHMS.RS512: self.SHA512
|
||||
}.get(algorithm)
|
||||
self._algorithm = algorithm
|
||||
|
||||
self.cryptography_backend = cryptography_backend
|
||||
|
||||
# if it conforms to RSAPublicKey interface
|
||||
if hasattr(key, 'public_bytes') and hasattr(key, 'public_numbers'):
|
||||
self.prepared_key = key
|
||||
return
|
||||
|
||||
if isinstance(key, dict):
|
||||
self.prepared_key = self._process_jwk(key)
|
||||
return
|
||||
|
||||
if isinstance(key, six.string_types):
|
||||
key = key.encode('utf-8')
|
||||
|
||||
if isinstance(key, six.binary_type):
|
||||
try:
|
||||
if key.startswith(b'-----BEGIN CERTIFICATE-----'):
|
||||
self._process_cert(key)
|
||||
return
|
||||
|
||||
try:
|
||||
self.prepared_key = load_pem_public_key(key, self.cryptography_backend())
|
||||
except ValueError:
|
||||
self.prepared_key = load_pem_private_key(key, password=None, backend=self.cryptography_backend())
|
||||
except Exception as e:
|
||||
raise JWKError(e)
|
||||
return
|
||||
|
||||
raise JWKError('Unable to parse an RSA_JWK from key: %s' % key)
|
||||
|
||||
def _process_jwk(self, jwk_dict):
|
||||
if not jwk_dict.get('kty') == 'RSA':
|
||||
raise JWKError("Incorrect key type. Expected: 'RSA', Received: %s" % jwk_dict.get('kty'))
|
||||
|
||||
e = base64_to_long(jwk_dict.get('e', 256))
|
||||
n = base64_to_long(jwk_dict.get('n'))
|
||||
public = rsa.RSAPublicNumbers(e, n)
|
||||
|
||||
if 'd' not in jwk_dict:
|
||||
return public.public_key(self.cryptography_backend())
|
||||
else:
|
||||
# This is a private key.
|
||||
d = base64_to_long(jwk_dict.get('d'))
|
||||
|
||||
extra_params = ['p', 'q', 'dp', 'dq', 'qi']
|
||||
|
||||
if any(k in jwk_dict for k in extra_params):
|
||||
# Precomputed private key parameters are available.
|
||||
if not all(k in jwk_dict for k in extra_params):
|
||||
# These values must be present when 'p' is according to
|
||||
# Section 6.3.2 of RFC7518, so if they are not we raise
|
||||
# an error.
|
||||
raise JWKError('Precomputed private key parameters are incomplete.')
|
||||
|
||||
p = base64_to_long(jwk_dict['p'])
|
||||
q = base64_to_long(jwk_dict['q'])
|
||||
dp = base64_to_long(jwk_dict['dp'])
|
||||
dq = base64_to_long(jwk_dict['dq'])
|
||||
qi = base64_to_long(jwk_dict['qi'])
|
||||
else:
|
||||
# The precomputed private key parameters are not available,
|
||||
# so we use cryptography's API to fill them in.
|
||||
p, q = rsa.rsa_recover_prime_factors(n, e, d)
|
||||
dp = rsa.rsa_crt_dmp1(d, p)
|
||||
dq = rsa.rsa_crt_dmq1(d, q)
|
||||
qi = rsa.rsa_crt_iqmp(p, q)
|
||||
|
||||
private = rsa.RSAPrivateNumbers(p, q, d, dp, dq, qi, public)
|
||||
|
||||
return private.private_key(self.cryptography_backend())
|
||||
|
||||
def _process_cert(self, key):
|
||||
key = load_pem_x509_certificate(key, self.cryptography_backend())
|
||||
self.prepared_key = key.public_key()
|
||||
|
||||
def sign(self, msg):
|
||||
try:
|
||||
signature = self.prepared_key.sign(
|
||||
msg,
|
||||
padding.PKCS1v15(),
|
||||
self.hash_alg()
|
||||
)
|
||||
except Exception as e:
|
||||
raise JWKError(e)
|
||||
return signature
|
||||
|
||||
def verify(self, msg, sig):
|
||||
try:
|
||||
self.prepared_key.verify(
|
||||
sig,
|
||||
msg,
|
||||
padding.PKCS1v15(),
|
||||
self.hash_alg()
|
||||
)
|
||||
return True
|
||||
except InvalidSignature:
|
||||
return False
|
||||
|
||||
def is_public(self):
|
||||
return hasattr(self.prepared_key, 'public_bytes')
|
||||
|
||||
def public_key(self):
|
||||
if self.is_public():
|
||||
return self
|
||||
return self.__class__(self.prepared_key.public_key(), self._algorithm)
|
||||
|
||||
def to_pem(self, pem_format='PKCS8'):
|
||||
if self.is_public():
|
||||
if pem_format == 'PKCS8':
|
||||
fmt = serialization.PublicFormat.SubjectPublicKeyInfo
|
||||
elif pem_format == 'PKCS1':
|
||||
fmt = serialization.PublicFormat.PKCS1
|
||||
else:
|
||||
raise ValueError("Invalid format specified: %r" % pem_format)
|
||||
pem = self.prepared_key.public_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=fmt
|
||||
)
|
||||
return pem
|
||||
|
||||
if pem_format == 'PKCS8':
|
||||
fmt = serialization.PrivateFormat.PKCS8
|
||||
elif pem_format == 'PKCS1':
|
||||
fmt = serialization.PrivateFormat.TraditionalOpenSSL
|
||||
else:
|
||||
raise ValueError("Invalid format specified: %r" % pem_format)
|
||||
|
||||
return self.prepared_key.private_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=fmt,
|
||||
encryption_algorithm=serialization.NoEncryption()
|
||||
)
|
||||
|
||||
def to_dict(self):
|
||||
if not self.is_public():
|
||||
public_key = self.prepared_key.public_key()
|
||||
else:
|
||||
public_key = self.prepared_key
|
||||
|
||||
data = {
|
||||
'alg': self._algorithm,
|
||||
'kty': 'RSA',
|
||||
'n': long_to_base64(public_key.public_numbers().n),
|
||||
'e': long_to_base64(public_key.public_numbers().e),
|
||||
}
|
||||
|
||||
if not self.is_public():
|
||||
data.update({
|
||||
'd': long_to_base64(self.prepared_key.private_numbers().d),
|
||||
'p': long_to_base64(self.prepared_key.private_numbers().p),
|
||||
'q': long_to_base64(self.prepared_key.private_numbers().q),
|
||||
'dp': long_to_base64(self.prepared_key.private_numbers().dmp1),
|
||||
'dq': long_to_base64(self.prepared_key.private_numbers().dmq1),
|
||||
'qi': long_to_base64(self.prepared_key.private_numbers().iqmp),
|
||||
})
|
||||
|
||||
return data
|
||||
144
Lambdas/Websocket Authorizer/jose/backends/ecdsa_backend.py
Normal file
144
Lambdas/Websocket Authorizer/jose/backends/ecdsa_backend.py
Normal file
@@ -0,0 +1,144 @@
|
||||
import hashlib
|
||||
import six
|
||||
|
||||
from jose.backends.base import Key
|
||||
import ecdsa
|
||||
|
||||
from jose.constants import ALGORITHMS
|
||||
from jose.exceptions import JWKError
|
||||
from jose.utils import base64_to_long, long_to_base64
|
||||
|
||||
|
||||
class ECDSAECKey(Key):
|
||||
"""
|
||||
Performs signing and verification operations using
|
||||
ECDSA and the specified hash function
|
||||
|
||||
This class requires the ecdsa package to be installed.
|
||||
|
||||
This is based off of the implementation in PyJWT 0.3.2
|
||||
"""
|
||||
SHA256 = hashlib.sha256
|
||||
SHA384 = hashlib.sha384
|
||||
SHA512 = hashlib.sha512
|
||||
|
||||
CURVE_MAP = {
|
||||
SHA256: ecdsa.curves.NIST256p,
|
||||
SHA384: ecdsa.curves.NIST384p,
|
||||
SHA512: ecdsa.curves.NIST521p,
|
||||
}
|
||||
|
||||
def __init__(self, key, algorithm):
|
||||
if algorithm not in ALGORITHMS.EC:
|
||||
raise JWKError('hash_alg: %s is not a valid hash algorithm' % algorithm)
|
||||
|
||||
self.hash_alg = {
|
||||
ALGORITHMS.ES256: self.SHA256,
|
||||
ALGORITHMS.ES384: self.SHA384,
|
||||
ALGORITHMS.ES512: self.SHA512
|
||||
}.get(algorithm)
|
||||
self._algorithm = algorithm
|
||||
|
||||
self.curve = self.CURVE_MAP.get(self.hash_alg)
|
||||
|
||||
if isinstance(key, (ecdsa.SigningKey, ecdsa.VerifyingKey)):
|
||||
self.prepared_key = key
|
||||
return
|
||||
|
||||
if isinstance(key, dict):
|
||||
self.prepared_key = self._process_jwk(key)
|
||||
return
|
||||
|
||||
if isinstance(key, six.string_types):
|
||||
key = key.encode('utf-8')
|
||||
|
||||
if isinstance(key, six.binary_type):
|
||||
# Attempt to load key. We don't know if it's
|
||||
# a Signing Key or a Verifying Key, so we try
|
||||
# the Verifying Key first.
|
||||
try:
|
||||
key = ecdsa.VerifyingKey.from_pem(key)
|
||||
except ecdsa.der.UnexpectedDER:
|
||||
key = ecdsa.SigningKey.from_pem(key)
|
||||
except Exception as e:
|
||||
raise JWKError(e)
|
||||
|
||||
self.prepared_key = key
|
||||
return
|
||||
|
||||
raise JWKError('Unable to parse an ECKey from key: %s' % key)
|
||||
|
||||
def _process_jwk(self, jwk_dict):
|
||||
if not jwk_dict.get('kty') == 'EC':
|
||||
raise JWKError("Incorrect key type. Expected: 'EC', Recieved: %s" % jwk_dict.get('kty'))
|
||||
|
||||
if not all(k in jwk_dict for k in ['x', 'y', 'crv']):
|
||||
raise JWKError('Mandatory parameters are missing')
|
||||
|
||||
if 'd' in jwk_dict:
|
||||
# We are dealing with a private key; the secret exponent is enough
|
||||
# to create an ecdsa key.
|
||||
d = base64_to_long(jwk_dict.get('d'))
|
||||
return ecdsa.keys.SigningKey.from_secret_exponent(d, self.curve)
|
||||
else:
|
||||
x = base64_to_long(jwk_dict.get('x'))
|
||||
y = base64_to_long(jwk_dict.get('y'))
|
||||
|
||||
if not ecdsa.ecdsa.point_is_valid(self.curve.generator, x, y):
|
||||
raise JWKError("Point: %s, %s is not a valid point" % (x, y))
|
||||
|
||||
point = ecdsa.ellipticcurve.Point(self.curve.curve, x, y, self.curve.order)
|
||||
return ecdsa.keys.VerifyingKey.from_public_point(point, self.curve)
|
||||
|
||||
def sign(self, msg):
|
||||
return self.prepared_key.sign(msg, hashfunc=self.hash_alg, sigencode=ecdsa.util.sigencode_string)
|
||||
|
||||
def verify(self, msg, sig):
|
||||
try:
|
||||
return self.prepared_key.verify(sig, msg, hashfunc=self.hash_alg, sigdecode=ecdsa.util.sigdecode_string)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def is_public(self):
|
||||
return isinstance(self.prepared_key, ecdsa.VerifyingKey)
|
||||
|
||||
def public_key(self):
|
||||
if self.is_public():
|
||||
return self
|
||||
return self.__class__(self.prepared_key.get_verifying_key(), self._algorithm)
|
||||
|
||||
def to_pem(self):
|
||||
return self.prepared_key.to_pem()
|
||||
|
||||
def to_dict(self):
|
||||
if not self.is_public():
|
||||
public_key = self.prepared_key.get_verifying_key()
|
||||
else:
|
||||
public_key = self.prepared_key
|
||||
|
||||
crv = {
|
||||
ecdsa.curves.NIST256p: 'P-256',
|
||||
ecdsa.curves.NIST384p: 'P-384',
|
||||
ecdsa.curves.NIST521p: 'P-521',
|
||||
}[self.prepared_key.curve]
|
||||
|
||||
# Calculate the key size in bytes. Section 6.2.1.2 and 6.2.1.3 of
|
||||
# RFC7518 prescribes that the 'x', 'y' and 'd' parameters of the curve
|
||||
# points must be encoded as octed-strings of this length.
|
||||
key_size = self.prepared_key.curve.baselen
|
||||
|
||||
data = {
|
||||
'alg': self._algorithm,
|
||||
'kty': 'EC',
|
||||
'crv': crv,
|
||||
'x': long_to_base64(public_key.pubkey.point.x(), size=key_size),
|
||||
'y': long_to_base64(public_key.pubkey.point.y(), size=key_size),
|
||||
}
|
||||
|
||||
if not self.is_public():
|
||||
data['d'] = long_to_base64(
|
||||
self.prepared_key.privkey.secret_multiplier,
|
||||
size=key_size
|
||||
)
|
||||
|
||||
return data
|
||||
212
Lambdas/Websocket Authorizer/jose/backends/pycrypto_backend.py
Normal file
212
Lambdas/Websocket Authorizer/jose/backends/pycrypto_backend.py
Normal file
@@ -0,0 +1,212 @@
|
||||
from base64 import b64encode
|
||||
|
||||
import six
|
||||
|
||||
import Crypto.Hash.SHA256
|
||||
import Crypto.Hash.SHA384
|
||||
import Crypto.Hash.SHA512
|
||||
|
||||
from Crypto.PublicKey import RSA
|
||||
from Crypto.Signature import PKCS1_v1_5
|
||||
from Crypto.Util.asn1 import DerSequence
|
||||
|
||||
from jose.backends.base import Key
|
||||
from jose.backends._asn1 import rsa_public_key_pkcs8_to_pkcs1
|
||||
from jose.utils import base64_to_long, long_to_base64
|
||||
from jose.constants import ALGORITHMS
|
||||
from jose.exceptions import JWKError
|
||||
from jose.utils import base64url_decode
|
||||
|
||||
|
||||
# We default to using PyCryptodome, however, if PyCrypto is installed, it is
|
||||
# used instead. This is so that environments that require the use of PyCrypto
|
||||
# are still supported.
|
||||
if hasattr(RSA, 'RsaKey'):
|
||||
_RSAKey = RSA.RsaKey
|
||||
else:
|
||||
_RSAKey = RSA._RSAobj
|
||||
|
||||
|
||||
def _der_to_pem(der_key, marker):
|
||||
"""
|
||||
Perform a simple DER to PEM conversion.
|
||||
"""
|
||||
pem_key_chunks = [('-----BEGIN %s-----' % marker).encode('utf-8')]
|
||||
|
||||
# Limit base64 output lines to 64 characters by limiting input lines to 48 characters.
|
||||
for chunk_start in range(0, len(der_key), 48):
|
||||
pem_key_chunks.append(b64encode(der_key[chunk_start:chunk_start + 48]))
|
||||
|
||||
pem_key_chunks.append(('-----END %s-----' % marker).encode('utf-8'))
|
||||
|
||||
return b'\n'.join(pem_key_chunks)
|
||||
|
||||
|
||||
class RSAKey(Key):
|
||||
"""
|
||||
Performs signing and verification operations using
|
||||
RSASSA-PKCS-v1_5 and the specified hash function.
|
||||
This class requires PyCrypto package to be installed.
|
||||
This is based off of the implementation in PyJWT 0.3.2
|
||||
"""
|
||||
|
||||
SHA256 = Crypto.Hash.SHA256
|
||||
SHA384 = Crypto.Hash.SHA384
|
||||
SHA512 = Crypto.Hash.SHA512
|
||||
|
||||
def __init__(self, key, algorithm):
|
||||
|
||||
if algorithm not in ALGORITHMS.RSA:
|
||||
raise JWKError('hash_alg: %s is not a valid hash algorithm' % algorithm)
|
||||
|
||||
self.hash_alg = {
|
||||
ALGORITHMS.RS256: self.SHA256,
|
||||
ALGORITHMS.RS384: self.SHA384,
|
||||
ALGORITHMS.RS512: self.SHA512
|
||||
}.get(algorithm)
|
||||
self._algorithm = algorithm
|
||||
|
||||
if isinstance(key, _RSAKey):
|
||||
self.prepared_key = key
|
||||
return
|
||||
|
||||
if isinstance(key, dict):
|
||||
self._process_jwk(key)
|
||||
return
|
||||
|
||||
if isinstance(key, six.string_types):
|
||||
key = key.encode('utf-8')
|
||||
|
||||
if isinstance(key, six.binary_type):
|
||||
if key.startswith(b'-----BEGIN CERTIFICATE-----'):
|
||||
try:
|
||||
self._process_cert(key)
|
||||
except Exception as e:
|
||||
raise JWKError(e)
|
||||
return
|
||||
|
||||
try:
|
||||
self.prepared_key = RSA.importKey(key)
|
||||
except Exception as e:
|
||||
raise JWKError(e)
|
||||
return
|
||||
|
||||
raise JWKError('Unable to parse an RSA_JWK from key: %s' % key)
|
||||
|
||||
def _process_jwk(self, jwk_dict):
|
||||
if not jwk_dict.get('kty') == 'RSA':
|
||||
raise JWKError("Incorrect key type. Expected: 'RSA', Recieved: %s" % jwk_dict.get('kty'))
|
||||
|
||||
e = base64_to_long(jwk_dict.get('e', 256))
|
||||
n = base64_to_long(jwk_dict.get('n'))
|
||||
params = (n, e)
|
||||
|
||||
if 'd' in jwk_dict:
|
||||
params += (base64_to_long(jwk_dict.get('d')),)
|
||||
|
||||
extra_params = ['p', 'q', 'dp', 'dq', 'qi']
|
||||
|
||||
if any(k in jwk_dict for k in extra_params):
|
||||
# Precomputed private key parameters are available.
|
||||
if not all(k in jwk_dict for k in extra_params):
|
||||
# These values must be present when 'p' is according to
|
||||
# Section 6.3.2 of RFC7518, so if they are not we raise
|
||||
# an error.
|
||||
raise JWKError('Precomputed private key parameters are incomplete.')
|
||||
|
||||
p = base64_to_long(jwk_dict.get('p'))
|
||||
q = base64_to_long(jwk_dict.get('q'))
|
||||
qi = base64_to_long(jwk_dict.get('qi'))
|
||||
|
||||
# PyCrypto does not take the dp and dq as arguments, so we do
|
||||
# not pass them. Furthermore, the parameter qi specified in
|
||||
# the JWK is the inverse of q modulo p, whereas PyCrypto
|
||||
# takes the inverse of p modulo q. We therefore switch the
|
||||
# parameters to make the third parameter the inverse of the
|
||||
# second parameter modulo the first parameter.
|
||||
params += (q, p, qi)
|
||||
|
||||
self.prepared_key = RSA.construct(params)
|
||||
|
||||
return self.prepared_key
|
||||
|
||||
def _process_cert(self, key):
|
||||
pemLines = key.replace(b' ', b'').split()
|
||||
certDer = base64url_decode(b''.join(pemLines[1:-1]))
|
||||
certSeq = DerSequence()
|
||||
certSeq.decode(certDer)
|
||||
tbsSeq = DerSequence()
|
||||
tbsSeq.decode(certSeq[0])
|
||||
self.prepared_key = RSA.importKey(tbsSeq[6])
|
||||
return
|
||||
|
||||
def sign(self, msg):
|
||||
try:
|
||||
return PKCS1_v1_5.new(self.prepared_key).sign(self.hash_alg.new(msg))
|
||||
except Exception as e:
|
||||
raise JWKError(e)
|
||||
|
||||
def verify(self, msg, sig):
|
||||
try:
|
||||
return PKCS1_v1_5.new(self.prepared_key).verify(self.hash_alg.new(msg), sig)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def is_public(self):
|
||||
return not self.prepared_key.has_private()
|
||||
|
||||
def public_key(self):
|
||||
if self.is_public():
|
||||
return self
|
||||
return self.__class__(self.prepared_key.publickey(), self._algorithm)
|
||||
|
||||
def to_pem(self, pem_format='PKCS8'):
|
||||
if pem_format == 'PKCS8':
|
||||
pkcs = 8
|
||||
elif pem_format == 'PKCS1':
|
||||
pkcs = 1
|
||||
else:
|
||||
raise ValueError("Invalid pem format specified: %r" % (pem_format,))
|
||||
|
||||
if self.is_public():
|
||||
# PyCrypto/dome always export public keys as PKCS8
|
||||
if pkcs == 8:
|
||||
pem = self.prepared_key.exportKey('PEM')
|
||||
else:
|
||||
pkcs8_der = self.prepared_key.exportKey('DER')
|
||||
pkcs1_der = rsa_public_key_pkcs8_to_pkcs1(pkcs8_der)
|
||||
pem = _der_to_pem(pkcs1_der, 'RSA PUBLIC KEY')
|
||||
return pem
|
||||
else:
|
||||
pem = self.prepared_key.exportKey('PEM', pkcs=pkcs)
|
||||
return pem
|
||||
|
||||
def to_dict(self):
|
||||
data = {
|
||||
'alg': self._algorithm,
|
||||
'kty': 'RSA',
|
||||
'n': long_to_base64(self.prepared_key.n),
|
||||
'e': long_to_base64(self.prepared_key.e),
|
||||
}
|
||||
|
||||
if not self.is_public():
|
||||
# Section 6.3.2 of RFC7518 prescribes that when we include the
|
||||
# optional parameters p and q, we must also include the values of
|
||||
# dp and dq, which are not readily available from PyCrypto - so we
|
||||
# calculate them. Moreover, PyCrypto stores the inverse of p
|
||||
# modulo q rather than the inverse of q modulo p, so we switch
|
||||
# p and q. As far as I can tell, this is OK - RFC7518 only
|
||||
# asserts that p is the 'first factor', but does not specify
|
||||
# what 'first' means in this case.
|
||||
dp = self.prepared_key.d % (self.prepared_key.p - 1)
|
||||
dq = self.prepared_key.d % (self.prepared_key.q - 1)
|
||||
data.update({
|
||||
'd': long_to_base64(self.prepared_key.d),
|
||||
'p': long_to_base64(self.prepared_key.q),
|
||||
'q': long_to_base64(self.prepared_key.p),
|
||||
'dp': long_to_base64(dq),
|
||||
'dq': long_to_base64(dp),
|
||||
'qi': long_to_base64(self.prepared_key.u),
|
||||
})
|
||||
|
||||
return data
|
||||
263
Lambdas/Websocket Authorizer/jose/backends/rsa_backend.py
Normal file
263
Lambdas/Websocket Authorizer/jose/backends/rsa_backend.py
Normal file
@@ -0,0 +1,263 @@
|
||||
import binascii
|
||||
|
||||
import six
|
||||
from pyasn1.error import PyAsn1Error
|
||||
|
||||
import rsa as pyrsa
|
||||
import rsa.pem as pyrsa_pem
|
||||
|
||||
from jose.backends.base import Key
|
||||
from jose.backends._asn1 import (
|
||||
rsa_private_key_pkcs1_to_pkcs8,
|
||||
rsa_private_key_pkcs8_to_pkcs1,
|
||||
rsa_public_key_pkcs1_to_pkcs8,
|
||||
)
|
||||
from jose.constants import ALGORITHMS
|
||||
from jose.exceptions import JWKError
|
||||
from jose.utils import base64_to_long, long_to_base64
|
||||
|
||||
|
||||
LEGACY_INVALID_PKCS8_RSA_HEADER = binascii.unhexlify(
|
||||
"30" # sequence
|
||||
"8204BD" # DER-encoded sequence contents length of 1213 bytes -- INCORRECT STATIC LENGTH
|
||||
"020100" # integer: 0 -- Version
|
||||
"30" # sequence
|
||||
"0D" # DER-encoded sequence contents length of 13 bytes -- PrivateKeyAlgorithmIdentifier
|
||||
"06092A864886F70D010101" # OID -- rsaEncryption
|
||||
"0500" # NULL -- parameters
|
||||
)
|
||||
ASN1_SEQUENCE_ID = binascii.unhexlify("30")
|
||||
RSA_ENCRYPTION_ASN1_OID = "1.2.840.113549.1.1.1"
|
||||
|
||||
# Functions gcd and rsa_recover_prime_factors were copied from cryptography 1.9
|
||||
# to enable pure python rsa module to be in compliance with section 6.3.1 of RFC7518
|
||||
# which requires only private exponent (d) for private key.
|
||||
|
||||
|
||||
def _gcd(a, b):
|
||||
"""Calculate the Greatest Common Divisor of a and b.
|
||||
|
||||
Unless b==0, the result will have the same sign as b (so that when
|
||||
b is divided by it, the result comes out positive).
|
||||
"""
|
||||
while b:
|
||||
a, b = b, (a % b)
|
||||
return a
|
||||
|
||||
|
||||
# Controls the number of iterations rsa_recover_prime_factors will perform
|
||||
# to obtain the prime factors. Each iteration increments by 2 so the actual
|
||||
# maximum attempts is half this number.
|
||||
_MAX_RECOVERY_ATTEMPTS = 1000
|
||||
|
||||
|
||||
def _rsa_recover_prime_factors(n, e, d):
|
||||
"""
|
||||
Compute factors p and q from the private exponent d. We assume that n has
|
||||
no more than two factors. This function is adapted from code in PyCrypto.
|
||||
"""
|
||||
# See 8.2.2(i) in Handbook of Applied Cryptography.
|
||||
ktot = d * e - 1
|
||||
# The quantity d*e-1 is a multiple of phi(n), even,
|
||||
# and can be represented as t*2^s.
|
||||
t = ktot
|
||||
while t % 2 == 0:
|
||||
t = t // 2
|
||||
# Cycle through all multiplicative inverses in Zn.
|
||||
# The algorithm is non-deterministic, but there is a 50% chance
|
||||
# any candidate a leads to successful factoring.
|
||||
# See "Digitalized Signatures and Public Key Functions as Intractable
|
||||
# as Factorization", M. Rabin, 1979
|
||||
spotted = False
|
||||
a = 2
|
||||
while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
|
||||
k = t
|
||||
# Cycle through all values a^{t*2^i}=a^k
|
||||
while k < ktot:
|
||||
cand = pow(a, k, n)
|
||||
# Check if a^k is a non-trivial root of unity (mod n)
|
||||
if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
|
||||
# We have found a number such that (cand-1)(cand+1)=0 (mod n).
|
||||
# Either of the terms divides n.
|
||||
p = _gcd(cand + 1, n)
|
||||
spotted = True
|
||||
break
|
||||
k *= 2
|
||||
# This value was not any good... let's try another!
|
||||
a += 2
|
||||
if not spotted:
|
||||
raise ValueError("Unable to compute factors p and q from exponent d.")
|
||||
# Found !
|
||||
q, r = divmod(n, p)
|
||||
assert r == 0
|
||||
p, q = sorted((p, q), reverse=True)
|
||||
return (p, q)
|
||||
|
||||
|
||||
def pem_to_spki(pem, fmt='PKCS8'):
|
||||
key = RSAKey(pem, ALGORITHMS.RS256)
|
||||
return key.to_pem(fmt)
|
||||
|
||||
|
||||
def _legacy_private_key_pkcs8_to_pkcs1(pkcs8_key):
|
||||
"""Legacy RSA private key PKCS8-to-PKCS1 conversion.
|
||||
|
||||
.. warning::
|
||||
|
||||
This is incorrect parsing and only works because the legacy PKCS1-to-PKCS8
|
||||
encoding was also incorrect.
|
||||
"""
|
||||
# Only allow this processing if the prefix matches
|
||||
# AND the following byte indicates an ASN1 sequence,
|
||||
# as we would expect with the legacy encoding.
|
||||
if not pkcs8_key.startswith(LEGACY_INVALID_PKCS8_RSA_HEADER + ASN1_SEQUENCE_ID):
|
||||
raise ValueError("Invalid private key encoding")
|
||||
|
||||
return pkcs8_key[len(LEGACY_INVALID_PKCS8_RSA_HEADER):]
|
||||
|
||||
|
||||
class RSAKey(Key):
|
||||
SHA256 = 'SHA-256'
|
||||
SHA384 = 'SHA-384'
|
||||
SHA512 = 'SHA-512'
|
||||
|
||||
def __init__(self, key, algorithm):
|
||||
if algorithm not in ALGORITHMS.RSA:
|
||||
raise JWKError('hash_alg: %s is not a valid hash algorithm' % algorithm)
|
||||
|
||||
self.hash_alg = {
|
||||
ALGORITHMS.RS256: self.SHA256,
|
||||
ALGORITHMS.RS384: self.SHA384,
|
||||
ALGORITHMS.RS512: self.SHA512
|
||||
}.get(algorithm)
|
||||
self._algorithm = algorithm
|
||||
|
||||
if isinstance(key, dict):
|
||||
self._prepared_key = self._process_jwk(key)
|
||||
return
|
||||
|
||||
if isinstance(key, (pyrsa.PublicKey, pyrsa.PrivateKey)):
|
||||
self._prepared_key = key
|
||||
return
|
||||
|
||||
if isinstance(key, six.string_types):
|
||||
key = key.encode('utf-8')
|
||||
|
||||
if isinstance(key, six.binary_type):
|
||||
try:
|
||||
self._prepared_key = pyrsa.PublicKey.load_pkcs1(key)
|
||||
except ValueError:
|
||||
try:
|
||||
self._prepared_key = pyrsa.PublicKey.load_pkcs1_openssl_pem(key)
|
||||
except ValueError:
|
||||
try:
|
||||
self._prepared_key = pyrsa.PrivateKey.load_pkcs1(key)
|
||||
except ValueError:
|
||||
try:
|
||||
der = pyrsa_pem.load_pem(key, b'PRIVATE KEY')
|
||||
try:
|
||||
pkcs1_key = rsa_private_key_pkcs8_to_pkcs1(der)
|
||||
except PyAsn1Error:
|
||||
# If the key was encoded using the old, invalid,
|
||||
# encoding then pyasn1 will throw an error attempting
|
||||
# to parse the key.
|
||||
pkcs1_key = _legacy_private_key_pkcs8_to_pkcs1(der)
|
||||
self._prepared_key = pyrsa.PrivateKey.load_pkcs1(pkcs1_key, format="DER")
|
||||
except ValueError as e:
|
||||
raise JWKError(e)
|
||||
return
|
||||
raise JWKError('Unable to parse an RSA_JWK from key: %s' % key)
|
||||
|
||||
def _process_jwk(self, jwk_dict):
|
||||
if not jwk_dict.get('kty') == 'RSA':
|
||||
raise JWKError("Incorrect key type. Expected: 'RSA', Recieved: %s" % jwk_dict.get('kty'))
|
||||
|
||||
e = base64_to_long(jwk_dict.get('e'))
|
||||
n = base64_to_long(jwk_dict.get('n'))
|
||||
|
||||
if 'd' not in jwk_dict:
|
||||
return pyrsa.PublicKey(e=e, n=n)
|
||||
else:
|
||||
d = base64_to_long(jwk_dict.get('d'))
|
||||
extra_params = ['p', 'q', 'dp', 'dq', 'qi']
|
||||
|
||||
if any(k in jwk_dict for k in extra_params):
|
||||
# Precomputed private key parameters are available.
|
||||
if not all(k in jwk_dict for k in extra_params):
|
||||
# These values must be present when 'p' is according to
|
||||
# Section 6.3.2 of RFC7518, so if they are not we raise
|
||||
# an error.
|
||||
raise JWKError('Precomputed private key parameters are incomplete.')
|
||||
|
||||
p = base64_to_long(jwk_dict['p'])
|
||||
q = base64_to_long(jwk_dict['q'])
|
||||
return pyrsa.PrivateKey(e=e, n=n, d=d, p=p, q=q)
|
||||
else:
|
||||
p, q = _rsa_recover_prime_factors(n, e, d)
|
||||
return pyrsa.PrivateKey(n=n, e=e, d=d, p=p, q=q)
|
||||
|
||||
def sign(self, msg):
|
||||
return pyrsa.sign(msg, self._prepared_key, self.hash_alg)
|
||||
|
||||
def verify(self, msg, sig):
|
||||
try:
|
||||
pyrsa.verify(msg, sig, self._prepared_key)
|
||||
return True
|
||||
except pyrsa.pkcs1.VerificationError:
|
||||
return False
|
||||
|
||||
def is_public(self):
|
||||
return isinstance(self._prepared_key, pyrsa.PublicKey)
|
||||
|
||||
def public_key(self):
|
||||
if isinstance(self._prepared_key, pyrsa.PublicKey):
|
||||
return self
|
||||
return self.__class__(pyrsa.PublicKey(n=self._prepared_key.n, e=self._prepared_key.e), self._algorithm)
|
||||
|
||||
def to_pem(self, pem_format='PKCS8'):
|
||||
|
||||
if isinstance(self._prepared_key, pyrsa.PrivateKey):
|
||||
der = self._prepared_key.save_pkcs1(format='DER')
|
||||
if pem_format == 'PKCS8':
|
||||
pkcs8_der = rsa_private_key_pkcs1_to_pkcs8(der)
|
||||
pem = pyrsa_pem.save_pem(pkcs8_der, pem_marker='PRIVATE KEY')
|
||||
elif pem_format == 'PKCS1':
|
||||
pem = pyrsa_pem.save_pem(der, pem_marker='RSA PRIVATE KEY')
|
||||
else:
|
||||
raise ValueError("Invalid pem format specified: %r" % (pem_format,))
|
||||
else:
|
||||
if pem_format == 'PKCS8':
|
||||
pkcs1_der = self._prepared_key.save_pkcs1(format="DER")
|
||||
pkcs8_der = rsa_public_key_pkcs1_to_pkcs8(pkcs1_der)
|
||||
pem = pyrsa_pem.save_pem(pkcs8_der, pem_marker='PUBLIC KEY')
|
||||
elif pem_format == 'PKCS1':
|
||||
der = self._prepared_key.save_pkcs1(format='DER')
|
||||
pem = pyrsa_pem.save_pem(der, pem_marker='RSA PUBLIC KEY')
|
||||
else:
|
||||
raise ValueError("Invalid pem format specified: %r" % (pem_format,))
|
||||
return pem
|
||||
|
||||
def to_dict(self):
    """Return the JWK dict for this key.

    Public keys expose (alg, kty, n, e); private keys additionally
    include the private exponent and CRT parameters (d, p, q, dp, dq, qi).
    """
    if self.is_public():
        public = self._prepared_key
    else:
        public = self.public_key()._prepared_key

    data = {
        'alg': self._algorithm,
        'kty': 'RSA',
        'n': long_to_base64(public.n),
        'e': long_to_base64(public.e),
    }

    if self.is_public():
        return data

    private = self._prepared_key
    data.update({
        'd': long_to_base64(private.d),
        'p': long_to_base64(private.p),
        'q': long_to_base64(private.q),
        # python-rsa names the CRT exponents exp1/exp2 and the
        # coefficient coef; JWK calls them dp/dq/qi (RFC 7518).
        'dp': long_to_base64(private.exp1),
        'dq': long_to_base64(private.exp2),
        'qi': long_to_base64(private.coef),
    })

    return data
|
||||
39
Lambdas/Websocket Authorizer/jose/constants.py
Normal file
39
Lambdas/Websocket Authorizer/jose/constants.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import hashlib
|
||||
|
||||
|
||||
class Algorithms(object):
    """Registry of the JOSE signature algorithm identifiers jose knows."""

    # Unsecured JWS (no signature at all).
    NONE = 'none'

    # HMAC-with-SHA-2 family.
    HS256, HS384, HS512 = 'HS256', 'HS384', 'HS512'
    # RSASSA-PKCS1-v1_5 family.
    RS256, RS384, RS512 = 'RS256', 'RS384', 'RS512'
    # ECDSA family.
    ES256, ES384, ES512 = 'ES256', 'ES384', 'ES512'

    HMAC = {HS256, HS384, HS512}
    RSA = {RS256, RS384, RS512}
    EC = {ES256, ES384, ES512}

    # SUPPORTED: every algorithm that can actually sign/verify.
    # ALL additionally admits the unsecured 'none' algorithm.
    SUPPORTED = HMAC | RSA | EC
    ALL = SUPPORTED | {NONE}

    # Digest constructor used by each algorithm (e.g. 'RS384' -> sha384).
    HASHES = {
        HS256: hashlib.sha256,
        HS384: hashlib.sha384,
        HS512: hashlib.sha512,
        RS256: hashlib.sha256,
        RS384: hashlib.sha384,
        RS512: hashlib.sha512,
        ES256: hashlib.sha256,
        ES384: hashlib.sha384,
        ES512: hashlib.sha512,
    }

    # Dynamically registered Key implementations (see jwk.register_key).
    KEYS = {}


# Module-level singleton used throughout the package.
ALGORITHMS = Algorithms()
|
||||
36
Lambdas/Websocket Authorizer/jose/exceptions.py
Normal file
36
Lambdas/Websocket Authorizer/jose/exceptions.py
Normal file
@@ -0,0 +1,36 @@
|
||||
|
||||
|
||||
class JOSEError(Exception):
    """Base class for every exception raised by the jose package."""
    pass
|
||||
|
||||
|
||||
class JWSError(JOSEError):
    """Raised for any error while signing or verifying a JWS."""
    pass
|
||||
|
||||
|
||||
class JWSSignatureError(JWSError):
    """Raised when a JWS signature does not match any candidate key."""
    pass
|
||||
|
||||
|
||||
class JWSAlgorithmError(JWSError):
    """Raised when a JWS uses an invalid or unsupported algorithm."""
    pass
|
||||
|
||||
|
||||
class JWTError(JOSEError):
    """Raised for any error while encoding or decoding a JWT."""
    pass
|
||||
|
||||
|
||||
class JWTClaimsError(JWTError):
    """Raised when a JWT claim fails validation (aud, iss, exp, ...)."""
    pass
|
||||
|
||||
|
||||
class JWTSignatureError(JWTError):
    """Raised when a JWT's signature fails verification."""
    pass
|
||||
|
||||
|
||||
class ExpiredSignatureError(JWTError):
    """Raised when a JWT's 'exp' claim indicates the token has expired."""
    pass
|
||||
|
||||
|
||||
class JWKError(JOSEError):
    """Raised for any error while constructing or using a JWK key object."""
    pass
|
||||
142
Lambdas/Websocket Authorizer/jose/jwk.py
Normal file
142
Lambdas/Websocket Authorizer/jose/jwk.py
Normal file
@@ -0,0 +1,142 @@
|
||||
|
||||
import hashlib
|
||||
import hmac
|
||||
import six
|
||||
|
||||
from jose.constants import ALGORITHMS
|
||||
from jose.exceptions import JWKError
|
||||
from jose.utils import base64url_decode, base64url_encode
|
||||
from jose.utils import constant_time_string_compare
|
||||
from jose.backends.base import Key
|
||||
|
||||
try:
|
||||
from jose.backends import RSAKey # noqa: F401
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
from jose.backends import ECKey # noqa: F401
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
def get_key(algorithm):
    """Resolve *algorithm* to the Key class implementing it, or None.

    Explicitly registered classes (ALGORITHMS.KEYS) take precedence over
    the built-in HMAC/RSA/EC backends.
    """
    registered = ALGORITHMS.KEYS.get(algorithm)
    if registered is not None:
        return registered
    if algorithm in ALGORITHMS.HMAC:
        return HMACKey
    if algorithm in ALGORITHMS.RSA:
        # Imported lazily: the RSA backend may be unavailable.
        from jose.backends import RSAKey  # noqa: F811
        return RSAKey
    if algorithm in ALGORITHMS.EC:
        # Imported lazily: the EC backend may be unavailable.
        from jose.backends import ECKey  # noqa: F811
        return ECKey
    return None
|
||||
|
||||
|
||||
def register_key(algorithm, key_class):
    """Install *key_class* as the handler for *algorithm*.

    Also marks the algorithm as supported. Returns True on success;
    raises TypeError if *key_class* is not a jwk.Key subclass.
    """
    if not issubclass(key_class, Key):
        raise TypeError("Key class not a subclass of jwk.Key")

    ALGORITHMS.KEYS[algorithm] = key_class
    ALGORITHMS.SUPPORTED.add(algorithm)

    return True
|
||||
|
||||
|
||||
def construct(key_data, algorithm=None):
    """Build a Key object for *key_data*.

    If *algorithm* is not given and *key_data* is a JWK dict, the
    algorithm is pulled from the dict's 'alg' member.

    Raises:
        JWKError: if no algorithm can be determined or it is unknown.
    """
    # Allow for pulling the algorithm off of the passed in jwk.
    if not algorithm and isinstance(key_data, dict):
        algorithm = key_data.get('alg', None)

    if not algorithm:
        raise JWKError('Unable to find a algorithm for key: %s' % key_data)

    key_class = get_key(algorithm)
    if key_class is None:
        raise JWKError('Unable to find a algorithm for key: %s' % key_data)
    return key_class(key_data, algorithm)
|
||||
|
||||
|
||||
def get_algorithm_object(algorithm):
    """Return the hash attribute (e.g. Key.SHA256) for *algorithm*.

    Looks up the Key class for the algorithm and returns its digest
    attribute named after the algorithm's hash size.
    """
    hash_attr_for = {
        ALGORITHMS.HS256: 'SHA256',
        ALGORITHMS.HS384: 'SHA384',
        ALGORITHMS.HS512: 'SHA512',
        ALGORITHMS.RS256: 'SHA256',
        ALGORITHMS.RS384: 'SHA384',
        ALGORITHMS.RS512: 'SHA512',
        ALGORITHMS.ES256: 'SHA256',
        ALGORITHMS.ES384: 'SHA384',
        ALGORITHMS.ES512: 'SHA512',
    }
    key_class = get_key(algorithm)
    attr_name = hash_attr_for.get(algorithm, None)
    return getattr(key_class, attr_name)
|
||||
|
||||
|
||||
class HMACKey(Key):
    """
    Performs signing and verification operations using HMAC
    and the specified hash function.
    """
    SHA256 = hashlib.sha256
    SHA384 = hashlib.sha384
    SHA512 = hashlib.sha512

    def __init__(self, key, algorithm):
        """Prepare an HMAC secret from a JWK dict, str, or bytes.

        Raises:
            JWKError: for a non-HMAC algorithm, a non str/bytes/dict key,
                or key material that looks like asymmetric key/cert data.
        """
        if algorithm not in ALGORITHMS.HMAC:
            raise JWKError('hash_alg: %s is not a valid hash algorithm' % algorithm)
        self._algorithm = algorithm
        self.hash_alg = get_algorithm_object(algorithm)

        if isinstance(key, dict):
            self.prepared_key = self._process_jwk(key)
            return

        if not isinstance(key, six.string_types) and not isinstance(key, bytes):
            raise JWKError('Expecting a string- or bytes-formatted key.')

        if isinstance(key, six.text_type):
            key = key.encode('utf-8')

        # Reject PEM/certificate/ssh material outright: accepting a public
        # key as an HMAC secret enables the well-known RS*/HS* JWT
        # key-confusion attack.
        invalid_strings = [
            b'-----BEGIN PUBLIC KEY-----',
            b'-----BEGIN RSA PUBLIC KEY-----',
            b'-----BEGIN CERTIFICATE-----',
            b'ssh-rsa'
        ]

        if any(string_value in key for string_value in invalid_strings):
            raise JWKError(
                'The specified key is an asymmetric key or x509 certificate and'
                ' should not be used as an HMAC secret.')

        self.prepared_key = key

    def _process_jwk(self, jwk_dict):
        """Extract and base64url-decode the 'k' member of an 'oct' JWK."""
        if not jwk_dict.get('kty') == 'oct':
            # Fix: error message previously misspelled "Received".
            raise JWKError("Incorrect key type. Expected: 'oct', Received: %s" % jwk_dict.get('kty'))

        k = jwk_dict.get('k')
        k = k.encode('utf-8')
        k = bytes(k)
        k = base64url_decode(k)

        return k

    def sign(self, msg):
        """Return the HMAC digest of *msg* under the prepared secret."""
        return hmac.new(self.prepared_key, msg, self.hash_alg).digest()

    def verify(self, msg, sig):
        """Return True iff *sig* matches the HMAC of *msg* (constant time)."""
        return constant_time_string_compare(sig, self.sign(msg))

    def to_dict(self):
        """Return the 'oct' JWK dict representation of this secret."""
        return {
            'alg': self._algorithm,
            'kty': 'oct',
            'k': base64url_encode(self.prepared_key),
        }
|
||||
273
Lambdas/Websocket Authorizer/jose/jws.py
Normal file
273
Lambdas/Websocket Authorizer/jose/jws.py
Normal file
@@ -0,0 +1,273 @@
|
||||
|
||||
import binascii
|
||||
import json
|
||||
import six
|
||||
|
||||
try:
|
||||
from collections.abc import Mapping, Iterable # Python 3
|
||||
except ImportError:
|
||||
from collections import Mapping, Iterable # Python 2, will be deprecated in Python 3.8
|
||||
|
||||
from jose import jwk
|
||||
from jose.constants import ALGORITHMS
|
||||
from jose.exceptions import JWSError
|
||||
from jose.exceptions import JWSSignatureError
|
||||
from jose.utils import base64url_encode
|
||||
from jose.utils import base64url_decode
|
||||
|
||||
|
||||
def sign(payload, key, headers=None, algorithm=ALGORITHMS.HS256):
    """Sign a claims set and return the compact JWS string.

    Args:
        payload (str): A string to sign.
        key (str or dict): Key used for signing; individual JWK or JWK set.
        headers (dict, optional): Extra headers merged over (and overriding)
            the default headers.
        algorithm (str, optional): Signing algorithm; defaults to HS256.

    Returns:
        str: '<header>.<payload>.<signature>'.

    Raises:
        JWSError: If the algorithm is unsupported or signing fails.

    Examples:

        >>> jws.sign({'a': 'b'}, 'secret', algorithm='HS256')
        'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'

    """
    if algorithm not in ALGORITHMS.SUPPORTED:
        raise JWSError('Algorithm %s not supported.' % algorithm)

    return _sign_header_and_claims(
        _encode_header(algorithm, additional_headers=headers),
        _encode_payload(payload),
        algorithm,
        key,
    )
|
||||
|
||||
|
||||
def verify(token, key, algorithms, verify=True):
    """Verify a JWS string's signature and return its payload.

    Args:
        token (str): A signed JWS to be verified.
        key (str or dict): Candidate key; individual JWK or JWK set.
        algorithms (str or list): Algorithms allowed for verification.
        verify (bool, optional): Skip signature checking when False.

    Returns:
        str: The payload, assuming the signature is valid.

    Raises:
        JWSError: If there is an exception verifying the token.

    Examples:

        >>> token = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'
        >>> jws.verify(token, 'secret', algorithms='HS256')

    """
    parsed = _load(token)
    header, payload, signing_input, signature = parsed

    if verify:
        _verify_signature(signing_input, header, signature, key, algorithms)

    return payload
|
||||
|
||||
|
||||
def get_unverified_header(token):
    """Return the decoded JWS headers with no verification of any kind.

    Args:
        token (str): A signed JWS to decode the headers from.

    Returns:
        dict: The token headers.

    Raises:
        JWSError: If there is an exception decoding the token.
    """
    header, _claims, _signing_input, _signature = _load(token)
    return header
|
||||
|
||||
|
||||
def get_unverified_headers(token):
    """Backwards-compatible alias for get_unverified_header().

    Args:
        token (str): A signed JWS to decode the headers from.

    Returns:
        dict: The token headers.

    Raises:
        JWSError: If there is an exception decoding the token.
    """
    return get_unverified_header(token)
|
||||
|
||||
|
||||
def get_unverified_claims(token):
    """Return the raw decoded claims with no verification of any kind.

    Args:
        token (str): A signed JWS to decode the claims from.

    Returns:
        str: The token claims segment, base64url-decoded.

    Raises:
        JWSError: If there is an exception decoding the token.
    """
    _header, claims, _signing_input, _signature = _load(token)
    return claims
|
||||
|
||||
|
||||
def _encode_header(algorithm, additional_headers=None):
    """Build the JOSE header for *algorithm* and base64url-encode it.

    Additional headers override the defaults; keys are sorted so the
    encoded output is deterministic.
    """
    header = {"typ": "JWT", "alg": algorithm}
    if additional_headers:
        header.update(additional_headers)

    serialized = json.dumps(
        header,
        separators=(',', ':'),
        sort_keys=True,
    ).encode('utf-8')
    return base64url_encode(serialized)
|
||||
|
||||
|
||||
def _encode_payload(payload):
    """base64url-encode *payload*, serializing mappings to compact JSON."""
    if isinstance(payload, Mapping):
        try:
            payload = json.dumps(payload, separators=(',', ':')).encode('utf-8')
        except ValueError:
            # Fall through and encode the mapping's original form as-is.
            pass
    return base64url_encode(payload)
|
||||
|
||||
|
||||
def _sign_header_and_claims(encoded_header, encoded_claims, algorithm, key_data):
    """Sign '<header>.<claims>' with *key_data*; return the full JWS str.

    Raises:
        JWSError: wrapping any key-construction or signing failure.
    """
    signing_input = b'.'.join([encoded_header, encoded_claims])
    try:
        signer = jwk.construct(key_data, algorithm)
        signature = signer.sign(signing_input)
    except Exception as e:
        # Surface every key/signing failure uniformly as a JWSError.
        raise JWSError(e)

    segments = (encoded_header, encoded_claims, base64url_encode(signature))
    return b'.'.join(segments).decode('utf-8')
|
||||
|
||||
|
||||
def _load(jwt):
    """Split a compact JWS into its parts.

    Returns:
        tuple: (header dict, payload bytes, signing_input bytes,
        signature bytes).

    Raises:
        JWSError: on any structural, padding, or header-JSON problem.
    """
    if isinstance(jwt, six.text_type):
        jwt = jwt.encode('utf-8')
    try:
        # signing_input is everything before the final dot: header.claims.
        signing_input, crypto_segment = jwt.rsplit(b'.', 1)
        header_segment, claims_segment = signing_input.split(b'.', 1)
        header_data = base64url_decode(header_segment)
    except ValueError:
        # rsplit/split found fewer than the required dots.
        raise JWSError('Not enough segments')
    except (TypeError, binascii.Error):
        raise JWSError('Invalid header padding')

    try:
        header = json.loads(header_data.decode('utf-8'))
    except ValueError as e:
        raise JWSError('Invalid header string: %s' % e)

    if not isinstance(header, Mapping):
        raise JWSError('Invalid header string: must be a json object')

    try:
        payload = base64url_decode(claims_segment)
    except (TypeError, binascii.Error):
        raise JWSError('Invalid payload padding')

    try:
        signature = base64url_decode(crypto_segment)
    except (TypeError, binascii.Error):
        raise JWSError('Invalid crypto padding')

    return (header, payload, signing_input, signature)
|
||||
|
||||
|
||||
def _sig_matches_keys(keys, signing_input, signature, alg):
    """Return True if any candidate key verifies *signature*.

    A key whose verify() raises is simply skipped; key construction
    failures propagate to the caller.
    """
    for key_data in keys:
        candidate = jwk.construct(key_data, alg)
        try:
            verified = candidate.verify(signing_input, signature)
        except Exception:
            continue
        if verified:
            return True
    return False
|
||||
|
||||
|
||||
def _get_keys(key):
    """Normalize *key* into an iterable of candidate keys.

    Accepts a JWK Set dict ({'keys': [...]}), an individual JWK dict,
    any other mapping (e.g. Firebase's dict of kid -> cert), a JSON
    string encoding any of those, a list/tuple of keys, or a single
    scalar key (e.g. an HMAC secret or PEM string).
    """
    try:
        # A JSON-encoded key or key set is decoded first; anything
        # that is not valid JSON (raw PEM, dict, bytes) is used as-is.
        key = json.loads(key)
    except Exception:
        pass

    # Fix: only run the 'keys'/'kty' membership checks on mappings.
    # Previously a plain string containing the substring 'keys' hit
    # key['keys'] and crashed with TypeError, and any bytes key crashed
    # on the str-in-bytes membership test.
    if isinstance(key, Mapping):
        # JWK Set per RFC 7517
        if 'keys' in key:
            return key['keys']

        # Individual JWK per RFC 7517
        elif 'kty' in key:
            return (key,)

        # Some other mapping. Firebase uses just dict of kid, cert pairs
        else:
            values = key.values()
            if values:
                return values
            return (key,)

    # Iterable but not text or mapping => list- or tuple-like
    elif (isinstance(key, Iterable) and
            not isinstance(key, six.string_types)):
        return key

    # Scalar value, wrap in tuple.
    else:
        return (key,)
|
||||
|
||||
|
||||
def _verify_signature(signing_input, header, signature, key='', algorithms=None):
    """Check *signature* over *signing_input* against all candidate keys.

    Args:
        signing_input (bytes): The '<header>.<claims>' bytes that were signed.
        header (Mapping): The decoded JWS header; its 'alg' selects the
            verification algorithm.
        signature (bytes): The decoded signature segment.
        key: Key material accepted by _get_keys().
        algorithms: Optional allow-list of algorithm names.

    Raises:
        JWSError: if the header has no 'alg', the alg is not allowed,
            verification fails, or the algorithm is invalid/unsupported.
    """
    alg = header.get('alg')
    if not alg:
        raise JWSError('No algorithm was specified in the JWS header.')

    # Reject algorithms outside the caller's allow-list before touching keys.
    if algorithms is not None and alg not in algorithms:
        raise JWSError('The specified alg value is not allowed')

    keys = _get_keys(key)
    try:
        if not _sig_matches_keys(keys, signing_input, signature, alg):
            raise JWSSignatureError()
    except JWSSignatureError:
        # JWSSignatureError subclasses JWSError (see jose.exceptions), so
        # this handler must come before the broader JWSError one.
        raise JWSError('Signature verification failed.')
    except JWSError:
        raise JWSError('Invalid or unsupported algorithm: %s' % alg)
|
||||
507
Lambdas/Websocket Authorizer/jose/jwt.py
Normal file
507
Lambdas/Websocket Authorizer/jose/jwt.py
Normal file
@@ -0,0 +1,507 @@
|
||||
|
||||
import json
|
||||
|
||||
from calendar import timegm
|
||||
try:
|
||||
from collections.abc import Mapping # Python3
|
||||
except ImportError:
|
||||
from collections import Mapping # Python2, will be deprecated in Python 3.8
|
||||
from datetime import datetime
|
||||
from datetime import timedelta
|
||||
from six import string_types
|
||||
|
||||
from jose import jws
|
||||
|
||||
from .exceptions import JWSError
|
||||
from .exceptions import JWTClaimsError
|
||||
from .exceptions import JWTError
|
||||
from .exceptions import ExpiredSignatureError
|
||||
from .constants import ALGORITHMS
|
||||
from .utils import timedelta_total_seconds, calculate_at_hash
|
||||
|
||||
|
||||
def encode(claims, key, algorithm=ALGORITHMS.HS256, headers=None, access_token=None):
    """Encode a claims set and return the JWT string.

    JWTs are JWS-signed objects with a few reserved claims.

    Args:
        claims (dict): A claims set to sign.
        key (str or dict): Signing key; individual JWK or JWK set.
        algorithm (str, optional): Signing algorithm; defaults to HS256.
        headers (dict, optional): Extra headers merged over (and
            overriding) the default headers.
        access_token (str, optional): If present, an 'at_hash' claim is
            computed from it and added to *claims*.

    Returns:
        str: The string representation of header, claims, and signature.

    Raises:
        JWTError: If there is an error encoding the claims.

    Examples:

        >>> jwt.encode({'a': 'b'}, 'secret', algorithm='HS256')
        'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'

    """
    # Reserved time claims may arrive as datetimes; JWS needs NumericDate
    # integers, so convert in place.
    for time_claim in ('exp', 'iat', 'nbf'):
        claim_value = claims.get(time_claim)
        if isinstance(claim_value, datetime):
            claims[time_claim] = timegm(claim_value.utctimetuple())

    if access_token:
        claims['at_hash'] = calculate_at_hash(access_token,
                                              ALGORITHMS.HASHES[algorithm])

    return jws.sign(claims, key, headers=headers, algorithm=algorithm)
|
||||
|
||||
|
||||
def decode(token, key, algorithms=None, options=None, audience=None,
           issuer=None, subject=None, access_token=None):
    """Verify a JWT string's signature and validate reserved claims.

    Args:
        token (str): A signed JWS to be verified.
        key (str or dict): Candidate key; individual JWK or JWK set.
        algorithms (str or list): Algorithms allowed for verification.
        options (dict): Overrides for the validation options below.
        audience (str): Expected 'aud' value when that claim is present.
        issuer (str or iterable): Acceptable 'iss' value(s).
        subject (str): Expected 'sub' value when that claim is present.
        access_token (str): Compared against the 'at_hash' claim when present.

        Recognized options (shown with their defaults):
            verify_signature/aud/iat/exp/nbf/iss/sub/jti/at_hash: True
            require_aud/iat/exp/nbf/iss/sub/jti/at_hash: False
            leeway: 0  (seconds, or a timedelta)

    Returns:
        dict: The claims set, assuming the signature is valid and all
        requested validation passes.

    Raises:
        JWTError: If the signature is invalid in any way.
        ExpiredSignatureError: If the signature has expired.
        JWTClaimsError: If any claim is invalid in any way.

    Examples:

        >>> payload = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhIjoiYiJ9.jiMyrsmD8AoHWeQgmxZ5yq8z0lXS67_QGs52AzC8Ru8'
        >>> jwt.decode(payload, 'secret', algorithms='HS256')

    """

    defaults = {
        'verify_signature': True,
        'verify_aud': True,
        'verify_iat': True,
        'verify_exp': True,
        'verify_nbf': True,
        'verify_iss': True,
        'verify_sub': True,
        'verify_jti': True,
        'verify_at_hash': True,
        'require_aud': False,
        'require_iat': False,
        'require_exp': False,
        'require_nbf': False,
        'require_iss': False,
        'require_sub': False,
        'require_jti': False,
        'require_at_hash': False,
        'leeway': 0,
    }

    # Caller-provided options override the defaults above.
    if options:
        defaults.update(options)

    verify_signature = defaults.get('verify_signature', True)

    try:
        payload = jws.verify(token, key, algorithms, verify=verify_signature)
    except JWSError as e:
        raise JWTError(e)

    # Needed for at_hash verification
    algorithm = jws.get_unverified_header(token)['alg']

    try:
        claims = json.loads(payload.decode('utf-8'))
    except ValueError as e:
        raise JWTError('Invalid payload string: %s' % e)

    if not isinstance(claims, Mapping):
        raise JWTError('Invalid payload string: must be a json object')

    _validate_claims(claims, audience=audience, issuer=issuer,
                     subject=subject, algorithm=algorithm,
                     access_token=access_token,
                     options=defaults)

    return claims
|
||||
|
||||
|
||||
def get_unverified_header(token):
    """Return the decoded JWT headers with no verification of any kind.

    Args:
        token (str): A signed JWT to decode the headers from.

    Returns:
        dict: The token headers.

    Raises:
        JWTError: If there is an exception decoding the token.
    """
    try:
        return jws.get_unverified_headers(token)
    except Exception:
        raise JWTError('Error decoding token headers.')
|
||||
|
||||
|
||||
def get_unverified_headers(token):
    """Backwards-compatible alias for get_unverified_header().

    Args:
        token (str): A signed JWT to decode the headers from.

    Returns:
        dict: The token headers.

    Raises:
        JWTError: If there is an exception decoding the token.
    """
    return get_unverified_header(token)
|
||||
|
||||
|
||||
def get_unverified_claims(token):
    """Return the decoded JWT claims with no verification of any kind.

    Args:
        token (str): A signed JWT to decode the claims from.

    Returns:
        dict: The token claims.

    Raises:
        JWTError: If there is an exception decoding the token or the
            claims are not a JSON object.
    """
    try:
        raw_claims = jws.get_unverified_claims(token)
    except Exception:
        raise JWTError('Error decoding token claims.')

    try:
        claims = json.loads(raw_claims.decode('utf-8'))
    except ValueError as e:
        raise JWTError('Invalid claims string: %s' % e)

    if not isinstance(claims, Mapping):
        raise JWTError('Invalid claims string: must be a json object')

    return claims
|
||||
|
||||
|
||||
def _validate_iat(claims):
    """Validate that the optional 'iat' (issued-at) claim is numeric.

    The "iat" claim identifies the time at which the JWT was issued and
    MUST be a NumericDate value; use of the claim is OPTIONAL (RFC 7519).

    Args:
        claims (dict): The claims dictionary to validate.

    Raises:
        JWTClaimsError: if 'iat' is present but not convertible to int.
    """

    if 'iat' not in claims:
        return

    try:
        int(claims['iat'])
    except (TypeError, ValueError):
        # TypeError covers non-numeric types (None, lists, ...), which the
        # previous ValueError-only handler let escape as a raw TypeError.
        raise JWTClaimsError('Issued At claim (iat) must be an integer.')
|
||||
|
||||
|
||||
def _validate_nbf(claims, leeway=0):
    """Validate the optional 'nbf' (not-before) claim.

    The "nbf" claim identifies the time before which the JWT MUST NOT be
    accepted; it MUST be a NumericDate value and its use is OPTIONAL
    (RFC 7519). A small leeway may be allowed for clock skew.

    Args:
        claims (dict): The claims dictionary to validate.
        leeway (int): The number of seconds of skew that is allowed.

    Raises:
        JWTClaimsError: if 'nbf' is not numeric or still in the future.
    """

    if 'nbf' not in claims:
        return

    try:
        nbf = int(claims['nbf'])
    except (TypeError, ValueError):
        # TypeError covers non-numeric types (None, lists, ...), which the
        # previous ValueError-only handler let escape as a raw TypeError.
        raise JWTClaimsError('Not Before claim (nbf) must be an integer.')

    now = timegm(datetime.utcnow().utctimetuple())

    if nbf > (now + leeway):
        raise JWTClaimsError('The token is not yet valid (nbf)')
|
||||
|
||||
|
||||
def _validate_exp(claims, leeway=0):
    """Validate the optional 'exp' (expiration time) claim.

    The "exp" claim identifies the time on or after which the JWT MUST
    NOT be accepted; it MUST be a NumericDate value and its use is
    OPTIONAL (RFC 7519). A small leeway may be allowed for clock skew.

    Args:
        claims (dict): The claims dictionary to validate.
        leeway (int): The number of seconds of skew that is allowed.

    Raises:
        JWTClaimsError: if 'exp' is not numeric.
        ExpiredSignatureError: if the token has expired.
    """

    if 'exp' not in claims:
        return

    try:
        exp = int(claims['exp'])
    except (TypeError, ValueError):
        # TypeError covers non-numeric types (None, lists, ...), which the
        # previous ValueError-only handler let escape as a raw TypeError.
        raise JWTClaimsError('Expiration Time claim (exp) must be an integer.')

    now = timegm(datetime.utcnow().utctimetuple())

    if exp < (now - leeway):
        raise ExpiredSignatureError('Signature has expired.')
|
||||
|
||||
|
||||
def _validate_aud(claims, audience=None):
    """Validate the optional 'aud' (audience) claim.

    Per RFC 7519, 'aud' is either a single case-sensitive string or an
    array of such strings; a JWT MUST be rejected when the verifying
    principal does not identify itself with one of the listed values.

    Args:
        claims (dict): The claims dictionary to validate.
        audience (str): The audience that is verifying the token.

    Raises:
        JWTClaimsError: if 'aud' is malformed or does not contain
            *audience*.
    """

    if 'aud' not in claims:
        # if audience:
        #     raise JWTError('Audience claim expected, but not in claims')
        return

    audience_claims = claims['aud']
    # Normalize the single-string form to a one-element list.
    if isinstance(audience_claims, string_types):
        audience_claims = [audience_claims]
    if not isinstance(audience_claims, list):
        raise JWTClaimsError('Invalid claim format in token')
    if any(not isinstance(c, string_types) for c in audience_claims):
        raise JWTClaimsError('Invalid claim format in token')
    # NOTE(review): when the token carries 'aud' but the caller passed
    # audience=None, None is never in the list, so this raises
    # 'Invalid audience' — confirm that strict behavior is intended.
    if audience not in audience_claims:
        raise JWTClaimsError('Invalid audience')
|
||||
|
||||
|
||||
def _validate_iss(claims, issuer=None):
    """Validate the optional 'iss' (issuer) claim.

    Args:
        claims (dict): The claims dictionary to validate.
        issuer (str or iterable): Acceptable value(s) for the issuer that
            signed the token. When None, no check is performed.

    Raises:
        JWTClaimsError: if the token's issuer is not acceptable.
    """
    if issuer is None:
        return

    # A single string is treated as a one-element set of acceptable issuers.
    acceptable = (issuer,) if isinstance(issuer, string_types) else issuer
    if claims.get('iss') not in acceptable:
        raise JWTClaimsError('Invalid issuer')
|
||||
|
||||
|
||||
def _validate_sub(claims, subject=None):
    """Validate the optional 'sub' (subject) claim.

    Args:
        claims (dict): The claims dictionary to validate.
        subject (str): The expected subject of the token, if any.

    Raises:
        JWTClaimsError: if 'sub' is not a string or does not match
            *subject*.
    """
    if 'sub' not in claims:
        return

    sub = claims['sub']
    if not isinstance(sub, string_types):
        raise JWTClaimsError('Subject must be a string.')

    if subject is not None and sub != subject:
        raise JWTClaimsError('Invalid subject')
|
||||
|
||||
|
||||
def _validate_jti(claims):
    """Validate the optional 'jti' (JWT ID) claim.

    Per RFC 7519 the 'jti' value is a case-sensitive string, usable to
    prevent token replay.

    Args:
        claims (dict): The claims dictionary to validate.

    Raises:
        JWTClaimsError: if 'jti' is present but not a string.
    """
    if 'jti' not in claims:
        return

    if not isinstance(claims['jti'], string_types):
        raise JWTClaimsError('JWT ID must be a string.')
|
||||
|
||||
|
||||
def _validate_at_hash(claims, access_token, algorithm):
|
||||
"""
|
||||
Validates that the 'at_hash' is valid.
|
||||
|
||||
Its value is the base64url encoding of the left-most half of the hash
|
||||
of the octets of the ASCII representation of the access_token value,
|
||||
where the hash algorithm used is the hash algorithm used in the alg
|
||||
Header Parameter of the ID Token's JOSE Header. For instance, if the
|
||||
alg is RS256, hash the access_token value with SHA-256, then take the
|
||||
left-most 128 bits and base64url encode them. The at_hash value is a
|
||||
case sensitive string. Use of this claim is OPTIONAL.
|
||||
|
||||
Args:
|
||||
claims (dict): The claims dictionary to validate.
|
||||
access_token (str): The access token returned by the OpenID Provider.
|
||||
algorithm (str): The algorithm used to sign the JWT, as specified by
|
||||
the token headers.
|
||||
"""
|
||||
if 'at_hash' not in claims:
|
||||
return
|
||||
|
||||
if not access_token:
|
||||
msg = 'No access_token provided to compare against at_hash claim.'
|
||||
raise JWTClaimsError(msg)
|
||||
|
||||
try:
|
||||
expected_hash = calculate_at_hash(access_token,
|
||||
ALGORITHMS.HASHES[algorithm])
|
||||
except (TypeError, ValueError):
|
||||
msg = 'Unable to calculate at_hash to verify against token claims.'
|
||||
raise JWTClaimsError(msg)
|
||||
|
||||
if claims['at_hash'] != expected_hash:
|
||||
raise JWTClaimsError('at_hash claim does not match access_token.')
|
||||
|
||||
|
||||
def _validate_claims(claims, audience=None, issuer=None, subject=None,
                     algorithm=None, access_token=None, options=None):
    """Run every claim validation selected by *options*.

    Any ``require_<claim>`` option forces the claim to be present and
    also turns on the matching ``verify_<claim>`` check.  The individual
    ``verify_*`` flags then gate each validator.

    Args:
        claims (dict): Decoded token claims.
        audience (str): Expected audience, or None.
        issuer: Expected issuer(s), or None.
        subject (str): Expected subject, or None.
        algorithm (str): Signing algorithm from the token header.
        access_token (str): Access token for 'at_hash' validation.
        options (dict): ``verify_*`` / ``require_*`` flags and 'leeway'.

    Raises:
        JWTError: On a missing required claim or a bad *audience* type.
        JWTClaimsError: From any individual claim validator.
    """
    leeway = options.get('leeway', 0)
    if isinstance(leeway, timedelta):
        leeway = timedelta_total_seconds(leeway)

    required = [
        key[len('require_'):]
        for key, enabled in options.items()
        if key.startswith('require_') and enabled
    ]

    for claim_name in required:
        if claim_name not in claims:
            raise JWTError('missing required key "%s" among claims' % claim_name)
        # A required claim is always verified as well.
        options['verify_' + claim_name] = True

    if not isinstance(audience, (string_types, type(None))):
        raise JWTError('audience must be a string or None')

    # (flag, validator, positional args, keyword args) -- run in order.
    checks = (
        ('verify_iat', _validate_iat, (claims,), {}),
        ('verify_nbf', _validate_nbf, (claims,), {'leeway': leeway}),
        ('verify_exp', _validate_exp, (claims,), {'leeway': leeway}),
        ('verify_aud', _validate_aud, (claims,), {'audience': audience}),
        ('verify_iss', _validate_iss, (claims,), {'issuer': issuer}),
        ('verify_sub', _validate_sub, (claims,), {'subject': subject}),
        ('verify_jti', _validate_jti, (claims,), {}),
        ('verify_at_hash', _validate_at_hash,
         (claims, access_token, algorithm), {}),
    )

    for flag, validator, args, kwargs in checks:
        if options.get(flag):
            validator(*args, **kwargs)
|
||||
134
Lambdas/Websocket Authorizer/jose/utils.py
Normal file
134
Lambdas/Websocket Authorizer/jose/utils.py
Normal file
@@ -0,0 +1,134 @@
|
||||
|
||||
import base64
|
||||
import hmac
|
||||
import six
|
||||
import struct
|
||||
import sys
|
||||
|
||||
# On Python 3 there is no `long` type; alias it so the rest of the
# module can reference `long` uniformly on both interpreter lines.
if sys.version_info > (3,):
    # Deal with integer compatibilities between Python 2 and 3.
    # Using `from builtins import int` is not supported on AppEngine.
    long = int


# Piggyback of the backends implementation of the function that converts a long
# to a bytes stream. Some plumbing is necessary to have the signatures match.
# Fallback order: PyCrypto, then cryptography, then pure-python ecdsa.
try:
    from Crypto.Util.number import long_to_bytes
except ImportError:
    try:
        from cryptography.utils import int_to_bytes as _long_to_bytes

        def long_to_bytes(n, blocksize=0):
            # cryptography spells "minimal length" as length=None,
            # whereas this API spells it blocksize=0.
            return _long_to_bytes(n, blocksize or None)

    except ImportError:
        from ecdsa.ecdsa import int_to_string as _long_to_bytes

        def long_to_bytes(n, blocksize=0):
            # ecdsa's int_to_string has no padding support, so left-pad
            # the minimal big-endian representation up to blocksize.
            ret = _long_to_bytes(n)
            if blocksize == 0:
                return ret
            else:
                assert len(ret) <= blocksize
                padding = blocksize - len(ret)
                return b'\x00' * padding + ret
|
||||
|
||||
|
||||
def long_to_base64(data, size=0):
    """Encode the integer *data* as an unpadded urlsafe base64 value.

    Args:
        data (int): The integer to encode.
        size (int): Optional fixed byte width for the big-endian form.

    Returns:
        bytes: The base64url encoding with '=' padding stripped.
    """
    raw = long_to_bytes(data, size)
    return base64.urlsafe_b64encode(raw).strip(b'=')
|
||||
|
||||
|
||||
def int_arr_to_long(arr):
    """Interpret *arr* as big-endian byte values and return the integer.

    Args:
        arr (iterable): Integers in the range 0..255, most significant
            first -- e.g. the tuple produced by ``struct.unpack``.

    Returns:
        int: The assembled arbitrary-precision integer.

    Raises:
        ValueError: If *arr* is empty (no digits to parse).
    """
    # Plain `int` is arbitrary precision on Python 3 and auto-promotes
    # to long on Python 2, so the module-level `long` alias is not
    # needed here.
    return int(''.join('%02x' % octet for octet in arr), 16)
|
||||
|
||||
|
||||
def base64_to_long(data):
    """Decode a (possibly unpadded) base64url value into an integer.

    Args:
        data (str or bytes): The base64url-encoded value.

    Returns:
        int: The decoded big-endian integer.
    """
    if isinstance(data, six.text_type):
        data = data.encode("ascii")

    # urlsafe_b64decode will happily convert b64encoded data, and it
    # tolerates surplus padding -- so always append some.
    decoded = base64.urlsafe_b64decode(bytes(data) + b'==')
    byte_values = struct.unpack('%sB' % len(decoded), decoded)
    return int_arr_to_long(byte_values)
|
||||
|
||||
|
||||
def calculate_at_hash(access_token, hash_alg):
    """Compute the OpenID Connect ``at_hash`` for *access_token*.

    Per http://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken
    the value is the base64url encoding of the left-most half of the
    hash of the ASCII bytes of the access token, using the hash named by
    the ID token's ``alg`` header (e.g. SHA-256 for RS256).

    Args:
        access_token (str): An access token string.
        hash_alg (callable): A callable returning a hash object,
            e.g. hashlib.sha256

    Returns:
        str: The case-sensitive ``at_hash`` value.
    """
    digest = hash_alg(access_token.encode('utf-8')).digest()
    # Keep only the left-most half of the digest.
    left_half = digest[:len(digest) // 2]
    return base64url_encode(left_half).decode('utf-8')
|
||||
|
||||
|
||||
def base64url_decode(input):
    """Decode a base64url byte string, restoring stripped '=' padding.

    Args:
        input (bytes): A base64url_encoded byte string to decode.

    Returns:
        bytes: The decoded raw bytes.
    """
    # base64 works in 4-character groups; re-add the padding that
    # base64url_encode strips off.
    missing_padding = (4 - len(input) % 4) % 4
    if missing_padding:
        input += b'=' * missing_padding

    return base64.urlsafe_b64decode(input)
|
||||
|
||||
|
||||
def base64url_encode(input):
    """Base64url-encode *input*, stripping the trailing '=' padding.

    Args:
        input (bytes): The raw byte string to encode.

    Returns:
        bytes: The unpadded base64url encoding.
    """
    # '=' can only appear as trailing padding in base64 output, so
    # rstrip removes exactly what replace(b'=', b'') would.
    return base64.urlsafe_b64encode(input).rstrip(b'=')
|
||||
|
||||
|
||||
def timedelta_total_seconds(delta):
    """Return the whole number of seconds in a timedelta.

    Microseconds are deliberately ignored, matching the behaviour
    expected by the leeway handling in this package.

    Args:
        delta (timedelta): A timedelta to convert to seconds.

    Returns:
        int: days and seconds of *delta*, expressed in seconds.
    """
    seconds_per_day = 24 * 60 * 60
    return delta.days * seconds_per_day + delta.seconds
|
||||
|
||||
|
||||
def constant_time_string_compare(a, b):
    """Compare two strings in constant time.

    Uses hmac.compare_digest when available; otherwise falls back to a
    manual XOR-accumulator loop so the comparison time does not depend
    on where the strings first differ.

    Args:
        a (str): A string to compare
        b (str): A string to compare

    Returns:
        bool: True when the strings are equal.
    """
    try:
        return hmac.compare_digest(a, b)
    except AttributeError:
        # Very old Pythons lack compare_digest; emulate it.
        if len(a) != len(b):
            return False

        diff = 0
        for ch_a, ch_b in zip(a, b):
            diff |= ord(ch_a) ^ ord(ch_b)

        return diff == 0
|
||||
82
Lambdas/Websocket Authorizer/lambda_function.py
Normal file
82
Lambdas/Websocket Authorizer/lambda_function.py
Normal file
@@ -0,0 +1,82 @@
|
||||
import json
import time
import urllib.request
from jose import jwk, jwt
from jose.utils import base64url_decode
import logging

logging.basicConfig(format='%(levelname)s: %(asctime)s: %(message)s')
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# Cognito user pool this authorizer validates tokens against.
region = 'us-east-1'
userpool_id = 'us-east-1_XcUWWJXMT'
app_client_id = 'ms2jhuludm93g9qfpuio8m9k6'
keys_url = 'https://cognito-idp.{}.amazonaws.com/{}/.well-known/jwks.json'.format(region, userpool_id)
# instead of re-downloading the public keys every time
# we download them only on cold start
# https://aws.amazon.com/blogs/compute/container-reuse-in-lambda/
with urllib.request.urlopen(keys_url) as f:
    response = f.read()
# JWKS public keys used to verify Cognito JWT signatures.
keys = json.loads(response.decode('utf-8'))['keys']
|
||||
|
||||
def lambda_handler(event, context):
    """Lambda authorizer for the WebSocket API.

    Verifies the Cognito JWT supplied as the ``token`` query-string
    parameter -- signature against the cached JWKS keys, expiry, and
    audience -- and returns an IAM Allow/Deny policy for API Gateway.

    Args:
        event (dict): Authorizer event; must carry
            ``queryStringParameters.token`` and ``methodArn``.
        context: Lambda context object (unused).

    Returns:
        dict: Authorizer response from generateAllow/generateDeny.
    """
    token = event['queryStringParameters']['token']
    method_arn = event['methodArn']

    # get the kid from the headers prior to verification
    kid = jwt.get_unverified_headers(token)['kid']

    # search for the kid in the downloaded public keys
    key = next((k for k in keys if k['kid'] == kid), None)
    if key is None:
        logger.warning('Public key not found in jwks.json')
        # Returning False here produced a malformed authorizer response;
        # API Gateway expects a policy document, so deny explicitly.
        return generateDeny('user', method_arn)

    # construct the public key
    public_key = jwk.construct(key)

    # get the last two sections of the token,
    # message and signature (encoded in base64)
    message, encoded_signature = str(token).rsplit('.', 1)
    decoded_signature = base64url_decode(encoded_signature.encode('utf-8'))

    # verify the signature
    if not public_key.verify(message.encode('utf8'), decoded_signature):
        logger.warning('Signature verification failed')
        return generateDeny('user', method_arn)

    logger.info('Signature successfully verified')

    # since we passed the verification, we can now safely
    # use the unverified claims
    claims = jwt.get_unverified_claims(token)

    # additionally we can verify the token expiration
    if time.time() > claims['exp']:
        logger.warning('Token is expired')
        return generateDeny('user', method_arn)

    # and the Audience ('client_id' is the audience claim of a
    # Cognito access token)
    if claims['client_id'] != app_client_id:
        logger.warning('Token was not issued for this audience')
        return generateDeny('user', method_arn)

    # now we can use the claims
    return generateAllow('user', method_arn)
|
||||
|
||||
def generateAllow(principleID, resource):
    """Build an authorizer response allowing *principleID* to invoke *resource*."""
    return generatePolicy(principleID,'Allow', resource)
|
||||
def generateDeny(principleID, resource):
    """Build an authorizer response denying *principleID* access to *resource*."""
    return generatePolicy(principleID, 'Deny', resource)
|
||||
def generatePolicy(principleID, effect, resource):
    """Build an API Gateway Lambda authorizer response.

    Args:
        principleID (str): Identifier reported as the caller principal.
        effect (str): 'Allow' or 'Deny'.
        resource (str): The method ARN the policy statement applies to.

    Returns:
        dict: Response with 'principalId' and, when both *effect* and
        *resource* are truthy, an IAM 'policyDocument'.
    """
    # API Gateway requires the key to be spelled 'principalId'; the
    # previous 'principleId' spelling made every response invalid.
    authResponse = {'principalId': principleID}

    if effect and resource:
        authResponse['policyDocument'] = {
            'Version': '2012-10-17',
            'Statement': [{
                'Action': 'execute-api:Invoke',
                'Effect': effect,
                'Resource': resource,
            }],
        }

    return authResponse
|
||||
BIN
Lambdas/Websocket Authorizer/lambda_function.zip
Normal file
BIN
Lambdas/Websocket Authorizer/lambda_function.zip
Normal file
Binary file not shown.
7
Lambdas/Websocket Authorizer/pyasn1/__init__.py
Normal file
7
Lambdas/Websocket Authorizer/pyasn1/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
import sys

# https://www.python.org/dev/peps/pep-0396/
__version__ = '0.4.7'

# Fail fast on interpreters this package cannot run on.
if sys.version_info[:2] < (2, 4):
    raise RuntimeError('PyASN1 requires Python 2.4 or later')
|
||||
1
Lambdas/Websocket Authorizer/pyasn1/codec/__init__.py
Normal file
1
Lambdas/Websocket Authorizer/pyasn1/codec/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# This file is necessary to make this directory a package.
|
||||
@@ -0,0 +1 @@
|
||||
# This file is necessary to make this directory a package.
|
||||
1654
Lambdas/Websocket Authorizer/pyasn1/codec/ber/decoder.py
Normal file
1654
Lambdas/Websocket Authorizer/pyasn1/codec/ber/decoder.py
Normal file
File diff suppressed because it is too large
Load Diff
890
Lambdas/Websocket Authorizer/pyasn1/codec/ber/encoder.py
Normal file
890
Lambdas/Websocket Authorizer/pyasn1/codec/ber/encoder.py
Normal file
@@ -0,0 +1,890 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1 import debug
|
||||
from pyasn1 import error
|
||||
from pyasn1.codec.ber import eoo
|
||||
from pyasn1.compat.integer import to_bytes
|
||||
from pyasn1.compat.octets import (int2oct, oct2int, ints2octs, null,
|
||||
str2octs, isOctetsType)
|
||||
from pyasn1.type import char
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
from pyasn1.type import useful
|
||||
|
||||
__all__ = ['encode']
|
||||
|
||||
LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER)
|
||||
|
||||
|
||||
class AbstractItemEncoder(object):
    """Base class for BER item encoders.

    Subclasses implement encodeValue(); encode() wraps the resulting
    payload with BER tag and length octets (definite or indefinite).
    """
    # Whether this type may be encoded with indefinite length form.
    supportIndefLenMode = True

    # An outcome of otherwise legit call `encodeFun(eoo.endOfOctets)`
    eooIntegerSubstrate = (0, 0)
    eooOctetsSubstrate = ints2octs(eooIntegerSubstrate)

    # noinspection PyMethodMayBeStatic
    def encodeTag(self, singleTag, isConstructed):
        """Return BER tag octets (as a tuple of ints) for *singleTag*."""
        tagClass, tagFormat, tagId = singleTag
        encodedTag = tagClass | tagFormat
        if isConstructed:
            encodedTag |= tag.tagFormatConstructed

        if tagId < 31:
            # Short form: tag number fits in the identifier octet.
            return encodedTag | tagId,

        else:
            # High-tag-number form: 0x1F marker then base-128 octets.
            substrate = tagId & 0x7f,

            tagId >>= 7

            while tagId:
                substrate = (0x80 | (tagId & 0x7f),) + substrate
                tagId >>= 7

            return (encodedTag | 0x1F,) + substrate

    def encodeLength(self, length, defMode):
        """Return BER length octets; 0x80 alone means indefinite form."""
        if not defMode and self.supportIndefLenMode:
            return (0x80,)

        if length < 0x80:
            # Short definite form.
            return length,

        else:
            # Long definite form: count octet then big-endian length.
            substrate = ()
            while length:
                substrate = (length & 0xff,) + substrate
                length >>= 8

            substrateLen = len(substrate)

            if substrateLen > 126:
                raise error.PyAsn1Error('Length octets overflow (%d)' % substrateLen)

            return (0x80 | substrateLen,) + substrate

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        """Encode the bare payload; must be overridden by subclasses."""
        raise error.PyAsn1Error('Not implemented')

    def encode(self, value, asn1Spec=None, encodeFun=None, **options):
        """Encode *value* into a BER substrate (tag + length + payload)."""
        if asn1Spec is None:
            tagSet = value.tagSet
        else:
            tagSet = asn1Spec.tagSet

        # untagged item?
        if not tagSet:
            substrate, isConstructed, isOctets = self.encodeValue(
                value, asn1Spec, encodeFun, **options
            )
            return substrate

        defMode = options.get('defMode', True)

        substrate = null

        # Apply each tag in the tag set, innermost (base) first.
        for idx, singleTag in enumerate(tagSet.superTags):

            defModeOverride = defMode

            # base tag?
            if not idx:
                try:
                    substrate, isConstructed, isOctets = self.encodeValue(
                        value, asn1Spec, encodeFun, **options
                    )

                except error.PyAsn1Error:
                    exc = sys.exc_info()
                    raise error.PyAsn1Error(
                        'Error encoding %r: %s' % (value, exc[1]))

                if LOG:
                    LOG('encoded %svalue %s into %s' % (
                        isConstructed and 'constructed ' or '', value, substrate
                    ))

                if not substrate and isConstructed and options.get('ifNotEmpty', False):
                    return substrate

                if not isConstructed:
                    # Primitive encodings must use definite length.
                    defModeOverride = True

                    if LOG:
                        LOG('overridden encoding mode into definitive for primitive type')

            header = self.encodeTag(singleTag, isConstructed)

            if LOG:
                LOG('encoded %stag %s into %s' % (
                    isConstructed and 'constructed ' or '',
                    singleTag, debug.hexdump(ints2octs(header))))

            header += self.encodeLength(len(substrate), defModeOverride)

            if LOG:
                LOG('encoded %s octets (tag + payload) into %s' % (
                    len(substrate), debug.hexdump(ints2octs(header))))

            if isOctets:
                substrate = ints2octs(header) + substrate

                if not defModeOverride:
                    # Indefinite form is terminated by end-of-octets.
                    substrate += self.eooOctetsSubstrate

            else:
                substrate = header + substrate

                if not defModeOverride:
                    substrate += self.eooIntegerSubstrate

        if not isOctets:
            substrate = ints2octs(substrate)

        return substrate
|
||||
|
||||
|
||||
class EndOfOctetsEncoder(AbstractItemEncoder):
    """Encoder for the end-of-octets sentinel (empty payload)."""

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        return null, False, True
|
||||
|
||||
|
||||
class BooleanEncoder(AbstractItemEncoder):
    """BER encoder for BOOLEAN: single octet, 0x01 for true, 0x00 for false."""
    supportIndefLenMode = False

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        return value and (1,) or (0,), False, False
|
||||
|
||||
|
||||
class IntegerEncoder(AbstractItemEncoder):
    """BER encoder for INTEGER (two's complement, big-endian)."""
    supportIndefLenMode = False
    # When True, zero is encoded as an empty payload instead of one 0 octet.
    supportCompactZero = False

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        if value == 0:
            if LOG:
                LOG('encoding %spayload for zero INTEGER' % (
                    self.supportCompactZero and 'no ' or ''
                ))

            # de-facto way to encode zero
            if self.supportCompactZero:
                return (), False, False
            else:
                return (0,), False, False

        return to_bytes(int(value), signed=True), False, True
|
||||
|
||||
|
||||
class BitStringEncoder(AbstractItemEncoder):
    """BER encoder for BIT STRING.

    Short values are encoded primitively (unused-bit count octet plus
    the padded bits); values longer than maxChunkSize are encoded as a
    constructed sequence of smaller BIT STRING chunks.
    """

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        if asn1Spec is not None:
            # TODO: try to avoid ASN.1 schema instantiation
            value = asn1Spec.clone(value)

        valueLength = len(value)
        if valueLength % 8:
            # Left-align the bits into whole octets.
            alignedValue = value << (8 - valueLength % 8)
        else:
            alignedValue = value

        maxChunkSize = options.get('maxChunkSize', 0)
        if not maxChunkSize or len(alignedValue) <= maxChunkSize * 8:
            substrate = alignedValue.asOctets()
            # First octet carries the number of unused trailing bits.
            return int2oct(len(substrate) * 8 - valueLength) + substrate, False, True

        if LOG:
            LOG('encoding into up to %s-octet chunks' % maxChunkSize)

        baseTag = value.tagSet.baseTag

        # strip off explicit tags
        if baseTag:
            tagSet = tag.TagSet(baseTag, baseTag)

        else:
            tagSet = tag.TagSet()

        alignedValue = alignedValue.clone(tagSet=tagSet)

        stop = 0
        substrate = null
        while stop < valueLength:
            start = stop
            stop = min(start + maxChunkSize * 8, valueLength)
            substrate += encodeFun(alignedValue[start:stop], asn1Spec, **options)

        return substrate, True, True
|
||||
|
||||
|
||||
class OctetStringEncoder(AbstractItemEncoder):
    """BER encoder for OCTET STRING.

    Short values are encoded primitively; values longer than
    maxChunkSize become a constructed sequence of smaller chunks.
    """

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        # Normalize the three possible inputs (ASN.1 object, bare
        # Python value + spec, raw octets) into an octet stream.
        if asn1Spec is None:
            substrate = value.asOctets()

        elif not isOctetsType(value):
            substrate = asn1Spec.clone(value).asOctets()

        else:
            substrate = value

        maxChunkSize = options.get('maxChunkSize', 0)

        if not maxChunkSize or len(substrate) <= maxChunkSize:
            return substrate, False, True

        if LOG:
            LOG('encoding into up to %s-octet chunks' % maxChunkSize)

        # strip off explicit tags for inner chunks

        if asn1Spec is None:
            baseTag = value.tagSet.baseTag

            # strip off explicit tags
            if baseTag:
                tagSet = tag.TagSet(baseTag, baseTag)

            else:
                tagSet = tag.TagSet()

            asn1Spec = value.clone(tagSet=tagSet)

        elif not isOctetsType(value):
            baseTag = asn1Spec.tagSet.baseTag

            # strip off explicit tags
            if baseTag:
                tagSet = tag.TagSet(baseTag, baseTag)

            else:
                tagSet = tag.TagSet()

            asn1Spec = asn1Spec.clone(tagSet=tagSet)

        pos = 0
        substrate = null

        while True:
            chunk = value[pos:pos + maxChunkSize]
            if not chunk:
                break

            substrate += encodeFun(chunk, asn1Spec, **options)
            pos += maxChunkSize

        return substrate, True, True
|
||||
|
||||
|
||||
class NullEncoder(AbstractItemEncoder):
    """BER encoder for NULL: always an empty payload."""
    supportIndefLenMode = False

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        return null, False, True
|
||||
|
||||
|
||||
class ObjectIdentifierEncoder(AbstractItemEncoder):
    """BER encoder for OBJECT IDENTIFIER.

    The first two arcs are folded into one sub-identifier
    (first * 40 + second) and each sub-identifier is emitted in
    base-128 with continuation bits.
    """
    supportIndefLenMode = False

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        if asn1Spec is not None:
            value = asn1Spec.clone(value)

        oid = value.asTuple()

        # Build the first pair
        try:
            first = oid[0]
            second = oid[1]

        except IndexError:
            raise error.PyAsn1Error('Short OID %s' % (value,))

        if 0 <= second <= 39:
            if first == 1:
                oid = (second + 40,) + oid[2:]
            elif first == 0:
                oid = (second,) + oid[2:]
            elif first == 2:
                oid = (second + 80,) + oid[2:]
            else:
                raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))

        elif first == 2:
            # Only the joint-iso-itu-t (2) root allows second arc > 39.
            oid = (second + 80,) + oid[2:]

        else:
            raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,))

        octets = ()

        # Cycle through subIds
        for subOid in oid:
            if 0 <= subOid <= 127:
                # Optimize for the common case
                octets += (subOid,)

            elif subOid > 127:
                # Pack large Sub-Object IDs
                res = (subOid & 0x7f,)
                subOid >>= 7

                while subOid:
                    res = (0x80 | (subOid & 0x7f),) + res
                    subOid >>= 7

                # Add packed Sub-Object ID to resulted Object ID
                octets += res

            else:
                raise error.PyAsn1Error('Negative OID arc %s at %s' % (subOid, value))

        return octets, False, False
|
||||
|
||||
|
||||
class RealEncoder(AbstractItemEncoder):
    """BER encoder for REAL (binary base 2/8/16 or character base 10)."""
    supportIndefLenMode = 0
    binEncBase = 2  # set to None to choose encoding base automatically

    @staticmethod
    def _dropFloatingPoint(m, encbase, e):
        """Scale mantissa/exponent until the mantissa is an exact integer.

        Returns (mantissa sign, integer mantissa, encoding base,
        adjusted exponent).
        """
        ms, es = 1, 1
        if m < 0:
            ms = -1  # mantissa sign

        if e < 0:
            es = -1  # exponent sign

        m *= ms

        # Convert a base-2 exponent to base-8/16, folding the remainder
        # bits back into the mantissa.
        if encbase == 8:
            m *= 2 ** (abs(e) % 3 * es)
            e = abs(e) // 3 * es

        elif encbase == 16:
            m *= 2 ** (abs(e) % 4 * es)
            e = abs(e) // 4 * es

        while True:
            if int(m) != m:
                m *= encbase
                e -= 1
                continue
            break

        return ms, int(m), encbase, e

    def _chooseEncBase(self, value):
        """Pick the binary base (2/8/16) giving the most compact encoding."""
        m, b, e = value
        encBase = [2, 8, 16]
        if value.binEncBase in encBase:
            return self._dropFloatingPoint(m, value.binEncBase, e)

        elif self.binEncBase in encBase:
            return self._dropFloatingPoint(m, self.binEncBase, e)

        # auto choosing base 2/8/16
        mantissa = [m, m, m]
        exponent = [e, e, e]
        sign = 1
        encbase = 2
        e = float('inf')

        for i in range(3):
            (sign,
             mantissa[i],
             encBase[i],
             exponent[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponent[i])

            if abs(exponent[i]) < abs(e) or (abs(exponent[i]) == abs(e) and mantissa[i] < m):
                e = exponent[i]
                m = int(mantissa[i])
                encbase = encBase[i]

        if LOG:
            LOG('automatically chosen REAL encoding base %s, sign %s, mantissa %s, '
                'exponent %s' % (encbase, sign, m, e))

        return sign, m, encbase, e

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        if asn1Spec is not None:
            value = asn1Spec.clone(value)

        # Special values use dedicated single-octet encodings.
        if value.isPlusInf:
            return (0x40,), False, False

        if value.isMinusInf:
            return (0x41,), False, False

        m, b, e = value

        if not m:
            # Zero is encoded with an empty payload.
            return null, False, True

        if b == 10:
            if LOG:
                LOG('encoding REAL into character form')

            # ISO 6093 NR3 character form, marker octet 0x03.
            return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), False, True

        elif b == 2:
            fo = 0x80  # binary encoding
            ms, m, encbase, e = self._chooseEncBase(value)

            if ms < 0:  # mantissa sign
                fo |= 0x40  # sign bit

            # exponent & mantissa normalization
            if encbase == 2:
                while m & 0x1 == 0:
                    m >>= 1
                    e += 1

            elif encbase == 8:
                while m & 0x7 == 0:
                    m >>= 3
                    e += 1
                fo |= 0x10

            else:  # encbase = 16
                while m & 0xf == 0:
                    m >>= 4
                    e += 1
                fo |= 0x20

            sf = 0  # scale factor

            while m & 0x1 == 0:
                m >>= 1
                sf += 1

            if sf > 3:
                raise error.PyAsn1Error('Scale factor overflow')  # bug if raised

            fo |= sf << 2
            eo = null
            if e == 0 or e == -1:
                eo = int2oct(e & 0xff)

            else:
                # Emit the exponent as minimal two's complement octets.
                while e not in (0, -1):
                    eo = int2oct(e & 0xff) + eo
                    e >>= 8

                if e == 0 and eo and oct2int(eo[0]) & 0x80:
                    eo = int2oct(0) + eo

                if e == -1 and eo and not (oct2int(eo[0]) & 0x80):
                    eo = int2oct(0xff) + eo

            n = len(eo)
            if n > 0xff:
                raise error.PyAsn1Error('Real exponent overflow')

            # Exponent length is encoded in the two low bits of the
            # format octet; 4+ octets need an explicit length octet.
            if n == 1:
                pass

            elif n == 2:
                fo |= 1

            elif n == 3:
                fo |= 2

            else:
                fo |= 3
                eo = int2oct(n & 0xff) + eo

            po = null

            while m:
                po = int2oct(m & 0xff) + po
                m >>= 8

            substrate = int2oct(fo) + eo + po

            return substrate, False, True

        else:
            raise error.PyAsn1Error('Prohibited Real base %s' % b)
|
||||
|
||||
|
||||
class SequenceEncoder(AbstractItemEncoder):
    """BER encoder for SEQUENCE / SET.

    Handles both fully-instantiated ASN.1 objects (asn1Spec is None)
    and bare Python mappings paired with a schema, skipping absent
    OPTIONAL and default-valued DEFAULT components, and wrapping
    open-type (ANY-like) components when required.
    """
    omitEmptyOptionals = False

    # TODO: handling three flavors of input is too much -- split over codecs

    def encodeValue(self, value, asn1Spec, encodeFun, **options):

        substrate = null

        omitEmptyOptionals = options.get(
            'omitEmptyOptionals', self.omitEmptyOptionals)

        if LOG:
            LOG('%sencoding empty OPTIONAL components' % (
                omitEmptyOptionals and 'not ' or ''))

        if asn1Spec is None:
            # instance of ASN.1 schema
            inconsistency = value.isInconsistent
            if inconsistency:
                raise inconsistency

            namedTypes = value.componentType

            for idx, component in enumerate(value.values()):
                if namedTypes:
                    namedType = namedTypes[idx]

                    if namedType.isOptional and not component.isValue:
                        if LOG:
                            LOG('not encoding OPTIONAL component %r' % (namedType,))
                        continue

                    if namedType.isDefaulted and component == namedType.asn1Object:
                        if LOG:
                            LOG('not encoding DEFAULT component %r' % (namedType,))
                        continue

                    if omitEmptyOptionals:
                        options.update(ifNotEmpty=namedType.isOptional)

                # wrap open type blob if needed
                if namedTypes and namedType.openType:

                    wrapType = namedType.asn1Object

                    if wrapType.typeId in (
                            univ.SetOf.typeId, univ.SequenceOf.typeId):

                        substrate += encodeFun(
                            component, asn1Spec,
                            **dict(options, wrapType=wrapType.componentType))

                    else:
                        chunk = encodeFun(component, asn1Spec, **options)

                        if wrapType.isSameTypeWith(component):
                            substrate += chunk

                        else:
                            substrate += encodeFun(chunk, wrapType, **options)

                            if LOG:
                                LOG('wrapped with wrap type %r' % (wrapType,))

                else:
                    substrate += encodeFun(component, asn1Spec, **options)

        else:
            # bare Python value + ASN.1 schema
            for idx, namedType in enumerate(asn1Spec.componentType.namedTypes):

                try:
                    component = value[namedType.name]

                except KeyError:
                    raise error.PyAsn1Error('Component name "%s" not found in %r' % (
                        namedType.name, value))

                if namedType.isOptional and namedType.name not in value:
                    if LOG:
                        LOG('not encoding OPTIONAL component %r' % (namedType,))
                    continue

                if namedType.isDefaulted and component == namedType.asn1Object:
                    if LOG:
                        LOG('not encoding DEFAULT component %r' % (namedType,))
                    continue

                if omitEmptyOptionals:
                    options.update(ifNotEmpty=namedType.isOptional)

                componentSpec = namedType.asn1Object

                # wrap open type blob if needed
                if namedType.openType:

                    if componentSpec.typeId in (
                            univ.SetOf.typeId, univ.SequenceOf.typeId):

                        substrate += encodeFun(
                            component, componentSpec,
                            **dict(options, wrapType=componentSpec.componentType))

                    else:
                        chunk = encodeFun(component, componentSpec, **options)

                        if componentSpec.isSameTypeWith(component):
                            substrate += chunk

                        else:
                            substrate += encodeFun(chunk, componentSpec, **options)

                            if LOG:
                                LOG('wrapped with wrap type %r' % (componentSpec,))

                else:
                    substrate += encodeFun(component, componentSpec, **options)

        return substrate, True, True
|
||||
|
||||
|
||||
class SequenceOfEncoder(AbstractItemEncoder):
    """BER encoder for SEQUENCE OF / SET OF."""

    def _encodeComponents(self, value, asn1Spec, encodeFun, **options):
        """Encode each component; returns the list of encoded chunks."""
        if asn1Spec is None:
            inconsistency = value.isInconsistent
            if inconsistency:
                raise inconsistency

        else:
            asn1Spec = asn1Spec.componentType

        chunks = []

        # 'wrapType' is popped so it does not propagate to nested calls.
        wrapType = options.pop('wrapType', None)

        for idx, component in enumerate(value):
            chunk = encodeFun(component, asn1Spec, **options)

            if (wrapType is not None and
                    not wrapType.isSameTypeWith(component)):
                # wrap encoded value with wrapper container (e.g. ANY)
                chunk = encodeFun(chunk, wrapType, **options)

                if LOG:
                    LOG('wrapped with wrap type %r' % (wrapType,))

            chunks.append(chunk)

        return chunks

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        chunks = self._encodeComponents(
            value, asn1Spec, encodeFun, **options)

        return null.join(chunks), True, True
|
||||
|
||||
|
||||
class ChoiceEncoder(AbstractItemEncoder):
    """BER encoder for CHOICE: encodes the single selected alternative."""

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        if asn1Spec is None:
            component = value.getComponent()
        else:
            # Exactly one named alternative must be present in *value*.
            names = [namedType.name for namedType in asn1Spec.componentType.namedTypes
                     if namedType.name in value]
            if len(names) != 1:
                raise error.PyAsn1Error('%s components for Choice at %r' % (len(names) and 'Multiple ' or 'None ', value))

            name = names[0]

            component = value[name]
            asn1Spec = asn1Spec[name]

        return encodeFun(component, asn1Spec, **options), True, True
|
||||
|
||||
|
||||
class AnyEncoder(OctetStringEncoder):
    """BER encoder for ANY: payload is emitted verbatim, untagged."""

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        if asn1Spec is None:
            value = value.asOctets()
        elif not isOctetsType(value):
            value = asn1Spec.clone(value).asOctets()

        # In indefinite mode ANY is treated as constructed so the
        # caller appends end-of-octets.
        return value, not options.get('defMode', True), True
|
||||
|
||||
|
||||
tagMap = {
|
||||
eoo.endOfOctets.tagSet: EndOfOctetsEncoder(),
|
||||
univ.Boolean.tagSet: BooleanEncoder(),
|
||||
univ.Integer.tagSet: IntegerEncoder(),
|
||||
univ.BitString.tagSet: BitStringEncoder(),
|
||||
univ.OctetString.tagSet: OctetStringEncoder(),
|
||||
univ.Null.tagSet: NullEncoder(),
|
||||
univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
|
||||
univ.Enumerated.tagSet: IntegerEncoder(),
|
||||
univ.Real.tagSet: RealEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.SequenceOf.tagSet: SequenceOfEncoder(),
|
||||
univ.SetOf.tagSet: SequenceOfEncoder(),
|
||||
univ.Choice.tagSet: ChoiceEncoder(),
|
||||
# character string types
|
||||
char.UTF8String.tagSet: OctetStringEncoder(),
|
||||
char.NumericString.tagSet: OctetStringEncoder(),
|
||||
char.PrintableString.tagSet: OctetStringEncoder(),
|
||||
char.TeletexString.tagSet: OctetStringEncoder(),
|
||||
char.VideotexString.tagSet: OctetStringEncoder(),
|
||||
char.IA5String.tagSet: OctetStringEncoder(),
|
||||
char.GraphicString.tagSet: OctetStringEncoder(),
|
||||
char.VisibleString.tagSet: OctetStringEncoder(),
|
||||
char.GeneralString.tagSet: OctetStringEncoder(),
|
||||
char.UniversalString.tagSet: OctetStringEncoder(),
|
||||
char.BMPString.tagSet: OctetStringEncoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
|
||||
useful.GeneralizedTime.tagSet: OctetStringEncoder(),
|
||||
useful.UTCTime.tagSet: OctetStringEncoder()
|
||||
}
|
||||
|
||||
# Put in ambiguous & non-ambiguous types for faster codec lookup
|
||||
typeMap = {
|
||||
univ.Boolean.typeId: BooleanEncoder(),
|
||||
univ.Integer.typeId: IntegerEncoder(),
|
||||
univ.BitString.typeId: BitStringEncoder(),
|
||||
univ.OctetString.typeId: OctetStringEncoder(),
|
||||
univ.Null.typeId: NullEncoder(),
|
||||
univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(),
|
||||
univ.Enumerated.typeId: IntegerEncoder(),
|
||||
univ.Real.typeId: RealEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.Set.typeId: SequenceEncoder(),
|
||||
univ.SetOf.typeId: SequenceOfEncoder(),
|
||||
univ.Sequence.typeId: SequenceEncoder(),
|
||||
univ.SequenceOf.typeId: SequenceOfEncoder(),
|
||||
univ.Choice.typeId: ChoiceEncoder(),
|
||||
univ.Any.typeId: AnyEncoder(),
|
||||
# character string types
|
||||
char.UTF8String.typeId: OctetStringEncoder(),
|
||||
char.NumericString.typeId: OctetStringEncoder(),
|
||||
char.PrintableString.typeId: OctetStringEncoder(),
|
||||
char.TeletexString.typeId: OctetStringEncoder(),
|
||||
char.VideotexString.typeId: OctetStringEncoder(),
|
||||
char.IA5String.typeId: OctetStringEncoder(),
|
||||
char.GraphicString.typeId: OctetStringEncoder(),
|
||||
char.VisibleString.typeId: OctetStringEncoder(),
|
||||
char.GeneralString.typeId: OctetStringEncoder(),
|
||||
char.UniversalString.typeId: OctetStringEncoder(),
|
||||
char.BMPString.typeId: OctetStringEncoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.typeId: OctetStringEncoder(),
|
||||
useful.GeneralizedTime.typeId: OctetStringEncoder(),
|
||||
useful.UTCTime.typeId: OctetStringEncoder()
|
||||
}
|
||||
|
||||
|
||||
class Encoder(object):
    """Translates a pyasn1 (or plain Python) value into BER substrate.

    Subclasses (CER/DER) pin down length form and chunking behaviour via
    the two class attributes below.
    """

    # When not None, forces definite (True) or indefinite (False) length form
    fixedDefLengthMode = None
    # When not None, forces this chunk size for constructed encodings
    fixedChunkSize = None

    # noinspection PyDefaultArgument
    def __init__(self, tagMap, typeMap={}):
        self.__tagMap = tagMap
        self.__typeMap = typeMap

    def __call__(self, value, asn1Spec=None, **options):
        """Encode *value* (optionally guided by *asn1Spec*) and return substrate.

        Raises ~pyasn1.error.PyAsn1Error when no suitable codec is found or
        when *value* is not an ASN.1 type instance and no spec is given.
        """
        try:
            if asn1Spec is None:
                typeId = value.typeId
            else:
                typeId = asn1Spec.typeId

        except AttributeError:
            raise error.PyAsn1Error('Value %r is not ASN.1 type instance '
                                    'and "asn1Spec" not given' % (value,))

        if LOG:
            LOG('encoder called in %sdef mode, chunk size %s for '
                'type %s, value:\n%s' % (not options.get('defMode', True) and 'in' or '', options.get('maxChunkSize', 0), asn1Spec is None and value.prettyPrintType() or asn1Spec.prettyPrintType(), value))

        # subclass-fixed settings override caller-supplied options
        if self.fixedDefLengthMode is not None:
            options.update(defMode=self.fixedDefLengthMode)

        if self.fixedChunkSize is not None:
            options.update(maxChunkSize=self.fixedChunkSize)

        # prefer the unambiguous typeId lookup; fall back to tag lookup
        try:
            concreteEncoder = self.__typeMap[typeId]

            if LOG:
                LOG('using value codec %s chosen by type ID %s' % (concreteEncoder.__class__.__name__, typeId))

        except KeyError:
            if asn1Spec is None:
                tagSet = value.tagSet
            else:
                tagSet = asn1Spec.tagSet

            # use base type for codec lookup to recover untagged types
            baseTagSet = tag.TagSet(tagSet.baseTag, tagSet.baseTag)

            try:
                concreteEncoder = self.__tagMap[baseTagSet]

            except KeyError:
                raise error.PyAsn1Error('No encoder for %r (%s)' % (value, tagSet))

            if LOG:
                LOG('using value codec %s chosen by tagSet %s' % (concreteEncoder.__class__.__name__, tagSet))

        substrate = concreteEncoder.encode(value, asn1Spec, self, **options)

        if LOG:
            LOG('codec %s built %s octets of substrate: %s\nencoder completed' % (concreteEncoder, len(substrate), debug.hexdump(substrate)))

        return substrate
|
||||
|
||||
#: Turns ASN.1 object into BER octet stream.
|
||||
#:
|
||||
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: walks all its components recursively and produces a BER octet stream.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: value: either a Python or pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: A Python or pyasn1 object to encode. If Python object is given, `asnSpec`
|
||||
#: parameter is required to guide the encoding process.
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec:
|
||||
#: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#:
|
||||
#: defMode: :py:class:`bool`
|
||||
#: If :obj:`False`, produces indefinite length encoding
|
||||
#:
|
||||
#: maxChunkSize: :py:class:`int`
|
||||
#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size)
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: Given ASN.1 object encoded into BER octetstream
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On encoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Encode Python value into BER with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> encode([1, 2, 3], asn1Spec=seq)
|
||||
#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
|
||||
#:
|
||||
#: Encode ASN.1 value object into BER
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> seq.extend([1, 2, 3])
|
||||
#: >>> encode(seq)
|
||||
#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
|
||||
#:
|
||||
encode = Encoder(tagMap, typeMap)
|
||||
28
Lambdas/Websocket Authorizer/pyasn1/codec/ber/eoo.py
Normal file
28
Lambdas/Websocket Authorizer/pyasn1/codec/ber/eoo.py
Normal file
@@ -0,0 +1,28 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1.type import base
|
||||
from pyasn1.type import tag
|
||||
|
||||
__all__ = ['endOfOctets']
|
||||
|
||||
|
||||
class EndOfOctets(base.SimpleAsn1Type):
    """Sentinel ASN.1 item marking the end of an indefinite-length encoding.

    Implemented as a process-wide singleton (see ``__new__``).
    """
    defaultValue = 0
    # end-of-octets is encoded as universal class, primitive form, tag 0x00
    tagSet = tag.initTagSet(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00)
    )

    # cached singleton instance
    _instance = None

    def __new__(cls, *args, **kwargs):
        # create on first use, then always hand back the same object
        if cls._instance is None:
            cls._instance = object.__new__(cls, *args, **kwargs)

        return cls._instance


endOfOctets = EndOfOctets()
|
||||
@@ -0,0 +1 @@
|
||||
# This file is necessary to make this directory a package.
|
||||
114
Lambdas/Websocket Authorizer/pyasn1/codec/cer/decoder.py
Normal file
114
Lambdas/Websocket Authorizer/pyasn1/codec/cer/decoder.py
Normal file
@@ -0,0 +1,114 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
from pyasn1.codec.ber import decoder
|
||||
from pyasn1.compat.octets import oct2int
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['decode']
|
||||
|
||||
|
||||
class BooleanDecoder(decoder.AbstractSimpleDecoder):
    """CER decoder for BOOLEAN: accepts only the canonical 0xFF / 0x00 forms."""

    protoComponent = univ.Boolean(0)

    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        head = substrate[:length]
        tail = substrate[length:]

        if length != 1 or not head:
            raise error.PyAsn1Error('Not single-octet Boolean payload')

        octet = oct2int(head[0])

        # CER/DER mandate TRUE == 0xFF and FALSE == 0x00 (X.690 sections
        # 8.2.2 and 11.1); plain BER's "any non-zero is TRUE" is rejected.
        if octet == 0xff:
            truth = 1
        elif octet == 0x00:
            truth = 0
        else:
            raise error.PyAsn1Error('Unexpected Boolean payload: %s' % octet)

        return self._createComponent(asn1Spec, tagSet, truth, **options), tail
|
||||
|
||||
# TODO: prohibit non-canonical encoding
|
||||
BitStringDecoder = decoder.BitStringDecoder
|
||||
OctetStringDecoder = decoder.OctetStringDecoder
|
||||
RealDecoder = decoder.RealDecoder
|
||||
|
||||
tagMap = decoder.tagMap.copy()
|
||||
tagMap.update(
|
||||
{univ.Boolean.tagSet: BooleanDecoder(),
|
||||
univ.BitString.tagSet: BitStringDecoder(),
|
||||
univ.OctetString.tagSet: OctetStringDecoder(),
|
||||
univ.Real.tagSet: RealDecoder()}
|
||||
)
|
||||
|
||||
typeMap = decoder.typeMap.copy()
|
||||
|
||||
# Put in non-ambiguous types for faster codec lookup
|
||||
for typeDecoder in tagMap.values():
|
||||
if typeDecoder.protoComponent is not None:
|
||||
typeId = typeDecoder.protoComponent.__class__.typeId
|
||||
if typeId is not None and typeId not in typeMap:
|
||||
typeMap[typeId] = typeDecoder
|
||||
|
||||
|
||||
class Decoder(decoder.Decoder):
|
||||
pass
|
||||
|
||||
|
||||
#: Turns CER octet stream into an ASN.1 object.
|
||||
#:
|
||||
#: Takes CER octet-stream and decode it into an ASN.1 object
|
||||
#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
|
||||
#: may be a scalar or an arbitrary nested structure.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: CER octet-stream
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
|
||||
#: being decoded, *asn1Spec* may or may not be required. Most common reason for
|
||||
#: it to require is that ASN.1 structure is encoded in *IMPLICIT* tagging mode.
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`tuple`
|
||||
#: A tuple of pyasn1 object recovered from CER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: and the unprocessed trailing portion of the *substrate* (may be empty)
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
|
||||
#: On decoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Decode CER serialisation without ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> s, _ = decode(b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00')
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
#: Decode CER serialisation with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> s, _ = decode(b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00', asn1Spec=seq)
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
decode = Decoder(tagMap, decoder.typeMap)
|
||||
313
Lambdas/Websocket Authorizer/pyasn1/codec/cer/encoder.py
Normal file
313
Lambdas/Websocket Authorizer/pyasn1/codec/cer/encoder.py
Normal file
@@ -0,0 +1,313 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
from pyasn1.codec.ber import encoder
|
||||
from pyasn1.compat.octets import str2octs, null
|
||||
from pyasn1.type import univ
|
||||
from pyasn1.type import useful
|
||||
|
||||
__all__ = ['encode']
|
||||
|
||||
|
||||
class BooleanEncoder(encoder.IntegerEncoder):
    """CER encoder for BOOLEAN: TRUE is always 0xFF, FALSE is 0x00."""

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        octet = 0 if value == 0 else 255
        return (octet,), False, False
|
||||
|
||||
|
||||
class RealEncoder(encoder.RealEncoder):
|
||||
def _chooseEncBase(self, value):
|
||||
m, b, e = value
|
||||
return self._dropFloatingPoint(m, b, e)
|
||||
|
||||
|
||||
# specialized GeneralStringEncoder here
|
||||
|
||||
class TimeEncoderMixIn(object):
    """Shared CER/DER canonicalisation for GeneralizedTime and UTCTime.

    Validates the textual time representation and strips redundant
    fractional zeros before delegating to the octet-string encoder.
    """
    Z_CHAR = ord('Z')
    PLUS_CHAR = ord('+')
    MINUS_CHAR = ord('-')
    COMMA_CHAR = ord(',')
    DOT_CHAR = ord('.')
    ZERO_CHAR = ord('0')

    # length bounds on the serialised time string; subclasses override
    MIN_LENGTH = 12
    MAX_LENGTH = 19

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        # CER encoding constraints:
        # - minutes are mandatory, seconds are optional
        # - sub-seconds must NOT be zero / no meaningless zeros
        # - no hanging fraction dot
        # - time in UTC (Z)
        # - only dot is allowed for fractions

        if asn1Spec is not None:
            value = asn1Spec.clone(value)

        # work on the character codes of the serialised time string
        numbers = value.asNumbers()

        if self.PLUS_CHAR in numbers or self.MINUS_CHAR in numbers:
            raise error.PyAsn1Error('Must be UTC time: %r' % value)

        if numbers[-1] != self.Z_CHAR:
            raise error.PyAsn1Error('Missing "Z" time zone specifier: %r' % value)

        if self.COMMA_CHAR in numbers:
            raise error.PyAsn1Error('Comma in fractions disallowed: %r' % value)

        if self.DOT_CHAR in numbers:

            isModified = False

            numbers = list(numbers)

            # scan backwards from just past the fraction towards the dot,
            # deleting trailing zero digits as we go
            searchIndex = min(numbers.index(self.DOT_CHAR) + 4, len(numbers) - 1)

            while numbers[searchIndex] != self.DOT_CHAR:
                if numbers[searchIndex] == self.ZERO_CHAR:
                    del numbers[searchIndex]
                    isModified = True

                searchIndex -= 1

            searchIndex += 1

            if searchIndex < len(numbers):
                if numbers[searchIndex] == self.Z_CHAR:
                    # drop hanging comma
                    del numbers[searchIndex - 1]
                    isModified = True

            if isModified:
                value = value.clone(numbers)

        if not self.MIN_LENGTH < len(numbers) < self.MAX_LENGTH:
            raise error.PyAsn1Error('Length constraint violated: %r' % value)

        # force a chunk size large enough that the string is not fragmented
        options.update(maxChunkSize=1000)

        return encoder.OctetStringEncoder.encodeValue(
            self, value, asn1Spec, encodeFun, **options
        )
|
||||
|
||||
|
||||
class GeneralizedTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
|
||||
MIN_LENGTH = 12
|
||||
MAX_LENGTH = 20
|
||||
|
||||
|
||||
class UTCTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder):
|
||||
MIN_LENGTH = 10
|
||||
MAX_LENGTH = 14
|
||||
|
||||
|
||||
class SetOfEncoder(encoder.SequenceOfEncoder):
    """CER encoder for SET OF: component encodings are emitted in sorted order."""

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        chunks = self._encodeComponents(
            value, asn1Spec, encodeFun, **options)

        if len(chunks) > 1:
            # X.690: SET OF components sort by their encodings, with shorter
            # encodings zero-padded on the right for comparison purposes.
            # sorted() is stable, matching the original in-place key sort.
            pad = str2octs('\x00')
            width = max(len(chunk) for chunk in chunks)
            chunks = sorted(chunks, key=lambda chunk: chunk.ljust(width, pad))

        return null.join(chunks), True, True
|
||||
|
||||
|
||||
class SequenceOfEncoder(encoder.SequenceOfEncoder):
    """CER encoder for SEQUENCE OF, honouring the 'ifNotEmpty' option."""

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        # optional-and-empty components collapse to nothing
        omitWhenEmpty = options.get('ifNotEmpty', False)
        if omitWhenEmpty and len(value) == 0:
            return null, True, True

        encodedComponents = self._encodeComponents(
            value, asn1Spec, encodeFun, **options)

        return null.join(encodedComponents), True, True
|
||||
|
||||
|
||||
class SetEncoder(encoder.SequenceEncoder):
    """CER encoder for SET: components are emitted in canonical tag order."""

    @staticmethod
    def _componentSortKey(componentAndType):
        """Sort SET components by tag

        Sort regardless of the Choice value (static sort)
        """
        component, asn1Spec = componentAndType

        if asn1Spec is None:
            asn1Spec = component

        if asn1Spec.typeId == univ.Choice.typeId and not asn1Spec.tagSet:
            # Untagged CHOICE carries no tag of its own -- order it by the
            # smallest tag among its alternatives.  (The previous version
            # re-tested `asn1Spec.tagSet` here, but that branch was
            # unreachable: the enclosing condition requires it to be empty.)
            return asn1Spec.componentType.minTagSet
        else:
            return asn1Spec.tagSet

    def encodeValue(self, value, asn1Spec, encodeFun, **options):
        """Encode *value* as a SET, sorting components canonically and
        omitting absent OPTIONAL and DEFAULT-valued components."""

        substrate = null

        comps = []
        compsMap = {}

        if asn1Spec is None:
            # instance of ASN.1 schema
            inconsistency = value.isInconsistent
            if inconsistency:
                raise inconsistency

            namedTypes = value.componentType

            for idx, component in enumerate(value.values()):
                if namedTypes:
                    namedType = namedTypes[idx]

                    # skip absent OPTIONAL components
                    if namedType.isOptional and not component.isValue:
                        continue

                    # skip components equal to their DEFAULT value
                    if namedType.isDefaulted and component == namedType.asn1Object:
                        continue

                    compsMap[id(component)] = namedType

                else:
                    compsMap[id(component)] = None

                comps.append((component, asn1Spec))

        else:
            # bare Python value + ASN.1 schema
            for idx, namedType in enumerate(asn1Spec.componentType.namedTypes):

                try:
                    component = value[namedType.name]

                except KeyError:
                    raise error.PyAsn1Error('Component name "%s" not found in %r' % (namedType.name, value))

                if namedType.isOptional and namedType.name not in value:
                    continue

                if namedType.isDefaulted and component == namedType.asn1Object:
                    continue

                compsMap[id(component)] = namedType
                comps.append((component, asn1Spec[idx]))

        for comp, compType in sorted(comps, key=self._componentSortKey):
            namedType = compsMap[id(comp)]

            if namedType:
                options.update(ifNotEmpty=namedType.isOptional)

            chunk = encodeFun(comp, compType, **options)

            # wrap open type blob if needed
            if namedType and namedType.openType:
                wrapType = namedType.asn1Object
                if wrapType.tagSet and not wrapType.isSameTypeWith(comp):
                    chunk = encodeFun(chunk, wrapType, **options)

            substrate += chunk

        return substrate, True, True
|
||||
|
||||
|
||||
class SequenceEncoder(encoder.SequenceEncoder):
|
||||
omitEmptyOptionals = True
|
||||
|
||||
|
||||
tagMap = encoder.tagMap.copy()
|
||||
tagMap.update({
|
||||
univ.Boolean.tagSet: BooleanEncoder(),
|
||||
univ.Real.tagSet: RealEncoder(),
|
||||
useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
|
||||
useful.UTCTime.tagSet: UTCTimeEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.SetOf.tagSet: SetOfEncoder(),
|
||||
univ.Sequence.typeId: SequenceEncoder()
|
||||
})
|
||||
|
||||
typeMap = encoder.typeMap.copy()
|
||||
typeMap.update({
|
||||
univ.Boolean.typeId: BooleanEncoder(),
|
||||
univ.Real.typeId: RealEncoder(),
|
||||
useful.GeneralizedTime.typeId: GeneralizedTimeEncoder(),
|
||||
useful.UTCTime.typeId: UTCTimeEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.Set.typeId: SetEncoder(),
|
||||
univ.SetOf.typeId: SetOfEncoder(),
|
||||
univ.Sequence.typeId: SequenceEncoder(),
|
||||
univ.SequenceOf.typeId: SequenceOfEncoder()
|
||||
})
|
||||
|
||||
|
||||
class Encoder(encoder.Encoder):
|
||||
fixedDefLengthMode = False
|
||||
fixedChunkSize = 1000
|
||||
|
||||
#: Turns ASN.1 object into CER octet stream.
|
||||
#:
|
||||
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: walks all its components recursively and produces a CER octet stream.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: value: either a Python or pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: A Python or pyasn1 object to encode. If Python object is given, `asnSpec`
|
||||
#: parameter is required to guide the encoding process.
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec:
|
||||
#: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: Given ASN.1 object encoded into BER octet-stream
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On encoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Encode Python value into CER with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> encode([1, 2, 3], asn1Spec=seq)
|
||||
#: b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00'
|
||||
#:
|
||||
#: Encode ASN.1 value object into CER
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> seq.extend([1, 2, 3])
|
||||
#: >>> encode(seq)
|
||||
#: b'0\x80\x02\x01\x01\x02\x01\x02\x02\x01\x03\x00\x00'
|
||||
#:
|
||||
encode = Encoder(tagMap, typeMap)
|
||||
|
||||
# EncoderFactory queries class instance and builds a map of tags -> encoders
|
||||
@@ -0,0 +1 @@
|
||||
# This file is necessary to make this directory a package.
|
||||
94
Lambdas/Websocket Authorizer/pyasn1/codec/der/decoder.py
Normal file
94
Lambdas/Websocket Authorizer/pyasn1/codec/der/decoder.py
Normal file
@@ -0,0 +1,94 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1.codec.cer import decoder
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['decode']
|
||||
|
||||
|
||||
class BitStringDecoder(decoder.BitStringDecoder):
|
||||
supportConstructedForm = False
|
||||
|
||||
|
||||
class OctetStringDecoder(decoder.OctetStringDecoder):
|
||||
supportConstructedForm = False
|
||||
|
||||
# TODO: prohibit non-canonical encoding
|
||||
RealDecoder = decoder.RealDecoder
|
||||
|
||||
tagMap = decoder.tagMap.copy()
|
||||
tagMap.update(
|
||||
{univ.BitString.tagSet: BitStringDecoder(),
|
||||
univ.OctetString.tagSet: OctetStringDecoder(),
|
||||
univ.Real.tagSet: RealDecoder()}
|
||||
)
|
||||
|
||||
typeMap = decoder.typeMap.copy()
|
||||
|
||||
# Put in non-ambiguous types for faster codec lookup
|
||||
for typeDecoder in tagMap.values():
|
||||
if typeDecoder.protoComponent is not None:
|
||||
typeId = typeDecoder.protoComponent.__class__.typeId
|
||||
if typeId is not None and typeId not in typeMap:
|
||||
typeMap[typeId] = typeDecoder
|
||||
|
||||
|
||||
class Decoder(decoder.Decoder):
|
||||
supportIndefLength = False
|
||||
|
||||
|
||||
#: Turns DER octet stream into an ASN.1 object.
|
||||
#:
|
||||
#: Takes DER octet-stream and decode it into an ASN.1 object
|
||||
#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
|
||||
#: may be a scalar or an arbitrary nested structure.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: DER octet-stream
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
|
||||
#: being decoded, *asn1Spec* may or may not be required. Most common reason for
|
||||
#: it to require is that ASN.1 structure is encoded in *IMPLICIT* tagging mode.
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`tuple`
|
||||
#: A tuple of pyasn1 object recovered from DER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: and the unprocessed trailing portion of the *substrate* (may be empty)
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
|
||||
#: On decoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Decode DER serialisation without ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03')
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
#: Decode DER serialisation with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03', asn1Spec=seq)
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
decode = Decoder(tagMap, typeMap)
|
||||
107
Lambdas/Websocket Authorizer/pyasn1/codec/der/encoder.py
Normal file
107
Lambdas/Websocket Authorizer/pyasn1/codec/der/encoder.py
Normal file
@@ -0,0 +1,107 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
from pyasn1.codec.cer import encoder
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['encode']
|
||||
|
||||
|
||||
class SetEncoder(encoder.SetEncoder):
    """DER encoder for SET.

    Unlike the CER base class, untagged CHOICE components are ordered by the
    tag of the alternative actually chosen (dynamic sort).
    """

    @staticmethod
    def _componentSortKey(componentAndType):
        """Sort SET components by tag

        Sort depending on the actual Choice value (dynamic sort)
        """
        component, asn1Spec = componentAndType

        if asn1Spec is None:
            compType = component
        else:
            compType = asn1Spec

        if compType.typeId == univ.Choice.typeId and not compType.tagSet:
            if asn1Spec is None:
                # pyasn1 object: take the tag of the alternative it holds
                return component.getComponent().tagSet
            else:
                # bare Python value: infer the single chosen alternative
                # TODO: move out of sorting key function
                names = [namedType.name for namedType in asn1Spec.componentType.namedTypes
                         if namedType.name in component]
                if len(names) != 1:
                    raise error.PyAsn1Error(
                        '%s components for Choice at %r' % (len(names) and 'Multiple ' or 'None ', component))

                # TODO: support nested CHOICE ordering
                return asn1Spec[names[0]].tagSet

        else:
            return compType.tagSet
|
||||
|
||||
tagMap = encoder.tagMap.copy()
|
||||
tagMap.update({
|
||||
# Set & SetOf have same tags
|
||||
univ.Set.tagSet: SetEncoder()
|
||||
})
|
||||
|
||||
typeMap = encoder.typeMap.copy()
|
||||
typeMap.update({
|
||||
# Set & SetOf have same tags
|
||||
univ.Set.typeId: SetEncoder()
|
||||
})
|
||||
|
||||
|
||||
class Encoder(encoder.Encoder):
|
||||
fixedDefLengthMode = True
|
||||
fixedChunkSize = 0
|
||||
|
||||
#: Turns ASN.1 object into DER octet stream.
|
||||
#:
|
||||
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: walks all its components recursively and produces a DER octet stream.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: value: either a Python or pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: A Python or pyasn1 object to encode. If Python object is given, `asnSpec`
|
||||
#: parameter is required to guide the encoding process.
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec:
|
||||
#: Optional ASN.1 schema or value object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
|
||||
#: Given ASN.1 object encoded into BER octet-stream
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On encoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Encode Python value into DER with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> encode([1, 2, 3], asn1Spec=seq)
|
||||
#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
|
||||
#:
|
||||
#: Encode ASN.1 value object into DER
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> seq.extend([1, 2, 3])
|
||||
#: >>> encode(seq)
|
||||
#: b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03'
|
||||
#:
|
||||
encode = Encoder(tagMap, typeMap)
|
||||
@@ -0,0 +1 @@
|
||||
# This file is necessary to make this directory a package.
|
||||
213
Lambdas/Websocket Authorizer/pyasn1/codec/native/decoder.py
Normal file
213
Lambdas/Websocket Authorizer/pyasn1/codec/native/decoder.py
Normal file
@@ -0,0 +1,213 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import debug
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import base
|
||||
from pyasn1.type import char
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
from pyasn1.type import useful
|
||||
|
||||
__all__ = ['decode']
|
||||
|
||||
LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER)
|
||||
|
||||
|
||||
class AbstractScalarDecoder(object):
|
||||
def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
|
||||
return asn1Spec.clone(pyObject)
|
||||
|
||||
|
||||
class BitStringDecoder(AbstractScalarDecoder):
|
||||
def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
|
||||
return asn1Spec.clone(univ.BitString.fromBinaryString(pyObject))
|
||||
|
||||
|
||||
class SequenceOrSetDecoder(object):
    """Native decoder for SEQUENCE/SET: fills a cloned spec from a Python dict."""

    def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
        target = asn1Spec.clone()

        fieldTypes = asn1Spec.componentType

        # decode only the fields that the Python mapping actually provides
        for fieldName in target:
            if fieldName not in pyObject:
                continue

            fieldSpec = fieldTypes[fieldName].asn1Object
            target[fieldName] = decodeFun(pyObject[fieldName], fieldSpec, **options)

        return target
|
||||
|
||||
|
||||
class SequenceOfOrSetOfDecoder(object):
    """Native decoder for SEQUENCE OF / SET OF: decodes each element of a
    Python iterable against the spec's componentType."""

    def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
        asn1Value = asn1Spec.clone()

        for pyValue in pyObject:
            # BUG FIX: **options used to be passed to list.append() -- which
            # raised TypeError whenever options were supplied -- instead of
            # to the decoder callback.
            asn1Value.append(decodeFun(pyValue, asn1Spec.componentType, **options))

        return asn1Value
|
||||
|
||||
|
||||
class ChoiceDecoder(object):
    def __call__(self, pyObject, asn1Spec, decodeFun=None, **options):
        """Turn a single-key Python mapping into an ASN.1 CHOICE object.

        Only the first key of *pyObject* that is also declared among the
        CHOICE's component types is decoded; the loop breaks immediately
        after, since a CHOICE carries exactly one alternative.
        """
        asn1Value = asn1Spec.clone()

        componentsTypes = asn1Spec.componentType

        for field in pyObject:
            if field in componentsTypes:
                asn1Value[field] = decodeFun(pyObject[field], componentsTypes[field].asn1Object, **options)
                break

        return asn1Value
|
||||
|
||||
|
||||
tagMap = {
|
||||
univ.Integer.tagSet: AbstractScalarDecoder(),
|
||||
univ.Boolean.tagSet: AbstractScalarDecoder(),
|
||||
univ.BitString.tagSet: BitStringDecoder(),
|
||||
univ.OctetString.tagSet: AbstractScalarDecoder(),
|
||||
univ.Null.tagSet: AbstractScalarDecoder(),
|
||||
univ.ObjectIdentifier.tagSet: AbstractScalarDecoder(),
|
||||
univ.Enumerated.tagSet: AbstractScalarDecoder(),
|
||||
univ.Real.tagSet: AbstractScalarDecoder(),
|
||||
univ.Sequence.tagSet: SequenceOrSetDecoder(), # conflicts with SequenceOf
|
||||
univ.Set.tagSet: SequenceOrSetDecoder(), # conflicts with SetOf
|
||||
univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any
|
||||
# character string types
|
||||
char.UTF8String.tagSet: AbstractScalarDecoder(),
|
||||
char.NumericString.tagSet: AbstractScalarDecoder(),
|
||||
char.PrintableString.tagSet: AbstractScalarDecoder(),
|
||||
char.TeletexString.tagSet: AbstractScalarDecoder(),
|
||||
char.VideotexString.tagSet: AbstractScalarDecoder(),
|
||||
char.IA5String.tagSet: AbstractScalarDecoder(),
|
||||
char.GraphicString.tagSet: AbstractScalarDecoder(),
|
||||
char.VisibleString.tagSet: AbstractScalarDecoder(),
|
||||
char.GeneralString.tagSet: AbstractScalarDecoder(),
|
||||
char.UniversalString.tagSet: AbstractScalarDecoder(),
|
||||
char.BMPString.tagSet: AbstractScalarDecoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.tagSet: AbstractScalarDecoder(),
|
||||
useful.GeneralizedTime.tagSet: AbstractScalarDecoder(),
|
||||
useful.UTCTime.tagSet: AbstractScalarDecoder()
|
||||
}
|
||||
|
||||
# Put in ambiguous & non-ambiguous types for faster codec lookup
|
||||
typeMap = {
|
||||
univ.Integer.typeId: AbstractScalarDecoder(),
|
||||
univ.Boolean.typeId: AbstractScalarDecoder(),
|
||||
univ.BitString.typeId: BitStringDecoder(),
|
||||
univ.OctetString.typeId: AbstractScalarDecoder(),
|
||||
univ.Null.typeId: AbstractScalarDecoder(),
|
||||
univ.ObjectIdentifier.typeId: AbstractScalarDecoder(),
|
||||
univ.Enumerated.typeId: AbstractScalarDecoder(),
|
||||
univ.Real.typeId: AbstractScalarDecoder(),
|
||||
# ambiguous base types
|
||||
univ.Set.typeId: SequenceOrSetDecoder(),
|
||||
univ.SetOf.typeId: SequenceOfOrSetOfDecoder(),
|
||||
univ.Sequence.typeId: SequenceOrSetDecoder(),
|
||||
univ.SequenceOf.typeId: SequenceOfOrSetOfDecoder(),
|
||||
univ.Choice.typeId: ChoiceDecoder(),
|
||||
univ.Any.typeId: AbstractScalarDecoder(),
|
||||
# character string types
|
||||
char.UTF8String.typeId: AbstractScalarDecoder(),
|
||||
char.NumericString.typeId: AbstractScalarDecoder(),
|
||||
char.PrintableString.typeId: AbstractScalarDecoder(),
|
||||
char.TeletexString.typeId: AbstractScalarDecoder(),
|
||||
char.VideotexString.typeId: AbstractScalarDecoder(),
|
||||
char.IA5String.typeId: AbstractScalarDecoder(),
|
||||
char.GraphicString.typeId: AbstractScalarDecoder(),
|
||||
char.VisibleString.typeId: AbstractScalarDecoder(),
|
||||
char.GeneralString.typeId: AbstractScalarDecoder(),
|
||||
char.UniversalString.typeId: AbstractScalarDecoder(),
|
||||
char.BMPString.typeId: AbstractScalarDecoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.typeId: AbstractScalarDecoder(),
|
||||
useful.GeneralizedTime.typeId: AbstractScalarDecoder(),
|
||||
useful.UTCTime.typeId: AbstractScalarDecoder()
|
||||
}
|
||||
|
||||
|
||||
class Decoder(object):
    """Maps Python built-in objects to ASN.1 objects, guided by *asn1Spec*."""

    # noinspection PyDefaultArgument
    def __init__(self, tagMap, typeMap):
        # tagMap: base tag set -> decoder instance (fallback lookup)
        # typeMap: pyasn1 typeId -> decoder instance (preferred, more
        # specific lookup)
        self.__tagMap = tagMap
        self.__typeMap = typeMap

    def __call__(self, pyObject, asn1Spec, **options):
        """Decode *pyObject* into an ASN.1 object modelled by *asn1Spec*.

        Raises a PyAsn1Error if *asn1Spec* is not an ASN.1 item or if no
        decoder is registered for its type/tag.
        """

        if LOG:
            debug.scope.push(type(pyObject).__name__)
            LOG('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__))

        if asn1Spec is None or not isinstance(asn1Spec, base.Asn1Item):
            raise error.PyAsn1Error('asn1Spec is not valid (should be an instance of an ASN.1 Item, not %s)' % asn1Spec.__class__.__name__)

        try:
            # Prefer the specific typeId lookup (disambiguates e.g.
            # Sequence vs SequenceOf, which share tags).
            valueDecoder = self.__typeMap[asn1Spec.typeId]

        except KeyError:
            # use base type for codec lookup to recover untagged types
            baseTagSet = tag.TagSet(asn1Spec.tagSet.baseTag, asn1Spec.tagSet.baseTag)

            try:
                valueDecoder = self.__tagMap[baseTagSet]
            except KeyError:
                raise error.PyAsn1Error('Unknown ASN.1 tag %s' % asn1Spec.tagSet)

        if LOG:
            LOG('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject)))

        value = valueDecoder(pyObject, asn1Spec, self, **options)

        if LOG:
            LOG('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value)))
            debug.scope.pop()

        return value
|
||||
|
||||
|
||||
#: Turns Python objects of built-in types into ASN.1 objects.
|
||||
#:
|
||||
#: Takes Python objects of built-in types and turns them into a tree of
|
||||
#: ASN.1 objects (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
|
||||
#: may be a scalar or an arbitrary nested structure.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
#: pyObject: :py:class:`object`
|
||||
#: A scalar or nested Python objects
|
||||
#:
|
||||
#: Keyword Args
|
||||
#: ------------
|
||||
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#: A pyasn1 type object to act as a template guiding the decoder. It is required
|
||||
#: for successful interpretation of Python objects mapping into their ASN.1
|
||||
#: representations.
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
|
||||
#: A scalar or constructed pyasn1 object
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On decoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Decode native Python object into ASN.1 objects with ASN.1 schema
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> s, _ = decode([1, 2, 3], asn1Spec=seq)
|
||||
#: >>> str(s)
|
||||
#: SequenceOf:
|
||||
#: 1 2 3
|
||||
#:
|
||||
decode = Decoder(tagMap, typeMap)
|
||||
256
Lambdas/Websocket Authorizer/pyasn1/codec/native/encoder.py
Normal file
256
Lambdas/Websocket Authorizer/pyasn1/codec/native/encoder.py
Normal file
@@ -0,0 +1,256 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
|
||||
except ImportError:
|
||||
OrderedDict = dict
|
||||
|
||||
from pyasn1 import debug
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import base
|
||||
from pyasn1.type import char
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
from pyasn1.type import useful
|
||||
|
||||
__all__ = ['encode']
|
||||
|
||||
LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_ENCODER)
|
||||
|
||||
|
||||
class AbstractItemEncoder(object):
    """Base class for native encoders; subclasses override :meth:`encode`."""

    def encode(self, value, encodeFun, **options):
        # Abstract method -- concrete encoders must override this.
        raise error.PyAsn1Error('Not implemented')
|
||||
|
||||
|
||||
class BooleanEncoder(AbstractItemEncoder):
    def encode(self, value, encodeFun, **options):
        # Collapse the ASN.1 BOOLEAN into a plain Python bool.
        return bool(value)
|
||||
|
||||
|
||||
class IntegerEncoder(AbstractItemEncoder):
    def encode(self, value, encodeFun, **options):
        # Collapse the ASN.1 INTEGER (or ENUMERATED) into a plain Python int.
        return int(value)
|
||||
|
||||
|
||||
class BitStringEncoder(AbstractItemEncoder):
|
||||
def encode(self, value, encodeFun, **options):
|
||||
return str(value)
|
||||
|
||||
|
||||
class OctetStringEncoder(AbstractItemEncoder):
|
||||
def encode(self, value, encodeFun, **options):
|
||||
return value.asOctets()
|
||||
|
||||
|
||||
class TextStringEncoder(AbstractItemEncoder):
|
||||
def encode(self, value, encodeFun, **options):
|
||||
return str(value)
|
||||
|
||||
|
||||
class NullEncoder(AbstractItemEncoder):
|
||||
def encode(self, value, encodeFun, **options):
|
||||
return None
|
||||
|
||||
|
||||
class ObjectIdentifierEncoder(AbstractItemEncoder):
|
||||
def encode(self, value, encodeFun, **options):
|
||||
return str(value)
|
||||
|
||||
|
||||
class RealEncoder(AbstractItemEncoder):
|
||||
def encode(self, value, encodeFun, **options):
|
||||
return float(value)
|
||||
|
||||
|
||||
class SetEncoder(AbstractItemEncoder):
    # SET components are unordered, so a plain dict is an adequate
    # container; SequenceEncoder overrides this with OrderedDict.
    protoDict = dict

    def encode(self, value, encodeFun, **options):
        """Encode an ASN.1 SET value into a Python mapping.

        Re-raises the schema-inconsistency error reported by the value
        itself, skips optional components that carry no value, and
        recursively encodes everything else via *encodeFun*.
        """
        inconsistency = value.isInconsistent
        if inconsistency:
            raise inconsistency

        namedTypes = value.componentType
        substrate = self.protoDict()

        for idx, (key, subValue) in enumerate(value.items()):
            # Optional components without an assigned value are omitted
            # from the native representation.
            if namedTypes and namedTypes[idx].isOptional and not value[idx].isValue:
                continue
            substrate[key] = encodeFun(subValue, **options)
        return substrate
|
||||
|
||||
|
||||
class SequenceEncoder(SetEncoder):
|
||||
protoDict = OrderedDict
|
||||
|
||||
|
||||
class SequenceOfEncoder(AbstractItemEncoder):
    """Encodes ASN.1 SEQUENCE OF / SET OF values as plain Python lists."""

    def encode(self, value, encodeFun, **options):
        # Surface any schema inconsistency reported by the value itself
        # before attempting to serialise it.
        inconsistency = value.isInconsistent
        if inconsistency:
            raise inconsistency

        substrate = []
        for component in value:
            substrate.append(encodeFun(component, **options))

        return substrate
|
||||
|
||||
|
||||
class ChoiceEncoder(SequenceEncoder):
|
||||
pass
|
||||
|
||||
|
||||
class AnyEncoder(AbstractItemEncoder):
|
||||
def encode(self, value, encodeFun, **options):
|
||||
return value.asOctets()
|
||||
|
||||
|
||||
tagMap = {
|
||||
univ.Boolean.tagSet: BooleanEncoder(),
|
||||
univ.Integer.tagSet: IntegerEncoder(),
|
||||
univ.BitString.tagSet: BitStringEncoder(),
|
||||
univ.OctetString.tagSet: OctetStringEncoder(),
|
||||
univ.Null.tagSet: NullEncoder(),
|
||||
univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
|
||||
univ.Enumerated.tagSet: IntegerEncoder(),
|
||||
univ.Real.tagSet: RealEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.SequenceOf.tagSet: SequenceOfEncoder(),
|
||||
univ.SetOf.tagSet: SequenceOfEncoder(),
|
||||
univ.Choice.tagSet: ChoiceEncoder(),
|
||||
# character string types
|
||||
char.UTF8String.tagSet: TextStringEncoder(),
|
||||
char.NumericString.tagSet: TextStringEncoder(),
|
||||
char.PrintableString.tagSet: TextStringEncoder(),
|
||||
char.TeletexString.tagSet: TextStringEncoder(),
|
||||
char.VideotexString.tagSet: TextStringEncoder(),
|
||||
char.IA5String.tagSet: TextStringEncoder(),
|
||||
char.GraphicString.tagSet: TextStringEncoder(),
|
||||
char.VisibleString.tagSet: TextStringEncoder(),
|
||||
char.GeneralString.tagSet: TextStringEncoder(),
|
||||
char.UniversalString.tagSet: TextStringEncoder(),
|
||||
char.BMPString.tagSet: TextStringEncoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
|
||||
useful.GeneralizedTime.tagSet: OctetStringEncoder(),
|
||||
useful.UTCTime.tagSet: OctetStringEncoder()
|
||||
}
|
||||
|
||||
|
||||
# Put in ambiguous & non-ambiguous types for faster codec lookup
|
||||
typeMap = {
|
||||
univ.Boolean.typeId: BooleanEncoder(),
|
||||
univ.Integer.typeId: IntegerEncoder(),
|
||||
univ.BitString.typeId: BitStringEncoder(),
|
||||
univ.OctetString.typeId: OctetStringEncoder(),
|
||||
univ.Null.typeId: NullEncoder(),
|
||||
univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(),
|
||||
univ.Enumerated.typeId: IntegerEncoder(),
|
||||
univ.Real.typeId: RealEncoder(),
|
||||
# Sequence & Set have same tags as SequenceOf & SetOf
|
||||
univ.Set.typeId: SetEncoder(),
|
||||
univ.SetOf.typeId: SequenceOfEncoder(),
|
||||
univ.Sequence.typeId: SequenceEncoder(),
|
||||
univ.SequenceOf.typeId: SequenceOfEncoder(),
|
||||
univ.Choice.typeId: ChoiceEncoder(),
|
||||
univ.Any.typeId: AnyEncoder(),
|
||||
# character string types
|
||||
char.UTF8String.typeId: OctetStringEncoder(),
|
||||
char.NumericString.typeId: OctetStringEncoder(),
|
||||
char.PrintableString.typeId: OctetStringEncoder(),
|
||||
char.TeletexString.typeId: OctetStringEncoder(),
|
||||
char.VideotexString.typeId: OctetStringEncoder(),
|
||||
char.IA5String.typeId: OctetStringEncoder(),
|
||||
char.GraphicString.typeId: OctetStringEncoder(),
|
||||
char.VisibleString.typeId: OctetStringEncoder(),
|
||||
char.GeneralString.typeId: OctetStringEncoder(),
|
||||
char.UniversalString.typeId: OctetStringEncoder(),
|
||||
char.BMPString.typeId: OctetStringEncoder(),
|
||||
# useful types
|
||||
useful.ObjectDescriptor.typeId: OctetStringEncoder(),
|
||||
useful.GeneralizedTime.typeId: OctetStringEncoder(),
|
||||
useful.UTCTime.typeId: OctetStringEncoder()
|
||||
}
|
||||
|
||||
|
||||
class Encoder(object):
    """Walks an ASN.1 object tree and renders it as Python built-in objects."""

    def __init__(self, tagMap, typeMap=None):
        """Initialise the encoder with its codec lookup tables.

        Parameters
        ----------
        tagMap: :py:class:`dict`
            Maps base tag sets to encoder instances (fallback lookup).
        typeMap: :py:class:`dict`
            Maps pyasn1 ``typeId`` values to encoder instances (preferred,
            more specific lookup).  Defaults to an empty mapping.
        """
        # Replaces the shared mutable default argument ({}) of the original
        # signature; the map is never mutated, so semantics are unchanged
        # for all existing callers.
        self.__tagMap = tagMap
        self.__typeMap = {} if typeMap is None else typeMap

    def __call__(self, value, **options):
        """Encode ASN.1 object *value* into Python built-in objects.

        Raises a PyAsn1Error if *value* is not an ASN.1 item or if no
        encoder is registered for its type/tag.
        """
        if not isinstance(value, base.Asn1Item):
            raise error.PyAsn1Error('value is not valid (should be an instance of an ASN.1 Item)')

        if LOG:
            debug.scope.push(type(value).__name__)
            LOG('encoder called for type %s <%s>' % (type(value).__name__, value.prettyPrint()))

        tagSet = value.tagSet

        try:
            # Prefer the specific typeId lookup (disambiguates e.g.
            # Sequence vs SequenceOf, which share tags).
            concreteEncoder = self.__typeMap[value.typeId]

        except KeyError:
            # use base type for codec lookup to recover untagged types
            baseTagSet = tag.TagSet(value.tagSet.baseTag, value.tagSet.baseTag)

            try:
                concreteEncoder = self.__tagMap[baseTagSet]

            except KeyError:
                raise error.PyAsn1Error('No encoder for %s' % (value,))

        if LOG:
            LOG('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet))

        pyObject = concreteEncoder.encode(value, self, **options)

        if LOG:
            LOG('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject)))
            debug.scope.pop()

        return pyObject
|
||||
|
||||
|
||||
#: Turns ASN.1 object into a Python built-in type object(s).
|
||||
#:
|
||||
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: walks all its components recursively and produces a Python built-in type or a tree
|
||||
#: of those.
|
||||
#:
|
||||
#: One exception is that instead of :py:class:`dict`, the :py:class:`OrderedDict`
|
||||
#: can be produced (whenever available) to preserve ordering of the components
|
||||
#: in ASN.1 SEQUENCE.
|
||||
#:
|
||||
#: Parameters
|
||||
#: ----------
|
||||
# asn1Value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
|
||||
#: pyasn1 object to encode (or a tree of them)
|
||||
#:
|
||||
#: Returns
|
||||
#: -------
|
||||
#: : :py:class:`object`
|
||||
#: Python built-in type instance (or a tree of them)
|
||||
#:
|
||||
#: Raises
|
||||
#: ------
|
||||
#: ~pyasn1.error.PyAsn1Error
|
||||
#: On encoding errors
|
||||
#:
|
||||
#: Examples
|
||||
#: --------
|
||||
#: Encode ASN.1 value object into native Python types
|
||||
#:
|
||||
#: .. code-block:: pycon
|
||||
#:
|
||||
#: >>> seq = SequenceOf(componentType=Integer())
|
||||
#: >>> seq.extend([1, 2, 3])
|
||||
#: >>> encode(seq)
|
||||
#: [1, 2, 3]
|
||||
#:
|
||||
encode = Encoder(tagMap, typeMap)
|
||||
1
Lambdas/Websocket Authorizer/pyasn1/compat/__init__.py
Normal file
1
Lambdas/Websocket Authorizer/pyasn1/compat/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# This file is necessary to make this directory a package.
|
||||
33
Lambdas/Websocket Authorizer/pyasn1/compat/binary.py
Normal file
33
Lambdas/Websocket Authorizer/pyasn1/compat/binary.py
Normal file
@@ -0,0 +1,33 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from sys import version_info
|
||||
|
||||
if version_info[0:2] < (2, 6):
|
||||
def bin(value):
|
||||
bitstring = []
|
||||
|
||||
if value > 0:
|
||||
prefix = '0b'
|
||||
elif value < 0:
|
||||
prefix = '-0b'
|
||||
value = abs(value)
|
||||
else:
|
||||
prefix = '0b0'
|
||||
|
||||
while value:
|
||||
if value & 1 == 1:
|
||||
bitstring.append('1')
|
||||
else:
|
||||
bitstring.append('0')
|
||||
|
||||
value >>= 1
|
||||
|
||||
bitstring.reverse()
|
||||
|
||||
return prefix + ''.join(bitstring)
|
||||
else:
|
||||
bin = bin
|
||||
20
Lambdas/Websocket Authorizer/pyasn1/compat/calling.py
Normal file
20
Lambdas/Websocket Authorizer/pyasn1/compat/calling.py
Normal file
@@ -0,0 +1,20 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from sys import version_info
|
||||
|
||||
__all__ = ['callable']
|
||||
|
||||
|
||||
if (2, 7) < version_info[:2] < (3, 2):
|
||||
import collections
|
||||
|
||||
def callable(x):
|
||||
return isinstance(x, collections.Callable)
|
||||
|
||||
else:
|
||||
|
||||
callable = callable
|
||||
22
Lambdas/Websocket Authorizer/pyasn1/compat/dateandtime.py
Normal file
22
Lambdas/Websocket Authorizer/pyasn1/compat/dateandtime.py
Normal file
@@ -0,0 +1,22 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import time
|
||||
from datetime import datetime
|
||||
from sys import version_info
|
||||
|
||||
__all__ = ['strptime']
|
||||
|
||||
|
||||
if version_info[:2] <= (2, 4):
|
||||
|
||||
def strptime(text, dateFormat):
|
||||
return datetime(*(time.strptime(text, dateFormat)[0:6]))
|
||||
|
||||
else:
|
||||
|
||||
def strptime(text, dateFormat):
|
||||
return datetime.strptime(text, dateFormat)
|
||||
110
Lambdas/Websocket Authorizer/pyasn1/compat/integer.py
Normal file
110
Lambdas/Websocket Authorizer/pyasn1/compat/integer.py
Normal file
@@ -0,0 +1,110 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
|
||||
try:
|
||||
import platform
|
||||
|
||||
implementation = platform.python_implementation()
|
||||
|
||||
except (ImportError, AttributeError):
|
||||
implementation = 'CPython'
|
||||
|
||||
from pyasn1.compat.octets import oct2int, null, ensureString
|
||||
|
||||
if sys.version_info[0:2] < (3, 2) or implementation != 'CPython':
|
||||
from binascii import a2b_hex, b2a_hex
|
||||
|
||||
if sys.version_info[0] > 2:
|
||||
long = int
|
||||
|
||||
def from_bytes(octets, signed=False):
|
||||
if not octets:
|
||||
return 0
|
||||
|
||||
value = long(b2a_hex(ensureString(octets)), 16)
|
||||
|
||||
if signed and oct2int(octets[0]) & 0x80:
|
||||
return value - (1 << len(octets) * 8)
|
||||
|
||||
return value
|
||||
|
||||
def to_bytes(value, signed=False, length=0):
|
||||
if value < 0:
|
||||
if signed:
|
||||
bits = bitLength(value)
|
||||
|
||||
# two's complement form
|
||||
maxValue = 1 << bits
|
||||
valueToEncode = (value + maxValue) % maxValue
|
||||
|
||||
else:
|
||||
raise OverflowError('can\'t convert negative int to unsigned')
|
||||
elif value == 0 and length == 0:
|
||||
return null
|
||||
else:
|
||||
bits = 0
|
||||
valueToEncode = value
|
||||
|
||||
hexValue = hex(valueToEncode)[2:]
|
||||
if hexValue.endswith('L'):
|
||||
hexValue = hexValue[:-1]
|
||||
|
||||
if len(hexValue) & 1:
|
||||
hexValue = '0' + hexValue
|
||||
|
||||
# padding may be needed for two's complement encoding
|
||||
if value != valueToEncode or length:
|
||||
hexLength = len(hexValue) * 4
|
||||
|
||||
padLength = max(length, bits)
|
||||
|
||||
if padLength > hexLength:
|
||||
hexValue = '00' * ((padLength - hexLength - 1) // 8 + 1) + hexValue
|
||||
elif length and hexLength - length > 7:
|
||||
raise OverflowError('int too big to convert')
|
||||
|
||||
firstOctet = int(hexValue[:2], 16)
|
||||
|
||||
if signed:
|
||||
if firstOctet & 0x80:
|
||||
if value >= 0:
|
||||
hexValue = '00' + hexValue
|
||||
elif value < 0:
|
||||
hexValue = 'ff' + hexValue
|
||||
|
||||
octets_value = a2b_hex(hexValue)
|
||||
|
||||
return octets_value
|
||||
|
||||
def bitLength(number):
|
||||
# bits in unsigned number
|
||||
hexValue = hex(abs(number))
|
||||
bits = len(hexValue) - 2
|
||||
if hexValue.endswith('L'):
|
||||
bits -= 1
|
||||
if bits & 1:
|
||||
bits += 1
|
||||
bits *= 4
|
||||
# TODO: strip lhs zeros
|
||||
return bits
|
||||
|
||||
else:
|
||||
|
||||
def from_bytes(octets, signed=False):
|
||||
return int.from_bytes(bytes(octets), 'big', signed=signed)
|
||||
|
||||
def to_bytes(value, signed=False, length=0):
|
||||
length = max(value.bit_length(), length)
|
||||
|
||||
if signed and length % 8 == 0:
|
||||
length += 1
|
||||
|
||||
return value.to_bytes(length // 8 + (length % 8 and 1 or 0), 'big', signed=signed)
|
||||
|
||||
def bitLength(number):
|
||||
return int(number).bit_length()
|
||||
46
Lambdas/Websocket Authorizer/pyasn1/compat/octets.py
Normal file
46
Lambdas/Websocket Authorizer/pyasn1/compat/octets.py
Normal file
@@ -0,0 +1,46 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from sys import version_info
|
||||
|
||||
if version_info[0] <= 2:
|
||||
int2oct = chr
|
||||
# noinspection PyPep8
|
||||
ints2octs = lambda s: ''.join([int2oct(x) for x in s])
|
||||
null = ''
|
||||
oct2int = ord
|
||||
# TODO: refactor to return a sequence of ints
|
||||
# noinspection PyPep8
|
||||
octs2ints = lambda s: [oct2int(x) for x in s]
|
||||
# noinspection PyPep8
|
||||
str2octs = lambda x: x
|
||||
# noinspection PyPep8
|
||||
octs2str = lambda x: x
|
||||
# noinspection PyPep8
|
||||
isOctetsType = lambda s: isinstance(s, str)
|
||||
# noinspection PyPep8
|
||||
isStringType = lambda s: isinstance(s, (str, unicode))
|
||||
# noinspection PyPep8
|
||||
ensureString = str
|
||||
else:
|
||||
ints2octs = bytes
|
||||
# noinspection PyPep8
|
||||
int2oct = lambda x: ints2octs((x,))
|
||||
null = ints2octs()
|
||||
# noinspection PyPep8
|
||||
oct2int = lambda x: x
|
||||
# noinspection PyPep8
|
||||
octs2ints = lambda x: x
|
||||
# noinspection PyPep8
|
||||
str2octs = lambda x: x.encode('iso-8859-1')
|
||||
# noinspection PyPep8
|
||||
octs2str = lambda x: x.decode('iso-8859-1')
|
||||
# noinspection PyPep8
|
||||
isOctetsType = lambda s: isinstance(s, bytes)
|
||||
# noinspection PyPep8
|
||||
isStringType = lambda s: isinstance(s, str)
|
||||
# noinspection PyPep8
|
||||
ensureString = bytes
|
||||
26
Lambdas/Websocket Authorizer/pyasn1/compat/string.py
Normal file
26
Lambdas/Websocket Authorizer/pyasn1/compat/string.py
Normal file
@@ -0,0 +1,26 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from sys import version_info
|
||||
|
||||
if version_info[:2] <= (2, 5):
|
||||
|
||||
def partition(string, sep):
|
||||
try:
|
||||
a, c = string.split(sep, 1)
|
||||
|
||||
except ValueError:
|
||||
a, b, c = string, '', ''
|
||||
|
||||
else:
|
||||
b = sep
|
||||
|
||||
return a, b, c
|
||||
|
||||
else:
|
||||
|
||||
def partition(string, sep):
|
||||
return string.partition(sep)
|
||||
157
Lambdas/Websocket Authorizer/pyasn1/debug.py
Normal file
157
Lambdas/Websocket Authorizer/pyasn1/debug.py
Normal file
@@ -0,0 +1,157 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from pyasn1 import __version__
|
||||
from pyasn1 import error
|
||||
from pyasn1.compat.octets import octs2ints
|
||||
|
||||
__all__ = ['Debug', 'setLogger', 'hexdump']
|
||||
|
||||
DEBUG_NONE = 0x0000
|
||||
DEBUG_ENCODER = 0x0001
|
||||
DEBUG_DECODER = 0x0002
|
||||
DEBUG_ALL = 0xffff
|
||||
|
||||
FLAG_MAP = {
|
||||
'none': DEBUG_NONE,
|
||||
'encoder': DEBUG_ENCODER,
|
||||
'decoder': DEBUG_DECODER,
|
||||
'all': DEBUG_ALL
|
||||
}
|
||||
|
||||
LOGGEE_MAP = {}
|
||||
|
||||
|
||||
class Printer(object):
|
||||
# noinspection PyShadowingNames
|
||||
def __init__(self, logger=None, handler=None, formatter=None):
|
||||
if logger is None:
|
||||
logger = logging.getLogger('pyasn1')
|
||||
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
if handler is None:
|
||||
handler = logging.StreamHandler()
|
||||
|
||||
if formatter is None:
|
||||
formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
|
||||
|
||||
handler.setFormatter(formatter)
|
||||
handler.setLevel(logging.DEBUG)
|
||||
logger.addHandler(handler)
|
||||
|
||||
self.__logger = logger
|
||||
|
||||
def __call__(self, msg):
|
||||
self.__logger.debug(msg)
|
||||
|
||||
def __str__(self):
|
||||
return '<python logging>'
|
||||
|
||||
|
||||
if hasattr(logging, 'NullHandler'):
|
||||
NullHandler = logging.NullHandler
|
||||
|
||||
else:
|
||||
# Python 2.6 and older
|
||||
class NullHandler(logging.Handler):
|
||||
def emit(self, record):
|
||||
pass
|
||||
|
||||
|
||||
class Debug(object):
|
||||
defaultPrinter = Printer()
|
||||
|
||||
def __init__(self, *flags, **options):
|
||||
self._flags = DEBUG_NONE
|
||||
|
||||
if 'loggerName' in options:
|
||||
# route our logs to parent logger
|
||||
self._printer = Printer(
|
||||
logger=logging.getLogger(options['loggerName']),
|
||||
handler=NullHandler()
|
||||
)
|
||||
|
||||
elif 'printer' in options:
|
||||
self._printer = options.get('printer')
|
||||
|
||||
else:
|
||||
self._printer = self.defaultPrinter
|
||||
|
||||
self._printer('running pyasn1 %s, debug flags %s' % (__version__, ', '.join(flags)))
|
||||
|
||||
for flag in flags:
|
||||
inverse = flag and flag[0] in ('!', '~')
|
||||
if inverse:
|
||||
flag = flag[1:]
|
||||
try:
|
||||
if inverse:
|
||||
self._flags &= ~FLAG_MAP[flag]
|
||||
else:
|
||||
self._flags |= FLAG_MAP[flag]
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('bad debug flag %s' % flag)
|
||||
|
||||
self._printer("debug category '%s' %s" % (flag, inverse and 'disabled' or 'enabled'))
|
||||
|
||||
def __str__(self):
|
||||
return 'logger %s, flags %x' % (self._printer, self._flags)
|
||||
|
||||
def __call__(self, msg):
|
||||
self._printer(msg)
|
||||
|
||||
def __and__(self, flag):
|
||||
return self._flags & flag
|
||||
|
||||
def __rand__(self, flag):
|
||||
return flag & self._flags
|
||||
|
||||
_LOG = DEBUG_NONE
|
||||
|
||||
|
||||
def setLogger(userLogger):
    """Install *userLogger* (a :class:`Debug` instance) as the active logger.

    Passing a falsy value disables debug logging.  All previously
    registered logging clients (see :func:`registerLoggee`) have their
    module-level logger attribute updated in place.
    """
    global _LOG

    if userLogger:
        _LOG = userLogger
    else:
        _LOG = DEBUG_NONE

    # Update registered logging clients
    for module, (name, flags) in LOGGEE_MAP.items():
        # A client only receives the logger when its interest flags
        # intersect with the logger's enabled flags.
        setattr(module, name, _LOG & flags and _LOG or DEBUG_NONE)
|
||||
|
||||
|
||||
def registerLoggee(module, name='LOG', flags=DEBUG_NONE):
    """Register module-level attribute *name* of *module* as a logging client.

    Returns the currently active logger (or ``DEBUG_NONE``) so the caller
    can bind it immediately; later :func:`setLogger` calls keep the
    registered attribute in sync.
    """
    LOGGEE_MAP[sys.modules[module]] = name, flags
    setLogger(_LOG)
    return _LOG
|
||||
|
||||
|
||||
def hexdump(octets):
    """Render *octets* as two-digit hex bytes, 16 per line, each line
    prefixed with a five-digit decimal offset label."""
    rendered = []
    for offset, octet in enumerate(octs2ints(octets)):
        prefix = '\n%.5d: ' % offset if offset % 16 == 0 else ''
        rendered.append('%s%.2X' % (prefix, octet))
    return ' '.join(rendered)
|
||||
|
||||
|
||||
class Scope(object):
    """A stack of scope tokens rendered as a dotted path (for debug logs)."""

    def __init__(self):
        self._list = []

    def __str__(self):
        # Render the stack as a dotted path, e.g. 'Sequence.Integer'.
        return '.'.join(self._list)

    def push(self, token):
        """Enter a nested scope named *token*."""
        self._list.append(token)

    def pop(self):
        """Leave the innermost scope and return its token."""
        return self._list.pop()
|
||||
|
||||
|
||||
scope = Scope()
|
||||
75
Lambdas/Websocket Authorizer/pyasn1/error.py
Normal file
75
Lambdas/Websocket Authorizer/pyasn1/error.py
Normal file
@@ -0,0 +1,75 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
|
||||
|
||||
class PyAsn1Error(Exception):
|
||||
"""Base pyasn1 exception
|
||||
|
||||
`PyAsn1Error` is the base exception class (based on
|
||||
:class:`Exception`) that represents all possible ASN.1 related
|
||||
errors.
|
||||
"""
|
||||
|
||||
|
||||
class ValueConstraintError(PyAsn1Error):
|
||||
"""ASN.1 type constraints violation exception
|
||||
|
||||
The `ValueConstraintError` exception indicates an ASN.1 value
|
||||
constraint violation.
|
||||
|
||||
It might happen on value object instantiation (for scalar types) or on
|
||||
serialization (for constructed types).
|
||||
"""
|
||||
|
||||
|
||||
class SubstrateUnderrunError(PyAsn1Error):
|
||||
"""ASN.1 data structure deserialization error
|
||||
|
||||
The `SubstrateUnderrunError` exception indicates insufficient serialised
|
||||
data on input of a de-serialization codec.
|
||||
"""
|
||||
|
||||
|
||||
class PyAsn1UnicodeError(PyAsn1Error, UnicodeError):
    """Unicode text processing error

    The `PyAsn1UnicodeError` exception is a base class for errors relating to
    unicode text de/serialization.

    Apart from inheriting from :class:`PyAsn1Error`, it also inherits from
    :class:`UnicodeError` to help the caller catching unicode-related errors.
    """
    def __init__(self, message, unicode_error=None):
        # Preserve the original UnicodeError's args (encoding, object,
        # start, end, reason) so callers can still introspect them on
        # the re-raised pyasn1 exception.
        if isinstance(unicode_error, UnicodeError):
            UnicodeError.__init__(self, *unicode_error.args)
        PyAsn1Error.__init__(self, message)
|
||||
|
||||
|
||||
class PyAsn1UnicodeDecodeError(PyAsn1UnicodeError, UnicodeDecodeError):
|
||||
"""Unicode text decoding error
|
||||
|
||||
The `PyAsn1UnicodeDecodeError` exception represents a failure to
|
||||
deserialize unicode text.
|
||||
|
||||
Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits
|
||||
from :class:`UnicodeDecodeError` to help the caller catching unicode-related
|
||||
errors.
|
||||
"""
|
||||
|
||||
|
||||
class PyAsn1UnicodeEncodeError(PyAsn1UnicodeError, UnicodeEncodeError):
|
||||
"""Unicode text encoding error
|
||||
|
||||
The `PyAsn1UnicodeEncodeError` exception represents a failure to
|
||||
serialize unicode text.
|
||||
|
||||
Apart from inheriting from :class:`PyAsn1UnicodeError`, it also inherits
|
||||
from :class:`UnicodeEncodeError` to help the caller catching
|
||||
unicode-related errors.
|
||||
"""
|
||||
|
||||
|
||||
1
Lambdas/Websocket Authorizer/pyasn1/type/__init__.py
Normal file
1
Lambdas/Websocket Authorizer/pyasn1/type/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# This file is necessary to make this directory a package.
|
||||
707
Lambdas/Websocket Authorizer/pyasn1/type/base.py
Normal file
707
Lambdas/Websocket Authorizer/pyasn1/type/base.py
Normal file
@@ -0,0 +1,707 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.compat import calling
|
||||
from pyasn1.type import constraint
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import tagmap
|
||||
|
||||
__all__ = ['Asn1Item', 'Asn1Type', 'SimpleAsn1Type',
|
||||
'ConstructedAsn1Type']
|
||||
|
||||
|
||||
class Asn1Item(object):
    """Common ancestor of all pyasn1 type and value objects."""

    @classmethod
    def getTypeId(cls, increment=1):
        """Return a fresh type ID, advancing the shared counter by *increment*.

        The counter is stored on :class:`Asn1Item` itself (not on the
        subclass), so IDs are unique across the whole type hierarchy.
        """
        counter = getattr(Asn1Item, '_typeCounter', 0) + increment
        Asn1Item._typeCounter = counter
        return counter
||||
class Asn1Type(Asn1Item):
    """Base class for all classes representing ASN.1 types.

    In the user code, |ASN.1| class is normally used only for telling
    ASN.1 objects from others.

    Note
    ----
    For as long as ASN.1 is concerned, a way to compare ASN.1 types
    is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
    """
    #: Set or return a :py:class:`~pyasn1.type.tag.TagSet` object representing
    #: ASN.1 tag(s) associated with |ASN.1| type.
    tagSet = tag.TagSet()

    #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
    #: object imposing constraints on initialization values.
    subtypeSpec = constraint.ConstraintsIntersection()

    # Disambiguation ASN.1 types identification
    typeId = None

    def __init__(self, **kwargs):
        # Defaults first, then caller-supplied overrides; everything listed
        # here becomes a read-only instance attribute.
        readOnly = dict(tagSet=self.tagSet, subtypeSpec=self.subtypeSpec)
        readOnly.update(kwargs)

        self.__dict__.update(readOnly)

        self._readOnly = readOnly

    def __setattr__(self, name, value):
        # Underscore-prefixed names bypass the read-only guard; anything
        # fixed at construction time may not be rebound afterwards.
        if name[0] != '_' and name in self._readOnly:
            raise error.PyAsn1Error('read-only instance attribute "%s"' % name)

        self.__dict__[name] = value

    def __str__(self):
        return self.prettyPrint()

    @property
    def readOnly(self):
        return self._readOnly

    @property
    def effectiveTagSet(self):
        """For |ASN.1| type is equivalent to *tagSet*."""
        return self.tagSet  # used by untagged types

    @property
    def tagMap(self):
        """Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping
        ASN.1 tags to ASN.1 objects within callee object.
        """
        return tagmap.TagMap({self.tagSet: self})

    def isSameTypeWith(self, other, matchTags=True, matchConstraints=True):
        """Examine |ASN.1| type for equality with other ASN.1 type.

        ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
        (:py:mod:`~pyasn1.type.constraint`) are examined; Python class
        inheritance relationship is NOT considered.

        Parameters
        ----------
        other: a pyasn1 type object
            Class instance representing ASN.1 type.

        Returns
        -------
        : :class:`bool`
            :obj:`True` if *other* is |ASN.1| type, :obj:`False` otherwise.
        """
        if self is other:
            return True

        return ((not matchTags or self.tagSet == other.tagSet) and
                (not matchConstraints or self.subtypeSpec == other.subtypeSpec))

    def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True):
        """Examine |ASN.1| type for subtype relationship with other ASN.1 type.

        ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints
        (:py:mod:`~pyasn1.type.constraint`) are examined; Python class
        inheritance relationship is NOT considered.

        Parameters
        ----------
        other: a pyasn1 type object
            Class instance representing ASN.1 type.

        Returns
        -------
        : :class:`bool`
            :obj:`True` if *other* is a subtype of |ASN.1| type,
            :obj:`False` otherwise.
        """
        # NOTE(review): Python operator precedence parses this as
        # `not matchTags OR (tag-check AND constraint-check)`, i.e. with
        # matchTags=False the constraint check is skipped entirely.
        # Preserved verbatim for backward compatibility with upstream pyasn1.
        return (not matchTags or
                (self.tagSet.isSuperTagSetOf(other.tagSet)) and
                (not matchConstraints or self.subtypeSpec.isSuperTypeOf(other.subtypeSpec)))

    @staticmethod
    def isNoValue(*values):
        # True when every supplied value is the noValue sentinel.
        return all(value is noValue for value in values)

    def prettyPrint(self, scope=0):
        raise NotImplementedError()

    # backward compatibility

    def getTagSet(self):
        return self.tagSet

    def getEffectiveTagSet(self):
        return self.effectiveTagSet

    def getTagMap(self):
        return self.tagMap

    def getSubtypeSpec(self):
        return self.subtypeSpec

    # backward compatibility
    def hasValue(self):
        return self.isValue

# Backward compatibility
Asn1ItemBase = Asn1Type
||||
class NoValue(object):
    """Create a singleton instance of NoValue class.

    The *NoValue* sentinel object represents an instance of ASN.1 schema
    object as opposed to ASN.1 value object.

    Only ASN.1 schema-related operations can be performed on ASN.1
    schema objects.

    Warning
    -------
    Any operation attempted on the *noValue* object will raise the
    *PyAsn1Error* exception.
    """
    # Dunders that keep their normal semantics so the sentinel itself stays
    # introspectable, printable and picklable.
    skipMethods = set(
        ('__slots__',
         # attributes
         '__getattribute__',
         '__getattr__',
         '__setattr__',
         '__delattr__',
         # class instance
         '__class__',
         '__init__',
         '__del__',
         '__new__',
         '__repr__',
         '__qualname__',
         '__objclass__',
         'im_class',
         '__sizeof__',
         # pickle protocol
         '__reduce__',
         '__reduce_ex__',
         '__getnewargs__',
         '__getinitargs__',
         '__getstate__',
         '__setstate__')
    )

    _instance = None

    def __new__(cls):
        if cls._instance is None:
            # On first instantiation, replace every remaining "magic" method
            # found on the common built-in types with a stub that raises
            # PyAsn1Error, so any use of the sentinel as a value fails loudly.
            def makePlug(name):
                def plug(self, *args, **kw):
                    raise error.PyAsn1Error('Attempted "%s" operation on ASN.1 schema object' % name)
                return plug

            opNames = set(
                name
                for typ in (str, int, list, dict)
                for name in dir(typ)
                if (name not in cls.skipMethods and
                    name.startswith('__') and
                    name.endswith('__') and
                    calling.callable(getattr(typ, name))))

            for name in opNames:
                setattr(cls, name, makePlug(name))

            cls._instance = object.__new__(cls)

        return cls._instance

    def __getattr__(self, attr):
        if attr in self.skipMethods:
            raise AttributeError('Attribute %s not present' % attr)

        raise error.PyAsn1Error('Attempted "%s" operation on ASN.1 schema object' % attr)

    def __repr__(self):
        return '<%s object>' % self.__class__.__name__


noValue = NoValue()
||||
class SimpleAsn1Type(Asn1Type):
    """Base class for all simple classes representing ASN.1 types.

    ASN.1 distinguishes types by their ability to hold other objects.
    Scalar types are known as *simple* in ASN.1.

    In the user code, |ASN.1| class is normally used only for telling
    ASN.1 objects from others.

    Note
    ----
    For as long as ASN.1 is concerned, a way to compare ASN.1 types
    is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
    """
    #: Default payload value
    defaultValue = noValue

    def __init__(self, value=noValue, **kwargs):
        Asn1Type.__init__(self, **kwargs)

        if value is noValue:
            value = self.defaultValue

        else:
            value = self.prettyIn(value)

            try:
                self.subtypeSpec(value)

            except error.PyAsn1Error:
                # Re-raise same exception type with the offending class name
                # appended for easier diagnosis.
                exType, exValue, exTb = sys.exc_info()
                raise exType('%s at %s' % (exValue, self.__class__.__name__))

        self._value = value

    def __repr__(self):
        text = '%s %s object' % (
            self.__class__.__name__, 'value' if self.isValue else 'schema')

        for attr, value in self.readOnly.items():
            if value:
                text += ', %s %s' % (attr, value)

        if self.isValue:
            value = self.prettyPrint()
            if len(value) > 32:
                # Abbreviate long payloads: head ... tail
                value = value[:16] + '...' + value[-16:]
            text += ', payload [%s]' % value

        return '<%s>' % text

    def __eq__(self, other):
        if self is other:
            return True
        return self._value == other

    def __ne__(self, other):
        return self._value != other

    def __lt__(self, other):
        return self._value < other

    def __le__(self, other):
        return self._value <= other

    def __gt__(self, other):
        return self._value > other

    def __ge__(self, other):
        return self._value >= other

    if sys.version_info[0] <= 2:
        def __nonzero__(self):
            return bool(self._value)
    else:
        def __bool__(self):
            return bool(self._value)

    def __hash__(self):
        return hash(self._value)

    @property
    def isValue(self):
        """Indicate that |ASN.1| object represents ASN.1 value.

        If *isValue* is :obj:`False` then this object represents just
        ASN.1 schema.  If :obj:`True`, the object can additionally be used
        like the Python built-in it duck-types (e.g. :class:`int`,
        :class:`str`, :class:`dict` etc.).

        Returns
        -------
        : :class:`bool`
            :obj:`False` if object represents just ASN.1 schema,
            :obj:`True` if it also carries a value.

        Note
        ----
        PyASN1 schema objects can only participate in schema-related
        operations (defining/testing data structure, guiding codecs);
        value objects additionally behave like regular Python objects.
        """
        return self._value is not noValue

    def clone(self, value=noValue, **kwargs):
        """Create a modified version of |ASN.1| schema or value object.

        Accepts the same arguments as the |ASN.1| class on instantiation,
        all optional; supplied arguments take precedence over those used
        to instantiate `self`.

        Note
        ----
        |ASN.1| objects are immutable: with no arguments at all, `self`
        is returned unchanged rather than copied.
        """
        if value is noValue:
            if not kwargs:
                return self

            value = self._value

        initializers = self.readOnly.copy()
        initializers.update(kwargs)

        return self.__class__(value, **initializers)

    def subtype(self, value=noValue, **kwargs):
        """Create a specialization of |ASN.1| schema or value object.

        The subtype relationship between ASN.1 types has no correlation
        with subtype relationship between Python types: an ASN.1 type is
        mainly identified by its tags (:py:class:`~pyasn1.type.tag.TagSet`)
        and constraints
        (:py:class:`~pyasn1.type.constraint.ConstraintsIntersection`).

        Accepts the same arguments as the |ASN.1| class on instantiation,
        all optional.

        Other Parameters
        ----------------
        implicitTag: :py:class:`~pyasn1.type.tag.Tag`
            Implicitly apply given ASN.1 tag to `self`'s tag set and use
            the result as the new object's tag(s).

        explicitTag: :py:class:`~pyasn1.type.tag.Tag`
            Explicitly apply given ASN.1 tag to `self`'s tag set and use
            the result as the new object's tag(s).

        subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
            Add ASN.1 constraints object to `self`'s and use the result as
            the new object's constraints.

        Returns
        -------
        :
            new instance of |ASN.1| schema or value object

        Note
        ----
        |ASN.1| objects are immutable: with no arguments at all, `self`
        is returned unchanged rather than copied.
        """
        if value is noValue:
            if not kwargs:
                return self

            value = self._value

        initializers = self.readOnly.copy()

        implicitTag = kwargs.pop('implicitTag', None)
        if implicitTag is not None:
            initializers['tagSet'] = self.tagSet.tagImplicitly(implicitTag)

        explicitTag = kwargs.pop('explicitTag', None)
        if explicitTag is not None:
            initializers['tagSet'] = self.tagSet.tagExplicitly(explicitTag)

        # Remaining keyword arguments are merged additively (e.g. extra
        # subtypeSpec constraints are appended, not replaced).
        for arg, option in kwargs.items():
            initializers[arg] += option

        return self.__class__(value, **initializers)

    def prettyIn(self, value):
        return value

    def prettyOut(self, value):
        return str(value)

    def prettyPrint(self, scope=0):
        return self.prettyOut(self._value)

    def prettyPrintType(self, scope=0):
        return '%s -> %s' % (self.tagSet, self.__class__.__name__)

# Backward compatibility
AbstractSimpleAsn1Item = SimpleAsn1Type
|
||||
#
|
||||
# Constructed types:
|
||||
# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
|
||||
# * ASN1 types and values are represened by Python class instances
|
||||
# * Value initialization is made for defaulted components only
|
||||
# * Primary method of component addressing is by-position. Data model for base
|
||||
# type is Python sequence. Additional type-specific addressing methods
|
||||
# may be implemented for particular types.
|
||||
# * SequenceOf and SetOf types do not implement any additional methods
|
||||
# * Sequence, Set and Choice types also implement by-identifier addressing
|
||||
# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing
|
||||
# * Sequence and Set types may include optional and defaulted
|
||||
# components
|
||||
# * Constructed types hold a reference to component types used for value
|
||||
# verification and ordering.
|
||||
# * Component type is a scalar type for SequenceOf/SetOf types and a list
|
||||
# of types for Sequence/Set/Choice.
|
||||
#
|
||||
|
||||
|
||||
class ConstructedAsn1Type(Asn1Type):
    """Base class for all constructed classes representing ASN.1 types.

    ASN.1 distinguishes types by their ability to hold other objects.
    Those "nesting" types are known as *constructed* in ASN.1.

    In the user code, |ASN.1| class is normally used only for telling
    ASN.1 objects from others.

    Note
    ----
    For as long as ASN.1 is concerned, a way to compare ASN.1 types
    is to use :meth:`isSameTypeWith` and :meth:`isSuperTypeOf` methods.
    """

    #: If :obj:`True`, requires exact component type matching,
    #: otherwise subtype relation is only enforced
    strictConstraints = False

    componentType = None

    # backward compatibility, unused
    sizeSpec = constraint.ConstraintsIntersection()

    def __init__(self, **kwargs):
        readOnly = dict(componentType=self.componentType,
                        # backward compatibility, unused
                        sizeSpec=self.sizeSpec)

        # backward compatibility: fold a legacy sizeSpec into subtypeSpec
        kwargs = self._moveSizeSpec(**kwargs)

        readOnly.update(kwargs)

        Asn1Type.__init__(self, **readOnly)

    def _moveSizeSpec(self, **kwargs):
        # backward compatibility, unused
        sizeSpec = kwargs.pop('sizeSpec', self.sizeSpec)
        if sizeSpec:
            subtypeSpec = kwargs.pop('subtypeSpec', self.subtypeSpec)
            # NOTE(review): these branches look inverted (a non-empty
            # subtypeSpec is *replaced* by sizeSpec while an empty one is
            # augmented); preserved verbatim for upstream compatibility.
            if subtypeSpec:
                subtypeSpec = sizeSpec

            else:
                subtypeSpec += sizeSpec

            kwargs['subtypeSpec'] = subtypeSpec

        return kwargs

    def __repr__(self):
        text = '%s %s object' % (
            self.__class__.__name__, 'value' if self.isValue else 'schema'
        )

        for attr, value in self.readOnly.items():
            if value is not noValue:
                text += ', %s=%r' % (attr, value)

        if self.isValue and self.components:
            text += ', payload [%s]' % ', '.join(
                [repr(component) for component in self.components])

        return '<%s>' % text

    def __eq__(self, other):
        if self is other:
            return True
        return self.components == other

    def __ne__(self, other):
        return self.components != other

    def __lt__(self, other):
        return self.components < other

    def __le__(self, other):
        return self.components <= other

    def __gt__(self, other):
        return self.components > other

    def __ge__(self, other):
        return self.components >= other

    if sys.version_info[0] <= 2:
        def __nonzero__(self):
            return bool(self.components)
    else:
        def __bool__(self):
            return bool(self.components)

    @property
    def components(self):
        raise error.PyAsn1Error('Method not implemented')

    def _cloneComponentValues(self, myClone, cloneValueFlag):
        pass

    def clone(self, **kwargs):
        """Create a modified version of |ASN.1| schema object.

        Accepts the same arguments as the |ASN.1| class on instantiation,
        all optional; supplied arguments take precedence over the ones
        used to instantiate `self`.  Component values of `self` are only
        copied over when `cloneValueFlag` is true.

        Returns
        -------
        :
            new instance of |ASN.1| type/value

        Note
        ----
        |ASN.1| objects are mutable: a new object is created and returned
        even when no arguments are supplied.
        """
        cloneValueFlag = kwargs.pop('cloneValueFlag', False)

        initializers = self.readOnly.copy()
        initializers.update(kwargs)

        myClone = self.__class__(**initializers)

        if cloneValueFlag:
            self._cloneComponentValues(myClone, cloneValueFlag)

        return myClone

    def subtype(self, **kwargs):
        """Create a specialization of |ASN.1| schema object.

        Accepts the same arguments as the |ASN.1| class on instantiation,
        all optional.

        Other Parameters
        ----------------
        implicitTag: :py:class:`~pyasn1.type.tag.Tag`
            Implicitly apply given ASN.1 tag to `self`'s tag set and use
            the result as the new object's tag(s).

        explicitTag: :py:class:`~pyasn1.type.tag.Tag`
            Explicitly apply given ASN.1 tag to `self`'s tag set and use
            the result as the new object's tag(s).

        subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
            Add ASN.1 constraints object to `self`'s and use the result as
            the new object's constraints.

        Returns
        -------
        :
            new instance of |ASN.1| type/value

        Note
        ----
        |ASN.1| objects are mutable: a new object is created and returned
        even when no arguments are supplied.
        """
        initializers = self.readOnly.copy()

        cloneValueFlag = kwargs.pop('cloneValueFlag', False)

        implicitTag = kwargs.pop('implicitTag', None)
        if implicitTag is not None:
            initializers['tagSet'] = self.tagSet.tagImplicitly(implicitTag)

        explicitTag = kwargs.pop('explicitTag', None)
        if explicitTag is not None:
            initializers['tagSet'] = self.tagSet.tagExplicitly(explicitTag)

        # Remaining keyword arguments are merged additively.
        for arg, option in kwargs.items():
            initializers[arg] += option

        myClone = self.__class__(**initializers)

        if cloneValueFlag:
            self._cloneComponentValues(myClone, cloneValueFlag)

        return myClone

    def getComponentByPosition(self, idx):
        raise error.PyAsn1Error('Method not implemented')

    def setComponentByPosition(self, idx, value, verifyConstraints=True):
        raise error.PyAsn1Error('Method not implemented')

    def setComponents(self, *args, **kwargs):
        for idx, value in enumerate(args):
            self[idx] = value
        for k in kwargs:
            self[k] = kwargs[k]
        return self

    # backward compatibility

    def setDefaultComponents(self):
        pass

    def getComponentType(self):
        return self.componentType

    # backward compatibility, unused
    def verifySizeSpec(self):
        self.subtypeSpec(self)


# Backward compatibility
AbstractConstructedAsn1Item = ConstructedAsn1Type
335
Lambdas/Websocket Authorizer/pyasn1/type/char.py
Normal file
335
Lambdas/Websocket Authorizer/pyasn1/type/char.py
Normal file
@@ -0,0 +1,335 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['NumericString', 'PrintableString', 'TeletexString', 'T61String', 'VideotexString',
|
||||
'IA5String', 'GraphicString', 'VisibleString', 'ISO646String',
|
||||
'GeneralString', 'UniversalString', 'BMPString', 'UTF8String']
|
||||
|
||||
NoValue = univ.NoValue
|
||||
noValue = univ.noValue
|
||||
|
||||
|
||||
class AbstractCharacterString(univ.OctetString):
    """Creates |ASN.1| schema or value object.

    |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`,
    its objects are immutable and duck-type Python 2 :class:`str` or Python 3
    :class:`bytes`. When used in octet-stream context, |ASN.1| type assumes
    "|encoding|" encoding.

    Keyword Args
    ------------
    value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
        :class:`unicode` object (Python 2) or :class:`str` (Python 3),
        alternatively :class:`str` (Python 2) or :class:`bytes` (Python 3)
        representing octet-stream of serialised unicode string
        (note `encoding` parameter) or |ASN.1| class instance.
        If `value` is not given, schema object will be created.

    tagSet: :py:class:`~pyasn1.type.tag.TagSet`
        Object representing non-default ASN.1 tag(s)

    subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
        Object representing non-default ASN.1 subtype constraint(s). Constraints
        verification for |ASN.1| type occurs automatically on object
        instantiation.

    encoding: :py:class:`str`
        Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
        :class:`str` (Python 3) the payload when |ASN.1| object is used
        in octet-stream context.

    Raises
    ------
    ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error
        On constraint violation or bad initializer.
    """

    # The two branches below install version-appropriate text/bytes
    # conversion methods: Py2 text is `unicode`, Py3 text is `str`.
    if sys.version_info[0] <= 2:
        def __str__(self):
            try:
                # `str` is Py2 text representation
                return self._value.encode(self.encoding)

            except UnicodeEncodeError:
                exc = sys.exc_info()[1]
                raise error.PyAsn1UnicodeEncodeError(
                    "Can't encode string '%s' with codec "
                    "%s" % (self._value, self.encoding), exc
                )

        def __unicode__(self):
            return unicode(self._value)

        def prettyIn(self, value):
            # Normalise any supported initializer into a unicode string.
            try:
                if isinstance(value, unicode):
                    return value
                elif isinstance(value, str):
                    return value.decode(self.encoding)
                elif isinstance(value, (tuple, list)):
                    return self.prettyIn(''.join([chr(x) for x in value]))
                elif isinstance(value, univ.OctetString):
                    return value.asOctets().decode(self.encoding)
                else:
                    return unicode(value)

            except (UnicodeDecodeError, LookupError):
                exc = sys.exc_info()[1]
                raise error.PyAsn1UnicodeDecodeError(
                    "Can't decode string '%s' with codec "
                    "%s" % (value, self.encoding), exc
                )

        def asOctets(self, padding=True):
            return str(self)

        def asNumbers(self, padding=True):
            return tuple([ord(x) for x in str(self)])

    else:
        def __str__(self):
            # `unicode` is Py3 text representation
            return str(self._value)

        def __bytes__(self):
            # Serialise the text payload using this type's codec.
            try:
                return self._value.encode(self.encoding)
            except UnicodeEncodeError:
                exc = sys.exc_info()[1]
                raise error.PyAsn1UnicodeEncodeError(
                    "Can't encode string '%s' with codec "
                    "%s" % (self._value, self.encoding), exc
                )

        def prettyIn(self, value):
            # Normalise any supported initializer into a str.
            try:
                if isinstance(value, str):
                    return value
                elif isinstance(value, bytes):
                    return value.decode(self.encoding)
                elif isinstance(value, (tuple, list)):
                    return self.prettyIn(bytes(value))
                elif isinstance(value, univ.OctetString):
                    return value.asOctets().decode(self.encoding)
                else:
                    return str(value)

            except (UnicodeDecodeError, LookupError):
                exc = sys.exc_info()[1]
                raise error.PyAsn1UnicodeDecodeError(
                    "Can't decode string '%s' with codec "
                    "%s" % (value, self.encoding), exc
                )

        def asOctets(self, padding=True):
            return bytes(self)

        def asNumbers(self, padding=True):
            return tuple(bytes(self))

    #
    # See OctetString.prettyPrint() for the explanation
    #

    def prettyOut(self, value):
        return value

    def prettyPrint(self, scope=0):
        # first see if subclass has its own .prettyOut()
        value = self.prettyOut(self._value)

        if value is not self._value:
            return value

        return AbstractCharacterString.__str__(self)

    def __reversed__(self):
        return reversed(self._value)
||||
class NumericString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type (UNIVERSAL 18).
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)
    )
    # presumably chosen because NumericString is an ASCII subset -- TODO confirm
    encoding = 'us-ascii'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
||||
class PrintableString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type (UNIVERSAL 19).
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19)
    )
    encoding = 'us-ascii'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
||||
class TeletexString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type (UNIVERSAL 20).
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)
    )
    # NOTE(review): Latin-1 is a common approximation of the T.61 repertoire
    encoding = 'iso-8859-1'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
||||
class T61String(TeletexString):
    # T61String is an alias of TeletexString; it only carries its own typeId.
    __doc__ = TeletexString.__doc__

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
||||
class VideotexString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type (UNIVERSAL 21).
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21)
    )
    encoding = 'iso-8859-1'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
||||
class IA5String(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type (UNIVERSAL 22).
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22)
    )
    encoding = 'us-ascii'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
||||
class GraphicString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type (UNIVERSAL 25).
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25)
    )
    encoding = 'iso-8859-1'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
||||
class VisibleString(AbstractCharacterString):
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type (UNIVERSAL 26).
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)
    )
    encoding = 'us-ascii'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
||||
class ISO646String(VisibleString):
    # ISO646String is an alias of VisibleString: it inherits the same tag
    # and codec and adds nothing but its own type identity.
    __doc__ = VisibleString.__doc__

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
class GeneralString(AbstractCharacterString):
    # ASN.1 GeneralString, universal tag number 27.
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
    )

    # Codec used to (de)serialise the character payload.
    encoding = 'iso-8859-1'

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class UniversalString(AbstractCharacterString):
    # ASN.1 UniversalString (4-byte-per-character UCS-4 text),
    # universal tag number 28.
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28)
    )

    # Codec used to (de)serialise the character payload.
    encoding = "utf-32-be"

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class BMPString(AbstractCharacterString):
    # ASN.1 BMPString (2-byte-per-character Basic Multilingual Plane text),
    # universal tag number 30.
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
    )

    # Codec used to (de)serialise the character payload.
    encoding = "utf-16-be"

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
|
||||
|
||||
class UTF8String(AbstractCharacterString):
    # ASN.1 UTF8String, universal tag number 12.
    __doc__ = AbstractCharacterString.__doc__

    #: Set (on class, not on instance) or return a
    #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
    #: associated with |ASN.1| type.
    tagSet = AbstractCharacterString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
    )

    # Codec used to (de)serialise the character payload.
    encoding = "utf-8"

    # Optimization for faster codec lookup
    typeId = AbstractCharacterString.getTypeId()
|
||||
702
Lambdas/Websocket Authorizer/pyasn1/type/constraint.py
Normal file
702
Lambdas/Websocket Authorizer/pyasn1/type/constraint.py
Normal file
@@ -0,0 +1,702 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
# Original concept and code by Mike C. Fletcher.
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1.type import error
|
||||
|
||||
__all__ = ['SingleValueConstraint', 'ContainedSubtypeConstraint',
|
||||
'ValueRangeConstraint', 'ValueSizeConstraint',
|
||||
'PermittedAlphabetConstraint', 'InnerTypeConstraint',
|
||||
'ConstraintsExclusion', 'ConstraintsIntersection',
|
||||
'ConstraintsUnion']
|
||||
|
||||
|
||||
class AbstractConstraint(object):
    """Base class for all ASN.1 subtype constraint objects.

    A constraint instance is callable: invoking it with a candidate value
    either returns quietly (constraint satisfied, or constraint is empty)
    or raises :exc:`error.ValueConstraintError`.  Subclasses override
    `_testValue` and, when they need auxiliary lookup structures,
    `_setValues`.
    """

    def __init__(self, *values):
        self._valueMap = set()
        self._setValues(values)
        # Hash is precomputed from class name + raw constraint values.
        self.__hash = hash((self.__class__.__name__, self._values))

    def __call__(self, value, idx=None):
        # An empty constraint matches anything.
        if not self._values:
            return

        try:
            self._testValue(value, idx)

        except error.ValueConstraintError as exc:
            raise error.ValueConstraintError(
                '%s failed at: %r' % (self, exc)
            )

    def __repr__(self):
        description = '%s object' % self.__class__.__name__

        if self._values:
            description += ', consts %s' % ', '.join(
                repr(constraint) for constraint in self._values)

        return '<%s>' % description

    # Rich comparisons and truth testing delegate to the raw value tuple.

    def __eq__(self, other):
        if self is other:
            return True
        return self._values == other

    def __ne__(self, other):
        return self._values != other

    def __lt__(self, other):
        return self._values < other

    def __le__(self, other):
        return self._values <= other

    def __gt__(self, other):
        return self._values > other

    def __ge__(self, other):
        return self._values >= other

    if sys.version_info[0] <= 2:
        def __nonzero__(self):
            return bool(self._values)
    else:
        def __bool__(self):
            return bool(self._values)

    def __hash__(self):
        return self.__hash

    def _setValues(self, values):
        self._values = values

    def _testValue(self, value, idx):
        # Default implementation rejects everything; subclasses override.
        raise error.ValueConstraintError(value)

    # Constraints derivation logic

    def getValueMap(self):
        return self._valueMap

    def isSuperTypeOf(self, otherConstraint):
        # TODO: fix possible comparison of set vs scalars here
        if otherConstraint is self:
            return True
        if not self._values:
            return True
        if otherConstraint == self:
            return True
        return self in otherConstraint.getValueMap()

    def isSubTypeOf(self, otherConstraint):
        if otherConstraint is self:
            return True
        if not self:
            return True
        if otherConstraint == self:
            return True
        return otherConstraint in self._valueMap
|
||||
|
||||
|
||||
class SingleValueConstraint(AbstractConstraint):
    """Restrict a value to an explicitly enumerated set.

    Satisfied by any value present in the set of permitted values.
    Applicable to any ASN.1 type.

    Parameters
    ----------
    *values: :class:`int`
        Full set of values permitted by this constraint object.

    Examples
    --------
    .. code-block:: python

        class DivisorOfSix(Integer):
            '''
            ASN.1 specification:

            Divisor-Of-6 ::= INTEGER (1 | 2 | 3 | 6)
            '''
            subtypeSpec = SingleValueConstraint(1, 2, 3, 6)

        # this will succeed
        divisor_of_six = DivisorOfSix(1)

        # this will raise ValueConstraintError
        divisor_of_six = DivisorOfSix(7)
    """
    def _setValues(self, values):
        # Keep the raw tuple (for repr/hash) and a set for O(1) membership.
        self._values = values
        self._set = set(values)

    def _testValue(self, value, idx):
        if value in self._set:
            return

        raise error.ValueConstraintError(value)
|
||||
|
||||
|
||||
class ContainedSubtypeConstraint(AbstractConstraint):
    """Create a ContainedSubtypeConstraint object.

    The ContainedSubtypeConstraint satisfies any value that
    is present in the set of permitted scalar values and also
    satisfies the included constraint objects.

    The ContainedSubtypeConstraint object can be applied to
    any ASN.1 type.

    Parameters
    ----------
    *values:
        Full set of values and constraint objects permitted
        by this constraint object.

    Examples
    --------
    .. code-block:: python

        class DivisorOfEighteen(Integer):
            '''
            ASN.1 specification:

            Divisors-of-18 ::= INTEGER (INCLUDES Divisors-of-6 | 9 | 18)
            '''
            subtypeSpec = ContainedSubtypeConstraint(
                SingleValueConstraint(1, 2, 3, 6), 9, 18
            )

        # this will succeed
        divisor_of_eighteen = DivisorOfEighteen(9)

        # this will raise ValueConstraintError
        divisor_of_eighteen = DivisorOfEighteen(10)
    """
    def _setValues(self, values):
        # BUGFIX: `_testValue` consults `self._set`, but the inherited
        # `AbstractConstraint._setValues` never creates it, so any bare
        # scalar among *values* used to raise AttributeError at test time.
        # Build the scalar permitted-value set here; constraint objects are
        # applied as-is in `_testValue`.
        self._set = set(
            value for value in values
            if not isinstance(value, AbstractConstraint))
        AbstractConstraint._setValues(self, values)

    def _testValue(self, value, idx):
        for constraint in self._values:
            if isinstance(constraint, AbstractConstraint):
                # Nested constraint object: delegate verification to it.
                constraint(value, idx)
            elif value not in self._set:
                # Bare scalar: value must be one of the permitted scalars.
                raise error.ValueConstraintError(value)
|
||||
|
||||
|
||||
class ValueRangeConstraint(AbstractConstraint):
    """Restrict a numeric value to the inclusive range [start, stop].

    Only applicable to :class:`~pyasn1.type.univ.Integer` and
    :class:`~pyasn1.type.univ.Real` types.

    Parameters
    ----------
    start: :class:`int`
        Minimum permitted value in the range (inclusive)

    end: :class:`int`
        Maximum permitted value in the range (inclusive)

    Examples
    --------
    .. code-block:: python

        class TeenAgeYears(Integer):
            '''
            ASN.1 specification:

            TeenAgeYears ::= INTEGER (13 .. 19)
            '''
            subtypeSpec = ValueRangeConstraint(13, 19)

        # this will succeed
        teen_year = TeenAgeYears(18)

        # this will raise ValueConstraintError
        teen_year = TeenAgeYears(20)
    """
    def _testValue(self, value, idx):
        outOfRange = value < self.start or value > self.stop
        if outOfRange:
            raise error.ValueConstraintError(value)

    def _setValues(self, values):
        # Exactly two bounds are expected: (start, stop), start <= stop.
        if len(values) != 2:
            raise error.PyAsn1Error(
                '%s: bad constraint values' % (self.__class__.__name__,)
            )

        self.start, self.stop = values

        if self.start > self.stop:
            raise error.PyAsn1Error(
                '%s: screwed constraint values (start > stop): %s > %s' % (
                    self.__class__.__name__,
                    self.start, self.stop
                )
            )

        AbstractConstraint._setValues(self, values)
|
||||
|
||||
|
||||
class ValueSizeConstraint(ValueRangeConstraint):
    """Restrict the *size* of a value to the inclusive range [start, stop].

    Applicable to :class:`~pyasn1.type.univ.BitString`,
    :class:`~pyasn1.type.univ.OctetString` (including all
    :ref:`character ASN.1 types <type.char>`),
    :class:`~pyasn1.type.univ.SequenceOf` and
    :class:`~pyasn1.type.univ.SetOf` types.

    Parameters
    ----------
    minimum: :class:`int`
        Minimum permitted size of the value (inclusive)

    maximum: :class:`int`
        Maximum permitted size of the value (inclusive)

    Examples
    --------
    .. code-block:: python

        class BaseballTeamRoster(SetOf):
            '''
            ASN.1 specification:

            BaseballTeamRoster ::= SET SIZE (1..25) OF PlayerNames
            '''
            componentType = PlayerNames()
            subtypeSpec = ValueSizeConstraint(1, 25)

        # this will succeed
        team = BaseballTeamRoster()
        team.extend(['Jan', 'Matej'])
        encode(team)

        # this will raise ValueConstraintError
        team = BaseballTeamRoster()
        team.extend(['Jan'] * 26)
        encode(team)

    Note
    ----
    For mutable types (e.g. :class:`~pyasn1.type.univ.SequenceOf`,
    :class:`~pyasn1.type.univ.SetOf`) this constraint is only verified
    at serialisation time, not when the schema object is instantiated.
    """
    def _testValue(self, value, idx):
        size = len(value)
        if size < self.start or size > self.stop:
            raise error.ValueConstraintError(value)
|
||||
|
||||
|
||||
class PermittedAlphabetConstraint(SingleValueConstraint):
    """Restrict a character string to a permitted alphabet.

    Satisfied by any character string all of whose characters are present
    in the set of permitted characters.  Only applicable to the
    :ref:`character ASN.1 types <type.char>` such as
    :class:`~pyasn1.type.char.IA5String`.

    Parameters
    ----------
    *alphabet: :class:`str`
        Full set of characters permitted by this constraint object.

    Examples
    --------
    .. code-block:: python

        class BooleanValue(IA5String):
            '''
            ASN.1 specification:

            BooleanValue ::= IA5String (FROM ('T' | 'F'))
            '''
            subtypeSpec = PermittedAlphabetConstraint('T', 'F')

        # this will succeed
        truth = BooleanValue('T')
        truth = BooleanValue('TF')

        # this will raise ValueConstraintError
        garbage = BooleanValue('TAF')
    """
    def _setValues(self, values):
        # Raw tuple for repr/hash; set for fast per-character membership.
        self._values = values
        self._set = set(values)

    def _testValue(self, value, idx):
        # Every character of *value* must belong to the permitted set.
        if self._set.issuperset(value):
            return

        raise error.ValueConstraintError(value)
|
||||
|
||||
|
||||
class ComponentPresentConstraint(AbstractConstraint):
    """Require a component to be present.

    Only satisfied when the value is not `None`.  Typically used as an
    operand of `WithComponentsConstraint`.

    Examples
    --------
    .. code-block:: python

        present = ComponentPresentConstraint()

        # this will succeed
        present('whatever')

        # this will raise ValueConstraintError
        present(None)
    """
    def _setValues(self, values):
        # Sentinel tuple keeps the constraint truthy so __call__ runs.
        self._values = ('<must be present>',)

        if values:
            raise error.PyAsn1Error('No arguments expected')

    def _testValue(self, value, idx):
        if value is not None:
            return

        raise error.ValueConstraintError(
            'Component is not present:')
|
||||
|
||||
|
||||
class ComponentAbsentConstraint(AbstractConstraint):
    """Require a component to be absent.

    Only satisfied when the value is `None`.  Typically used as an
    operand of `WithComponentsConstraint`.

    Examples
    --------
    .. code-block:: python

        absent = ComponentAbsentConstraint()

        # this will succeed
        absent(None)

        # this will raise ValueConstraintError
        absent('whatever')
    """
    def _setValues(self, values):
        # Sentinel tuple keeps the constraint truthy so __call__ runs.
        self._values = ('<must be absent>',)

        if values:
            raise error.PyAsn1Error('No arguments expected')

    def _testValue(self, value, idx):
        if value is None:
            return

        raise error.ValueConstraintError(
            'Component is not absent: %r' % value)
|
||||
|
||||
|
||||
class WithComponentsConstraint(AbstractConstraint):
    """Constrain presence/absence of fields of a mapping value.

    Satisfied by any mapping object whose constrained fields are present
    or absent as indicated by `ComponentPresentConstraint` /
    `ComponentAbsentConstraint` operands.  Typically applied to
    :class:`~pyasn1.type.univ.Set` or :class:`~pyasn1.type.univ.Sequence`
    types.

    Parameters
    ----------
    *fields: :class:`tuple`
        Zero or more tuples of (`field`, `constraint`) indicating
        constrained fields.

    Notes
    -----
    Besides presence/absence checks, any other constraint object (or a
    combination of them) may be passed per field.  For scalar fields such
    constraints are verified in addition to the constraints belonging to
    the scalar components themselves, without formally changing the type
    of those ASN.1 objects.

    Examples
    --------

    .. code-block:: python

        class Item(Sequence):  # Set is similar
            '''
            ASN.1 specification:

            Item ::= SEQUENCE {
                id    INTEGER OPTIONAL,
                name  OCTET STRING OPTIONAL
            } WITH COMPONENTS id PRESENT, name ABSENT | id ABSENT, name PRESENT
            '''
            componentType = NamedTypes(
                OptionalNamedType('id', Integer()),
                OptionalNamedType('name', OctetString())
            )
            withComponents = ConstraintsUnion(
                WithComponentsConstraint(
                    ('id', ComponentPresentConstraint()),
                    ('name', ComponentAbsentConstraint())
                ),
                WithComponentsConstraint(
                    ('id', ComponentAbsentConstraint()),
                    ('name', ComponentPresentConstraint())
                )
            )

        item = Item()

        # This will succeed
        item['id'] = 1

        # This will succeed
        item.reset()
        item['name'] = 'John'

        # This will fail (on encoding)
        item.reset()
        item['id'] = 1
        item['name'] = 'John'
    """
    def _testValue(self, value, idx):
        # *value* is mapping-like; fields not present come back as None.
        for fieldName, fieldConstraint in self._values:
            fieldConstraint(value.get(fieldName))

    def _setValues(self, values):
        AbstractConstraint._setValues(self, values)
|
||||
|
||||
|
||||
# This is a bit kludgy, meaning two op modes within a single constraint
class InnerTypeConstraint(AbstractConstraint):
    """Value must satisfy the type and presence constraints"""

    def _testValue(self, value, idx):
        # Single-type mode: one constraint applies to every inner component.
        if self.__singleTypeConstraint:
            self.__singleTypeConstraint(value)
        # Multiple-type mode: a per-position (idx) constraint table.
        elif self.__multipleTypeConstraint:
            if idx not in self.__multipleTypeConstraint:
                raise error.ValueConstraintError(value)
            constraint, status = self.__multipleTypeConstraint[idx]
            if status == 'ABSENT':  # XXX presence is not checked!
                raise error.ValueConstraintError(value)
            constraint(value)

    def _setValues(self, values):
        # A 3-tuple (position, constraint, presence-status) selects
        # multiple-type mode; a bare constraint object selects
        # single-type mode.  The last bare constraint seen wins.
        self.__multipleTypeConstraint = {}
        self.__singleTypeConstraint = None
        for v in values:
            if isinstance(v, tuple):
                self.__multipleTypeConstraint[v[0]] = v[1], v[2]
            else:
                self.__singleTypeConstraint = v
        AbstractConstraint._setValues(self, values)
|
||||
|
||||
|
||||
# Logic operations on constraints
|
||||
|
||||
class ConstraintsExclusion(AbstractConstraint):
    """Logic operator: NOT.

    Succeeds exactly when the value does *not* satisfy the operand
    constraint.  Applicable to any constraint or logic operator object.

    Parameters
    ----------
    constraint:
        Constraint or logic operator object.

    Examples
    --------
    .. code-block:: python

        class Lipogramme(IA5STRING):
            '''
            ASN.1 specification:

            Lipogramme ::=
                IA5String (FROM (ALL EXCEPT ("e"|"E")))
            '''
            subtypeSpec = ConstraintsExclusion(
                PermittedAlphabetConstraint('e', 'E')
            )

        # this will succeed
        lipogramme = Lipogramme('A work of fiction?')

        # this will raise ValueConstraintError
        lipogramme = Lipogramme('Eel')

    Warning
    -------
    The above example involving PermittedAlphabetConstraint might
    not work due to the way how PermittedAlphabetConstraint works.
    The other constraints might work with ConstraintsExclusion
    though.
    """
    def _testValue(self, value, idx):
        # The value passes only when the wrapped constraint rejects it.
        try:
            self._values[0](value, idx)
        except error.ValueConstraintError:
            return

        raise error.ValueConstraintError(value)

    def _setValues(self, values):
        if len(values) != 1:
            raise error.PyAsn1Error('Single constraint expected')

        AbstractConstraint._setValues(self, values)
|
||||
|
||||
|
||||
class AbstractConstraintSet(AbstractConstraint):
    """Base class for constraint combinations; duck-types a tuple."""

    def __getitem__(self, idx):
        return self._values[idx]

    def __iter__(self):
        return iter(self._values)

    def __add__(self, value):
        return self.__class__(*(self._values + (value,)))

    def __radd__(self, value):
        return self.__class__(*((value,) + self._values))

    def __len__(self):
        return len(self._values)

    # Constraints inclusion in sets

    def _setValues(self, values):
        self._values = values
        for member in values:
            if not member:
                continue
            # Record each non-empty operand and everything it itself
            # contains, for super/subtype derivation checks.
            self._valueMap.add(member)
            self._valueMap.update(member.getValueMap())
|
||||
|
||||
|
||||
class ConstraintsIntersection(AbstractConstraintSet):
    """Logic operator: AND.

    Succeeds only when *all* operand constraints succeed.  Applicable to
    any constraint or logic operator objects; duck-types an immutable
    container like Python :py:class:`tuple`.

    Parameters
    ----------
    *constraints:
        Constraint or logic operator objects.

    Examples
    --------
    .. code-block:: python

        class CapitalAndSmall(IA5String):
            '''
            ASN.1 specification:

            CapitalAndSmall ::=
                IA5String (FROM ("A".."Z"|"a".."z"))
            '''
            subtypeSpec = ConstraintsIntersection(
                PermittedAlphabetConstraint('A', 'Z'),
                PermittedAlphabetConstraint('a', 'z')
            )

        # this will succeed
        capital_and_small = CapitalAndSmall('Hello')

        # this will raise ValueConstraintError
        capital_and_small = CapitalAndSmall('hello')
    """
    def _testValue(self, value, idx):
        # First failing operand propagates its ValueConstraintError.
        for member in self._values:
            member(value, idx)
|
||||
|
||||
|
||||
class ConstraintsUnion(AbstractConstraintSet):
    """Logic operator: OR.

    Succeeds when *at least one* operand constraint succeeds.  Applicable
    to any constraint or logic operator objects; duck-types an immutable
    container like Python :py:class:`tuple`.

    Parameters
    ----------
    *constraints:
        Constraint or logic operator objects.

    Examples
    --------
    .. code-block:: python

        class CapitalOrSmall(IA5String):
            '''
            ASN.1 specification:

            CapitalOrSmall ::=
                IA5String (FROM ("A".."Z") | FROM ("a".."z"))
            '''
            subtypeSpec = ConstraintsUnion(
                PermittedAlphabetConstraint('A', 'Z'),
                PermittedAlphabetConstraint('a', 'z')
            )

        # this will succeed
        capital_or_small = CapitalOrSmall('Hello')

        # this will raise ValueConstraintError
        capital_or_small = CapitalOrSmall('hello!')
    """
    def _testValue(self, value, idx):
        for member in self._values:
            try:
                member(value, idx)
            except error.ValueConstraintError:
                continue
            # One satisfied operand is enough.
            return

        raise error.ValueConstraintError(
            'all of %s failed for "%s"' % (self._values, value)
        )
|
||||
|
||||
# TODO:
|
||||
# refactor InnerTypeConstraint
|
||||
# add tests for type check
|
||||
# implement other constraint types
|
||||
# make constraint validation easy to skip
|
||||
11
Lambdas/Websocket Authorizer/pyasn1/type/error.py
Normal file
11
Lambdas/Websocket Authorizer/pyasn1/type/error.py
Normal file
@@ -0,0 +1,11 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1.error import PyAsn1Error
|
||||
|
||||
|
||||
class ValueConstraintError(PyAsn1Error):
    """Raised when an ASN.1 value fails a subtype constraint check."""
    pass
|
||||
561
Lambdas/Websocket Authorizer/pyasn1/type/namedtype.py
Normal file
561
Lambdas/Websocket Authorizer/pyasn1/type/namedtype.py
Normal file
@@ -0,0 +1,561 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import sys
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import tagmap
|
||||
|
||||
__all__ = ['NamedType', 'OptionalNamedType', 'DefaultedNamedType',
|
||||
'NamedTypes']
|
||||
|
||||
try:
    # `any` is a builtin since Python 2.5; provide an equivalent fallback
    # for extremely old interpreters.
    any

except NameError:
    any = lambda x: bool(filter(bool, x))
|
||||
|
||||
|
||||
class NamedType(object):
    """A single named field of a constructed ASN.1 type.

    Immutable; duck-types a two-element Python :class:`tuple` holding
    ``(name, asn1Object)``.

    Parameters
    ----------
    name: :py:class:`str`
        Field name

    asn1Object:
        ASN.1 type object

    openType:
        Optional open-type specification associated with the field.
    """
    # Presence flags; subclasses flip these for OPTIONAL/DEFAULT fields.
    isOptional = False
    isDefaulted = False

    def __init__(self, name, asn1Object, openType=None):
        self.__name = name
        self.__type = asn1Object
        # Cached (name, asn1Object) pair backs the tuple protocol below.
        self.__nameAndType = name, asn1Object
        self.__openType = openType

    def __repr__(self):
        description = '%s=%r' % (self.name, self.asn1Object)

        if self.openType:
            description = '%s, open type %r' % (description, self.openType)

        return '<%s object, type %s>' % (
            self.__class__.__name__, description)

    # Comparisons and hashing delegate to the (name, asn1Object) pair.

    def __eq__(self, other):
        return self.__nameAndType == other

    def __ne__(self, other):
        return self.__nameAndType != other

    def __lt__(self, other):
        return self.__nameAndType < other

    def __le__(self, other):
        return self.__nameAndType <= other

    def __gt__(self, other):
        return self.__nameAndType > other

    def __ge__(self, other):
        return self.__nameAndType >= other

    def __hash__(self):
        return hash(self.__nameAndType)

    def __getitem__(self, idx):
        return self.__nameAndType[idx]

    def __iter__(self):
        return iter(self.__nameAndType)

    @property
    def name(self):
        return self.__name

    @property
    def asn1Object(self):
        return self.__type

    @property
    def openType(self):
        return self.__openType

    # Backward compatibility

    def getName(self):
        return self.name

    def getType(self):
        return self.asn1Object
|
||||
|
||||
|
||||
class OptionalNamedType(NamedType):
    __doc__ = NamedType.__doc__

    # Field may be omitted from the serialised value (ASN.1 OPTIONAL).
    isOptional = True
|
||||
|
||||
|
||||
class DefaultedNamedType(NamedType):
    __doc__ = NamedType.__doc__

    # Field assumes a default value when absent (ASN.1 DEFAULT).
    isDefaulted = True
|
||||
|
||||
|
||||
class NamedTypes(object):
|
||||
"""Create a collection of named fields for a constructed ASN.1 type.
|
||||
|
||||
The NamedTypes object represents a collection of named fields of a constructed ASN.1 type.
|
||||
|
||||
*NamedTypes* objects are immutable and duck-type Python :class:`dict` objects
|
||||
holding *name* as keys and ASN.1 type object as values.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
*namedTypes: :class:`~pyasn1.type.namedtype.NamedType`
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Description(Sequence):
|
||||
'''
|
||||
ASN.1 specification:
|
||||
|
||||
Description ::= SEQUENCE {
|
||||
surname IA5String,
|
||||
first-name IA5String OPTIONAL,
|
||||
age INTEGER DEFAULT 40
|
||||
}
|
||||
'''
|
||||
componentType = NamedTypes(
|
||||
NamedType('surname', IA5String()),
|
||||
OptionalNamedType('first-name', IA5String()),
|
||||
DefaultedNamedType('age', Integer(40))
|
||||
)
|
||||
|
||||
descr = Description()
|
||||
descr['surname'] = 'Smith'
|
||||
descr['first-name'] = 'John'
|
||||
"""
|
||||
    def __init__(self, *namedTypes, **kwargs):
        """Capture the field set and precompute all lookup tables.

        NOTE: initialisation order matters -- several of the tables below
        depend on ones computed earlier.
        """
        self.__namedTypes = namedTypes
        self.__namedTypesLen = len(self.__namedTypes)
        self.__minTagSet = self.__computeMinTagSet()
        # name -> position and tagSet -> position tables; either may come
        # back as a PostponedError object that raises lazily on first use.
        self.__nameToPosMap = self.__computeNameToPosMap()
        self.__tagToPosMap = self.__computeTagToPosMap()
        # 'terminal' in kwargs suppresses the recursive ambiguity analysis
        # (set when NamedTypes builds sub-NamedTypes of itself).
        self.__ambiguousTypes = 'terminal' not in kwargs and self.__computeAmbiguousTypes() or {}
        self.__uniqueTagMap = self.__computeTagMaps(unique=True)
        self.__nonUniqueTagMap = self.__computeTagMaps(unique=False)
        # Cached flags so hot paths avoid rescanning the field tuple.
        self.__hasOptionalOrDefault = any([True for namedType in self.__namedTypes
                                           if namedType.isDefaulted or namedType.isOptional])
        self.__hasOpenTypes = any([True for namedType in self.__namedTypes
                                   if namedType.openType])

        # Positions of mandatory (non-OPTIONAL, non-DEFAULT) fields.
        self.__requiredComponents = frozenset(
            [idx for idx, nt in enumerate(self.__namedTypes) if not nt.isOptional and not nt.isDefaulted]
        )
        # Precomputed dict-protocol views (see keys()/values()/items()).
        self.__keys = frozenset([namedType.name for namedType in self.__namedTypes])
        self.__values = tuple([namedType.asn1Object for namedType in self.__namedTypes])
        self.__items = tuple([(namedType.name, namedType.asn1Object) for namedType in self.__namedTypes])
|
||||
|
||||
    def __repr__(self):
        representation = ', '.join(['%r' % x for x in self.__namedTypes])
        return '<%s object, types %s>' % (
            self.__class__.__name__, representation)

    # Comparisons and hashing delegate to the tuple of NamedType objects.

    def __eq__(self, other):
        return self.__namedTypes == other

    def __ne__(self, other):
        return self.__namedTypes != other

    def __lt__(self, other):
        return self.__namedTypes < other

    def __le__(self, other):
        return self.__namedTypes <= other

    def __gt__(self, other):
        return self.__namedTypes > other

    def __ge__(self, other):
        return self.__namedTypes >= other

    def __hash__(self):
        return hash(self.__namedTypes)

    def __getitem__(self, idx):
        # Integer index -> positional lookup; any other key is treated as
        # a field name (may raise through a PostponedError on duplicates).
        try:
            return self.__namedTypes[idx]

        except TypeError:
            return self.__namedTypes[self.__nameToPosMap[idx]]

    def __contains__(self, key):
        return key in self.__nameToPosMap

    def __iter__(self):
        # Iterates field *names*, matching dict semantics.
        return (x[0] for x in self.__namedTypes)

    if sys.version_info[0] <= 2:
        def __nonzero__(self):
            return self.__namedTypesLen > 0
    else:
        def __bool__(self):
            return self.__namedTypesLen > 0

    def __len__(self):
        return self.__namedTypesLen

    # Python dict protocol

    def values(self):
        # ASN.1 type objects in field order (precomputed in __init__).
        return self.__values

    def keys(self):
        # frozenset of field names (precomputed in __init__).
        return self.__keys

    def items(self):
        # (name, asn1Object) pairs in field order (precomputed in __init__).
        return self.__items

    def clone(self):
        return self.__class__(*self.__namedTypes)

    class PostponedError(object):
        # Stand-in for a lookup table whose construction failed; raises
        # the recorded error lazily, on first subscript access.
        def __init__(self, errorMsg):
            self.__errorMsg = errorMsg

        def __getitem__(self, item):
            raise error.PyAsn1Error(self.__errorMsg)
|
||||
|
||||
    def __computeTagToPosMap(self):
        # Build the tagSet -> field-position map.  Failures (duplicate
        # tags, or a field whose own tagMap is already postponed) are not
        # raised here but returned as a PostponedError object that raises
        # lazily on first use, since some layouts never consult this map.
        tagToPosMap = {}
        for idx, namedType in enumerate(self.__namedTypes):
            tagMap = namedType.asn1Object.tagMap
            if isinstance(tagMap, NamedTypes.PostponedError):
                return tagMap
            if not tagMap:
                continue
            for _tagSet in tagMap.presentTypes:
                if _tagSet in tagToPosMap:
                    return NamedTypes.PostponedError('Duplicate component tag %s at %s' % (_tagSet, namedType))
                tagToPosMap[_tagSet] = idx

        return tagToPosMap
|
||||
|
||||
def __computeNameToPosMap(self):
|
||||
nameToPosMap = {}
|
||||
for idx, namedType in enumerate(self.__namedTypes):
|
||||
if namedType.name in nameToPosMap:
|
||||
return NamedTypes.PostponedError('Duplicate component name %s at %s' % (namedType.name, namedType))
|
||||
nameToPosMap[namedType.name] = idx
|
||||
|
||||
return nameToPosMap
|
||||
|
||||
def __computeAmbiguousTypes(self):
|
||||
ambiguousTypes = {}
|
||||
partialAmbiguousTypes = ()
|
||||
for idx, namedType in reversed(tuple(enumerate(self.__namedTypes))):
|
||||
if namedType.isOptional or namedType.isDefaulted:
|
||||
partialAmbiguousTypes = (namedType,) + partialAmbiguousTypes
|
||||
else:
|
||||
partialAmbiguousTypes = (namedType,)
|
||||
if len(partialAmbiguousTypes) == len(self.__namedTypes):
|
||||
ambiguousTypes[idx] = self
|
||||
else:
|
||||
ambiguousTypes[idx] = NamedTypes(*partialAmbiguousTypes, **dict(terminal=True))
|
||||
return ambiguousTypes
|
||||
|
||||
def getTypeByPosition(self, idx):
|
||||
"""Return ASN.1 type object by its position in fields set.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
idx: :py:class:`int`
|
||||
Field index
|
||||
|
||||
Returns
|
||||
-------
|
||||
:
|
||||
ASN.1 type
|
||||
|
||||
Raises
|
||||
------
|
||||
~pyasn1.error.PyAsn1Error
|
||||
If given position is out of fields range
|
||||
"""
|
||||
try:
|
||||
return self.__namedTypes[idx].asn1Object
|
||||
|
||||
except IndexError:
|
||||
raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def getPositionByType(self, tagSet):
|
||||
"""Return field position by its ASN.1 type.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tagSet: :class:`~pysnmp.type.tag.TagSet`
|
||||
ASN.1 tag set distinguishing one ASN.1 type from others.
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :py:class:`int`
|
||||
ASN.1 type position in fields set
|
||||
|
||||
Raises
|
||||
------
|
||||
~pyasn1.error.PyAsn1Error
|
||||
If *tagSet* is not present or ASN.1 types are not unique within callee *NamedTypes*
|
||||
"""
|
||||
try:
|
||||
return self.__tagToPosMap[tagSet]
|
||||
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Type %s not found' % (tagSet,))
|
||||
|
||||
def getNameByPosition(self, idx):
|
||||
"""Return field name by its position in fields set.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
idx: :py:class:`idx`
|
||||
Field index
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :py:class:`str`
|
||||
Field name
|
||||
|
||||
Raises
|
||||
------
|
||||
~pyasn1.error.PyAsn1Error
|
||||
If given field name is not present in callee *NamedTypes*
|
||||
"""
|
||||
try:
|
||||
return self.__namedTypes[idx].name
|
||||
|
||||
except IndexError:
|
||||
raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def getPositionByName(self, name):
|
||||
"""Return field position by filed name.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
name: :py:class:`str`
|
||||
Field name
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :py:class:`int`
|
||||
Field position in fields set
|
||||
|
||||
Raises
|
||||
------
|
||||
~pyasn1.error.PyAsn1Error
|
||||
If *name* is not present or not unique within callee *NamedTypes*
|
||||
"""
|
||||
try:
|
||||
return self.__nameToPosMap[name]
|
||||
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Name %s not found' % (name,))
|
||||
|
||||
def getTagMapNearPosition(self, idx):
|
||||
"""Return ASN.1 types that are allowed at or past given field position.
|
||||
|
||||
Some ASN.1 serialisation allow for skipping optional and defaulted fields.
|
||||
Some constructed ASN.1 types allow reordering of the fields. When recovering
|
||||
such objects it may be important to know which types can possibly be
|
||||
present at any given position in the field sets.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
idx: :py:class:`int`
|
||||
Field index
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :class:`~pyasn1.type.tagmap.TagMap`
|
||||
Map if ASN.1 types allowed at given field position
|
||||
|
||||
Raises
|
||||
------
|
||||
~pyasn1.error.PyAsn1Error
|
||||
If given position is out of fields range
|
||||
"""
|
||||
try:
|
||||
return self.__ambiguousTypes[idx].tagMap
|
||||
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def getPositionNearType(self, tagSet, idx):
|
||||
"""Return the closest field position where given ASN.1 type is allowed.
|
||||
|
||||
Some ASN.1 serialisation allow for skipping optional and defaulted fields.
|
||||
Some constructed ASN.1 types allow reordering of the fields. When recovering
|
||||
such objects it may be important to know at which field position, in field set,
|
||||
given *tagSet* is allowed at or past *idx* position.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tagSet: :class:`~pyasn1.type.tag.TagSet`
|
||||
ASN.1 type which field position to look up
|
||||
|
||||
idx: :py:class:`int`
|
||||
Field position at or past which to perform ASN.1 type look up
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :py:class:`int`
|
||||
Field position in fields set
|
||||
|
||||
Raises
|
||||
------
|
||||
~pyasn1.error.PyAsn1Error
|
||||
If *tagSet* is not present or not unique within callee *NamedTypes*
|
||||
or *idx* is out of fields range
|
||||
"""
|
||||
try:
|
||||
return idx + self.__ambiguousTypes[idx].getPositionByType(tagSet)
|
||||
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error('Type position out of range')
|
||||
|
||||
def __computeMinTagSet(self):
|
||||
minTagSet = None
|
||||
for namedType in self.__namedTypes:
|
||||
asn1Object = namedType.asn1Object
|
||||
|
||||
try:
|
||||
tagSet = asn1Object.minTagSet
|
||||
|
||||
except AttributeError:
|
||||
tagSet = asn1Object.tagSet
|
||||
|
||||
if minTagSet is None or tagSet < minTagSet:
|
||||
minTagSet = tagSet
|
||||
|
||||
return minTagSet or tag.TagSet()
|
||||
|
||||
@property
|
||||
def minTagSet(self):
|
||||
"""Return the minimal TagSet among ASN.1 type in callee *NamedTypes*.
|
||||
|
||||
Some ASN.1 types/serialisation protocols require ASN.1 types to be
|
||||
arranged based on their numerical tag value. The *minTagSet* property
|
||||
returns that.
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :class:`~pyasn1.type.tagset.TagSet`
|
||||
Minimal TagSet among ASN.1 types in callee *NamedTypes*
|
||||
"""
|
||||
return self.__minTagSet
|
||||
|
||||
def __computeTagMaps(self, unique):
|
||||
presentTypes = {}
|
||||
skipTypes = {}
|
||||
defaultType = None
|
||||
for namedType in self.__namedTypes:
|
||||
tagMap = namedType.asn1Object.tagMap
|
||||
if isinstance(tagMap, NamedTypes.PostponedError):
|
||||
return tagMap
|
||||
for tagSet in tagMap:
|
||||
if unique and tagSet in presentTypes:
|
||||
return NamedTypes.PostponedError('Non-unique tagSet %s of %s at %s' % (tagSet, namedType, self))
|
||||
presentTypes[tagSet] = namedType.asn1Object
|
||||
skipTypes.update(tagMap.skipTypes)
|
||||
|
||||
if defaultType is None:
|
||||
defaultType = tagMap.defaultType
|
||||
elif tagMap.defaultType is not None:
|
||||
return NamedTypes.PostponedError('Duplicate default ASN.1 type at %s' % (self,))
|
||||
|
||||
return tagmap.TagMap(presentTypes, skipTypes, defaultType)
|
||||
|
||||
@property
|
||||
def tagMap(self):
|
||||
"""Return a *TagMap* object from tags and types recursively.
|
||||
|
||||
Return a :class:`~pyasn1.type.tagmap.TagMap` object by
|
||||
combining tags from *TagMap* objects of children types and
|
||||
associating them with their immediate child type.
|
||||
|
||||
Example
|
||||
-------
|
||||
.. code-block:: python
|
||||
|
||||
OuterType ::= CHOICE {
|
||||
innerType INTEGER
|
||||
}
|
||||
|
||||
Calling *.tagMap* on *OuterType* will yield a map like this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
Integer.tagSet -> Choice
|
||||
"""
|
||||
return self.__nonUniqueTagMap
|
||||
|
||||
@property
|
||||
def tagMapUnique(self):
|
||||
"""Return a *TagMap* object from unique tags and types recursively.
|
||||
|
||||
Return a :class:`~pyasn1.type.tagmap.TagMap` object by
|
||||
combining tags from *TagMap* objects of children types and
|
||||
associating them with their immediate child type.
|
||||
|
||||
Example
|
||||
-------
|
||||
.. code-block:: python
|
||||
|
||||
OuterType ::= CHOICE {
|
||||
innerType INTEGER
|
||||
}
|
||||
|
||||
Calling *.tagMapUnique* on *OuterType* will yield a map like this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
Integer.tagSet -> Choice
|
||||
|
||||
Note
|
||||
----
|
||||
|
||||
Duplicate *TagSet* objects found in the tree of children
|
||||
types would cause error.
|
||||
"""
|
||||
return self.__uniqueTagMap
|
||||
|
||||
@property
|
||||
def hasOptionalOrDefault(self):
|
||||
return self.__hasOptionalOrDefault
|
||||
|
||||
@property
|
||||
def hasOpenTypes(self):
|
||||
return self.__hasOpenTypes
|
||||
|
||||
@property
|
||||
def namedTypes(self):
|
||||
return tuple(self.__namedTypes)
|
||||
|
||||
@property
|
||||
def requiredComponents(self):
|
||||
return self.__requiredComponents
|
||||
192
Lambdas/Websocket Authorizer/pyasn1/type/namedval.py
Normal file
192
Lambdas/Websocket Authorizer/pyasn1/type/namedval.py
Normal file
@@ -0,0 +1,192 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
# ASN.1 named integers
|
||||
#
|
||||
from pyasn1 import error
|
||||
|
||||
__all__ = ['NamedValues']
|
||||
|
||||
|
||||
class NamedValues(object):
|
||||
"""Create named values object.
|
||||
|
||||
The |NamedValues| object represents a collection of string names
|
||||
associated with numeric IDs. These objects are used for giving
|
||||
names to otherwise numerical values.
|
||||
|
||||
|NamedValues| objects are immutable and duck-type Python
|
||||
:class:`dict` object mapping ID to name and vice-versa.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
*args: variable number of two-element :py:class:`tuple`
|
||||
|
||||
name: :py:class:`str`
|
||||
Value label
|
||||
|
||||
value: :py:class:`int`
|
||||
Numeric value
|
||||
|
||||
Keyword Args
|
||||
------------
|
||||
name: :py:class:`str`
|
||||
Value label
|
||||
|
||||
value: :py:class:`int`
|
||||
Numeric value
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> nv = NamedValues('a', 'b', ('c', 0), d=1)
|
||||
>>> nv
|
||||
>>> {'c': 0, 'd': 1, 'a': 2, 'b': 3}
|
||||
>>> nv[0]
|
||||
'c'
|
||||
>>> nv['a']
|
||||
2
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.__names = {}
|
||||
self.__numbers = {}
|
||||
|
||||
anonymousNames = []
|
||||
|
||||
for namedValue in args:
|
||||
if isinstance(namedValue, (tuple, list)):
|
||||
try:
|
||||
name, number = namedValue
|
||||
|
||||
except ValueError:
|
||||
raise error.PyAsn1Error('Not a proper attribute-value pair %r' % (namedValue,))
|
||||
|
||||
else:
|
||||
anonymousNames.append(namedValue)
|
||||
continue
|
||||
|
||||
if name in self.__names:
|
||||
raise error.PyAsn1Error('Duplicate name %s' % (name,))
|
||||
|
||||
if number in self.__numbers:
|
||||
raise error.PyAsn1Error('Duplicate number %s=%s' % (name, number))
|
||||
|
||||
self.__names[name] = number
|
||||
self.__numbers[number] = name
|
||||
|
||||
for name, number in kwargs.items():
|
||||
if name in self.__names:
|
||||
raise error.PyAsn1Error('Duplicate name %s' % (name,))
|
||||
|
||||
if number in self.__numbers:
|
||||
raise error.PyAsn1Error('Duplicate number %s=%s' % (name, number))
|
||||
|
||||
self.__names[name] = number
|
||||
self.__numbers[number] = name
|
||||
|
||||
if anonymousNames:
|
||||
|
||||
number = self.__numbers and max(self.__numbers) + 1 or 0
|
||||
|
||||
for name in anonymousNames:
|
||||
|
||||
if name in self.__names:
|
||||
raise error.PyAsn1Error('Duplicate name %s' % (name,))
|
||||
|
||||
self.__names[name] = number
|
||||
self.__numbers[number] = name
|
||||
|
||||
number += 1
|
||||
|
||||
def __repr__(self):
|
||||
representation = ', '.join(['%s=%d' % x for x in self.items()])
|
||||
|
||||
if len(representation) > 64:
|
||||
representation = representation[:32] + '...' + representation[-32:]
|
||||
|
||||
return '<%s object, enums %s>' % (
|
||||
self.__class__.__name__, representation)
|
||||
|
||||
def __eq__(self, other):
|
||||
return dict(self) == other
|
||||
|
||||
def __ne__(self, other):
|
||||
return dict(self) != other
|
||||
|
||||
def __lt__(self, other):
|
||||
return dict(self) < other
|
||||
|
||||
def __le__(self, other):
|
||||
return dict(self) <= other
|
||||
|
||||
def __gt__(self, other):
|
||||
return dict(self) > other
|
||||
|
||||
def __ge__(self, other):
|
||||
return dict(self) >= other
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.items())
|
||||
|
||||
# Python dict protocol (read-only)
|
||||
|
||||
def __getitem__(self, key):
|
||||
try:
|
||||
return self.__numbers[key]
|
||||
|
||||
except KeyError:
|
||||
return self.__names[key]
|
||||
|
||||
def __len__(self):
|
||||
return len(self.__names)
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self.__names or key in self.__numbers
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__names)
|
||||
|
||||
def values(self):
|
||||
return iter(self.__numbers)
|
||||
|
||||
def keys(self):
|
||||
return iter(self.__names)
|
||||
|
||||
def items(self):
|
||||
for name in self.__names:
|
||||
yield name, self.__names[name]
|
||||
|
||||
# support merging
|
||||
|
||||
def __add__(self, namedValues):
|
||||
return self.__class__(*tuple(self.items()) + tuple(namedValues.items()))
|
||||
|
||||
# XXX clone/subtype?
|
||||
|
||||
def clone(self, *args, **kwargs):
|
||||
new = self.__class__(*args, **kwargs)
|
||||
return self + new
|
||||
|
||||
# legacy protocol
|
||||
|
||||
def getName(self, value):
|
||||
if value in self.__numbers:
|
||||
return self.__numbers[value]
|
||||
|
||||
def getValue(self, name):
|
||||
if name in self.__names:
|
||||
return self.__names[name]
|
||||
|
||||
def getValues(self, *names):
|
||||
try:
|
||||
return [self.__names[name] for name in names]
|
||||
|
||||
except KeyError:
|
||||
raise error.PyAsn1Error(
|
||||
'Unknown bit identifier(s): %s' % (set(names).difference(self.__names),)
|
||||
)
|
||||
104
Lambdas/Websocket Authorizer/pyasn1/type/opentype.py
Normal file
104
Lambdas/Websocket Authorizer/pyasn1/type/opentype.py
Normal file
@@ -0,0 +1,104 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
|
||||
__all__ = ['OpenType']
|
||||
|
||||
|
||||
class OpenType(object):
|
||||
"""Create ASN.1 type map indexed by a value
|
||||
|
||||
The *OpenType* object models an untyped field of a constructed ASN.1
|
||||
type. In ASN.1 syntax it is usually represented by the
|
||||
`ANY DEFINED BY` for scalars or `SET OF ANY DEFINED BY`,
|
||||
`SEQUENCE OF ANY DEFINED BY` for container types clauses. Typically
|
||||
used together with :class:`~pyasn1.type.univ.Any` object.
|
||||
|
||||
OpenType objects duck-type a read-only Python :class:`dict` objects,
|
||||
however the passed `typeMap` is not copied, but stored by reference.
|
||||
That means the user can manipulate `typeMap` at run time having this
|
||||
reflected on *OpenType* object behavior.
|
||||
|
||||
The |OpenType| class models an untyped field of a constructed ASN.1
|
||||
type. In ASN.1 syntax it is usually represented by the
|
||||
`ANY DEFINED BY` for scalars or `SET OF ANY DEFINED BY`,
|
||||
`SEQUENCE OF ANY DEFINED BY` for container types clauses. Typically
|
||||
used with :class:`~pyasn1.type.univ.Any` type.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
name: :py:class:`str`
|
||||
Field name
|
||||
|
||||
typeMap: :py:class:`dict`
|
||||
A map of value->ASN.1 type. It's stored by reference and can be
|
||||
mutated later to register new mappings.
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
For untyped scalars:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
openType = OpenType(
|
||||
'id', {1: Integer(),
|
||||
2: OctetString()}
|
||||
)
|
||||
Sequence(
|
||||
componentType=NamedTypes(
|
||||
NamedType('id', Integer()),
|
||||
NamedType('blob', Any(), openType=openType)
|
||||
)
|
||||
)
|
||||
|
||||
For untyped `SET OF` or `SEQUENCE OF` vectors:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
openType = OpenType(
|
||||
'id', {1: Integer(),
|
||||
2: OctetString()}
|
||||
)
|
||||
Sequence(
|
||||
componentType=NamedTypes(
|
||||
NamedType('id', Integer()),
|
||||
NamedType('blob', SetOf(componentType=Any()),
|
||||
openType=openType)
|
||||
)
|
||||
)
|
||||
"""
|
||||
|
||||
def __init__(self, name, typeMap=None):
|
||||
self.__name = name
|
||||
if typeMap is None:
|
||||
self.__typeMap = {}
|
||||
else:
|
||||
self.__typeMap = typeMap
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.__name
|
||||
|
||||
# Python dict protocol
|
||||
|
||||
def values(self):
|
||||
return self.__typeMap.values()
|
||||
|
||||
def keys(self):
|
||||
return self.__typeMap.keys()
|
||||
|
||||
def items(self):
|
||||
return self.__typeMap.items()
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self.__typeMap
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.__typeMap[key]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__typeMap)
|
||||
335
Lambdas/Websocket Authorizer/pyasn1/type/tag.py
Normal file
335
Lambdas/Websocket Authorizer/pyasn1/type/tag.py
Normal file
@@ -0,0 +1,335 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
|
||||
__all__ = ['tagClassUniversal', 'tagClassApplication', 'tagClassContext',
|
||||
'tagClassPrivate', 'tagFormatSimple', 'tagFormatConstructed',
|
||||
'tagCategoryImplicit', 'tagCategoryExplicit',
|
||||
'tagCategoryUntagged', 'Tag', 'TagSet']
|
||||
|
||||
#: Identifier for ASN.1 class UNIVERSAL
|
||||
tagClassUniversal = 0x00
|
||||
|
||||
#: Identifier for ASN.1 class APPLICATION
|
||||
tagClassApplication = 0x40
|
||||
|
||||
#: Identifier for ASN.1 class context-specific
|
||||
tagClassContext = 0x80
|
||||
|
||||
#: Identifier for ASN.1 class private
|
||||
tagClassPrivate = 0xC0
|
||||
|
||||
#: Identifier for "simple" ASN.1 structure (e.g. scalar)
|
||||
tagFormatSimple = 0x00
|
||||
|
||||
#: Identifier for "constructed" ASN.1 structure (e.g. may have inner components)
|
||||
tagFormatConstructed = 0x20
|
||||
|
||||
tagCategoryImplicit = 0x01
|
||||
tagCategoryExplicit = 0x02
|
||||
tagCategoryUntagged = 0x04
|
||||
|
||||
|
||||
class Tag(object):
|
||||
"""Create ASN.1 tag
|
||||
|
||||
Represents ASN.1 tag that can be attached to a ASN.1 type to make
|
||||
types distinguishable from each other.
|
||||
|
||||
*Tag* objects are immutable and duck-type Python :class:`tuple` objects
|
||||
holding three integer components of a tag.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tagClass: :py:class:`int`
|
||||
Tag *class* value
|
||||
|
||||
tagFormat: :py:class:`int`
|
||||
Tag *format* value
|
||||
|
||||
tagId: :py:class:`int`
|
||||
Tag ID value
|
||||
"""
|
||||
def __init__(self, tagClass, tagFormat, tagId):
|
||||
if tagId < 0:
|
||||
raise error.PyAsn1Error('Negative tag ID (%s) not allowed' % tagId)
|
||||
self.__tagClass = tagClass
|
||||
self.__tagFormat = tagFormat
|
||||
self.__tagId = tagId
|
||||
self.__tagClassId = tagClass, tagId
|
||||
self.__hash = hash(self.__tagClassId)
|
||||
|
||||
def __repr__(self):
|
||||
representation = '[%s:%s:%s]' % (
|
||||
self.__tagClass, self.__tagFormat, self.__tagId)
|
||||
return '<%s object, tag %s>' % (
|
||||
self.__class__.__name__, representation)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__tagClassId == other
|
||||
|
||||
def __ne__(self, other):
|
||||
return self.__tagClassId != other
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.__tagClassId < other
|
||||
|
||||
def __le__(self, other):
|
||||
return self.__tagClassId <= other
|
||||
|
||||
def __gt__(self, other):
|
||||
return self.__tagClassId > other
|
||||
|
||||
def __ge__(self, other):
|
||||
return self.__tagClassId >= other
|
||||
|
||||
def __hash__(self):
|
||||
return self.__hash
|
||||
|
||||
def __getitem__(self, idx):
|
||||
if idx == 0:
|
||||
return self.__tagClass
|
||||
elif idx == 1:
|
||||
return self.__tagFormat
|
||||
elif idx == 2:
|
||||
return self.__tagId
|
||||
else:
|
||||
raise IndexError()
|
||||
|
||||
def __iter__(self):
|
||||
yield self.__tagClass
|
||||
yield self.__tagFormat
|
||||
yield self.__tagId
|
||||
|
||||
def __and__(self, otherTag):
|
||||
return self.__class__(self.__tagClass & otherTag.tagClass,
|
||||
self.__tagFormat & otherTag.tagFormat,
|
||||
self.__tagId & otherTag.tagId)
|
||||
|
||||
def __or__(self, otherTag):
|
||||
return self.__class__(self.__tagClass | otherTag.tagClass,
|
||||
self.__tagFormat | otherTag.tagFormat,
|
||||
self.__tagId | otherTag.tagId)
|
||||
|
||||
@property
|
||||
def tagClass(self):
|
||||
"""ASN.1 tag class
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :py:class:`int`
|
||||
Tag class
|
||||
"""
|
||||
return self.__tagClass
|
||||
|
||||
@property
|
||||
def tagFormat(self):
|
||||
"""ASN.1 tag format
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :py:class:`int`
|
||||
Tag format
|
||||
"""
|
||||
return self.__tagFormat
|
||||
|
||||
@property
|
||||
def tagId(self):
|
||||
"""ASN.1 tag ID
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :py:class:`int`
|
||||
Tag ID
|
||||
"""
|
||||
return self.__tagId
|
||||
|
||||
|
||||
class TagSet(object):
|
||||
"""Create a collection of ASN.1 tags
|
||||
|
||||
Represents a combination of :class:`~pyasn1.type.tag.Tag` objects
|
||||
that can be attached to a ASN.1 type to make types distinguishable
|
||||
from each other.
|
||||
|
||||
*TagSet* objects are immutable and duck-type Python :class:`tuple` objects
|
||||
holding arbitrary number of :class:`~pyasn1.type.tag.Tag` objects.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
baseTag: :class:`~pyasn1.type.tag.Tag`
|
||||
Base *Tag* object. This tag survives IMPLICIT tagging.
|
||||
|
||||
*superTags: :class:`~pyasn1.type.tag.Tag`
|
||||
Additional *Tag* objects taking part in subtyping.
|
||||
|
||||
Examples
|
||||
--------
|
||||
.. code-block:: python
|
||||
|
||||
class OrderNumber(NumericString):
|
||||
'''
|
||||
ASN.1 specification
|
||||
|
||||
Order-number ::=
|
||||
[APPLICATION 5] IMPLICIT NumericString
|
||||
'''
|
||||
tagSet = NumericString.tagSet.tagImplicitly(
|
||||
Tag(tagClassApplication, tagFormatSimple, 5)
|
||||
)
|
||||
|
||||
orderNumber = OrderNumber('1234')
|
||||
"""
|
||||
def __init__(self, baseTag=(), *superTags):
|
||||
self.__baseTag = baseTag
|
||||
self.__superTags = superTags
|
||||
self.__superTagsClassId = tuple(
|
||||
[(superTag.tagClass, superTag.tagId) for superTag in superTags]
|
||||
)
|
||||
self.__lenOfSuperTags = len(superTags)
|
||||
self.__hash = hash(self.__superTagsClassId)
|
||||
|
||||
def __repr__(self):
|
||||
representation = '-'.join(['%s:%s:%s' % (x.tagClass, x.tagFormat, x.tagId)
|
||||
for x in self.__superTags])
|
||||
if representation:
|
||||
representation = 'tags ' + representation
|
||||
else:
|
||||
representation = 'untagged'
|
||||
|
||||
return '<%s object, %s>' % (self.__class__.__name__, representation)
|
||||
|
||||
def __add__(self, superTag):
|
||||
return self.__class__(self.__baseTag, *self.__superTags + (superTag,))
|
||||
|
||||
def __radd__(self, superTag):
|
||||
return self.__class__(self.__baseTag, *(superTag,) + self.__superTags)
|
||||
|
||||
def __getitem__(self, i):
|
||||
if i.__class__ is slice:
|
||||
return self.__class__(self.__baseTag, *self.__superTags[i])
|
||||
else:
|
||||
return self.__superTags[i]
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__superTagsClassId == other
|
||||
|
||||
def __ne__(self, other):
|
||||
return self.__superTagsClassId != other
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.__superTagsClassId < other
|
||||
|
||||
def __le__(self, other):
|
||||
return self.__superTagsClassId <= other
|
||||
|
||||
def __gt__(self, other):
|
||||
return self.__superTagsClassId > other
|
||||
|
||||
def __ge__(self, other):
|
||||
return self.__superTagsClassId >= other
|
||||
|
||||
def __hash__(self):
|
||||
return self.__hash
|
||||
|
||||
def __len__(self):
|
||||
return self.__lenOfSuperTags
|
||||
|
||||
@property
|
||||
def baseTag(self):
|
||||
"""Return base ASN.1 tag
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :class:`~pyasn1.type.tag.Tag`
|
||||
Base tag of this *TagSet*
|
||||
"""
|
||||
return self.__baseTag
|
||||
|
||||
@property
|
||||
def superTags(self):
|
||||
"""Return ASN.1 tags
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :py:class:`tuple`
|
||||
Tuple of :class:`~pyasn1.type.tag.Tag` objects that this *TagSet* contains
|
||||
"""
|
||||
return self.__superTags
|
||||
|
||||
def tagExplicitly(self, superTag):
|
||||
"""Return explicitly tagged *TagSet*
|
||||
|
||||
Create a new *TagSet* representing callee *TagSet* explicitly tagged
|
||||
with passed tag(s). With explicit tagging mode, new tags are appended
|
||||
to existing tag(s).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
superTag: :class:`~pyasn1.type.tag.Tag`
|
||||
*Tag* object to tag this *TagSet*
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :class:`~pyasn1.type.tag.TagSet`
|
||||
New *TagSet* object
|
||||
"""
|
||||
if superTag.tagClass == tagClassUniversal:
|
||||
raise error.PyAsn1Error("Can't tag with UNIVERSAL class tag")
|
||||
if superTag.tagFormat != tagFormatConstructed:
|
||||
superTag = Tag(superTag.tagClass, tagFormatConstructed, superTag.tagId)
|
||||
return self + superTag
|
||||
|
||||
def tagImplicitly(self, superTag):
|
||||
"""Return implicitly tagged *TagSet*
|
||||
|
||||
Create a new *TagSet* representing callee *TagSet* implicitly tagged
|
||||
with passed tag(s). With implicit tagging mode, new tag(s) replace the
|
||||
last existing tag.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
superTag: :class:`~pyasn1.type.tag.Tag`
|
||||
*Tag* object to tag this *TagSet*
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :class:`~pyasn1.type.tag.TagSet`
|
||||
New *TagSet* object
|
||||
"""
|
||||
if self.__superTags:
|
||||
superTag = Tag(superTag.tagClass, self.__superTags[-1].tagFormat, superTag.tagId)
|
||||
return self[:-1] + superTag
|
||||
|
||||
def isSuperTagSetOf(self, tagSet):
|
||||
"""Test type relationship against given *TagSet*
|
||||
|
||||
The callee is considered to be a supertype of given *TagSet*
|
||||
tag-wise if all tags in *TagSet* are present in the callee and
|
||||
they are in the same order.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
tagSet: :class:`~pyasn1.type.tag.TagSet`
|
||||
*TagSet* object to evaluate against the callee
|
||||
|
||||
Returns
|
||||
-------
|
||||
: :py:class:`bool`
|
||||
:obj:`True` if callee is a supertype of *tagSet*
|
||||
"""
|
||||
if len(tagSet) < self.__lenOfSuperTags:
|
||||
return False
|
||||
return self.__superTags == tagSet[:self.__lenOfSuperTags]
|
||||
|
||||
# Backward compatibility
|
||||
|
||||
def getBaseTag(self):
|
||||
return self.__baseTag
|
||||
|
||||
def initTagSet(tag):
|
||||
return TagSet(tag, tag)
|
||||
96
Lambdas/Websocket Authorizer/pyasn1/type/tagmap.py
Normal file
96
Lambdas/Websocket Authorizer/pyasn1/type/tagmap.py
Normal file
@@ -0,0 +1,96 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
from pyasn1 import error
|
||||
|
||||
__all__ = ['TagMap']
|
||||
|
||||
|
||||
class TagMap(object):
|
||||
"""Map *TagSet* objects to ASN.1 types
|
||||
|
||||
Create an object mapping *TagSet* object to ASN.1 type.
|
||||
|
||||
*TagMap* objects are immutable and duck-type read-only Python
|
||||
:class:`dict` objects holding *TagSet* objects as keys and ASN.1
|
||||
type objects as values.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
presentTypes: :py:class:`dict`
|
||||
Map of :class:`~pyasn1.type.tag.TagSet` to ASN.1 objects considered
|
||||
as being unconditionally present in the *TagMap*.
|
||||
|
||||
skipTypes: :py:class:`dict`
|
||||
A collection of :class:`~pyasn1.type.tag.TagSet` objects considered
|
||||
as absent in the *TagMap* even when *defaultType* is present.
|
||||
|
||||
defaultType: ASN.1 type object
|
||||
An ASN.1 type object callee *TagMap* returns for any *TagSet* key not present
|
||||
in *presentTypes* (unless given key is present in *skipTypes*).
|
||||
"""
|
||||
def __init__(self, presentTypes=None, skipTypes=None, defaultType=None):
|
||||
self.__presentTypes = presentTypes or {}
|
||||
self.__skipTypes = skipTypes or {}
|
||||
self.__defaultType = defaultType
|
||||
|
||||
def __contains__(self, tagSet):
|
||||
return (tagSet in self.__presentTypes or
|
||||
self.__defaultType is not None and tagSet not in self.__skipTypes)
|
||||
|
||||
def __getitem__(self, tagSet):
|
||||
try:
|
||||
return self.__presentTypes[tagSet]
|
||||
except KeyError:
|
||||
if self.__defaultType is None:
|
||||
raise KeyError()
|
||||
elif tagSet in self.__skipTypes:
|
||||
raise error.PyAsn1Error('Key in negative map')
|
||||
else:
|
||||
return self.__defaultType
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__presentTypes)
|
||||
|
||||
def __repr__(self):
|
||||
representation = '%s object' % self.__class__.__name__
|
||||
|
||||
if self.__presentTypes:
|
||||
representation += ', present %s' % repr(self.__presentTypes)
|
||||
|
||||
if self.__skipTypes:
|
||||
representation += ', skip %s' % repr(self.__skipTypes)
|
||||
|
||||
if self.__defaultType is not None:
|
||||
representation += ', default %s' % repr(self.__defaultType)
|
||||
|
||||
return '<%s>' % representation
|
||||
|
||||
@property
|
||||
def presentTypes(self):
|
||||
"""Return *TagSet* to ASN.1 type map present in callee *TagMap*"""
|
||||
return self.__presentTypes
|
||||
|
||||
@property
|
||||
def skipTypes(self):
|
||||
"""Return *TagSet* collection unconditionally absent in callee *TagMap*"""
|
||||
return self.__skipTypes
|
||||
|
||||
@property
|
||||
def defaultType(self):
|
||||
"""Return default ASN.1 type being returned for any missing *TagSet*"""
|
||||
return self.__defaultType
|
||||
|
||||
# Backward compatibility
|
||||
|
||||
def getPosMap(self):
|
||||
return self.presentTypes
|
||||
|
||||
def getNegMap(self):
|
||||
return self.skipTypes
|
||||
|
||||
def getDef(self):
|
||||
return self.defaultType
|
||||
3321
Lambdas/Websocket Authorizer/pyasn1/type/univ.py
Normal file
3321
Lambdas/Websocket Authorizer/pyasn1/type/univ.py
Normal file
File diff suppressed because it is too large
Load Diff
191
Lambdas/Websocket Authorizer/pyasn1/type/useful.py
Normal file
191
Lambdas/Websocket Authorizer/pyasn1/type/useful.py
Normal file
@@ -0,0 +1,191 @@
|
||||
#
|
||||
# This file is part of pyasn1 software.
|
||||
#
|
||||
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
|
||||
# License: http://snmplabs.com/pyasn1/license.html
|
||||
#
|
||||
import datetime
|
||||
|
||||
from pyasn1 import error
|
||||
from pyasn1.compat import dateandtime
|
||||
from pyasn1.compat import string
|
||||
from pyasn1.type import char
|
||||
from pyasn1.type import tag
|
||||
from pyasn1.type import univ
|
||||
|
||||
__all__ = ['ObjectDescriptor', 'GeneralizedTime', 'UTCTime']
|
||||
|
||||
NoValue = univ.NoValue
|
||||
noValue = univ.noValue
|
||||
|
||||
|
||||
class ObjectDescriptor(char.GraphicString):
    # ObjectDescriptor shares GraphicString's value model; reuse its docs.
    __doc__ = char.GraphicString.__doc__

    #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
    # (implicitly tagged with universal tag number 7)
    tagSet = char.GraphicString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7)
    )

    # Optimization for faster codec lookup
    typeId = char.GraphicString.getTypeId()
|
||||
|
||||
|
||||
class TimeMixIn(object):
    """Mix-in adding `datetime.datetime` conversion to ASN.1 time types.

    Concrete types (GeneralizedTime, UTCTime) tune the class attributes
    below to match their respective grammars.
    """

    _yearsDigits = 4          # digits in the year field (4 or 2)
    _hasSubsecond = False     # whether fractional seconds are serialized
    _optionalMinutes = False  # whether minutes/seconds may be omitted
    _shortTZ = False          # whether a 2-digit (hours-only) TZ offset is allowed

    class FixedOffset(datetime.tzinfo):
        """Fixed offset in minutes east from UTC."""

        # defaulted arguments required
        # https: // docs.python.org / 2.3 / lib / datetime - tzinfo.html
        def __init__(self, offset=0, name='UTC'):
            self.__offset = datetime.timedelta(minutes=offset)
            self.__name = name

        def utcoffset(self, dt):
            return self.__offset

        def tzname(self, dt):
            return self.__name

        def dst(self, dt):
            return datetime.timedelta(0)

    UTC = FixedOffset()

    @property
    def asDateTime(self):
        """Create :py:class:`datetime.datetime` object from a |ASN.1| object.

        Returns
        -------
        :
            new instance of :py:class:`datetime.datetime` object
        """
        text = str(self)
        if text.endswith('Z'):
            tzinfo = TimeMixIn.UTC
            text = text[:-1]

        elif '-' in text or '+' in text:
            if '+' in text:
                text, plusminus, tz = string.partition(text, '+')
            else:
                text, plusminus, tz = string.partition(text, '-')

            if self._shortTZ and len(tz) == 2:
                # Hours-only offset: assume zero minutes.
                tz += '00'

            if len(tz) != 4:
                raise error.PyAsn1Error('malformed time zone offset %s' % tz)

            try:
                minutes = int(tz[:2]) * 60 + int(tz[2:])
                if plusminus == '-':
                    minutes *= -1

            except ValueError:
                raise error.PyAsn1Error('unknown time specification %s' % self)

            tzinfo = TimeMixIn.FixedOffset(minutes, '?')

        else:
            # Naive timestamp: no zone designator present.
            tzinfo = None

        if '.' in text or ',' in text:
            if '.' in text:
                text, _, fraction = string.partition(text, '.')
            else:
                text, _, fraction = string.partition(text, ',')

            if not fraction.isdigit():
                raise error.PyAsn1Error('bad sub-second time specification %s' % self)

            # BUGFIX: scale by the digit count so '.5' means 500000us and
            # '.123' means 123000us.  The previous `int(ms) * 1000` was only
            # correct for exactly three fractional digits, and more than six
            # digits made dt.replace() raise an unhandled ValueError.
            micros = int(fraction.ljust(6, '0')[:6])

        else:
            micros = 0

        if self._optionalMinutes and len(text) - self._yearsDigits == 6:
            text += '0000'
        elif len(text) - self._yearsDigits == 8:
            text += '00'

        try:
            dt = dateandtime.strptime(text, self._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')

        except ValueError:
            raise error.PyAsn1Error('malformed datetime format %s' % self)

        return dt.replace(microsecond=micros, tzinfo=tzinfo)

    @classmethod
    def fromDateTime(cls, dt):
        """Create |ASN.1| object from a :py:class:`datetime.datetime` object.

        Parameters
        ----------
        dt: :py:class:`datetime.datetime` object
            The `datetime.datetime` object to initialize the |ASN.1| object
            from

        Returns
        -------
        :
            new instance of |ASN.1| value
        """
        text = dt.strftime(cls._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S')
        if cls._hasSubsecond:
            # BUGFIX: zero-pad to three digits -- 1000us must render as
            # '.001', not '.1' (which would read back as 100ms).
            text += '.%.3d' % (dt.microsecond // 1000)

        offset = dt.utcoffset()
        if offset:
            # BUGFIX: timedelta.seconds is always non-negative, so the sign
            # must come from total_seconds(); the previous `seconds % 3600`
            # also emitted seconds instead of minutes for fractional-hour
            # zones (e.g. +05:30 became '+051800').
            total = int(offset.total_seconds())
            text += '+' if total >= 0 else '-'
            total = abs(total)
            text += '%.2d%.2d' % (total // 3600, (total % 3600) // 60)
        else:
            # Naive datetimes and zero offsets are serialized as UTC.
            text += 'Z'

        return cls(text)
|
||||
|
||||
|
||||
class GeneralizedTime(char.VisibleString, TimeMixIn):
    __doc__ = char.VisibleString.__doc__

    #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
    # (implicitly tagged with universal tag number 24)
    tagSet = char.VisibleString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)
    )

    # Optimization for faster codec lookup
    # NOTE(review): this reads VideotexString, not VisibleString -- presumably
    # only uniqueness of the id matters here, but confirm against the codec
    # type maps before changing it.
    typeId = char.VideotexString.getTypeId()

    # GeneralizedTime grammar: 4-digit year, fractional seconds allowed,
    # minutes/seconds optional, hours-only timezone offsets accepted.
    _yearsDigits = 4
    _hasSubsecond = True
    _optionalMinutes = True
    _shortTZ = True
|
||||
|
||||
|
||||
class UTCTime(char.VisibleString, TimeMixIn):
    __doc__ = char.VisibleString.__doc__

    #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects
    # (implicitly tagged with universal tag number 23)
    tagSet = char.VisibleString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23)
    )

    # Optimization for faster codec lookup
    # NOTE(review): reads VideotexString, not VisibleString -- same oddity as
    # GeneralizedTime; confirm against the codec type maps before changing.
    typeId = char.VideotexString.getTypeId()

    # UTCTime grammar: 2-digit year, no fractional seconds, full minutes and
    # full 4-digit timezone offsets required.
    _yearsDigits = 2
    _hasSubsecond = False
    _optionalMinutes = False
    _shortTZ = False
|
||||
42
Lambdas/Websocket Authorizer/rsa/__init__.py
Normal file
42
Lambdas/Websocket Authorizer/rsa/__init__.py
Normal file
@@ -0,0 +1,42 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""RSA module
|
||||
|
||||
Module for calculating large primes, and RSA encryption, decryption, signing
|
||||
and verification. Includes generating public and private keys.
|
||||
|
||||
WARNING: this implementation does not use compression of the cleartext input to
|
||||
prevent repetitions, or other common security improvements. Use with care.
|
||||
|
||||
"""
|
||||
|
||||
from rsa.key import newkeys, PrivateKey, PublicKey
|
||||
from rsa.pkcs1 import encrypt, decrypt, sign, verify, DecryptionError, \
|
||||
VerificationError, find_signature_hash, sign_hash, compute_hash
|
||||
|
||||
__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly"
|
||||
__date__ = "2018-09-16"
|
||||
__version__ = '4.0'
|
||||
|
||||
# Do doctest if we're run directly
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
|
||||
doctest.testmod()
|
||||
|
||||
__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify", 'PublicKey',
|
||||
'PrivateKey', 'DecryptionError', 'VerificationError',
|
||||
'compute_hash', 'sign_hash']
|
||||
162
Lambdas/Websocket Authorizer/rsa/_compat.py
Normal file
162
Lambdas/Websocket Authorizer/rsa/_compat.py
Normal file
@@ -0,0 +1,162 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Python compatibility wrappers."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import itertools
|
||||
import sys
|
||||
from struct import pack
|
||||
|
||||
MAX_INT = sys.maxsize
|
||||
MAX_INT64 = (1 << 63) - 1
|
||||
MAX_INT32 = (1 << 31) - 1
|
||||
MAX_INT16 = (1 << 15) - 1
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
|
||||
# Determine the word size of the processor.
|
||||
if MAX_INT == MAX_INT64:
|
||||
# 64-bit processor.
|
||||
MACHINE_WORD_SIZE = 64
|
||||
elif MAX_INT == MAX_INT32:
|
||||
# 32-bit processor.
|
||||
MACHINE_WORD_SIZE = 32
|
||||
else:
|
||||
# Else we just assume 64-bit processor keeping up with modern times.
|
||||
MACHINE_WORD_SIZE = 64
|
||||
|
||||
if PY2:
|
||||
integer_types = (int, long)
|
||||
range = xrange
|
||||
zip = itertools.izip
|
||||
else:
|
||||
integer_types = (int, )
|
||||
range = range
|
||||
zip = zip
|
||||
|
||||
|
||||
def write_to_stdout(data):
    """Write raw bytes to standard output.

    :type data: bytes
    """
    if not PY2:
        # Python 3's sys.stdout is text-mode; raw bytes must go via .buffer.
        sys.stdout.buffer.write(data)
        return
    # Python 2's stdout accepts byte strings directly.
    sys.stdout.write(data)
|
||||
|
||||
|
||||
def is_bytes(obj):
    """Tell whether *obj* is a byte string.

    :param obj:
        The value to test.
    :returns:
        ``True`` if ``obj`` is an instance of ``bytes``; ``False`` otherwise.
    """
    return isinstance(obj, bytes)
|
||||
|
||||
|
||||
def is_integer(obj):
    """
    Determines whether the given value is an integer.

    :param obj:
        The value to test.
    :returns:
        ``True`` if ``value`` is an integer; ``False`` otherwise.
    """
    # integer_types covers int and long on Python 2, just int on Python 3.
    return isinstance(obj, integer_types)
|
||||
|
||||
|
||||
def byte(num):
    """Pack an integer in ``[0, 255]`` into a single-byte string.

    Use it as a replacement for ``chr`` where you are expecting a byte
    because this will work on all current versions of Python.

    :param num:
        An unsigned integer between 0 and 255 (both inclusive).
    :returns:
        A single byte.
    """
    # struct format "B" is an unsigned char; out-of-range values raise.
    return pack("B", num)
|
||||
|
||||
|
||||
def xor_bytes(b1, b2):
    """Return the bitwise XOR of two bytes objects, ``b1 ^ b2``.

    XOR is commutative, so argument order does not matter.  When the inputs
    have different lengths, the surplus of the longer one is ignored.

    :param b1:
        First bytes object.
    :param b2:
        Second bytes object.
    :returns:
        Bytes object holding the XOR result.
    """
    if not PY2:
        return bytes(x ^ y for x, y in zip(b1, b2))
    # Python 2: indexing a str yields 1-char strings, so XOR the ordinals
    # and re-pack each result byte.
    return ''.join(byte(ord(x) ^ ord(y)) for x, y in zip(b1, b2))
|
||||
|
||||
|
||||
def get_word_alignment(num, force_arch=64,
                       _machine_word_size=MACHINE_WORD_SIZE):
    """Return alignment details for *num* on the running platform.

    :param num:
        Unsigned integral number.
    :param force_arch:
        If you don't want to use 64-bit unsigned chunks, set this to
        anything other than 64. 32-bit chunks will be preferred then.
        Default 64 will be used when on a 64-bit machine.
    :param _machine_word_size:
        (Internal) The machine word size used for alignment.
    :returns:
        4-tuple::

            (word_bits, word_bytes,
             max_uint, packing_format_type)
    """
    # Upper bound of each candidate word size.
    max_uint64 = 0xffffffffffffffff
    max_uint32 = 0xffffffff
    max_uint16 = 0xffff
    max_uint8 = 0xff

    # Pick the smallest word size that still holds `num`, honouring the
    # 64-bit preference only when both requested and supported.
    if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32:
        return 64, 8, max_uint64, "Q"
    if num > max_uint16:
        return 32, 4, max_uint32, "L"
    if num > max_uint8:
        return 16, 2, max_uint16, "H"
    return 8, 1, max_uint8, "B"
|
||||
53
Lambdas/Websocket Authorizer/rsa/asn1.py
Normal file
53
Lambdas/Websocket Authorizer/rsa/asn1.py
Normal file
@@ -0,0 +1,53 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""ASN.1 definitions.
|
||||
|
||||
Not all ASN.1-handling code use these definitions, but when it does, they should be here.
|
||||
"""
|
||||
|
||||
from pyasn1.type import univ, namedtype, tag
|
||||
|
||||
|
||||
class PubKeyHeader(univ.Sequence):
    # AlgorithmIdentifier-style header: an OID plus (here) NULL parameters.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('oid', univ.ObjectIdentifier()),
        namedtype.NamedType('parameters', univ.Null()),
    )
|
||||
|
||||
|
||||
class OpenSSLPubKey(univ.Sequence):
    # Outer SubjectPublicKeyInfo-style wrapper produced by OpenSSL.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('header', PubKeyHeader()),

        # This little hack (the implicit tag) allows us to get a Bit String as Octet String
        namedtype.NamedType('key', univ.OctetString().subtype(
            implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))),
    )
|
||||
|
||||
|
||||
class AsnPubKey(univ.Sequence):
    """ASN.1 contents of DER encoded public key:

    RSAPublicKey ::= SEQUENCE {
        modulus           INTEGER,  -- n
        publicExponent    INTEGER,  -- e
    }
    """

    componentType = namedtype.NamedTypes(
        namedtype.NamedType('modulus', univ.Integer()),
        namedtype.NamedType('publicExponent', univ.Integer()),
    )
|
||||
288
Lambdas/Websocket Authorizer/rsa/cli.py
Normal file
288
Lambdas/Websocket Authorizer/rsa/cli.py
Normal file
@@ -0,0 +1,288 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Commandline scripts.
|
||||
|
||||
These scripts are called by the executables defined in setup.py.
|
||||
"""
|
||||
|
||||
from __future__ import with_statement, print_function
|
||||
|
||||
import abc
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
import rsa
|
||||
import rsa.pkcs1
|
||||
|
||||
HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys())
|
||||
|
||||
|
||||
def keygen():
    """Key generator."""

    # Parse the CLI options
    parser = OptionParser(usage='usage: %prog [options] keysize',
                          description='Generates a new RSA keypair of "keysize" bits.')

    parser.add_option('--pubout', type='string',
                      help='Output filename for the public key. The public key is '
                           'not saved if this option is not present. You can use '
                           'pyrsa-priv2pub to create the public key file later.')

    parser.add_option('-o', '--out', type='string',
                      help='Output filename for the private key. The key is '
                           'written to stdout if this option is not present.')

    parser.add_option('--form',
                      help='key format of the private and public keys - default PEM',
                      choices=('PEM', 'DER'), default='PEM')

    (cli, cli_args) = parser.parse_args(sys.argv[1:])

    # Exactly one positional argument (the key size in bits) is expected.
    if len(cli_args) != 1:
        parser.print_help()
        raise SystemExit(1)

    try:
        keysize = int(cli_args[0])
    except ValueError:
        parser.print_help()
        print('Not a valid number: %s' % cli_args[0], file=sys.stderr)
        raise SystemExit(1)

    print('Generating %i-bit key' % keysize, file=sys.stderr)
    (pub_key, priv_key) = rsa.newkeys(keysize)

    # Save public key
    if cli.pubout:
        print('Writing public key to %s' % cli.pubout, file=sys.stderr)
        data = pub_key.save_pkcs1(format=cli.form)
        with open(cli.pubout, 'wb') as outfile:
            outfile.write(data)

    # Save private key
    data = priv_key.save_pkcs1(format=cli.form)

    if cli.out:
        print('Writing private key to %s' % cli.out, file=sys.stderr)
        with open(cli.out, 'wb') as outfile:
            outfile.write(data)
    else:
        # No --out given: dump the key to stdout as raw bytes.
        print('Writing private key to stdout', file=sys.stderr)
        rsa._compat.write_to_stdout(data)
|
||||
|
||||
|
||||
class CryptoOperation(object):
    """CLI callable that operates with input, output, and a key."""

    __metaclass__ = abc.ABCMeta

    # Template fields overridden by subclasses; the %-style placeholders are
    # expanded from the subclass' class dict in __init__.
    keyname = 'public'  # or 'private'
    usage = 'usage: %%prog [options] %(keyname)s_key'
    description = None
    operation = 'decrypt'
    operation_past = 'decrypted'
    operation_progressive = 'decrypting'
    input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \
                 'not specified.'
    output_help = 'Name of the file to write the %(operation_past)s file ' \
                  'to. Written to stdout if this option is not present.'
    expected_cli_args = 1
    has_output = True

    # Key type loaded by read_key(); subclasses may use rsa.PrivateKey.
    key_class = rsa.PublicKey

    def __init__(self):
        # Expand the %-templates once, using the subclass' attributes.
        self.usage = self.usage % self.__class__.__dict__
        self.input_help = self.input_help % self.__class__.__dict__
        self.output_help = self.output_help % self.__class__.__dict__

    @abc.abstractmethod
    def perform_operation(self, indata, key, cli_args):
        """Performs the program's operation.

        Implement in a subclass.

        :returns: the data to write to the output.
        """

    def __call__(self):
        """Runs the program."""

        (cli, cli_args) = self.parse_cli()

        key = self.read_key(cli_args[0], cli.keyform)

        indata = self.read_infile(cli.input)

        print(self.operation_progressive.title(), file=sys.stderr)
        outdata = self.perform_operation(indata, key, cli_args)

        # Some operations (e.g. verification) produce no output payload.
        if self.has_output:
            self.write_outfile(outdata, cli.output)

    def parse_cli(self):
        """Parse the CLI options

        :returns: (cli_opts, cli_args)
        """

        parser = OptionParser(usage=self.usage, description=self.description)

        parser.add_option('-i', '--input', type='string', help=self.input_help)

        if self.has_output:
            parser.add_option('-o', '--output', type='string', help=self.output_help)

        parser.add_option('--keyform',
                          help='Key format of the %s key - default PEM' % self.keyname,
                          choices=('PEM', 'DER'), default='PEM')

        (cli, cli_args) = parser.parse_args(sys.argv[1:])

        if len(cli_args) != self.expected_cli_args:
            parser.print_help()
            raise SystemExit(1)

        return cli, cli_args

    def read_key(self, filename, keyform):
        """Reads a public or private key."""

        print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr)
        with open(filename, 'rb') as keyfile:
            keydata = keyfile.read()

        return self.key_class.load_pkcs1(keydata, keyform)

    def read_infile(self, inname):
        """Read the input file"""

        if inname:
            print('Reading input from %s' % inname, file=sys.stderr)
            with open(inname, 'rb') as infile:
                return infile.read()

        # No --input given: consume standard input instead.
        print('Reading input from stdin', file=sys.stderr)
        return sys.stdin.read()

    def write_outfile(self, outdata, outname):
        """Write the output file"""

        if outname:
            print('Writing output to %s' % outname, file=sys.stderr)
            with open(outname, 'wb') as outfile:
                outfile.write(outdata)
        else:
            # No --output given: emit raw bytes on standard output.
            print('Writing output to stdout', file=sys.stderr)
            rsa._compat.write_to_stdout(outdata)
|
||||
|
||||
|
||||
class EncryptOperation(CryptoOperation):
    """Encrypts a file."""

    # Inherits key_class = rsa.PublicKey from CryptoOperation.
    keyname = 'public'
    description = ('Encrypts a file. The file must be shorter than the key '
                   'length in order to be encrypted.')
    operation = 'encrypt'
    operation_past = 'encrypted'
    operation_progressive = 'encrypting'

    def perform_operation(self, indata, pub_key, cli_args=None):
        """Encrypts files."""

        return rsa.encrypt(indata, pub_key)
|
||||
|
||||
|
||||
class DecryptOperation(CryptoOperation):
    """Decrypts a file."""

    keyname = 'private'
    description = ('Decrypts a file. The original file must be shorter than '
                   'the key length in order to have been encrypted.')
    operation = 'decrypt'
    operation_past = 'decrypted'
    operation_progressive = 'decrypting'
    # Decryption requires the private key, unlike the base class default.
    key_class = rsa.PrivateKey

    def perform_operation(self, indata, priv_key, cli_args=None):
        """Decrypts files."""

        return rsa.decrypt(indata, priv_key)
|
||||
|
||||
|
||||
class SignOperation(CryptoOperation):
    """Signs a file."""

    keyname = 'private'
    usage = 'usage: %%prog [options] private_key hash_method'
    description = ('Signs a file, outputs the signature. Choose the hash '
                   'method from %s' % ', '.join(HASH_METHODS))
    operation = 'sign'
    operation_past = 'signature'
    operation_progressive = 'Signing'
    key_class = rsa.PrivateKey
    # Two positionals: the key file and the hash method name.
    expected_cli_args = 2

    output_help = ('Name of the file to write the signature to. Written '
                   'to stdout if this option is not present.')

    def perform_operation(self, indata, priv_key, cli_args):
        """Signs files."""

        hash_method = cli_args[1]
        if hash_method not in HASH_METHODS:
            raise SystemExit('Invalid hash method, choose one of %s' %
                             ', '.join(HASH_METHODS))

        return rsa.sign(indata, priv_key, hash_method)
|
||||
|
||||
|
||||
class VerifyOperation(CryptoOperation):
    """Verify a signature."""

    keyname = 'public'
    usage = 'usage: %%prog [options] public_key signature_file'
    description = ('Verifies a signature, exits with status 0 upon success, '
                   'prints an error message and exits with status 1 upon error.')
    operation = 'verify'
    operation_past = 'verified'
    operation_progressive = 'Verifying'
    key_class = rsa.PublicKey
    # Two positionals: the key file and the signature file.
    expected_cli_args = 2
    # Verification produces no output payload, only an exit status.
    has_output = False

    def perform_operation(self, indata, pub_key, cli_args):
        """Verifies files."""

        signature_file = cli_args[1]

        with open(signature_file, 'rb') as sigfile:
            signature = sigfile.read()

        try:
            rsa.verify(indata, signature, pub_key)
        except rsa.VerificationError:
            raise SystemExit('Verification failed.')

        print('Verification OK', file=sys.stderr)
|
||||
|
||||
|
||||
# Ready-to-call CLI entry points; these are the callables referenced by the
# executables defined in setup.py.
encrypt = EncryptOperation()
decrypt = DecryptOperation()
sign = SignOperation()
verify = VerifyOperation()
|
||||
188
Lambdas/Websocket Authorizer/rsa/common.py
Normal file
188
Lambdas/Websocket Authorizer/rsa/common.py
Normal file
@@ -0,0 +1,188 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from rsa._compat import zip
|
||||
|
||||
"""Common functionality shared by several modules."""
|
||||
|
||||
|
||||
class NotRelativePrimeError(ValueError):
    """Raised when two numbers expected to be coprime share a divisor."""

    def __init__(self, a, b, d, msg=None):
        # Only build the default message when the caller supplied none.
        if not msg:
            msg = "%d and %d are not relatively prime, divider=%i" % (a, b, d)
        super(NotRelativePrimeError, self).__init__(msg)
        self.a = a
        self.b = b
        self.d = d
|
||||
|
||||
|
||||
def bit_size(num):
    """Return the number of bits needed to represent *num*, excluding any
    leading zero bits.

    Usage::

        >>> bit_size(1023)
        10
        >>> bit_size(1024)
        11
        >>> bit_size(1025)
        11

    :param num:
        Integer value. If num is 0, returns 0. Only the absolute value of
        the number is considered, so signed integers behave as abs(num).
    :returns:
        Returns the number of bits in the integer.
    """
    try:
        return num.bit_length()
    except AttributeError:
        # Non-integers (e.g. floats) have no bit_length().
        raise TypeError('bit_size(num) only supports integers, not %r' % type(num))
|
||||
|
||||
|
||||
def byte_size(number):
    """Return the number of bytes required to hold *number*, rounded up.

    Usage::

        >>> byte_size(1 << 1023)
        128
        >>> byte_size((1 << 1024) - 1)
        128
        >>> byte_size(1 << 1024)
        129

    :param number:
        An unsigned integer
    :returns:
        The number of bytes required to hold a specific long number.
    """
    # Zero still occupies one byte on the wire.
    return 1 if number == 0 else ceil_div(bit_size(number), 8)
|
||||
|
||||
|
||||
def ceil_div(num, div):
    """Return ``ceil(num / div)`` using only integer arithmetic.

    Usage::

        >>> ceil_div(100, 7)
        15
        >>> ceil_div(100, 10)
        10
        >>> ceil_div(1, 4)
        1

    :param num: Division's numerator, a number
    :param div: Division's divisor, a number

    :return: Rounded up result of the division between the parameters.
    """
    # Negated floor division of the negated numerator is ceiling division.
    return -(-num // div)
|
||||
|
||||
|
||||
def extended_gcd(a, b):
    """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb
    """
    # i is the multiplicative inverse of a mod b (when r == 1), and j the
    # inverse of b mod a.  Negative coefficients are wrapped positive modulo
    # the original b or a respectively.  The iterative form is faster and
    # uses far less stack space than the recursive one.
    x, lx = 0, 1
    y, ly = 1, 0
    orig_a, orig_b = a, b  # kept so negative results can be wrapped below
    while b != 0:
        q = a // b
        a, b = b, a % b
        x, lx = lx - q * x, x
        y, ly = ly - q * y, y
    if lx < 0:
        lx += orig_b  # wrap modulo the original b
    if ly < 0:
        ly += orig_a  # wrap modulo the original a
    return a, lx, ly  # only positive coefficients are returned
|
||||
|
||||
|
||||
def inverse(x, n):
    """Returns the inverse of x % n under multiplication, a.k.a x^-1 (mod n)

    >>> inverse(7, 4)
    3
    >>> (inverse(143, 4) * 143) % 4
    1
    """

    # extended_gcd yields the Bezout coefficient of x, which is the modular
    # inverse whenever gcd(x, n) == 1.
    (divider, inv, _) = extended_gcd(x, n)

    # A shared divisor means no inverse exists.
    if divider != 1:
        raise NotRelativePrimeError(x, n, divider)

    return inv
|
||||
|
||||
|
||||
def crt(a_values, modulo_values):
    """Chinese Remainder Theorem.

    Calculates x such that x = a[i] (mod m[i]) for each i.

    :param a_values: the a-values of the above equation
    :param modulo_values: the m-values of the above equation
    :returns: x such that x = a[i] (mod m[i]) for each i


    >>> crt([2, 3], [3, 5])
    8

    >>> crt([2, 3, 2], [3, 5, 7])
    23

    >>> crt([2, 3, 0], [7, 11, 15])
    135
    """

    # m accumulates the product of all moduli; x the running solution.
    m = 1
    x = 0

    for modulo in modulo_values:
        m *= modulo

    for (m_i, a_i) in zip(modulo_values, a_values):
        # Standard CRT reconstruction: weight each residue by the product of
        # the other moduli times that product's inverse modulo m_i.
        M_i = m // m_i
        inv = inverse(M_i, m_i)

        x = (x + a_i * M_i * inv) % m

    return x
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import doctest
|
||||
|
||||
doctest.testmod()
|
||||
57
Lambdas/Websocket Authorizer/rsa/core.py
Normal file
57
Lambdas/Websocket Authorizer/rsa/core.py
Normal file
@@ -0,0 +1,57 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Core mathematical operations.
|
||||
|
||||
This is the actual core RSA implementation, which is only defined
|
||||
mathematically on integers.
|
||||
"""
|
||||
|
||||
from rsa._compat import is_integer
|
||||
|
||||
|
||||
def assert_int(var, name):
    """Raise TypeError unless *var* is an integer.

    :param var: the value to check.
    :param name: parameter name used in the error message.
    """
    if not is_integer(var):
        raise TypeError('%s should be an integer, not %s' % (name, var.__class__))
|
||||
|
||||
|
||||
def encrypt_int(message, ekey, n):
    """Encrypts a message using encryption key 'ekey', working modulo n"""

    # Validate argument types before touching the math.
    assert_int(message, 'message')
    assert_int(ekey, 'ekey')
    assert_int(n, 'n')

    if message < 0:
        raise ValueError('Only non-negative numbers are supported')
    if message > n:
        raise OverflowError("The message %i is too long for n=%i" % (message, n))

    # Textbook RSA: c = m^e mod n.
    return pow(message, ekey, n)
|
||||
|
||||
|
||||
def decrypt_int(cyphertext, dkey, n):
    """Decrypts a cypher text using the decryption key 'dkey', working modulo n"""

    # Validate argument types before touching the math.
    assert_int(cyphertext, 'cyphertext')
    assert_int(dkey, 'dkey')
    assert_int(n, 'n')

    # Textbook RSA: m = c^d mod n.
    return pow(cyphertext, dkey, n)
|
||||
791
Lambdas/Websocket Authorizer/rsa/key.py
Normal file
791
Lambdas/Websocket Authorizer/rsa/key.py
Normal file
@@ -0,0 +1,791 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""RSA key generation code.
|
||||
|
||||
Create new keys with the newkeys() function. It will give you a PublicKey and a
|
||||
PrivateKey object.
|
||||
|
||||
Loading and saving keys requires the pyasn1 module. This module is imported as
|
||||
late as possible, such that other functionality will remain working in absence
|
||||
of pyasn1.
|
||||
|
||||
.. note::
|
||||
|
||||
Storing public and private keys via the `pickle` module is possible.
|
||||
However, it is insecure to load a key from an untrusted source.
|
||||
The pickle module is not secure against erroneous or maliciously
|
||||
constructed data. Never unpickle data received from an untrusted
|
||||
or unauthenticated source.
|
||||
|
||||
"""
|
||||
|
||||
import logging
|
||||
import warnings
|
||||
|
||||
from rsa._compat import range
|
||||
import rsa.prime
|
||||
import rsa.pem
|
||||
import rsa.common
|
||||
import rsa.randnum
|
||||
import rsa.core
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
DEFAULT_EXPONENT = 65537
|
||||
|
||||
|
||||
class AbstractKey(object):
    """Abstract superclass for private and public keys.

    Provides the shared PKCS#1 load/save dispatch and the blinding
    helpers used to defend private-key operations against timing
    side-channel attacks.
    """

    # n: the RSA modulus; e: the public exponent. Shared by both key types.
    __slots__ = ('n', 'e')

    def __init__(self, n, e):
        self.n = n
        self.e = e

    @classmethod
    def _load_pkcs1_pem(cls, keyfile):
        """Loads a key in PKCS#1 PEM format, implement in a subclass.

        :param keyfile: contents of a PEM-encoded file that contains
            the public key.
        :type keyfile: bytes

        :return: the loaded key
        :rtype: AbstractKey
        """

    @classmethod
    def _load_pkcs1_der(cls, keyfile):
        """Loads a key in PKCS#1 DER format, implement in a subclass.

        :param keyfile: contents of a DER-encoded file that contains
            the public key.
        :type keyfile: bytes

        :return: the loaded key
        :rtype: AbstractKey
        """

    def _save_pkcs1_pem(self):
        """Saves the key in PKCS#1 PEM format, implement in a subclass.

        :returns: the PEM-encoded key.
        :rtype: bytes
        """

    def _save_pkcs1_der(self):
        """Saves the key in PKCS#1 DER format, implement in a subclass.

        :returns: the DER-encoded key.
        :rtype: bytes
        """

    @classmethod
    def load_pkcs1(cls, keyfile, format='PEM'):
        """Loads a key in PKCS#1 DER or PEM format.

        :param keyfile: contents of a DER- or PEM-encoded file that contains
            the key.
        :type keyfile: bytes
        :param format: the format of the file to load; 'PEM' or 'DER'
        :type format: str

        :return: the loaded key
        :rtype: AbstractKey

        :raise ValueError: when *format* is neither 'PEM' nor 'DER'.
        """

        # Dispatch to the subclass implementation for the requested format.
        methods = {
            'PEM': cls._load_pkcs1_pem,
            'DER': cls._load_pkcs1_der,
        }

        method = cls._assert_format_exists(format, methods)
        return method(keyfile)

    @staticmethod
    def _assert_format_exists(file_format, methods):
        """Checks whether the given file format exists in 'methods'.

        :return: the method registered under *file_format*.
        :raise ValueError: when *file_format* is not a key of *methods*.
        """

        try:
            return methods[file_format]
        except KeyError:
            # Build a readable list of supported formats for the error message.
            formats = ', '.join(sorted(methods.keys()))
            raise ValueError('Unsupported format: %r, try one of %s' % (file_format,
                                                                        formats))

    def save_pkcs1(self, format='PEM'):
        """Saves the key in PKCS#1 DER or PEM format.

        :param format: the format to save; 'PEM' or 'DER'
        :type format: str
        :returns: the DER- or PEM-encoded key.
        :rtype: bytes

        :raise ValueError: when *format* is neither 'PEM' nor 'DER'.
        """

        # Dispatch to the subclass implementation for the requested format.
        methods = {
            'PEM': self._save_pkcs1_pem,
            'DER': self._save_pkcs1_der,
        }

        method = self._assert_format_exists(format, methods)
        return method()

    def blind(self, message, r):
        """Performs blinding on the message using random number 'r'.

        :param message: the message, as integer, to blind.
        :type message: int
        :param r: the random number to blind with.
        :type r: int
        :return: the blinded message.
        :rtype: int

        The blinding is such that message = unblind(decrypt(blind(encrypt(message))).

        See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29
        """

        # (message * r^e) mod n — the extra r^e factor hides the true
        # message from timing side-channels during decryption.
        return (message * pow(r, self.e, self.n)) % self.n

    def unblind(self, blinded, r):
        """Removes the blinding factor 'r' from a blinded message.

        :param blinded: the blinded message, as integer, to unblind.
        :param r: the random number that was used to blind the message.
        :return: the original message.

        The blinding is such that message = unblind(decrypt(blind(encrypt(message))).

        See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29
        """

        # Multiply by r^-1 mod n to cancel the r^e factor applied by blind().
        return (rsa.common.inverse(r, self.n) * blinded) % self.n
|
||||
|
||||
|
||||
class PublicKey(AbstractKey):
    """Represents a public RSA key.

    This key is also known as the 'encryption key'. It contains the 'n' and 'e'
    values.

    Supports attributes as well as dictionary-like access. Attribute access is
    faster, though.

    >>> PublicKey(5, 3)
    PublicKey(5, 3)

    >>> key = PublicKey(5, 3)
    >>> key.n
    5
    >>> key['n']
    5
    >>> key.e
    3
    >>> key['e']
    3

    """

    # Same slots as AbstractKey; a public key carries no extra state.
    __slots__ = ('n', 'e')

    def __getitem__(self, key):
        # Dictionary-like access: key['n'] is equivalent to key.n.
        return getattr(self, key)

    def __repr__(self):
        return 'PublicKey(%i, %i)' % (self.n, self.e)

    def __getstate__(self):
        """Returns the key as tuple for pickling."""
        return self.n, self.e

    def __setstate__(self, state):
        """Sets the key from tuple."""
        self.n, self.e = state

    def __eq__(self, other):
        if other is None:
            return False

        if not isinstance(other, PublicKey):
            return False

        return self.n == other.n and self.e == other.e

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        # Must agree with __eq__: keys that compare equal hash equally.
        return hash((self.n, self.e))

    @classmethod
    def _load_pkcs1_der(cls, keyfile):
        """Loads a key in PKCS#1 DER format.

        :param keyfile: contents of a DER-encoded file that contains the public
            key.
        :return: a PublicKey object

        First let's construct a DER encoded key:

        >>> import base64
        >>> b64der = 'MAwCBQCNGmYtAgMBAAE='
        >>> der = base64.standard_b64decode(b64der)

        This loads the file:

        >>> PublicKey._load_pkcs1_der(der)
        PublicKey(2367317549, 65537)

        """

        # pyasn1 is imported lazily so the rest of the module works without it.
        from pyasn1.codec.der import decoder
        from rsa.asn1 import AsnPubKey

        (priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey())
        return cls(n=int(priv['modulus']), e=int(priv['publicExponent']))

    def _save_pkcs1_der(self):
        """Saves the public key in PKCS#1 DER format.

        :returns: the DER-encoded public key.
        :rtype: bytes
        """

        from pyasn1.codec.der import encoder
        from rsa.asn1 import AsnPubKey

        # Create the ASN object
        asn_key = AsnPubKey()
        asn_key.setComponentByName('modulus', self.n)
        asn_key.setComponentByName('publicExponent', self.e)

        return encoder.encode(asn_key)

    @classmethod
    def _load_pkcs1_pem(cls, keyfile):
        """Loads a PKCS#1 PEM-encoded public key file.

        The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and
        after the "-----END RSA PUBLIC KEY-----" lines is ignored.

        :param keyfile: contents of a PEM-encoded file that contains the public
            key.
        :return: a PublicKey object
        """

        # Strip the PEM armour, then parse the inner DER payload.
        der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY')
        return cls._load_pkcs1_der(der)

    def _save_pkcs1_pem(self):
        """Saves a PKCS#1 PEM-encoded public key file.

        :return: contents of a PEM-encoded file that contains the public key.
        :rtype: bytes
        """

        der = self._save_pkcs1_der()
        return rsa.pem.save_pem(der, 'RSA PUBLIC KEY')

    @classmethod
    def load_pkcs1_openssl_pem(cls, keyfile):
        """Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL.

        These files can be recognised in that they start with BEGIN PUBLIC KEY
        rather than BEGIN RSA PUBLIC KEY.

        The contents of the file before the "-----BEGIN PUBLIC KEY-----" and
        after the "-----END PUBLIC KEY-----" lines is ignored.

        :param keyfile: contents of a PEM-encoded file that contains the public
            key, from OpenSSL.
        :type keyfile: bytes
        :return: a PublicKey object
        """

        der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY')
        return cls.load_pkcs1_openssl_der(der)

    @classmethod
    def load_pkcs1_openssl_der(cls, keyfile):
        """Loads a PKCS#1 DER-encoded public key file from OpenSSL.

        :param keyfile: contents of a DER-encoded file that contains the public
            key, from OpenSSL.
        :return: a PublicKey object
        :rtype: bytes

        :raise TypeError: when the header OID is not the RSA encryption OID.
        """

        from rsa.asn1 import OpenSSLPubKey
        from pyasn1.codec.der import decoder
        from pyasn1.type import univ

        (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey())

        # 1.2.840.113549.1.1.1 is the rsaEncryption OID.
        if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'):
            raise TypeError("This is not a DER-encoded OpenSSL-compatible public key")

        # NOTE(review): the [1:] appears to skip a leading element of the
        # BIT STRING payload in this pyasn1 representation — confirm against
        # the pyasn1 version bundled with this Lambda.
        return cls._load_pkcs1_der(keyinfo['key'][1:])
|
||||
|
||||
|
||||
class PrivateKey(AbstractKey):
    """Represents a private RSA key.

    This key is also known as the 'decryption key'. It contains the 'n', 'e',
    'd', 'p', 'q' and other values.

    Supports attributes as well as dictionary-like access. Attribute access is
    faster, though.

    >>> PrivateKey(3247, 65537, 833, 191, 17)
    PrivateKey(3247, 65537, 833, 191, 17)

    exp1, exp2 and coef will be calculated:

    >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
    >>> pk.exp1
    55063
    >>> pk.exp2
    10095
    >>> pk.coef
    50797

    """

    # d: private exponent; p, q: the two primes (n = p * q);
    # exp1, exp2, coef: precomputed CRT values derived in __init__.
    __slots__ = ('n', 'e', 'd', 'p', 'q', 'exp1', 'exp2', 'coef')

    def __init__(self, n, e, d, p, q):
        AbstractKey.__init__(self, n, e)
        self.d = d
        self.p = p
        self.q = q

        # Calculate exponents and coefficient.
        # These are the Chinese-Remainder-Theorem values stored in a
        # PKCS#1 RSAPrivateKey: d mod (p-1), d mod (q-1), q^-1 mod p.
        self.exp1 = int(d % (p - 1))
        self.exp2 = int(d % (q - 1))
        self.coef = rsa.common.inverse(q, p)

    def __getitem__(self, key):
        # Dictionary-like access: key['d'] is equivalent to key.d.
        return getattr(self, key)

    def __repr__(self):
        # %(n)i-style formatting works because __getitem__ makes the key
        # behave like a mapping.
        return 'PrivateKey(%(n)i, %(e)i, %(d)i, %(p)i, %(q)i)' % self

    def __getstate__(self):
        """Returns the key as tuple for pickling."""
        return self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef

    def __setstate__(self, state):
        """Sets the key from tuple."""
        self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef = state

    def __eq__(self, other):
        if other is None:
            return False

        if not isinstance(other, PrivateKey):
            return False

        return (self.n == other.n and
                self.e == other.e and
                self.d == other.d and
                self.p == other.p and
                self.q == other.q and
                self.exp1 == other.exp1 and
                self.exp2 == other.exp2 and
                self.coef == other.coef)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        # Must agree with __eq__: keys that compare equal hash equally.
        return hash((self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef))

    def blinded_decrypt(self, encrypted):
        """Decrypts the message using blinding to prevent side-channel attacks.

        :param encrypted: the encrypted message
        :type encrypted: int

        :returns: the decrypted message
        :rtype: int
        """

        # Fresh random blinding factor for every operation.
        blind_r = rsa.randnum.randint(self.n - 1)
        blinded = self.blind(encrypted, blind_r)  # blind before decrypting
        decrypted = rsa.core.decrypt_int(blinded, self.d, self.n)

        return self.unblind(decrypted, blind_r)

    def blinded_encrypt(self, message):
        """Encrypts the message using blinding to prevent side-channel attacks.

        :param message: the message to encrypt
        :type message: int

        :returns: the encrypted message
        :rtype: int
        """

        blind_r = rsa.randnum.randint(self.n - 1)
        blinded = self.blind(message, blind_r)  # blind before encrypting
        # Uses the private exponent d, i.e. this is the private-key
        # operation (the signing primitive).
        encrypted = rsa.core.encrypt_int(blinded, self.d, self.n)
        return self.unblind(encrypted, blind_r)

    @classmethod
    def _load_pkcs1_der(cls, keyfile):
        """Loads a key in PKCS#1 DER format.

        :param keyfile: contents of a DER-encoded file that contains the private
            key.
        :type keyfile: bytes
        :return: a PrivateKey object

        :raise ValueError: when the key's version field is not 0.

        First let's construct a DER encoded key:

        >>> import base64
        >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt'
        >>> der = base64.standard_b64decode(b64der)

        This loads the file:

        >>> PrivateKey._load_pkcs1_der(der)
        PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)

        """

        from pyasn1.codec.der import decoder
        (priv, _) = decoder.decode(keyfile)

        # ASN.1 contents of DER encoded private key:
        #
        # RSAPrivateKey ::= SEQUENCE {
        #     version Version,
        #     modulus INTEGER, -- n
        #     publicExponent INTEGER, -- e
        #     privateExponent INTEGER, -- d
        #     prime1 INTEGER, -- p
        #     prime2 INTEGER, -- q
        #     exponent1 INTEGER, -- d mod (p-1)
        #     exponent2 INTEGER, -- d mod (q-1)
        #     coefficient INTEGER, -- (inverse of q) mod p
        #     otherPrimeInfos OtherPrimeInfos OPTIONAL
        # }

        if priv[0] != 0:
            raise ValueError('Unable to read this file, version %s != 0' % priv[0])

        # n, e, d, p, q — the constructor recomputes exp1/exp2/coef itself.
        as_ints = map(int, priv[1:6])
        key = cls(*as_ints)

        exp1, exp2, coef = map(int, priv[6:9])

        # The recomputed CRT values take precedence over the stored ones;
        # a mismatch only triggers a warning.
        if (key.exp1, key.exp2, key.coef) != (exp1, exp2, coef):
            warnings.warn(
                'You have provided a malformed keyfile. Either the exponents '
                'or the coefficient are incorrect. Using the correct values '
                'instead.',
                UserWarning,
            )

        return key

    def _save_pkcs1_der(self):
        """Saves the private key in PKCS#1 DER format.

        :returns: the DER-encoded private key.
        :rtype: bytes
        """

        from pyasn1.type import univ, namedtype
        from pyasn1.codec.der import encoder

        # Local ASN.1 spec matching the RSAPrivateKey structure documented
        # in _load_pkcs1_der above.
        class AsnPrivKey(univ.Sequence):
            componentType = namedtype.NamedTypes(
                namedtype.NamedType('version', univ.Integer()),
                namedtype.NamedType('modulus', univ.Integer()),
                namedtype.NamedType('publicExponent', univ.Integer()),
                namedtype.NamedType('privateExponent', univ.Integer()),
                namedtype.NamedType('prime1', univ.Integer()),
                namedtype.NamedType('prime2', univ.Integer()),
                namedtype.NamedType('exponent1', univ.Integer()),
                namedtype.NamedType('exponent2', univ.Integer()),
                namedtype.NamedType('coefficient', univ.Integer()),
            )

        # Create the ASN object
        asn_key = AsnPrivKey()
        asn_key.setComponentByName('version', 0)
        asn_key.setComponentByName('modulus', self.n)
        asn_key.setComponentByName('publicExponent', self.e)
        asn_key.setComponentByName('privateExponent', self.d)
        asn_key.setComponentByName('prime1', self.p)
        asn_key.setComponentByName('prime2', self.q)
        asn_key.setComponentByName('exponent1', self.exp1)
        asn_key.setComponentByName('exponent2', self.exp2)
        asn_key.setComponentByName('coefficient', self.coef)

        return encoder.encode(asn_key)

    @classmethod
    def _load_pkcs1_pem(cls, keyfile):
        """Loads a PKCS#1 PEM-encoded private key file.

        The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and
        after the "-----END RSA PRIVATE KEY-----" lines is ignored.

        :param keyfile: contents of a PEM-encoded file that contains the private
            key.
        :type keyfile: bytes
        :return: a PrivateKey object
        """

        # Strip the PEM armour, then parse the inner DER payload.
        der = rsa.pem.load_pem(keyfile, b'RSA PRIVATE KEY')
        return cls._load_pkcs1_der(der)

    def _save_pkcs1_pem(self):
        """Saves a PKCS#1 PEM-encoded private key file.

        :return: contents of a PEM-encoded file that contains the private key.
        :rtype: bytes
        """

        der = self._save_pkcs1_der()
        return rsa.pem.save_pem(der, b'RSA PRIVATE KEY')
|
||||
|
||||
|
||||
def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True):
    """Returns a tuple of two different primes of nbits bits each.

    The resulting p * q has exactly 2 * nbits bits, and the returned p and q
    will not be equal.

    :param nbits: the number of bits in each of p and q.
    :param getprime_func: the getprime function, defaults to
        :py:func:`rsa.prime.getprime`.

        *Introduced in Python-RSA 3.1*

    :param accurate: whether to enable accurate mode or not.
    :returns: (p, q), where p > q

    >>> (p, q) = find_p_q(128)
    >>> from rsa import common
    >>> common.bit_size(p * q)
    256

    When not in accurate mode, the number of bits can be slightly less

    >>> (p, q) = find_p_q(128, accurate=False)
    >>> from rsa import common
    >>> common.bit_size(p * q) <= 256
    True
    >>> common.bit_size(p * q) > 240
    True

    """

    total_bits = 2 * nbits

    # Keep p and q some distance apart: primes that are too close together
    # make n easy to factor.
    offset = nbits // 16
    pbits = nbits + offset
    qbits = nbits - offset

    # Initial candidates.
    log.debug('find_p_q(%i): Finding p', nbits)
    p = getprime_func(pbits)
    log.debug('find_p_q(%i): Finding q', nbits)
    q = getprime_func(qbits)

    def acceptable(prime_p, prime_q):
        """True when the primes differ and, in accurate mode, their product
        has exactly total_bits bits."""

        if prime_p == prime_q:
            return False

        if not accurate:
            return True

        return rsa.common.bit_size(prime_p * prime_q) == total_bits

    # Regenerate p and q alternately until the pair is acceptable.
    regenerate_p = False
    while not acceptable(p, q):
        if regenerate_p:
            p = getprime_func(pbits)
        else:
            q = getprime_func(qbits)
        regenerate_p = not regenerate_p

    # CRT-based decryption assumes p > q, see
    # http://www.di-mgt.com.au/rsa_alg.html#crt
    return max(p, q), min(p, q)
|
||||
|
||||
|
||||
def calculate_keys_custom_exponent(p, q, exponent):
    """Calculates an encryption and a decryption key given p, q and an exponent,
    and returns them as a tuple (e, d)

    :param p: the first large prime
    :param q: the second large prime
    :param exponent: the exponent for the key; only change this if you know
        what you're doing, as the exponent influences how difficult your
        private key can be cracked. A very common choice for e is 65537.
    :type exponent: int

    :raise ValueError: when no valid private exponent exists for this (p, q,
        exponent) combination.
    """

    # Euler's totient of n = p * q.
    phi_n = (p - 1) * (q - 1)

    try:
        d = rsa.common.inverse(exponent, phi_n)
    except rsa.common.NotRelativePrimeError as ex:
        # Re-raise with a message that names e and phi_n explicitly.
        msg = ("e (%d) and phi_n (%d) are not relatively prime (divider=%i)"
               % (exponent, phi_n, ex.d))
        raise rsa.common.NotRelativePrimeError(exponent, phi_n, ex.d, msg=msg)

    # Sanity check: d really is the modular inverse of e.
    if (exponent * d) % phi_n != 1:
        raise ValueError(
            "e (%d) and d (%d) are not mult. inv. modulo phi_n (%d)"
            % (exponent, d, phi_n))

    return exponent, d
|
||||
|
||||
|
||||
def calculate_keys(p, q):
    """Calculates an encryption and a decryption key given p and q, and
    returns them as a tuple (e, d)

    :param p: the first large prime
    :param q: the second large prime

    :return: tuple (e, d) with the encryption and decryption exponents.
    """

    # Thin convenience wrapper around the custom-exponent variant.
    return calculate_keys_custom_exponent(p, q, exponent=DEFAULT_EXPONENT)
|
||||
|
||||
|
||||
def gen_keys(nbits, getprime_func, accurate=True, exponent=DEFAULT_EXPONENT):
    """Generate RSA keys of nbits bits. Returns (p, q, e, d).

    Note: this can take a long time, depending on the key size.

    :param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and
        ``q`` will use ``nbits/2`` bits.
    :param getprime_func: either :py:func:`rsa.prime.getprime` or a function
        with similar signature.
    :param exponent: the exponent for the key; only change this if you know
        what you're doing, as the exponent influences how difficult your
        private key can be cracked. A very common choice for e is 65537.
    :type exponent: int
    """

    # Some (p, q) pairs have no modular inverse for the chosen exponent;
    # keep drawing new primes until calculate_keys succeeds.
    while True:
        p, q = find_p_q(nbits // 2, getprime_func, accurate)

        try:
            e, d = calculate_keys_custom_exponent(p, q, exponent=exponent)
        except ValueError:
            continue

        return p, q, e, d
|
||||
|
||||
|
||||
def newkeys(nbits, accurate=True, poolsize=1, exponent=DEFAULT_EXPONENT):
    """Generates public and private keys, and returns them as (pub, priv).

    The public key is also known as the 'encryption key', and is a
    :py:class:`rsa.PublicKey` object. The private key is also known as the
    'decryption key' and is a :py:class:`rsa.PrivateKey` object.

    :param nbits: the number of bits required to store ``n = p*q``.
    :param accurate: when True, ``n`` will have exactly the number of bits you
        asked for. However, this makes key generation much slower. When False,
        ``n`` may have slightly less bits.
    :param poolsize: the number of processes to use to generate the prime
        numbers. If set to a number > 1, a parallel algorithm will be used.
        This requires Python 2.6 or newer.
    :param exponent: the exponent for the key; only change this if you know
        what you're doing, as the exponent influences how difficult your
        private key can be cracked. A very common choice for e is 65537.
    :type exponent: int

    :returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`)

    :raise ValueError: when ``nbits < 16`` or ``poolsize < 1``.

    The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires
    Python 2.6 or newer.
    """

    # Reject parameter values that cannot produce a usable key.
    if nbits < 16:
        raise ValueError('Key too small')
    if poolsize < 1:
        raise ValueError('Pool size (%i) should be >= 1' % poolsize)

    # Pick the prime generator: parallel when a pool is requested,
    # single-process otherwise.
    if poolsize > 1:
        import functools
        from rsa import parallel

        getprime_func = functools.partial(parallel.getprime, poolsize=poolsize)
    else:
        getprime_func = rsa.prime.getprime

    # Generate the key components and assemble the key objects.
    p, q, e, d = gen_keys(nbits, getprime_func, accurate=accurate, exponent=exponent)
    n = p * q

    return (
        PublicKey(n, e),
        PrivateKey(n, e, d, p, q)
    )
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = ['PublicKey', 'PrivateKey', 'newkeys']

if __name__ == '__main__':
    import doctest

    # Key generation is randomised, so repeat the doctests (up to 100 runs)
    # to get meaningful coverage; stop early on the first failure.
    try:
        for count in range(100):
            (failures, tests) = doctest.testmod()
            if failures:
                break

            # Progress report after the first run and then every 10 runs.
            if (count % 10 == 0 and count) or count == 1:
                print('%i times' % count)
    except KeyboardInterrupt:
        print('Aborted')
    else:
        print('Doctests done')
|
||||
74
Lambdas/Websocket Authorizer/rsa/machine_size.py
Normal file
74
Lambdas/Websocket Authorizer/rsa/machine_size.py
Normal file
@@ -0,0 +1,74 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Detection of 32-bit and 64-bit machines and byte alignment."""
|
||||
|
||||
import sys
|
||||
|
||||
# Largest values representable in the platform int and in fixed-width
# unsigned-friendly signed integers.
MAX_INT = sys.maxsize
MAX_INT64 = (1 << 63) - 1
MAX_INT32 = (1 << 31) - 1
MAX_INT16 = (1 << 15) - 1

# Word size of the host interpreter: 32-bit when sys.maxsize says so,
# otherwise assume a modern 64-bit platform.
if MAX_INT == MAX_INT32:
    MACHINE_WORD_SIZE = 32
else:
    MACHINE_WORD_SIZE = 64


def get_word_alignment(num, force_arch=64,
                       _machine_word_size=MACHINE_WORD_SIZE):
    """
    Returns alignment details for the given number based on the platform
    Python is running on.

    :param num:
        Unsigned integral number.
    :param force_arch:
        If you don't want to use 64-bit unsigned chunks, set this to
        anything other than 64. 32-bit chunks will be preferred then.
        Default 64 will be used when on a 64-bit machine.
    :param _machine_word_size:
        (Internal) The machine word size used for alignment.
    :returns:
        4-tuple::

            (word_bits, word_bytes,
             max_uint, packing_format_type)
    """
    # Guard-clause cascade, widest word first; the struct format chars are
    # Q/L/H/B for 64/32/16/8-bit unsigned integers respectively.
    if num > 0xffffffff and force_arch == 64 and _machine_word_size >= 64:
        return 64, 8, 0xffffffffffffffff, "Q"

    if num > 0xffff:
        return 32, 4, 0xffffffff, "L"

    if num > 0xff:
        return 16, 2, 0xffff, "H"

    return 8, 1, 0xff, "B"
|
||||
101
Lambdas/Websocket Authorizer/rsa/parallel.py
Normal file
101
Lambdas/Websocket Authorizer/rsa/parallel.py
Normal file
@@ -0,0 +1,101 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Functions for parallel computation on multiple cores.
|
||||
|
||||
Introduced in Python-RSA 3.1.
|
||||
|
||||
.. note::
|
||||
|
||||
Requires Python 2.6 or newer.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import multiprocessing as mp
|
||||
|
||||
from rsa._compat import range
|
||||
import rsa.prime
|
||||
import rsa.randnum
|
||||
|
||||
|
||||
def _find_prime(nbits, pipe):
    """Worker loop: draw random odd integers of *nbits* bits until one is
    prime, send it down *pipe* and exit."""

    while True:
        candidate = rsa.randnum.read_random_odd_int(nbits)

        if not rsa.prime.is_prime(candidate):
            continue

        pipe.send(candidate)
        return
|
||||
|
||||
|
||||
def getprime(nbits, poolsize):
    """Returns a prime number that can be stored in 'nbits' bits.

    Works in multiple threads at the same time.

    :param nbits: the number of bits the prime must fit in.
    :param poolsize: number of worker processes racing to find a prime.
    :returns: the first prime found by any worker.

    >>> p = getprime(128, 3)
    >>> rsa.prime.is_prime(p-1)
    False
    >>> rsa.prime.is_prime(p)
    True
    >>> rsa.prime.is_prime(p+1)
    False

    >>> from rsa import common
    >>> common.bit_size(p) == 128
    True

    """

    (pipe_recv, pipe_send) = mp.Pipe(duplex=False)

    # Track every started worker so they can always be terminated: the
    # previous version only terminated workers on the success path, so an
    # exception from recv() (e.g. KeyboardInterrupt) leaked live child
    # processes.
    procs = []
    try:
        for _ in range(poolsize):
            proc = mp.Process(target=_find_prime, args=(nbits, pipe_send))
            proc.start()
            procs.append(proc)

        # The first worker to find a prime wins; its result is our answer.
        result = pipe_recv.recv()
    finally:
        pipe_recv.close()
        pipe_send.close()

        # Stop all workers, whether or not a result was received.
        for proc in procs:
            proc.terminate()

    return result
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = ['getprime']

if __name__ == '__main__':
    # getprime() is randomised, so repeat the doctests for coverage; the
    # message previously claimed "1000x" while the loop only runs 100 times.
    print('Running doctests 100x or until failure')
    import doctest

    for count in range(100):
        (failures, tests) = doctest.testmod()
        if failures:
            break

        # Progress report every 10 runs.
        if count % 10 == 0 and count:
            print('%i times' % count)

    print('Doctests done')
|
||||
126
Lambdas/Websocket Authorizer/rsa/pem.py
Normal file
126
Lambdas/Websocket Authorizer/rsa/pem.py
Normal file
@@ -0,0 +1,126 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Functions that load and write PEM-encoded files."""
|
||||
|
||||
import base64
|
||||
|
||||
from rsa._compat import is_bytes, range
|
||||
|
||||
|
||||
def _markers(pem_marker):
    """Build the BEGIN/END PEM marker lines for *pem_marker*.

    :param pem_marker: marker text such as 'RSA PRIVATE KEY', as str or bytes.
    :return: tuple ``(start_marker, end_marker)``, both bytes.
    """

    # Normalise to bytes; text markers are assumed to be ASCII.
    if not is_bytes(pem_marker):
        pem_marker = pem_marker.encode('ascii')

    begin_line = b'-----BEGIN ' + pem_marker + b'-----'
    end_line = b'-----END ' + pem_marker + b'-----'
    return begin_line, end_line
|
||||
|
||||
|
||||
def load_pem(contents, pem_marker):
    """Extract and base64-decode the payload of a PEM document.

    :param contents: the PEM file contents, as text or ASCII bytes.
    :param pem_marker: the PEM marker, e.g. 'RSA PRIVATE KEY' for content
        delimited by '-----BEGIN RSA PRIVATE KEY-----' and
        '-----END RSA PRIVATE KEY-----'.
    :return: the base64-decoded content between the start and end markers.

    @raise ValueError: when the content is invalid, for example when the start
        marker cannot be found.
    """

    # Work on bytes throughout; text input is assumed to be ASCII.
    if not is_bytes(contents):
        contents = contents.encode('ascii')

    (pem_start, pem_end) = _markers(pem_marker)

    body_lines = []
    inside = False

    for raw_line in contents.splitlines():
        stripped = raw_line.strip()

        if not stripped:
            continue  # blank lines are ignored everywhere

        if stripped == pem_start:
            if inside:
                raise ValueError('Seen start marker "%s" twice' % pem_start)
            inside = True
            continue

        if not inside:
            continue  # ignore anything before the start marker

        if stripped == pem_end:
            inside = False
            break  # ignore anything after the end marker

        # Skip header fields such as 'Proc-Type: ...'
        if b':' in stripped:
            continue

        body_lines.append(stripped)

    # Sanity checks, in the same order as the original implementation.
    if not body_lines:
        raise ValueError('No PEM start marker "%s" found' % pem_start)

    if inside:
        raise ValueError('No PEM end marker "%s" found' % pem_end)

    return base64.standard_b64decode(b''.join(body_lines))
|
||||
|
||||
|
||||
def save_pem(contents, pem_marker):
    """Encode *contents* as a PEM-formatted byte string.

    :param contents: the binary contents to wrap in PEM format.
    :param pem_marker: the PEM marker, e.g. 'RSA PRIVATE KEY' for output
        delimited by '-----BEGIN RSA PRIVATE KEY-----' and
        '-----END RSA PRIVATE KEY-----'.
    :return: the full PEM document as bytes, terminated by a newline.
    """

    # Build the BEGIN/END marker lines inline (same result as _markers()).
    if not isinstance(pem_marker, bytes):
        pem_marker = pem_marker.encode('ascii')
    pem_start = b'-----BEGIN ' + pem_marker + b'-----'
    pem_end = b'-----END ' + pem_marker + b'-----'

    b64 = base64.standard_b64encode(contents).replace(b'\n', b'')

    lines = [pem_start]
    # Wrap the base64 body at 64 characters per line, PEM-style.
    lines.extend(b64[offset:offset + 64] for offset in range(0, len(b64), 64))
    lines.append(pem_end)
    lines.append(b'')  # ensures the joined result ends with a newline

    return b'\n'.join(lines)
|
||||
439
Lambdas/Websocket Authorizer/rsa/pkcs1.py
Normal file
439
Lambdas/Websocket Authorizer/rsa/pkcs1.py
Normal file
@@ -0,0 +1,439 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Functions for PKCS#1 version 1.5 encryption and signing
|
||||
|
||||
This module implements certain functionality from PKCS#1 version 1.5. For a
|
||||
very clear example, read http://www.di-mgt.com.au/rsa_alg.html#pkcs1schemes
|
||||
|
||||
At least 8 bytes of random padding is used when encrypting a message. This makes
|
||||
these methods much more secure than the ones in the ``rsa`` module.
|
||||
|
||||
WARNING: this module leaks information when decryption fails. The exceptions
|
||||
that are raised contain the Python traceback information, which can be used to
|
||||
deduce where in the process the failure occurred. DO NOT PASS SUCH INFORMATION
|
||||
to your users.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
from rsa._compat import range
|
||||
from rsa import common, transform, core
|
||||
|
||||
# ASN.1 DigestInfo prefixes identifying the hash algorithm embedded in a
# PKCS#1 v1.5 signature block.
HASH_ASN1 = {
    'MD5': b'\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05\x05\x00\x04\x10',
    'SHA-1': b'\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14',
    'SHA-224': b'\x30\x2d\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x04\x05\x00\x04\x1c',
    'SHA-256': b'\x30\x31\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20',
    'SHA-384': b'\x30\x41\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02\x05\x00\x04\x30',
    'SHA-512': b'\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40',
}

# Maps the hash names above to their hashlib constructors.
HASH_METHODS = {
    'MD5': hashlib.md5,
    'SHA-1': hashlib.sha1,
    'SHA-224': hashlib.sha224,
    'SHA-256': hashlib.sha256,
    'SHA-384': hashlib.sha384,
    'SHA-512': hashlib.sha512,
}


class CryptoError(Exception):
    """Base class for all exceptions raised by this module."""


class DecryptionError(CryptoError):
    """Raised when a ciphertext cannot be decrypted."""


class VerificationError(CryptoError):
    """Raised when a signature does not match the message."""
|
||||
|
||||
|
||||
def _pad_for_encryption(message, target_length):
|
||||
r"""Pads the message for encryption, returning the padded message.
|
||||
|
||||
:return: 00 02 RANDOM_DATA 00 MESSAGE
|
||||
|
||||
>>> block = _pad_for_encryption(b'hello', 16)
|
||||
>>> len(block)
|
||||
16
|
||||
>>> block[0:2]
|
||||
b'\x00\x02'
|
||||
>>> block[-6:]
|
||||
b'\x00hello'
|
||||
|
||||
"""
|
||||
|
||||
max_msglength = target_length - 11
|
||||
msglength = len(message)
|
||||
|
||||
if msglength > max_msglength:
|
||||
raise OverflowError('%i bytes needed for message, but there is only'
|
||||
' space for %i' % (msglength, max_msglength))
|
||||
|
||||
# Get random padding
|
||||
padding = b''
|
||||
padding_length = target_length - msglength - 3
|
||||
|
||||
# We remove 0-bytes, so we'll end up with less padding than we've asked for,
|
||||
# so keep adding data until we're at the correct length.
|
||||
while len(padding) < padding_length:
|
||||
needed_bytes = padding_length - len(padding)
|
||||
|
||||
# Always read at least 8 bytes more than we need, and trim off the rest
|
||||
# after removing the 0-bytes. This increases the chance of getting
|
||||
# enough bytes, especially when needed_bytes is small
|
||||
new_padding = os.urandom(needed_bytes + 5)
|
||||
new_padding = new_padding.replace(b'\x00', b'')
|
||||
padding = padding + new_padding[:needed_bytes]
|
||||
|
||||
assert len(padding) == padding_length
|
||||
|
||||
return b''.join([b'\x00\x02',
|
||||
padding,
|
||||
b'\x00',
|
||||
message])
|
||||
|
||||
|
||||
def _pad_for_signing(message, target_length):
|
||||
r"""Pads the message for signing, returning the padded message.
|
||||
|
||||
The padding is always a repetition of FF bytes.
|
||||
|
||||
:return: 00 01 PADDING 00 MESSAGE
|
||||
|
||||
>>> block = _pad_for_signing(b'hello', 16)
|
||||
>>> len(block)
|
||||
16
|
||||
>>> block[0:2]
|
||||
b'\x00\x01'
|
||||
>>> block[-6:]
|
||||
b'\x00hello'
|
||||
>>> block[2:-6]
|
||||
b'\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
|
||||
"""
|
||||
|
||||
max_msglength = target_length - 11
|
||||
msglength = len(message)
|
||||
|
||||
if msglength > max_msglength:
|
||||
raise OverflowError('%i bytes needed for message, but there is only'
|
||||
' space for %i' % (msglength, max_msglength))
|
||||
|
||||
padding_length = target_length - msglength - 3
|
||||
|
||||
return b''.join([b'\x00\x01',
|
||||
padding_length * b'\xff',
|
||||
b'\x00',
|
||||
message])
|
||||
|
||||
|
||||
def encrypt(message, pub_key):
    """Encrypt *message* with PKCS#1 v1.5 padding.

    :param message: the message to encrypt. Must be a byte string no longer
        than ``k - 11`` bytes, where ``k`` is the byte size of the public
        key's ``n`` component.
    :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with.
    :return: the ciphertext, exactly as long as ``pub_key.n`` in bytes.
    :raise OverflowError: when the message is too large to fit in the
        padded block.
    """

    keylength = common.byte_size(pub_key.n)
    padded = _pad_for_encryption(message, keylength)

    # Pad -> integer -> modular exponentiation -> fixed-width bytes.
    as_int = transform.bytes2int(padded)
    encrypted_int = core.encrypt_int(as_int, pub_key.e, pub_key.n)
    return transform.int2bytes(encrypted_int, keylength)
|
||||
|
||||
|
||||
def decrypt(crypto, priv_key):
    r"""Decrypt a PKCS#1 v1.5 ciphertext.

    Decryption is considered failed when the cleartext does not start with
    the bytes 00 02, or when the 00 separator between the padding and the
    message cannot be found.

    :param crypto: the crypto text as returned by :py:func:`rsa.encrypt`.
    :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with.
    :return: the decrypted message bytes.
    :raise DecryptionError: when the decryption fails. No details are given
        as to why, since that would leak information about the private key.

    .. warning::

        Never show users the stack trace of a
        :py:class:`rsa.pkcs1.DecryptionError`: it reveals where in the code
        the failure happened, which leaks a little information about the key.
    """

    blocksize = common.byte_size(priv_key.n)

    # Blinded decryption thwarts some timing attacks on the exponentiation.
    decrypted_int = priv_key.blinded_decrypt(transform.bytes2int(crypto))
    cleartext = transform.int2bytes(decrypted_int, blocksize)

    # Without the 00 02 prefix the block was not valid PKCS#1 v1.5 padding.
    if cleartext[0:2] != b'\x00\x02':
        raise DecryptionError('Decryption failed')

    # Locate the 00 byte separating the random padding from the message.
    try:
        sep_idx = cleartext.index(b'\x00', 2)
    except ValueError:
        raise DecryptionError('Decryption failed')

    return cleartext[sep_idx + 1:]
|
||||
|
||||
|
||||
def sign_hash(hash_value, priv_key, hash_method):
    """Sign a precomputed message digest with the private key.

    This produces a "detached signature": the message itself is not altered.

    :param hash_value: the precomputed digest to sign.
    :param priv_key: the :py:class:`rsa.PrivateKey` to sign with.
    :param hash_method: the hash method used on the message. Use 'MD5',
        'SHA-1', 'SHA-224', 'SHA-256', 'SHA-384' or 'SHA-512'.
    :return: a message signature block.
    :raise OverflowError: if the private key is too small to contain the
        requested hash.
    :raise ValueError: if *hash_method* is not a known hash name.
    """

    if hash_method not in HASH_ASN1:
        raise ValueError('Invalid hash method: %s' % hash_method)

    # Prefix the digest with its ASN.1 DigestInfo so verify() can detect
    # which hash was used.
    cleartext = HASH_ASN1[hash_method] + hash_value

    keylength = common.byte_size(priv_key.n)
    padded = _pad_for_signing(cleartext, keylength)

    # Blinded private-key operation, then back to fixed-width bytes.
    signed_int = priv_key.blinded_encrypt(transform.bytes2int(padded))
    return transform.int2bytes(signed_int, keylength)
|
||||
|
||||
|
||||
def sign(message, priv_key, hash_method):
    """Hash *message* and sign the digest with the private key.

    This produces a "detached signature": the message itself is not altered.

    :param message: the message to sign. Can be an 8-bit string or a
        file-like object; anything with a ``read()`` method is treated as
        a file.
    :param priv_key: the :py:class:`rsa.PrivateKey` to sign with.
    :param hash_method: the hash method used on the message. Use 'MD5',
        'SHA-1', 'SHA-224', 'SHA-256', 'SHA-384' or 'SHA-512'.
    :return: a message signature block.
    :raise OverflowError: if the private key is too small to contain the
        requested hash.
    """

    digest = compute_hash(message, hash_method)
    return sign_hash(digest, priv_key, hash_method)
|
||||
|
||||
|
||||
def verify(message, signature, pub_key):
    """Verify that *signature* matches *message*.

    The hash method is detected automatically from the signature's embedded
    ASN.1 DigestInfo.

    :param message: the signed message. Can be an 8-bit string or a
        file-like object; anything with a ``read()`` method is treated as
        a file.
    :param signature: the signature block, as created with :py:func:`rsa.sign`.
    :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message.
    :raise VerificationError: when the signature doesn't match the message.
    :returns: the name of the used hash.
    """

    keylength = common.byte_size(pub_key.n)

    # Undo the private-key operation with the public exponent.
    clearsig = transform.int2bytes(
        core.decrypt_int(transform.bytes2int(signature), pub_key.e, pub_key.n),
        keylength)

    # Detect the hash method, then rebuild the block we expect to see.
    method_name = _find_method_hash(clearsig)
    message_hash = compute_hash(message, method_name)
    expected = _pad_for_signing(HASH_ASN1[method_name] + message_hash, keylength)

    if expected != clearsig:
        raise VerificationError('Verification failed')

    return method_name
|
||||
|
||||
|
||||
def find_signature_hash(signature, pub_key):
    """Return the name of the hash used to create *signature*.

    Use :py:func:`rsa.verify` instead if you also want to verify the
    message; it returns the hash name as well.

    :param signature: the signature block, as created with :py:func:`rsa.sign`.
    :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message.
    :returns: the name of the used hash.
    """

    keylength = common.byte_size(pub_key.n)
    clear_int = core.decrypt_int(transform.bytes2int(signature),
                                 pub_key.e, pub_key.n)
    clearsig = transform.int2bytes(clear_int, keylength)

    return _find_method_hash(clearsig)
|
||||
|
||||
|
||||
def yield_fixedblocks(infile, blocksize):
    """Yield successive chunks of at most *blocksize* bytes from *infile*.

    Stops at end-of-file; only the final chunk may be shorter than
    *blocksize*.

    :param infile: file-like object to read from.
    :param blocksize: chunk size in bytes.
    :returns: a generator that yields the contents of each block.
    """

    while True:
        chunk = infile.read(blocksize)
        if not chunk:
            break

        yield chunk

        if len(chunk) < blocksize:
            # A short read means end-of-file was reached.
            break
|
||||
|
||||
|
||||
def compute_hash(message, method_name):
    """Return the digest of *message* computed with the named hash method.

    :param message: the message to hash. Can be an 8-bit string or a
        file-like object; anything with a callable ``read`` attribute is
        consumed in 1 KiB blocks.
    :param method_name: the hash method, must be a key of
        :py:const:`HASH_METHODS`.
    :raise ValueError: when *method_name* is not a known hash name.
    """

    if method_name not in HASH_METHODS:
        raise ValueError('Invalid hash method: %s' % method_name)

    hasher = HASH_METHODS[method_name]()

    if callable(getattr(message, 'read', None)):
        # File-like object: stream it in fixed-size chunks.
        for chunk in yield_fixedblocks(message, 1024):
            hasher.update(chunk)
    else:
        # Plain bytes: hash in one go.
        hasher.update(message)

    return hasher.digest()
|
||||
|
||||
|
||||
def _find_method_hash(clearsig):
    """Identify which hash method produced a padded signature block.

    Scans *clearsig* for one of the known ASN.1 DigestInfo prefixes.

    :param clearsig: full padded ASN.1-plus-hash block.
    :return: the name of the hash method found.
    :raise VerificationError: when no known ASN.1 code is present.
    """

    for name, asn1_prefix in HASH_ASN1.items():
        if asn1_prefix in clearsig:
            return name

    raise VerificationError('Verification failed')
|
||||
|
||||
|
||||
__all__ = ['encrypt', 'decrypt', 'sign', 'verify',
           'DecryptionError', 'VerificationError', 'CryptoError']

if __name__ == '__main__':
    print('Running doctests 1000x or until failure')
    import doctest

    for round_no in range(1000):
        failures, _ = doctest.testmod()
        if failures:
            break
        # Progress marker every 100 successful rounds (skip round 0).
        if round_no and round_no % 100 == 0:
            print('%i times' % round_no)

    print('Doctests done')
|
||||
103
Lambdas/Websocket Authorizer/rsa/pkcs1_v2.py
Normal file
103
Lambdas/Websocket Authorizer/rsa/pkcs1_v2.py
Normal file
@@ -0,0 +1,103 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Functions for PKCS#1 version 2 encryption and signing
|
||||
|
||||
This module implements certain functionality from PKCS#1 version 2. Main
|
||||
documentation is RFC 2437: https://tools.ietf.org/html/rfc2437
|
||||
"""
|
||||
|
||||
from rsa._compat import range
|
||||
from rsa import (
|
||||
common,
|
||||
pkcs1,
|
||||
transform,
|
||||
)
|
||||
|
||||
|
||||
def mgf1(seed, length, hasher='SHA-1'):
    """Mask Generation Function 1, per RFC 2437.

    Stretches *seed* into a pseudo-random mask of *length* octets by
    concatenating hashes of ``seed || counter`` for an incrementing
    4-octet counter. OAEP's plaintext-awareness relies on the random
    nature of this output, which in turn relies on the underlying hash.

    :param bytes seed: seed from which the mask is generated, an octet string.
    :param int length: intended length in octets of the mask, at most
        2^32 * hLen.
    :param str hasher: hash function name (hLen denotes the length in
        octets of its output).

    :return: mask, an octet string of length `length`
    :rtype: bytes

    :raise OverflowError: when `length` is too large for the specified `hasher`
    :raise ValueError: when specified `hasher` is invalid
    """

    try:
        hash_length = pkcs1.HASH_METHODS[hasher]().digest_size
    except KeyError:
        raise ValueError(
            'Invalid `hasher` specified. Please select one of: {hash_list}'.format(
                hash_list=', '.join(sorted(pkcs1.HASH_METHODS.keys()))
            )
        )

    # RFC 2437 step 1: refuse masks longer than 2^32 * hLen.
    if length > (2 ** 32 * hash_length):
        raise OverflowError(
            "Desired length should be at most 2**32 times the hasher's output "
            "length ({hash_length} for {hasher} function)".format(
                hash_length=hash_length,
                hasher=hasher,
            )
        )

    # Concatenate Hash(seed || C) for C = 0 .. ceil(length / hLen), where C
    # is encoded as a 4-octet big-endian counter (the I2OSP primitive).
    chunks = []
    for counter in range(common.ceil_div(length, hash_length) + 1):
        counter_octets = transform.int2bytes(counter, fill_size=4)
        chunks.append(pkcs1.compute_hash(seed + counter_octets, method_name=hasher))

    # Keep only the leading `length` octets as the mask.
    return b''.join(chunks)[:length]
|
||||
|
||||
|
||||
__all__ = [
    'mgf1',
]

if __name__ == '__main__':
    print('Running doctests 1000x or until failure')
    import doctest

    for attempt in range(1000):
        failures, _ = doctest.testmod()
        if failures:
            break
        # Progress marker every 100 successful rounds (skip round 0).
        if attempt and attempt % 100 == 0:
            print('%i times' % attempt)

    print('Doctests done')
|
||||
201
Lambdas/Websocket Authorizer/rsa/prime.py
Normal file
201
Lambdas/Websocket Authorizer/rsa/prime.py
Normal file
@@ -0,0 +1,201 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Numerical functions related to primes.
|
||||
|
||||
Implementation based on the book Algorithm Design by Michael T. Goodrich and
|
||||
Roberto Tamassia, 2002.
|
||||
"""
|
||||
|
||||
from rsa._compat import range
|
||||
import rsa.common
|
||||
import rsa.randnum
|
||||
|
||||
__all__ = ['getprime', 'are_relatively_prime']
|
||||
|
||||
|
||||
def gcd(p, q):
    """Return the greatest common divisor of p and q (Euclid's algorithm).

    >>> gcd(48, 180)
    12
    """

    a, b = p, q
    while b:
        a, b = b, a % b
    return a
|
||||
|
||||
|
||||
def get_primality_testing_rounds(number):
    """Return the minimum Miller-Rabin round count for *number*'s bit size.

    Round counts follow NIST FIPS 186-4, Appendix C, Table C.3, for an
    error probability of 2 ** (-100):

    * p, q bitsize: 512; rounds: 7
    * p, q bitsize: 1024; rounds: 4
    * p, q bitsize: 1536; rounds: 3

    See: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf
    Smaller bit sizes get an arbitrary 10 rounds.
    """

    bitsize = rsa.common.bit_size(number)

    # Largest matching threshold wins; fall through to the default.
    for threshold, rounds in ((1536, 3), (1024, 4), (512, 7)):
        if bitsize >= threshold:
            return rounds
    return 10
|
||||
|
||||
|
||||
def miller_rabin_primality_testing(n, k):
    """Miller-Rabin probabilistic primality test.

    Reports "composite" with certainty; reports "prime" with error
    probability at most 4**-k. For reference and implementation example,
    see: https://en.wikipedia.org/wiki/Miller%E2%80%93Rabin_primality_test

    :param n: Integer to be tested for primality.
    :type n: int
    :param k: Number of rounds (witnesses) of Miller-Rabin testing.
    :type k: int
    :return: False if the number is composite, True if it's probably prime.
    :rtype: bool
    """

    # Guard small values (also prevents an infinite loop when d would be 0).
    if n < 2:
        return False

    # Write n - 1 as (2 ** r) * d with d odd.
    d, r = n - 1, 0
    while d % 2 == 0:
        d //= 2
        r += 1

    for _ in range(k):
        # Random witness a with 2 <= a <= n - 2.
        witness = rsa.randnum.randint(n - 3) + 1

        x = pow(witness, d, n)
        if x in (1, n - 1):
            continue

        for _ in range(r - 1):
            x = pow(x, 2, n)
            if x == 1:
                # Non-trivial square root of 1 found: n is composite.
                return False
            if x == n - 1:
                # This witness is satisfied; move on to the next one.
                break
        else:
            # Inner loop exhausted without hitting n - 1: composite.
            return False

    return True
|
||||
|
||||
|
||||
def is_prime(number):
    """Return True when *number* is (probably) prime, False otherwise.

    >>> is_prime(2)
    True
    >>> is_prime(42)
    False
    >>> is_prime(41)
    True
    """

    # Small values: answer directly from a lookup.
    if number < 10:
        return number in {2, 3, 5, 7}

    # Even numbers above 2 are never prime.
    if number % 2 == 0:
        return False

    # One round more than the NIST minimum for this bit size.
    rounds = get_primality_testing_rounds(number)
    return miller_rabin_primality_testing(number, rounds + 1)
|
||||
|
||||
|
||||
def getprime(nbits):
    """Return a random prime number that can be stored in *nbits* bits.

    >>> p = getprime(128)
    >>> is_prime(p)
    True

    >>> from rsa import common
    >>> common.bit_size(p) == 128
    True
    """

    assert nbits > 3  # the candidate loop would hang on tiny sizes

    # Draw random odd candidates until one passes the primality test.
    while True:
        candidate = rsa.randnum.read_random_odd_int(nbits)
        if is_prime(candidate):
            return candidate
|
||||
|
||||
# Retry if not prime
|
||||
|
||||
|
||||
def are_relatively_prime(a, b):
    """Return True when *a* and *b* share no common divisor greater than 1.

    >>> are_relatively_prime(2, 3)
    True
    >>> are_relatively_prime(2, 4)
    False
    """

    # Euclid's algorithm, inlined rather than delegated to gcd().
    x, y = a, b
    while y:
        x, y = y, x % y
    return x == 1
|
||||
|
||||
|
||||
if __name__ == '__main__':
    print('Running doctests 1000x or until failure')
    import doctest

    for iteration in range(1000):
        failures, _ = doctest.testmod()
        if failures:
            break
        # Progress marker every 100 successful rounds (skip round 0).
        if iteration and iteration % 100 == 0:
            print('%i times' % iteration)

    print('Doctests done')
|
||||
98
Lambdas/Websocket Authorizer/rsa/randnum.py
Normal file
98
Lambdas/Websocket Authorizer/rsa/randnum.py
Normal file
@@ -0,0 +1,98 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Functions for generating random numbers."""
|
||||
|
||||
# Source inspired by code by Yesudeep Mangalapilly <yesudeep@gmail.com>
|
||||
|
||||
import os
|
||||
|
||||
from rsa import common, transform
|
||||
from rsa._compat import byte
|
||||
|
||||
|
||||
def read_random_bits(nbits):
    """Read *nbits* random bits from the OS entropy source.

    When *nbits* is not a whole number of bytes, one extra byte holding
    only the leftover low-order bits is prepended.
    """

    full_bytes, extra_bits = divmod(nbits, 8)

    # Whole bytes come straight from the OS CSPRNG.
    randomdata = os.urandom(full_bytes)

    # Prepend a partial byte carrying just the remaining bits.
    if extra_bits > 0:
        leftover = ord(os.urandom(1)) >> (8 - extra_bits)
        randomdata = byte(leftover) + randomdata

    return randomdata
|
||||
|
||||
|
||||
def read_random_int(nbits):
    """Return a random integer of approximately *nbits* bits.

    The most significant bit is forced on so the value really occupies the
    full requested bit width.
    """

    value = transform.bytes2int(read_random_bits(nbits))
    return value | (1 << (nbits - 1))
|
||||
|
||||
|
||||
def read_random_odd_int(nbits):
    """Return a random odd integer of approximately *nbits* bits.

    >>> read_random_odd_int(512) & 1
    1
    """

    # Setting the lowest bit makes any integer odd.
    return read_random_int(nbits) | 1
|
||||
|
||||
|
||||
def randint(maxvalue):
    """Return a random integer x with 1 <= x <= maxvalue.

    Candidates are drawn by bit count and rejected when too large, so this
    can take a long time when maxvalue sits just above a power of two. To
    speed convergence, the bit count is reduced by one after every ten
    oversized draws.
    """

    nbits_avail = common.bit_size(maxvalue)

    misses = 0
    while True:
        candidate = read_random_int(nbits_avail)
        if candidate <= maxvalue:
            return candidate

        if misses and misses % 10 == 0:
            # Too many oversized draws at this width: drop one bit, which
            # dramatically raises the chance of landing below maxvalue.
            nbits_avail -= 1
        misses += 1
|
||||
215
Lambdas/Websocket Authorizer/rsa/transform.py
Normal file
215
Lambdas/Websocket Authorizer/rsa/transform.py
Normal file
@@ -0,0 +1,215 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Data transformation functions.
|
||||
|
||||
From bytes to a number, number to bytes, etc.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import binascii
|
||||
from struct import pack
|
||||
|
||||
from rsa._compat import byte, is_integer
|
||||
from rsa import common, machine_size
|
||||
|
||||
|
||||
def bytes2int(raw_bytes):
    r"""Converts a list of bytes or an 8-bit string to an integer.

    When using unicode strings, encode it to some encoding like UTF8 first.

    >>> (((128 * 256) + 64) * 256) + 15
    8405007
    >>> bytes2int(b'\x80@\x0f')
    8405007

    """

    # hexlify renders the bytes big-endian as hexadecimal text; parsing that
    # text in base 16 reconstructs the integer.
    hex_repr = binascii.hexlify(raw_bytes)
    return int(hex_repr, 16)
|
||||
|
||||
|
||||
def _int2bytes(number, block_size=None):
    r"""Converts a number to a string of bytes.

    Usage::

        >>> _int2bytes(123456789)
        b'\x07[\xcd\x15'
        >>> bytes2int(_int2bytes(123456789))
        123456789

        >>> _int2bytes(123456789, 6)
        b'\x00\x00\x07[\xcd\x15'
        >>> bytes2int(_int2bytes(123456789, 128))
        123456789

        >>> _int2bytes(123456789, 3)
        Traceback (most recent call last):
        ...
        OverflowError: Needed 4 bytes for number, but block size is 3

    @param number: the number to convert
    @param block_size: the number of bytes to output. If the number encoded to
        bytes is less than this, the block will be zero-padded. When not given,
        the returned block is not padded.

    @throws OverflowError when block_size is given and the number takes up more
        bytes than fit into the block.
    """

    if not is_integer(number):
        raise TypeError("You must pass an integer for 'number', not %s" %
                        number.__class__)

    if number < 0:
        raise ValueError('Negative numbers cannot be used: %i' % number)

    # Zero is a special case: it encodes to a single NUL byte even though the
    # conversion loop below would produce nothing for it.
    if number == 0:
        needed_bytes = 1
        chunks = [b'\x00']
    else:
        needed_bytes = common.byte_size(number)
        chunks = []

    # Guard against None before comparing; None > 0 raises on Python 3.
    if block_size and block_size > 0:
        if needed_bytes > block_size:
            raise OverflowError('Needed %i bytes for number, but block size '
                                'is %i' % (needed_bytes, block_size))

    # Peel off the least significant byte each round, then restore big-endian
    # order by reversing.
    while number > 0:
        chunks.append(byte(number & 0xFF))
        number >>= 8
    chunks.reverse()

    # Zero-pad the front of the block when a fixed block size was requested.
    if block_size and block_size > 0:
        padding = (block_size - needed_bytes) * b'\x00'
    else:
        padding = b''

    return padding + b''.join(chunks)
|
||||
|
||||
|
||||
def bytes_leading(raw_bytes, needle=b'\x00'):
    r"""
    Finds the number of prefixed byte occurrences in the haystack.

    Useful when you want to deal with padding.

    :param raw_bytes:
        Raw bytes.
    :param needle:
        The byte to count. Default \x00.
    :returns:
        The number of leading needle bytes.
    """

    # Indexing the needle yields the same element type that iterating over
    # raw_bytes produces (int on Python 3, str on Python 2).
    target = needle[0]
    count = 0
    for current in raw_bytes:
        if current != target:
            break
        count += 1
    return count
|
||||
|
||||
|
||||
def int2bytes(number, fill_size=None, chunk_size=None, overflow=False):
    """
    Convert an unsigned integer to bytes (base-256 representation)::

    Does not preserve leading zeros if you don't specify a chunk size or
    fill size.

    .. NOTE:
        You must not specify both fill_size and chunk_size. Only one
        of them is allowed.

    :param number:
        Integer value
    :param fill_size:
        If the optional fill size is given the length of the resulting
        byte string is expected to be the fill size and will be padded
        with prefix zero bytes to satisfy that length.
    :param chunk_size:
        If optional chunk size is given and greater than zero, pad the front of
        the byte string with binary zeros so that the length is a multiple of
        ``chunk_size``.
    :param overflow:
        ``False`` (default). If this is ``True``, no ``OverflowError``
        will be raised when the fill_size is shorter than the length
        of the generated byte sequence. Instead the byte sequence will
        be returned as is.
    :returns:
        Raw bytes (base-256 representation).
    :raises:
        ``OverflowError`` when fill_size is given and the number takes up more
        bytes than fit into the block. This requires the ``overflow``
        argument to this function to be set to ``False`` otherwise, no
        error will be raised.
    """

    if number < 0:
        raise ValueError("Number must be an unsigned integer: %d" % number)

    if fill_size and chunk_size:
        raise ValueError("You can either fill or pad chunks, but not both")

    # Ensure these are integers.
    # (A non-int raises TypeError here, which is the cheap type check.)
    number & 1

    raw_bytes = b''

    # Pack the integer one machine word at a time into bytes.
    # get_word_alignment picks the largest word format that fits the value.
    num = number
    word_bits, _, max_uint, pack_type = machine_size.get_word_alignment(num)
    pack_format = ">%s" % pack_type
    while num > 0:
        raw_bytes = pack(pack_format, num & max_uint) + raw_bytes
        num >>= word_bits
    # Obtain the index of the first non-zero byte.
    # NOTE(review): zero_leading is computed before the number == 0
    # special-case below; for zero it is 0, so the slice keeps b'\x00'.
    zero_leading = bytes_leading(raw_bytes)
    if number == 0:
        raw_bytes = b'\x00'
    # De-padding.
    raw_bytes = raw_bytes[zero_leading:]

    length = len(raw_bytes)
    if fill_size and fill_size > 0:
        if not overflow and length > fill_size:
            raise OverflowError(
                "Need %d bytes for number, but fill size is %d" %
                (length, fill_size)
            )
        raw_bytes = raw_bytes.rjust(fill_size, b'\x00')
    elif chunk_size and chunk_size > 0:
        remainder = length % chunk_size
        if remainder:
            padding_size = chunk_size - remainder
            raw_bytes = raw_bytes.rjust(length + padding_size, b'\x00')
    return raw_bytes
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Run the doctests embedded in this module's docstrings.
    import doctest

    doctest.testmod()
|
||||
79
Lambdas/Websocket Authorizer/rsa/util.py
Normal file
79
Lambdas/Websocket Authorizer/rsa/util.py
Normal file
@@ -0,0 +1,79 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Utility functions."""
|
||||
|
||||
from __future__ import with_statement, print_function
|
||||
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
import rsa.key
|
||||
|
||||
|
||||
def private_to_public():
    """Reads a private key and outputs the corresponding public key.

    Command-line entry point: reads a PKCS#1 private key (PEM or DER) from a
    file or stdin, derives the public key from its (n, e) fields, and writes
    the public key to a file or stdout. Progress messages go to stderr so
    that stdout stays clean for the key data itself.
    """

    # Parse the CLI options
    parser = OptionParser(usage='usage: %prog [options]',
                          description='Reads a private key and outputs the '
                          'corresponding public key. Both private and public keys use '
                          'the format described in PKCS#1 v1.5')

    parser.add_option('-i', '--input', dest='infilename', type='string',
                      help='Input filename. Reads from stdin if not specified')
    # Fixed typo in the help text: "of not specified" -> "if not specified".
    parser.add_option('-o', '--output', dest='outfilename', type='string',
                      help='Output filename. Writes to stdout if not specified')

    parser.add_option('--inform', dest='inform',
                      help='key format of input - default PEM',
                      choices=('PEM', 'DER'), default='PEM')

    parser.add_option('--outform', dest='outform',
                      help='key format of output - default PEM',
                      choices=('PEM', 'DER'), default='PEM')

    (cli, cli_args) = parser.parse_args(sys.argv)

    # Read the input data
    if cli.infilename:
        print('Reading private key from %s in %s format' %
              (cli.infilename, cli.inform), file=sys.stderr)
        with open(cli.infilename, 'rb') as infile:
            in_data = infile.read()
    else:
        print('Reading private key from stdin in %s format' % cli.inform,
              file=sys.stderr)
        in_data = sys.stdin.read().encode('ascii')

    # isinstance is the idiomatic type check (also accepts bytes subclasses).
    assert isinstance(in_data, bytes), type(in_data)

    # Take the public fields and create a public key
    priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform)
    pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e)

    # Save to the output file
    out_data = pub_key.save_pkcs1(cli.outform)

    if cli.outfilename:
        print('Writing public key to %s in %s format' %
              (cli.outfilename, cli.outform), file=sys.stderr)
        with open(cli.outfilename, 'wb') as outfile:
            outfile.write(out_data)
    else:
        print('Writing public key to stdout in %s format' % cli.outform,
              file=sys.stderr)
        sys.stdout.write(out_data.decode('ascii'))
|
||||
952
Lambdas/Websocket Authorizer/six.py
Normal file
952
Lambdas/Websocket Authorizer/six.py
Normal file
@@ -0,0 +1,952 @@
|
||||
# Copyright (c) 2010-2018 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""

from __future__ import absolute_import

import functools
import itertools
import operator
import sys
import types

__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.12.0"


# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)

# Aliases for the type families whose names differ between Python 2 and 3.
if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        # Probe indirectly: len() of an object claiming 2**31 elements
        # overflows on a 32-bit Py_ssize_t but succeeds on 64-bit.
        class X(object):

            def __len__(self):
                return 1 << 31
        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
    # Descriptor that resolves its target lazily on first attribute access,
    # then removes itself from the class so later accesses are plain lookups.

    def __init__(self, name):
        self.name = name

    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result)  # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
    # Lazy reference to a module that was renamed between Python 2 and 3.
    # Stores the version-appropriate module name and imports it on demand.

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            # When no new name is given, the Python 3 name equals `name`.
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)

    def __getattr__(self, attr):
        # Resolve the real module, then cache the attribute on this object
        # so subsequent lookups skip __getattr__.
        _module = self._resolve()
        value = getattr(_module, attr)
        setattr(self, attr, value)
        return value
|
||||
|
||||
|
||||
class _LazyModule(types.ModuleType):
    # Module-like container whose attributes are _LazyDescr instances,
    # resolved only when first accessed.

    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__

    def __dir__(self):
        # Advertise the lazily provided names alongside the basics.
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs

    # Subclasses should override this
    _moved_attributes = []
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
    # Lazy reference to an attribute (function/class) that moved to a
    # different module and possibly changed name between Python 2 and 3.

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            # Fall back: new attr name defaults to old attr name, and that
            # in turn defaults to `name`.
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        # Import the owning module and fetch the attribute from it.
        module = _import_module(self.mod)
        return getattr(module, self.attr)
|
||||
|
||||
|
||||
class _SixMetaPathImporter(object):

    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        # Maps fully-qualified names ("<six>.moves.x") to module objects.
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        # Register `mod` under one or more aliases relative to this package.
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        # PEP 302 finder: claim only names we registered.
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        # PEP 302 loader: return the cached/registered module, resolving
        # MovedModule placeholders to the real module first.
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code
|
||||
|
||||
# Single importer instance that will serve six.moves.* imports.
_importer = _SixMetaPathImporter(__name__)


class _MovedItems(_LazyModule):

    """Lazy loading of moved objects"""
    __path__ = []  # mark as package
|
||||
|
||||
|
||||
# Table of stdlib names that were moved/renamed between Python 2 and 3.
# Each entry is (exposed name, Python 2 location, Python 3 location[, attrs]).
_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("intern", "__builtin__", "sys"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
    MovedAttribute("getoutput", "commands", "subprocess"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserDict", "UserDict", "collections"),
    MovedAttribute("UserList", "UserList", "collections"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
    _moved_attributes += [
        MovedModule("winreg", "_winreg"),
    ]

# Install each entry on _MovedItems; moved modules are also registered with
# the meta-path importer so "six.moves.<name>" imports work.
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
    if isinstance(attr, MovedModule):
        _importer._add_module(attr, "moves." + attr.name)
del attr

_MovedItems._moved_attributes = _moved_attributes

# The public six.moves pseudo-module.
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_parse(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_parse"""


# URL-parsing names: split across urlparse/urllib on Python 2, unified
# under urllib.parse on Python 3.
_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("splitvalue", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_error(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_error"""


# URL-error exception types, unified under urllib.error on Python 3.
_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_request(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_request"""


# Request machinery: urllib2/urllib on Python 2, urllib.request on Python 3.
_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
    MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
    MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_response(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_response"""


# Response wrapper classes, moved to urllib.response on Python 3.
_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_robotparser(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_robotparser"""


# robotparser became urllib.robotparser on Python 3.
_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")
|
||||
|
||||
|
||||
class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
    __path__ = []  # mark as package
    # Submodules are the lazily-registered urllib pieces created above.
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")
|
||||
|
||||
|
||||
def add_move(move):
    """Add an item to six.moves."""
    # `move` is expected to be a MovedAttribute/MovedModule; its .name
    # becomes the attribute under which it is exposed on six.moves.
    setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
    """Remove item from six.moves."""
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        # Not a class attribute; the move may live directly on the module's
        # instance dict (e.g. a resolved lazy attribute).
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
# Attribute-name strings for introspecting methods/functions; the dunder
# names are Python 3, the im_*/func_* names are Python 2.
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"
|
||||
|
||||
|
||||
# `next` is a builtin from Python 2.6 on; fall back to calling .next()
# directly on very old interpreters.
try:
    advance_iterator = next
except NameError:
    def advance_iterator(it):
        return it.next()
next = advance_iterator
|
||||
|
||||
|
||||
try:
    # callable() exists on Python 2 and was restored in Python 3.2+.
    callable = callable
except NameError:
    # Python 3.0/3.1: emulate callable() by scanning the MRO for __call__.
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||
|
||||
|
||||
if PY3:
    def get_unbound_function(unbound):
        # Python 3 has no unbound methods; plain functions are returned as-is.
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        # A plain function already behaves as an unbound method in Python 3.
        return func

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        return types.MethodType(func, None, cls)

    class Iterator(object):
        # Base class mapping Python 2's next() onto __next__() so
        # subclasses only need to define __next__().

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")
|
||||
|
||||
|
||||
# Version-neutral accessors built from the attribute names selected above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
|
||||
|
||||
|
||||
if PY3:
    # Python 3: keys()/values()/items() return views; wrap in iter() so the
    # result is a true iterator, matching Python 2's iter* methods.
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        # For multidict-like objects that provide a lists() method.
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    # Python 2: the iter*/view* methods exist directly on dict.
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")
|
||||
|
||||
|
||||
if PY3:
    # Python 3: text is str; byte literals must be encoded explicitly.
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    # unittest renamed several assert methods across 3.x versions.
    _assertCountEqual = "assertCountEqual"
    if sys.version_info[1] <= 1:
        # Python 3.0/3.1 still used the Python 2 method names.
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash

    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
def assertCountEqual(self, *args, **kwargs):
    """Dispatch to the version-appropriate unittest method name."""
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    """Dispatch to the version-appropriate unittest method name."""
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    """Dispatch to the version-appropriate unittest method name."""
    return getattr(self, _assertRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
if PY3:
    # Python 3: exec is a real builtin function.
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        try:
            if value is None:
                value = tp()
            if value.__traceback__ is not tb:
                raise value.with_traceback(tb)
            raise value
        finally:
            # Break reference cycles through the traceback frames.
            value = None
            tb = None

else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        # The Python 2 exec statement is hidden inside a string so that
        # Python 3 can still parse this file.
        exec("""exec _code_ in _globs_, _locs_""")

    exec_("""def reraise(tp, value, tb=None):
    try:
        raise tp, value, tb
    finally:
        tb = None
""")
|
||||
|
||||
|
||||
# 'raise ... from ...' is a syntax error on Python 2, so the 3.x variants
# are defined inside exec_ strings that Python 2 never parses.
if sys.version_info[:2] == (3, 2):
    exec_("""def raise_from(value, from_value):
    try:
        if from_value is None:
            raise value
        raise value from from_value
    finally:
        value = None
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    try:
        raise value from from_value
    finally:
        value = None
""")
else:
    def raise_from(value, from_value):
        # Python 2 has no exception chaining; just raise the new value.
        raise value
|
||||
|
||||
|
||||
# Use the real print function when the builtins module provides one;
# otherwise fall back to a pure-Python backport.
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            # If any argument is unicode, the whole line must be unicode.
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # Wrap print_ to add the flush keyword introduced in Python 3.3.
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()

_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
# functools.wraps started recording __wrapped__ in Python 3.4; on older
# interpreters we layer that behaviour on top ourselves.
if sys.version_info[0:2] >= (3, 4):
    wraps = functools.wraps
else:
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        """Like functools.wraps, but also sets __wrapped__ on the result."""
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # The trick: return an instance of a throwaway metaclass whose only job
    # is to intercept the first real subclass creation and rebuild that
    # subclass with the intended metaclass and bases.
    class metaclass(type):

        @classmethod
        def __prepare__(cls, name, this_bases):
            # Delegate namespace preparation to the real metaclass.
            return meta.__prepare__(name, bases)

        def __new__(cls, name, this_bases, d):
            # Ignore this_bases (the temporary class) and use the real bases.
            return meta(name, bases, d)

    return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        # Rebuild the class body so the class can be re-created under
        # `metaclass` with the same name and bases.
        namespace = dict(cls.__dict__)
        slots = namespace.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for member in slots:
                # Drop the slot descriptors; type() will recreate them.
                namespace.pop(member)
        # These are created automatically and must not be carried over.
        namespace.pop('__dict__', None)
        namespace.pop('__weakref__', None)
        if hasattr(cls, '__qualname__'):
            namespace['__qualname__'] = cls.__qualname__
        return metaclass(cls.__name__, cls.__bases__, namespace)
    return wrapper
|
||||
|
||||
|
||||
def ensure_binary(s, encoding='utf-8', errors='strict'):
    """Coerce **s** to six.binary_type.

    For Python 2:
      - `unicode` -> encoded to `str`
      - `str` -> `str`

    For Python 3:
      - `str` -> encoded to `bytes`
      - `bytes` -> `bytes`

    Raises TypeError for any other input type.
    """
    # text_type and binary_type are disjoint, so branch order is irrelevant.
    if isinstance(s, binary_type):
        return s
    if isinstance(s, text_type):
        return s.encode(encoding, errors)
    raise TypeError("not expecting type '%s'" % type(s))
|
||||
|
||||
|
||||
def ensure_str(s, encoding='utf-8', errors='strict'):
    """Coerce *s* to `str`.

    For Python 2:
      - `unicode` -> encoded to `str`
      - `str` -> `str`

    For Python 3:
      - `str` -> `str`
      - `bytes` -> decoded to `str`

    Raises TypeError for any other input type.
    """
    # Reject unsupported types up front.
    if not isinstance(s, (text_type, binary_type)):
        raise TypeError("not expecting type '%s'" % type(s))
    if PY2 and isinstance(s, text_type):
        return s.encode(encoding, errors)
    if PY3 and isinstance(s, binary_type):
        return s.decode(encoding, errors)
    # Already the native str type for this interpreter.
    return s
|
||||
|
||||
|
||||
def ensure_text(s, encoding='utf-8', errors='strict'):
    """Coerce *s* to six.text_type.

    For Python 2:
      - `unicode` -> `unicode`
      - `str` -> `unicode`

    For Python 3:
      - `str` -> `str`
      - `bytes` -> decoded to `str`

    Raises TypeError for any other input type.
    """
    if isinstance(s, text_type):
        return s
    if isinstance(s, binary_type):
        return s.decode(encoding, errors)
    raise TypeError("not expecting type '%s'" % type(s))
|
||||
|
||||
|
||||
|
||||
def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    # Python 3: __str__ already returns text; nothing to adapt.
    if not PY2:
        return klass
    if '__str__' not in klass.__dict__:
        raise ValueError("@python_2_unicode_compatible cannot be applied "
                         "to %s because it doesn't define __str__()." %
                         klass.__name__)
    # Move the text-returning __str__ to __unicode__ and synthesize a
    # byte-returning __str__ from it, as Python 2 expects.
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
|
||||
|
||||
|
||||
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
|
||||
BIN
Lambdas/Websocket Authorizer/six.pyc
Normal file
BIN
Lambdas/Websocket Authorizer/six.pyc
Normal file
Binary file not shown.
Binary file not shown.
Reference in New Issue
Block a user